diff --git a/MacroLibX b/MacroLibX deleted file mode 160000 index 8267409..0000000 --- a/MacroLibX +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 826740906fa577bb414a75ae1f8acc3c806c0b1f diff --git a/MacroLibX/CONTRIBUTING.md b/MacroLibX/CONTRIBUTING.md new file mode 100644 index 0000000..4bbaa90 --- /dev/null +++ b/MacroLibX/CONTRIBUTING.md @@ -0,0 +1,17 @@ +# How to contribute to the MacroLibX + +For any questions, suggestions, or help, [contact me](mailto:contact@kbz8.me). + +## **Found a bug?** + +* Avoid opening a new issue without first checking whether your problem has already been reported. If no currently open issue fits your problem's description, feel free to [add it](https://github.com/seekrs/MacroLibX/issues/new). + +* When writing an issue, make sure to include a clear title and description, and fill out all the necessary information: system info, OS, OS version, ... + +* If possible, add pictures of the issue. + +## Contributing + +Before thinking of adding a contribution, think. Is it necessary? Will this actually be a useful/required feature? Is your implementation good? +Provide a clear and documented explanation of what was changed. + diff --git a/MacroLibX/LICENSE b/MacroLibX/LICENSE new file mode 100644 index 0000000..08cd0c4 --- /dev/null +++ b/MacroLibX/LICENSE @@ -0,0 +1,20 @@ + MIT License + Copyright (c) 2022-2024 kbz_8 + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.
diff --git a/MacroLibX/Makefile b/MacroLibX/Makefile new file mode 100644 index 0000000..831a281 --- /dev/null +++ b/MacroLibX/Makefile @@ -0,0 +1,96 @@ +# **************************************************************************** # +# # +# ::: :::::::: # +# Makefile :+: :+: :+: # +# +:+ +:+ +:+ # +# By: maldavid +#+ +:+ +#+ # +# +#+#+#+#+#+ +#+ # +# Created: 2022/10/04 16:43:41 by maldavid #+# #+# # +# Updated: 2023/12/31 01:09:30 by maldavid ### ########.fr # +# # +# **************************************************************************** # + +NAME = libmlx.so + +SRCS = $(wildcard $(addsuffix /*.cpp, ./src/core)) +SRCS += $(wildcard $(addsuffix /*.cpp, ./src/platform)) +SRCS += $(wildcard $(addsuffix /*.cpp, ./src/renderer)) +SRCS += $(wildcard $(addsuffix /*.cpp, ./src/renderer/**)) + +OBJ_DIR = objs/makefile +OBJS = $(addprefix $(OBJ_DIR)/, $(SRCS:.cpp=.o)) + +OS = $(shell uname -s) +DEBUG ?= false +TOOLCHAIN ?= clang +IMAGES_OPTIMIZED ?= true +FORCE_INTEGRATED_GPU ?= false +GRAPHICS_MEMORY_DUMP ?= false + +MODE = "release" + +CXX = clang++ + +CXXFLAGS = -std=c++17 -O3 -fPIC -Wall -Wextra -Werror -DSDL_MAIN_HANDLED +INCLUDES = -I./includes -I./src -I./third_party + +LDLIBS = + +ifeq ($(TOOLCHAIN), gcc) + CXX = g++ + CXXFLAGS += -Wno-error=cpp +else + CXXFLAGS += -Wno-error=\#warnings # '#' must be escaped or make treats the rest of the line as a comment +endif + +ifeq ($(OS), Darwin) + LDLIBS += -L /opt/homebrew/lib -lSDL2 + CXXFLAGS += -I /opt/homebrew/include + NAME = libmlx.dylib +endif + +ifeq ($(DEBUG), true) + CXXFLAGS += -g -D DEBUG + MODE = "debug" +endif + +ifeq ($(FORCE_INTEGRATED_GPU), true) + CXXFLAGS += -D FORCE_INTEGRATED_GPU +endif + +ifeq ($(IMAGES_OPTIMIZED), true) + CXXFLAGS += -D IMAGE_OPTIMIZED +endif + +ifeq ($(GRAPHICS_MEMORY_DUMP), true) + CXXFLAGS += -D GRAPHICS_MEMORY_DUMP +endif + +RM = rm -rf + +$(OBJ_DIR)/%.o: %.cpp + @printf "\033[1;32m[compiling... "$(MODE)" "$(CXX)"]\033[1;00m "$<"\n" + @$(CXX) $(CXXFLAGS) $(INCLUDES) -c $< -o $@ + +all: $(NAME) + +$(NAME): $(OBJ_DIR) $(OBJS) + @printf "\033[1;32m[linking ... "$(MODE)"]\033[1;00m "$@"\n" + @$(CXX) -shared -o $(NAME) $(OBJS) $(LDLIBS) + @printf "\033[1;32m[build finished]\033[1;00m\n" + +$(OBJ_DIR): + @mkdir -p $(sort $(addprefix $(OBJ_DIR)/, $(dir $(SRCS)))) + @printf "\033[1;32m[created objs directory]\033[1;00m\n" + +clean: + @$(RM) $(OBJ_DIR) + @printf "\033[1;32m[object files removed]\033[1;00m\n" + +fclean: clean + @$(RM) $(NAME) + @printf "\033[1;32m["$(NAME)" removed]\033[1;00m\n" + +re: fclean all + +.PHONY: all clean fclean re diff --git a/MacroLibX/README.md b/MacroLibX/README.md new file mode 100644 index 0000000..f0f654b --- /dev/null +++ b/MacroLibX/README.md @@ -0,0 +1,113 @@ +
+ [MacroLibX logo]
+ +###### MacroLibX, a rewrite of 42 School's MiniLibX using SDL2 and Vulkan. +The goal of this version is to provide a light, fast, and modern graphical tool while keeping the same API. + +## 🌟 Features + +### 🚀 Performance +Built on top of Vulkan, the MacroLibX takes advantage of its very low-level nature to achieve high performance with great control over available resources. + +### 💻 Cross-Platform +Designed to be totally cross-platform, it can run on any SDL2-supported platform that supports Vulkan (even the Nintendo Switch! Theoretically...). + +### 🗿 Close to the old minilibx +One of the guidelines of this lib was to stay as close as possible to the old minilibx API, and therefore to the educational choices of the old minilibx. + +### 📖 It's all FOSS +Everything in this repo is entirely free and open source, all available under the MIT license (even the third-party libraries used). + +### 🔍 Valgrind suppressions file +Experimental for now, a [suppressions file for valgrind](./valgrind.supp) is provided to filter out potential leaks coming from Xorg, Nvidia drivers, SDL2, or any other component over which the user has no control. It is far from perfect at the moment and may let some leaks through, but it blocks the majority. + +### ⛔ Error system +Strong error handling that informs users of problems in their code and can even report graphics memory leaks that tools like Valgrind cannot detect. + +## 🖥️ Installation + +### Dependencies +You first need to install the proper dependencies for your operating system. + +#### 🐧 Linux +Here are a few common cases for different Linux distributions: +
+ + For Ubuntu/Debian-based distros: + +
sudo apt update
+sudo apt install libsdl2-2.0-0 libsdl2-dev build-essential
+
+
+ +
+ + For ArchLinux-based distros: + +
sudo pacman -S sdl2
+
+ +
+Note that you need up-to-date video drivers with libvulkan.so installed. + +#### 🍎 macOS +[MacroLibX](#) on macOS requires [SDL2](#) and [MoltenVK](https://github.com/KhronosGroup/MoltenVK). You can install both using the [Homebrew](https://brew.sh) package manager: +```sh +brew install molten-vk +brew install SDL2 +``` + +#### 🪟 Windows +To build on Windows, you may need to use the [xmake](https://xmake.io) build system. [Here's](./XMAKE_BUILD.md) how to use it. + +### Clone and Build +Finally, you can clone the Git repository. Once inside it, run the GNU `make` command to compile MacroLibX. +```bash +git clone https://github.com/seekrs/MacroLibX.git +cd MacroLibX +make +``` + +## 🔨 Compile your project +To compile your project with MacroLibX, just provide the shared library path in your compilation/linking command: + +```sh +clang myApp.c /path/to/MacroLibX/libmlx.so -lSDL2 +``` + +And you can enjoy your project.
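If you want a quick smoke test of the freshly built library, here is a minimal sketch you could drop into `myApp.c` (the window size, title, and include path are placeholder choices to adapt to your layout; key code 41 is the SDL2 scancode for ESC, the same value the bundled example uses):

```c
#include "MacroLibX/includes/mlx.h" /* adjust to your project layout */

typedef struct s_app
{
	void	*mlx;
	void	*win;
}	t_app;

/* Quit the main loop when ESC (SDL2 scancode 41) is pressed. */
static int	key_hook(int key, void *param)
{
	if (key == 41)
		mlx_loop_end(((t_app *)param)->mlx);
	return (0);
}

int	main(void)
{
	t_app	app;

	app.mlx = mlx_init();
	app.win = mlx_new_window(app.mlx, 400, 400, "hello mlx");
	mlx_on_event(app.mlx, app.win, MLX_KEYDOWN, key_hook, &app);
	mlx_loop(app.mlx);
	mlx_destroy_window(app.mlx, app.win);
	mlx_destroy_display(app.mlx);
	return (0);
}
```

If a window opens and closes on ESC, the library and its SDL2/Vulkan dependencies are set up correctly.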

+ [demo screenshot]

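One pitfall worth knowing before tuning the build options below: as the documentation in `includes/mlx.h` points out, image pixels are encoded as 0xRRGGBBAA while `mlx_pixel_put` expects 0x00RRGGBB, so colors read from an image must be shifted one byte to the right before being put to a window. A small sketch of that conversion (the PNG path and coordinates are illustrative):

```c
#include "MacroLibX/includes/mlx.h" /* adjust to your project layout */

/* Copy the top-left pixel of a PNG onto the window at (5, 5).
** mlx_get_image_pixel returns 0xRRGGBBAA; the unsigned shift drops the
** alpha byte and yields the 0x00RRGGBB that mlx_pixel_put takes. */
void	put_image_corner_pixel(void *mlx, void *win)
{
	void	*img;
	int		w;
	int		h;
	int		color;

	img = mlx_png_file_to_image(mlx, "42_logo.png", &w, &h);
	color = mlx_get_image_pixel(mlx, img, 0, 0);
	mlx_pixel_put(mlx, win, 5, 5, (int)((unsigned int)color >> 8));
	mlx_destroy_image(mlx, img);
}
```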
+ +## ⚙️ Some compilation configurations + +### 📦 Compile mode +By default, the mlx is built in release mode, but you can switch to debug mode with `make DEBUG=true`. + +### 🛠️ Set the toolchain +If you want to use `GCC` to build the mlx, you can use `make TOOLCHAIN=gcc`. + +### ⚠️⚠️⚠️ 🖼️ Image optimisations ⚠️⚠️⚠️ +If you run into glitches when writing or reading pixels from images, you can turn off image optimisations with `make IMAGES_OPTIMIZED=false`. + +### 🖥️ Force the integrated GPU (not recommended) +You can force the mlx to use your integrated GPU with `make FORCE_INTEGRATED_GPU=true`. Note that your application is very likely to crash with this option. + +### 💽 Dump the graphics memory +The mlx can dump its graphics memory usage to JSON files every two seconds when built with `make GRAPHICS_MEMORY_DUMP=true`. + +## License +This project and all its files, including the [`third_party`](./third_party) directory unless otherwise mentioned, are licensed under the [MIT license](./LICENSE). diff --git a/MacroLibX/XMAKE_BUILD.md b/MacroLibX/XMAKE_BUILD.md new file mode 100644 index 0000000..9022f40 --- /dev/null +++ b/MacroLibX/XMAKE_BUILD.md @@ -0,0 +1,47 @@ +# 🏗️ xmake build +To build on Windows (if you don't use [WSL](https://learn.microsoft.com/en-us/windows/wsl/install)), the MacroLibX uses [xmake](https://xmake.io), a build system that downloads and compiles any dependency it cannot find on your computer. + +## 💾 Install xmake +You can find how to install it on your system [here](https://xmake.io/#/guide/installation). Note that you can also download a [portable version](https://github.com/xmake-io/xmake/releases) of xmake if you prefer not to install it. + +## ⚙️ Configure the MacroLibX build +Just like the Makefile build system, you can configure how xmake should build the MacroLibX. The base command to configure it is `xmake config [opts...]` or `xmake f [opts...]`. + +### 📦 Compile mode +You can configure xmake to build the mlx in debug mode or in release mode (release mode is enabled by default). To do so, use `xmake config --mode=debug` or `xmake config --mode=release`. + +### 🛠️ Set the toolchain +To change the compilation toolchain, use `xmake config --toolchain=[gcc|clang|...]`. + +### ⚠️⚠️⚠️ 🖼️ Image optimisations ⚠️⚠️⚠️ +If you run into glitches when writing or reading pixels from images, you can turn off image optimisations with `xmake config --images_optimized=n`. + +### 🖥️ Force the integrated GPU (not recommended) +You can force the mlx to use your integrated GPU with `xmake config --force_integrated_gpu=y`. Note that your application is very likely to crash with this option. + +### 💽 Dump the graphics memory +The mlx can dump its graphics memory usage to JSON files every two seconds when configured with `xmake config --graphics_memory_dump=y`. + +### 🪛 A possible build configuration +As an example, here's what a full configuration command can look like: `xmake config --mode=debug --toolchain=clang --graphics_memory_dump=y --images_optimized=n` + +## 🚧 Build the lib + +### Compile using command-line (first method) +Once you're ready to compile the lib, run `xmake` (or `xmake -jX`, with X the number of threads to use, if you don't want to use every thread of your machine) and watch as the lib compiles.
+ +### Generate a project (second method) +xmake can also generate a project file for another tool: +* Visual Studio: `xmake project -k vs` +* CMakeLists.txt (which you can open in CLion and more): `xmake project -k cmake` +* Makefile: `xmake project -k make` +* Ninja: `xmake project -k ninja` +* XCode: `xmake project -k xcode` + +You should now be able to open the project file with the tool of your choice. + +## 😋 Enjoy +Enjoy your project built with the mlx! +

+ [demo screenshot]

diff --git a/MacroLibX/compile_commands.json b/MacroLibX/compile_commands.json new file mode 100644 index 0000000..ff20728 --- /dev/null +++ b/MacroLibX/compile_commands.json @@ -0,0 +1,642 @@ +[ + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/text_pipeline.o", + "src/renderer/text_pipeline.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/text_pipeline.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/text_pipeline.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/buffers/vk_vbo.o", + "src/renderer/buffers/vk_vbo.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/buffers/vk_vbo.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/buffers/vk_vbo.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/pixel_put.o", + "src/renderer/pixel_put.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/pixel_put.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/pixel_put.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/core/graphics.o", + "src/core/graphics.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/core/graphics.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/core/graphics.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/buffers/vk_ubo.o", + "src/renderer/buffers/vk_ubo.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/buffers/vk_ubo.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/buffers/vk_ubo.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/platform/inputs.o", + "src/platform/inputs.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/platform/inputs.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/platform/inputs.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/renderer.o", + "src/renderer/renderer.cpp" + ], + "directory": 
"/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/renderer.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/renderer.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/text_library.o", + "src/renderer/text_library.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/text_library.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/text_library.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/buffers/vk_buffer.o", + "src/renderer/buffers/vk_buffer.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/buffers/vk_buffer.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/buffers/vk_buffer.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/command/cmd_manager.o", + "src/renderer/command/cmd_manager.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/command/cmd_manager.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/command/cmd_manager.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/core/vk_device.o", + "src/renderer/core/vk_device.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/vk_device.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/vk_device.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/command/vk_cmd_buffer.o", + "src/renderer/command/vk_cmd_buffer.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/command/vk_cmd_buffer.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/command/vk_cmd_buffer.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/core/render_core.o", + "src/renderer/core/render_core.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/render_core.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/render_core.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + 
"-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/command/vk_cmd_pool.o", + "src/renderer/command/vk_cmd_pool.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/command/vk_cmd_pool.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/command/vk_cmd_pool.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/core/vk_instance.o", + "src/renderer/core/vk_instance.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/vk_instance.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/vk_instance.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/core/vk_semaphore.o", + "src/renderer/core/vk_semaphore.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/vk_semaphore.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/vk_semaphore.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/core/vk_queues.o", + "src/renderer/core/vk_queues.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/vk_queues.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/vk_queues.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/core/vk_validation_layers.o", + "src/renderer/core/vk_validation_layers.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/vk_validation_layers.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/vk_validation_layers.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/core/vk_surface.o", + "src/renderer/core/vk_surface.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/vk_surface.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/vk_surface.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/core/vk_fence.o", + "src/renderer/core/vk_fence.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": 
"/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/vk_fence.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/core/vk_fence.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/descriptors/vk_descriptor_pool.o", + "src/renderer/descriptors/vk_descriptor_pool.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/descriptors/vk_descriptor_pool.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/descriptors/vk_descriptor_pool.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/images/texture_atlas.o", + "src/renderer/images/texture_atlas.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/images/texture_atlas.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/images/texture_atlas.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/descriptors/vk_descriptor_set.o", + "src/renderer/descriptors/vk_descriptor_set.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/descriptors/vk_descriptor_set.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/descriptors/vk_descriptor_set.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/swapchain/vk_framebuffer.o", + "src/renderer/swapchain/vk_framebuffer.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/swapchain/vk_framebuffer.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/swapchain/vk_framebuffer.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/pipeline/pipeline.o", + "src/renderer/pipeline/pipeline.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/pipeline/pipeline.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/pipeline/pipeline.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/images/vk_image.o", + "src/renderer/images/vk_image.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/images/vk_image.cpp", + "output": 
"/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/images/vk_image.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/descriptors/vk_descriptor_set_layout.o", + "src/renderer/descriptors/vk_descriptor_set_layout.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/descriptors/vk_descriptor_set_layout.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/descriptors/vk_descriptor_set_layout.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/swapchain/vk_imageview.o", + "src/renderer/swapchain/vk_imageview.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/swapchain/vk_imageview.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/swapchain/vk_imageview.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/swapchain/vk_swapchain.o", + "src/renderer/swapchain/vk_swapchain.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/swapchain/vk_swapchain.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/swapchain/vk_swapchain.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/swapchain/vk_render_pass.o", + "src/renderer/swapchain/vk_render_pass.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/swapchain/vk_render_pass.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/swapchain/vk_render_pass.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/platform/window.o", + "src/platform/window.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/platform/window.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/platform/window.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++17", + "-O3", + "-fPIC", + "-D", + "IMAGE_OPTIMIZED", + "-I./includes", + "-I./src", + "-I./third_party", + "-c", + "-o", + "src/renderer/images/texture.o", + "src/renderer/images/texture.cpp" + ], + "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX", + "file": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/images/texture.cpp", + "output": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/src/renderer/images/texture.o" + } +] diff --git a/MacroLibX/example/42_logo.png b/MacroLibX/example/42_logo.png new file mode 100644 index 0000000..5d2b681 Binary 
files /dev/null and b/MacroLibX/example/42_logo.png differ diff --git a/MacroLibX/example/Test b/MacroLibX/example/Test new file mode 100755 index 0000000..2a36f9c Binary files /dev/null and b/MacroLibX/example/Test differ diff --git a/MacroLibX/example/build.sh b/MacroLibX/example/build.sh new file mode 100755 index 0000000..28c509c --- /dev/null +++ b/MacroLibX/example/build.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +if [ $(uname -s) = 'Darwin' ]; then + clang main.c ../libmlx.dylib -L /opt/homebrew/lib -lSDL2 -g; +else + clang main.c ../libmlx.so -lSDL2 -g; +fi + diff --git a/MacroLibX/example/font.ttf b/MacroLibX/example/font.ttf new file mode 100644 index 0000000..85c1472 Binary files /dev/null and b/MacroLibX/example/font.ttf differ diff --git a/MacroLibX/example/main.c b/MacroLibX/example/main.c new file mode 100644 index 0000000..3f55325 --- /dev/null +++ b/MacroLibX/example/main.c @@ -0,0 +1,122 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* main.c :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/04 17:55:21 by maldavid #+# #+# */ +/* Updated: 2024/01/03 12:29:31 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include <string.h> +#include <stdio.h> +#include "../includes/mlx.h" + +typedef struct s_mlx +{ + void *mlx; + void *win; + void *logo; + void *img; +} t_mlx; + +int update(void *param) +{ + static int i = 0; + int j; + int k; + t_mlx *mlx; + + mlx = (t_mlx *)param; + mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->logo, 100, 100); + mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->img, 150, 60); + if (i == 0) + mlx_set_font_scale(mlx->mlx, mlx->win, "font.ttf", 16.f); + mlx_string_put(mlx->mlx, mlx->win, 20, 50, 0xFFFFFFFF, "that's a text"); + j = 0; + k = 0; + while (j++ < 400) + { + mlx_pixel_put(mlx->mlx, mlx->win, j, j, 0xFFFF0000 + k); + mlx_pixel_put(mlx->mlx, mlx->win, 399 - j, j, 0xFF0000FF); + k += (k < 255); + } + if (++i == 5000) + mlx_clear_window(mlx->mlx, mlx->win); + if (i == 7000) + mlx_set_font_scale(mlx->mlx, mlx->win, "default", 16.f); + return (0); +} + +void *create_image(t_mlx *mlx) +{ + unsigned char pixel[4]; + int i[3]; + void *img; + + memset(i, 0, sizeof(int) * 3); + img = mlx_new_image(mlx->mlx, 100, 100); + while (i[0] < (100 * 100) * 4) + { + if (i[0] < 10000 || i[0] > 20000) + { + pixel[0] = i[0]; + pixel[1] = i[1]; + pixel[2] = i[2]; + pixel[3] = 0x99; + mlx_set_image_pixel(mlx->mlx, img, i[1], i[2], *((int *)pixel)); + } + i[0] += 4; + i[1]++; + if (i[1] >= 100) + { + i[1] = 0; + i[2]++; + } + } + return (img); +} + +int key_hook(int key, void *param) +{ + if (key == 41) + mlx_loop_end(((t_mlx *)param)->mlx); + return (0); +} + +int window_hook(int event, void *param) +{ + if (event == 0) + mlx_loop_end(((t_mlx *)param)->mlx); + return (0); +} + +int main(int argc, char* argv[]) +{ + t_mlx mlx; + void *img; + int w; + int h; + + (void)argc; + (void)argv; + mlx.mlx = mlx_init(); + mlx.win = mlx_new_window(mlx.mlx, 400, 400, "My window"); + mlx_on_event(mlx.mlx, mlx.win, MLX_KEYDOWN, key_hook, &mlx); + mlx_on_event(mlx.mlx, mlx.win, MLX_WINDOW_EVENT, window_hook, &mlx); + mlx.logo = mlx_png_file_to_image(mlx.mlx, "42_logo.png", &w, &h); + mlx_pixel_put(mlx.mlx, mlx.win, 200, 10, 0xFFFF00FF); + mlx_put_image_to_window(mlx.mlx, mlx.win, mlx.logo, 10, 190); + mlx.img = create_image(&mlx); + mlx_string_put(mlx.mlx, mlx.win, 20, 20, 0xFFFF2000, \ + "that text will disappear"); + mlx_loop_hook(mlx.mlx, update, &mlx); + mlx_loop(mlx.mlx); + mlx_destroy_image(mlx.mlx, mlx.img); + mlx_destroy_image(mlx.mlx, mlx.logo); + mlx_destroy_window(mlx.mlx, mlx.win); + mlx_destroy_display(mlx.mlx); + return (0); +} diff --git a/MacroLibX/example/run.sh b/MacroLibX/example/run.sh new file mode 100755 index 0000000..993cb20 --- /dev/null +++ b/MacroLibX/example/run.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +bash ./build.sh +./a.out diff --git a/MacroLibX/includes/mlx.h b/MacroLibX/includes/mlx.h new file mode 100644 index 0000000..14f4453 --- /dev/null +++ b/MacroLibX/includes/mlx.h @@ -0,0 +1,365 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* mlx.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ +/* Updated: 2023/12/27 17:19:50 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +// MacroLibX official repo https://github.com/seekrs/MacroLibX + +#ifndef __MACRO_LIB_X_H__ +#define __MACRO_LIB_X_H__ + +#include "mlx_profile.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef enum +{ + MLX_KEYDOWN = 0, + MLX_KEYUP = 1, + MLX_MOUSEDOWN = 2, + MLX_MOUSEUP = 3, + MLX_MOUSEWHEEL = 4, + MLX_WINDOW_EVENT = 5 +} mlx_event_type; + +/** + * @brief Initializes the MLX internal application + * + * @return (void*) An opaque pointer to the internal MLX application or NULL (0x0) in case of error + */ +MLX_API void* mlx_init(); + +/** + * @brief Creates a new window + * + * @param mlx Internal MLX application + * @param w Width of the window + * @param h Height of the window + * @param title Title of the window + * + * @return (void*) An opaque pointer to the internal MLX window or NULL (0x0) in case of error + */ +MLX_API void* mlx_new_window(void* mlx, int w, int h, const char* title); + +/** + * @brief Gives a function to be executed at each loop turn + * + * @param mlx Internal MLX application + * @param f The function + * @param param Parameter to give to the function passed + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_loop_hook(void* mlx, int (*f)(void*), void* param); + +/** + * @brief Starts the internal main loop + * + * @param mlx Internal MLX application + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_loop(void* mlx); + +/** + * @brief Ends the internal main loop + * + * @param mlx Internal MLX application + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_loop_end(void* mlx); + +/** + * @brief Shows the mouse cursor + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_mouse_show(); + +/** + * @brief Hides the mouse cursor + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_mouse_hide(); + +/** + * @brief Moves the cursor to the given position + * + * @param mlx Internal MLX application + * @param win Internal window in which the cursor moves + * @param x X coordinate + * @param y Y coordinate + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_mouse_move(void* mlx, void* win, int x, int y); + +/** + * @brief Gets the cursor's position + * + * @param mlx Internal MLX application + * @param x Get x coordinate + * @param y Get y coordinate + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_mouse_get_pos(void* mlx, int* x, int* y); + +/** + * @brief Gives a function to be executed on the given event type + * + * @param mlx Internal MLX application + * @param win Internal window + * @param event Event type (see the enum at the top of this file) + * @param f Function to be executed + * @param param Parameter given to the function + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_on_event(void* mlx, void* win, mlx_event_type event, int (*f)(int, void*), void* param); + +/** + * @brief Puts a pixel in the window + * + * @param mlx Internal MLX application + * @param win Internal window + * @param x X coordinate + * @param y Y coordinate + * @param color Color of the pixel (coded on 3 bytes in an int, 0x00RRGGBB) + * + * Note: if you're reading pixel colors from an image, don't forget to shift them + * one byte to the right, as image pixels are encoded as 0xRRGGBBAA while pixel put takes 0x00RRGGBB. + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_pixel_put(void* mlx, void* win, int x, int y, int color); + +/** + * @brief Creates a new empty image + * + * @param mlx Internal MLX application + * @param width Width of the image + * @param height Height of the image + * + * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error + */ +MLX_API void* mlx_new_image(void* mlx, int width, int height); + +/** + * @brief Gets image pixel data + * + * @param mlx Internal MLX application + * @param img Internal image + * @param x X coordinate in the image + * @param y Y coordinate in the image + * + * @return (int) Returns the pixel data + * + * /!\ If you run into glitches when writing or reading pixels from images /!\ + * You need to add IMAGES_OPTIMIZED=false to your make mlx command + * ``` + * ~ git clone https://github.com/seekrs/MacroLibX.git + * ~ cd MacroLibX + * ~ make IMAGES_OPTIMIZED=false + * ``` + */ +MLX_API int mlx_get_image_pixel(void* mlx, void* img, int x, int y); + +/** + * @brief Sets image pixel data + * + * @param mlx Internal MLX application + * @param img Internal image + * @param x X coordinate in the image + * @param y Y coordinate in the image + * @param color Color of the pixel to set + * + * @return (void) + * + * /!\ If you run into glitches when writing or reading pixels from images /!\ + * You need to add IMAGES_OPTIMIZED=false to your make mlx command + * ``` + * ~ git clone https://github.com/seekrs/MacroLibX.git + * ~ cd MacroLibX + * ~ make IMAGES_OPTIMIZED=false + * ``` + */ +MLX_API void mlx_set_image_pixel(void* mlx, void* img, int x, int y, int color); + +/** + * @brief Puts an image to the given window + * + * @param mlx Internal MLX application + * @param win Internal window + * @param img Internal image + * @param x X coordinate + * @param y Y coordinate + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_put_image_to_window(void* mlx, void* win, void* img, int x, int y); + +/** + * @brief Destroys an internal image + * + * @param mlx Internal MLX application + * @param img Internal image + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_destroy_image(void* mlx, void* img); + +/** + * @brief Creates a new image from a PNG file + * + * @param mlx Internal MLX application + * @param filename Path to the png file + * @param width Get the width of the image + * @param height Get the height of the image + * + * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error + */ +MLX_API void* mlx_png_file_to_image(void* mlx, char* filename, int* width, int* height); + +/** + * @brief Creates a new image from a JPG file + * + * @param mlx Internal MLX application + * @param filename Path to the jpg file + * @param width Get the width of the image + * @param height Get the height of the image + * + * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error + */ +MLX_API void* mlx_jpg_file_to_image(void* mlx, char* filename, int* width, int* height); + +/** + * @brief Creates a new image from a BMP file + * + * @param mlx Internal MLX application + * @param filename Path to the bmp file + * @param width Get the width of the image + * @param height Get the height of the image + * + * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error + */ +MLX_API void* mlx_bmp_file_to_image(void* mlx, char* filename, int* width, int* height); + +/** + * @brief Puts text in the given window + * + * @param mlx Internal MLX application + * @param win Internal window + * @param x X coordinate + * @param y Y coordinate + * @param color Color of the pixel (coded on 3 bytes in an int, 0x00RRGGBB) + * @param str Text to put + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_string_put(void* mlx, void* win, int x, int y, int color, char* str); + +/** + * @brief Loads a font to be used by `mlx_string_put` + * + * @param mlx Internal MLX application + * @param win Internal window + * @param filepath Filepath to the font or "default" to reset to the embedded font + * + * @return (void) + */ +MLX_API void mlx_set_font(void* mlx, void* win, char* filepath); + +/** + * @brief Loads a font to be used by `mlx_string_put` and scales it + * + * @param mlx Internal MLX application + * @param win Internal window + * @param filepath Filepath to the font or "default" to reset to the embedded font + * @param scale Scale to apply to the font + * + * @return (void) + */ +MLX_API void mlx_set_font_scale(void* mlx, void* win, char* filepath, float scale); + +/** + * @brief Clears the given window (resets all rendered data) + * + * @param mlx Internal MLX application + * @param win Internal window + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_clear_window(void* mlx, void* win); + +/** + * @brief Destroys an internal window + * + * @param mlx Internal MLX application + * @param win Internal window + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_destroy_window(void* mlx, void* win); + +/** + * @brief Destroys the internal MLX application + * + * @param mlx Internal MLX application + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_destroy_display(void* mlx); + +/** + * @brief Gets the screen size + * + * @param mlx Internal MLX application + * @param w Get the width + * @param h Get the height + * + * @return (int) Always returns 0; kept to mirror the behaviour of the original MLX + */ +MLX_API int mlx_get_screens_size(void* mlx, int* w, int* h); + +#ifdef __cplusplus +} +#endif + +#endif diff --git
a/MacroLibX/includes/mlx_profile.h b/MacroLibX/includes/mlx_profile.h new file mode 100644 index 0000000..90710bc --- /dev/null +++ b/MacroLibX/includes/mlx_profile.h @@ -0,0 +1,215 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* mlx_profile.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/11/10 08:49:17 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:33:35 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_PROFILE__ +#define __MLX_PROFILE__ + +// Try to identify the compiler +#if defined(__BORLANDC__) + #define MLX_COMPILER_BORDLAND +#elif defined(__clang__) + #define MLX_COMPILER_CLANG + #ifdef __MINGW32__ + #define MLX_COMPILER_MINGW + #ifdef __MINGW64_VERSION_MAJOR + #define MLX_COMPILER_MINGW_W64 + #endif + #endif +#elif defined(__GNUC__) || defined(__MINGW32__) + #define MLX_COMPILER_GCC + #ifdef __MINGW32__ + #define MLX_COMPILER_MINGW + #ifdef __MINGW64_VERSION_MAJOR + #define MLX_COMPILER_MINGW_W64 + #endif + #endif +#elif defined(__INTEL_COMPILER) || defined(__ICL) + #define MLX_COMPILER_INTEL +#elif defined(_MSC_VER) + #define MLX_COMPILER_MSVC +#else + #define MLX_COMPILER_UNKNOWN + #warning "This compiler is not fully supported" +#endif + +#if defined(_WIN32) || defined(__CYGWIN__) + #define MLX_PLAT_WINDOWS +#elif defined(__linux__) + #define MLX_PLAT_LINUX +#elif defined(__APPLE__) && defined(__MACH__) + #define MLX_PLAT_MACOS +#elif defined(unix) || defined(__unix__) || defined(__unix) + #define MLX_PLAT_UNIX +#else + #error "Unknown environment (not Windows, not Linux, not MacOS, not Unix)" +#endif + +#ifdef MLX_PLAT_WINDOWS + #ifdef MLX_COMPILER_MSVC + #ifdef MLX_BUILD + #define MLX_API __declspec(dllexport) + #else + #define MLX_API __declspec(dllimport) + #endif + #elif defined(MLX_COMPILER_GCC) + #ifdef MLX_BUILD + #define MLX_API __attribute__((dllexport)) + #else + #define MLX_API __attribute__((dllimport)) + #endif + #else + #define MLX_API + #endif +#elif defined(MLX_COMPILER_GCC) + #define MLX_API __attribute__((visibility("default"))) +#else + #define MLX_API +#endif + +#if defined(__GNUC__) || (defined(__MWERKS__) && (__MWERKS__ >= 0x3000)) || (defined(__ICC) && (__ICC >= 600)) || defined(__ghs__) + #define MLX_FUNC_SIG __PRETTY_FUNCTION__ +#elif defined(__DMC__) && (__DMC__ >= 0x810) + #define MLX_FUNC_SIG __PRETTY_FUNCTION__ +#elif (defined(__FUNCSIG__) || (_MSC_VER)) + #define MLX_FUNC_SIG __FUNCSIG__ +#elif (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 600)) || (defined(__IBMCPP__) && (__IBMCPP__ >= 500)) + #define MLX_FUNC_SIG __FUNCTION__ +#elif defined(__BORLANDC__) && (__BORLANDC__ >= 0x550) + #define MLX_FUNC_SIG __FUNC__ +#elif defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901) + #define MLX_FUNC_SIG __func__ +#elif defined(__cplusplus) && (__cplusplus >= 201103) + #define MLX_FUNC_SIG __func__ +#else + #define MLX_FUNC_SIG "Unknown function" +#endif + +#ifndef __cplusplus // if we compile in C + #ifdef __STDC__ + #ifdef __STDC_VERSION__ + #if __STDC_VERSION__ == 199409L + #define MLX_C_VERSION 1994 + #elif __STDC_VERSION__ == 199901L + #define MLX_C_VERSION 1999 + #elif __STDC_VERSION__ == 201112L + #define MLX_C_VERSION 2011 + #elif __STDC_VERSION__ == 201710L + #define MLX_C_VERSION 2017 + #elif __STDC_VERSION__ == 202311L + #define MLX_C_VERSION 2023 + #else + #define MLX_C_VERSION 0 + #endif + #else + #define MLX_C_VERSION 
0 + #endif + #else + #define MLX_C_VERSION 0 + #endif +#else + #define MLX_C_VERSION 0 +#endif + +#if defined(MLX_PLAT_WINDOWS) + #define VK_USE_PLATFORM_WIN32_KHR + #ifdef __cplusplus + constexpr const char* VULKAN_LIB_NAME = "vulkan-1.dll"; + #endif +#elif defined(MLX_PLAT_MACOS) + #define VK_USE_PLATFORM_MACOS_MVK + #define VK_USE_PLATFORM_METAL_EXT + #ifdef __cplusplus + constexpr const char* VULKAN_LIB_NAME = "libvulkan.dylib / libvulkan.1.dylib / libMoltenVK.dylib"; + #endif +#else + #define VK_USE_PLATFORM_XLIB_KHR + #define VK_USE_PLATFORM_WAYLAND_KHR + #ifdef __cplusplus + constexpr const char* VULKAN_LIB_NAME = "libvulkan.so / libvulkan.so.1"; + #endif +#endif + +// Checking common assumptions +#ifdef __cplusplus + #include <climits> + #include <cstdint> + + static_assert(CHAR_BIT == 8, "CHAR_BIT is expected to be 8"); + + static_assert(sizeof(int8_t) == 1, "int8_t is not of the correct size" ); + static_assert(sizeof(int16_t) == 2, "int16_t is not of the correct size"); + static_assert(sizeof(int32_t) == 4, "int32_t is not of the correct size"); + static_assert(sizeof(int64_t) == 8, "int64_t is not of the correct size"); + + static_assert(sizeof(uint8_t) == 1, "uint8_t is not of the correct size" ); + static_assert(sizeof(uint16_t) == 2, "uint16_t is not of the correct size"); + static_assert(sizeof(uint32_t) == 4, "uint32_t is not of the correct size"); + static_assert(sizeof(uint64_t) == 8, "uint64_t is not of the correct size"); +#elif MLX_C_VERSION >= 2011 + #if MLX_C_VERSION < 2023 + #include <assert.h> + #endif + #include <limits.h> + #include <stdint.h> + + static_assert(CHAR_BIT == 8, "CHAR_BIT is expected to be 8"); + + static_assert(sizeof(int8_t) == 1, "int8_t is not of the correct size" ); + static_assert(sizeof(int16_t) == 2, "int16_t is not of the correct size"); + static_assert(sizeof(int32_t) == 4, "int32_t is not of the correct size"); + static_assert(sizeof(int64_t) == 8, "int64_t is not of the correct size"); + + static_assert(sizeof(uint8_t) == 1, "uint8_t is not of the correct size" ); + static_assert(sizeof(uint16_t) == 2, "uint16_t is not of the correct size"); + static_assert(sizeof(uint32_t) == 4, "uint32_t is not of the correct size"); + static_assert(sizeof(uint64_t) == 8, "uint64_t is not of the correct size"); +#elif defined(MLX_COMPILER_GCC) + #define STATIC_ASSERT(cnd, descr) \ + ({ \ + extern int __attribute__((error("static assert failed: (" #cnd ") (" #descr ")"))) compile_time_check(void); \ + ((cnd) ? 0 : compile_time_check()), 0; \ + }) + + #include <limits.h> + #include <stdint.h> + + STATIC_ASSERT(CHAR_BIT == 8, "CHAR_BIT is expected to be 8"); + + STATIC_ASSERT(sizeof(int8_t) == 1, "int8_t is not of the correct size" ); + STATIC_ASSERT(sizeof(int16_t) == 2, "int16_t is not of the correct size"); + STATIC_ASSERT(sizeof(int32_t) == 4, "int32_t is not of the correct size"); + STATIC_ASSERT(sizeof(int64_t) == 8, "int64_t is not of the correct size"); + + STATIC_ASSERT(sizeof(uint8_t) == 1, "uint8_t is not of the correct size" ); + STATIC_ASSERT(sizeof(uint16_t) == 2, "uint16_t is not of the correct size"); + STATIC_ASSERT(sizeof(uint32_t) == 4, "uint32_t is not of the correct size"); + STATIC_ASSERT(sizeof(uint64_t) == 8, "uint64_t is not of the correct size"); +#else + #define STATIC_ASSERT(COND, MSG) typedef char static_assertion___##MSG[(COND)?1:-1] + + #include <limits.h> + #include <stdint.h> + + STATIC_ASSERT(CHAR_BIT == 8, CHAR_BIT_is_expected_to_be_8); + + STATIC_ASSERT(sizeof(int8_t) == 1, int8_t_is_not_of_the_correct_size); + STATIC_ASSERT(sizeof(int16_t) == 2, int16_t_is_not_of_the_correct_size); + STATIC_ASSERT(sizeof(int32_t) == 4, int32_t_is_not_of_the_correct_size); + STATIC_ASSERT(sizeof(int64_t) == 8, int64_t_is_not_of_the_correct_size); + + STATIC_ASSERT(sizeof(uint8_t) == 1, uint8_t_is_not_of_the_correct_size); + STATIC_ASSERT(sizeof(uint16_t) == 2, uint16_t_is_not_of_the_correct_size); + STATIC_ASSERT(sizeof(uint32_t) == 4, uint32_t_is_not_of_the_correct_size); + STATIC_ASSERT(sizeof(uint64_t) == 8, uint64_t_is_not_of_the_correct_size); +#endif + +#endif diff --git a/MacroLibX/libmlx.so b/MacroLibX/libmlx.so new file mode 100755 index 0000000..dc17b9a Binary files /dev/null and b/MacroLibX/libmlx.so differ diff --git a/MacroLibX/objs/makefile/src/core/application.o b/MacroLibX/objs/makefile/src/core/application.o new file mode 100644 index 0000000..7bf55dd Binary files /dev/null and b/MacroLibX/objs/makefile/src/core/application.o differ diff --git a/MacroLibX/objs/makefile/src/core/bridge.o b/MacroLibX/objs/makefile/src/core/bridge.o new file mode 100644 index 0000000..ee60b7a Binary files /dev/null and b/MacroLibX/objs/makefile/src/core/bridge.o differ diff --git a/MacroLibX/objs/makefile/src/core/errors.o b/MacroLibX/objs/makefile/src/core/errors.o new file mode 100644 index 0000000..f9e9324 Binary files /dev/null and b/MacroLibX/objs/makefile/src/core/errors.o differ diff --git a/MacroLibX/objs/makefile/src/core/graphics.o b/MacroLibX/objs/makefile/src/core/graphics.o new file mode 100644 index 0000000..6a3e721 Binary files /dev/null and b/MacroLibX/objs/makefile/src/core/graphics.o differ diff --git a/MacroLibX/objs/makefile/src/core/memory.o b/MacroLibX/objs/makefile/src/core/memory.o new file mode 100644 index 0000000..d8719a1 Binary files /dev/null and b/MacroLibX/objs/makefile/src/core/memory.o differ diff --git a/MacroLibX/objs/makefile/src/platform/inputs.o b/MacroLibX/objs/makefile/src/platform/inputs.o new file mode 100644 index 0000000..60cf2de Binary files /dev/null and b/MacroLibX/objs/makefile/src/platform/inputs.o differ diff --git a/MacroLibX/objs/makefile/src/platform/window.o b/MacroLibX/objs/makefile/src/platform/window.o new file mode 100644 index 0000000..810d1c9 Binary files /dev/null and b/MacroLibX/objs/makefile/src/platform/window.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/buffers/vk_buffer.o b/MacroLibX/objs/makefile/src/renderer/buffers/vk_buffer.o new file mode 100644 index 0000000..a53e191 Binary files /dev/null and
b/MacroLibX/objs/makefile/src/renderer/buffers/vk_buffer.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/buffers/vk_ubo.o b/MacroLibX/objs/makefile/src/renderer/buffers/vk_ubo.o new file mode 100644 index 0000000..c571e8a Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/buffers/vk_ubo.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/buffers/vk_vbo.o b/MacroLibX/objs/makefile/src/renderer/buffers/vk_vbo.o new file mode 100644 index 0000000..89d3bee Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/buffers/vk_vbo.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/command/cmd_manager.o b/MacroLibX/objs/makefile/src/renderer/command/cmd_manager.o new file mode 100644 index 0000000..8ccf79d Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/command/cmd_manager.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/command/single_time_cmd_manager.o b/MacroLibX/objs/makefile/src/renderer/command/single_time_cmd_manager.o new file mode 100644 index 0000000..269c235 Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/command/single_time_cmd_manager.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/command/vk_cmd_buffer.o b/MacroLibX/objs/makefile/src/renderer/command/vk_cmd_buffer.o new file mode 100644 index 0000000..eaeecbe Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/command/vk_cmd_buffer.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/command/vk_cmd_pool.o b/MacroLibX/objs/makefile/src/renderer/command/vk_cmd_pool.o new file mode 100644 index 0000000..be4894d Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/command/vk_cmd_pool.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/core/memory.o b/MacroLibX/objs/makefile/src/renderer/core/memory.o new file mode 100644 index 0000000..84411fd Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/core/memory.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/core/render_core.o b/MacroLibX/objs/makefile/src/renderer/core/render_core.o new file mode 100644 index 0000000..ae3aa7b Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/core/render_core.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/core/vk_device.o b/MacroLibX/objs/makefile/src/renderer/core/vk_device.o new file mode 100644 index 0000000..6ff5cac Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/core/vk_device.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/core/vk_fence.o b/MacroLibX/objs/makefile/src/renderer/core/vk_fence.o new file mode 100644 index 0000000..edfa8f5 Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/core/vk_fence.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/core/vk_instance.o b/MacroLibX/objs/makefile/src/renderer/core/vk_instance.o new file mode 100644 index 0000000..c3db0b1 Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/core/vk_instance.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/core/vk_queues.o b/MacroLibX/objs/makefile/src/renderer/core/vk_queues.o new file mode 100644 index 0000000..8db4ce5 Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/core/vk_queues.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/core/vk_semaphore.o b/MacroLibX/objs/makefile/src/renderer/core/vk_semaphore.o new file mode 100644 index 0000000..5fba0f1 Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/core/vk_semaphore.o differ diff --git 
a/MacroLibX/objs/makefile/src/renderer/core/vk_surface.o b/MacroLibX/objs/makefile/src/renderer/core/vk_surface.o new file mode 100644 index 0000000..1949163 Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/core/vk_surface.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/core/vk_validation_layers.o b/MacroLibX/objs/makefile/src/renderer/core/vk_validation_layers.o new file mode 100644 index 0000000..9aa14df Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/core/vk_validation_layers.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/descriptors/vk_descriptor_pool.o b/MacroLibX/objs/makefile/src/renderer/descriptors/vk_descriptor_pool.o new file mode 100644 index 0000000..8e6216d Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/descriptors/vk_descriptor_pool.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/descriptors/vk_descriptor_set.o b/MacroLibX/objs/makefile/src/renderer/descriptors/vk_descriptor_set.o new file mode 100644 index 0000000..6880a4c Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/descriptors/vk_descriptor_set.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/descriptors/vk_descriptor_set_layout.o b/MacroLibX/objs/makefile/src/renderer/descriptors/vk_descriptor_set_layout.o new file mode 100644 index 0000000..dbc2cc6 Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/descriptors/vk_descriptor_set_layout.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/font.o b/MacroLibX/objs/makefile/src/renderer/font.o new file mode 100644 index 0000000..6e9e4a7 Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/font.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/images/texture.o b/MacroLibX/objs/makefile/src/renderer/images/texture.o new file mode 100644 index 0000000..8a1eec0 Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/images/texture.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/images/texture_atlas.o b/MacroLibX/objs/makefile/src/renderer/images/texture_atlas.o new file mode 100644 index 0000000..1fba2b1 Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/images/texture_atlas.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/images/vk_image.o b/MacroLibX/objs/makefile/src/renderer/images/vk_image.o new file mode 100644 index 0000000..e076508 Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/images/vk_image.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/pipeline/pipeline.o b/MacroLibX/objs/makefile/src/renderer/pipeline/pipeline.o new file mode 100644 index 0000000..771cf3f Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/pipeline/pipeline.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/pixel_put.o b/MacroLibX/objs/makefile/src/renderer/pixel_put.o new file mode 100644 index 0000000..6388b81 Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/pixel_put.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/renderer.o b/MacroLibX/objs/makefile/src/renderer/renderer.o new file mode 100644 index 0000000..78dcbaa Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/renderer.o differ diff --git a/MacroLibX/objs/makefile/src/renderer/renderpass/vk_framebuffer.o b/MacroLibX/objs/makefile/src/renderer/renderpass/vk_framebuffer.o new file mode 100644 index 0000000..6d4d0ed Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/renderpass/vk_framebuffer.o differ diff --git 
diff --git a/MacroLibX/objs/makefile/src/renderer/renderpass/vk_render_pass.o b/MacroLibX/objs/makefile/src/renderer/renderpass/vk_render_pass.o
new file mode 100644
index 0000000..5c862a7
Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/renderpass/vk_render_pass.o differ
diff --git a/MacroLibX/objs/makefile/src/renderer/swapchain/vk_swapchain.o b/MacroLibX/objs/makefile/src/renderer/swapchain/vk_swapchain.o
new file mode 100644
index 0000000..1dea24c
Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/swapchain/vk_swapchain.o differ
diff --git a/MacroLibX/objs/makefile/src/renderer/text_library.o b/MacroLibX/objs/makefile/src/renderer/text_library.o
new file mode 100644
index 0000000..96ce8f5
Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/text_library.o differ
diff --git a/MacroLibX/objs/makefile/src/renderer/text_pipeline.o b/MacroLibX/objs/makefile/src/renderer/text_pipeline.o
new file mode 100644
index 0000000..7ebe93b
Binary files /dev/null and b/MacroLibX/objs/makefile/src/renderer/text_pipeline.o differ
diff --git a/MacroLibX/res/logo.png b/MacroLibX/res/logo.png
new file mode 100644
index 0000000..a79f687
Binary files /dev/null and b/MacroLibX/res/logo.png differ
diff --git a/MacroLibX/res/screenshot_test.png b/MacroLibX/res/screenshot_test.png
new file mode 100644
index 0000000..61b40ba
Binary files /dev/null and b/MacroLibX/res/screenshot_test.png differ
diff --git a/MacroLibX/res/screenshot_test_windows.png b/MacroLibX/res/screenshot_test_windows.png
new file mode 100644
index 0000000..65f5771
Binary files /dev/null and b/MacroLibX/res/screenshot_test_windows.png differ
diff --git a/MacroLibX/scripts/fetch_dependencies.sh b/MacroLibX/scripts/fetch_dependencies.sh
new file mode 100644
index 0000000..d40d1b2
--- /dev/null
+++ b/MacroLibX/scripts/fetch_dependencies.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+# Update volk
+rm -f ../third_party/volk.c
+rm -f ../third_party/volk.h
+tag_name=$(curl -sL https://api.github.com/repos/zeux/Volk/releases/latest | jq -r '.tag_name')
+wget https://api.github.com/repos/zeux/volk/zipball/$tag_name -O volk.zip
+unzip -o volk.zip -d ../third_party/
+mv ../third_party/zeux-volk*/volk.h ../third_party
+mv ../third_party/zeux-volk*/volk.c ../third_party
+rm -rf ../third_party/zeux-volk*
+rm volk.zip
+
+# Update VMA
+rm -f ../third_party/vma.h
+tag_name=$(curl -sL https://api.github.com/repos/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator/releases/latest | jq -r '.tag_name')
+wget https://api.github.com/repos/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator/zipball/$tag_name -O vma.zip
+unzip -o vma.zip -d ../third_party/
+mv ../third_party/GPUOpen-LibrariesAndSDKs-VulkanMemoryAllocator*/include/vk_mem_alloc.h ../third_party/vma.h
+rm -rf ../third_party/GPUOpen-LibrariesAndSDKs-VulkanMemoryAllocator*
+rm vma.zip
+
+# Update Vulkan headers
+rm -rf ../third_party/vulkan
+rm -rf ../third_party/vk_video
+wget https://github.com/KhronosGroup/Vulkan-Headers/archive/main.zip -O vulkan-headers.zip
+unzip -o vulkan-headers.zip -d ../third_party/
+mv ../third_party/Vulkan-Headers-main/include/vulkan ../third_party/
+mv ../third_party/Vulkan-Headers-main/include/vk_video ../third_party/
+rm -rf ../third_party/Vulkan-Headers-main
+rm vulkan-headers.zip
diff --git a/MacroLibX/src/core/application.cpp b/MacroLibX/src/core/application.cpp
new file mode 100644
index 0000000..21bc253
--- /dev/null
+++ b/MacroLibX/src/core/application.cpp
@@ -0,0 +1,79 @@
+/* ************************************************************************** */
+/*   application.cpp :: by maldavid :: created 2022/10/04, updated 2023/12/27 */
+/* ************************************************************************** */
+
+#include "application.h"
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+namespace mlx::core
+{
+    static bool __drop_sdl_responsability = false;
+
+    Application::Application() : _in(std::make_unique<Input>())
+    {
+        __drop_sdl_responsability = SDL_WasInit(SDL_INIT_VIDEO);
+        if(__drop_sdl_responsability) // in case the mlx is running in a sandbox like MacroUnitTester where SDL is already initialized
+            return;
+        SDL_SetMemoryFunctions(MemManager::malloc, MemManager::calloc, MemManager::realloc, MemManager::free);
+        if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_EVENTS | SDL_INIT_TIMER) != 0)
+            error::report(e_kind::fatal_error, "SDL error : unable to init all subsystems : %s", SDL_GetError());
+    }
+
+    void Application::run() noexcept
+    {
+        while(_in->is_running())
+        {
+            _in->update();
+
+            if(_loop_hook)
+                _loop_hook(_param);
+
+            for(auto& gs : _graphics)
+                gs->render();
+        }
+    }
+
+    void* Application::newTexture(int w, int h)
+    {
+        #ifdef DEBUG
+            _textures.emplace_front().create(nullptr, w, h, VK_FORMAT_R8G8B8A8_UNORM, "__mlx_unamed_user_texture");
+        #else
+            _textures.emplace_front().create(nullptr, w, h, VK_FORMAT_R8G8B8A8_UNORM, nullptr);
+        #endif
+        return &_textures.front();
+    }
+
+    void* Application::newStbTexture(char* file, int* w, int* h)
+    {
+        _textures.emplace_front(stbTextureLoad(file, w, h));
+        return &_textures.front();
+    }
+
+    void Application::destroyTexture(void* ptr)
+    {
+        vkDeviceWaitIdle(Render_Core::get().getDevice().get()); // TODO : synchronize with something finer than stalling the whole GPU
+        Texture* texture = static_cast<Texture*>(ptr);
+        texture->destroy();
+    }
+
+    Application::~Application()
+    {
+        if(__drop_sdl_responsability)
+            return;
+        SDL_QuitSubSystem(SDL_INIT_VIDEO | SDL_INIT_TIMER | SDL_INIT_EVENTS);
+        SDL_Quit();
+    }
+}
diff --git a/MacroLibX/src/core/application.h b/MacroLibX/src/core/application.h
new file mode 100644
index 0000000..24c2368
--- /dev/null
+++ b/MacroLibX/src/core/application.h
@@ -0,0 +1,77 @@
+/* ************************************************************************** */
+/*   application.h :: by maldavid :: created 2022/10/04, updated 2023/12/22   */
+/* ************************************************************************** */
+
+#ifndef __MLX_APPLICATION__
+#define __MLX_APPLICATION__
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+
+#include
+#include
+#include
+
+namespace mlx::core
+{
+    class Application
+    {
+        public:
+            Application();
+
+            inline void getMousePos(int* x, int* y) noexcept;
+            inline void mouseMove(void* win, int x, int y) noexcept;
+
+            inline void onEvent(void* win, int event, int (*funct_ptr)(int, void*), void* param) noexcept;
+
+            inline void getScreenSize(int* w, int* h) noexcept;
+
+            inline void* newGraphicsSuport(std::size_t w, std::size_t h, const char* title);
+            inline void clearGraphicsSupport(void* win);
+            inline void destroyGraphicsSupport(void* win);
+
+            inline void pixelPut(void* win, int x, int y, uint32_t color) const noexcept;
+            inline void stringPut(void* win, int x, int y, int color, char* str);
+
+            void* newTexture(int w, int h);
+            void* newStbTexture(char* file, int* w, int* h); // stb textures are formats managed by stb_image (png, jpg, bmp, ...)
+            inline void texturePut(void* win, void* img, int x, int y);
+            inline int getTexturePixel(void* img, int x, int y);
+            inline void setTexturePixel(void* img, int x, int y, uint32_t color);
+            void destroyTexture(void* ptr);
+
+            inline void loopHook(int (*f)(void*), void* param);
+            inline void loopEnd() noexcept;
+
+            inline void loadFont(void* win, const std::filesystem::path& filepath, float scale);
+
+            void run() noexcept;
+
+            ~Application();
+
+        private:
+            std::list<Texture> _textures;
+            std::vector<std::unique_ptr<GraphicsSupport>> _graphics;
+            std::function<int(void*)> _loop_hook;
+            std::unique_ptr<Input> _in;
+            void* _param = nullptr;
+    };
+}
+
+#include
+
+#endif // __MLX_APPLICATION__
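A side note on the handle scheme above (illustration only, not part of the diff): the void* a client gets back for a window is really a pointer to the int ID stored inside the matching GraphicsSupport, and the inline implementations in application.inl below bounds-check that ID before indexing _graphics. The same pattern in miniature, with hypothetical names:

    #include <memory>
    #include <vector>

    struct Widget { int id = 0; };                         // stand-in for GraphicsSupport

    static std::vector<std::unique_ptr<Widget>> registry;  // stand-in for _graphics

    void* make_widget()
    {
        registry.emplace_back(std::make_unique<Widget>());
        registry.back()->id = static_cast<int>(registry.size()) - 1;
        return &registry.back()->id;                       // the handle points at the stored ID
    }

    Widget* resolve(void* handle)                          // what CHECK_WINDOW_PTR validates
    {
        int id = *static_cast<int*>(handle);
        if(id < 0 || id >= static_cast<int>(registry.size()))
            return nullptr;
        return registry[id].get();
    }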
diff --git a/MacroLibX/src/core/application.inl b/MacroLibX/src/core/application.inl
new file mode 100644
index 0000000..3ecf15c
--- /dev/null
+++ b/MacroLibX/src/core/application.inl
@@ -0,0 +1,158 @@
+/* ************************************************************************** */
+/*   application.inl :: by maldavid :: created 2022/10/04, updated 2023/04/02 */
+/* ************************************************************************** */
+
+#include
+
+#define CHECK_WINDOW_PTR(win) \
+    if(win == nullptr) \
+    { \
+        core::error::report(e_kind::error, "invalid window ptr (NULL)"); \
+        return; \
+    } \
+    else if(*static_cast<int*>(win) < 0 || *static_cast<int*>(win) >= static_cast<int>(_graphics.size()))\
+    { \
+        core::error::report(e_kind::error, "invalid window ptr"); \
+        return; \
+    } else {}\
+
+namespace mlx::core
+{
+    void Application::getMousePos(int* x, int* y) noexcept
+    {
+        *x = _in->getX();
+        *y = _in->getY();
+    }
+
+    void Application::mouseMove(void* win, int x, int y) noexcept
+    {
+        CHECK_WINDOW_PTR(win);
+        SDL_WarpMouseInWindow(_graphics[*static_cast<int*>(win)]->getWindow()->getNativeWindow(), x, y);
+        SDL_PumpEvents();
+        SDL_FlushEvent(SDL_MOUSEMOTION);
+    }
+
+    void Application::onEvent(void* win, int event, int (*funct_ptr)(int, void*), void* param) noexcept
+    {
+        CHECK_WINDOW_PTR(win);
+        _in->onEvent(_graphics[*static_cast<int*>(win)]->getWindow()->getID(), event, funct_ptr, param);
+    }
+
+    void Application::getScreenSize(int* w, int* h) noexcept
+    {
+        SDL_DisplayMode DM;
+        SDL_GetDesktopDisplayMode(0, &DM);
+        *w = DM.w;
+        *h = DM.h;
+    }
+
+    void* Application::newGraphicsSuport(std::size_t w, std::size_t h, const char* title)
+    {
+        // if `title` is actually the address of one of our textures, this support renders to that texture instead of a real window
+        auto it = std::find_if(_textures.begin(), _textures.end(), [=](const Texture& texture)
+        {
+            return &texture == reinterpret_cast<Texture*>(const_cast<char*>(title));
+        });
+        if(it != _textures.end())
+            _graphics.emplace_back(std::make_unique<GraphicsSupport>(w, h, reinterpret_cast<Texture*>(const_cast<char*>(title)), _graphics.size()));
+        else
+        {
+            _graphics.emplace_back(std::make_unique<GraphicsSupport>(w, h, title, _graphics.size()));
+            _in->addWindow(_graphics.back()->getWindow());
+        }
+        return static_cast<void*>(&_graphics.back()->getID());
+    }
+
+    void Application::clearGraphicsSupport(void* win)
+    {
+        CHECK_WINDOW_PTR(win);
+        _graphics[*static_cast<int*>(win)]->clearRenderData();
+    }
+
+    void Application::destroyGraphicsSupport(void* win)
+    {
+        CHECK_WINDOW_PTR(win);
+        _graphics[*static_cast<int*>(win)].reset();
+    }
+
+    void Application::pixelPut(void* win, int x, int y, uint32_t color) const noexcept
+    {
+        CHECK_WINDOW_PTR(win);
+        _graphics[*static_cast<int*>(win)]->pixelPut(x, y, color);
+    }
+
+    void Application::stringPut(void* win, int x, int y, int color, char* str)
+    {
+        CHECK_WINDOW_PTR(win);
+        _graphics[*static_cast<int*>(win)]->stringPut(x, y, color, str);
+    }
+
+    void Application::loadFont(void* win, const std::filesystem::path& filepath, float scale)
+    {
+        CHECK_WINDOW_PTR(win);
+        _graphics[*static_cast<int*>(win)]->loadFont(filepath, scale);
+    }
+
+    void Application::texturePut(void* win, void* img, int x, int y)
+    {
+        CHECK_WINDOW_PTR(win);
+        if(img == nullptr)
+        {
+            core::error::report(e_kind::error, "wrong texture (NULL)");
+            return;
+        }
+        Texture* texture = static_cast<Texture*>(img);
+        if(!texture->isInit())
+            core::error::report(e_kind::error, "trying to put a texture that has been destroyed");
+        else
+            _graphics[*static_cast<int*>(win)]->texturePut(texture, x, y);
+    }
+
+    int Application::getTexturePixel(void* img, int x, int y)
+    {
+        if(img == nullptr)
+        {
+            core::error::report(e_kind::error, "wrong texture (NULL)");
+            return 0;
+        }
+        Texture* texture = static_cast<Texture*>(img);
+        if(!texture->isInit())
+        {
+            core::error::report(e_kind::error, "trying to get a pixel from a texture that has been destroyed");
+            return 0;
+        }
+        return texture->getPixel(x, y);
+    }
+
+    void Application::setTexturePixel(void* img, int x, int y, uint32_t color)
+    {
+        if(img == nullptr)
+        {
+            core::error::report(e_kind::error, "wrong texture (NULL)");
+            return;
+        }
+        Texture* texture = static_cast<Texture*>(img);
+        if(!texture->isInit())
+            core::error::report(e_kind::error, "trying to set a pixel on a texture that has been destroyed");
+        else
+            texture->setPixel(x, y, color);
+    }
+
+    void Application::loopHook(int (*f)(void*), void* param)
+    {
+        _loop_hook = f;
+        _param = param;
+    }
+
+    void Application::loopEnd() noexcept
+    {
+        _in->finish();
+    }
+}
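One subtlety worth calling out in newGraphicsSuport above: when the title pointer is the address of a Texture previously returned by newTexture, the "window" is created with that texture as its render target rather than an OS window. Through the public C API defined in bridge.cpp below, that would look roughly like this (sketch only; the diff does not state whether this is an officially supported entry point):

    void* mlx = mlx_init();
    void* img = mlx_new_image(mlx, 400, 400);
    /* the image pointer is smuggled through the title parameter,
       so everything drawn into this "window" lands in the image */
    void* win = mlx_new_window(mlx, 400, 400, (const char*)img);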
diff --git a/MacroLibX/src/core/bridge.cpp b/MacroLibX/src/core/bridge.cpp
new file mode 100644
index 0000000..beedec8
--- /dev/null
+++ b/MacroLibX/src/core/bridge.cpp
@@ -0,0 +1,255 @@
+/* ************************************************************************** */
+/*   bridge.cpp :: by maldavid :: created 2022/10/04, updated 2023/12/31      */
+/* ************************************************************************** */
+
+#include
+#include "errors.h"
+#include "application.h"
+#include
+#include
+#include
+#include
+#include
+
+static void* __mlx_ptr = nullptr;
+
+#define MLX_CHECK_APPLICATION_POINTER(ptr) \
+    if(ptr != __mlx_ptr || ptr == NULL) \
+        mlx::core::error::report(e_kind::fatal_error, "invalid mlx pointer passed to '%s'", MLX_FUNC_SIG); \
+    else {} // just to avoid issues with possible if-else statements outside this macro
+
+extern "C"
+{
+    void* mlx_init()
+    {
+        if(__mlx_ptr != nullptr)
+        {
+            mlx::core::error::report(e_kind::error, "MLX cannot be initialized multiple times");
+            return NULL; // not nullptr for the C compatibility
+        }
+        mlx::MemManager::get(); // just to initialize the C garbage collector
+        mlx::core::Application* app = new mlx::core::Application;
+        mlx::Render_Core::get().init();
+        if(app == nullptr)
+            mlx::core::error::report(e_kind::fatal_error, "everything blew up");
+        __mlx_ptr = static_cast<void*>(app);
+        return __mlx_ptr;
+    }
+
+    void* mlx_new_window(void* mlx, int w, int h, const char* title)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        return static_cast<mlx::core::Application*>(mlx)->newGraphicsSuport(w, h, title);
+    }
+
+    int mlx_loop_hook(void* mlx, int (*f)(void*), void* param)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        static_cast<mlx::core::Application*>(mlx)->loopHook(f, param);
+        return 0;
+    }
+
+    int mlx_loop(void* mlx)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        static_cast<mlx::core::Application*>(mlx)->run();
+        return 0;
+    }
+
+    int mlx_loop_end(void* mlx)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        static_cast<mlx::core::Application*>(mlx)->loopEnd();
+        return 0;
+    }
+
+    int mlx_mouse_show()
+    {
+        return SDL_ShowCursor(SDL_ENABLE);
+    }
+
+    int mlx_mouse_hide()
+    {
+        return SDL_ShowCursor(SDL_DISABLE);
+    }
+
+    int mlx_mouse_move(void* mlx, void* win, int x, int y)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        static_cast<mlx::core::Application*>(mlx)->mouseMove(win, x, y);
+        return 0;
+    }
+
+    int mlx_mouse_get_pos(void* mlx, int* x, int* y)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        static_cast<mlx::core::Application*>(mlx)->getMousePos(x, y);
+        return 0;
+    }
+
+    int mlx_on_event(void* mlx, void* win, mlx_event_type event, int (*funct_ptr)(int, void*), void* param)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        static_cast<mlx::core::Application*>(mlx)->onEvent(win, static_cast<int>(event), funct_ptr, param);
+        return 0;
+    }
+
+    void* mlx_new_image(void* mlx, int width, int height)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        return static_cast<mlx::core::Application*>(mlx)->newTexture(width, height);
+    }
+
+    int mlx_get_image_pixel(void* mlx, void* img, int x, int y)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        return static_cast<mlx::core::Application*>(mlx)->getTexturePixel(img, x, y);
+    }
+
+    void mlx_set_image_pixel(void* mlx, void* img, int x, int y, int color)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        unsigned char color_bits[4];
+        color_bits[0] = (color & 0x00FF0000) >> 16;
+        color_bits[1] = (color & 0x0000FF00) >> 8;
+        color_bits[2] = (color & 0x000000FF);
+        color_bits[3] = (color & 0xFF000000) >> 24;
+        static_cast<mlx::core::Application*>(mlx)->setTexturePixel(img, x, y, *reinterpret_cast<uint32_t*>(color_bits));
+    }
+
+    int mlx_put_image_to_window(void* mlx, void* win, void* img, int x, int y)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        static_cast<mlx::core::Application*>(mlx)->texturePut(win, img, x, y);
+        return 0;
+    }
+
+    int mlx_destroy_image(void* mlx, void* img)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        static_cast<mlx::core::Application*>(mlx)->destroyTexture(img);
+        return 0;
+    }
+
+    void* mlx_png_file_to_image(void* mlx, char* filename, int* width, int* height)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        std::filesystem::path file(filename);
+        if(file.extension() != ".png")
+        {
+            mlx::core::error::report(e_kind::error, "PNG loader : not a png file '%s'", filename);
+            return nullptr;
+        }
+        return static_cast<mlx::core::Application*>(mlx)->newStbTexture(filename, width, height);
+    }
+
+    void* mlx_jpg_file_to_image(void* mlx, char* filename, int* width, int* height)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        std::filesystem::path file(filename);
+        if(file.extension() != ".jpg" && file.extension() != ".jpeg")
+        {
+            mlx::core::error::report(e_kind::error, "JPG loader : not a jpg file '%s'", filename);
+            return nullptr;
+        }
+        return static_cast<mlx::core::Application*>(mlx)->newStbTexture(filename, width, height);
+    }
+
+    void* mlx_bmp_file_to_image(void* mlx, char* filename, int* width, int* height)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        std::filesystem::path file(filename);
+        if(file.extension() != ".bmp" && file.extension() != ".dib")
+        {
+            mlx::core::error::report(e_kind::error, "BMP loader : not a bmp file '%s'", filename);
+            return nullptr;
+        }
+        return static_cast<mlx::core::Application*>(mlx)->newStbTexture(filename, width, height);
+    }
+
+    int mlx_pixel_put(void* mlx, void* win, int x, int y, int color)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        unsigned char color_bits[4];
+        color_bits[0] = (color & 0x00FF0000) >> 16;
+        color_bits[1] = (color & 0x0000FF00) >> 8;
+        color_bits[2] = (color & 0x000000FF);
+        color_bits[3] = (color & 0xFF000000) >> 24;
+        static_cast<mlx::core::Application*>(mlx)->pixelPut(win, x, y, *reinterpret_cast<uint32_t*>(color_bits));
+        return 0;
+    }
+
+    int mlx_string_put(void* mlx, void* win, int x, int y, int color, char* str)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        unsigned char color_bits[4];
+        color_bits[0] = (color & 0x00FF0000) >> 16;
+        color_bits[1] = (color & 0x0000FF00) >> 8;
+        color_bits[2] = color & 0x000000FF;
+        color_bits[3] = 0xFF;
+        static_cast<mlx::core::Application*>(mlx)->stringPut(win, x, y, *reinterpret_cast<int*>(color_bits), str);
+        return 0;
+    }
+
+    void mlx_set_font(void* mlx, void* win, char* filepath)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        std::filesystem::path file(filepath);
+        if(std::strcmp(filepath, "default") != 0 && file.extension() != ".ttf" && file.extension() != ".tte")
+        {
+            mlx::core::error::report(e_kind::error, "TTF loader : not a truetype font file '%s'", filepath);
+            return;
+        }
+        static_cast<mlx::core::Application*>(mlx)->loadFont(win, file, 16.f);
+    }
+
+    void mlx_set_font_scale(void* mlx, void* win, char* filepath, float scale)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        std::filesystem::path file(filepath);
+        if(std::strcmp(filepath, "default") != 0 && file.extension() != ".ttf" && file.extension() != ".tte")
+        {
+            mlx::core::error::report(e_kind::error, "TTF loader : not a truetype font file '%s'", filepath);
+            return;
+        }
+        static_cast<mlx::core::Application*>(mlx)->loadFont(win, file, scale);
+    }
+
+    int mlx_clear_window(void* mlx, void* win)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        static_cast<mlx::core::Application*>(mlx)->clearGraphicsSupport(win);
+        return 0;
+    }
+
+    int mlx_destroy_window(void* mlx, void* win)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        static_cast<mlx::core::Application*>(mlx)->destroyGraphicsSupport(win);
+        return 0;
+    }
+
+    int mlx_destroy_display(void* mlx)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        delete static_cast<mlx::core::Application*>(mlx);
+        mlx::Render_Core::get().destroy();
+        __mlx_ptr = nullptr;
+        return 0;
+    }
+
+    int mlx_get_screens_size(void* mlx, int* w, int* h)
+    {
+        MLX_CHECK_APPLICATION_POINTER(mlx);
+        static_cast<mlx::core::Application*>(mlx)->getScreenSize(w, h);
+        return 0;
+    }
+}
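The byte shuffling in mlx_pixel_put, mlx_set_image_pixel and mlx_string_put above unpacks the classic 0xAARRGGBB int into the R,G,B,A byte order the renderer consumes (mlx_string_put forces alpha to 0xFF). A standalone illustration of the same arithmetic:

    #include <cstdio>

    int main()
    {
        int color = 0x80FF8040;                     // A=0x80 R=0xFF G=0x80 B=0x40
        unsigned char color_bits[4];
        color_bits[0] = (color & 0x00FF0000) >> 16; // R -> 0xFF
        color_bits[1] = (color & 0x0000FF00) >> 8;  // G -> 0x80
        color_bits[2] = (color & 0x000000FF);       // B -> 0x40
        color_bits[3] = (color & 0xFF000000) >> 24; // A -> 0x80
        std::printf("%02X %02X %02X %02X\n",
            color_bits[0], color_bits[1], color_bits[2], color_bits[3]); // FF 80 40 80
        return 0;
    }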
diff --git a/MacroLibX/src/core/errors.cpp b/MacroLibX/src/core/errors.cpp
new file mode 100644
index 0000000..0fb675b
--- /dev/null
+++ b/MacroLibX/src/core/errors.cpp
@@ -0,0 +1,44 @@
+/* ************************************************************************** */
+/*   errors.cpp :: by maldavid :: created 2022/10/04, updated 2023/11/14      */
+/* ************************************************************************** */
+
+#include
+#include
+#include
+#include
+
+#include "errors.h"
+
+constexpr const int BUFFER_SIZE = 4096;
+
+namespace mlx::core::error
+{
+    // note : `msg` is a C string rather than a std::string because calling
+    // va_start on a non-trivial parameter is undefined behaviour
+    void report(e_kind kind, const char* msg, ...)
+    {
+        char buffer[BUFFER_SIZE];
+
+        va_list al;
+        va_start(al, msg);
+        std::vsnprintf(buffer, BUFFER_SIZE, msg, al);
+        va_end(al);
+
+        switch(kind)
+        {
+            case e_kind::message: std::cout << "\033[1;34m[MacroLibX] Message : \033[1;0m" << buffer << std::endl; break;
+            case e_kind::warning: std::cout << "\033[1;35m[MacroLibX] Warning : \033[1;0m" << buffer << std::endl; break;
+            case e_kind::error: std::cerr << "\033[1;31m[MacroLibX] Error : \033[1;0m" << buffer << std::endl; break;
+            case e_kind::fatal_error:
+                std::cerr << "\033[1;31m[MacroLibX] Fatal Error : \033[1;0m" << buffer << std::endl;
+                std::exit(EXIT_FAILURE);
+                break;
+        }
+    }
+}
diff --git a/MacroLibX/src/core/errors.h b/MacroLibX/src/core/errors.h
new file mode 100644
index 0000000..9bfde94
--- /dev/null
+++ b/MacroLibX/src/core/errors.h
@@ -0,0 +1,32 @@
+/* ************************************************************************** */
+/*   errors.h :: by maldavid :: created 2022/10/04, updated 2023/12/27        */
+/* ************************************************************************** */
+
+#ifndef __MLX_ERRORS__
+#define __MLX_ERRORS__
+
+#include
+#include
+
+enum class e_kind
+{
+    message,
+    warning,
+    error,
+    fatal_error
+};
+
+namespace mlx::core::error
+{
+    void report(e_kind kind, const char* msg, ...);
+}
+
+#endif // __MLX_ERRORS__
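report is a printf-style sink: every kind below fatal_error prints and returns to the caller, while fatal_error prints and then terminates the process. Two hypothetical calls to show the contract:

    using namespace mlx::core;

    error::report(e_kind::warning, "texture atlas %d%% full", 87); // prints, returns
    error::report(e_kind::fatal_error, "Vulkan : device lost");    // prints, then std::exit(EXIT_FAILURE)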
diff --git a/MacroLibX/src/core/graphics.cpp b/MacroLibX/src/core/graphics.cpp
new file mode 100644
index 0000000..cd936f7
--- /dev/null
+++ b/MacroLibX/src/core/graphics.cpp
@@ -0,0 +1,104 @@
+/* ************************************************************************** */
+/*   graphics.cpp :: by maldavid :: created 2023/04/02, updated 2023/12/27    */
+/* ************************************************************************** */
+
+#include
+
+namespace mlx
+{
+    GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, Texture* render_target, int id) :
+        _window(nullptr),
+        _text_put_pipeline(std::make_unique<TextPutPipeline>()),
+        _renderer(std::make_unique<Renderer>()),
+        _width(w),
+        _height(h),
+        _id(id)
+    {
+        _renderer->setWindow(nullptr);
+        _renderer->init(render_target);
+        _pixel_put_pipeline.init(w, h, *_renderer);
+        _text_put_pipeline->init(_renderer.get());
+    }
+
+    GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id) :
+        _window(std::make_shared<MLX_Window>(w, h, title)),
+        _text_put_pipeline(std::make_unique<TextPutPipeline>()),
+        _renderer(std::make_unique<Renderer>()),
+        _width(w),
+        _height(h),
+        _id(id)
+    {
+        _renderer->setWindow(_window.get());
+        _renderer->init(nullptr);
+        _pixel_put_pipeline.init(w, h, *_renderer);
+        _text_put_pipeline->init(_renderer.get());
+    }
+
+    void GraphicsSupport::render() noexcept
+    {
+        if(!_renderer->beginFrame())
+            return;
+        _proj = glm::ortho<float>(0, _width, 0, _height);
+        _renderer->getUniformBuffer()->setData(sizeof(_proj), &_proj);
+        auto cmd_buff = _renderer->getActiveCmdBuffer().get();
+
+        static std::array<VkDescriptorSet, 2> sets = {
+            _renderer->getVertDescriptorSet().get(),
+            VK_NULL_HANDLE
+        };
+
+        for(auto& data : _textures_to_render)
+        {
+            if(!data.texture->isInit())
+                continue;
+            if(data.texture->getSet() == VK_NULL_HANDLE)
+                data.texture->setDescriptor(_renderer->getFragDescriptorSet().duplicate());
+            if(data.texture->getLayout() != VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
+                data.texture->transitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
+            if(!data.texture->hasBeenUpdated())
+                data.texture->updateSet(0);
+            sets[1] = data.texture->getSet();
+            vkCmdBindDescriptorSets(cmd_buff, VK_PIPELINE_BIND_POINT_GRAPHICS, _renderer->getPipeline().getPipelineLayout(), 0, sets.size(), sets.data(), 0, nullptr);
+            data.texture->render(*_renderer, data.x, data.y);
+        }
+
+        _pixel_put_pipeline.present();
+
+        sets[1] = _pixel_put_pipeline.getDescriptorSet();
+        vkCmdBindDescriptorSets(cmd_buff, VK_PIPELINE_BIND_POINT_GRAPHICS, _renderer->getPipeline().getPipelineLayout(), 0, sets.size(), sets.data(), 0, nullptr);
+        _pixel_put_pipeline.render(*_renderer);
+        _text_put_pipeline->render(sets);
+        _renderer->endFrame();
+
+        for(auto& data : _textures_to_render)
+            data.texture->resetUpdate();
+
+        #ifdef GRAPHICS_MEMORY_DUMP
+        // dump memory to file every two seconds
+        static uint64_t timer = SDL_GetTicks64();
+        if(SDL_GetTicks64() - timer > 2000)
+        {
+            Render_Core::get().getAllocator().dumpMemoryToJson();
+            timer += 2000;
+        }
+        #endif
+    }
+
+    GraphicsSupport::~GraphicsSupport()
+    {
+        vkDeviceWaitIdle(Render_Core::get().getDevice().get());
+        _text_put_pipeline->destroy();
+        _pixel_put_pipeline.destroy();
+        _renderer->destroy();
+        if(_window)
+            _window->destroy();
+    }
+}
diff --git a/MacroLibX/src/core/graphics.h b/MacroLibX/src/core/graphics.h
new file mode 100644
index 0000000..04f66b0
--- /dev/null
+++ b/MacroLibX/src/core/graphics.h
@@ -0,0 +1,67 @@
+/* ************************************************************************** */
+/*   graphics.h :: by maldavid :: created 2023/04/02, updated 2023/12/24      */
+/* ************************************************************************** */
+
+#ifndef __MLX_GRAPHICS__
+#define __MLX_GRAPHICS__
+
+#include
+#include
+#include
+
+#include
+#include
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+namespace mlx
+{
+    class GraphicsSupport : public non_copyable
+    {
+        public:
+            GraphicsSupport(std::size_t w, std::size_t h, Texture* render_target, int id);
+            GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id);
+
+            inline int& getID() noexcept;
+            inline std::shared_ptr<MLX_Window> getWindow();
+
+            void render() noexcept;
+
+            inline void clearRenderData() noexcept;
+            inline void pixelPut(int x, int y, uint32_t color) noexcept;
+            inline void stringPut(int x, int y, int color, std::string str);
+            inline void texturePut(Texture* texture, int x, int y);
+            inline void loadFont(const std::filesystem::path& filepath, float scale);
+
+            ~GraphicsSupport();
+
+        private:
+            std::vector _textures_to_render;
+            PixelPutPipeline _pixel_put_pipeline;
+            glm::mat4 _proj = glm::mat4(1.0);
+            std::shared_ptr<MLX_Window> _window;
+            std::unique_ptr<TextPutPipeline> _text_put_pipeline; // unique_ptr because of the size of the class
+            std::unique_ptr<Renderer> _renderer;
+            std::size_t _width = 0;
+            std::size_t _height = 0;
+            int _id;
+    };
+}
+
+#include
+
+#endif
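For reference, the orthographic projection set at the top of render() is what lets every pipeline work directly in pixel coordinates: an ortho matrix over [0, w] x [0, h] maps a pixel position to clip space as

    x_ndc = 2 * x / w - 1        (so x = 0 maps to -1 and x = w maps to +1)
    y_ndc = 2 * y / h - 1        (e.g. w = 400, x = 100 gives x_ndc = -0.5)

The matrix is uploaded once per frame through the renderer's uniform buffer (see vk_ubo.cpp further down), after which every textured quad, pixel and glyph is positioned in pixels.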
diff --git a/MacroLibX/src/core/graphics.inl b/MacroLibX/src/core/graphics.inl
new file mode 100644
index 0000000..a03bd9f
--- /dev/null
+++ b/MacroLibX/src/core/graphics.inl
@@ -0,0 +1,50 @@
+/* ************************************************************************** */
+/*   graphics.inl :: by maldavid :: created 2023/04/02, updated 2023/04/02    */
+/* ************************************************************************** */
+
+#include
+#include
+
+namespace mlx
+{
+    int& GraphicsSupport::getID() noexcept { return _id; }
+    std::shared_ptr<MLX_Window> GraphicsSupport::getWindow() { return _window; }
+
+    void GraphicsSupport::clearRenderData() noexcept
+    {
+        _textures_to_render.clear();
+        _pixel_put_pipeline.clear();
+        _text_put_pipeline->clear();
+    }
+
+    void GraphicsSupport::pixelPut(int x, int y, uint32_t color) noexcept
+    {
+        _pixel_put_pipeline.setPixel(x, y, color);
+    }
+
+    void GraphicsSupport::stringPut(int x, int y, int color, std::string str)
+    {
+        _text_put_pipeline->put(x, y, color, str);
+    }
+
+    void GraphicsSupport::texturePut(Texture* texture, int x, int y)
+    {
+        _textures_to_render.emplace_back(texture, x, y);
+        // if the same texture/position pair was already queued, drop the older entry so it is only drawn once
+        auto it = std::find(_textures_to_render.begin(), _textures_to_render.end() - 1, _textures_to_render.back());
+        if(it != _textures_to_render.end() - 1)
+            _textures_to_render.erase(it);
+    }
+
+    void GraphicsSupport::loadFont(const std::filesystem::path& filepath, float scale)
+    {
+        _text_put_pipeline->loadFont(filepath, scale);
+    }
+}
diff --git a/MacroLibX/src/core/memory.cpp b/MacroLibX/src/core/memory.cpp
new file mode 100644
index 0000000..8edf9a3
--- /dev/null
+++ b/MacroLibX/src/core/memory.cpp
@@ -0,0 +1,66 @@
+/* ************************************************************************** */
+/*   memory.cpp :: by kbz_8 :: created 2023/12/07, updated 2023/12/11         */
+/* ************************************************************************** */
+
+#include
+#include
+#include
+#include
+
+namespace mlx
+{
+    void* MemManager::malloc(std::size_t size)
+    {
+        void* ptr = std::malloc(size);
+        if(ptr != nullptr)
+            _blocks.push_back(ptr);
+        return ptr;
+    }
+
+    void* MemManager::calloc(std::size_t n, std::size_t size)
+    {
+        void* ptr = std::calloc(n, size);
+        if(ptr != nullptr)
+            _blocks.push_back(ptr);
+        return ptr;
+    }
+
+    void* MemManager::realloc(void* ptr, std::size_t size)
+    {
+        void* ptr2 = std::realloc(ptr, size);
+        if(ptr2 == nullptr) // realloc failed : the original block is still alive, keep tracking it
+            return nullptr;
+        _blocks.push_back(ptr2);
+        auto it = std::find(_blocks.begin(), _blocks.end(), ptr);
+        if(it != _blocks.end())
+            _blocks.erase(it);
+        return ptr2;
+    }
+
+    void MemManager::free(void* ptr)
+    {
+        auto it = std::find(_blocks.begin(), _blocks.end(), ptr);
+        if(it == _blocks.end())
+        {
+            core::error::report(e_kind::error, "Memory Manager : trying to free a pointer not allocated by the memory manager");
+            return;
+        }
+        std::free(*it);
+        _blocks.erase(it);
+    }
+
+    MemManager::~MemManager()
+    {
+        std::for_each(_blocks.begin(), _blocks.end(), [](void* ptr)
+        {
+            std::free(ptr);
+        });
+    }
+}
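Usage note (sketch, not part of the diff): MemManager is the "C garbage collector" mentioned in bridge.cpp. Every block it hands out is remembered in _blocks, and anything the caller forgot to free is released by the singleton's destructor at shutdown; SDL's own allocations are routed through it via SDL_SetMemoryFunctions in application.cpp above.

    // hypothetical direct use of the manager:
    int* arr = static_cast<int*>(mlx::MemManager::malloc(16 * sizeof(int)));
    mlx::MemManager::free(arr);   // freed now, block removed from _blocks
    void* lazy = mlx::MemManager::calloc(4, 64);
    (void)lazy;                   // never freed here: reclaimed by ~MemManager at exit

The trade-off is that free and realloc do a linear std::find over _blocks, so each call is O(n): fine for SDL's allocation volume, costly for a general-purpose allocator.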
diff --git a/MacroLibX/src/core/memory.h b/MacroLibX/src/core/memory.h
new file mode 100644
index 0000000..6d61b88
--- /dev/null
+++ b/MacroLibX/src/core/memory.h
@@ -0,0 +1,41 @@
+/* ************************************************************************** */
+/*   memory.h :: by kbz_8 :: created 2023/12/07, updated 2023/12/11           */
+/* ************************************************************************** */
+
+#ifndef __MLX_MEMORY__
+#define __MLX_MEMORY__
+
+#include
+#include
+#include
+
+namespace mlx
+{
+    class MemManager : public Singleton<MemManager>
+    {
+        friend class Singleton<MemManager>;
+
+        public:
+            static void* malloc(std::size_t size);
+            static void* calloc(std::size_t n, std::size_t size);
+            static void* realloc(void* ptr, std::size_t size);
+            static void free(void* ptr);
+
+        private:
+            MemManager() = default;
+            ~MemManager();
+
+        private:
+            inline static std::list<void*> _blocks;
+    };
+}
+
+#endif
diff --git a/MacroLibX/src/platform/inputs.cpp b/MacroLibX/src/platform/inputs.cpp
new file mode 100644
index 0000000..bb369db
--- /dev/null
+++ b/MacroLibX/src/platform/inputs.cpp
@@ -0,0 +1,150 @@
+/* ************************************************************************** */
+/*   inputs.cpp :: by maldavid :: created 2022/10/05, updated 2023/12/11      */
+/* ************************************************************************** */
+
+#include "inputs.h"
+#include
+#include
+
+namespace mlx
+{
+    void Input::update()
+    {
+        _xRel = 0;
+        _yRel = 0;
+
+        while(SDL_PollEvent(&_event))
+        {
+            if(_event.type == SDL_MOUSEMOTION)
+            {
+                _x = _event.motion.x;
+                _y = _event.motion.y;
+
+                _xRel = _event.motion.xrel;
+                _yRel = _event.motion.yrel;
+            }
+
+            uint32_t id = _event.window.windowID;
+            if(!_events_hooks.count(id))
+                continue;
+            auto& hooks = _events_hooks[id];
+
+            switch(_event.type)
+            {
+                case SDL_KEYDOWN:
+                {
+                    if(hooks[MLX_KEYDOWN].hook)
+                        hooks[MLX_KEYDOWN].hook(_event.key.keysym.scancode, hooks[MLX_KEYDOWN].param);
+                    break;
+                }
+
+                case SDL_KEYUP:
+                {
+                    if(hooks[MLX_KEYUP].hook)
+                        hooks[MLX_KEYUP].hook(_event.key.keysym.scancode, hooks[MLX_KEYUP].param);
+                    break;
+                }
+
+                case SDL_MOUSEBUTTONDOWN:
+                {
+                    if(hooks[MLX_MOUSEDOWN].hook)
+                        hooks[MLX_MOUSEDOWN].hook(_event.button.button, hooks[MLX_MOUSEDOWN].param);
+                    break;
+                }
+
+                case SDL_MOUSEBUTTONUP:
+                {
+                    if(hooks[MLX_MOUSEUP].hook)
+                        hooks[MLX_MOUSEUP].hook(_event.button.button, hooks[MLX_MOUSEUP].param);
+                    break;
+                }
+
+                case SDL_MOUSEWHEEL:
+                {
+                    if(hooks[MLX_MOUSEWHEEL].hook)
+                    {
+                        if(_event.wheel.y > 0) // scroll up
+                            hooks[MLX_MOUSEWHEEL].hook(1, hooks[MLX_MOUSEWHEEL].param);
+                        else if(_event.wheel.y < 0) // scroll down
+                            hooks[MLX_MOUSEWHEEL].hook(2, hooks[MLX_MOUSEWHEEL].param);
+
+                        if(_event.wheel.x > 0) // scroll right
+                            hooks[MLX_MOUSEWHEEL].hook(3, hooks[MLX_MOUSEWHEEL].param);
+                        else if(_event.wheel.x < 0) // scroll left
+                            hooks[MLX_MOUSEWHEEL].hook(4, hooks[MLX_MOUSEWHEEL].param);
+                    }
+                    break;
+                }
+
+                case SDL_WINDOWEVENT:
+                {
+                    auto& win_hook = hooks[MLX_WINDOW_EVENT];
+                    switch(_event.window.event)
+                    {
+                        case SDL_WINDOWEVENT_CLOSE:
+                        {
+                            if(win_hook.hook)
+                                win_hook.hook(0, win_hook.param);
+                            break;
+                        }
+                        case SDL_WINDOWEVENT_MOVED:
+                        {
+                            if(win_hook.hook)
+                                win_hook.hook(1, win_hook.param);
+                            break;
+                        }
+                        case SDL_WINDOWEVENT_MINIMIZED:
+                        {
+                            if(win_hook.hook)
+                                win_hook.hook(2, win_hook.param);
+                            break;
+                        }
+                        case SDL_WINDOWEVENT_MAXIMIZED:
+                        {
+                            if(win_hook.hook)
+                                win_hook.hook(3, win_hook.param);
+                            break;
+                        }
+                        case SDL_WINDOWEVENT_ENTER:
+                        {
+                            if(win_hook.hook)
+                                win_hook.hook(4, win_hook.param);
+                            break;
+                        }
+                        case SDL_WINDOWEVENT_FOCUS_GAINED:
+                        {
+                            if(win_hook.hook)
+                                win_hook.hook(4, win_hook.param);
+                            break;
+                        }
+                        case SDL_WINDOWEVENT_LEAVE:
+                        {
+                            if(win_hook.hook)
+                                win_hook.hook(5, win_hook.param);
+                            break;
+                        }
+                        case SDL_WINDOWEVENT_FOCUS_LOST:
+                        {
+                            if(win_hook.hook)
+                                win_hook.hook(4, win_hook.param);
+                            break;
+                        }
+
+                        default : break;
+                    }
+                    break;
+                }
+
+                default: break;
+            }
+        }
+    }
+}
diff --git a/MacroLibX/src/platform/inputs.h b/MacroLibX/src/platform/inputs.h
new file mode 100644
index 0000000..9e68289
--- /dev/null
+++ b/MacroLibX/src/platform/inputs.h
@@ -0,0 +1,77 @@
+/* ************************************************************************** */
+/*   inputs.h :: by maldavid :: created 2022/10/05, updated 2023/12/11        */
+/* ************************************************************************** */
+
+#ifndef __MLX_INPUTS__
+#define __MLX_INPUTS__
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+
+#include "window.h"
+
+namespace mlx
+{
+    struct Hook
+    {
+        std::function<int(int, void*)> hook;
+        void* param = nullptr;
+    };
+
+    class Input
+    {
+        public:
+            Input() = default;
+
+            void update();
+
+            inline bool isMouseMoving() const noexcept { return _xRel || _yRel; }
+
+            inline int getX() const noexcept { return _x; }
+            inline int getY() const noexcept { return _y; }
+
+            inline int getXRel() const noexcept { return _xRel; }
+            inline int getYRel() const noexcept { return _yRel; }
+
+            inline bool is_running() const noexcept { return !_end; }
+            inline constexpr void finish() noexcept { _end = true; }
+
+            inline void addWindow(std::shared_ptr<MLX_Window> window)
+            {
+                _windows[window->getID()] = window;
+                _events_hooks[window->getID()] = {};
+            }
+
+            inline void onEvent(uint32_t id, int event, int (*funct_ptr)(int, void*), void* param) noexcept
+            {
+                _events_hooks[id][event].hook = funct_ptr;
+                _events_hooks[id][event].param = param;
+            }
+
+            ~Input() = default;
+
+        private:
+            std::unordered_map<uint32_t, std::shared_ptr<MLX_Window>> _windows;
+            std::unordered_map<uint32_t, std::array<Hook, 6>> _events_hooks;
+            SDL_Event _event;
+
+            int _x = 0;
+            int _y = 0;
+            int _xRel = 0;
+            int _yRel = 0;
+
+            bool _end = false;
+    };
+}
+
+#endif // __MLX_INPUTS__
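Putting inputs.h and bridge.cpp together: a client registers one callback per (window, event type) pair through mlx_on_event, and Input::update above dispatches SDL events to it. A sketch (the enumerator names come from this diff; the integer payloads, e.g. 1 to 4 for the wheel directions, are the ones hardcoded in Input::update):

    #include <cstdio>

    static int on_key(int key, void* param) // key is an SDL scancode, per inputs.cpp
    {
        (void)param;
        std::printf("key down: %d\n", key);
        return 0;
    }

    // mlx and win as obtained from mlx_init / mlx_new_window:
    // mlx_on_event(mlx, win, MLX_KEYDOWN, on_key, nullptr);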
diff --git a/MacroLibX/src/platform/window.cpp b/MacroLibX/src/platform/window.cpp
new file mode 100644
index 0000000..aee77aa
--- /dev/null
+++ b/MacroLibX/src/platform/window.cpp
@@ -0,0 +1,57 @@
+/* ************************************************************************** */
+/*   window.cpp :: by maldavid :: created 2022/10/04, updated 2023/12/27      */
+/* ************************************************************************** */
+
+#include
+#include
+#include
+#include
+
+namespace mlx
+{
+    #if SDL_BYTEORDER == SDL_BIG_ENDIAN
+        constexpr const uint32_t rmask = 0xff000000;
+        constexpr const uint32_t gmask = 0x00ff0000;
+        constexpr const uint32_t bmask = 0x0000ff00;
+        constexpr const uint32_t amask = 0x000000ff;
+    #else
+        constexpr const uint32_t rmask = 0x000000ff;
+        constexpr const uint32_t gmask = 0x0000ff00;
+        constexpr const uint32_t bmask = 0x00ff0000;
+        constexpr const uint32_t amask = 0xff000000;
+    #endif
+
+    MLX_Window::MLX_Window(std::size_t w, std::size_t h, const std::string& title) : _width(w), _height(h)
+    {
+        if(title.find("vvaas") != std::string::npos)
+            core::error::report(e_kind::message, "vvaas est mauvais");
+        _win = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, w, h, SDL_WINDOW_VULKAN | SDL_WINDOW_SHOWN);
+        if(!_win)
+            core::error::report(e_kind::fatal_error, "unable to open a new window, %s", SDL_GetError());
+        _id = SDL_GetWindowID(_win);
+        _icon = SDL_CreateRGBSurfaceFrom(static_cast<void*>(logo_mlx), logo_mlx_width, logo_mlx_height, 32, 4 * logo_mlx_width, rmask, gmask, bmask, amask);
+        SDL_SetWindowIcon(_win, _icon);
+    }
+
+    void MLX_Window::destroy() noexcept
+    {
+        if(_win != nullptr)
+        {
+            SDL_DestroyWindow(_win);
+            _win = nullptr;
+        }
+        if(_icon != nullptr)
+        {
+            SDL_FreeSurface(_icon);
+            _icon = nullptr;
+        }
+    }
+}
diff --git a/MacroLibX/src/platform/window.h b/MacroLibX/src/platform/window.h
new file mode 100644
index 0000000..876c14b
--- /dev/null
+++ b/MacroLibX/src/platform/window.h
@@ -0,0 +1,45 @@
+/* ************************************************************************** */
+/*   window.h :: by maldavid :: created 2022/10/04, updated 2023/12/21        */
+/* ************************************************************************** */
+
+#ifndef __MLX_WINDOW__
+#define __MLX_WINDOW__
+
+#include
+#include
+#include
+
+namespace mlx
+{
+    class MLX_Window
+    {
+        public:
+            MLX_Window(std::size_t w, std::size_t h, const std::string& title);
+
+            inline SDL_Window* getNativeWindow() const noexcept { return _win; }
+            inline int getWidth() const noexcept { return _width; }
+            inline int getHeight() const noexcept { return _height; }
+            inline uint32_t getID() const noexcept { return _id; }
+
+            void destroy() noexcept;
+
+            ~MLX_Window() = default;
+
+        private:
+            SDL_Surface* _icon = nullptr;
+            SDL_Window* _win = nullptr;
+            int _width = 0;
+            int _height = 0;
+            uint32_t _id = -1;
+    };
+}
+
+#endif
diff --git a/MacroLibX/src/renderer/buffers/vk_buffer.cpp b/MacroLibX/src/renderer/buffers/vk_buffer.cpp
new file mode 100644
index 0000000..903de12
--- /dev/null
+++ b/MacroLibX/src/renderer/buffers/vk_buffer.cpp
@@ -0,0 +1,159 @@
+/* ************************************************************************** */
+/*   vk_buffer.cpp :: by maldavid :: created 2022/10/08, updated 2023/12/16   */
+/* ************************************************************************** */
+
+#include "vk_buffer.h"
+#include
+#include
+#include
+#include
+#include
+#include
+
+namespace mlx
+{
+    void Buffer::create(Buffer::kind type, VkDeviceSize size, VkBufferUsageFlags usage, const char* name, const void* data)
+    {
+        _usage = usage;
+        if(type == Buffer::kind::constant || type == Buffer::kind::dynamic_device_local)
+        {
+            if(data == nullptr && type == Buffer::kind::constant)
+            {
+                core::error::report(e_kind::warning, "Vulkan : trying to create constant buffer without data (constant buffers cannot be modified after creation)");
+                return;
+            }
+            _usage |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+        }
+
+        VmaAllocationCreateInfo alloc_info{};
+        alloc_info.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;
+        alloc_info.usage = VMA_MEMORY_USAGE_AUTO;
+
+        createBuffer(_usage, alloc_info, size, name);
+
+        if(data != nullptr)
+        {
+            void* mapped = nullptr;
+            mapMem(&mapped);
+            std::memcpy(mapped, data, size);
+            unmapMem();
+            if(type == Buffer::kind::constant || type == Buffer::kind::dynamic_device_local)
+                pushToGPU();
+        }
+    }
+
+    void Buffer::destroy() noexcept
+    {
+        if(_is_mapped)
+            unmapMem();
+        if(_buffer != VK_NULL_HANDLE)
+            Render_Core::get().getAllocator().destroyBuffer(_allocation, _buffer);
+        _buffer = VK_NULL_HANDLE;
+    }
+
+    void Buffer::createBuffer(VkBufferUsageFlags usage, VmaAllocationCreateInfo info, VkDeviceSize size, [[maybe_unused]] const char* name)
+    {
+        VkBufferCreateInfo bufferInfo{};
+        bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+        bufferInfo.size = size;
+        bufferInfo.usage = usage;
+        bufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+
+        #ifdef DEBUG
+            _name = name;
+            std::string alloc_name = _name;
+            if(usage & VK_BUFFER_USAGE_INDEX_BUFFER_BIT)
+                alloc_name.append("_index_buffer");
+            else if(usage & VK_BUFFER_USAGE_VERTEX_BUFFER_BIT)
+                alloc_name.append("_vertex_buffer");
+            else if(!(usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT))
+                alloc_name.append("_buffer");
+            _allocation = Render_Core::get().getAllocator().createBuffer(&bufferInfo, &info, _buffer, alloc_name.c_str());
+        #else
+            _allocation = Render_Core::get().getAllocator().createBuffer(&bufferInfo, &info, _buffer, nullptr);
+        #endif
+        _size = size;
+    }
+
+    bool Buffer::copyFromBuffer(const Buffer& buffer) noexcept
+    {
+        if(!(_usage & VK_BUFFER_USAGE_TRANSFER_DST_BIT))
+        {
+            core::error::report(e_kind::error, "Vulkan : buffer cannot be the destination of a copy because it does not have the correct usage flag");
+            return false;
+        }
+        if(!(buffer._usage & VK_BUFFER_USAGE_TRANSFER_SRC_BIT))
+        {
+            core::error::report(e_kind::error, "Vulkan : buffer cannot be the source of a copy because it does not have the correct usage flag");
+            return false;
+        }
+
+        // TODO, use global cmd buffer pool to manage resources
+        CmdBuffer& cmd = Render_Core::get().getSingleTimeCmdBuffer();
+        cmd.beginRecord();
+
+        VkBufferCopy copyRegion{};
+        copyRegion.size = _size;
+        vkCmdCopyBuffer(cmd.get(), buffer._buffer, _buffer, 1, &copyRegion);
+
+        cmd.endRecord();
+        cmd.submitIdle();
+
+        return true;
+    }
+
+    void Buffer::pushToGPU() noexcept
+    {
+        VmaAllocationCreateInfo alloc_info{};
+        alloc_info.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE;
+
+        Buffer newBuffer;
+        newBuffer._usage = (_usage & 0xFFFFFFFC) | VK_BUFFER_USAGE_TRANSFER_DST_BIT; // clear the transfer bits, then mark as copy destination
+        #ifdef DEBUG
+            std::string new_name = _name + "_GPU";
+            newBuffer.createBuffer(newBuffer._usage, alloc_info, _size, new_name.c_str());
+        #else
+            newBuffer.createBuffer(newBuffer._usage, alloc_info, _size, nullptr);
+        #endif
+
+        if(newBuffer.copyFromBuffer(*this)) // if the copy succeeded we swap the buffers, else the new one is deleted
+            this->swap(newBuffer);
+        newBuffer.destroy();
+    }
+
+    void Buffer::swap(Buffer& buffer) noexcept
+    {
+        VkBuffer temp_b = _buffer;
+        _buffer = buffer._buffer;
+        buffer._buffer = temp_b;
+
+        VmaAllocation temp_a = buffer._allocation;
+        buffer._allocation = _allocation;
+        _allocation = temp_a;
+
+        VkDeviceSize temp_size = buffer._size;
+        buffer._size = _size;
+        _size = temp_size;
+
+        VkDeviceSize temp_offset = buffer._offset;
+        buffer._offset = _offset;
+        _offset = temp_offset;
+
+        VkBufferUsageFlags temp_u = _usage;
+        _usage = buffer._usage;
+        buffer._usage = temp_u;
+    }
+
+    void Buffer::flush(VkDeviceSize size, VkDeviceSize offset)
+    {
+        Render_Core::get().getAllocator().flush(_allocation, size, offset);
+    }
+}
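The pattern to note in vk_buffer.cpp above: create() always allocates host-visible memory first; for kind::constant and kind::dynamic_device_local it then calls pushToGPU(), which allocates a device-local twin (VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE), records a vkCmdCopyBuffer on a single-time command buffer, and swap()s the handles so the object transparently becomes the GPU-resident copy. From a caller's point of view (sketch, names from this diff):

    float vertices[] = { 0.f, 0.f, 1.f, 0.f, 1.f, 1.f, 0.f, 1.f };

    mlx::Buffer buf;
    // host-visible alloc -> memcpy of vertices -> device-local copy -> swap
    buf.create(mlx::Buffer::kind::constant, sizeof(vertices),
               VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, "example_vertices", vertices);
    // buf.get() now names the device-local VkBuffer; the staging twin was destroyed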
diff --git a/MacroLibX/src/renderer/buffers/vk_buffer.h b/MacroLibX/src/renderer/buffers/vk_buffer.h
new file mode 100644
index 0000000..203b1ce
--- /dev/null
+++ b/MacroLibX/src/renderer/buffers/vk_buffer.h
@@ -0,0 +1,64 @@
+/* ************************************************************************** */
+/*   vk_buffer.h :: by maldavid :: created 2022/10/06, updated 2024/01/03     */
+/* ************************************************************************** */
+
+#ifndef __MLX_VK_BUFFER__
+#define __MLX_VK_BUFFER__
+
+#include
+#include
+#include
+
+namespace mlx
+{
+    class Buffer
+    {
+        public:
+            enum class kind { dynamic, dynamic_device_local, uniform, constant };
+
+            void create(kind type, VkDeviceSize size, VkBufferUsageFlags usage, const char* name, const void* data = nullptr);
+            void destroy() noexcept;
+
+            inline void mapMem(void** data) noexcept { Render_Core::get().getAllocator().mapMemory(_allocation, data); _is_mapped = true; }
+            inline bool isMapped() const noexcept { return _is_mapped; }
+            inline void unmapMem() noexcept { Render_Core::get().getAllocator().unmapMemory(_allocation); _is_mapped = false; }
+
+            void flush(VkDeviceSize size = VK_WHOLE_SIZE, VkDeviceSize offset = 0);
+            bool copyFromBuffer(const Buffer& buffer) noexcept;
+
+            inline VkBuffer& operator()() noexcept { return _buffer; }
+            inline VkBuffer& get() noexcept { return _buffer; }
+            inline VkDeviceSize getSize() const noexcept { return _size; }
+            inline VkDeviceSize getOffset() const noexcept { return _offset; }
+
+        protected:
+            void pushToGPU() noexcept;
+            void swap(Buffer& buffer) noexcept;
+
+        protected:
+            VmaAllocation _allocation;
+            VkBuffer _buffer = VK_NULL_HANDLE;
+            VkDeviceSize _offset = 0;
+            VkDeviceSize _size = 0;
+
+        private:
+            void createBuffer(VkBufferUsageFlags usage, VmaAllocationCreateInfo info, VkDeviceSize size, const char* name);
+
+        private:
+            #ifdef DEBUG
+                std::string _name;
+            #endif
+            VkBufferUsageFlags _usage = 0;
+            bool _is_mapped = false;
+    };
+}
+
+#endif
diff --git a/MacroLibX/src/renderer/buffers/vk_ibo.h b/MacroLibX/src/renderer/buffers/vk_ibo.h
new file mode 100644
index 0000000..ad49eed
--- /dev/null
+++ b/MacroLibX/src/renderer/buffers/vk_ibo.h
@@ -0,0 +1,31 @@
+/* ************************************************************************** */
+/*   vk_ibo.h :: by maldavid :: created 2023/01/25, updated 2024/01/03        */
+/* ************************************************************************** */
+
+#ifndef __VK_IBO__
+#define __VK_IBO__
+
+#include
+#include
+#include "vk_buffer.h"
+#include
+
+namespace mlx
+{
+    class C_IBO : public Buffer
+    {
+        public:
+            inline void create(uint32_t size, const uint16_t* data, const char* name) { Buffer::create(Buffer::kind::constant, size, VK_BUFFER_USAGE_INDEX_BUFFER_BIT, name, data); }
+            inline void bind(Renderer& renderer) noexcept { vkCmdBindIndexBuffer(renderer.getActiveCmdBuffer().get(), _buffer, _offset, VK_INDEX_TYPE_UINT16); }
+    };
+}
+
+#endif
diff --git a/MacroLibX/src/renderer/buffers/vk_ubo.cpp b/MacroLibX/src/renderer/buffers/vk_ubo.cpp
new file mode 100644
index 0000000..2369b5b
--- /dev/null
+++ b/MacroLibX/src/renderer/buffers/vk_ubo.cpp
@@ -0,0 +1,74 @@
+/* ************************************************************************** */
+/*   vk_ubo.cpp :: by maldavid :: created 2022/10/06, updated 2023/12/10      */
+/* ************************************************************************** */
+
+#include "vk_ubo.h"
+#include
+#include
+
+namespace mlx
+{
+    void UBO::create(Renderer* renderer, uint32_t size, [[maybe_unused]] const char* name)
+    {
+        _renderer = renderer;
+
+        for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++)
+        {
+            #ifdef DEBUG
+                std::string name_frame = name;
+                name_frame.append(std::to_string(i));
+                _buffers[i].create(Buffer::kind::uniform, size, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, name_frame.c_str());
+            #else
+                _buffers[i].create(Buffer::kind::uniform, size, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, nullptr);
+            #endif
+            _buffers[i].mapMem(&_maps[i]);
+            if(_maps[i] == nullptr)
+                core::error::report(e_kind::fatal_error, "Vulkan : unable to map a uniform buffer");
+        }
+    }
+
+    void UBO::setData(uint32_t size, const void* data)
+    {
+        std::memcpy(_maps[_renderer->getActiveImageIndex()], data, static_cast<std::size_t>(size));
+    }
+
+    void UBO::setDynamicData(uint32_t size, const void* data)
+    {
+        std::memcpy(_maps[_renderer->getActiveImageIndex()], data, static_cast<std::size_t>(size));
+        _buffers[_renderer->getActiveImageIndex()].flush();
+    }
+
+    unsigned int UBO::getSize() noexcept
+    {
+        return _buffers[_renderer->getActiveImageIndex()].getSize();
+    }
+
+    unsigned int UBO::getOffset() noexcept
+    {
+        return _buffers[_renderer->getActiveImageIndex()].getOffset();
+    }
+
+    VkBuffer& UBO::operator()() noexcept
+    {
+        return _buffers[_renderer->getActiveImageIndex()].get();
+    }
+
+    VkBuffer& UBO::get() noexcept
+    {
+        return _buffers[_renderer->getActiveImageIndex()].get();
+    }
+
+    void UBO::destroy() noexcept
+    {
+        for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++)
+            _buffers[i].destroy();
+    }
+}
diff --git a/MacroLibX/src/renderer/buffers/vk_ubo.h b/MacroLibX/src/renderer/buffers/vk_ubo.h
new file mode 100644
index 0000000..6dcdc21
--- /dev/null
+++ b/MacroLibX/src/renderer/buffers/vk_ubo.h
@@ -0,0 +1,51 @@
+/* ************************************************************************** */
+/*   vk_ubo.h :: by maldavid :: created 2022/10/06, updated 2023/12/08        */
+/* ************************************************************************** */
+
+#ifndef __MLX_VK_UBO__
+#define __MLX_VK_UBO__
+
+#include "vk_buffer.h"
+#include
+#include
+#include
+
+namespace mlx
+{
+    class UBO
+    {
+        public:
+            void create(class Renderer* renderer, uint32_t size, const char* name);
+
+            void setData(uint32_t size, const void* data);
+            void setDynamicData(uint32_t size, const void* data);
+
+            void destroy() noexcept;
+
+            unsigned int getSize() noexcept;
+            unsigned int getOffset() noexcept;
+            VkDeviceMemory getDeviceMemory() noexcept;
+            VkBuffer& operator()() noexcept;
+            VkBuffer& get() noexcept;
+
+            inline unsigned int getSize(int i) noexcept { return _buffers[i].getSize(); }
+            inline unsigned int getOffset(int i) noexcept { return _buffers[i].getOffset(); }
+            inline VkBuffer& operator()(int i) noexcept { return _buffers[i].get(); }
+            inline VkBuffer& get(int i) noexcept { return _buffers[i].get(); }
+
+        private:
+            std::array<Buffer, MAX_FRAMES_IN_FLIGHT> _buffers;
+            std::array<void*, MAX_FRAMES_IN_FLIGHT> _maps;
+            class Renderer* _renderer = nullptr;
+    };
+}
+
+#endif // __MLX_VK_UBO__
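The point of the per-frame arrays in vk_ubo.h: one Buffer and one persistent mapping per frame in flight, with setData writing only the copy selected by getActiveImageIndex(), so the CPU never overwrites uniforms the GPU is still reading for the previous frame. This is exactly how graphics.cpp uploads the projection matrix each frame; roughly (sketch reusing names from this diff):

    glm::mat4 proj = glm::ortho<float>(0, 400, 0, 400);
    // writes this frame's buffer only; the other frames keep their old copy
    renderer.getUniformBuffer()->setData(sizeof(proj), &proj);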
diff --git a/MacroLibX/src/renderer/buffers/vk_vbo.cpp b/MacroLibX/src/renderer/buffers/vk_vbo.cpp
new file mode 100644
index 0000000..8ceab7d
--- /dev/null
+++ b/MacroLibX/src/renderer/buffers/vk_vbo.cpp
@@ -0,0 +1,55 @@
+/* ************************************************************************** */
+/*   vk_vbo.cpp :: by maldavid :: created 2022/10/06, updated 2023/12/12      */
+/* ************************************************************************** */
+
+#include "vk_vbo.h"
+#include
+
+namespace mlx
+{
+    void VBO::setData(uint32_t size, const void* data)
+    {
+        if(size > getSize())
+        {
+            core::error::report(e_kind::error, "Vulkan : trying to store too much data in a vertex buffer (%d bytes in %d bytes)", size, getSize());
+            return;
+        }
+
+        if(data == nullptr)
+            core::error::report(e_kind::warning, "Vulkan : mapping null data in a vertex buffer");
+
+        void* temp = nullptr;
+        mapMem(&temp);
+        std::memcpy(temp, data, static_cast<std::size_t>(size));
+        unmapMem();
+    }
+
+    void D_VBO::setData(uint32_t size, const void* data)
+    {
+        if(size > getSize())
+        {
+            core::error::report(e_kind::error, "Vulkan : trying to store too much data in a vertex buffer (%d bytes in %d bytes)", size, getSize());
+            return;
+        }
+
+        if(data == nullptr)
+            core::error::report(e_kind::warning, "Vulkan : mapping null data in a vertex buffer");
+
+        Buffer tmp_buf;
+        #ifdef DEBUG
+            tmp_buf.create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT, "tmp_buffer", data);
+        #else
+            tmp_buf.create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT, nullptr, data);
+        #endif
+        copyFromBuffer(tmp_buf);
+        tmp_buf.destroy();
+    }
+}
diff --git a/MacroLibX/src/renderer/buffers/vk_vbo.h b/MacroLibX/src/renderer/buffers/vk_vbo.h
new file mode 100644
index 0000000..1bf2209
--- /dev/null
+++ b/MacroLibX/src/renderer/buffers/vk_vbo.h
@@ -0,0 +1,46 @@
+/* ************************************************************************** */
+/*   vk_vbo.h :: by maldavid :: created 2022/10/06, updated 2023/12/12        */
+/* ************************************************************************** */
+
+#ifndef __MLX_VK_VBO__
+#define __MLX_VK_VBO__
+
+#include "vk_buffer.h"
+#include
+#include
+
+namespace mlx
+{
+    class VBO : public Buffer
+    {
+        public:
+            inline void create(uint32_t size, const void* data, const char* name) { Buffer::create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, name, data); }
+            void setData(uint32_t size, const void* data);
+            inline void bind(Renderer& renderer) noexcept { vkCmdBindVertexBuffers(renderer.getActiveCmdBuffer().get(), 0, 1, &_buffer, &_offset); }
+    };
+
+    class D_VBO : public Buffer
+    {
+        public:
+            inline void create(uint32_t size, const void* data, const char* name) { Buffer::create(Buffer::kind::dynamic_device_local, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, name, data); }
+            void setData(uint32_t size, const void* data);
+            inline void bind(Renderer& renderer) noexcept { vkCmdBindVertexBuffers(renderer.getActiveCmdBuffer().get(), 0, 1, &_buffer, &_offset); }
+    };
+
+    class C_VBO : public Buffer
+    {
+        public:
+            inline void create(uint32_t size, const void* data, const char* name) { Buffer::create(Buffer::kind::constant, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, name, data); }
+            inline void bind(Renderer& renderer) noexcept { vkCmdBindVertexBuffers(renderer.getActiveCmdBuffer().get(), 0, 1, &_buffer, &_offset); }
+    };
+}
+
+#endif // __MLX_VK_VBO__
diff --git a/MacroLibX/src/renderer/command/cmd_manager.cpp b/MacroLibX/src/renderer/command/cmd_manager.cpp
new file mode 100644
index 0000000..37524a5
--- /dev/null
+++ b/MacroLibX/src/renderer/command/cmd_manager.cpp
@@ -0,0 +1,40 @@
+/* ************************************************************************** */
+/*   cmd_manager.cpp :: by maldavid :: created 2023/04/02, updated 2023/04/02 */
+/* ************************************************************************** */
+
+#include
+
+namespace mlx
+{
+    void CmdManager::init() noexcept
+    {
+        _cmd_pool.init();
+        for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++)
+            _cmd_buffers[i].init(this);
+    }
+
+    void CmdManager::beginRecord(int active_image_index)
+    {
+        _cmd_buffers[active_image_index].beginRecord();
+    }
+
+    void CmdManager::endRecord(int active_image_index)
+    {
+        _cmd_buffers[active_image_index].endRecord();
+    }
+
+    void CmdManager::destroy() noexcept
+    {
+        for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++)
+            _cmd_buffers[i].destroy();
+        _cmd_pool.destroy();
+    }
+}
diff --git a/MacroLibX/src/renderer/command/cmd_manager.h b/MacroLibX/src/renderer/command/cmd_manager.h
new file mode 100644
index 0000000..ac69876
--- /dev/null
+++ b/MacroLibX/src/renderer/command/cmd_manager.h
@@ -0,0 +1,47 @@
+/* ************************************************************************** */
+/*   cmd_manager.h :: by maldavid :: created 2023/04/02, updated 2024/01/03   */
+/* ************************************************************************** */
+
+#ifndef __MLX_COMMAND_MANAGER__
+#define __MLX_COMMAND_MANAGER__
+
+#include
+
+#include
+#include
+#include
+#include
+#include
+
+namespace mlx
+{
+    class CmdManager
+    {
+        public:
+            CmdManager() = default;
+
+            void init() noexcept;
+            void beginRecord(int active_image_index);
+            void endRecord(int active_image_index);
+            void destroy() noexcept;
+
+            inline CmdPool& getCmdPool() noexcept { return _cmd_pool; }
+            inline CmdBuffer& getCmdBuffer(int i) noexcept { return _cmd_buffers[i]; }
+
+            ~CmdManager() = default;
+
+        private:
+            std::array<CmdBuffer, MAX_FRAMES_IN_FLIGHT> _cmd_buffers;
+            CmdPool _cmd_pool;
+    };
+}
+
+#endif
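CmdManager above applies the same frames-in-flight idea to command recording: MAX_FRAMES_IN_FLIGHT primary command buffers drawn from one pool, so frame N+1 can be recorded while frame N's buffer is still executing on the GPU. SingleTimeCmdManager below is the complementary tool, a small grow-on-demand pool of one-shot buffers for transfer work (buffer copies, layout transitions) that is submitted and fenced to completion immediately. Per-frame use is just (sketch):

    cmd_manager.beginRecord(active_image_index);
    // ... record vkCmd* calls via cmd_manager.getCmdBuffer(active_image_index) ...
    cmd_manager.endRecord(active_image_index);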
CmdBuffer& SingleTimeCmdManager::getCmdBuffer() noexcept + { + for(CmdBuffer& buf : _buffers) + { + if(buf.isReadyToBeUsed()) + { + buf.reset(); + return buf; + } + } + _buffers.emplace_back().init(&_pool); + return _buffers.back(); + } + + void SingleTimeCmdManager::destroy() noexcept + { + std::for_each(_buffers.begin(), _buffers.end(), [](CmdBuffer& buf) + { + buf.destroy(); + }); + _pool.destroy(); + } +} diff --git a/MacroLibX/src/renderer/command/single_time_cmd_manager.h b/MacroLibX/src/renderer/command/single_time_cmd_manager.h new file mode 100644 index 0000000..bf08869 --- /dev/null +++ b/MacroLibX/src/renderer/command/single_time_cmd_manager.h @@ -0,0 +1,46 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* single_time_cmd_manager.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/12/15 18:25:57 by maldavid #+# #+# */ +/* Updated: 2023/12/16 18:09:56 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_SINGLE_TIME_CMD_MANAGER__ +#define __MLX_SINGLE_TIME_CMD_MANAGER__ + +#include + +#include +#include + +namespace mlx +{ + class SingleTimeCmdManager + { + public: + SingleTimeCmdManager(); + + void init() noexcept; + void destroy() noexcept; + + inline CmdPool& getCmdPool() noexcept { return _pool; } + CmdBuffer& getCmdBuffer() noexcept; + + ~SingleTimeCmdManager() = default; + + inline static constexpr uint8_t MIN_POOL_SIZE = 8; + + private: + std::vector<CmdBuffer> _buffers; + CmdPool _pool; + }; +} + +#endif diff --git a/MacroLibX/src/renderer/command/vk_cmd_buffer.cpp b/MacroLibX/src/renderer/command/vk_cmd_buffer.cpp new file mode 100644 index 0000000..7405dc7 --- /dev/null +++ b/MacroLibX/src/renderer/command/vk_cmd_buffer.cpp @@ -0,0 +1,137 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_cmd_buffer.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/06 18:26:06 by maldavid #+# #+# */ +/* Updated: 2024/01/03 13:12:58 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include "vk_cmd_buffer.h" +#include +#include +#include + +namespace mlx +{ + void CmdBuffer::init(CmdManager* manager) + { + init(&manager->getCmdPool()); + } + + void CmdBuffer::init(CmdPool* pool) + { + _pool = pool; + + VkCommandBufferAllocateInfo allocInfo{}; + allocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; + allocInfo.commandPool = pool->get(); + allocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY; + allocInfo.commandBufferCount = 1; + + VkResult res = vkAllocateCommandBuffers(Render_Core::get().getDevice().get(), &allocInfo, &_cmd_buffer); + if(res != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to allocate command buffer, %s", RCore::verbaliseResultVk(res)); + #ifdef DEBUG + core::error::report(e_kind::message, "Vulkan : created new command buffer"); + #endif + + _fence.init(); + _state = state::idle; + } + + void CmdBuffer::beginRecord(VkCommandBufferUsageFlags usage) + { + if(!isInit()) + core::error::report(e_kind::fatal_error, "Vulkan : beginning record on an uninitialized command buffer"); + if(_state == state::recording) + return; + + VkCommandBufferBeginInfo beginInfo{}; + beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; + 
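+		// `usage` is forwarded verbatim; for a buffer recorded and submitted
+		// once (the submitIdle() path) the natural flag would be
+		// VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, while the default 0
+		// lets the per-frame buffers be re-submitted after a reset.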
beginInfo.flags = usage; + if(vkBeginCommandBuffer(_cmd_buffer, &beginInfo) != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to begin recording command buffer"); + + _state = state::recording; + } + + void CmdBuffer::endRecord() + { + if(!isInit()) + core::error::report(e_kind::fatal_error, "Vulkan : ending record on an uninitialized command buffer"); + if(_state != state::recording) + return; + if(vkEndCommandBuffer(_cmd_buffer) != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to end recording command buffer"); + + _state = state::idle; + } + + void CmdBuffer::submitIdle() noexcept + { + auto device = Render_Core::get().getDevice().get(); + + VkSubmitInfo submitInfo = {}; + submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + submitInfo.commandBufferCount = 1; + submitInfo.pCommandBuffers = &_cmd_buffer; + + VkFenceCreateInfo fenceCreateInfo = {}; + fenceCreateInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; + + VkFence fence; + vkCreateFence(device, &fenceCreateInfo, nullptr, &fence); + vkResetFences(device, 1, &fence); + VkResult res = vkQueueSubmit(Render_Core::get().getQueue().getGraphic(), 1, &submitInfo, fence); + if(res != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan error : failed to submit a single time command buffer, %s", RCore::verbaliseResultVk(res)); + _state = state::submitted; + vkWaitForFences(device, 1, &fence, VK_TRUE, UINT64_MAX); + vkDestroyFence(device, fence, nullptr); + _state = state::ready; + } + + void CmdBuffer::submit(Semaphore* semaphores) noexcept + { + std::array<VkSemaphore, 1> signalSemaphores; + std::array<VkSemaphore, 1> waitSemaphores; + + if(semaphores != nullptr) + { + signalSemaphores[0] = semaphores->getRenderImageSemaphore(); + waitSemaphores[0] = semaphores->getImageSemaphore(); + } + else + { + signalSemaphores[0] = VK_NULL_HANDLE; + waitSemaphores[0] = VK_NULL_HANDLE; + } + VkPipelineStageFlags waitStages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT }; + + VkSubmitInfo submitInfo{}; + submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + submitInfo.waitSemaphoreCount = (semaphores == nullptr ? 0 : waitSemaphores.size()); + submitInfo.pWaitSemaphores = waitSemaphores.data(); + submitInfo.pWaitDstStageMask = waitStages; + submitInfo.commandBufferCount = 1; + submitInfo.pCommandBuffers = &_cmd_buffer; + submitInfo.signalSemaphoreCount = (semaphores == nullptr ?
0 : signalSemaphores.size()); + submitInfo.pSignalSemaphores = signalSemaphores.data(); + + VkResult res = vkQueueSubmit(Render_Core::get().getQueue().getGraphic(), 1, &submitInfo, _fence.get()); + if(res != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan error : failed to submit draw command buffer, %s", RCore::verbaliseResultVk(res)); + _state = state::submitted; + } + + void CmdBuffer::destroy() noexcept + { + _fence.destroy(); + _cmd_buffer = VK_NULL_HANDLE; + _state = state::uninit; + } +} diff --git a/MacroLibX/src/renderer/command/vk_cmd_buffer.h b/MacroLibX/src/renderer/command/vk_cmd_buffer.h new file mode 100644 index 0000000..1e7bccf --- /dev/null +++ b/MacroLibX/src/renderer/command/vk_cmd_buffer.h @@ -0,0 +1,64 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_cmd_buffer.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/06 18:25:42 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:27:20 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_VK_CMD_BUFFER__ +#define __MLX_VK_CMD_BUFFER__ + +#include +#include +#include + +namespace mlx +{ + class CmdBuffer + { + public: + enum class state + { + uninit = 0, // buffer not initialized or destroyed + ready, // buffer ready to be used after having been submitted + idle, // buffer has recorded informations but has not been submitted + recording, // buffer is currently recording + submitted, // buffer has been submitted + }; + + public: + void init(class CmdManager* manager); + void init(class CmdPool* pool); + void destroy() noexcept; + + void beginRecord(VkCommandBufferUsageFlags usage = 0); + void submit(class Semaphore* semaphores) noexcept; + void submitIdle() noexcept; + inline void waitForExecution() noexcept { _fence.waitAndReset(); _state = state::ready; } + inline void reset() noexcept { vkResetCommandBuffer(_cmd_buffer, 0); } + void endRecord(); + + inline bool isInit() const noexcept { return _state != state::uninit; } + inline bool isReadyToBeUsed() const noexcept { return _state == state::ready; } + inline bool isRecording() const noexcept { return _state == state::recording; } + inline bool hasBeenSubmitted() const noexcept { return _state == state::submitted; } + inline state getCurrentState() const noexcept { return _state; } + + inline VkCommandBuffer& operator()() noexcept { return _cmd_buffer; } + inline VkCommandBuffer& get() noexcept { return _cmd_buffer; } + inline Fence& getFence() noexcept { return _fence; } + + private: + Fence _fence; + VkCommandBuffer _cmd_buffer = VK_NULL_HANDLE; + class CmdPool* _pool = nullptr; + state _state = state::uninit; + }; +} + +#endif // __MLX_VK_CMD_BUFFER__ diff --git a/MacroLibX/src/renderer/command/vk_cmd_pool.cpp b/MacroLibX/src/renderer/command/vk_cmd_pool.cpp new file mode 100644 index 0000000..148932f --- /dev/null +++ b/MacroLibX/src/renderer/command/vk_cmd_pool.cpp @@ -0,0 +1,35 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_cmd_pool.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/06 18:24:33 by maldavid #+# #+# */ +/* Updated: 2024/01/03 13:13:25 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include "vk_cmd_pool.h" +#include + 
+namespace mlx +{ + void CmdPool::init() + { + VkCommandPoolCreateInfo poolInfo{}; + poolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; + poolInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT; + poolInfo.queueFamilyIndex = Render_Core::get().getQueue().getFamilies().graphicsFamily.value(); + + VkResult res = vkCreateCommandPool(Render_Core::get().getDevice().get(), &poolInfo, nullptr, &_cmd_pool); + if(res != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create command pool, %s", RCore::verbaliseResultVk(res)); + } + + void CmdPool::destroy() noexcept + { + vkDestroyCommandPool(Render_Core::get().getDevice().get(), _cmd_pool, nullptr); + _cmd_pool = VK_NULL_HANDLE; + } +} diff --git a/MacroLibX/src/renderer/command/vk_cmd_pool.h b/MacroLibX/src/renderer/command/vk_cmd_pool.h new file mode 100644 index 0000000..8d97157 --- /dev/null +++ b/MacroLibX/src/renderer/command/vk_cmd_pool.h @@ -0,0 +1,35 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_cmd_pool.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/06 18:24:12 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:27:26 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_VK_CMD_POOL__ +#define __MLX_VK_CMD_POOL__ + +#include +#include + +namespace mlx +{ + class CmdPool + { + public: + void init(); + void destroy() noexcept; + + inline VkCommandPool& operator()() noexcept { return _cmd_pool; } + inline VkCommandPool& get() noexcept { return _cmd_pool; } + + private: + VkCommandPool _cmd_pool = VK_NULL_HANDLE; + }; +} + +#endif // __MLX_VK_CMD_POOL__ diff --git a/MacroLibX/src/renderer/core/memory.cpp b/MacroLibX/src/renderer/core/memory.cpp new file mode 100644 index 0000000..0604c15 --- /dev/null +++ b/MacroLibX/src/renderer/core/memory.cpp @@ -0,0 +1,183 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* memory.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: kbz_8 +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/10/20 22:02:37 by kbz_8 #+# #+# */ +/* Updated: 2024/01/03 13:09:40 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include +#include +#include + +#define VMA_STATIC_VULKAN_FUNCTIONS 0 +#define VMA_DYNAMIC_VULKAN_FUNCTIONS 0 +#define VMA_VULKAN_VERSION 1002000 +#define VMA_ASSERT(expr) ((void)0) +#define VMA_IMPLEMENTATION + +#ifdef MLX_COMPILER_CLANG + #pragma clang diagnostic push + #pragma clang diagnostic ignored "-Weverything" + #include + #pragma clang diagnostic pop +#elif defined(MLX_COMPILER_GCC) + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wimplicit-fallthrough" + #pragma GCC diagnostic ignored "-Wmissing-field-initializers" + #pragma GCC diagnostic ignored "-Wunused-parameter" + #pragma GCC diagnostic ignored "-Wunused-variable" + #pragma GCC diagnostic ignored "-Wparentheses" + #include + #pragma GCC diagnostic pop +#else + #include +#endif + +#include +#include + +namespace mlx +{ + void GPUallocator::init() noexcept + { + VmaVulkanFunctions vma_vulkan_func{}; + vma_vulkan_func.vkAllocateMemory = vkAllocateMemory; + vma_vulkan_func.vkBindBufferMemory = vkBindBufferMemory; + vma_vulkan_func.vkBindImageMemory = vkBindImageMemory; + vma_vulkan_func.vkCreateBuffer = vkCreateBuffer; + 
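+		// Both VMA_STATIC_VULKAN_FUNCTIONS and VMA_DYNAMIC_VULKAN_FUNCTIONS
+		// are forced to 0 above because the loader is volk: every entry point
+		// is a function pointer resolved at runtime, so VMA is handed this
+		// explicit table instead of linking against the Vulkan loader itself.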
vma_vulkan_func.vkCreateImage = vkCreateImage; + vma_vulkan_func.vkDestroyBuffer = vkDestroyBuffer; + vma_vulkan_func.vkDestroyImage = vkDestroyImage; + vma_vulkan_func.vkFlushMappedMemoryRanges = vkFlushMappedMemoryRanges; + vma_vulkan_func.vkFreeMemory = vkFreeMemory; + vma_vulkan_func.vkGetBufferMemoryRequirements = vkGetBufferMemoryRequirements; + vma_vulkan_func.vkGetImageMemoryRequirements = vkGetImageMemoryRequirements; + vma_vulkan_func.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties; + vma_vulkan_func.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties; + vma_vulkan_func.vkInvalidateMappedMemoryRanges = vkInvalidateMappedMemoryRanges; + vma_vulkan_func.vkMapMemory = vkMapMemory; + vma_vulkan_func.vkUnmapMemory = vkUnmapMemory; + vma_vulkan_func.vkCmdCopyBuffer = vkCmdCopyBuffer; + vma_vulkan_func.vkGetBufferMemoryRequirements2KHR = vkGetBufferMemoryRequirements2; + vma_vulkan_func.vkGetImageMemoryRequirements2KHR = vkGetImageMemoryRequirements2; + vma_vulkan_func.vkBindBufferMemory2KHR = vkBindBufferMemory2; + vma_vulkan_func.vkBindImageMemory2KHR = vkBindImageMemory2; + vma_vulkan_func.vkGetPhysicalDeviceMemoryProperties2KHR = vkGetPhysicalDeviceMemoryProperties2; + + VmaAllocatorCreateInfo allocatorCreateInfo{}; + allocatorCreateInfo.vulkanApiVersion = VK_API_VERSION_1_2; + allocatorCreateInfo.physicalDevice = Render_Core::get().getDevice().getPhysicalDevice(); + allocatorCreateInfo.device = Render_Core::get().getDevice().get(); + allocatorCreateInfo.instance = Render_Core::get().getInstance().get(); + allocatorCreateInfo.pVulkanFunctions = &vma_vulkan_func; + + VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &_allocator); + if(res != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Graphics allocator : failed to create graphics memory allocator, %s", RCore::verbaliseResultVk(res)); + #ifdef DEBUG + core::error::report(e_kind::message, "Graphics allocator : created new allocator"); + #endif + } + + VmaAllocation GPUallocator::createBuffer(const VkBufferCreateInfo* binfo, const VmaAllocationCreateInfo* vinfo, VkBuffer& buffer, const char* name) noexcept + { + VmaAllocation allocation; + VkResult res = vmaCreateBuffer(_allocator, binfo, vinfo, &buffer, &allocation, nullptr); + if(res != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Graphics allocator : failed to allocate a buffer, %s", RCore::verbaliseResultVk(res)); + if(name != nullptr) + vmaSetAllocationName(_allocator, allocation, name); + #ifdef DEBUG + core::error::report(e_kind::message, "Graphics Allocator : created new buffer"); + #endif + _active_buffers_allocations++; + return allocation; + } + + void GPUallocator::destroyBuffer(VmaAllocation allocation, VkBuffer buffer) noexcept + { + vkDeviceWaitIdle(Render_Core::get().getDevice().get()); + vmaDestroyBuffer(_allocator, buffer, allocation); + #ifdef DEBUG + core::error::report(e_kind::message, "Graphics Allocator : destroyed buffer"); + #endif + _active_buffers_allocations--; + } + + VmaAllocation GPUallocator::createImage(const VkImageCreateInfo* iminfo, const VmaAllocationCreateInfo* vinfo, VkImage& image, const char* name) noexcept + { + VmaAllocation allocation; + VkResult res = vmaCreateImage(_allocator, iminfo, vinfo, &image, &allocation, nullptr); + if(res != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Graphics allocator : failed to allocate an image, %s", RCore::verbaliseResultVk(res)); + if(name != nullptr) + vmaSetAllocationName(_allocator, allocation, name); + #ifdef DEBUG + 
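+		// Naming the allocation is what makes dumpMemoryToJson() below useful:
+		// the string set via vmaSetAllocationName() is echoed back in the
+		// stats emitted by vmaBuildStatsString().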
core::error::report(e_kind::message, "Graphics Allocator : created new image"); + #endif + _active_images_allocations++; + return allocation; + } + + void GPUallocator::destroyImage(VmaAllocation allocation, VkImage image) noexcept + { + vkDeviceWaitIdle(Render_Core::get().getDevice().get()); + vmaDestroyImage(_allocator, image, allocation); + #ifdef DEBUG + core::error::report(e_kind::message, "Graphics Allocator : destroyed image"); + #endif + _active_images_allocations--; + } + + void GPUallocator::mapMemory(VmaAllocation allocation, void** data) noexcept + { + VkResult res = vmaMapMemory(_allocator, allocation, data); + if(res != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Graphics allocator : unable to map GPU memory to CPU memory, %s", RCore::verbaliseResultVk(res)); + } + + void GPUallocator::unmapMemory(VmaAllocation allocation) noexcept + { + vmaUnmapMemory(_allocator, allocation); + } + + void GPUallocator::dumpMemoryToJson() + { + static uint32_t id = 0; + std::string name("memory_dump"); + name.append(std::to_string(id) + ".json"); + std::ofstream file(name); + if(!file.is_open()) + { + core::error::report(e_kind::error, "Graphics allocator : unable to dump memory to a json file"); + return; + } + char* str = nullptr; + vmaBuildStatsString(_allocator, &str, true); + file << str; + vmaFreeStatsString(_allocator, str); + file.close(); + id++; + } + + void GPUallocator::flush(VmaAllocation allocation, VkDeviceSize size, VkDeviceSize offset) noexcept + { + vmaFlushAllocation(_allocator, allocation, offset, size); + } + + void GPUallocator::destroy() noexcept + { + if(_active_images_allocations != 0) + core::error::report(e_kind::error, "Graphics allocator : some user-dependant allocations were not freed before destroying the display (%d active allocations)", _active_images_allocations); + else if(_active_buffers_allocations != 0) + core::error::report(e_kind::error, "Graphics allocator : some MLX-dependant allocations were not freed before destroying the display (%d active allocations), please report, this should not happen", _active_buffers_allocations); + vmaDestroyAllocator(_allocator); + _active_buffers_allocations = 0; + _active_images_allocations = 0; + } +} diff --git a/MacroLibX/src/renderer/core/memory.h b/MacroLibX/src/renderer/core/memory.h new file mode 100644 index 0000000..f977ace --- /dev/null +++ b/MacroLibX/src/renderer/core/memory.h @@ -0,0 +1,52 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* memory.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/10/20 02:13:03 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:25:56 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_VK_MEMORY__ +#define __MLX_VK_MEMORY__ + +#include +#include +#include + +namespace mlx +{ + class GPUallocator + { + public: + GPUallocator() = default; + + void init() noexcept; + void destroy() noexcept; + + VmaAllocation createBuffer(const VkBufferCreateInfo* binfo, const VmaAllocationCreateInfo* vinfo, VkBuffer& buffer, const char* name = nullptr) noexcept; + void destroyBuffer(VmaAllocation allocation, VkBuffer buffer) noexcept; + + VmaAllocation createImage(const VkImageCreateInfo* iminfo, const VmaAllocationCreateInfo* vinfo, VkImage& image, const char* name = nullptr) noexcept; + void destroyImage(VmaAllocation allocation, VkImage image) noexcept; + + void 
mapMemory(VmaAllocation allocation, void** data) noexcept; + void unmapMemory(VmaAllocation allocation) noexcept; + + void dumpMemoryToJson(); + + void flush(VmaAllocation allocation, VkDeviceSize size, VkDeviceSize offset) noexcept; + + ~GPUallocator() = default; + + private: + VmaAllocator _allocator; + uint32_t _active_buffers_allocations = 0; + uint32_t _active_images_allocations = 0; + }; +} + +#endif diff --git a/MacroLibX/src/renderer/core/render_core.cpp b/MacroLibX/src/renderer/core/render_core.cpp new file mode 100644 index 0000000..aa3268d --- /dev/null +++ b/MacroLibX/src/renderer/core/render_core.cpp @@ -0,0 +1,111 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* render_core.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/12/17 23:33:34 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:22:38 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#define VOLK_IMPLEMENTATION + +#include +#include +#include + +#ifdef DEBUG + #ifdef MLX_COMPILER_MSVC + #pragma NOTE("MLX is being compiled in debug mode, this activates Vulkan's validation layers and debug messages which may impact rendering performances") + #else + #warning "MLX is being compiled in debug mode, this activates Vulkan's validation layers and debug messages which may impact rendering performances" + #endif +#endif + +namespace mlx +{ + namespace RCore + { + std::optional findMemoryType(uint32_t typeFilter, VkMemoryPropertyFlags properties, bool error) + { + VkPhysicalDeviceMemoryProperties memProperties; + vkGetPhysicalDeviceMemoryProperties(Render_Core::get().getDevice().getPhysicalDevice(), &memProperties); + + for(uint32_t i = 0; i < memProperties.memoryTypeCount; i++) + { + if((typeFilter & (1 << i)) && (memProperties.memoryTypes[i].propertyFlags & properties) == properties) + return i; + } + if(error) + core::error::report(e_kind::fatal_error, "Vulkan : failed to find suitable memory type"); + return std::nullopt; + } + + const char* verbaliseResultVk(VkResult result) + { + switch(result) + { + case VK_SUCCESS: return "Success"; + case VK_NOT_READY: return "A fence or query has not yet completed"; + case VK_TIMEOUT: return "A wait operation has not completed in the specified time"; + case VK_EVENT_SET: return "An event is signaled"; + case VK_EVENT_RESET: return "An event is unsignaled"; + case VK_INCOMPLETE: return "A return array was too small for the result"; + case VK_ERROR_OUT_OF_HOST_MEMORY: return "A host memory allocation has failed"; + case VK_ERROR_OUT_OF_DEVICE_MEMORY: return "A device memory allocation has failed"; + case VK_ERROR_INITIALIZATION_FAILED: return "Initialization of an object could not be completed for implementation-specific reasons"; + case VK_ERROR_DEVICE_LOST: return "The logical or physical device has been lost"; + case VK_ERROR_MEMORY_MAP_FAILED: return "Mapping of a memory object has failed"; + case VK_ERROR_LAYER_NOT_PRESENT: return "A requested layer is not present or could not be loaded"; + case VK_ERROR_EXTENSION_NOT_PRESENT: return "A requested extension is not supported"; + case VK_ERROR_FEATURE_NOT_PRESENT: return "A requested feature is not supported"; + case VK_ERROR_INCOMPATIBLE_DRIVER: return "The requested version of Vulkan is not supported by the driver or is otherwise incompatible"; + case VK_ERROR_TOO_MANY_OBJECTS: return "Too many objects of the type have already been 
created"; + case VK_ERROR_FORMAT_NOT_SUPPORTED: return "A requested format is not supported on this device"; + case VK_ERROR_SURFACE_LOST_KHR: return "A surface is no longer available"; + case VK_SUBOPTIMAL_KHR: return "A swapchain no longer matches the surface properties exactly, but can still be used"; + case VK_ERROR_OUT_OF_DATE_KHR: return "A surface has changed in such a way that it is no longer compatible with the swapchain"; + case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: return "The display used by a swapchain does not use the same presentable image layout"; + case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: return "The requested window is already connected to a VkSurfaceKHR, or to some other non-Vulkan API"; + case VK_ERROR_VALIDATION_FAILED_EXT: return "A validation layer found an error"; + + default: return "Unknown Vulkan error"; + } + return nullptr; + } + } + + void Render_Core::init() + { + if(volkInitialize() != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan loader : cannot load %s, are you sure Vulkan is installed on your system ?", VULKAN_LIB_NAME); + + _instance.init(); + volkLoadInstance(_instance.get()); + _layers.init(); + _device.init(); + volkLoadDevice(_device.get()); + _queues.init(); + _allocator.init(); + _cmd_manager.init(); + _is_init = true; + } + + void Render_Core::destroy() + { + if(!_is_init) + return; + + vkDeviceWaitIdle(_device()); + + _cmd_manager.destroy(); + _allocator.destroy(); + _device.destroy(); + _layers.destroy(); + _instance.destroy(); + + _is_init = false; + } +} diff --git a/MacroLibX/src/renderer/core/render_core.h b/MacroLibX/src/renderer/core/render_core.h new file mode 100644 index 0000000..07e5840 --- /dev/null +++ b/MacroLibX/src/renderer/core/render_core.h @@ -0,0 +1,76 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* render_core.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/08 19:16:32 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:26:08 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_RENDER_CORE__ +#define __MLX_RENDER_CORE__ + +#include +#include +#include + +#include +#include "vk_queues.h" +#include "vk_device.h" +#include "vk_instance.h" +#include "vk_validation_layers.h" +#include "memory.h" + +#include +#include + +namespace mlx +{ + namespace RCore + { + std::optional findMemoryType(uint32_t typeFilter, VkMemoryPropertyFlags properties, bool error = true); + const char* verbaliseResultVk(VkResult result); + } + + #ifdef DEBUG + constexpr const bool enableValidationLayers = true; + #else + constexpr const bool enableValidationLayers = false; + #endif + + const std::vector validationLayers = { "VK_LAYER_KHRONOS_validation" }; + + constexpr const int MAX_FRAMES_IN_FLIGHT = 3; + + class Render_Core : public Singleton + { + public: + Render_Core() = default; + + void init(); + void destroy(); + + inline Instance& getInstance() noexcept { return _instance; } + inline Device& getDevice() noexcept { return _device; } + inline Queues& getQueue() noexcept { return _queues; } + inline GPUallocator& getAllocator() noexcept { return _allocator; } + inline ValidationLayers& getLayers() noexcept { return _layers; } + inline CmdBuffer& getSingleTimeCmdBuffer() noexcept { return _cmd_manager.getCmdBuffer(); } + + ~Render_Core() = default; + + private: + ValidationLayers _layers; + SingleTimeCmdManager _cmd_manager; + 
Queues _queues; + Device _device; + Instance _instance; + GPUallocator _allocator; + bool _is_init = false; + }; +} + +#endif // __MLX_RENDER_CORE__ diff --git a/MacroLibX/src/renderer/core/vk_device.cpp b/MacroLibX/src/renderer/core/vk_device.cpp new file mode 100644 index 0000000..f10c599 --- /dev/null +++ b/MacroLibX/src/renderer/core/vk_device.cpp @@ -0,0 +1,163 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_device.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/08 19:14:29 by maldavid #+# #+# */ +/* Updated: 2023/12/30 23:29:41 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include "render_core.h" +#include +#include +#include +#include +#include +#include +#include + +namespace mlx +{ + const std::vector<const char*> deviceExtensions = {VK_KHR_SWAPCHAIN_EXTENSION_NAME}; + + void Device::init() + { + pickPhysicalDevice(); + + Queues::QueueFamilyIndices indices = Render_Core::get().getQueue().getFamilies(); + + std::vector<VkDeviceQueueCreateInfo> queueCreateInfos; + std::set<uint32_t> uniqueQueueFamilies = { indices.graphicsFamily.value(), indices.presentFamily.value() }; + + float queuePriority = 1.0f; + for(uint32_t queueFamily : uniqueQueueFamilies) + { + VkDeviceQueueCreateInfo queueCreateInfo{}; + queueCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + queueCreateInfo.queueFamilyIndex = queueFamily; + queueCreateInfo.queueCount = 1; + queueCreateInfo.pQueuePriorities = &queuePriority; + queueCreateInfos.push_back(queueCreateInfo); + } + + VkPhysicalDeviceFeatures deviceFeatures{}; + + VkDeviceCreateInfo createInfo{}; + createInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; + + createInfo.queueCreateInfoCount = static_cast<uint32_t>(queueCreateInfos.size()); + createInfo.pQueueCreateInfos = queueCreateInfos.data(); + + createInfo.pEnabledFeatures = &deviceFeatures; + + createInfo.enabledExtensionCount = static_cast<uint32_t>(deviceExtensions.size()); + createInfo.ppEnabledExtensionNames = deviceExtensions.data(); + createInfo.enabledLayerCount = 0; + + VkResult res; + if((res = vkCreateDevice(_physicalDevice, &createInfo, nullptr, &_device)) != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create logical device, %s", RCore::verbaliseResultVk(res)); + #ifdef DEBUG + core::error::report(e_kind::message, "Vulkan : created new logical device"); + #endif + } + + void Device::pickPhysicalDevice() + { + uint32_t deviceCount = 0; + vkEnumeratePhysicalDevices(Render_Core::get().getInstance().get(), &deviceCount, nullptr); + + if(deviceCount == 0) + core::error::report(e_kind::fatal_error, "Vulkan : failed to find GPUs with Vulkan support"); + + std::vector<VkPhysicalDevice> devices(deviceCount); + vkEnumeratePhysicalDevices(Render_Core::get().getInstance().get(), &deviceCount, devices.data()); + + SDL_Window* window = SDL_CreateWindow("", 0, 0, 1, 1, SDL_WINDOW_VULKAN | SDL_WINDOW_HIDDEN); + if(!window) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create a window to pick physical device"); + + VkSurfaceKHR surface = VK_NULL_HANDLE; + if(SDL_Vulkan_CreateSurface(window, Render_Core::get().getInstance().get(), &surface) != SDL_TRUE) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create a surface to pick physical device"); + + std::vector<std::pair<int, VkPhysicalDevice>> devices_score; + + std::transform(devices.cbegin(), devices.cend(), std::back_inserter(devices_score), [&](VkPhysicalDevice device) + { + return 
std::make_pair(deviceScore(device, surface), device); + }); + + vkDestroySurfaceKHR(Render_Core::get().getInstance().get(), surface, nullptr); + SDL_DestroyWindow(window); + + using device_pair = std::pair<int, VkPhysicalDevice>; + std::sort(devices_score.begin(), devices_score.end(), [](const device_pair& a, const device_pair& b) + { + return a.first > b.first; + }); + + if(devices_score.front().first > 0) + _physicalDevice = devices_score.front().second; + + if(_physicalDevice == VK_NULL_HANDLE) + core::error::report(e_kind::fatal_error, "Vulkan : failed to find a suitable GPU"); + #ifdef DEBUG + VkPhysicalDeviceProperties props; + vkGetPhysicalDeviceProperties(_physicalDevice, &props); + core::error::report(e_kind::message, "Vulkan : picked a physical device, %s", props.deviceName); + #endif + } + + int Device::deviceScore(VkPhysicalDevice device, VkSurfaceKHR surface) + { + Queues::QueueFamilyIndices indices = Render_Core::get().getQueue().findQueueFamilies(device, surface); + bool extensionsSupported = checkDeviceExtensionSupport(device); + + uint32_t formatCount = 0; + if(extensionsSupported) + vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &formatCount, nullptr); + + VkPhysicalDeviceProperties props; + vkGetPhysicalDeviceProperties(device, &props); + if(!indices.isComplete() || !extensionsSupported || formatCount == 0) + return -1; + + int score = 0; + #ifndef FORCE_INTEGRATED_GPU + if(props.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU) + score += 1000; + #else + if(props.deviceType != VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU) + return -1; + #endif + score += props.limits.maxImageDimension2D; + score += props.limits.maxBoundDescriptorSets; + return score; + } + + bool Device::checkDeviceExtensionSupport(VkPhysicalDevice device) + { + uint32_t extensionCount; + vkEnumerateDeviceExtensionProperties(device, nullptr, &extensionCount, nullptr); + + std::vector<VkExtensionProperties> availableExtensions(extensionCount); + vkEnumerateDeviceExtensionProperties(device, nullptr, &extensionCount, availableExtensions.data()); + + std::set<std::string> requiredExtensions(deviceExtensions.begin(), deviceExtensions.end()); + + for(const auto& extension : availableExtensions) + requiredExtensions.erase(extension.extensionName); + + return requiredExtensions.empty(); + } + + void Device::destroy() noexcept + { + vkDestroyDevice(_device, nullptr); + _device = VK_NULL_HANDLE; + } +} diff --git a/MacroLibX/src/renderer/core/vk_device.h b/MacroLibX/src/renderer/core/vk_device.h new file mode 100644 index 0000000..ecfd47e --- /dev/null +++ b/MacroLibX/src/renderer/core/vk_device.h @@ -0,0 +1,44 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_device.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/08 19:13:42 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:26:14 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_VK_DEVICE__ +#define __MLX_VK_DEVICE__ + +#include +#include +#include "vk_queues.h" + +namespace mlx +{ + class Device + { + public: + void init(); + void destroy() noexcept; + + inline VkDevice& operator()() noexcept { return _device; } + inline VkDevice& get() noexcept { return _device; } + + inline VkPhysicalDevice& getPhysicalDevice() noexcept { return _physicalDevice; } + + private: + void pickPhysicalDevice(); + bool checkDeviceExtensionSupport(VkPhysicalDevice device); + int deviceScore(VkPhysicalDevice device, 
VkSurfaceKHR surface); + + private: + VkPhysicalDevice _physicalDevice = VK_NULL_HANDLE; + VkDevice _device = VK_NULL_HANDLE; + }; +} + +#endif // __MLX_VK_DEVICE__ diff --git a/MacroLibX/src/renderer/core/vk_fence.cpp b/MacroLibX/src/renderer/core/vk_fence.cpp new file mode 100644 index 0000000..bad8872 --- /dev/null +++ b/MacroLibX/src/renderer/core/vk_fence.cpp @@ -0,0 +1,53 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_fence.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/04/02 17:53:06 by maldavid #+# #+# */ +/* Updated: 2023/12/16 18:47:36 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include +#include + +namespace mlx +{ + void Fence::init() + { + VkFenceCreateInfo fenceInfo{}; + fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; + fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT; + + VkResult res; + if((res = vkCreateFence(Render_Core::get().getDevice().get(), &fenceInfo, nullptr, &_fence)) != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create a synchronization object (fence), %s", RCore::verbaliseResultVk(res)); + #ifdef DEBUG + core::error::report(e_kind::message, "Vulkan : created new fence"); + #endif + } + + void Fence::wait() noexcept + { + vkWaitForFences(Render_Core::get().getDevice().get(), 1, &_fence, VK_TRUE, UINT64_MAX); + } + + void Fence::reset() noexcept + { + vkResetFences(Render_Core::get().getDevice().get(), 1, &_fence); + } + + bool Fence::isReady() const noexcept + { + return vkGetFenceStatus(Render_Core::get().getDevice().get(), _fence) == VK_SUCCESS; + } + + void Fence::destroy() noexcept + { + if(_fence != VK_NULL_HANDLE) + vkDestroyFence(Render_Core::get().getDevice().get(), _fence, nullptr); + _fence = VK_NULL_HANDLE; + } +} diff --git a/MacroLibX/src/renderer/core/vk_fence.h b/MacroLibX/src/renderer/core/vk_fence.h new file mode 100644 index 0000000..d8bd364 --- /dev/null +++ b/MacroLibX/src/renderer/core/vk_fence.h @@ -0,0 +1,43 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_fence.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/04/02 17:52:09 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:26:21 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_VK_FENCE__ +#define __MLX_VK_FENCE__ + +#include +#include + +namespace mlx +{ + class Fence + { + public: + Fence() = default; + + void init(); + + inline VkFence& get() noexcept { return _fence; } + void wait() noexcept; + void reset() noexcept; + bool isReady() const noexcept; + inline void waitAndReset() noexcept { wait(); reset(); } + + void destroy() noexcept; + + ~Fence() = default; + + private: + VkFence _fence = VK_NULL_HANDLE; + }; +} + +#endif diff --git a/MacroLibX/src/renderer/core/vk_instance.cpp b/MacroLibX/src/renderer/core/vk_instance.cpp new file mode 100644 index 0000000..d93f342 --- /dev/null +++ b/MacroLibX/src/renderer/core/vk_instance.cpp @@ -0,0 +1,88 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_instance.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/08 19:04:21 by maldavid #+# #+# */ +/* 
Updated: 2023/12/31 00:40:10 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include "vk_instance.h" +#include "render_core.h" +#include +#include + +namespace mlx +{ + void Instance::init() + { + VkApplicationInfo appInfo{}; + appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO; + appInfo.pEngineName = "MacroLibX"; + appInfo.engineVersion = VK_MAKE_VERSION(1, 2, 1); + appInfo.apiVersion = VK_API_VERSION_1_2; + + auto extensions = getRequiredExtensions(); + + VkInstanceCreateInfo createInfo{}; + createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; + createInfo.pApplicationInfo = &appInfo; + createInfo.enabledExtensionCount = static_cast<uint32_t>(extensions.size()); + createInfo.ppEnabledExtensionNames = extensions.data(); + createInfo.enabledLayerCount = 0; // will be replaced if validation layers are enabled + createInfo.pNext = nullptr; + + VkDebugUtilsMessengerCreateInfoEXT debugCreateInfo; + if constexpr(enableValidationLayers) + { + if(Render_Core::get().getLayers().checkValidationLayerSupport()) + { + createInfo.enabledLayerCount = static_cast<uint32_t>(validationLayers.size()); + createInfo.ppEnabledLayerNames = validationLayers.data(); + Render_Core::get().getLayers().populateDebugMessengerCreateInfo(debugCreateInfo); + createInfo.pNext = static_cast<const void*>(&debugCreateInfo); + } + } + + VkResult res; + if((res = vkCreateInstance(&createInfo, nullptr, &_instance)) != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create Vulkan instance, %s", RCore::verbaliseResultVk(res)); + volkLoadInstance(_instance); + #ifdef DEBUG + core::error::report(e_kind::message, "Vulkan : created new instance"); + #endif + } + + std::vector<const char*> Instance::getRequiredExtensions() + { + unsigned int count = 0; + SDL_Window* window = SDL_CreateWindow("", 0, 0, 1, 1, SDL_WINDOW_VULKAN | SDL_WINDOW_HIDDEN); + if(!window) + core::error::report(e_kind::fatal_error, "Vulkan : cannot get instance extensions from window : %s", SDL_GetError()); + + if(!SDL_Vulkan_GetInstanceExtensions(window, &count, nullptr)) + core::error::report(e_kind::fatal_error, "Vulkan : cannot get instance extensions from window : %s", SDL_GetError()); + + std::vector<const char*> extensions = { VK_EXT_DEBUG_REPORT_EXTENSION_NAME }; + size_t additional_extension_count = extensions.size(); + extensions.resize(additional_extension_count + count); + + if(!SDL_Vulkan_GetInstanceExtensions(window, &count, extensions.data() + additional_extension_count)) + core::error::report(e_kind::error, "Vulkan : cannot get instance extensions from window : %s", SDL_GetError()); + + if constexpr(enableValidationLayers) + extensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME); + + SDL_DestroyWindow(window); + return extensions; + } + + void Instance::destroy() noexcept + { + vkDestroyInstance(_instance, nullptr); + _instance = VK_NULL_HANDLE; + } +} diff --git a/MacroLibX/src/renderer/core/vk_instance.h b/MacroLibX/src/renderer/core/vk_instance.h new file mode 100644 index 0000000..e827665 --- /dev/null +++ b/MacroLibX/src/renderer/core/vk_instance.h @@ -0,0 +1,37 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_instance.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/08 19:03:04 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:26:26 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef 
__MLX_VK_INSTANCE__ +#define __MLX_VK_INSTANCE__ + +#include +#include +#include + +namespace mlx +{ + class Instance + { + public: + void init(); + void destroy() noexcept; + + inline VkInstance& operator()() noexcept { return _instance; } + inline VkInstance& get() noexcept { return _instance; } + + private: + std::vector getRequiredExtensions(); + VkInstance _instance = VK_NULL_HANDLE; + }; +} + +#endif // __MLX_VK_INSTANCE__ diff --git a/MacroLibX/src/renderer/core/vk_queues.cpp b/MacroLibX/src/renderer/core/vk_queues.cpp new file mode 100644 index 0000000..937cc25 --- /dev/null +++ b/MacroLibX/src/renderer/core/vk_queues.cpp @@ -0,0 +1,68 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_queues.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/08 19:02:42 by maldavid #+# #+# */ +/* Updated: 2022/12/18 22:52:04 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include "render_core.h" +#include +#include + +namespace mlx +{ + Queues::QueueFamilyIndices Queues::findQueueFamilies(VkPhysicalDevice device, VkSurfaceKHR surface) + { + uint32_t queueFamilyCount = 0; + vkGetPhysicalDeviceQueueFamilyProperties(device, &queueFamilyCount, nullptr); + + std::vector queueFamilies(queueFamilyCount); + vkGetPhysicalDeviceQueueFamilyProperties(device, &queueFamilyCount, queueFamilies.data()); + + _families = Queues::QueueFamilyIndices{}; + int i = 0; + for(const auto& queueFamily : queueFamilies) + { + if(queueFamily.queueFlags & VK_QUEUE_GRAPHICS_BIT) + _families->graphicsFamily = i; + + VkBool32 presentSupport = false; + vkGetPhysicalDeviceSurfaceSupportKHR(device, i, surface, &presentSupport); + + if(presentSupport) + _families->presentFamily = i; + + if(_families->isComplete()) + break; + i++; + } + + return *_families; + } + + void Queues::init() + { + if(!_families.has_value()) + { + SDL_Window* window = SDL_CreateWindow("", 0, 0, 1, 1, SDL_WINDOW_VULKAN | SDL_WINDOW_HIDDEN); + if(!window) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create a window to init queues"); + + VkSurfaceKHR surface = VK_NULL_HANDLE; + if(SDL_Vulkan_CreateSurface(window, Render_Core::get().getInstance().get(), &surface) != SDL_TRUE) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create a surface to init queues"); + + findQueueFamilies(Render_Core::get().getDevice().getPhysicalDevice(), surface); + + vkDestroySurfaceKHR(Render_Core::get().getInstance().get(), surface, nullptr); + SDL_DestroyWindow(window); + } + vkGetDeviceQueue(Render_Core::get().getDevice().get(), _families->graphicsFamily.value(), 0, &_graphicsQueue); + vkGetDeviceQueue(Render_Core::get().getDevice().get(), _families->presentFamily.value(), 0, &_presentQueue); + } +} diff --git a/MacroLibX/src/renderer/core/vk_queues.h b/MacroLibX/src/renderer/core/vk_queues.h new file mode 100644 index 0000000..268f4fe --- /dev/null +++ b/MacroLibX/src/renderer/core/vk_queues.h @@ -0,0 +1,49 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_queues.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/08 19:01:49 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:26:31 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef 
__MLX_VK_QUEUES__ +#define __MLX_VK_QUEUES__ + +#include +#include +#include +#include + +namespace mlx +{ + class Queues + { + public: + struct QueueFamilyIndices + { + std::optional<uint32_t> graphicsFamily; + std::optional<uint32_t> presentFamily; + + inline bool isComplete() { return graphicsFamily.has_value() && presentFamily.has_value(); } + }; + + QueueFamilyIndices findQueueFamilies(VkPhysicalDevice device, VkSurfaceKHR surface); + + void init(); + + inline VkQueue& getGraphic() noexcept { return _graphicsQueue; } + inline VkQueue& getPresent() noexcept { return _presentQueue; } + inline QueueFamilyIndices getFamilies() noexcept { return *_families; } + + private: + VkQueue _graphicsQueue = VK_NULL_HANDLE; + VkQueue _presentQueue = VK_NULL_HANDLE; + std::optional<QueueFamilyIndices> _families; + }; +} + +#endif // __MLX_VK_QUEUES__ diff --git a/MacroLibX/src/renderer/core/vk_semaphore.cpp b/MacroLibX/src/renderer/core/vk_semaphore.cpp new file mode 100644 index 0000000..604d215 --- /dev/null +++ b/MacroLibX/src/renderer/core/vk_semaphore.cpp @@ -0,0 +1,40 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_semaphore.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/08 19:01:08 by maldavid #+# #+# */ +/* Updated: 2023/12/16 18:47:29 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include "vk_semaphore.h" +#include "render_core.h" +#include + +namespace mlx +{ + void Semaphore::init() + { + VkSemaphoreCreateInfo semaphoreInfo{}; + semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; + + VkResult res; + if( (res = vkCreateSemaphore(Render_Core::get().getDevice().get(), &semaphoreInfo, nullptr, &_imageAvailableSemaphores)) != VK_SUCCESS || + (res = vkCreateSemaphore(Render_Core::get().getDevice().get(), &semaphoreInfo, nullptr, &_renderFinishedSemaphores)) != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create a synchronization object (semaphore), %s", RCore::verbaliseResultVk(res)); + #ifdef DEBUG + core::error::report(e_kind::message, "Vulkan : created new semaphore"); + #endif + } + + void Semaphore::destroy() noexcept + { + vkDestroySemaphore(Render_Core::get().getDevice().get(), _renderFinishedSemaphores, nullptr); + _renderFinishedSemaphores = VK_NULL_HANDLE; + vkDestroySemaphore(Render_Core::get().getDevice().get(), _imageAvailableSemaphores, nullptr); + _imageAvailableSemaphores = VK_NULL_HANDLE; + } +} diff --git a/MacroLibX/src/renderer/core/vk_semaphore.h b/MacroLibX/src/renderer/core/vk_semaphore.h new file mode 100644 index 0000000..cdcb72d --- /dev/null +++ b/MacroLibX/src/renderer/core/vk_semaphore.h @@ -0,0 +1,37 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_semaphore.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/08 18:59:38 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:26:39 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_VK_SEMAPHORE__ +#define __MLX_VK_SEMAPHORE__ + +#include +#include +#include + +namespace mlx +{ + class Semaphore + { + public: + void init(); + void destroy() noexcept; + + inline VkSemaphore& getImageSemaphore() noexcept { return _imageAvailableSemaphores; } + inline VkSemaphore& getRenderImageSemaphore() noexcept { return _renderFinishedSemaphores; } 
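+		// Per-frame pair: getImageSemaphore() is signaled by
+		// vkAcquireNextImageKHR and waited on by CmdBuffer::submit(), which in
+		// turn signals getRenderImageSemaphore() for vkQueuePresentKHR. A
+		// frame loop would roughly do (sketch, assuming a `sem` instance):
+		//
+		//     vkAcquireNextImageKHR(device, swapchain, UINT64_MAX,
+		//                           sem.getImageSemaphore(), VK_NULL_HANDLE, &index);
+		//     cmd.submit(&sem); // waits image-available, signals render-finished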
+ + private: + VkSemaphore _imageAvailableSemaphores = VK_NULL_HANDLE; + VkSemaphore _renderFinishedSemaphores = VK_NULL_HANDLE; + }; +} + +#endif // __MLX_VK_SEMAPHORE__ diff --git a/MacroLibX/src/renderer/core/vk_surface.cpp b/MacroLibX/src/renderer/core/vk_surface.cpp new file mode 100644 index 0000000..3a5fed2 --- /dev/null +++ b/MacroLibX/src/renderer/core/vk_surface.cpp @@ -0,0 +1,46 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_surface.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/08 18:58:49 by maldavid #+# #+# */ +/* Updated: 2023/12/30 23:14:54 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include "render_core.h" +#include +#include +#include +#include +#include + +namespace mlx +{ + void Surface::create(Renderer& renderer) + { + if(SDL_Vulkan_CreateSurface(renderer.getWindow()->getNativeWindow(), Render_Core::get().getInstance().get(), &_surface) != SDL_TRUE) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create a surface : %s", SDL_GetError()); + #ifdef DEBUG + core::error::report(e_kind::message, "Vulkan : created new surface"); + #endif + } + + VkSurfaceFormatKHR Surface::chooseSwapSurfaceFormat(const std::vector& availableFormats) + { + auto it = std::find_if(availableFormats.begin(), availableFormats.end(), [](VkSurfaceFormatKHR format) + { + return format.format == VK_FORMAT_R8G8B8A8_SRGB && format.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR; + }); + + return (it == availableFormats.end() ? availableFormats[0] : *it); + } + + void Surface::destroy() noexcept + { + vkDestroySurfaceKHR(Render_Core::get().getInstance().get(), _surface, nullptr); + _surface = VK_NULL_HANDLE; + } +} diff --git a/MacroLibX/src/renderer/core/vk_surface.h b/MacroLibX/src/renderer/core/vk_surface.h new file mode 100644 index 0000000..c700f8f --- /dev/null +++ b/MacroLibX/src/renderer/core/vk_surface.h @@ -0,0 +1,38 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_surface.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/08 18:57:55 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:26:43 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_VK_SURFACE__ +#define __MLX_VK_SURFACE__ + +#include +#include +#include + +namespace mlx +{ + class Surface + { + public: + void create(class Renderer& renderer); + void destroy() noexcept; + + VkSurfaceFormatKHR chooseSwapSurfaceFormat(const std::vector& availableFormats); + + inline VkSurfaceKHR& operator()() noexcept { return _surface; } + inline VkSurfaceKHR& get() noexcept { return _surface; } + + private: + VkSurfaceKHR _surface = VK_NULL_HANDLE; + }; +} + +#endif // __MLX_VK_SURFACE__ diff --git a/MacroLibX/src/renderer/core/vk_validation_layers.cpp b/MacroLibX/src/renderer/core/vk_validation_layers.cpp new file mode 100644 index 0000000..367c1fb --- /dev/null +++ b/MacroLibX/src/renderer/core/vk_validation_layers.cpp @@ -0,0 +1,103 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_validation_layers.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/12/19 14:05:25 by maldavid #+# #+# 
*/ +/* Updated: 2024/01/03 13:11:27 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include "vk_validation_layers.h" +#include "render_core.h" + +#include +#include +#include +#include + +namespace mlx +{ + void ValidationLayers::init() + { + if constexpr(!enableValidationLayers) + return; + + VkDebugUtilsMessengerCreateInfoEXT createInfo{}; + populateDebugMessengerCreateInfo(createInfo); + VkResult res = createDebugUtilsMessengerEXT(&createInfo, nullptr); + if(res != VK_SUCCESS) + core::error::report(e_kind::warning, "Vulkan : failed to set up debug messenger, %s", RCore::verbaliseResultVk(res)); + #ifdef DEBUG + else + core::error::report(e_kind::message, "Vulkan : enabled validation layers"); + #endif + } + + bool ValidationLayers::checkValidationLayerSupport() + { + uint32_t layerCount; + vkEnumerateInstanceLayerProperties(&layerCount, nullptr); + + std::vector availableLayers(layerCount); + vkEnumerateInstanceLayerProperties(&layerCount, availableLayers.data()); + + return std::all_of(validationLayers.begin(), validationLayers.end(), [&](const char* layerName) + { + if(!std::any_of(availableLayers.begin(), availableLayers.end(), [=](VkLayerProperties props) { return std::strcmp(layerName, props.layerName) == 0; })) + { + core::error::report(e_kind::error, "Vulkan : a validation layer was requested but was not found ('%s')", layerName); + return false; + } + return true; + }); + } + + void ValidationLayers::populateDebugMessengerCreateInfo(VkDebugUtilsMessengerCreateInfoEXT& createInfo) + { + createInfo = {}; + createInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; + createInfo.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT; + createInfo.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT; + createInfo.pfnUserCallback = ValidationLayers::debugCallback; + } + + + void ValidationLayers::destroy() + { + if constexpr(enableValidationLayers) + destroyDebugUtilsMessengerEXT(nullptr); + } + + VkResult ValidationLayers::createDebugUtilsMessengerEXT(const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator) + { + auto func = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(Render_Core::get().getInstance().get(), "vkCreateDebugUtilsMessengerEXT"); + return func != nullptr ? 
func(Render_Core::get().getInstance().get(), pCreateInfo, pAllocator, &_debugMessenger) : VK_ERROR_EXTENSION_NOT_PRESENT; + } + + VKAPI_ATTR VkBool32 VKAPI_CALL ValidationLayers::debugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, [[maybe_unused]] VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, [[maybe_unused]] void* pUserData) + { + if(messageSeverity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) + { + std::cout << '\n'; + core::error::report(e_kind::error, std::string("Vulkan layer error: ") + pCallbackData->pMessage); + } + else if(messageSeverity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) + { + std::cout << '\n'; + core::error::report(e_kind::warning, std::string("Vulkan layer warning: ") + pCallbackData->pMessage); + } + + return VK_FALSE; + } + + void ValidationLayers::destroyDebugUtilsMessengerEXT(const VkAllocationCallbacks* pAllocator) + { + auto func = (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(Render_Core::get().getInstance().get(), "vkDestroyDebugUtilsMessengerEXT"); + if(func != nullptr) + func(Render_Core::get().getInstance().get(), _debugMessenger, pAllocator); + } + +} diff --git a/MacroLibX/src/renderer/core/vk_validation_layers.h b/MacroLibX/src/renderer/core/vk_validation_layers.h new file mode 100644 index 0000000..85eb48b --- /dev/null +++ b/MacroLibX/src/renderer/core/vk_validation_layers.h @@ -0,0 +1,41 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_validation_layers.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/12/19 14:04:25 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:26:49 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __VK_VALIDATION_LAYERS__ +#define __VK_VALIDATION_LAYERS__ + +#include +#include + +namespace mlx +{ + class ValidationLayers + { + public: + ValidationLayers() = default; + void init(); + bool checkValidationLayerSupport(); + void populateDebugMessengerCreateInfo(VkDebugUtilsMessengerCreateInfoEXT& createInfo); + void destroy(); + ~ValidationLayers() = default; + + private: + VkResult createDebugUtilsMessengerEXT(const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator); + static VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, void* pUserData); + void destroyDebugUtilsMessengerEXT(const VkAllocationCallbacks* pAllocator); + + private: + VkDebugUtilsMessengerEXT _debugMessenger; + }; +} + +#endif diff --git a/MacroLibX/src/renderer/descriptors/vk_descriptor_pool.cpp b/MacroLibX/src/renderer/descriptors/vk_descriptor_pool.cpp new file mode 100644 index 0000000..188403f --- /dev/null +++ b/MacroLibX/src/renderer/descriptors/vk_descriptor_pool.cpp @@ -0,0 +1,36 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_descriptor_pool.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/01/23 18:34:23 by maldavid #+# #+# */ +/* Updated: 2024/01/03 13:13:54 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include "vk_descriptor_pool.h" +#include + 
+namespace mlx
+{
+	void DescriptorPool::init(std::size_t n, VkDescriptorPoolSize* size)
+	{
+		VkDescriptorPoolCreateInfo poolInfo{};
+		poolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+		poolInfo.poolSizeCount = n;
+		poolInfo.pPoolSizes = size;
+		poolInfo.maxSets = 8192;
+
+		VkResult res = vkCreateDescriptorPool(Render_Core::get().getDevice().get(), &poolInfo, nullptr, &_pool);
+		if(res != VK_SUCCESS)
+			core::error::report(e_kind::fatal_error, "Vulkan : failed to create descriptor pool, %s", RCore::verbaliseResultVk(res));
+	}
+
+	void DescriptorPool::destroy() noexcept
+	{
+		vkDestroyDescriptorPool(Render_Core::get().getDevice().get(), _pool, nullptr);
+		_pool = VK_NULL_HANDLE;
+	}
+}
diff --git a/MacroLibX/src/renderer/descriptors/vk_descriptor_pool.h b/MacroLibX/src/renderer/descriptors/vk_descriptor_pool.h
new file mode 100644
index 0000000..82cf9c4
--- /dev/null
+++ b/MacroLibX/src/renderer/descriptors/vk_descriptor_pool.h
@@ -0,0 +1,36 @@
+/* ************************************************************************** */
+/*                                                                            */
+/*                                                        :::      ::::::::   */
+/*   vk_descriptor_pool.h                               :+:      :+:    :+:   */
+/*                                                    +:+ +:+         +:+     */
+/*   By: maldavid                                   +#+  +:+       +#+        */
+/*                                                +#+#+#+#+#+   +#+           */
+/*   Created: 2023/01/23 18:32:43 by maldavid          #+#    #+#             */
+/*   Updated: 2024/01/03 15:27:45 by maldavid         ###   ########.fr       */
+/*                                                                            */
+/* ************************************************************************** */
+
+#ifndef __VK_DESCRIPTOR_POOL__
+#define __VK_DESCRIPTOR_POOL__
+
+#include <cstddef>
+#include
+#include
+
+namespace mlx
+{
+	class DescriptorPool
+	{
+		public:
+			void init(std::size_t n, VkDescriptorPoolSize* size);
+			void destroy() noexcept;
+
+			inline VkDescriptorPool& operator()() noexcept { return _pool; }
+			inline VkDescriptorPool& get() noexcept { return _pool; }
+
+		private:
+			VkDescriptorPool _pool = VK_NULL_HANDLE;
+	};
+}
+
+#endif
diff --git a/MacroLibX/src/renderer/descriptors/vk_descriptor_set.cpp b/MacroLibX/src/renderer/descriptors/vk_descriptor_set.cpp
new file mode 100644
index 0000000..b840c7f
--- /dev/null
+++ b/MacroLibX/src/renderer/descriptors/vk_descriptor_set.cpp
@@ -0,0 +1,108 @@
+/* ************************************************************************** */
+/*                                                                            */
+/*                                                        :::      ::::::::   */
+/*   vk_descriptor_set.cpp                              :+:      :+:    :+:   */
+/*                                                    +:+ +:+         +:+     */
+/*   By: maldavid                                   +#+  +:+       +#+        */
+/*                                                +#+#+#+#+#+   +#+           */
+/*   Created: 2023/01/23 18:40:44 by maldavid          #+#    #+#             */
+/*   Updated: 2024/01/03 13:14:24 by maldavid         ###   ########.fr       */
+/*                                                                            */
+/* ************************************************************************** */
+
+#include "vk_descriptor_set.h"
+#include "renderer/core/render_core.h"
+#include "vk_descriptor_pool.h"
+#include "vk_descriptor_set_layout.h"
+#include <array>
+#include
+#include
+
+namespace mlx
+{
+	void DescriptorSet::init(Renderer* renderer, DescriptorPool* pool, DescriptorSetLayout* layout)
+	{
+		_renderer = renderer;
+		_layout = layout;
+		_pool = pool;
+
+		auto device = Render_Core::get().getDevice().get();
+
+		std::array<VkDescriptorSetLayout, MAX_FRAMES_IN_FLIGHT> layouts;
+		layouts.fill(layout->get());
+
+		VkDescriptorSetAllocateInfo allocInfo{};
+		allocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+		allocInfo.descriptorPool = _pool->get();
+		allocInfo.descriptorSetCount = static_cast<uint32_t>(MAX_FRAMES_IN_FLIGHT);
+		allocInfo.pSetLayouts = layouts.data();
+
+		VkResult res = vkAllocateDescriptorSets(device, &allocInfo, _desc_set.data());
+		if(res != VK_SUCCESS)
+			core::error::report(e_kind::fatal_error, "Vulkan : failed to allocate descriptor set, %s", RCore::verbaliseResultVk(res));
+		#ifdef DEBUG
core::error::report(e_kind::message, "Vulkan : created new descriptor set"); + #endif + } + + void DescriptorSet::writeDescriptor(int binding, UBO* ubo) const noexcept + { + auto device = Render_Core::get().getDevice().get(); + + for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + { + VkDescriptorBufferInfo bufferInfo{}; + bufferInfo.buffer = ubo->get(i); + bufferInfo.offset = ubo->getOffset(i); + bufferInfo.range = ubo->getSize(i); + + VkWriteDescriptorSet descriptorWrite{}; + descriptorWrite.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + descriptorWrite.dstSet = _desc_set[i]; + descriptorWrite.dstBinding = binding; + descriptorWrite.dstArrayElement = 0; + descriptorWrite.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER; + descriptorWrite.descriptorCount = 1; + descriptorWrite.pBufferInfo = &bufferInfo; + + vkUpdateDescriptorSets(device, 1, &descriptorWrite, 0, nullptr); + } + } + + void DescriptorSet::writeDescriptor(int binding, const Image& image) const noexcept + { + auto device = Render_Core::get().getDevice().get(); + + VkDescriptorImageInfo imageInfo{}; + imageInfo.imageLayout = image.getLayout(); + imageInfo.imageView = image.getImageView(); + imageInfo.sampler = image.getSampler(); + + VkWriteDescriptorSet descriptorWrite{}; + descriptorWrite.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + descriptorWrite.dstSet = _desc_set[_renderer->getActiveImageIndex()]; + descriptorWrite.dstBinding = binding; + descriptorWrite.dstArrayElement = 0; + descriptorWrite.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; + descriptorWrite.descriptorCount = 1; + descriptorWrite.pImageInfo = &imageInfo; + + vkUpdateDescriptorSets(device, 1, &descriptorWrite, 0, nullptr); + } + + DescriptorSet DescriptorSet::duplicate() + { + DescriptorSet set; + set.init(_renderer, _pool, _layout); + return set; + } + + VkDescriptorSet& DescriptorSet::operator()() noexcept + { + return _desc_set[_renderer->getActiveImageIndex()]; + } + VkDescriptorSet& DescriptorSet::get() noexcept + { + return _desc_set[_renderer->getActiveImageIndex()]; + } +} diff --git a/MacroLibX/src/renderer/descriptors/vk_descriptor_set.h b/MacroLibX/src/renderer/descriptors/vk_descriptor_set.h new file mode 100644 index 0000000..d555d5c --- /dev/null +++ b/MacroLibX/src/renderer/descriptors/vk_descriptor_set.h @@ -0,0 +1,46 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_descriptor_set.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/01/23 18:39:36 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:27:50 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __VK_DESCRIPTOR_SET__ +#define __VK_DESCRIPTOR_SET__ + +#include +#include +#include +#include + +namespace mlx +{ + class DescriptorSet + { + public: + void init(class Renderer* renderer, class DescriptorPool* pool, class DescriptorSetLayout* layout); + + void writeDescriptor(int binding, class UBO* ubo) const noexcept; + void writeDescriptor(int binding, const class Image& image) const noexcept; + + inline bool isInit() const noexcept { return _pool != nullptr && _renderer != nullptr; } + + DescriptorSet duplicate(); + + VkDescriptorSet& operator()() noexcept; + VkDescriptorSet& get() noexcept; + + private: + std::array _desc_set; + class DescriptorPool* _pool = nullptr; + class DescriptorSetLayout* _layout = nullptr; + class Renderer* _renderer = nullptr; + }; +} + 
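+/*
+	Illustrative usage sketch, not code from this repository. The binding
+	indices 0 and 1 and the variable names are assumptions; bindings must
+	match the DescriptorSetLayout the set was allocated from. Note the
+	asymmetry implemented in vk_descriptor_set.cpp earlier in this diff:
+	a UBO write updates the set of every frame in flight, while an image
+	write only updates the set of the currently active frame.
+
+	DescriptorSet set;
+	set.init(&renderer, &pool, &layout);
+	set.writeDescriptor(0, &ubo);        // uniform buffer at binding 0
+	set.writeDescriptor(1, texture);     // combined image sampler at binding 1
+	VkDescriptorSet current = set.get(); // set for the active frame
+*/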
+#endif
diff --git a/MacroLibX/src/renderer/descriptors/vk_descriptor_set_layout.cpp b/MacroLibX/src/renderer/descriptors/vk_descriptor_set_layout.cpp
new file mode 100644
index 0000000..12b98d8
--- /dev/null
+++ b/MacroLibX/src/renderer/descriptors/vk_descriptor_set_layout.cpp
@@ -0,0 +1,47 @@
+/* ************************************************************************** */
+/*                                                                            */
+/*                                                        :::      ::::::::   */
+/*   vk_descriptor_set_layout.cpp                       :+:      :+:    :+:   */
+/*                                                    +:+ +:+         +:+     */
+/*   By: maldavid                                   +#+  +:+       +#+        */
+/*                                                +#+#+#+#+#+   +#+           */
+/*   Created: 2023/01/23 18:37:28 by maldavid          #+#    #+#             */
+/*   Updated: 2024/01/03 13:14:58 by maldavid         ###   ########.fr       */
+/*                                                                            */
+/* ************************************************************************** */
+
+#include "vk_descriptor_set_layout.h"
+#include
+
+namespace mlx
+{
+	void DescriptorSetLayout::init(std::vector<std::pair<int, VkDescriptorType>> binds, VkShaderStageFlagBits stage)
+	{
+		std::vector<VkDescriptorSetLayoutBinding> bindings(binds.size());
+		for(std::size_t i = 0; i < binds.size(); i++)
+		{
+			bindings[i].binding = binds[i].first;
+			bindings[i].descriptorCount = 1;
+			bindings[i].descriptorType = binds[i].second;
+			bindings[i].pImmutableSamplers = nullptr;
+			bindings[i].stageFlags = stage;
+		}
+
+		_bindings = std::move(binds);
+
+		VkDescriptorSetLayoutCreateInfo layoutInfo{};
+		layoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+		layoutInfo.bindingCount = _bindings.size();
+		layoutInfo.pBindings = bindings.data();
+
+		VkResult res = vkCreateDescriptorSetLayout(Render_Core::get().getDevice().get(), &layoutInfo, nullptr, &_layout);
+		if(res != VK_SUCCESS)
+			core::error::report(e_kind::fatal_error, "Vulkan : failed to create descriptor set layout, %s", RCore::verbaliseResultVk(res));
+	}
+
+	void DescriptorSetLayout::destroy() noexcept
+	{
+		vkDestroyDescriptorSetLayout(Render_Core::get().getDevice().get(), _layout, nullptr);
+		_layout = VK_NULL_HANDLE;
+	}
+}
diff --git a/MacroLibX/src/renderer/descriptors/vk_descriptor_set_layout.h b/MacroLibX/src/renderer/descriptors/vk_descriptor_set_layout.h
new file mode 100644
index 0000000..dd9495b
--- /dev/null
+++ b/MacroLibX/src/renderer/descriptors/vk_descriptor_set_layout.h
@@ -0,0 +1,40 @@
+/* ************************************************************************** */
+/*                                                                            */
+/*                                                        :::      ::::::::   */
+/*   vk_descriptor_set_layout.h                         :+:      :+:    :+:   */
+/*                                                    +:+ +:+         +:+     */
+/*   By: maldavid                                   +#+  +:+       +#+        */
+/*                                                +#+#+#+#+#+   +#+           */
+/*   Created: 2023/01/23 18:36:22 by maldavid          #+#    #+#             */
+/*   Updated: 2024/01/03 15:27:55 by maldavid         ###   ########.fr       */
+/*                                                                            */
+/* ************************************************************************** */
+
+#ifndef __VK_DESCRIPTOR_SET_LAYOUT__
+#define __VK_DESCRIPTOR_SET_LAYOUT__
+
+#include <utility>
+#include <vector>
+#include
+#include
+#include
+
+namespace mlx
+{
+	class DescriptorSetLayout
+	{
+		public:
+			void init(std::vector<std::pair<int, VkDescriptorType>> binds, VkShaderStageFlagBits stage);
+			void destroy() noexcept;
+
+			inline VkDescriptorSetLayout& operator()() noexcept { return _layout; }
+			inline VkDescriptorSetLayout& get() noexcept { return _layout; }
+			inline const std::vector<std::pair<int, VkDescriptorType>>& getBindings() const noexcept { return _bindings; }
+
+		private:
+			VkDescriptorSetLayout _layout = VK_NULL_HANDLE;
+			std::vector<std::pair<int, VkDescriptorType>> _bindings;
+	};
+}
+
+#endif
diff --git a/MacroLibX/src/renderer/font.cpp b/MacroLibX/src/renderer/font.cpp
new file mode 100644
index 0000000..b053b8f
--- /dev/null
+++ b/MacroLibX/src/renderer/font.cpp
@@ -0,0 +1,84 @@
+/* ************************************************************************** */
+/*                                                                            */
+/*                                                        :::      ::::::::   */
+/*   font.cpp                                           :+:      :+:    :+:   */
+/*                                                    +:+ +:+         +:+     */
+/*   By: maldavid                                   +#+  +:+       +#+        */
+/*                                                +#+#+#+#+#+   +#+           */
+/*   Created: 2023/12/11 22:06:09 by kbz_8             #+#    #+#             */
+/*   Updated: 2023/12/14 19:11:41 by maldavid         ###   ########.fr       */
+/*                                                                            */
+/* ************************************************************************** */
+
+#include <fstream>
+#include
+#include
+
+constexpr const int RANGE = 1024;
+
+namespace mlx
+{
+	Font::Font(Renderer& renderer, const std::filesystem::path& path, float scale) : non_copyable(), _name(path.string()), _scale(scale)
+	{
+		std::vector<uint8_t> tmp_bitmap(RANGE * RANGE);
+		std::vector<uint8_t> vulkan_bitmap(RANGE * RANGE * 4);
+
+		std::ifstream file(path, std::ios::binary);
+		if(!file.is_open())
+		{
+			core::error::report(e_kind::error, "Font load : cannot open font file, %s", _name.c_str());
+			return;
+		}
+		std::ifstream::pos_type fileSize = std::filesystem::file_size(path);
+		file.seekg(0, std::ios::beg);
+		std::vector<uint8_t> bytes(fileSize);
+		file.read(reinterpret_cast<char*>(bytes.data()), fileSize);
+		file.close();
+
+		stbtt_pack_context pc;
+		stbtt_PackBegin(&pc, tmp_bitmap.data(), RANGE, RANGE, RANGE, 1, nullptr);
+		stbtt_PackFontRange(&pc, bytes.data(), 0, scale, 32, 96, _cdata.data());
+		stbtt_PackEnd(&pc);
+		for(int i = 0, j = 0; i < RANGE * RANGE; i++, j += 4)
+		{
+			vulkan_bitmap[j + 0] = tmp_bitmap[i];
+			vulkan_bitmap[j + 1] = tmp_bitmap[i];
+			vulkan_bitmap[j + 2] = tmp_bitmap[i];
+			vulkan_bitmap[j + 3] = tmp_bitmap[i];
+		}
+		#ifdef DEBUG
+			_atlas.create(vulkan_bitmap.data(), RANGE, RANGE, VK_FORMAT_R8G8B8A8_UNORM, std::string(_name + "_font_atlas").c_str(), true);
+		#else
+			_atlas.create(vulkan_bitmap.data(), RANGE, RANGE, VK_FORMAT_R8G8B8A8_UNORM, nullptr, true);
+		#endif
+		_atlas.setDescriptor(renderer.getFragDescriptorSet().duplicate());
+	}
+
+	Font::Font(class Renderer& renderer, const std::string& name, const std::vector<uint8_t>& ttf_data, float scale) : non_copyable(), _name(name), _scale(scale)
+	{
+		std::vector<uint8_t> tmp_bitmap(RANGE * RANGE);
+		std::vector<uint8_t> vulkan_bitmap(RANGE * RANGE * 4);
+		stbtt_pack_context pc;
+		stbtt_PackBegin(&pc, tmp_bitmap.data(), RANGE, RANGE, RANGE, 1, nullptr);
+		stbtt_PackFontRange(&pc, ttf_data.data(), 0, scale, 32, 96, _cdata.data());
+		stbtt_PackEnd(&pc);
+		for(int i = 0, j = 0; i < RANGE * RANGE; i++, j += 4)
+		{
+			vulkan_bitmap[j + 0] = tmp_bitmap[i];
+			vulkan_bitmap[j + 1] = tmp_bitmap[i];
+			vulkan_bitmap[j + 2] = tmp_bitmap[i];
+			vulkan_bitmap[j + 3] = tmp_bitmap[i];
+		}
+		#ifdef DEBUG
+			_atlas.create(vulkan_bitmap.data(), RANGE, RANGE, VK_FORMAT_R8G8B8A8_UNORM, std::string(_name + "_font_atlas").c_str(), true);
+		#else
+			_atlas.create(vulkan_bitmap.data(), RANGE, RANGE, VK_FORMAT_R8G8B8A8_UNORM, nullptr, true);
+		#endif
+		_atlas.setDescriptor(renderer.getFragDescriptorSet().duplicate());
+	}
+
+	Font::~Font()
+	{
+		_atlas.destroy();
+	}
+}
diff --git a/MacroLibX/src/renderer/font.h b/MacroLibX/src/renderer/font.h
new file mode 100644
index 0000000..d25c4b7
--- /dev/null
+++ b/MacroLibX/src/renderer/font.h
@@ -0,0 +1,60 @@
+/* ************************************************************************** */
+/*                                                                            */
+/*                                                        :::      ::::::::   */
+/*   font.h                                             :+:      :+:    :+:   */
+/*                                                    +:+ +:+         +:+     */
+/*   By: maldavid                                   +#+  +:+       +#+        */
+/*                                                +#+#+#+#+#+   +#+           */
+/*   Created: 2023/12/11 21:17:04 by kbz_8             #+#    #+#             */
+/*   Updated: 2023/12/14 17:51:40 by maldavid         ###   ########.fr       */
+/*                                                                            */
+/* ************************************************************************** */
+
+#ifndef __MLX_FONT__
+#define __MLX_FONT__
+
+#include <array>
+#include <filesystem>
+#include <string>
+#include <vector>
+#include
+
+namespace mlx
+{
+	class Font : public non_copyable
+	{
+		public:
+			Font() =
delete; + Font(class Renderer& renderer, const std::filesystem::path& path, float scale); + Font(class Renderer& renderer, const std::string& name, const std::vector& ttf_data, float scale); + inline const std::string& getName() const { return _name; } + inline float getScale() const noexcept { return _scale; } + inline const std::array& getCharData() const { return _cdata; } + inline const TextureAtlas& getAtlas() const noexcept { return _atlas; } + inline bool operator==(const Font& rhs) const { return rhs._name == _name; } + inline bool operator!=(const Font& rhs) const { return rhs._name != _name; } + ~Font(); + + private: + std::array _cdata; + TextureAtlas _atlas; + std::string _name; + float _scale = 0; + }; +} + +namespace std +{ + template <> + struct hash + { + std::size_t operator()(const mlx::Font& f) const noexcept + { + std::size_t hash = 0; + mlx::hashCombine(hash, f.getName(), f.getScale()); + return hash; + } + }; +} + +#endif diff --git a/MacroLibX/src/renderer/images/texture.cpp b/MacroLibX/src/renderer/images/texture.cpp new file mode 100644 index 0000000..5a3b3da --- /dev/null +++ b/MacroLibX/src/renderer/images/texture.cpp @@ -0,0 +1,171 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* texture.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/03/31 18:03:35 by maldavid #+# #+# */ +/* Updated: 2023/12/31 00:49:16 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include +#include +#include +#include +#include + +#define STB_IMAGE_IMPLEMENTATION +#include + +#include + +#ifdef IMAGE_OPTIMIZED + #define TILING VK_IMAGE_TILING_OPTIMAL +#else + #define TILING VK_IMAGE_TILING_LINEAR +#endif + +namespace mlx +{ + void Texture::create(uint8_t* pixels, uint32_t width, uint32_t height, VkFormat format, const char* name, bool dedicated_memory) + { + Image::create(width, height, format, TILING, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, name, dedicated_memory); + Image::createImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); + Image::createSampler(); + transitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); + + std::vector vertexData = { + {{0, 0}, {1.f, 1.f, 1.f, 1.f}, {0.0f, 0.0f}}, + {{width, 0}, {1.f, 1.f, 1.f, 1.f}, {1.0f, 0.0f}}, + {{width, height}, {1.f, 1.f, 1.f, 1.f}, {1.0f, 1.0f}}, + {{0, height}, {1.f, 1.f, 1.f, 1.f}, {0.0f, 1.0f}} + }; + + std::vector indexData = { 0, 1, 2, 2, 3, 0 }; + + #ifdef DEBUG + _vbo.create(sizeof(Vertex) * vertexData.size(), vertexData.data(), name); + _ibo.create(sizeof(uint16_t) * indexData.size(), indexData.data(), name); + _name = name; + #else + _vbo.create(sizeof(Vertex) * vertexData.size(), vertexData.data(), nullptr); + _ibo.create(sizeof(uint16_t) * indexData.size(), indexData.data(), nullptr); + #endif + + Buffer staging_buffer; + std::size_t size = width * height * formatSize(format); + if(pixels != nullptr) + { + #ifdef DEBUG + staging_buffer.create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, name, pixels); + #else + staging_buffer.create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, nullptr, pixels); + #endif + } + else + { + std::vector default_pixels(width * height, 0x00000000); + #ifdef DEBUG + staging_buffer.create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, name, 
default_pixels.data());
+			#else
+				staging_buffer.create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, nullptr, default_pixels.data());
+			#endif
+		}
+		Image::copyFromBuffer(staging_buffer);
+		staging_buffer.destroy();
+	}
+
+	void Texture::setPixel(int x, int y, uint32_t color) noexcept
+	{
+		if(x < 0 || y < 0 || static_cast<uint32_t>(x) >= getWidth() || static_cast<uint32_t>(y) >= getHeight())
+			return;
+		if(_map == nullptr)
+			openCPUmap();
+		_cpu_map[(y * getWidth()) + x] = color;
+		_has_been_modified = true;
+	}
+
+	int Texture::getPixel(int x, int y) noexcept
+	{
+		if(x < 0 || y < 0 || static_cast<uint32_t>(x) >= getWidth() || static_cast<uint32_t>(y) >= getHeight())
+			return 0;
+		if(_map == nullptr)
+			openCPUmap();
+		uint32_t color = _cpu_map[(y * getWidth()) + x];
+		return (color);
+	}
+
+	void Texture::openCPUmap()
+	{
+		if(_map != nullptr)
+			return;
+
+		#ifdef DEBUG
+			core::error::report(e_kind::message, "Texture : enabling CPU mapping");
+		#endif
+		std::size_t size = getWidth() * getHeight() * formatSize(getFormat());
+		_buf_map.emplace();
+		#ifdef DEBUG
+			_buf_map->create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, _name.c_str());
+		#else
+			_buf_map->create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, nullptr);
+		#endif
+		Image::copyToBuffer(*_buf_map);
+		_buf_map->mapMem(&_map);
+		_cpu_map = std::vector<uint32_t>(getWidth() * getHeight(), 0);
+		std::memcpy(_cpu_map.data(), _map, size);
+		#ifdef DEBUG
+			core::error::report(e_kind::message, "Texture : mapped CPU memory using staging buffer");
+		#endif
+	}
+
+	void Texture::render(Renderer& renderer, int x, int y)
+	{
+		if(_has_been_modified)
+		{
+			std::memcpy(_map, _cpu_map.data(), _cpu_map.size() * formatSize(getFormat()));
+			Image::copyFromBuffer(*_buf_map);
+			_has_been_modified = false;
+		}
+		auto cmd = renderer.getActiveCmdBuffer();
+		_vbo.bind(renderer);
+		_ibo.bind(renderer);
+		glm::vec2 translate(x, y);
+		vkCmdPushConstants(cmd.get(), renderer.getPipeline().getPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(translate), &translate);
+		vkCmdDrawIndexed(cmd.get(), static_cast<uint32_t>(_ibo.getSize() / sizeof(uint16_t)), 1, 0, 0, 0);
+	}
+
+	void Texture::destroy() noexcept
+	{
+		Image::destroy();
+		if(_buf_map.has_value())
+			_buf_map->destroy();
+		_vbo.destroy();
+		_ibo.destroy();
+	}
+
+	Texture stbTextureLoad(std::filesystem::path file, int* w, int* h)
+	{
+		Texture texture;
+		int channels;
+		uint8_t* data = nullptr;
+		std::string filename = file.string();
+
+		if(!std::filesystem::exists(std::move(file)))
+			core::error::report(e_kind::fatal_error, "Image : file not found '%s'", filename.c_str());
+		if(stbi_is_hdr(filename.c_str()))
+			core::error::report(e_kind::fatal_error, "Texture : unsupported image format '%s'", filename.c_str());
+		int dummy_w;
+		int dummy_h;
+		data = stbi_load(filename.c_str(), (w == nullptr ? &dummy_w : w), (h == nullptr ? &dummy_h : h), &channels, 4);
+		#ifdef DEBUG
+			texture.create(data, (w == nullptr ? dummy_w : *w), (h == nullptr ? dummy_h : *h), VK_FORMAT_R8G8B8A8_UNORM, filename.c_str());
+		#else
+			texture.create(data, (w == nullptr ?
dummy_h : *h), VK_FORMAT_R8G8B8A8_UNORM, nullptr); + #endif + stbi_image_free(data); + return texture; + } +} diff --git a/MacroLibX/src/renderer/images/texture.h b/MacroLibX/src/renderer/images/texture.h new file mode 100644 index 0000000..19f7d24 --- /dev/null +++ b/MacroLibX/src/renderer/images/texture.h @@ -0,0 +1,79 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* texture.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/03/08 02:24:58 by maldavid #+# #+# */ +/* Updated: 2023/12/23 18:49:12 by kbz_8 ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_TEXTURE__ +#define __MLX_TEXTURE__ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace mlx +{ + class Texture : public Image + { + public: + Texture() = default; + + void create(uint8_t* pixels, uint32_t width, uint32_t height, VkFormat format, const char* name, bool dedicated_memory = false); + void render(class Renderer& renderer, int x, int y); + void destroy() noexcept override; + + void setPixel(int x, int y, uint32_t color) noexcept; + int getPixel(int x, int y) noexcept; + + inline void setDescriptor(DescriptorSet set) noexcept { _set = std::move(set); } + inline VkDescriptorSet getSet() noexcept { return _set.isInit() ? _set.get() : VK_NULL_HANDLE; } + inline void updateSet(int binding) noexcept { _set.writeDescriptor(binding, *this); _has_been_updated = true; } + inline bool hasBeenUpdated() const noexcept { return _has_been_updated; } + inline constexpr void resetUpdate() noexcept { _has_been_updated = false; } + + ~Texture() = default; + + private: + void openCPUmap(); + + private: + C_VBO _vbo; + C_IBO _ibo; + #ifdef DEBUG + std::string _name; + #endif + DescriptorSet _set; + std::vector _cpu_map; + std::optional _buf_map = std::nullopt; + void* _map = nullptr; + bool _has_been_modified = false; + bool _has_been_updated = false; + }; + + Texture stbTextureLoad(std::filesystem::path file, int* w, int* h); + + struct TextureRenderData + { + Texture* texture; + int x; + int y; + + TextureRenderData(Texture* _texture, int _x, int _y) : texture(_texture), x(_x), y(_y) {} + bool operator==(const TextureRenderData& rhs) const { return texture == rhs.texture && x == rhs.x && y == rhs.y; } + }; +} + +#endif diff --git a/MacroLibX/src/renderer/images/texture_atlas.cpp b/MacroLibX/src/renderer/images/texture_atlas.cpp new file mode 100644 index 0000000..b6eb61b --- /dev/null +++ b/MacroLibX/src/renderer/images/texture_atlas.cpp @@ -0,0 +1,55 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* texture_atlas.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/04/07 16:40:09 by maldavid #+# #+# */ +/* Updated: 2023/12/31 00:52:01 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include + +#ifdef IMAGE_OPTIMIZED + #define TILING VK_IMAGE_TILING_OPTIMAL +#else + #define TILING VK_IMAGE_TILING_LINEAR +#endif + +namespace mlx +{ + void TextureAtlas::create(uint8_t* pixels, uint32_t width, uint32_t height, VkFormat format, const char* name, bool dedicated_memory) + { + Image::create(width, height, format, TILING, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | 
VK_IMAGE_USAGE_SAMPLED_BIT, name, dedicated_memory); + Image::createImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); + Image::createSampler(); + transitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); + + if(pixels == nullptr) + { + core::error::report(e_kind::warning, "Renderer : creating an empty texture atlas. They cannot be updated after creation, this might be a mistake or a bug, please report"); + return; + } + Buffer staging_buffer; + std::size_t size = width * height * formatSize(format); + staging_buffer.create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, name, pixels); + Image::copyFromBuffer(staging_buffer); + staging_buffer.destroy(); + } + + void TextureAtlas::render(Renderer& renderer, int x, int y, uint32_t ibo_size) const + { + auto cmd = renderer.getActiveCmdBuffer().get(); + + glm::vec2 translate(x, y); + vkCmdPushConstants(cmd, renderer.getPipeline().getPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(translate), &translate); + vkCmdDrawIndexed(cmd, ibo_size / sizeof(uint16_t), 1, 0, 0, 0); + } + + void TextureAtlas::destroy() noexcept + { + Image::destroy(); + } +} diff --git a/MacroLibX/src/renderer/images/texture_atlas.h b/MacroLibX/src/renderer/images/texture_atlas.h new file mode 100644 index 0000000..6191ca9 --- /dev/null +++ b/MacroLibX/src/renderer/images/texture_atlas.h @@ -0,0 +1,43 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* texture_atlas.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/04/07 16:36:33 by maldavid #+# #+# */ +/* Updated: 2023/12/23 18:49:25 by kbz_8 ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_TEXTURE_ATLAS__ +#define __MLX_TEXTURE_ATLAS__ + +#include +#include +#include +#include + +namespace mlx +{ + class TextureAtlas : public Image + { + public: + TextureAtlas() = default; + + void create(uint8_t* pixels, uint32_t width, uint32_t height, VkFormat format, const char* name, bool dedicated_memory = false); + void render(class Renderer& renderer, int x, int y, uint32_t ibo_size) const; + void destroy() noexcept override; + + inline void setDescriptor(DescriptorSet&& set) noexcept { _set = set; } + inline VkDescriptorSet getSet() noexcept { return _set.isInit() ? 
_set.get() : VK_NULL_HANDLE; } + inline void updateSet(int binding) const noexcept { _set.writeDescriptor(binding, *this); } + + ~TextureAtlas() = default; + + private: + DescriptorSet _set; + }; +} + +#endif diff --git a/MacroLibX/src/renderer/images/vk_image.cpp b/MacroLibX/src/renderer/images/vk_image.cpp new file mode 100644 index 0000000..9cca944 --- /dev/null +++ b/MacroLibX/src/renderer/images/vk_image.cpp @@ -0,0 +1,475 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_image.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/01/25 11:59:07 by maldavid #+# #+# */ +/* Updated: 2024/01/03 13:16:21 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include "vk_image.h" +#include +#include +#include +#include + +namespace mlx +{ + bool isStencilFormat(VkFormat format) + { + switch(format) + { + case VK_FORMAT_D32_SFLOAT_S8_UINT: + case VK_FORMAT_D24_UNORM_S8_UINT: + return true; + + default: return false; + } + } + + bool isDepthFormat(VkFormat format) + { + switch(format) + { + case VK_FORMAT_D16_UNORM: + case VK_FORMAT_D32_SFLOAT: + case VK_FORMAT_D32_SFLOAT_S8_UINT: + case VK_FORMAT_D24_UNORM_S8_UINT: + case VK_FORMAT_D16_UNORM_S8_UINT: + return true; + + default: return false; + } + } + + VkFormat bitsToFormat(uint32_t bits) + { + switch(bits) + { + case 8: return VK_FORMAT_R8_UNORM; + case 16: return VK_FORMAT_R8G8_UNORM; + case 24: return VK_FORMAT_R8G8B8_UNORM; + case 32: return VK_FORMAT_R8G8B8A8_UNORM; + case 48: return VK_FORMAT_R16G16B16_SFLOAT; + case 64: return VK_FORMAT_R16G16B16A16_SFLOAT; + case 96: return VK_FORMAT_R32G32B32_SFLOAT; + case 128: return VK_FORMAT_R32G32B32A32_SFLOAT; + + default: + core::error::report(e_kind::fatal_error, "Vulkan : unsupported image bit-depth"); + return VK_FORMAT_R8G8B8A8_UNORM; + } + } + + VkPipelineStageFlags layoutToAccessMask(VkImageLayout layout, bool isDestination) + { + VkPipelineStageFlags accessMask = 0; + + switch(layout) + { + case VK_IMAGE_LAYOUT_UNDEFINED: + if(isDestination) + core::error::report(e_kind::error, "Vulkan : the new layout used in a transition must not be VK_IMAGE_LAYOUT_UNDEFINED"); + break; + case VK_IMAGE_LAYOUT_GENERAL: accessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; break; + case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: accessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; break; + case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: accessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; break; + case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: + accessMask = VK_ACCESS_SHADER_READ_BIT; // VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; + break; + case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: accessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT; break; + case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: accessMask = VK_ACCESS_TRANSFER_READ_BIT; break; + case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: accessMask = VK_ACCESS_TRANSFER_WRITE_BIT; break; + case VK_IMAGE_LAYOUT_PREINITIALIZED: + if(!isDestination) + accessMask = VK_ACCESS_HOST_WRITE_BIT; + else + core::error::report(e_kind::error, "Vulkan : the new layout used in a transition must not be VK_IMAGE_LAYOUT_PREINITIALIZED"); + break; + case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL: accessMask = 
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; break; + case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL: accessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; break; + case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: accessMask = VK_ACCESS_MEMORY_READ_BIT; break; + + default: core::error::report(e_kind::error, "Vulkan : unexpected image layout"); break; + } + + return accessMask; + } + + VkPipelineStageFlags accessFlagsToPipelineStage(VkAccessFlags accessFlags, VkPipelineStageFlags stageFlags) + { + VkPipelineStageFlags stages = 0; + + while(accessFlags != 0) + { + VkAccessFlagBits AccessFlag = static_cast(accessFlags & (~(accessFlags - 1))); + if(AccessFlag == 0 || (AccessFlag & (AccessFlag - 1)) != 0) + core::error::report(e_kind::fatal_error, "Vulkan : an error has been caught during access flag to pipeline stage operation"); + accessFlags &= ~AccessFlag; + + switch(AccessFlag) + { + case VK_ACCESS_INDIRECT_COMMAND_READ_BIT: stages |= VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT; break; + case VK_ACCESS_INDEX_READ_BIT: stages |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT; break; + case VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT: stages |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT; break; + case VK_ACCESS_UNIFORM_READ_BIT: stages |= stageFlags | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; + case VK_ACCESS_INPUT_ATTACHMENT_READ_BIT: stages |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; break; + case VK_ACCESS_SHADER_READ_BIT: stages |= stageFlags | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; + case VK_ACCESS_SHADER_WRITE_BIT: stages |= stageFlags | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; + case VK_ACCESS_COLOR_ATTACHMENT_READ_BIT: stages |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; break; + case VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT: stages |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; break; + case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT: stages |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; break; + case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT: stages |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; break; + case VK_ACCESS_TRANSFER_READ_BIT: stages |= VK_PIPELINE_STAGE_TRANSFER_BIT; break; + case VK_ACCESS_TRANSFER_WRITE_BIT: stages |= VK_PIPELINE_STAGE_TRANSFER_BIT; break; + case VK_ACCESS_HOST_READ_BIT: stages |= VK_PIPELINE_STAGE_HOST_BIT; break; + case VK_ACCESS_HOST_WRITE_BIT: stages |= VK_PIPELINE_STAGE_HOST_BIT; break; + case VK_ACCESS_MEMORY_READ_BIT: break; + case VK_ACCESS_MEMORY_WRITE_BIT: break; + + default: core::error::report(e_kind::error, "Vulkan : unknown access flag"); break; + } + } + return stages; + } + + void Image::create(uint32_t width, uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, const char* name, bool dedicated_memory) + { + _width = width; + _height = height; + _format = format; + _tiling = tiling; + + VkImageCreateInfo imageInfo{}; + imageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; + imageInfo.imageType = VK_IMAGE_TYPE_2D; + imageInfo.extent.width = width; + imageInfo.extent.height = height; + imageInfo.extent.depth = 1; + imageInfo.mipLevels = 1; + imageInfo.arrayLayers = 1; + imageInfo.format = format; + imageInfo.tiling = tiling; + imageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; + imageInfo.usage = usage; + imageInfo.samples = VK_SAMPLE_COUNT_1_BIT; + imageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE; + + VmaAllocationCreateInfo alloc_info{}; + alloc_info.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE; + if(dedicated_memory) + { + 
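+			// A dedicated allocation gives this image its own VkDeviceMemory block
+			// instead of a sub-range of a shared VMA pool; together with the maximum
+			// priority set below, allocators and drivers that honour memory priority
+			// will try to keep it resident in device-local memory.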
alloc_info.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; + alloc_info.priority = 1.0f; + } + + _allocation = Render_Core::get().getAllocator().createImage(&imageInfo, &alloc_info, _image, name); + } + + void Image::createImageView(VkImageViewType type, VkImageAspectFlags aspectFlags) noexcept + { + VkImageViewCreateInfo viewInfo{}; + viewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; + viewInfo.image = _image; + viewInfo.viewType = type; + viewInfo.format = _format; + viewInfo.subresourceRange.aspectMask = aspectFlags; + viewInfo.subresourceRange.baseMipLevel = 0; + viewInfo.subresourceRange.levelCount = 1; + viewInfo.subresourceRange.baseArrayLayer = 0; + viewInfo.subresourceRange.layerCount = 1; + + VkResult res = vkCreateImageView(Render_Core::get().getDevice().get(), &viewInfo, nullptr, &_image_view); + if(res != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create an image view, %s", RCore::verbaliseResultVk(res)); + } + + void Image::createSampler() noexcept + { + VkSamplerCreateInfo info{}; + info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO; + info.magFilter = VK_FILTER_NEAREST; + info.minFilter = VK_FILTER_NEAREST; + info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST; + info.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT; + info.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT; + info.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT; + info.minLod = -1000; + info.maxLod = 1000; + info.anisotropyEnable = VK_FALSE; + info.maxAnisotropy = 1.0f; + + VkResult res = vkCreateSampler(Render_Core::get().getDevice().get(), &info, nullptr, &_sampler); + if(res != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create an image, %s", RCore::verbaliseResultVk(res)); + } + + void Image::copyFromBuffer(Buffer& buffer) + { + CmdBuffer& cmd = Render_Core::get().getSingleTimeCmdBuffer(); + cmd.beginRecord(); + + VkImageLayout layout_save = _layout; + transitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &cmd); + + VkBufferImageCopy region{}; + region.bufferOffset = 0; + region.bufferRowLength = 0; + region.bufferImageHeight = 0; + region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + region.imageSubresource.mipLevel = 0; + region.imageSubresource.baseArrayLayer = 0; + region.imageSubresource.layerCount = 1; + region.imageOffset = { 0, 0, 0 }; + region.imageExtent = { _width, _height, 1 }; + + vkCmdCopyBufferToImage(cmd.get(), buffer.get(), _image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion); + + transitionLayout(layout_save, &cmd); + + cmd.endRecord(); + cmd.submitIdle(); + } + + void Image::copyToBuffer(Buffer& buffer) + { + CmdBuffer& cmd = Render_Core::get().getSingleTimeCmdBuffer(); + cmd.beginRecord(); + + VkImageLayout layout_save = _layout; + transitionLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, &cmd); + + VkBufferImageCopy region{}; + region.bufferOffset = 0; + region.bufferRowLength = 0; + region.bufferImageHeight = 0; + region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + region.imageSubresource.mipLevel = 0; + region.imageSubresource.baseArrayLayer = 0; + region.imageSubresource.layerCount = 1; + region.imageOffset = { 0, 0, 0 }; + region.imageExtent = { _width, _height, 1 }; + + vkCmdCopyImageToBuffer(cmd.get(), _image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, buffer.get(), 1, ®ion); + + transitionLayout(layout_save, &cmd); + + cmd.endRecord(); + cmd.submitIdle(); + } + + void Image::transitionLayout(VkImageLayout new_layout, CmdBuffer* cmd) + { + if(new_layout == _layout) + return; + + bool 
singleTime = (cmd == nullptr); + if(singleTime) + { + cmd = &Render_Core::get().getSingleTimeCmdBuffer(); + cmd->beginRecord(); + } + + VkImageMemoryBarrier barrier{}; + barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + barrier.oldLayout = _layout; + barrier.newLayout = new_layout; + barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + barrier.image = _image; + barrier.subresourceRange.aspectMask = isDepthFormat(_format) ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT; + barrier.subresourceRange.baseMipLevel = 0; + barrier.subresourceRange.levelCount = 1; + barrier.subresourceRange.baseArrayLayer = 0; + barrier.subresourceRange.layerCount = 1; + barrier.srcAccessMask = layoutToAccessMask(_layout, false); + barrier.dstAccessMask = layoutToAccessMask(new_layout, true); + if(isStencilFormat(_format)) + barrier.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT; + + VkPipelineStageFlags sourceStage = 0; + if(barrier.oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) + sourceStage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + else if(barrier.srcAccessMask != 0) + sourceStage = accessFlagsToPipelineStage(barrier.srcAccessMask, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT); + else + sourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; + + VkPipelineStageFlags destinationStage = 0; + if(barrier.newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) + destinationStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; + else if(barrier.dstAccessMask != 0) + destinationStage = accessFlagsToPipelineStage(barrier.dstAccessMask, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT); + else + destinationStage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + + vkCmdPipelineBarrier(cmd->get(), sourceStage, destinationStage, 0, 0, nullptr, 0, nullptr, 1, &barrier); + + if(singleTime) + { + cmd->endRecord(); + cmd->submitIdle(); + } + _layout = new_layout; + } + + void Image::destroySampler() noexcept + { + if(_sampler != VK_NULL_HANDLE) + vkDestroySampler(Render_Core::get().getDevice().get(), _sampler, nullptr); + _sampler = VK_NULL_HANDLE; + } + + void Image::destroyImageView() noexcept + { + if(_image_view != VK_NULL_HANDLE) + vkDestroyImageView(Render_Core::get().getDevice().get(), _image_view, nullptr); + _image_view = VK_NULL_HANDLE; + } + + void Image::destroy() noexcept + { + destroySampler(); + destroyImageView(); + + if(_image != VK_NULL_HANDLE) + Render_Core::get().getAllocator().destroyImage(_allocation, _image); + _image = VK_NULL_HANDLE; + } + + uint32_t formatSize(VkFormat format) + { + switch(format) + { + case VK_FORMAT_UNDEFINED: return 0; + case VK_FORMAT_R4G4_UNORM_PACK8: return 1; + case VK_FORMAT_R4G4B4A4_UNORM_PACK16: return 2; + case VK_FORMAT_B4G4R4A4_UNORM_PACK16: return 2; + case VK_FORMAT_R5G6B5_UNORM_PACK16: return 2; + case VK_FORMAT_B5G6R5_UNORM_PACK16: return 2; + case VK_FORMAT_R5G5B5A1_UNORM_PACK16: return 2; + case VK_FORMAT_B5G5R5A1_UNORM_PACK16: return 2; + case VK_FORMAT_A1R5G5B5_UNORM_PACK16: return 2; + case VK_FORMAT_R8_UNORM: return 1; + case VK_FORMAT_R8_SNORM: return 1; + case VK_FORMAT_R8_USCALED: return 1; + case VK_FORMAT_R8_SSCALED: return 1; + case VK_FORMAT_R8_UINT: return 1; + case VK_FORMAT_R8_SINT: return 1; + case VK_FORMAT_R8_SRGB: return 1; + case VK_FORMAT_R8G8_UNORM: return 2; + case VK_FORMAT_R8G8_SNORM: return 2; + case VK_FORMAT_R8G8_USCALED: return 2; + case 
VK_FORMAT_R8G8_SSCALED: return 2; + case VK_FORMAT_R8G8_UINT: return 2; + case VK_FORMAT_R8G8_SINT: return 2; + case VK_FORMAT_R8G8_SRGB: return 2; + case VK_FORMAT_R8G8B8_UNORM: return 3; + case VK_FORMAT_R8G8B8_SNORM: return 3; + case VK_FORMAT_R8G8B8_USCALED: return 3; + case VK_FORMAT_R8G8B8_SSCALED: return 3; + case VK_FORMAT_R8G8B8_UINT: return 3; + case VK_FORMAT_R8G8B8_SINT: return 3; + case VK_FORMAT_R8G8B8_SRGB: return 3; + case VK_FORMAT_B8G8R8_UNORM: return 3; + case VK_FORMAT_B8G8R8_SNORM: return 3; + case VK_FORMAT_B8G8R8_USCALED: return 3; + case VK_FORMAT_B8G8R8_SSCALED: return 3; + case VK_FORMAT_B8G8R8_UINT: return 3; + case VK_FORMAT_B8G8R8_SINT: return 3; + case VK_FORMAT_B8G8R8_SRGB: return 3; + case VK_FORMAT_R8G8B8A8_UNORM: return 4; + case VK_FORMAT_R8G8B8A8_SNORM: return 4; + case VK_FORMAT_R8G8B8A8_USCALED: return 4; + case VK_FORMAT_R8G8B8A8_SSCALED: return 4; + case VK_FORMAT_R8G8B8A8_UINT: return 4; + case VK_FORMAT_R8G8B8A8_SINT: return 4; + case VK_FORMAT_R8G8B8A8_SRGB: return 4; + case VK_FORMAT_B8G8R8A8_UNORM: return 4; + case VK_FORMAT_B8G8R8A8_SNORM: return 4; + case VK_FORMAT_B8G8R8A8_USCALED: return 4; + case VK_FORMAT_B8G8R8A8_SSCALED: return 4; + case VK_FORMAT_B8G8R8A8_UINT: return 4; + case VK_FORMAT_B8G8R8A8_SINT: return 4; + case VK_FORMAT_B8G8R8A8_SRGB: return 4; + case VK_FORMAT_A8B8G8R8_UNORM_PACK32: return 4; + case VK_FORMAT_A8B8G8R8_SNORM_PACK32: return 4; + case VK_FORMAT_A8B8G8R8_USCALED_PACK32: return 4; + case VK_FORMAT_A8B8G8R8_SSCALED_PACK32: return 4; + case VK_FORMAT_A8B8G8R8_UINT_PACK32: return 4; + case VK_FORMAT_A8B8G8R8_SINT_PACK32: return 4; + case VK_FORMAT_A8B8G8R8_SRGB_PACK32: return 4; + case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return 4; + case VK_FORMAT_A2R10G10B10_SNORM_PACK32: return 4; + case VK_FORMAT_A2R10G10B10_USCALED_PACK32: return 4; + case VK_FORMAT_A2R10G10B10_SSCALED_PACK32: return 4; + case VK_FORMAT_A2R10G10B10_UINT_PACK32: return 4; + case VK_FORMAT_A2R10G10B10_SINT_PACK32: return 4; + case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return 4; + case VK_FORMAT_A2B10G10R10_SNORM_PACK32: return 4; + case VK_FORMAT_A2B10G10R10_USCALED_PACK32: return 4; + case VK_FORMAT_A2B10G10R10_SSCALED_PACK32: return 4; + case VK_FORMAT_A2B10G10R10_UINT_PACK32: return 4; + case VK_FORMAT_A2B10G10R10_SINT_PACK32: return 4; + case VK_FORMAT_R16_UNORM: return 2; + case VK_FORMAT_R16_SNORM: return 2; + case VK_FORMAT_R16_USCALED: return 2; + case VK_FORMAT_R16_SSCALED: return 2; + case VK_FORMAT_R16_UINT: return 2; + case VK_FORMAT_R16_SINT: return 2; + case VK_FORMAT_R16_SFLOAT: return 2; + case VK_FORMAT_R16G16_UNORM: return 4; + case VK_FORMAT_R16G16_SNORM: return 4; + case VK_FORMAT_R16G16_USCALED: return 4; + case VK_FORMAT_R16G16_SSCALED: return 4; + case VK_FORMAT_R16G16_UINT: return 4; + case VK_FORMAT_R16G16_SINT: return 4; + case VK_FORMAT_R16G16_SFLOAT: return 4; + case VK_FORMAT_R16G16B16_UNORM: return 6; + case VK_FORMAT_R16G16B16_SNORM: return 6; + case VK_FORMAT_R16G16B16_USCALED: return 6; + case VK_FORMAT_R16G16B16_SSCALED: return 6; + case VK_FORMAT_R16G16B16_UINT: return 6; + case VK_FORMAT_R16G16B16_SINT: return 6; + case VK_FORMAT_R16G16B16_SFLOAT: return 6; + case VK_FORMAT_R16G16B16A16_UNORM: return 8; + case VK_FORMAT_R16G16B16A16_SNORM: return 8; + case VK_FORMAT_R16G16B16A16_USCALED: return 8; + case VK_FORMAT_R16G16B16A16_SSCALED: return 8; + case VK_FORMAT_R16G16B16A16_UINT: return 8; + case VK_FORMAT_R16G16B16A16_SINT: return 8; + case VK_FORMAT_R16G16B16A16_SFLOAT: return 8; + case VK_FORMAT_R32_UINT: return 
4; + case VK_FORMAT_R32_SINT: return 4; + case VK_FORMAT_R32_SFLOAT: return 4; + case VK_FORMAT_R32G32_UINT: return 8; + case VK_FORMAT_R32G32_SINT: return 8; + case VK_FORMAT_R32G32_SFLOAT: return 8; + case VK_FORMAT_R32G32B32_UINT: return 12; + case VK_FORMAT_R32G32B32_SINT: return 12; + case VK_FORMAT_R32G32B32_SFLOAT: return 12; + case VK_FORMAT_R32G32B32A32_UINT: return 16; + case VK_FORMAT_R32G32B32A32_SINT: return 16; + case VK_FORMAT_R32G32B32A32_SFLOAT: return 16; + case VK_FORMAT_R64_UINT: return 8; + case VK_FORMAT_R64_SINT: return 8; + case VK_FORMAT_R64_SFLOAT: return 8; + case VK_FORMAT_R64G64_UINT: return 16; + case VK_FORMAT_R64G64_SINT: return 16; + case VK_FORMAT_R64G64_SFLOAT: return 16; + case VK_FORMAT_R64G64B64_UINT: return 24; + case VK_FORMAT_R64G64B64_SINT: return 24; + case VK_FORMAT_R64G64B64_SFLOAT: return 24; + case VK_FORMAT_R64G64B64A64_UINT: return 32; + case VK_FORMAT_R64G64B64A64_SINT: return 32; + case VK_FORMAT_R64G64B64A64_SFLOAT: return 32; + case VK_FORMAT_B10G11R11_UFLOAT_PACK32: return 4; + case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32: return 4; + + default: return 0; + } + } +} diff --git a/MacroLibX/src/renderer/images/vk_image.h b/MacroLibX/src/renderer/images/vk_image.h new file mode 100644 index 0000000..f552fee --- /dev/null +++ b/MacroLibX/src/renderer/images/vk_image.h @@ -0,0 +1,81 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_image.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/01/25 11:54:21 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:28:07 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_VK_IMAGE__ +#define __MLX_VK_IMAGE__ + +#include +#include +#include +#include +#include +#include +#include + +namespace mlx +{ + uint32_t formatSize(VkFormat format); + + class Image + { + friend class SwapChain; + + public: + Image() = default; + + inline void create(VkImage image, VkFormat format, uint32_t width, uint32_t height, VkImageLayout layout = VK_IMAGE_LAYOUT_UNDEFINED) noexcept + { + _image = image; + _format = format; + _width = width; + _height = height; + _layout = layout; + } + void create(uint32_t width, uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, const char* name, bool decated_memory = false); + void createImageView(VkImageViewType type, VkImageAspectFlags aspectFlags) noexcept; + void createSampler() noexcept; + void copyFromBuffer(class Buffer& buffer); + void copyToBuffer(class Buffer& buffer); + void transitionLayout(VkImageLayout new_layout, CmdBuffer* cmd = nullptr); + virtual void destroy() noexcept; + + inline VkImage get() noexcept { return _image; } + inline VkImage operator()() noexcept { return _image; } + inline VkImageView getImageView() const noexcept { return _image_view; } + inline VkFormat getFormat() const noexcept { return _format; } + inline VkImageTiling getTiling() const noexcept { return _tiling; } + inline VkImageLayout getLayout() const noexcept { return _layout; } + inline VkSampler getSampler() const noexcept { return _sampler; } + inline uint32_t getWidth() const noexcept { return _width; } + inline uint32_t getHeight() const noexcept { return _height; } + inline bool isInit() const noexcept { return _image != VK_NULL_HANDLE; } + + virtual ~Image() = default; + + private: + void destroySampler() noexcept; + void destroyImageView() noexcept; + + 
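+		/*
+			Illustrative usage sketch, not code from this repository: creating a
+			sampled image and uploading pixels through a staging Buffer, the same
+			pattern Texture::create() uses earlier in this diff. The variable
+			names (img, staging, w, h) are assumptions for the example.
+
+			Image img;
+			img.create(w, h, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TILING_LINEAR,
+				VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+				"debug_name");
+			img.createImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT);
+			img.createSampler();
+			img.transitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
+			img.copyFromBuffer(staging); // transitions to TRANSFER_DST and restores the layout
+			// ...
+			img.destroy();
+		*/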
private: + VmaAllocation _allocation; + VkImage _image = VK_NULL_HANDLE; + VkImageView _image_view = VK_NULL_HANDLE; + VkSampler _sampler = VK_NULL_HANDLE; + VkFormat _format; + VkImageTiling _tiling; + VkImageLayout _layout = VK_IMAGE_LAYOUT_UNDEFINED; + uint32_t _width = 0; + uint32_t _height = 0; + }; +} + +#endif diff --git a/MacroLibX/src/renderer/pipeline/pipeline.cpp b/MacroLibX/src/renderer/pipeline/pipeline.cpp new file mode 100644 index 0000000..8d234f7 --- /dev/null +++ b/MacroLibX/src/renderer/pipeline/pipeline.cpp @@ -0,0 +1,324 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* pipeline.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/12/18 21:27:38 by maldavid #+# #+# */ +/* Updated: 2024/01/03 13:16:57 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include "pipeline.h" +#include +#include +#include + +namespace mlx +{ + /** + #version 450 core + + layout(location = 0) in vec2 aPos; + layout(location = 1) in vec4 aColor; + layout(location = 2) in vec2 aUV; + + layout(set = 0, binding = 0) uniform uProjection { + mat4 mat; + } uProj; + + layout(push_constant) uniform uModelPushConstant { + vec2 vec; + } uTranslate; + + out gl_PerVertex { + vec4 gl_Position; + }; + + layout(location = 0) out struct { + vec4 Color; + vec2 UV; + } Out; + + void main() + { + Out.Color = aColor; + Out.UV = aUV; + vec2 pos = aPos + uTranslate.vec; + gl_Position = uProj.mat * vec4(pos.x, pos.y, 0.0, 1.0); + } + */ + const std::vector vertex_shader = { + 0x07230203,0x00010000,0x0008000b,0x0000003b,0x00000000,0x00020011,0x00000001,0x0006000b, + 0x00000001,0x4c534c47,0x6474732e,0x3035342e,0x00000000,0x0003000e,0x00000000,0x00000001, + 0x000a000f,0x00000000,0x00000004,0x6e69616d,0x00000000,0x0000000b,0x0000000f,0x00000015, + 0x0000001b,0x00000026,0x00030003,0x00000002,0x000001c2,0x00040005,0x00000004,0x6e69616d, + 0x00000000,0x00030005,0x00000009,0x00000000,0x00050006,0x00000009,0x00000000,0x6f6c6f43, + 0x00000072,0x00040006,0x00000009,0x00000001,0x00005655,0x00030005,0x0000000b,0x0074754f, + 0x00040005,0x0000000f,0x6c6f4361,0x0000726f,0x00030005,0x00000015,0x00565561,0x00030005, + 0x0000001a,0x00736f70,0x00040005,0x0000001b,0x736f5061,0x00000000,0x00070005,0x0000001d, + 0x646f4d75,0x75506c65,0x6f436873,0x6174736e,0x0000746e,0x00040006,0x0000001d,0x00000000, + 0x00636576,0x00050005,0x0000001f,0x61725475,0x616c736e,0x00006574,0x00060005,0x00000024, + 0x505f6c67,0x65567265,0x78657472,0x00000000,0x00060006,0x00000024,0x00000000,0x505f6c67, + 0x7469736f,0x006e6f69,0x00030005,0x00000026,0x00000000,0x00050005,0x00000028,0x6f725075, + 0x7463656a,0x006e6f69,0x00040006,0x00000028,0x00000000,0x0074616d,0x00040005,0x0000002a, + 0x6f725075,0x0000006a,0x00040047,0x0000000b,0x0000001e,0x00000000,0x00040047,0x0000000f, + 0x0000001e,0x00000001,0x00040047,0x00000015,0x0000001e,0x00000002,0x00040047,0x0000001b, + 0x0000001e,0x00000000,0x00050048,0x0000001d,0x00000000,0x00000023,0x00000000,0x00030047, + 0x0000001d,0x00000002,0x00050048,0x00000024,0x00000000,0x0000000b,0x00000000,0x00030047, + 0x00000024,0x00000002,0x00040048,0x00000028,0x00000000,0x00000005,0x00050048,0x00000028, + 0x00000000,0x00000023,0x00000000,0x00050048,0x00000028,0x00000000,0x00000007,0x00000010, + 0x00030047,0x00000028,0x00000002,0x00040047,0x0000002a,0x00000022,0x00000000,0x00040047, + 
0x0000002a,0x00000021,0x00000000,0x00020013,0x00000002,0x00030021,0x00000003,0x00000002, + 0x00030016,0x00000006,0x00000020,0x00040017,0x00000007,0x00000006,0x00000004,0x00040017, + 0x00000008,0x00000006,0x00000002,0x0004001e,0x00000009,0x00000007,0x00000008,0x00040020, + 0x0000000a,0x00000003,0x00000009,0x0004003b,0x0000000a,0x0000000b,0x00000003,0x00040015, + 0x0000000c,0x00000020,0x00000001,0x0004002b,0x0000000c,0x0000000d,0x00000000,0x00040020, + 0x0000000e,0x00000001,0x00000007,0x0004003b,0x0000000e,0x0000000f,0x00000001,0x00040020, + 0x00000011,0x00000003,0x00000007,0x0004002b,0x0000000c,0x00000013,0x00000001,0x00040020, + 0x00000014,0x00000001,0x00000008,0x0004003b,0x00000014,0x00000015,0x00000001,0x00040020, + 0x00000017,0x00000003,0x00000008,0x00040020,0x00000019,0x00000007,0x00000008,0x0004003b, + 0x00000014,0x0000001b,0x00000001,0x0003001e,0x0000001d,0x00000008,0x00040020,0x0000001e, + 0x00000009,0x0000001d,0x0004003b,0x0000001e,0x0000001f,0x00000009,0x00040020,0x00000020, + 0x00000009,0x00000008,0x0003001e,0x00000024,0x00000007,0x00040020,0x00000025,0x00000003, + 0x00000024,0x0004003b,0x00000025,0x00000026,0x00000003,0x00040018,0x00000027,0x00000007, + 0x00000004,0x0003001e,0x00000028,0x00000027,0x00040020,0x00000029,0x00000002,0x00000028, + 0x0004003b,0x00000029,0x0000002a,0x00000002,0x00040020,0x0000002b,0x00000002,0x00000027, + 0x00040015,0x0000002e,0x00000020,0x00000000,0x0004002b,0x0000002e,0x0000002f,0x00000000, + 0x00040020,0x00000030,0x00000007,0x00000006,0x0004002b,0x0000002e,0x00000033,0x00000001, + 0x0004002b,0x00000006,0x00000036,0x00000000,0x0004002b,0x00000006,0x00000037,0x3f800000, + 0x00050036,0x00000002,0x00000004,0x00000000,0x00000003,0x000200f8,0x00000005,0x0004003b, + 0x00000019,0x0000001a,0x00000007,0x0004003d,0x00000007,0x00000010,0x0000000f,0x00050041, + 0x00000011,0x00000012,0x0000000b,0x0000000d,0x0003003e,0x00000012,0x00000010,0x0004003d, + 0x00000008,0x00000016,0x00000015,0x00050041,0x00000017,0x00000018,0x0000000b,0x00000013, + 0x0003003e,0x00000018,0x00000016,0x0004003d,0x00000008,0x0000001c,0x0000001b,0x00050041, + 0x00000020,0x00000021,0x0000001f,0x0000000d,0x0004003d,0x00000008,0x00000022,0x00000021, + 0x00050081,0x00000008,0x00000023,0x0000001c,0x00000022,0x0003003e,0x0000001a,0x00000023, + 0x00050041,0x0000002b,0x0000002c,0x0000002a,0x0000000d,0x0004003d,0x00000027,0x0000002d, + 0x0000002c,0x00050041,0x00000030,0x00000031,0x0000001a,0x0000002f,0x0004003d,0x00000006, + 0x00000032,0x00000031,0x00050041,0x00000030,0x00000034,0x0000001a,0x00000033,0x0004003d, + 0x00000006,0x00000035,0x00000034,0x00070050,0x00000007,0x00000038,0x00000032,0x00000035, + 0x00000036,0x00000037,0x00050091,0x00000007,0x00000039,0x0000002d,0x00000038,0x00050041, + 0x00000011,0x0000003a,0x00000026,0x0000000d,0x0003003e,0x0000003a,0x00000039,0x000100fd, + 0x00010038 + }; + + /** + #version 450 core + + layout(location = 0) out vec4 fColor; + + layout(set = 1, binding = 0) uniform sampler2D sTexture; + + layout(location = 0) in struct { + vec4 Color; + vec2 UV; + } In; + + void main() + { + vec4 process_color = In.Color * texture(sTexture, In.UV.st); + if(process_color.w == 0) + discard; + fColor = process_color; + } + */ + const std::vector fragment_shader = { + 0x07230203,0x00010000,0x0008000b,0x0000002c,0x00000000,0x00020011,0x00000001,0x0006000b, + 0x00000001,0x4c534c47,0x6474732e,0x3035342e,0x00000000,0x0003000e,0x00000000,0x00000001, + 0x0007000f,0x00000004,0x00000004,0x6e69616d,0x00000000,0x0000000d,0x0000002a,0x00030010, + 
0x00000004,0x00000007,0x00030003,0x00000002,0x000001c2,0x00040005,0x00000004,0x6e69616d, + 0x00000000,0x00060005,0x00000009,0x636f7270,0x5f737365,0x6f6c6f63,0x00000072,0x00030005, + 0x0000000b,0x00000000,0x00050006,0x0000000b,0x00000000,0x6f6c6f43,0x00000072,0x00040006, + 0x0000000b,0x00000001,0x00005655,0x00030005,0x0000000d,0x00006e49,0x00050005,0x00000016, + 0x78655473,0x65727574,0x00000000,0x00040005,0x0000002a,0x6c6f4366,0x0000726f,0x00040047, + 0x0000000d,0x0000001e,0x00000000,0x00040047,0x00000016,0x00000022,0x00000001,0x00040047, + 0x00000016,0x00000021,0x00000000,0x00040047,0x0000002a,0x0000001e,0x00000000,0x00020013, + 0x00000002,0x00030021,0x00000003,0x00000002,0x00030016,0x00000006,0x00000020,0x00040017, + 0x00000007,0x00000006,0x00000004,0x00040020,0x00000008,0x00000007,0x00000007,0x00040017, + 0x0000000a,0x00000006,0x00000002,0x0004001e,0x0000000b,0x00000007,0x0000000a,0x00040020, + 0x0000000c,0x00000001,0x0000000b,0x0004003b,0x0000000c,0x0000000d,0x00000001,0x00040015, + 0x0000000e,0x00000020,0x00000001,0x0004002b,0x0000000e,0x0000000f,0x00000000,0x00040020, + 0x00000010,0x00000001,0x00000007,0x00090019,0x00000013,0x00000006,0x00000001,0x00000000, + 0x00000000,0x00000000,0x00000001,0x00000000,0x0003001b,0x00000014,0x00000013,0x00040020, + 0x00000015,0x00000000,0x00000014,0x0004003b,0x00000015,0x00000016,0x00000000,0x0004002b, + 0x0000000e,0x00000018,0x00000001,0x00040020,0x00000019,0x00000001,0x0000000a,0x00040015, + 0x0000001e,0x00000020,0x00000000,0x0004002b,0x0000001e,0x0000001f,0x00000003,0x00040020, + 0x00000020,0x00000007,0x00000006,0x0004002b,0x00000006,0x00000023,0x00000000,0x00020014, + 0x00000024,0x00040020,0x00000029,0x00000003,0x00000007,0x0004003b,0x00000029,0x0000002a, + 0x00000003,0x00050036,0x00000002,0x00000004,0x00000000,0x00000003,0x000200f8,0x00000005, + 0x0004003b,0x00000008,0x00000009,0x00000007,0x00050041,0x00000010,0x00000011,0x0000000d, + 0x0000000f,0x0004003d,0x00000007,0x00000012,0x00000011,0x0004003d,0x00000014,0x00000017, + 0x00000016,0x00050041,0x00000019,0x0000001a,0x0000000d,0x00000018,0x0004003d,0x0000000a, + 0x0000001b,0x0000001a,0x00050057,0x00000007,0x0000001c,0x00000017,0x0000001b,0x00050085, + 0x00000007,0x0000001d,0x00000012,0x0000001c,0x0003003e,0x00000009,0x0000001d,0x00050041, + 0x00000020,0x00000021,0x00000009,0x0000001f,0x0004003d,0x00000006,0x00000022,0x00000021, + 0x000500b4,0x00000024,0x00000025,0x00000022,0x00000023,0x000300f7,0x00000027,0x00000000, + 0x000400fa,0x00000025,0x00000026,0x00000027,0x000200f8,0x00000026,0x000100fc,0x000200f8, + 0x00000027,0x0004003d,0x00000007,0x0000002b,0x00000009,0x0003003e,0x0000002a,0x0000002b, + 0x000100fd,0x00010038 + }; + + void GraphicPipeline::init(Renderer& renderer) + { + VkShaderModuleCreateInfo createInfo{}; + createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; + createInfo.codeSize = vertex_shader.size() * sizeof(uint32_t); + createInfo.pCode = vertex_shader.data(); + VkShaderModule vshader; + if(vkCreateShaderModule(Render_Core::get().getDevice().get(), &createInfo, nullptr, &vshader) != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create a vertex shader module"); + + VkPushConstantRange push_constant; + push_constant.offset = 0; + push_constant.size = sizeof(glm::vec2); + push_constant.stageFlags = VK_SHADER_STAGE_VERTEX_BIT; + + createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; + createInfo.codeSize = fragment_shader.size() * sizeof(uint32_t); + createInfo.pCode = fragment_shader.data(); + VkShaderModule fshader; + 
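+		// Note: VkShaderModuleCreateInfo::codeSize is a byte count while the
+		// embedded SPIR-V blobs above are stored as 32-bit words, hence the
+		// `size() * sizeof(uint32_t)` used above; std::vector<uint32_t> also
+		// provides the 4-byte alignment that pCode requires.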
if(vkCreateShaderModule(Render_Core::get().getDevice().get(), &createInfo, nullptr, &fshader) != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create a fragment shader module"); + + VkPipelineShaderStageCreateInfo vertShaderStageInfo{}; + vertShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + vertShaderStageInfo.stage = VK_SHADER_STAGE_VERTEX_BIT; + vertShaderStageInfo.module = vshader; + vertShaderStageInfo.pName = "main"; + + VkPipelineShaderStageCreateInfo fragShaderStageInfo{}; + fragShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + fragShaderStageInfo.stage = VK_SHADER_STAGE_FRAGMENT_BIT; + fragShaderStageInfo.module = fshader; + fragShaderStageInfo.pName = "main"; + + std::array stages = {vertShaderStageInfo, fragShaderStageInfo}; + + auto bindingDescription = Vertex::getBindingDescription(); + auto attributeDescriptions = Vertex::getAttributeDescriptions(); + + VkPipelineVertexInputStateCreateInfo vertexInputStateCreateInfo{}; + vertexInputStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; + vertexInputStateCreateInfo.vertexBindingDescriptionCount = 1; + vertexInputStateCreateInfo.pVertexBindingDescriptions = &bindingDescription; + vertexInputStateCreateInfo.vertexAttributeDescriptionCount = static_cast(attributeDescriptions.size()); + vertexInputStateCreateInfo.pVertexAttributeDescriptions = attributeDescriptions.data(); + + VkPipelineInputAssemblyStateCreateInfo inputAssembly{}; + inputAssembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; + inputAssembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; + inputAssembly.primitiveRestartEnable = VK_FALSE; + + VkDynamicState states[] = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR }; + + constexpr size_t statesCount = sizeof(states) / sizeof(VkDynamicState); + VkPipelineDynamicStateCreateInfo dynamicStates{}; + dynamicStates.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; + dynamicStates.dynamicStateCount = statesCount; + dynamicStates.pDynamicStates = states; + + VkViewport viewport{}; + viewport.x = 0.0f; + viewport.y = 0.0f; + viewport.width = (float)renderer.getFrameBuffer(0).getWidth(); + viewport.height = (float)renderer.getFrameBuffer(0).getHeight(); + viewport.minDepth = 0.0f; + viewport.maxDepth = 1.0f; + + VkRect2D scissor{}; + scissor.offset = { 0, 0 }; + scissor.extent = { renderer.getFrameBuffer(0).getWidth(), renderer.getFrameBuffer(0).getHeight()}; + + VkPipelineViewportStateCreateInfo viewportState{}; + viewportState.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; + viewportState.viewportCount = 1; + viewportState.pViewports = &viewport; + viewportState.scissorCount = 1; + viewportState.pScissors = &scissor; + + VkPipelineRasterizationStateCreateInfo rasterizer{}; + rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; + rasterizer.depthClampEnable = VK_FALSE; + rasterizer.rasterizerDiscardEnable = VK_FALSE; + rasterizer.polygonMode = VK_POLYGON_MODE_FILL; + rasterizer.lineWidth = 1.0f; + rasterizer.cullMode = VK_CULL_MODE_NONE; + rasterizer.frontFace = VK_FRONT_FACE_CLOCKWISE; + rasterizer.depthBiasEnable = VK_FALSE; + + VkPipelineMultisampleStateCreateInfo multisampling{}; + multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; + multisampling.sampleShadingEnable = VK_FALSE; + multisampling.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT; + + VkPipelineColorBlendAttachmentState 
colorBlendAttachment{}; + colorBlendAttachment.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT; + colorBlendAttachment.blendEnable = VK_TRUE; + colorBlendAttachment.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA; + colorBlendAttachment.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA; + colorBlendAttachment.colorBlendOp = VK_BLEND_OP_ADD; + colorBlendAttachment.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA; + colorBlendAttachment.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA; + colorBlendAttachment.alphaBlendOp = VK_BLEND_OP_ADD; + + VkPipelineColorBlendStateCreateInfo colorBlending{}; + colorBlending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; + colorBlending.logicOpEnable = VK_FALSE; + colorBlending.logicOp = VK_LOGIC_OP_COPY; + colorBlending.attachmentCount = 1; + colorBlending.pAttachments = &colorBlendAttachment; + colorBlending.blendConstants[0] = 1.0f; + colorBlending.blendConstants[1] = 1.0f; + colorBlending.blendConstants[2] = 1.0f; + colorBlending.blendConstants[3] = 1.0f; + + VkDescriptorSetLayout layouts[] = { + renderer.getVertDescriptorSetLayout().get(), + renderer.getFragDescriptorSetLayout().get() + }; + + VkPipelineLayoutCreateInfo pipelineLayoutInfo{}; + pipelineLayoutInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; + pipelineLayoutInfo.setLayoutCount = 2; + pipelineLayoutInfo.pSetLayouts = layouts; + pipelineLayoutInfo.pushConstantRangeCount = 1; + pipelineLayoutInfo.pPushConstantRanges = &push_constant; + + if(vkCreatePipelineLayout(Render_Core::get().getDevice().get(), &pipelineLayoutInfo, nullptr, &_pipelineLayout) != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create a graphics pipeline layout"); + + VkGraphicsPipelineCreateInfo pipelineInfo{}; + pipelineInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; + pipelineInfo.stageCount = stages.size(); + pipelineInfo.pStages = stages.data(); + pipelineInfo.pVertexInputState = &vertexInputStateCreateInfo; + pipelineInfo.pInputAssemblyState = &inputAssembly; + pipelineInfo.pViewportState = &viewportState; + pipelineInfo.pRasterizationState = &rasterizer; + pipelineInfo.pMultisampleState = &multisampling; + pipelineInfo.pColorBlendState = &colorBlending; + pipelineInfo.pDynamicState = &dynamicStates; + pipelineInfo.layout = _pipelineLayout; + pipelineInfo.renderPass = renderer.getRenderPass().get(); + pipelineInfo.subpass = 0; + pipelineInfo.basePipelineHandle = VK_NULL_HANDLE; + + VkResult res = vkCreateGraphicsPipelines(Render_Core::get().getDevice().get(), VK_NULL_HANDLE, 1, &pipelineInfo, nullptr, &_graphicsPipeline); + if(res != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create a graphics pipeline, %s", RCore::verbaliseResultVk(res)); +#ifdef DEBUG + core::error::report(e_kind::message, "Vulkan : created new graphic pipeline"); +#endif + + vkDestroyShaderModule(Render_Core::get().getDevice().get(), fshader, nullptr); + vkDestroyShaderModule(Render_Core::get().getDevice().get(), vshader, nullptr); + } + + void GraphicPipeline::destroy() noexcept + { + vkDestroyPipeline(Render_Core::get().getDevice().get(), _graphicsPipeline, nullptr); + vkDestroyPipelineLayout(Render_Core::get().getDevice().get(), _pipelineLayout, nullptr); + } +} diff --git a/MacroLibX/src/renderer/pipeline/pipeline.h b/MacroLibX/src/renderer/pipeline/pipeline.h new file mode 100644 index 0000000..517ccb9 --- /dev/null +++ 
b/MacroLibX/src/renderer/pipeline/pipeline.h @@ -0,0 +1,40 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* pipeline.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/12/18 21:23:52 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:28:13 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __PIPELINE__ +#define __PIPELINE__ + +#include +#include +#include + +namespace mlx +{ + class GraphicPipeline + { + public: + void init(class Renderer& renderer); + void destroy() noexcept; + + inline void bindPipeline(CmdBuffer& commandBuffer) noexcept { vkCmdBindPipeline(commandBuffer.get(), VK_PIPELINE_BIND_POINT_GRAPHICS, _graphicsPipeline); } + + inline const VkPipeline& getPipeline() const noexcept { return _graphicsPipeline; } + inline const VkPipelineLayout& getPipelineLayout() const noexcept { return _pipelineLayout; } + + private: + VkPipeline _graphicsPipeline = VK_NULL_HANDLE; + VkPipelineCache _cache = VK_NULL_HANDLE; + VkPipelineLayout _pipelineLayout = VK_NULL_HANDLE; + }; +} + +#endif diff --git a/MacroLibX/src/renderer/pixel_put.cpp b/MacroLibX/src/renderer/pixel_put.cpp new file mode 100644 index 0000000..c94d30b --- /dev/null +++ b/MacroLibX/src/renderer/pixel_put.cpp @@ -0,0 +1,67 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* pixel_put.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/03/31 15:14:50 by maldavid #+# #+# */ +/* Updated: 2023/12/23 19:34:30 by kbz_8 ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include +#include + +namespace mlx +{ + void PixelPutPipeline::init(uint32_t width, uint32_t height, Renderer& renderer) noexcept + { + _texture.create(nullptr, width, height, VK_FORMAT_R8G8B8A8_UNORM, "__mlx_pixel_put_pipeline_texture", true); + _texture.setDescriptor(renderer.getFragDescriptorSet().duplicate()); + + _buffer.create(Buffer::kind::dynamic, sizeof(uint32_t) * (width * height), VK_BUFFER_USAGE_TRANSFER_SRC_BIT, "__mlx_pixel_put_pipeline_texture"); + _buffer.mapMem(&_buffer_map); + _cpu_map = std::vector(height * width + 1, 0); + _width = width; + _height = height; + } + + void PixelPutPipeline::setPixel(int x, int y, uint32_t color) noexcept + { + if(x < 0 || y < 0 || x > static_cast(_width) || y > static_cast(_height)) + return; + _cpu_map[(y * _width) + x] = color; + _has_been_modified = true; + } + + void PixelPutPipeline::clear() + { + _cpu_map.assign(_width * _height, 0); + _has_been_modified = true; + } + + void PixelPutPipeline::present() noexcept + { + if(_has_been_modified) + { + std::memcpy(_buffer_map, _cpu_map.data(), sizeof(uint32_t) * _cpu_map.size()); + _texture.copyFromBuffer(_buffer); + _has_been_modified = false; + } + _texture.updateSet(0); + } + + void PixelPutPipeline::render(Renderer& renderer) noexcept + { + _texture.render(renderer, 0, 0); + } + + void PixelPutPipeline::destroy() noexcept + { + _buffer.destroy(); + _texture.destroy(); + } + + PixelPutPipeline::~PixelPutPipeline() {} +} diff --git a/MacroLibX/src/renderer/pixel_put.h b/MacroLibX/src/renderer/pixel_put.h new file mode 100644 index 0000000..eddae2f --- /dev/null +++ b/MacroLibX/src/renderer/pixel_put.h @@ -0,0 +1,51 @@ +/* 
************************************************************************** */
+/*                                                                            */
+/*                                                        :::      ::::::::  */
+/*   pixel_put.h                                        :+:      :+:    :+:  */
+/*                                                    +:+ +:+         +:+    */
+/*   By: maldavid                                   +#+  +:+       +#+       */
+/*                                                +#+#+#+#+#+   +#+          */
+/*   Created: 2023/03/31 13:18:50 by maldavid          #+#    #+#            */
+/*   Updated: 2023/12/14 18:24:58 by maldavid         ###   ########.fr      */
+/*                                                                            */
+/* ************************************************************************** */
+
+#ifndef __MLX_PIXEL_PUT__
+#define __MLX_PIXEL_PUT__
+
+#include
+#include
+#include
+
+namespace mlx
+{
+	class PixelPutPipeline
+	{
+		public:
+			PixelPutPipeline() = default;
+
+			void init(uint32_t width, uint32_t height, class Renderer& renderer) noexcept;
+
+			void setPixel(int x, int y, uint32_t color) noexcept;
+			void present() noexcept;
+			void render(class Renderer& renderer) noexcept;
+			inline VkDescriptorSet getDescriptorSet() noexcept { return _texture.getSet(); }
+
+			void clear();
+			void destroy() noexcept;
+
+			~PixelPutPipeline();
+
+		private:
+			Texture _texture;
+			Buffer _buffer;
+			// kept as a CPU-side copy instead of writing directly to the mapped buffer, to improve performance
+			std::vector<uint32_t> _cpu_map;
+			void* _buffer_map = nullptr;
+			uint32_t _width = 0;
+			uint32_t _height = 0;
+			bool _has_been_modified = true;
+	};
+}
+
+#endif
diff --git a/MacroLibX/src/renderer/renderer.cpp b/MacroLibX/src/renderer/renderer.cpp
new file mode 100644
index 0000000..2a57b20
--- /dev/null
+++ b/MacroLibX/src/renderer/renderer.cpp
@@ -0,0 +1,179 @@
+/* ************************************************************************** */
+/*                                                                            */
+/*                                                        :::      ::::::::  */
+/*   renderer.cpp                                       :+:      :+:    :+:  */
+/*                                                    +:+ +:+         +:+    */
+/*   By: maldavid                                   +#+  +:+       +#+       */
+/*                                                +#+#+#+#+#+   +#+          */
+/*   Created: 2022/12/18 17:25:16 by maldavid          #+#    #+#            */
+/*   Updated: 2023/12/24 16:04:04 by kbz_8            ###   ########.fr      */
+/*                                                                            */
+/* ************************************************************************** */
+
+#include
+#include
+#include
+#include
+
+namespace mlx
+{
+	void Renderer::init(Texture* render_target)
+	{
+		if(render_target == nullptr)
+		{
+			_surface.create(*this);
+			_swapchain.init(this);
+			_pass.init(_swapchain.getImagesFormat(), VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
+			for(std::size_t i = 0; i < _swapchain.getImagesNumber(); i++)
+				_framebuffers.emplace_back().init(_pass, _swapchain.getImage(i));
+		}
+		else
+		{
+			_render_target = render_target;
+			_render_target->transitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
+			_pass.init(_render_target->getFormat(), _render_target->getLayout());
+			_framebuffers.emplace_back().init(_pass, *static_cast<Image*>(_render_target));
+		}
+		_cmd.init();
+
+		for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++)
+			_semaphores[i].init();
+
+		_uniform_buffer.reset(new UBO);
+		#ifdef DEBUG
+			_uniform_buffer->create(this, sizeof(glm::mat4), "__mlx_matrices_uniform_buffer_");
+		#else
+			_uniform_buffer->create(this, sizeof(glm::mat4), nullptr);
+		#endif
+
+		VkDescriptorPoolSize pool_sizes[] = {
+			{ VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 4096 },
+			{ VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 4096 }
+		};
+		_desc_pool.init(2, pool_sizes);
+
+		_vert_layout.init({
+			{0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER}
+		}, VK_SHADER_STAGE_VERTEX_BIT);
+		_frag_layout.init({
+			{0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER}
+		}, VK_SHADER_STAGE_FRAGMENT_BIT);
+
+		_vert_set.init(this, &_desc_pool, &_vert_layout);
+		_frag_set.init(this, &_desc_pool, &_frag_layout);
+
+		_vert_set.writeDescriptor(0, _uniform_buffer.get());
+
+		_pipeline.init(*this);
+
+		_framebufferResized = false;
+	}
+
+	bool Renderer::beginFrame()
+	{
+		auto
device = Render_Core::get().getDevice().get(); + + if(_render_target == nullptr) + { + _cmd.getCmdBuffer(_current_frame_index).waitForExecution(); + VkResult result = vkAcquireNextImageKHR(device, _swapchain(), UINT64_MAX, _semaphores[_current_frame_index].getImageSemaphore(), VK_NULL_HANDLE, &_image_index); + + if(result == VK_ERROR_OUT_OF_DATE_KHR) + { + _swapchain.recreate(); + return false; + } + else if(result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) + core::error::report(e_kind::fatal_error, "Vulkan error : failed to acquire swapchain image"); + } + else + { + _image_index = 0; + if(_render_target->getLayout() != VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) + _render_target->transitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); + } + + _cmd.getCmdBuffer(_current_frame_index).reset(); + _cmd.getCmdBuffer(_current_frame_index).beginRecord(); + auto& fb = _framebuffers[_image_index]; + _pass.begin(getActiveCmdBuffer(), fb); + + _pipeline.bindPipeline(_cmd.getCmdBuffer(_current_frame_index)); + + VkViewport viewport{}; + viewport.x = 0.0f; + viewport.y = 0.0f; + viewport.width = static_cast(fb.getWidth()); + viewport.height = static_cast(fb.getHeight()); + viewport.minDepth = 0.0f; + viewport.maxDepth = 1.0f; + vkCmdSetViewport(_cmd.getCmdBuffer(_current_frame_index).get(), 0, 1, &viewport); + + VkRect2D scissor{}; + scissor.offset = { 0, 0 }; + scissor.extent = { fb.getWidth(), fb.getHeight()}; + vkCmdSetScissor(_cmd.getCmdBuffer(_current_frame_index).get(), 0, 1, &scissor); + + return true; + } + + void Renderer::endFrame() + { + _pass.end(getActiveCmdBuffer()); + _cmd.getCmdBuffer(_current_frame_index).endRecord(); + + if(_render_target == nullptr) + { + _cmd.getCmdBuffer(_current_frame_index).submit(&_semaphores[_current_frame_index]); + + VkSwapchainKHR swapchain = _swapchain(); + VkSemaphore signalSemaphores[] = { _semaphores[_current_frame_index].getRenderImageSemaphore() }; + + VkPresentInfoKHR presentInfo{}; + presentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; + presentInfo.waitSemaphoreCount = 1; + presentInfo.pWaitSemaphores = signalSemaphores; + presentInfo.swapchainCount = 1; + presentInfo.pSwapchains = &swapchain; + presentInfo.pImageIndices = &_image_index; + + VkResult result = vkQueuePresentKHR(Render_Core::get().getQueue().getPresent(), &presentInfo); + + if(result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR || _framebufferResized) + { + _framebufferResized = false; + _swapchain.recreate(); + } + else if(result != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan error : failed to present swap chain image"); + _current_frame_index = (_current_frame_index + 1) % MAX_FRAMES_IN_FLIGHT; + } + else + { + _cmd.getCmdBuffer(_current_frame_index).submitIdle(); + _current_frame_index = 0; + } + } + + void Renderer::destroy() + { + vkDeviceWaitIdle(Render_Core::get().getDevice().get()); + + _pipeline.destroy(); + _uniform_buffer->destroy(); + _vert_layout.destroy(); + _frag_layout.destroy(); + _cmd.destroy(); + _desc_pool.destroy(); + _pass.destroy(); + if(_render_target == nullptr) + { + _swapchain.destroy(); + _surface.destroy(); + } + for(auto& fb : _framebuffers) + fb.destroy(); + for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + _semaphores[i].destroy(); + } +} diff --git a/MacroLibX/src/renderer/renderer.h b/MacroLibX/src/renderer/renderer.h new file mode 100644 index 0000000..fcd820c --- /dev/null +++ b/MacroLibX/src/renderer/renderer.h @@ -0,0 +1,145 @@ +/* ************************************************************************** */ 
+/* */ +/* ::: :::::::: */ +/* renderer.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/12/18 17:14:45 by maldavid #+# #+# */ +/* Updated: 2023/12/22 21:59:15 by kbz_8 ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __RENDERER__ +#define __RENDERER__ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include + +namespace mlx +{ + struct Vertex + { + glm::vec2 pos; + glm::vec4 color; + glm::vec2 uv; + + Vertex(glm::vec2 _pos, glm::vec4 _color, glm::vec2 _uv) : pos(std::move(_pos)), color(std::move(_color)), uv(std::move(_uv)) {} + + static VkVertexInputBindingDescription getBindingDescription() + { + VkVertexInputBindingDescription bindingDescription{}; + bindingDescription.binding = 0; + bindingDescription.stride = sizeof(Vertex); + bindingDescription.inputRate = VK_VERTEX_INPUT_RATE_VERTEX; + + return bindingDescription; + } + + static std::array getAttributeDescriptions() + { + std::array attributeDescriptions; + + attributeDescriptions[0].binding = 0; + attributeDescriptions[0].location = 0; + attributeDescriptions[0].format = VK_FORMAT_R32G32_SFLOAT; + attributeDescriptions[0].offset = offsetof(Vertex, pos); + + attributeDescriptions[1].binding = 0; + attributeDescriptions[1].location = 1; + attributeDescriptions[1].format = VK_FORMAT_R32G32B32A32_SFLOAT; + attributeDescriptions[1].offset = offsetof(Vertex, color); + + attributeDescriptions[2].binding = 0; + attributeDescriptions[2].location = 2; + attributeDescriptions[2].format = VK_FORMAT_R32G32_SFLOAT; + attributeDescriptions[2].offset = offsetof(Vertex, uv); + + return attributeDescriptions; + } + }; + + class Renderer + { + public: + Renderer() = default; + + void init(class Texture* render_target); + + bool beginFrame(); + void endFrame(); + + void destroy(); + + inline class MLX_Window* getWindow() { return _window; } + inline void setWindow(class MLX_Window* window) { _window = window; } + + inline Surface& getSurface() noexcept { return _surface; } + inline CmdPool& getCmdPool() noexcept { return _cmd.getCmdPool(); } + inline UBO* getUniformBuffer() noexcept { return _uniform_buffer.get(); } + inline SwapChain& getSwapChain() noexcept { return _swapchain; } + inline Semaphore& getSemaphore(int i) noexcept { return _semaphores[i]; } + inline RenderPass& getRenderPass() noexcept { return _pass; } + inline GraphicPipeline& getPipeline() noexcept { return _pipeline; } + inline CmdBuffer& getCmdBuffer(int i) noexcept { return _cmd.getCmdBuffer(i); } + inline CmdBuffer& getActiveCmdBuffer() noexcept { return _cmd.getCmdBuffer(_current_frame_index); } + inline FrameBuffer& getFrameBuffer(int i) noexcept { return _framebuffers[i]; } + inline DescriptorSet& getVertDescriptorSet() noexcept { return _vert_set; } + inline DescriptorSet& getFragDescriptorSet() noexcept { return _frag_set; } + inline DescriptorSetLayout& getVertDescriptorSetLayout() noexcept { return _vert_layout; } + inline DescriptorSetLayout& getFragDescriptorSetLayout() noexcept { return _frag_layout; } + inline uint32_t getActiveImageIndex() noexcept { return _current_frame_index; } + inline uint32_t getImageIndex() noexcept { return _image_index; } + + constexpr inline void requireFrameBufferResize() noexcept { _framebufferResized = true; } + + ~Renderer() = default; + + private: + GraphicPipeline _pipeline; + 
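+			// Sketch of the intended call sequence for this class (illustrative
+			// only; window setup and draw recording are omitted):
+			//   Renderer renderer;
+			//   renderer.init(nullptr);      // nullptr => render to the swapchain
+			//   if(renderer.beginFrame())    // false when the swapchain had to be recreated
+			//   {
+			//       /* record draws via renderer.getActiveCmdBuffer() */
+			//       renderer.endFrame();
+			//   }
+			//   renderer.destroy();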
CmdManager _cmd; + RenderPass _pass; + Surface _surface; + SwapChain _swapchain; + std::array _semaphores; + std::vector _framebuffers; + + DescriptorPool _desc_pool; + + DescriptorSetLayout _vert_layout; + DescriptorSetLayout _frag_layout; + + DescriptorSet _vert_set; + DescriptorSet _frag_set; + + std::unique_ptr _uniform_buffer; + + class MLX_Window* _window = nullptr; + class Texture* _render_target = nullptr; + + uint32_t _current_frame_index = 0; + uint32_t _image_index = 0; + bool _framebufferResized = false; + }; +} + +#endif diff --git a/MacroLibX/src/renderer/renderpass/vk_framebuffer.cpp b/MacroLibX/src/renderer/renderpass/vk_framebuffer.cpp new file mode 100644 index 0000000..c96c54d --- /dev/null +++ b/MacroLibX/src/renderer/renderpass/vk_framebuffer.cpp @@ -0,0 +1,49 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_framebuffer.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/06 18:18:06 by maldavid #+# #+# */ +/* Updated: 2024/01/03 13:17:27 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include +#include +#include +#include + +namespace mlx +{ + void FrameBuffer::init(RenderPass& renderpass, Image& image) + { + VkImageView attachments[] = { image.getImageView() }; + + _width = image.getWidth(); + _height = image.getHeight(); + + VkFramebufferCreateInfo framebufferInfo{}; + framebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; + framebufferInfo.renderPass = renderpass.get(); + framebufferInfo.attachmentCount = 1; + framebufferInfo.pAttachments = attachments; + framebufferInfo.width = _width; + framebufferInfo.height = _height; + framebufferInfo.layers = 1; + + VkResult res = vkCreateFramebuffer(Render_Core::get().getDevice().get(), &framebufferInfo, nullptr, &_framebuffer); + if(res != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create a framebuffer, %s", RCore::verbaliseResultVk(res)); + #ifdef DEBUG + core::error::report(e_kind::message, "Vulkan : created new framebuffer"); + #endif + } + + void FrameBuffer::destroy() noexcept + { + vkDestroyFramebuffer(Render_Core::get().getDevice().get(), _framebuffer, nullptr); + _framebuffer = VK_NULL_HANDLE; + } +} diff --git a/MacroLibX/src/renderer/renderpass/vk_framebuffer.h b/MacroLibX/src/renderer/renderpass/vk_framebuffer.h new file mode 100644 index 0000000..b4db929 --- /dev/null +++ b/MacroLibX/src/renderer/renderpass/vk_framebuffer.h @@ -0,0 +1,39 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_framebuffer.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/06 18:19:44 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:28:19 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_VK_FRAMEBUFFER__ +#define __MLX_VK_FRAMEBUFFER__ + +#include +#include + +namespace mlx +{ + class FrameBuffer + { + public: + void init(class RenderPass& renderpass, class Image& image); + void destroy() noexcept; + + inline VkFramebuffer& operator()() noexcept { return _framebuffer; } + inline VkFramebuffer& get() noexcept { return _framebuffer; } + inline uint32_t getWidth() const noexcept { return _width; } + inline uint32_t getHeight() const noexcept { return _height; } + + private: + 
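+			// A FrameBuffer wraps a single color attachment: init() caches the
+			// image's dimensions and creates a VkFramebuffer with attachmentCount = 1
+			// (see vk_framebuffer.cpp above). Renderer::init() builds one per
+			// swapchain image, e.g. fb.init(_pass, _swapchain.getImage(i)).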
VkFramebuffer _framebuffer = VK_NULL_HANDLE; + uint32_t _width = 0; + uint32_t _height = 0; + }; +} + +#endif // __MLX_VK_FRAMEBUFFER__ diff --git a/MacroLibX/src/renderer/renderpass/vk_render_pass.cpp b/MacroLibX/src/renderer/renderpass/vk_render_pass.cpp new file mode 100644 index 0000000..6bd6b6c --- /dev/null +++ b/MacroLibX/src/renderer/renderpass/vk_render_pass.cpp @@ -0,0 +1,113 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_render_pass.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/06 18:21:36 by maldavid #+# #+# */ +/* Updated: 2024/01/03 13:17:56 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include "vk_render_pass.h" +#include +#include +#include + +namespace mlx +{ + static const VkClearValue clearColor = {{{ 0.0f, 0.0f, 0.0f, 1.0f }}}; // wtf, this mess to satisfy a warning + + void RenderPass::init(VkFormat attachement_format, VkImageLayout layout) + { + VkAttachmentDescription colorAttachment{}; + colorAttachment.format = attachement_format; + colorAttachment.samples = VK_SAMPLE_COUNT_1_BIT; + colorAttachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; + colorAttachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE; + colorAttachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; + colorAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; + colorAttachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; + colorAttachment.finalLayout = layout; + + VkAttachmentReference colorAttachmentRef{}; + colorAttachmentRef.attachment = 0; + colorAttachmentRef.layout = (layout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR ? VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL : layout); + + VkSubpassDescription subpass1{}; + subpass1.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; + subpass1.colorAttachmentCount = 1; + subpass1.pColorAttachments = &colorAttachmentRef; + + VkSubpassDescription subpasses[] = { subpass1 }; + + std::vector subpassesDeps; + subpassesDeps.emplace_back(); + subpassesDeps.back().srcSubpass = VK_SUBPASS_EXTERNAL; + subpassesDeps.back().dstSubpass = 0; + subpassesDeps.back().srcStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + subpassesDeps.back().dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; + subpassesDeps.back().srcAccessMask = VK_ACCESS_MEMORY_READ_BIT; + subpassesDeps.back().dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + subpassesDeps.back().dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT; + + subpassesDeps.emplace_back(); + subpassesDeps.back().srcSubpass = 0; + subpassesDeps.back().dstSubpass = VK_SUBPASS_EXTERNAL; + subpassesDeps.back().srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; + subpassesDeps.back().dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + subpassesDeps.back().srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + subpassesDeps.back().dstAccessMask = VK_ACCESS_MEMORY_READ_BIT; + subpassesDeps.back().dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT; + + VkRenderPassCreateInfo renderPassInfo{}; + renderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; + renderPassInfo.attachmentCount = 1; + renderPassInfo.pAttachments = &colorAttachment; + renderPassInfo.subpassCount = sizeof(subpasses) / sizeof(VkSubpassDescription); + renderPassInfo.pSubpasses = subpasses; + renderPassInfo.dependencyCount = static_cast(subpassesDeps.size()); + 
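+		// The two VK_SUBPASS_EXTERNAL dependencies above bracket the single subpass:
+		// the first delays the color-attachment write until the presentation engine
+		// has finished reading the image, the second makes that write visible again
+		// before the image is handed back for presentation.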
renderPassInfo.pDependencies = subpassesDeps.data(); + + VkResult res = vkCreateRenderPass(Render_Core::get().getDevice().get(), &renderPassInfo, nullptr, &_renderPass); + if(res != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create render pass, %s", RCore::verbaliseResultVk(res)); + #ifdef DEBUG + core::error::report(e_kind::message, "Vulkan : created new render pass"); + #endif + } + + void RenderPass::begin(class CmdBuffer& cmd, class FrameBuffer& fb) + { + if(_is_running) + return; + + VkRenderPassBeginInfo renderPassInfo{}; + renderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO; + renderPassInfo.renderPass = _renderPass; + renderPassInfo.framebuffer = fb.get(); + renderPassInfo.renderArea.offset = { 0, 0 }; + renderPassInfo.renderArea.extent = { fb.getWidth(), fb.getHeight() }; + renderPassInfo.clearValueCount = 1; + renderPassInfo.pClearValues = &clearColor; + + vkCmdBeginRenderPass(cmd.get(), &renderPassInfo, VK_SUBPASS_CONTENTS_INLINE); + + _is_running = true; + } + + void RenderPass::end(class CmdBuffer& cmd) + { + if(!_is_running) + return; + vkCmdEndRenderPass(cmd.get()); + _is_running = false; + } + + void RenderPass::destroy() noexcept + { + vkDestroyRenderPass(Render_Core::get().getDevice().get(), _renderPass, nullptr); + _renderPass = VK_NULL_HANDLE; + } +} diff --git a/MacroLibX/src/renderer/renderpass/vk_render_pass.h b/MacroLibX/src/renderer/renderpass/vk_render_pass.h new file mode 100644 index 0000000..68d190e --- /dev/null +++ b/MacroLibX/src/renderer/renderpass/vk_render_pass.h @@ -0,0 +1,39 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_render_pass.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/06 18:22:00 by maldavid #+# #+# */ +/* Updated: 2024/01/03 15:28:25 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_VK_RENDER_PASS__ +#define __MLX_VK_RENDER_PASS__ + +#include +#include + +namespace mlx +{ + class RenderPass + { + public: + void init(VkFormat attachement_format, VkImageLayout layout); + void destroy() noexcept; + + void begin(class CmdBuffer& cmd, class FrameBuffer& fb); + void end(class CmdBuffer& cmd); + + inline VkRenderPass& operator()() noexcept { return _renderPass; } + inline VkRenderPass& get() noexcept { return _renderPass; } + + private: + VkRenderPass _renderPass = VK_NULL_HANDLE; + bool _is_running = false; + }; +} + +#endif // __MLX_VK_RENDER_PASS__ diff --git a/MacroLibX/src/renderer/swapchain/vk_swapchain.cpp b/MacroLibX/src/renderer/swapchain/vk_swapchain.cpp new file mode 100644 index 0000000..ccd0620 --- /dev/null +++ b/MacroLibX/src/renderer/swapchain/vk_swapchain.cpp @@ -0,0 +1,152 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* vk_swapchain.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/06 18:22:28 by maldavid #+# #+# */ +/* Updated: 2024/01/03 13:18:25 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include +#include +#include +#include +#include + +namespace mlx +{ + void SwapChain::init(Renderer* renderer) + { + VkDevice device = Render_Core::get().getDevice().get(); + + _renderer = renderer; + _swapChainSupport = 
querySwapChainSupport(Render_Core::get().getDevice().getPhysicalDevice()); + + VkSurfaceFormatKHR surfaceFormat = renderer->getSurface().chooseSwapSurfaceFormat(_swapChainSupport.formats); + VkPresentModeKHR presentMode = chooseSwapPresentMode(_swapChainSupport.presentModes); + _extent = chooseSwapExtent(_swapChainSupport.capabilities); + + uint32_t imageCount = _swapChainSupport.capabilities.minImageCount + 1; + if(_swapChainSupport.capabilities.maxImageCount > 0 && imageCount > _swapChainSupport.capabilities.maxImageCount) + imageCount = _swapChainSupport.capabilities.maxImageCount; + + Queues::QueueFamilyIndices indices = Render_Core::get().getQueue().findQueueFamilies(Render_Core::get().getDevice().getPhysicalDevice(), renderer->getSurface().get()); + uint32_t queueFamilyIndices[] = { indices.graphicsFamily.value(), indices.presentFamily.value() }; + + VkSwapchainCreateInfoKHR createInfo{}; + createInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR; + createInfo.surface = renderer->getSurface().get(); + createInfo.minImageCount = imageCount; + createInfo.imageFormat = surfaceFormat.format; + createInfo.imageColorSpace = surfaceFormat.colorSpace; + createInfo.imageExtent = _extent; + createInfo.imageArrayLayers = 1; + createInfo.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; + createInfo.preTransform = _swapChainSupport.capabilities.currentTransform; + createInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR; + createInfo.presentMode = presentMode; + createInfo.clipped = VK_TRUE; + createInfo.oldSwapchain = VK_NULL_HANDLE; + if(indices.graphicsFamily != indices.presentFamily) + { + createInfo.imageSharingMode = VK_SHARING_MODE_CONCURRENT; + createInfo.queueFamilyIndexCount = 2; + createInfo.pQueueFamilyIndices = queueFamilyIndices; + } + else + createInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; + + VkResult res = vkCreateSwapchainKHR(device, &createInfo, nullptr, &_swapChain); + if(res != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create the swapchain, %s", RCore::verbaliseResultVk(res)); + + std::vector tmp; + vkGetSwapchainImagesKHR(device, _swapChain, &imageCount, nullptr); + _images.resize(imageCount); + tmp.resize(imageCount); + vkGetSwapchainImagesKHR(device, _swapChain, &imageCount, tmp.data()); + + for(std::size_t i = 0; i < imageCount; i++) + { + _images[i].create(tmp[i], surfaceFormat.format, _extent.width, _extent.height); + _images[i].transitionLayout(VK_IMAGE_LAYOUT_PRESENT_SRC_KHR); + _images[i].createImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); + } + + _swapChainImageFormat = surfaceFormat.format; + #ifdef DEBUG + core::error::report(e_kind::message, "Vulkan : created new swapchain"); + #endif + } + + SwapChain::SwapChainSupportDetails SwapChain::querySwapChainSupport(VkPhysicalDevice device) + { + SwapChain::SwapChainSupportDetails details; + VkSurfaceKHR surface = _renderer->getSurface().get(); + + if(vkGetPhysicalDeviceSurfaceCapabilitiesKHR(device, surface, &details.capabilities) != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : unable to retrieve surface capabilities"); + + uint32_t formatCount = 0; + vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &formatCount, nullptr); + + if(formatCount != 0) + { + details.formats.resize(formatCount); + vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &formatCount, details.formats.data()); + } + + uint32_t presentModeCount; + vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &presentModeCount, nullptr); + + 
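+		// Standard Vulkan enumerate-twice pattern: the first call retrieves the
+		// count, the second call (below) fills the resized vector.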
if(presentModeCount != 0)
+		{
+			details.presentModes.resize(presentModeCount);
+			vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &presentModeCount, details.presentModes.data());
+		}
+
+		return details;
+	}
+
+	VkPresentModeKHR SwapChain::chooseSwapPresentMode([[maybe_unused]] const std::vector<VkPresentModeKHR>& availablePresentModes)
+	{
+		// in the future this may be made configurable, to enable or disable vsync
+		return VK_PRESENT_MODE_IMMEDIATE_KHR;
+	}
+
+	VkExtent2D SwapChain::chooseSwapExtent(const VkSurfaceCapabilitiesKHR& capabilities)
+	{
+		if(capabilities.currentExtent.width != std::numeric_limits<uint32_t>::max())
+			return capabilities.currentExtent;
+
+		int width, height;
+		SDL_Vulkan_GetDrawableSize(_renderer->getWindow()->getNativeWindow(), &width, &height);
+
+		VkExtent2D actualExtent = { static_cast<uint32_t>(width), static_cast<uint32_t>(height) };
+
+		actualExtent.width = std::clamp(actualExtent.width, capabilities.minImageExtent.width, capabilities.maxImageExtent.width);
+		actualExtent.height = std::clamp(actualExtent.height, capabilities.minImageExtent.height, capabilities.maxImageExtent.height);
+
+		return actualExtent;
+	}
+
+	void SwapChain::recreate()
+	{
+		destroy();
+		init(_renderer);
+	}
+
+	void SwapChain::destroy() noexcept
+	{
+		if(_swapChain == VK_NULL_HANDLE)
+			return;
+		vkDeviceWaitIdle(Render_Core::get().getDevice().get());
+		vkDestroySwapchainKHR(Render_Core::get().getDevice().get(), _swapChain, nullptr);
+		_swapChain = VK_NULL_HANDLE;
+		for(Image& img : _images)
+			img.destroyImageView();
+	}
+}
diff --git a/MacroLibX/src/renderer/swapchain/vk_swapchain.h b/MacroLibX/src/renderer/swapchain/vk_swapchain.h
new file mode 100644
index 0000000..5fd8827
--- /dev/null
+++ b/MacroLibX/src/renderer/swapchain/vk_swapchain.h
@@ -0,0 +1,68 @@
+/* ************************************************************************** */
+/*                                                                            */
+/*                                                        :::      ::::::::  */
+/*   vk_swapchain.h                                     :+:      :+:    :+:  */
+/*                                                    +:+ +:+         +:+    */
+/*   By: maldavid                                   +#+  +:+       +#+       */
+/*                                                +#+#+#+#+#+   +#+          */
+/*   Created: 2022/10/06 18:23:27 by maldavid          #+#    #+#            */
+/*   Updated: 2024/01/03 15:28:39 by maldavid         ###   ########.fr      */
+/*                                                                            */
+/* ************************************************************************** */
+
+#ifndef __MLX_VK_SWAPCHAIN__
+#define __MLX_VK_SWAPCHAIN__
+
+#include
+#include
+#include
+#include
+
+namespace mlx
+{
+	class SwapChain
+	{
+		friend class GraphicPipeline;
+		friend class RenderPass;
+		friend class Renderer;
+
+		public:
+			struct SwapChainSupportDetails
+			{
+				VkSurfaceCapabilitiesKHR capabilities;
+				std::vector<VkSurfaceFormatKHR> formats;
+				std::vector<VkPresentModeKHR> presentModes;
+			};
+
+		public:
+			SwapChain() = default;
+
+			void init(class Renderer* renderer);
+			void recreate();
+			void destroy() noexcept;
+
+			SwapChainSupportDetails querySwapChainSupport(VkPhysicalDevice device);
+			VkExtent2D chooseSwapExtent(const VkSurfaceCapabilitiesKHR& capabilities);
+			VkPresentModeKHR chooseSwapPresentMode([[maybe_unused]] const std::vector<VkPresentModeKHR>& availablePresentModes);
+
+			inline VkSwapchainKHR get() noexcept { return _swapChain; }
+			inline VkSwapchainKHR operator()() noexcept { return _swapChain; }
+			inline size_t getImagesNumber() const noexcept { return _images.size(); }
+			inline Image& getImage(std::size_t i) noexcept { return _images[i]; }
+			inline SwapChainSupportDetails getSupport() noexcept { return _swapChainSupport; }
+			inline VkExtent2D getExtent() noexcept { return _extent; }
+			inline VkFormat getImagesFormat() const noexcept { return _swapChainImageFormat; }
+
+			~SwapChain() = default;
+
+		private:
+			SwapChainSupportDetails _swapChainSupport;
+			VkSwapchainKHR _swapChain;
std::vector _images; + VkFormat _swapChainImageFormat; + VkExtent2D _extent; + class Renderer* _renderer = nullptr; + }; +} + +#endif // __MLX_VK_SWAPCHAIN__ diff --git a/MacroLibX/src/renderer/text_library.cpp b/MacroLibX/src/renderer/text_library.cpp new file mode 100644 index 0000000..0a68dd4 --- /dev/null +++ b/MacroLibX/src/renderer/text_library.cpp @@ -0,0 +1,88 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* text_library.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/04/10 11:59:57 by maldavid #+# #+# */ +/* Updated: 2023/12/12 23:03:33 by kbz_8 ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include +#include +#include +#include + +namespace mlx +{ + void TextData::init(std::string text, Font const* font, std::vector vbo_data, std::vector ibo_data) + { + _text = std::move(text); + _font = font; + #ifdef DEBUG + for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + _vbo[i].create(sizeof(Vertex) * vbo_data.size(), static_cast(vbo_data.data()), _text.c_str()); + _ibo.create(sizeof(uint16_t) * ibo_data.size(), ibo_data.data(), _text.c_str()); + #else + for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + _vbo[i].create(sizeof(Vertex) * vbo_data.size(), static_cast(vbo_data.data()), nullptr); + _ibo.create(sizeof(uint16_t) * ibo_data.size(), ibo_data.data(), nullptr); + #endif + } + + void TextData::bind(Renderer& renderer) noexcept + { + _vbo[renderer.getActiveImageIndex()].bind(renderer); + _ibo.bind(renderer); + } + + void TextData::updateVertexData(int frame, std::vector vbo_data) + { + _vbo[frame].setData(sizeof(Vertex) * vbo_data.size(), static_cast(vbo_data.data())); + } + + void TextData::destroy() noexcept + { + for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + _vbo[i].destroy(); + _ibo.destroy(); + } + + std::shared_ptr TextLibrary::getTextData(TextID id) + { + if(!_cache.count(id)) + core::error::report(e_kind::fatal_error, "Text Library : wrong text ID '%d'", id); + return _cache[id]; + } + + TextID TextLibrary::addTextToLibrary(std::shared_ptr text) + { + auto it = std::find_if(_cache.begin(), _cache.end(), [=](const std::pair>& v) + { + return v.second->getText() == text->getText(); + }); + if(it != _cache.end()) + return it->first; + _cache[_current_id] = text; + _current_id++; + return _current_id - 1; + } + + void TextLibrary::removeTextFromLibrary(TextID id) + { + if(_cache.count(id)) + { + _cache[id]->destroy(); + _cache.erase(id); + } + } + + void TextLibrary::clearLibrary() + { + for(auto [id, text] : _cache) + text->destroy(); + _cache.clear(); + } +} diff --git a/MacroLibX/src/renderer/text_library.h b/MacroLibX/src/renderer/text_library.h new file mode 100644 index 0000000..1283a99 --- /dev/null +++ b/MacroLibX/src/renderer/text_library.h @@ -0,0 +1,73 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* text_library.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/04/10 11:52:30 by maldavid #+# #+# */ +/* Updated: 2023/12/12 23:07:13 by kbz_8 ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_TEXT_LIBRARY__ +#define __MLX_TEXT_LIBRARY__ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace mlx +{ + using TextID = uint32_t; + 
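+	// TextLibrary deduplicates by content: addTextToLibrary() (see text_library.cpp
+	// above) scans the cache and returns the existing TextID when a TextData with
+	// the same string is already registered, so repeated draws of identical text
+	// reuse one vertex/index buffer set. Illustrative use (names as declared below):
+	//   TextID id = library.addTextToLibrary(std::make_shared<TextData>(...));
+	//   std::shared_ptr<TextData> data = library.getTextData(id);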
constexpr TextID nulltext = 0; + + class TextData + { + public: + TextData() = default; + + void init(std::string text, Font const* font, std::vector vbo_data, std::vector ibo_data); + void bind(class Renderer& renderer) noexcept; + inline const Font& getFontInUse() const noexcept { return *_font; } + void updateVertexData(int frame, std::vector vbo_data); + inline uint32_t getIBOsize() noexcept { return _ibo.getSize(); } + inline const std::string& getText() const { return _text; } + void destroy() noexcept; + + ~TextData() = default; + + private: + std::array _vbo; + C_IBO _ibo; + std::string _text; + Font const* _font = nullptr; + }; + + class TextLibrary + { + public: + TextLibrary() = default; + + std::shared_ptr getTextData(TextID id); + TextID addTextToLibrary(std::shared_ptr text); + void removeTextFromLibrary(TextID id); + + void clearLibrary(); + + ~TextLibrary() = default; + + private: + std::unordered_map> _cache; + TextID _current_id = 1; + }; +} + +#endif diff --git a/MacroLibX/src/renderer/text_pipeline.cpp b/MacroLibX/src/renderer/text_pipeline.cpp new file mode 100644 index 0000000..173e6f9 --- /dev/null +++ b/MacroLibX/src/renderer/text_pipeline.cpp @@ -0,0 +1,128 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* text_pipeline.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/04/06 16:41:13 by maldavid #+# #+# */ +/* Updated: 2023/12/14 17:49:37 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include +#include + +#include +#include +#include + +#define STB_RECT_PACK_IMPLEMENTATION +#include + +#include + +#define STB_TRUETYPE_IMPLEMENTATION +#define STB_malloc(x, u) ((void)(u), MemManager::malloc(x)) +#define STB_free(x, u) ((void)(u), MemManager::free(x)) +#include + +constexpr const int RANGE = 1024; + +namespace mlx +{ + TextDrawData::TextDrawData(std::string _text, int _color, int _x, int _y) : + x(_x), y(_y), color(_color), + text(std::move(_text)) + {} + + void TextDrawData::init(TextLibrary& library, Font* const font) noexcept + { + std::vector vertexData; + std::vector indexData; + + float stb_x = 0.0f; + float stb_y = 0.0f; + + for(char c : text) + { + if(c < 32) + continue; + + stbtt_aligned_quad q; + stbtt_GetPackedQuad(font->getCharData().data(), RANGE, RANGE, c - 32, &stb_x, &stb_y, &q, 1); + + std::size_t index = vertexData.size(); + + vertexData.emplace_back(glm::vec2{q.x0, q.y0}, glm::vec4{color & 0x00FF0000, color & 0x0000FF00, color & 0x000000FF, 0xFFFFFFFF}, glm::vec2{q.s0, q.t0}); + vertexData.emplace_back(glm::vec2{q.x1, q.y0}, glm::vec4{color & 0x00FF0000, color & 0x0000FF00, color & 0x000000FF, 0xFFFFFFFF}, glm::vec2{q.s1, q.t0}); + vertexData.emplace_back(glm::vec2{q.x1, q.y1}, glm::vec4{color & 0x00FF0000, color & 0x0000FF00, color & 0x000000FF, 0xFFFFFFFF}, glm::vec2{q.s1, q.t1}); + vertexData.emplace_back(glm::vec2{q.x0, q.y1}, glm::vec4{color & 0x00FF0000, color & 0x0000FF00, color & 0x000000FF, 0xFFFFFFFF}, glm::vec2{q.s0, q.t1}); + + indexData.emplace_back(index + 0); + indexData.emplace_back(index + 1); + indexData.emplace_back(index + 2); + indexData.emplace_back(index + 2); + indexData.emplace_back(index + 3); + indexData.emplace_back(index + 0); + } + std::shared_ptr text_data = std::make_shared(); + text_data->init(text, font, std::move(vertexData), std::move(indexData)); + id = library.addTextToLibrary(text_data); + + #ifdef DEBUG + 
core::error::report(e_kind::message, "Text put : registered new text to render"); + #endif + } + + void TextPutPipeline::init(Renderer* renderer) noexcept + { + _renderer = renderer; + _font_in_use = &const_cast(*_font_set.emplace(*_renderer, "default", dogica_ttf, 6.0f).first); + } + + void TextPutPipeline::loadFont(const std::filesystem::path& filepath, float scale) + { + if(filepath.string() == "default") // we're sure it is already loaded + _font_in_use = &const_cast(*_font_set.emplace(*_renderer, "default", dogica_ttf, scale).first); + else + _font_in_use = &const_cast(*_font_set.emplace(*_renderer, filepath, scale).first); + } + + void TextPutPipeline::put(int x, int y, int color, std::string str) + { + auto res = _drawlist.emplace(std::move(str), color, x, y); + if(res.second) + const_cast(*res.first).init(_library, _font_in_use); + else + { + auto text_ptr = _library.getTextData(res.first->id); + if(*_font_in_use != text_ptr->getFontInUse()) + { + _library.removeTextFromLibrary(res.first->id); + const_cast(*res.first).init(_library, _font_in_use); + } + } + } + + void TextPutPipeline::render(std::array& sets) + { + for(auto& draw : _drawlist) + { + std::shared_ptr draw_data = _library.getTextData(draw.id); + const TextureAtlas& atlas = draw_data->getFontInUse().getAtlas(); + draw_data->bind(*_renderer); + atlas.updateSet(0); + sets[1] = const_cast(atlas).getSet(); + vkCmdBindDescriptorSets(_renderer->getActiveCmdBuffer().get(), VK_PIPELINE_BIND_POINT_GRAPHICS, _renderer->getPipeline().getPipelineLayout(), 0, sets.size(), sets.data(), 0, nullptr); + atlas.render(*_renderer, draw.x, draw.y, draw_data->getIBOsize()); + } + } + + void TextPutPipeline::destroy() noexcept + { + _library.clearLibrary(); + _drawlist.clear(); + _font_set.clear(); + } +} diff --git a/MacroLibX/src/renderer/text_pipeline.h b/MacroLibX/src/renderer/text_pipeline.h new file mode 100644 index 0000000..efb5058 --- /dev/null +++ b/MacroLibX/src/renderer/text_pipeline.h @@ -0,0 +1,82 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* text_pipeline.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/04/06 16:24:11 by maldavid #+# #+# */ +/* Updated: 2023/12/14 17:39:51 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_TEXT_PIPELINE__ +#define __MLX_TEXT_PIPELINE__ + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace mlx +{ + struct TextDrawData + { + TextID id; + int x; + int y; + int color; + std::string text; + + TextDrawData(std::string text, int _color, int _x, int _y); + void init(TextLibrary& library, Font* const font) noexcept; + bool operator==(const TextDrawData& rhs) const { return text == rhs.text && x == rhs.x && y == rhs.y && color == rhs.color; } + TextDrawData() = default; + }; +} + +namespace std +{ + template <> + struct hash + { + std::size_t operator()(const mlx::TextDrawData& d) const noexcept + { + std::size_t hash = 0; + mlx::hashCombine(hash, d.text, d.x, d.y, d.color); + return hash; + } + }; +} + +namespace mlx +{ + class TextPutPipeline + { + public: + TextPutPipeline() = default; + + void init(Renderer* renderer) noexcept; + void put(int x, int y, int color, std::string str); + inline void clear() { _drawlist.clear(); _library.clearLibrary(); } + void loadFont(const std::filesystem::path& filepath, float scale); + void 
render(std::array& sets); + void destroy() noexcept; + + ~TextPutPipeline() = default; + + private: + std::unordered_set _font_set; + TextLibrary _library; + std::unordered_set _drawlist; + Renderer* _renderer = nullptr; + Font* _font_in_use = nullptr; + }; +} + +#endif diff --git a/MacroLibX/src/utils/combine_hash.h b/MacroLibX/src/utils/combine_hash.h new file mode 100644 index 0000000..48a33a3 --- /dev/null +++ b/MacroLibX/src/utils/combine_hash.h @@ -0,0 +1,32 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* combine_hash.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/12/14 16:16:06 by maldavid #+# #+# */ +/* Updated: 2023/12/14 16:47:39 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_HASH__ +#define __MLX_HASH__ + +#include +#include + +namespace mlx +{ + inline void hashCombine([[maybe_unused]] std::size_t& seed) noexcept {} + + template + inline void hashCombine(std::size_t& seed, const T& v, Rest... rest) + { + std::hash hasher; + seed ^= hasher(v) + 0x9e3779b9 + (seed << 6) + (seed >> 2); + hashCombine(seed, rest...); + } +} + +#endif diff --git a/MacroLibX/src/utils/dogica_ttf.h b/MacroLibX/src/utils/dogica_ttf.h new file mode 100644 index 0000000..e890ab3 --- /dev/null +++ b/MacroLibX/src/utils/dogica_ttf.h @@ -0,0 +1,2845 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* dogica_ttf.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/04/11 16:20:25 by maldavid #+# #+# */ +/* Updated: 2023/12/14 16:54:12 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_DOGICA_TTF__ +#define __MLX_DOGICA_TTF__ + +#include + +constexpr const unsigned int dogica_ttf_len = 33860; + +static const std::vector dogica_ttf = { + 0x00, 0x01, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x80, 0x00, 0x03, 0x00, 0x60, + 0x46, 0x46, 0x54, 0x4d, 0x8f, 0xe1, 0x5b, 0x60, 0x00, 0x00, 0x84, 0x28, + 0x00, 0x00, 0x00, 0x1c, 0x47, 0x44, 0x45, 0x46, 0x00, 0x15, 0x00, 0x14, + 0x00, 0x00, 0x84, 0x0c, 0x00, 0x00, 0x00, 0x1c, 0x4f, 0x53, 0x2f, 0x32, + 0x56, 0x4b, 0x04, 0x26, 0x00, 0x00, 0x01, 0x68, 0x00, 0x00, 0x00, 0x60, + 0x63, 0x6d, 0x61, 0x70, 0x26, 0x3a, 0x44, 0x67, 0x00, 0x00, 0x03, 0xdc, + 0x00, 0x00, 0x04, 0x08, 0x63, 0x76, 0x74, 0x20, 0x00, 0x1a, 0x01, 0xfb, + 0x00, 0x00, 0x07, 0xe4, 0x00, 0x00, 0x00, 0x04, 0x67, 0x61, 0x73, 0x70, + 0xff, 0xff, 0x00, 0x03, 0x00, 0x00, 0x84, 0x04, 0x00, 0x00, 0x00, 0x08, + 0x67, 0x6c, 0x79, 0x66, 0x77, 0xaa, 0x47, 0xf8, 0x00, 0x00, 0x09, 0xf8, + 0x00, 0x00, 0x3f, 0xc8, 0x68, 0x65, 0x61, 0x64, 0x17, 0x8e, 0x9c, 0xef, + 0x00, 0x00, 0x00, 0xec, 0x00, 0x00, 0x00, 0x36, 0x68, 0x68, 0x65, 0x61, + 0x05, 0xdd, 0x03, 0x89, 0x00, 0x00, 0x01, 0x24, 0x00, 0x00, 0x00, 0x24, + 0x68, 0x6d, 0x74, 0x78, 0x42, 0xcc, 0x37, 0x2e, 0x00, 0x00, 0x01, 0xc8, + 0x00, 0x00, 0x02, 0x14, 0x6c, 0x6f, 0x63, 0x61, 0xaf, 0xca, 0x9f, 0xce, + 0x00, 0x00, 0x07, 0xe8, 0x00, 0x00, 0x02, 0x0e, 0x6d, 0x61, 0x78, 0x70, + 0x01, 0x57, 0x00, 0x6d, 0x00, 0x00, 0x01, 0x48, 0x00, 0x00, 0x00, 0x20, + 0x6e, 0x61, 0x6d, 0x65, 0x8d, 0x81, 0x93, 0xff, 0x00, 0x00, 0x49, 0xc0, + 0x00, 0x00, 0x36, 0x81, 0x70, 0x6f, 0x73, 0x74, 0x73, 0x17, 0x5a, 0x72, + 0x00, 0x00, 0x80, 0x44, 0x00, 0x00, 0x03, 0xbe, 0x00, 0x01, 0x00, 0x00, + 0x00, 
[Several thousand comma-separated hex byte values omitted from this hunk. The
bytes are a binary asset baked into the source as a C-style array, and the dump
is identifiable as an embedded TrueType font: cmap format-4 segment arrays
(endCode values such as 0x20ac and 0x2122, terminated by 0xffff) and quantized
glyph outline data are visible in the raw stream. The byte array continues
beyond this excerpt.]
0xf4, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x13, 0x00, 0x17, 0x00, 0x1b, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, + 0x03, 0x33, 0x15, 0x23, 0x3b, 0x01, 0x15, 0x23, 0x37, 0x35, 0x33, 0x15, + 0x31, 0x33, 0x15, 0x23, 0x31, 0x15, 0x23, 0x35, 0x23, 0x15, 0x23, 0x35, + 0x01, 0x3c, 0x74, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0xc8, 0x02, 0x06, 0x01, 0x34, 0x64, 0xc8, 0xc8, 0x64, + 0x64, 0xc8, 0x64, 0x64, 0x64, 0x64, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, + 0xff, 0x9c, 0x03, 0x20, 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x13, + 0x00, 0x17, 0x00, 0x1b, 0x00, 0x2d, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, + 0x01, 0x33, 0x11, 0x33, 0x35, 0x33, 0x15, 0x23, 0x15, 0x23, 0x35, 0x23, + 0x35, 0x33, 0x35, 0x23, 0x05, 0x35, 0x33, 0x15, 0x01, 0x15, 0x23, 0x35, + 0x01, 0x33, 0x11, 0x33, 0x15, 0x23, 0x15, 0x23, 0x35, 0x23, 0x35, 0x33, + 0x15, 0x33, 0x35, 0x23, 0x35, 0x33, 0x01, 0x3c, 0xfe, 0xc4, 0xc8, 0x64, + 0x64, 0x64, 0x64, 0xc8, 0x64, 0x64, 0x01, 0x90, 0x64, 0xfe, 0xd4, 0x64, + 0x01, 0xf4, 0x64, 0x64, 0x64, 0x64, 0xc8, 0x64, 0x64, 0x64, 0x64, 0xc8, + 0x02, 0x06, 0x01, 0xfc, 0xfe, 0xd4, 0x64, 0xc8, 0xc8, 0xc8, 0x64, 0xc8, + 0x64, 0xc8, 0xc8, 0xfe, 0x70, 0xc8, 0xc8, 0x01, 0x2c, 0xfe, 0xd4, 0x64, + 0x64, 0x64, 0xc8, 0x64, 0x64, 0x64, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, + 0xff, 0x9c, 0x02, 0xbc, 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x13, + 0x00, 0x17, 0x00, 0x1b, 0x00, 0x1f, 0x00, 0x23, 0x00, 0x27, 0x00, 0x2d, + 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x01, 0x33, 0x11, 0x33, 0x35, 0x33, + 0x15, 0x23, 0x15, 0x23, 0x35, 0x23, 0x35, 0x33, 0x35, 0x23, 0x05, 0x35, + 0x33, 0x15, 0x01, 0x15, 0x23, 0x35, 0x01, 0x33, 0x15, 0x23, 0x3b, 0x01, + 0x15, 0x23, 0x31, 0x15, 0x23, 0x35, 0x1d, 0x01, 0x33, 0x15, 0x21, 0x35, + 0x01, 0x3c, 0xfe, 0xc4, 0xc8, 0x64, 0x64, 0x64, 0x64, 0xc8, 0x64, 0x64, + 0x01, 0x90, 0x64, 0xfe, 0xd4, 0x64, 0x01, 0x90, 0x64, 0x64, 0x64, 0x64, + 0x64, 0x64, 0xc8, 0xfe, 0xd4, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0xfe, 0xd4, + 0x64, 0xc8, 0xc8, 0xc8, 0x64, 0xc8, 0x64, 0xc8, 0xc8, 0xfe, 0x70, 0xc8, + 0xc8, 0x01, 0x2c, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xc8, 0x00, + 0x00, 0x0c, 0x00, 0x00, 0xff, 0x9c, 0x03, 0x20, 0x02, 0xbc, 0x00, 0x01, + 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x17, + 0x00, 0x1b, 0x00, 0x2d, 0x00, 0x31, 0x00, 0x35, 0x00, 0x39, 0x00, 0x00, + 0x25, 0x15, 0x31, 0x15, 0x01, 0x33, 0x15, 0x23, 0x3b, 0x01, 0x15, 0x23, + 0x31, 0x15, 0x23, 0x35, 0x17, 0x33, 0x15, 0x23, 0x3b, 0x01, 0x15, 0x23, + 0x37, 0x11, 0x33, 0x11, 0x31, 0x33, 0x35, 0x33, 0x11, 0x33, 0x15, 0x23, + 0x15, 0x23, 0x35, 0x23, 0x35, 0x33, 0x15, 0x33, 0x35, 0x2b, 0x01, 0x15, + 0x23, 0x35, 0x1d, 0x01, 0x23, 0x35, 0x37, 0x23, 0x35, 0x33, 0x01, 0x3c, + 0xfe, 0xc4, 0xc8, 0xc8, 0xc8, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xc8, 0x64, 0x64, + 0x64, 0xc8, 0x64, 0x64, 0x64, 0xc8, 0xc8, 0xc8, 0x02, 0x06, 0x01, 0xfc, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x01, 0x90, 0xfe, 0x70, + 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x64, 0xc8, 0x64, 0x64, 0x64, 0x64, 0x64, + 0xc8, 0xc8, 0x64, 0x64, 0x00, 0x08, 0x00, 0x64, 0xff, 0x9c, 0x02, 0x58, + 0x02, 0x58, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x13, 0x00, 0x17, 0x00, 0x1b, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, + 0x03, 0x33, 0x15, 0x23, 0x17, 0x33, 0x15, 0x23, 0x31, 0x15, 0x23, 0x35, + 0x1d, 0x01, 0x23, 0x35, 0x17, 0x21, 0x15, 0x21, 0x25, 0x35, 0x33, 0x15, + 0x01, 0x3c, 0x10, 0x64, 0x64, 0x64, 0x64, 0x64, 0xc8, 0x64, 0x64, 0x01, + 0x2c, 
0xfe, 0xd4, 0x01, 0x2c, 0x64, 0xc8, 0x02, 0x06, 0x01, 0x98, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0xc8, 0xc8, 0xc8, 0x64, 0x64, 0x64, 0x64, + 0x00, 0x04, 0x00, 0x64, 0x00, 0x00, 0x02, 0xbc, 0x02, 0xbc, 0x00, 0x03, + 0x00, 0x07, 0x00, 0x0b, 0x00, 0x17, 0x00, 0x00, 0x13, 0x33, 0x15, 0x23, + 0x3b, 0x01, 0x15, 0x23, 0x33, 0x21, 0x15, 0x29, 0x01, 0x33, 0x11, 0x23, + 0x35, 0x21, 0x15, 0x23, 0x11, 0x33, 0x15, 0x21, 0x64, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, 0x64, 0xfe, + 0xd4, 0x64, 0x64, 0x01, 0x2c, 0x02, 0xbc, 0x64, 0x64, 0x64, 0xfe, 0x70, + 0x64, 0x64, 0x01, 0x90, 0xc8, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x64, + 0x00, 0x00, 0x02, 0xbc, 0x02, 0xbc, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, + 0x00, 0x17, 0x00, 0x00, 0x01, 0x33, 0x15, 0x23, 0x31, 0x15, 0x23, 0x35, + 0x1d, 0x01, 0x21, 0x35, 0x05, 0x33, 0x11, 0x23, 0x35, 0x21, 0x15, 0x23, + 0x11, 0x33, 0x15, 0x21, 0x02, 0x58, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x01, + 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0x02, 0xbc, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0x70, 0x64, 0x64, 0x01, 0x90, + 0xc8, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x00, 0x02, 0xbc, + 0x02, 0xbc, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x1b, + 0x00, 0x00, 0x13, 0x21, 0x15, 0x29, 0x01, 0x33, 0x15, 0x23, 0x25, 0x15, + 0x23, 0x35, 0x17, 0x21, 0x15, 0x29, 0x01, 0x33, 0x11, 0x23, 0x35, 0x21, + 0x15, 0x23, 0x11, 0x33, 0x15, 0x21, 0x64, 0x01, 0xf4, 0xfe, 0x0c, 0x01, + 0xf4, 0x64, 0x64, 0xfe, 0x0c, 0x64, 0xc8, 0x01, 0x2c, 0xfe, 0xd4, 0x01, + 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0x02, 0xbc, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0x70, 0x64, 0x64, 0x01, 0x90, + 0xc8, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x03, 0x20, + 0x02, 0xbc, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x1b, + 0x00, 0x1f, 0x00, 0x00, 0x13, 0x21, 0x15, 0x29, 0x01, 0x33, 0x15, 0x23, + 0x37, 0x35, 0x33, 0x15, 0x05, 0x15, 0x21, 0x35, 0x05, 0x33, 0x11, 0x23, + 0x35, 0x21, 0x15, 0x23, 0x11, 0x33, 0x15, 0x21, 0x01, 0x15, 0x23, 0x35, + 0x64, 0x01, 0x90, 0xfe, 0x70, 0x01, 0x90, 0xc8, 0xc8, 0xc8, 0x64, 0xfe, + 0xd4, 0xfe, 0xd4, 0x01, 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, + 0x2c, 0xfe, 0x70, 0x64, 0x02, 0xbc, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0xfe, 0x70, 0x64, 0x64, 0x01, 0x90, 0xc8, 0x01, 0x90, + 0x64, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x64, 0x00, 0x00, 0x02, 0x58, + 0x02, 0xbc, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x17, 0x00, 0x00, + 0x13, 0x33, 0x15, 0x23, 0x37, 0x33, 0x15, 0x23, 0x07, 0x21, 0x15, 0x29, + 0x01, 0x33, 0x11, 0x23, 0x35, 0x21, 0x15, 0x23, 0x11, 0x33, 0x15, 0x21, + 0xc8, 0x64, 0x64, 0xc8, 0x64, 0x64, 0xc8, 0x01, 0x2c, 0xfe, 0xd4, 0x01, + 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0x02, 0xbc, 0x64, + 0x64, 0x64, 0x64, 0x64, 0xfe, 0x70, 0x64, 0x64, 0x01, 0x90, 0xc8, 0x00, + 0x00, 0x03, 0x00, 0x64, 0x00, 0x00, 0x02, 0x58, 0x02, 0xbc, 0x00, 0x03, + 0x00, 0x0f, 0x00, 0x13, 0x00, 0x00, 0x13, 0x21, 0x11, 0x29, 0x01, 0x33, + 0x11, 0x23, 0x35, 0x21, 0x15, 0x23, 0x11, 0x33, 0x15, 0x21, 0x03, 0x15, + 0x33, 0x35, 0xc8, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, 0x64, 0xfe, + 0xd4, 0x64, 0x64, 0x01, 0x2c, 0xc8, 0x64, 0x02, 0xbc, 0xfe, 0xd4, 0xfe, + 0x70, 0x64, 0x64, 0x01, 0x90, 0xc8, 0x01, 0x90, 0x64, 0x64, 0x00, 0x00, + 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x02, 0xbc, 0x02, 0xbc, 0x00, 0x13, + 0x00, 0x17, 0x00, 0x1b, 0x00, 0x00, 0x01, 0x21, 0x15, 0x23, 0x15, 0x33, + 0x15, 0x23, 0x15, 0x33, 0x15, 0x21, 0x11, 0x21, 0x35, 0x33, 0x15, 0x33, + 0x35, 
0x23, 0x03, 0x11, 0x23, 0x11, 0x37, 0x35, 0x33, 0x15, 0x01, 0x2c, + 0x01, 0x90, 0xc8, 0xc8, 0xc8, 0xc8, 0xfe, 0xd4, 0xfe, 0xd4, 0x64, 0xc8, + 0x64, 0xc8, 0x64, 0xc8, 0x64, 0x02, 0xbc, 0x64, 0xc8, 0x64, 0xc8, 0x64, + 0x01, 0x2c, 0xc8, 0x64, 0xc8, 0xfe, 0xd4, 0xfe, 0xd4, 0x01, 0x2c, 0xc8, + 0x64, 0x64, 0x00, 0x00, 0x00, 0x06, 0x00, 0x64, 0xff, 0x9c, 0x02, 0x58, + 0x02, 0xbc, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, + 0x00, 0x17, 0x00, 0x00, 0x13, 0x21, 0x15, 0x29, 0x01, 0x33, 0x15, 0x23, + 0x25, 0x11, 0x23, 0x11, 0x13, 0x21, 0x15, 0x21, 0x25, 0x35, 0x33, 0x15, + 0x07, 0x33, 0x15, 0x23, 0xc8, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, + 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, + 0x64, 0x64, 0x64, 0x02, 0xbc, 0x64, 0x64, 0x64, 0xfe, 0x0c, 0x01, 0xf4, + 0xfe, 0x0c, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x00, 0x02, 0x00, 0x64, + 0x00, 0x00, 0x02, 0xbc, 0x02, 0xbc, 0x00, 0x03, 0x00, 0x11, 0x00, 0x00, + 0x13, 0x33, 0x15, 0x23, 0x3b, 0x01, 0x15, 0x21, 0x15, 0x21, 0x15, 0x21, + 0x15, 0x21, 0x15, 0x21, 0x15, 0x21, 0x64, 0x64, 0x64, 0x64, 0x64, 0x01, + 0x90, 0xfe, 0x70, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x90, 0xfe, 0x0c, 0x02, + 0xbc, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x00, 0x03, 0x00, 0x64, + 0x00, 0x00, 0x03, 0x20, 0x02, 0xbc, 0x00, 0x03, 0x00, 0x07, 0x00, 0x13, + 0x00, 0x00, 0x01, 0x33, 0x15, 0x23, 0x31, 0x15, 0x23, 0x35, 0x1d, 0x01, + 0x21, 0x15, 0x21, 0x15, 0x21, 0x15, 0x21, 0x15, 0x21, 0x11, 0x02, 0xbc, + 0x64, 0x64, 0x64, 0xfe, 0x70, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x90, 0xfe, + 0x0c, 0x02, 0xbc, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x01, 0xf4, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x02, 0xbc, + 0x02, 0xbc, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x17, 0x00, 0x00, + 0x13, 0x21, 0x15, 0x29, 0x01, 0x33, 0x15, 0x23, 0x25, 0x15, 0x23, 0x35, + 0x17, 0x21, 0x15, 0x23, 0x15, 0x33, 0x15, 0x23, 0x15, 0x33, 0x15, 0x21, + 0x64, 0x01, 0xf4, 0xfe, 0x0c, 0x01, 0xf4, 0x64, 0x64, 0xfe, 0x0c, 0x64, + 0xc8, 0x01, 0x2c, 0xc8, 0xc8, 0xc8, 0xc8, 0xfe, 0xd4, 0x02, 0xbc, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x00, 0x00, + 0x00, 0x03, 0x00, 0x64, 0x00, 0x00, 0x02, 0x58, 0x02, 0xbc, 0x00, 0x03, + 0x00, 0x07, 0x00, 0x13, 0x00, 0x00, 0x13, 0x33, 0x15, 0x23, 0x37, 0x33, + 0x15, 0x23, 0x05, 0x21, 0x15, 0x21, 0x15, 0x21, 0x15, 0x21, 0x15, 0x21, + 0x15, 0x21, 0xc8, 0x64, 0x64, 0xc8, 0x64, 0x64, 0xfe, 0xd4, 0x01, 0xf4, + 0xfe, 0x70, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x90, 0xfe, 0x0c, 0x02, 0xbc, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x00, 0x00, 0x00, + 0x00, 0x05, 0x00, 0xc8, 0x00, 0x00, 0x01, 0xf4, 0x02, 0xbc, 0x00, 0x01, + 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x13, 0x00, 0x00, 0x25, 0x15, + 0x31, 0x15, 0x03, 0x33, 0x15, 0x23, 0x3b, 0x01, 0x15, 0x23, 0x15, 0x33, + 0x11, 0x33, 0x15, 0x21, 0x35, 0x33, 0x01, 0x3c, 0x74, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, + 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x00, 0x00, 0x05, 0x00, 0xc8, + 0x00, 0x00, 0x01, 0xf4, 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, + 0x00, 0x0b, 0x00, 0x13, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x13, 0x33, + 0x15, 0x23, 0x31, 0x15, 0x23, 0x35, 0x15, 0x33, 0x11, 0x33, 0x15, 0x21, + 0x35, 0x33, 0x01, 0x3c, 0x54, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, + 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, 0x64, 0xc8, 0xfe, 0xd4, + 0x64, 0x64, 0x00, 0x00, 0x00, 0x06, 0x00, 0x64, 0x00, 0x00, 0x02, 0x58, + 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 
0x17, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x03, 0x21, 0x15, 0x29, + 0x01, 0x33, 0x15, 0x23, 0x25, 0x15, 0x23, 0x35, 0x17, 0x33, 0x11, 0x33, + 0x15, 0x21, 0x35, 0x33, 0x01, 0x3c, 0x74, 0x01, 0x2c, 0xfe, 0xd4, 0x01, + 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0xc8, 0x64, 0x64, 0xfe, 0xd4, 0x64, + 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, + 0x70, 0x64, 0x64, 0x00, 0x00, 0x05, 0x00, 0xc8, 0x00, 0x00, 0x01, 0xf4, + 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x13, + 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x03, 0x33, 0x15, 0x23, 0x37, 0x33, + 0x15, 0x23, 0x07, 0x33, 0x11, 0x33, 0x15, 0x21, 0x35, 0x33, 0x01, 0x3c, + 0x74, 0x64, 0x64, 0xc8, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, + 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, 0x64, 0xc8, 0xfe, 0xd4, 0x64, + 0x64, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x64, 0x00, 0x00, 0x02, 0xbc, + 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x15, 0x00, 0x19, 0x00, 0x00, + 0x25, 0x15, 0x31, 0x15, 0x03, 0x21, 0x15, 0x21, 0x11, 0x33, 0x15, 0x23, + 0x15, 0x21, 0x15, 0x21, 0x35, 0x23, 0x35, 0x33, 0x11, 0x23, 0x21, 0x33, + 0x11, 0x23, 0x01, 0x3c, 0xd8, 0x01, 0xf4, 0xfe, 0xd4, 0x64, 0x64, 0x01, + 0x2c, 0xfe, 0x70, 0x64, 0x64, 0x64, 0x01, 0xf4, 0x64, 0x64, 0xc8, 0x02, + 0x06, 0x01, 0xfc, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x64, 0xc8, 0x64, 0x01, + 0x2c, 0xfe, 0x0c, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x03, 0x20, + 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x13, 0x00, 0x1b, 0x00, 0x23, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, + 0x03, 0x21, 0x15, 0x29, 0x01, 0x33, 0x15, 0x23, 0x37, 0x35, 0x33, 0x15, + 0x21, 0x15, 0x23, 0x35, 0x17, 0x33, 0x15, 0x33, 0x15, 0x23, 0x11, 0x23, + 0x13, 0x33, 0x35, 0x33, 0x11, 0x23, 0x35, 0x23, 0x01, 0x3c, 0xd8, 0x01, + 0x90, 0xfe, 0x70, 0x01, 0x90, 0xc8, 0xc8, 0xc8, 0x64, 0xfd, 0x44, 0x64, + 0xc8, 0x64, 0x64, 0x64, 0x64, 0xc8, 0x64, 0x64, 0x64, 0x64, 0xc8, 0x02, + 0x06, 0x01, 0xfc, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x64, 0xfe, 0xd4, 0x01, 0x2c, 0x64, 0xfe, 0x70, 0xc8, 0x00, 0x00, 0x00, + 0x00, 0x08, 0x00, 0x64, 0x00, 0x00, 0x02, 0xbc, 0x02, 0xbc, 0x00, 0x01, + 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x17, + 0x00, 0x1b, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x03, 0x33, 0x15, 0x23, + 0x3b, 0x01, 0x15, 0x23, 0x33, 0x21, 0x15, 0x29, 0x01, 0x33, 0x11, 0x23, + 0x31, 0x15, 0x21, 0x35, 0x31, 0x23, 0x11, 0x33, 0x01, 0x3c, 0xd8, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, + 0x64, 0xfe, 0xd4, 0x64, 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, + 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0x00, 0x00, 0x08, 0x00, 0x64, + 0x00, 0x00, 0x02, 0xbc, 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, + 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x17, 0x00, 0x1b, 0x00, 0x00, + 0x25, 0x15, 0x31, 0x15, 0x01, 0x33, 0x15, 0x23, 0x31, 0x15, 0x23, 0x35, + 0x1d, 0x01, 0x21, 0x35, 0x05, 0x33, 0x11, 0x23, 0x31, 0x15, 0x21, 0x35, + 0x31, 0x23, 0x11, 0x33, 0x01, 0x3c, 0x01, 0x1c, 0x64, 0x64, 0x64, 0xfe, + 0xd4, 0x01, 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0xc8, 0x02, 0x06, + 0x01, 0xfc, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, + 0x64, 0x01, 0x2c, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x02, 0xbc, + 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x13, 0x00, 0x17, 0x00, 0x1b, 0x00, 0x1f, 0x00, 0x00, 0x25, 0x15, + 0x31, 0x15, 0x03, 0x21, 0x15, 0x29, 0x01, 0x33, 0x15, 0x23, 0x25, 0x15, + 0x23, 0x35, 0x17, 0x21, 0x15, 0x29, 0x01, 0x33, 0x11, 0x23, 0x31, 0x15, + 0x21, 
0x35, 0x31, 0x23, 0x11, 0x33, 0x01, 0x3c, 0xd8, 0x01, 0xf4, 0xfe, + 0x0c, 0x01, 0xf4, 0x64, 0x64, 0xfe, 0x0c, 0x64, 0xc8, 0x01, 0x2c, 0xfe, + 0xd4, 0x01, 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0xc8, 0x02, 0x06, + 0x01, 0xfc, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, + 0x64, 0x01, 0x2c, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x00, 0x03, 0x20, + 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x13, 0x00, 0x17, 0x00, 0x1b, 0x00, 0x1f, 0x00, 0x23, 0x00, 0x00, + 0x25, 0x15, 0x31, 0x15, 0x03, 0x21, 0x15, 0x29, 0x01, 0x33, 0x15, 0x23, + 0x37, 0x35, 0x33, 0x15, 0x05, 0x15, 0x21, 0x35, 0x05, 0x33, 0x11, 0x23, + 0x31, 0x15, 0x21, 0x35, 0x31, 0x23, 0x11, 0x33, 0x27, 0x15, 0x23, 0x35, + 0x01, 0x3c, 0xd8, 0x01, 0x90, 0xfe, 0x70, 0x01, 0x90, 0xc8, 0xc8, 0xc8, + 0x64, 0xfe, 0xd4, 0xfe, 0xd4, 0x01, 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, + 0x64, 0x64, 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0xc8, + 0x64, 0x64, 0x00, 0x00, 0x00, 0x08, 0x00, 0x64, 0x00, 0x00, 0x02, 0x58, + 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x13, 0x00, 0x17, 0x00, 0x1b, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, + 0x03, 0x33, 0x15, 0x23, 0x37, 0x33, 0x15, 0x23, 0x07, 0x21, 0x15, 0x29, + 0x01, 0x33, 0x11, 0x23, 0x31, 0x15, 0x21, 0x35, 0x31, 0x23, 0x11, 0x33, + 0x01, 0x3c, 0x74, 0x64, 0x64, 0xc8, 0x64, 0x64, 0xc8, 0x01, 0x2c, 0xfe, + 0xd4, 0x01, 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0xc8, 0x02, 0x06, + 0x01, 0xfc, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, + 0x2c, 0x00, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x64, 0x00, 0x64, 0x02, 0x58, + 0x02, 0x58, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x13, 0x00, 0x17, 0x00, 0x1b, 0x00, 0x1f, 0x00, 0x23, 0x00, 0x27, + 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x03, 0x33, 0x15, 0x23, 0x3b, 0x01, + 0x15, 0x23, 0x3b, 0x01, 0x15, 0x23, 0x37, 0x35, 0x33, 0x15, 0x3d, 0x01, + 0x33, 0x15, 0x07, 0x33, 0x15, 0x23, 0x3b, 0x01, 0x15, 0x23, 0x27, 0x15, + 0x23, 0x35, 0x1d, 0x01, 0x23, 0x35, 0x01, 0x3c, 0xd8, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xc8, 0x64, 0x64, 0x64, + 0x64, 0x64, 0xc8, 0x64, 0x64, 0xc8, 0x02, 0x06, 0x01, 0x98, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xc8, 0x64, 0x64, 0xc8, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x00, 0x0c, 0x00, 0x64, 0x00, 0x00, 0x02, 0xbc, + 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x13, 0x00, 0x17, 0x00, 0x1b, 0x00, 0x1f, 0x00, 0x23, 0x00, 0x27, + 0x00, 0x2b, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x03, 0x21, 0x15, 0x29, + 0x01, 0x33, 0x15, 0x23, 0x37, 0x35, 0x33, 0x15, 0x07, 0x33, 0x11, 0x23, + 0x31, 0x15, 0x21, 0x35, 0x31, 0x23, 0x35, 0x33, 0x07, 0x15, 0x23, 0x35, + 0x37, 0x23, 0x11, 0x33, 0x13, 0x35, 0x33, 0x15, 0x3d, 0x01, 0x33, 0x15, + 0x01, 0x3c, 0x74, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0xfe, 0x70, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x01, 0x90, 0xfe, 0x70, 0xc8, 0xc8, 0xc8, 0x64, 0x64, 0x00, 0x00, 0x00, + 0x00, 0x06, 0x00, 0x64, 0x00, 0x00, 0x02, 0xbc, 0x02, 0xbc, 0x00, 0x01, + 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x00, + 0x25, 0x15, 0x31, 0x15, 0x03, 0x33, 0x15, 0x23, 0x3b, 0x01, 0x11, 0x23, + 0x33, 0x21, 0x15, 0x21, 0x25, 0x11, 0x33, 0x11, 0x01, 0x3c, 0xd8, 0x64, + 0x64, 
0x64, 0x64, 0x64, 0x64, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, + 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0xfe, 0x0c, 0x64, 0x64, 0x01, 0x90, + 0xfe, 0x70, 0x00, 0x00, 0x00, 0x06, 0x00, 0x64, 0x00, 0x00, 0x02, 0xbc, + 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x13, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x01, 0x33, 0x15, 0x23, + 0x31, 0x11, 0x23, 0x19, 0x01, 0x15, 0x21, 0x35, 0x31, 0x23, 0x11, 0x33, + 0x01, 0x3c, 0x01, 0x1c, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0xc8, + 0x02, 0x06, 0x01, 0xfc, 0x64, 0xfe, 0x0c, 0x01, 0xf4, 0xfe, 0x0c, 0x64, + 0x64, 0x01, 0x90, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x02, 0xbc, + 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x13, 0x00, 0x17, 0x00, 0x1b, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, + 0x03, 0x21, 0x15, 0x29, 0x01, 0x33, 0x15, 0x23, 0x25, 0x15, 0x23, 0x35, + 0x17, 0x33, 0x11, 0x23, 0x33, 0x21, 0x15, 0x21, 0x25, 0x11, 0x33, 0x11, + 0x01, 0x3c, 0xd8, 0x01, 0xf4, 0xfe, 0x0c, 0x01, 0xf4, 0x64, 0x64, 0xfe, + 0x0c, 0x64, 0x64, 0x64, 0x64, 0x64, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, + 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, 0x64, 0x64, 0x64, 0xc8, + 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0xfe, 0xd4, 0x00, 0x07, 0x00, 0x64, + 0x00, 0x00, 0x02, 0x58, 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, + 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x17, 0x00, 0x00, 0x25, 0x15, + 0x31, 0x15, 0x03, 0x33, 0x15, 0x23, 0x37, 0x33, 0x15, 0x23, 0x05, 0x33, + 0x11, 0x23, 0x33, 0x21, 0x15, 0x21, 0x25, 0x11, 0x33, 0x11, 0x01, 0x3c, + 0x74, 0x64, 0x64, 0xc8, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x64, 0x01, + 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, + 0x64, 0x64, 0x64, 0xfe, 0x70, 0x64, 0x64, 0x01, 0x90, 0xfe, 0x70, 0x00, + 0x00, 0x09, 0x00, 0x64, 0x00, 0x00, 0x02, 0x58, 0x02, 0xbc, 0x00, 0x01, + 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x17, + 0x00, 0x1b, 0x00, 0x1f, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x03, 0x33, + 0x11, 0x23, 0x3b, 0x01, 0x15, 0x23, 0x3b, 0x01, 0x15, 0x23, 0x37, 0x35, + 0x33, 0x15, 0x3d, 0x01, 0x33, 0x15, 0x03, 0x33, 0x15, 0x23, 0x31, 0x15, + 0x23, 0x35, 0x01, 0x3c, 0xd8, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x64, 0xc8, 0x64, 0x64, 0x64, 0xc8, 0x02, 0x06, 0x01, + 0xfc, 0xfe, 0x70, 0x64, 0xc8, 0xc8, 0x64, 0x64, 0x64, 0xc8, 0xc8, 0x01, + 0x90, 0x64, 0x64, 0x64, 0x00, 0x04, 0x00, 0x64, 0x00, 0x00, 0x01, 0xf4, + 0x02, 0x58, 0x00, 0x01, 0x00, 0x03, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x00, + 0x25, 0x15, 0x31, 0x15, 0x03, 0x33, 0x15, 0x33, 0x15, 0x23, 0x15, 0x33, + 0x15, 0x23, 0x15, 0x23, 0x01, 0x33, 0x15, 0x23, 0x01, 0x3c, 0xd8, 0x64, + 0xc8, 0xc8, 0xc8, 0xc8, 0x64, 0x01, 0x2c, 0x64, 0x64, 0xc8, 0x02, 0x06, + 0x01, 0x98, 0x64, 0x64, 0xc8, 0x64, 0x64, 0x01, 0x90, 0xc8, 0x00, 0x00, + 0x00, 0x06, 0x00, 0x64, 0xff, 0x9c, 0x02, 0x58, 0x02, 0xbc, 0x00, 0x01, + 0x00, 0x03, 0x00, 0x07, 0x00, 0x11, 0x00, 0x15, 0x00, 0x19, 0x00, 0x00, + 0x25, 0x15, 0x31, 0x15, 0x03, 0x33, 0x15, 0x23, 0x3b, 0x01, 0x15, 0x33, + 0x11, 0x23, 0x35, 0x23, 0x35, 0x33, 0x13, 0x15, 0x23, 0x35, 0x03, 0x11, + 0x23, 0x11, 0x01, 0x3c, 0x74, 0xc8, 0xc8, 0xc8, 0x64, 0x64, 0x64, 0xc8, + 0x64, 0x64, 0xc8, 0x64, 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0xc8, + 0xfe, 0xd4, 0xc8, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0xf4, 0xfd, 0x44, + 0x02, 0xbc, 0x00, 0x00, 0x00, 0x04, 0x00, 0x64, 0x00, 0x00, 0x02, 0xbc, + 0x02, 0xbc, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x17, 0x00, 0x00, + 0x13, 0x33, 0x15, 0x23, 0x3b, 0x01, 0x15, 0x23, 0x33, 0x21, 0x15, 0x29, + 0x01, 
0x33, 0x11, 0x23, 0x35, 0x21, 0x15, 0x23, 0x11, 0x33, 0x15, 0x21, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x01, 0x2c, 0xfe, 0xd4, 0x01, + 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0x02, 0xbc, 0x64, + 0x64, 0x64, 0xfe, 0x70, 0x64, 0x64, 0x01, 0x90, 0xc8, 0x00, 0x00, 0x00, + 0x00, 0x04, 0x00, 0x64, 0x00, 0x00, 0x02, 0xbc, 0x02, 0xbc, 0x00, 0x03, + 0x00, 0x07, 0x00, 0x0b, 0x00, 0x17, 0x00, 0x00, 0x01, 0x33, 0x15, 0x23, + 0x31, 0x15, 0x23, 0x35, 0x1d, 0x01, 0x21, 0x35, 0x05, 0x33, 0x11, 0x23, + 0x35, 0x21, 0x15, 0x23, 0x11, 0x33, 0x15, 0x21, 0x02, 0x58, 0x64, 0x64, + 0x64, 0xfe, 0xd4, 0x01, 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, + 0x2c, 0x02, 0xbc, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0x70, + 0x64, 0x64, 0x01, 0x90, 0xc8, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, + 0x00, 0x00, 0x02, 0xbc, 0x02, 0xbc, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, + 0x00, 0x0f, 0x00, 0x1b, 0x00, 0x00, 0x13, 0x21, 0x15, 0x29, 0x01, 0x33, + 0x15, 0x23, 0x25, 0x15, 0x23, 0x35, 0x17, 0x21, 0x15, 0x29, 0x01, 0x33, + 0x11, 0x23, 0x35, 0x21, 0x15, 0x23, 0x11, 0x33, 0x15, 0x21, 0x64, 0x01, + 0xf4, 0xfe, 0x0c, 0x01, 0xf4, 0x64, 0x64, 0xfe, 0x0c, 0x64, 0xc8, 0x01, + 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, + 0x2c, 0x02, 0xbc, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0x70, + 0x64, 0x64, 0x01, 0x90, 0xc8, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, + 0x00, 0x00, 0x03, 0x20, 0x02, 0xbc, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, + 0x00, 0x0f, 0x00, 0x1b, 0x00, 0x1f, 0x00, 0x00, 0x13, 0x21, 0x15, 0x29, + 0x01, 0x33, 0x15, 0x23, 0x37, 0x35, 0x33, 0x15, 0x05, 0x15, 0x21, 0x35, + 0x05, 0x33, 0x11, 0x23, 0x35, 0x21, 0x15, 0x23, 0x11, 0x33, 0x15, 0x21, + 0x01, 0x15, 0x23, 0x35, 0x64, 0x01, 0x90, 0xfe, 0x70, 0x01, 0x90, 0xc8, + 0xc8, 0xc8, 0x64, 0xfe, 0xd4, 0xfe, 0xd4, 0x01, 0x2c, 0x64, 0x64, 0xfe, + 0xd4, 0x64, 0x64, 0x01, 0x2c, 0xfe, 0x70, 0x64, 0x02, 0xbc, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0x70, 0x64, 0x64, 0x01, + 0x90, 0xc8, 0x01, 0x90, 0x64, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x64, + 0x00, 0x00, 0x02, 0x58, 0x02, 0xbc, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, + 0x00, 0x17, 0x00, 0x00, 0x13, 0x33, 0x15, 0x23, 0x37, 0x33, 0x15, 0x23, + 0x07, 0x21, 0x15, 0x29, 0x01, 0x33, 0x11, 0x23, 0x35, 0x21, 0x15, 0x23, + 0x11, 0x33, 0x15, 0x21, 0xc8, 0x64, 0x64, 0xc8, 0x64, 0x64, 0xc8, 0x01, + 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, + 0x2c, 0x02, 0xbc, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0x70, 0x64, 0x64, + 0x01, 0x90, 0xc8, 0x00, 0x00, 0x03, 0x00, 0x64, 0x00, 0x00, 0x02, 0x58, + 0x02, 0xbc, 0x00, 0x03, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x00, 0x13, 0x21, + 0x11, 0x29, 0x01, 0x33, 0x11, 0x23, 0x35, 0x21, 0x15, 0x23, 0x11, 0x33, + 0x15, 0x21, 0x03, 0x15, 0x33, 0x35, 0xc8, 0x01, 0x2c, 0xfe, 0xd4, 0x01, + 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0xc8, 0x64, 0x02, + 0xbc, 0xfe, 0xd4, 0xfe, 0x70, 0x64, 0x64, 0x01, 0x90, 0xc8, 0x01, 0x90, + 0x64, 0x64, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x02, 0xbc, + 0x02, 0x58, 0x00, 0x13, 0x00, 0x17, 0x00, 0x1b, 0x00, 0x00, 0x01, 0x21, + 0x15, 0x23, 0x15, 0x33, 0x15, 0x23, 0x15, 0x33, 0x15, 0x21, 0x35, 0x21, + 0x35, 0x33, 0x15, 0x33, 0x35, 0x23, 0x03, 0x15, 0x23, 0x35, 0x37, 0x35, + 0x33, 0x15, 0x01, 0x2c, 0x01, 0x90, 0xc8, 0xc8, 0xc8, 0xc8, 0xfe, 0xd4, + 0xfe, 0xd4, 0x64, 0xc8, 0x64, 0xc8, 0x64, 0xc8, 0x64, 0x02, 0x58, 0x64, + 0xc8, 0x64, 0x64, 0x64, 0xc8, 0xc8, 0x64, 0xc8, 0xfe, 0xd4, 0xc8, 0xc8, + 0xc8, 0x64, 0x64, 0x00, 0x00, 0x06, 0x00, 0x64, 0xff, 0x9c, 0x02, 0x58, + 0x02, 
0x58, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, + 0x00, 0x17, 0x00, 0x00, 0x13, 0x21, 0x15, 0x29, 0x01, 0x33, 0x15, 0x23, + 0x25, 0x11, 0x23, 0x11, 0x13, 0x21, 0x15, 0x21, 0x25, 0x35, 0x33, 0x15, + 0x07, 0x33, 0x15, 0x23, 0xc8, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, + 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, + 0x64, 0x64, 0x64, 0x02, 0x58, 0x64, 0x64, 0x64, 0xfe, 0x70, 0x01, 0x90, + 0xfe, 0x70, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x00, 0x02, 0x00, 0x64, + 0x00, 0x00, 0x02, 0xbc, 0x02, 0xbc, 0x00, 0x03, 0x00, 0x11, 0x00, 0x00, + 0x13, 0x33, 0x15, 0x23, 0x3b, 0x01, 0x15, 0x21, 0x15, 0x21, 0x15, 0x21, + 0x15, 0x21, 0x15, 0x21, 0x15, 0x21, 0x64, 0x64, 0x64, 0x64, 0x64, 0x01, + 0x90, 0xfe, 0x70, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x90, 0xfe, 0x0c, 0x02, + 0xbc, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x00, 0x03, 0x00, 0x64, + 0x00, 0x00, 0x03, 0x20, 0x02, 0xbc, 0x00, 0x03, 0x00, 0x07, 0x00, 0x13, + 0x00, 0x00, 0x01, 0x33, 0x15, 0x23, 0x31, 0x15, 0x23, 0x35, 0x1d, 0x01, + 0x21, 0x15, 0x21, 0x15, 0x21, 0x15, 0x21, 0x15, 0x21, 0x11, 0x02, 0xbc, + 0x64, 0x64, 0x64, 0xfe, 0x70, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x90, 0xfe, + 0x0c, 0x02, 0xbc, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x01, 0xf4, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x02, 0xbc, + 0x02, 0xbc, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x17, 0x00, 0x00, + 0x13, 0x21, 0x15, 0x29, 0x01, 0x33, 0x15, 0x23, 0x25, 0x15, 0x23, 0x35, + 0x17, 0x21, 0x15, 0x23, 0x15, 0x33, 0x15, 0x23, 0x15, 0x33, 0x15, 0x21, + 0x64, 0x01, 0xf4, 0xfe, 0x0c, 0x01, 0xf4, 0x64, 0x64, 0xfe, 0x0c, 0x64, + 0xc8, 0x01, 0x2c, 0xc8, 0xc8, 0xc8, 0xc8, 0xfe, 0xd4, 0x02, 0xbc, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x00, 0x00, + 0x00, 0x03, 0x00, 0x64, 0x00, 0x00, 0x02, 0x58, 0x02, 0xbc, 0x00, 0x03, + 0x00, 0x07, 0x00, 0x13, 0x00, 0x00, 0x13, 0x33, 0x15, 0x23, 0x37, 0x33, + 0x15, 0x23, 0x05, 0x21, 0x15, 0x21, 0x15, 0x21, 0x15, 0x21, 0x15, 0x21, + 0x15, 0x21, 0xc8, 0x64, 0x64, 0xc8, 0x64, 0x64, 0xfe, 0xd4, 0x01, 0xf4, + 0xfe, 0x70, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x90, 0xfe, 0x0c, 0x02, 0xbc, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x00, 0x00, 0x00, + 0x00, 0x05, 0x00, 0xc8, 0x00, 0x00, 0x01, 0xf4, 0x02, 0xbc, 0x00, 0x01, + 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x13, 0x00, 0x00, 0x25, 0x15, + 0x31, 0x15, 0x03, 0x33, 0x15, 0x23, 0x3b, 0x01, 0x15, 0x23, 0x15, 0x33, + 0x11, 0x33, 0x15, 0x21, 0x35, 0x33, 0x01, 0x3c, 0x74, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, + 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x00, 0x00, 0x05, 0x00, 0xc8, + 0x00, 0x00, 0x01, 0xf4, 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, + 0x00, 0x0b, 0x00, 0x13, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x13, 0x33, + 0x15, 0x23, 0x31, 0x15, 0x23, 0x35, 0x15, 0x33, 0x11, 0x33, 0x15, 0x21, + 0x35, 0x33, 0x01, 0x3c, 0x54, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, + 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, 0x64, 0xc8, 0xfe, 0xd4, + 0x64, 0x64, 0x00, 0x00, 0x00, 0x06, 0x00, 0x64, 0x00, 0x00, 0x02, 0x58, + 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x17, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x03, 0x21, 0x15, 0x29, + 0x01, 0x33, 0x15, 0x23, 0x25, 0x15, 0x23, 0x35, 0x17, 0x33, 0x11, 0x33, + 0x15, 0x21, 0x35, 0x33, 0x01, 0x3c, 0x74, 0x01, 0x2c, 0xfe, 0xd4, 0x01, + 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0xc8, 0x64, 0x64, 0xfe, 0xd4, 0x64, + 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, + 0x70, 
0x64, 0x64, 0x00, 0x00, 0x05, 0x00, 0xc8, 0x00, 0x00, 0x01, 0xf4, + 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x13, + 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x03, 0x33, 0x15, 0x23, 0x37, 0x33, + 0x15, 0x23, 0x07, 0x33, 0x11, 0x33, 0x15, 0x21, 0x35, 0x33, 0x01, 0x3c, + 0x74, 0x64, 0x64, 0xc8, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, + 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, 0x64, 0xc8, 0xfe, 0xd4, 0x64, + 0x64, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x64, 0x00, 0x00, 0x02, 0xbc, + 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x13, 0x00, 0x17, 0x00, 0x1b, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, + 0x13, 0x33, 0x15, 0x23, 0x3b, 0x01, 0x15, 0x23, 0x31, 0x11, 0x23, 0x19, + 0x01, 0x15, 0x21, 0x35, 0x31, 0x23, 0x11, 0x33, 0x31, 0x35, 0x21, 0x15, + 0x01, 0x3c, 0xb8, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, + 0x64, 0x01, 0x2c, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0xc8, 0x64, 0xfe, 0xd4, + 0x01, 0x2c, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0x64, 0x64, 0x00, 0x00, + 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x03, 0x20, 0x02, 0xbc, 0x00, 0x01, + 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x1b, + 0x00, 0x23, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x03, 0x21, 0x15, 0x29, + 0x01, 0x33, 0x15, 0x23, 0x37, 0x35, 0x33, 0x15, 0x21, 0x15, 0x23, 0x35, + 0x17, 0x33, 0x15, 0x33, 0x15, 0x23, 0x11, 0x23, 0x13, 0x33, 0x35, 0x33, + 0x11, 0x23, 0x35, 0x23, 0x01, 0x3c, 0xd8, 0x01, 0x90, 0xfe, 0x70, 0x01, + 0x90, 0xc8, 0xc8, 0xc8, 0x64, 0xfd, 0x44, 0x64, 0xc8, 0x64, 0x64, 0x64, + 0x64, 0xc8, 0x64, 0x64, 0x64, 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x01, + 0x2c, 0x64, 0xfe, 0x70, 0xc8, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x64, + 0x00, 0x00, 0x02, 0xbc, 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, + 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x17, 0x00, 0x1b, 0x00, 0x00, + 0x25, 0x15, 0x31, 0x15, 0x03, 0x33, 0x15, 0x23, 0x3b, 0x01, 0x15, 0x23, + 0x33, 0x21, 0x15, 0x29, 0x01, 0x33, 0x11, 0x23, 0x31, 0x15, 0x21, 0x35, + 0x31, 0x23, 0x11, 0x33, 0x01, 0x3c, 0xd8, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x64, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, + 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, + 0x64, 0x01, 0x2c, 0x00, 0x00, 0x08, 0x00, 0x64, 0x00, 0x00, 0x02, 0xbc, + 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x13, 0x00, 0x17, 0x00, 0x1b, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, + 0x01, 0x33, 0x15, 0x23, 0x31, 0x15, 0x23, 0x35, 0x1d, 0x01, 0x21, 0x35, + 0x05, 0x33, 0x11, 0x23, 0x31, 0x15, 0x21, 0x35, 0x31, 0x23, 0x11, 0x33, + 0x01, 0x3c, 0x01, 0x1c, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x01, 0x2c, 0x64, + 0x64, 0xfe, 0xd4, 0x64, 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0x00, + 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x02, 0xbc, 0x02, 0xbc, 0x00, 0x01, + 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x17, + 0x00, 0x1b, 0x00, 0x1f, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x03, 0x21, + 0x15, 0x29, 0x01, 0x33, 0x15, 0x23, 0x25, 0x15, 0x23, 0x35, 0x17, 0x21, + 0x15, 0x29, 0x01, 0x33, 0x11, 0x23, 0x31, 0x15, 0x21, 0x35, 0x31, 0x23, + 0x11, 0x33, 0x01, 0x3c, 0xd8, 0x01, 0xf4, 0xfe, 0x0c, 0x01, 0xf4, 0x64, + 0x64, 0xfe, 0x0c, 0x64, 0xc8, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, + 0x64, 0xfe, 0xd4, 0x64, 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0x00, + 0x00, 
0x0a, 0x00, 0x00, 0x00, 0x00, 0x03, 0x20, 0x02, 0xbc, 0x00, 0x01, + 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x17, + 0x00, 0x1b, 0x00, 0x1f, 0x00, 0x23, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, + 0x03, 0x21, 0x15, 0x29, 0x01, 0x33, 0x15, 0x23, 0x37, 0x35, 0x33, 0x15, + 0x05, 0x15, 0x21, 0x35, 0x05, 0x33, 0x11, 0x23, 0x31, 0x15, 0x21, 0x35, + 0x31, 0x23, 0x11, 0x33, 0x27, 0x15, 0x23, 0x35, 0x01, 0x3c, 0xd8, 0x01, + 0x90, 0xfe, 0x70, 0x01, 0x90, 0xc8, 0xc8, 0xc8, 0x64, 0xfe, 0xd4, 0xfe, + 0xd4, 0x01, 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x64, 0x64, 0xc8, + 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0xc8, 0x64, 0x64, 0x00, 0x00, + 0x00, 0x08, 0x00, 0x64, 0x00, 0x00, 0x02, 0x58, 0x02, 0xbc, 0x00, 0x01, + 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x17, + 0x00, 0x1b, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x03, 0x33, 0x15, 0x23, + 0x37, 0x33, 0x15, 0x23, 0x07, 0x21, 0x15, 0x29, 0x01, 0x33, 0x11, 0x23, + 0x31, 0x15, 0x21, 0x35, 0x31, 0x23, 0x11, 0x33, 0x01, 0x3c, 0x74, 0x64, + 0x64, 0xc8, 0x64, 0x64, 0xc8, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, + 0x64, 0xfe, 0xd4, 0x64, 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, + 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0x00, 0x00, 0x00, + 0x00, 0x05, 0x00, 0x64, 0x00, 0x64, 0x02, 0x58, 0x02, 0x58, 0x00, 0x01, + 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x00, 0x25, 0x15, + 0x31, 0x15, 0x03, 0x33, 0x15, 0x23, 0x07, 0x21, 0x15, 0x21, 0x17, 0x33, + 0x15, 0x23, 0x01, 0x3c, 0x10, 0x64, 0x64, 0xc8, 0x01, 0xf4, 0xfe, 0x0c, + 0xc8, 0x64, 0x64, 0xc8, 0x02, 0x06, 0x01, 0x98, 0x64, 0x64, 0x64, 0x64, + 0x64, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x64, 0x00, 0x00, 0x02, 0xbc, + 0x02, 0x58, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x13, 0x00, 0x17, 0x00, 0x1b, 0x00, 0x1f, 0x00, 0x23, 0x00, 0x27, + 0x00, 0x2b, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x03, 0x21, 0x15, 0x29, + 0x01, 0x33, 0x15, 0x23, 0x37, 0x35, 0x33, 0x15, 0x07, 0x33, 0x11, 0x23, + 0x31, 0x15, 0x21, 0x35, 0x31, 0x23, 0x35, 0x33, 0x07, 0x15, 0x23, 0x35, + 0x37, 0x23, 0x11, 0x33, 0x13, 0x35, 0x33, 0x15, 0x3d, 0x01, 0x33, 0x15, + 0x01, 0x3c, 0x74, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x64, 0xc8, 0x02, 0x06, 0x01, 0x98, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, + 0x01, 0x2c, 0xfe, 0xd4, 0x64, 0x64, 0x64, 0x64, 0x64, 0x00, 0x00, 0x00, + 0x00, 0x06, 0x00, 0x64, 0x00, 0x00, 0x02, 0xbc, 0x02, 0xbc, 0x00, 0x01, + 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x00, + 0x25, 0x15, 0x31, 0x15, 0x03, 0x33, 0x15, 0x23, 0x3b, 0x01, 0x11, 0x23, + 0x33, 0x21, 0x15, 0x21, 0x25, 0x11, 0x33, 0x11, 0x01, 0x3c, 0xd8, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, + 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0xfe, 0x0c, 0x64, 0x64, 0x01, 0x90, + 0xfe, 0x70, 0x00, 0x00, 0x00, 0x06, 0x00, 0x64, 0x00, 0x00, 0x02, 0xbc, + 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x13, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x01, 0x33, 0x15, 0x23, + 0x31, 0x11, 0x23, 0x19, 0x01, 0x15, 0x21, 0x35, 0x31, 0x23, 0x11, 0x33, + 0x01, 0x3c, 0x01, 0x1c, 0x64, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0xc8, + 0x02, 0x06, 0x01, 0xfc, 0x64, 0xfe, 0x0c, 0x01, 0xf4, 0xfe, 0x0c, 0x64, + 0x64, 0x01, 0x90, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x02, 0xbc, + 0x02, 
0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, + 0x00, 0x13, 0x00, 0x17, 0x00, 0x1b, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, + 0x03, 0x21, 0x15, 0x29, 0x01, 0x33, 0x15, 0x23, 0x25, 0x15, 0x23, 0x35, + 0x17, 0x33, 0x11, 0x23, 0x33, 0x21, 0x15, 0x21, 0x25, 0x11, 0x33, 0x11, + 0x01, 0x3c, 0xd8, 0x01, 0xf4, 0xfe, 0x0c, 0x01, 0xf4, 0x64, 0x64, 0xfe, + 0x0c, 0x64, 0x64, 0x64, 0x64, 0x64, 0x01, 0x2c, 0xfe, 0xd4, 0x01, 0x2c, + 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, 0x64, 0x64, 0x64, 0xc8, + 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x2c, 0xfe, 0xd4, 0x00, 0x07, 0x00, 0x64, + 0x00, 0x00, 0x02, 0x58, 0x02, 0xbc, 0x00, 0x01, 0x00, 0x03, 0x00, 0x07, + 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x17, 0x00, 0x00, 0x25, 0x15, + 0x31, 0x15, 0x03, 0x33, 0x15, 0x23, 0x37, 0x33, 0x15, 0x23, 0x05, 0x33, + 0x11, 0x23, 0x33, 0x21, 0x15, 0x21, 0x25, 0x11, 0x33, 0x11, 0x01, 0x3c, + 0x74, 0x64, 0x64, 0xc8, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x64, 0x01, + 0x2c, 0xfe, 0xd4, 0x01, 0x2c, 0x64, 0xc8, 0x02, 0x06, 0x01, 0xfc, 0x64, + 0x64, 0x64, 0x64, 0xfe, 0x70, 0x64, 0x64, 0x01, 0x90, 0xfe, 0x70, 0x00, + 0x00, 0x09, 0x00, 0x64, 0x00, 0x00, 0x02, 0x58, 0x02, 0xbc, 0x00, 0x01, + 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x17, + 0x00, 0x1b, 0x00, 0x1f, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x13, 0x33, + 0x15, 0x23, 0x31, 0x15, 0x23, 0x35, 0x07, 0x33, 0x15, 0x23, 0x3b, 0x01, + 0x15, 0x23, 0x3b, 0x01, 0x15, 0x23, 0x37, 0x35, 0x33, 0x15, 0x3d, 0x01, + 0x33, 0x15, 0x01, 0x3c, 0x54, 0x64, 0x64, 0x64, 0xc8, 0x64, 0x64, 0x64, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xc8, 0x02, 0x06, 0x01, + 0xfc, 0x64, 0x64, 0x64, 0x64, 0xc8, 0x64, 0xc8, 0xc8, 0x64, 0x64, 0x64, + 0xc8, 0xc8, 0x00, 0x00, 0x00, 0x04, 0x00, 0x64, 0x00, 0x00, 0x01, 0xf4, + 0x02, 0x58, 0x00, 0x01, 0x00, 0x03, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x00, + 0x25, 0x15, 0x31, 0x15, 0x03, 0x33, 0x15, 0x33, 0x15, 0x23, 0x15, 0x33, + 0x15, 0x23, 0x15, 0x23, 0x01, 0x33, 0x15, 0x23, 0x01, 0x3c, 0xd8, 0x64, + 0xc8, 0xc8, 0xc8, 0xc8, 0x64, 0x01, 0x2c, 0x64, 0x64, 0xc8, 0x02, 0x06, + 0x01, 0x98, 0x64, 0x64, 0xc8, 0x64, 0x64, 0x01, 0x90, 0xc8, 0x00, 0x00, + 0x00, 0x09, 0x00, 0x64, 0x00, 0x00, 0x02, 0x58, 0x02, 0xbc, 0x00, 0x01, + 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x13, 0x00, 0x17, + 0x00, 0x1b, 0x00, 0x1f, 0x00, 0x00, 0x25, 0x15, 0x31, 0x15, 0x03, 0x33, + 0x15, 0x23, 0x37, 0x33, 0x15, 0x23, 0x05, 0x33, 0x15, 0x23, 0x3b, 0x01, + 0x15, 0x23, 0x3b, 0x01, 0x15, 0x23, 0x37, 0x35, 0x33, 0x15, 0x3d, 0x01, + 0x33, 0x15, 0x01, 0x3c, 0x74, 0x64, 0x64, 0xc8, 0x64, 0x64, 0xfe, 0xd4, + 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0x64, 0xc8, + 0x02, 0x06, 0x01, 0xfc, 0x64, 0x64, 0x64, 0x64, 0xc8, 0x64, 0xc8, 0xc8, + 0x64, 0x64, 0x64, 0xc8, 0xc8, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, + 0x00, 0x00, 0x02, 0xbc, 0x02, 0xbc, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, + 0x00, 0x0f, 0x00, 0x15, 0x00, 0x00, 0x13, 0x33, 0x15, 0x23, 0x31, 0x15, + 0x23, 0x35, 0x21, 0x33, 0x15, 0x23, 0x3b, 0x01, 0x11, 0x23, 0x03, 0x15, + 0x23, 0x11, 0x23, 0x11, 0x64, 0x64, 0x64, 0x64, 0x01, 0x90, 0xc8, 0xc8, + 0xc8, 0x64, 0x64, 0xc8, 0x64, 0x64, 0x02, 0xbc, 0x64, 0x64, 0x64, 0x64, + 0xfe, 0x0c, 0x01, 0xf4, 0x64, 0xfe, 0x70, 0x01, 0xf4, 0x00, 0x00, 0x00, + 0x00, 0x02, 0x00, 0xc8, 0x00, 0x00, 0x02, 0x58, 0x02, 0xbc, 0x00, 0x05, + 0x00, 0x09, 0x00, 0x00, 0x13, 0x33, 0x11, 0x21, 0x15, 0x21, 0x01, 0x33, + 0x15, 0x23, 0xc8, 0x64, 0x01, 0x2c, 0xfe, 0x70, 0x01, 0x2c, 0x64, 0x64, + 0x02, 0xbc, 0xfd, 0xa8, 0x64, 0x01, 0xf4, 0x64, 0x00, 0x03, 0x00, 0xc8, + 0x00, 
0x00, 0x02, 0x58, 0x02, 0x58, 0x00, 0x03, 0x00, 0x07, 0x00, 0x0b, + 0x00, 0x00, 0x13, 0x33, 0x11, 0x23, 0x3b, 0x01, 0x15, 0x23, 0x13, 0x33, + 0x15, 0x23, 0xc8, 0x64, 0x64, 0x64, 0xc8, 0xc8, 0xc8, 0x64, 0x64, 0x02, + 0x58, 0xfe, 0x0c, 0x64, 0x01, 0xf4, 0x64, 0x00, 0x00, 0x03, 0x00, 0x00, + 0x00, 0x00, 0x03, 0x20, 0x02, 0xbc, 0x00, 0x0b, 0x00, 0x13, 0x00, 0x17, + 0x00, 0x00, 0x11, 0x21, 0x15, 0x23, 0x11, 0x33, 0x15, 0x21, 0x35, 0x33, + 0x11, 0x23, 0x25, 0x21, 0x15, 0x23, 0x11, 0x23, 0x11, 0x23, 0x13, 0x15, + 0x23, 0x35, 0x01, 0x2c, 0x64, 0x64, 0xfe, 0xd4, 0x64, 0x64, 0x01, 0x90, + 0x01, 0x90, 0x64, 0x64, 0xc8, 0xc8, 0xc8, 0x02, 0xbc, 0x64, 0xfe, 0x0c, + 0x64, 0x64, 0x01, 0xf4, 0x64, 0x64, 0xfe, 0x0c, 0x01, 0xf4, 0xfe, 0x0c, + 0x64, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x64, 0xff, 0x9c, 0x02, 0xbc, + 0x02, 0xbc, 0x00, 0x03, 0x00, 0x0b, 0x00, 0x0f, 0x00, 0x19, 0x00, 0x00, + 0x13, 0x33, 0x15, 0x23, 0x33, 0x21, 0x15, 0x23, 0x11, 0x23, 0x11, 0x23, + 0x13, 0x15, 0x23, 0x35, 0x31, 0x21, 0x35, 0x33, 0x11, 0x23, 0x35, 0x33, + 0x11, 0x33, 0xc8, 0x64, 0x64, 0xc8, 0x01, 0x2c, 0x64, 0x64, 0x64, 0x64, + 0x64, 0xfe, 0xd4, 0x64, 0x64, 0xc8, 0x64, 0x02, 0xbc, 0x64, 0x64, 0xfe, + 0x0c, 0x01, 0xf4, 0xfe, 0x0c, 0x64, 0x64, 0x64, 0x01, 0x2c, 0x64, 0xfe, + 0x70, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0xf6, 0x00, 0x01, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x25, 0x00, 0x4c, 0x00, 0x01, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x06, 0x00, 0x80, 0x00, 0x01, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x07, 0x00, 0x97, 0x00, 0x01, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x22, 0x00, 0xe5, 0x00, 0x01, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x06, 0x01, 0x16, 0x00, 0x01, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x0f, 0x01, 0x3d, 0x00, 0x01, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x06, 0x01, 0x5b, 0x00, 0x01, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x00, 0x0d, 0x01, 0x7e, 0x00, 0x01, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x0d, 0x11, 0x39, 0x24, 0x00, 0x00, 0x01, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x1a, 0x35, 0x70, 0x00, 0x03, + 0x00, 0x01, 0x04, 0x09, 0x00, 0x00, 0x00, 0x4a, 0x00, 0x00, 0x00, 0x03, + 0x00, 0x01, 0x04, 0x09, 0x00, 0x01, 0x00, 0x0c, 0x00, 0x72, 0x00, 0x03, + 0x00, 0x01, 0x04, 0x09, 0x00, 0x02, 0x00, 0x0e, 0x00, 0x87, 0x00, 0x03, + 0x00, 0x01, 0x04, 0x09, 0x00, 0x03, 0x00, 0x44, 0x00, 0x9f, 0x00, 0x03, + 0x00, 0x01, 0x04, 0x09, 0x00, 0x04, 0x00, 0x0c, 0x01, 0x08, 0x00, 0x03, + 0x00, 0x01, 0x04, 0x09, 0x00, 0x05, 0x00, 0x1e, 0x01, 0x1d, 0x00, 0x03, + 0x00, 0x01, 0x04, 0x09, 0x00, 0x06, 0x00, 0x0c, 0x01, 0x4d, 0x00, 0x03, + 0x00, 0x01, 0x04, 0x09, 0x00, 0x09, 0x00, 0x1a, 0x01, 0x62, 0x00, 0x03, + 0x00, 0x01, 0x04, 0x09, 0x00, 0x0d, 0x22, 0x72, 0x01, 0x8c, 0x00, 0x03, + 0x00, 0x01, 0x04, 0x09, 0x00, 0x0e, 0x00, 0x34, 0x35, 0x3a, 0x00, 0x4f, + 0x00, 0x46, 0x00, 0x4c, 0x00, 0x20, 0x00, 0x43, 0x00, 0x6f, 0x00, 0x70, + 0x00, 0x79, 0x00, 0x72, 0x00, 0x69, 0x00, 0x67, 0x00, 0x68, 0x00, 0x74, + 0x00, 0x20, 0x00, 0x28, 0x00, 0x63, 0x00, 0x29, 0x00, 0x20, 0x00, 0x32, + 0x00, 0x30, 0x00, 0x32, 0x00, 0x30, 0x00, 0x2c, 0x00, 0x20, 0x00, 0x52, + 0x00, 0x6f, 0x00, 0x62, 0x00, 0x65, 0x00, 0x72, 0x00, 0x74, 0x00, 0x6f, + 0x00, 0x20, 0x00, 0x4d, 0x00, 0x6f, 0x00, 0x63, 0x00, 0x63, 0x00, 0x69, + 0x00, 0x00, 0x4f, 0x46, 0x4c, 0x20, 0x43, 0x6f, 0x70, 0x79, 0x72, 0x69, + 0x67, 0x68, 0x74, 0x20, 0x28, 0x63, 0x29, 0x20, 0x32, 0x30, 0x32, 0x30, + 0x2c, 0x20, 0x52, 0x6f, 0x62, 0x65, 0x72, 0x74, 0x6f, 0x20, 0x4d, 0x6f, + 0x63, 0x63, 0x69, 0x00, 0x00, 0x44, 0x00, 0x6f, 0x00, 0x67, 0x00, 0x69, + 0x00, 
0x63, 0x00, 0x61, 0x00, 0x00, 0x44, 0x6f, 0x67, 0x69, 0x63, 0x61, + 0x00, 0x00, 0x52, 0x00, 0x65, 0x00, 0x67, 0x00, 0x75, 0x00, 0x6c, 0x00, + 0x61, 0x00, 0x72, 0x00, 0x00, 0x52, 0x65, 0x67, 0x75, 0x6c, 0x61, 0x72, + 0x00, 0x00, 0x46, 0x00, 0x6f, 0x00, 0x6e, 0x00, 0x74, 0x00, 0x46, 0x00, + 0x6f, 0x00, 0x72, 0x00, 0x67, 0x00, 0x65, 0x00, 0x20, 0x00, 0x32, 0x00, + 0x2e, 0x00, 0x30, 0x00, 0x20, 0x00, 0x3a, 0x00, 0x20, 0x00, 0x44, 0x00, + 0x6f, 0x00, 0x67, 0x00, 0x69, 0x00, 0x63, 0x00, 0x61, 0x00, 0x20, 0x00, + 0x3a, 0x00, 0x20, 0x00, 0x31, 0x00, 0x33, 0x00, 0x2d, 0x00, 0x35, 0x00, + 0x2d, 0x00, 0x32, 0x00, 0x30, 0x00, 0x32, 0x00, 0x30, 0x00, 0x00, 0x46, + 0x6f, 0x6e, 0x74, 0x46, 0x6f, 0x72, 0x67, 0x65, 0x20, 0x32, 0x2e, 0x30, + 0x20, 0x3a, 0x20, 0x44, 0x6f, 0x67, 0x69, 0x63, 0x61, 0x20, 0x3a, 0x20, + 0x31, 0x33, 0x2d, 0x35, 0x2d, 0x32, 0x30, 0x32, 0x30, 0x00, 0x00, 0x44, + 0x00, 0x6f, 0x00, 0x67, 0x00, 0x69, 0x00, 0x63, 0x00, 0x61, 0x00, 0x00, + 0x44, 0x6f, 0x67, 0x69, 0x63, 0x61, 0x00, 0x00, 0x56, 0x00, 0x65, 0x00, + 0x72, 0x00, 0x73, 0x00, 0x69, 0x00, 0x6f, 0x00, 0x6e, 0x00, 0x20, 0x00, + 0x30, 0x00, 0x30, 0x00, 0x31, 0x00, 0x2e, 0x00, 0x30, 0x00, 0x31, 0x00, + 0x32, 0x00, 0x00, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x30, + 0x30, 0x31, 0x2e, 0x30, 0x31, 0x32, 0x00, 0x00, 0x44, 0x00, 0x6f, 0x00, + 0x67, 0x00, 0x69, 0x00, 0x63, 0x00, 0x61, 0x00, 0x00, 0x44, 0x6f, 0x67, + 0x69, 0x63, 0x61, 0x00, 0x00, 0x52, 0x00, 0x6f, 0x00, 0x62, 0x00, 0x65, + 0x00, 0x72, 0x00, 0x74, 0x00, 0x6f, 0x00, 0x20, 0x00, 0x4d, 0x00, 0x6f, + 0x00, 0x63, 0x00, 0x63, 0x00, 0x69, 0x00, 0x00, 0x52, 0x6f, 0x62, 0x65, + 0x72, 0x74, 0x6f, 0x20, 0x4d, 0x6f, 0x63, 0x63, 0x69, 0x00, 0x00, 0x43, + 0x00, 0x6f, 0x00, 0x70, 0x00, 0x79, 0x00, 0x72, 0x00, 0x69, 0x00, 0x67, + 0x00, 0x68, 0x00, 0x74, 0x00, 0x20, 0x00, 0x28, 0x00, 0x63, 0x00, 0x29, + 0x00, 0x20, 0x00, 0x32, 0x00, 0x30, 0x00, 0x32, 0x00, 0x30, 0x00, 0x2c, + 0x00, 0x20, 0x00, 0x52, 0x00, 0x6f, 0x00, 0x62, 0x00, 0x65, 0x00, 0x72, + 0x00, 0x74, 0x00, 0x6f, 0x00, 0x20, 0x00, 0x28, 0x00, 0x3c, 0x00, 0x70, + 0x00, 0x61, 0x00, 0x74, 0x00, 0x72, 0x00, 0x65, 0x00, 0x6f, 0x00, 0x6e, + 0x00, 0x2e, 0x00, 0x63, 0x00, 0x6f, 0x00, 0x6d, 0x00, 0x2f, 0x00, 0x72, + 0x00, 0x6d, 0x00, 0x6f, 0x00, 0x63, 0x00, 0x63, 0x00, 0x69, 0x00, 0x20, + 0x00, 0x7c, 0x00, 0x20, 0x00, 0x64, 0x00, 0x61, 0x00, 0x2e, 0x00, 0x69, + 0x00, 0x6f, 0x00, 0x73, 0x00, 0x6f, 0x00, 0x6e, 0x00, 0x6f, 0x00, 0x72, + 0x00, 0x40, 0x00, 0x67, 0x00, 0x6d, 0x00, 0x61, 0x00, 0x69, 0x00, 0x6c, + 0x00, 0x2e, 0x00, 0x63, 0x00, 0x6f, 0x00, 0x6d, 0x00, 0x3e, 0x00, 0x29, + 0x00, 0x2c, 0x00, 0x0a, 0x00, 0x77, 0x00, 0x69, 0x00, 0x74, 0x00, 0x68, + 0x00, 0x20, 0x00, 0x52, 0x00, 0x65, 0x00, 0x73, 0x00, 0x65, 0x00, 0x72, + 0x00, 0x76, 0x00, 0x65, 0x00, 0x64, 0x00, 0x20, 0x00, 0x46, 0x00, 0x6f, + 0x00, 0x6e, 0x00, 0x74, 0x00, 0x20, 0x00, 0x4e, 0x00, 0x61, 0x00, 0x6d, + 0x00, 0x65, 0x00, 0x20, 0x00, 0x44, 0x00, 0x6f, 0x00, 0x67, 0x00, 0x69, + 0x00, 0x63, 0x00, 0x61, 0x00, 0x2e, 0x00, 0x0a, 0x00, 0x0a, 0x00, 0x54, + 0x00, 0x68, 0x00, 0x69, 0x00, 0x73, 0x00, 0x20, 0x00, 0x46, 0x00, 0x6f, + 0x00, 0x6e, 0x00, 0x74, 0x00, 0x20, 0x00, 0x53, 0x00, 0x6f, 0x00, 0x66, + 0x00, 0x74, 0x00, 0x77, 0x00, 0x61, 0x00, 0x72, 0x00, 0x65, 0x00, 0x20, + 0x00, 0x69, 0x00, 0x73, 0x00, 0x20, 0x00, 0x6c, 0x00, 0x69, 0x00, 0x63, + 0x00, 0x65, 0x00, 0x6e, 0x00, 0x73, 0x00, 0x65, 0x00, 0x64, 0x00, 0x20, + 0x00, 0x75, 0x00, 0x6e, 0x00, 0x64, 0x00, 0x65, 0x00, 0x72, 0x00, 0x20, + 0x00, 0x74, 0x00, 0x68, 0x00, 0x65, 0x00, 0x20, 0x00, 0x53, 0x00, 0x49, + 0x00, 
[... embedded font data abridged for readability: the remainder of this byte
array carries the license metadata shipped inside the dogica font. Decoded,
the bytes contain the full text of the SIL Open Font License, Version 1.1
(26 February 2007), with its preamble, definitions, permission & conditions
1) through 5), termination clause and disclaimer, stored first as UTF-16 and
then repeated in ASCII, preceded by the copyright line "Copyright (c) 2020,
Roberto (<patreon.com/rmocci | da.iosonor@gmail.com>), with Reserved Font
Name Dogica." The license text is also available with a FAQ at
http://scripts.sil.org/OFL ...]
0x20, 0x6f, 0x74, 0x68, 0x65, 0x72, 0x20, 0x6c, 0x69, 0x63, 0x65, + 0x6e, 0x73, 0x65, 0x2e, 0x20, 0x54, 0x68, 0x65, 0x20, 0x72, 0x65, 0x71, + 0x75, 0x69, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x20, 0x66, 0x6f, 0x72, + 0x20, 0x66, 0x6f, 0x6e, 0x74, 0x73, 0x20, 0x74, 0x6f, 0x0a, 0x72, 0x65, + 0x6d, 0x61, 0x69, 0x6e, 0x20, 0x75, 0x6e, 0x64, 0x65, 0x72, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x20, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x20, + 0x64, 0x6f, 0x65, 0x73, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x61, 0x70, 0x70, + 0x6c, 0x79, 0x20, 0x74, 0x6f, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x64, 0x6f, + 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x64, 0x0a, 0x75, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x46, 0x6f, 0x6e, 0x74, 0x20, 0x53, 0x6f, 0x66, 0x74, 0x77, 0x61, + 0x72, 0x65, 0x2e, 0x0a, 0x0a, 0x54, 0x45, 0x52, 0x4d, 0x49, 0x4e, 0x41, + 0x54, 0x49, 0x4f, 0x4e, 0x0a, 0x54, 0x68, 0x69, 0x73, 0x20, 0x6c, 0x69, + 0x63, 0x65, 0x6e, 0x73, 0x65, 0x20, 0x62, 0x65, 0x63, 0x6f, 0x6d, 0x65, + 0x73, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x69, 0x66, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x6f, + 0x66, 0x20, 0x74, 0x68, 0x65, 0x20, 0x61, 0x62, 0x6f, 0x76, 0x65, 0x20, + 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x20, 0x61, + 0x72, 0x65, 0x0a, 0x6e, 0x6f, 0x74, 0x20, 0x6d, 0x65, 0x74, 0x2e, 0x0a, + 0x0a, 0x44, 0x49, 0x53, 0x43, 0x4c, 0x41, 0x49, 0x4d, 0x45, 0x52, 0x0a, + 0x54, 0x48, 0x45, 0x20, 0x46, 0x4f, 0x4e, 0x54, 0x20, 0x53, 0x4f, 0x46, + 0x54, 0x57, 0x41, 0x52, 0x45, 0x20, 0x49, 0x53, 0x20, 0x50, 0x52, 0x4f, + 0x56, 0x49, 0x44, 0x45, 0x44, 0x20, 0x22, 0x41, 0x53, 0x20, 0x49, 0x53, + 0x22, 0x2c, 0x20, 0x57, 0x49, 0x54, 0x48, 0x4f, 0x55, 0x54, 0x20, 0x57, + 0x41, 0x52, 0x52, 0x41, 0x4e, 0x54, 0x59, 0x20, 0x4f, 0x46, 0x20, 0x41, + 0x4e, 0x59, 0x20, 0x4b, 0x49, 0x4e, 0x44, 0x2c, 0x0a, 0x45, 0x58, 0x50, + 0x52, 0x45, 0x53, 0x53, 0x20, 0x4f, 0x52, 0x20, 0x49, 0x4d, 0x50, 0x4c, + 0x49, 0x45, 0x44, 0x2c, 0x20, 0x49, 0x4e, 0x43, 0x4c, 0x55, 0x44, 0x49, + 0x4e, 0x47, 0x20, 0x42, 0x55, 0x54, 0x20, 0x4e, 0x4f, 0x54, 0x20, 0x4c, + 0x49, 0x4d, 0x49, 0x54, 0x45, 0x44, 0x20, 0x54, 0x4f, 0x20, 0x41, 0x4e, + 0x59, 0x20, 0x57, 0x41, 0x52, 0x52, 0x41, 0x4e, 0x54, 0x49, 0x45, 0x53, + 0x20, 0x4f, 0x46, 0x0a, 0x4d, 0x45, 0x52, 0x43, 0x48, 0x41, 0x4e, 0x54, + 0x41, 0x42, 0x49, 0x4c, 0x49, 0x54, 0x59, 0x2c, 0x20, 0x46, 0x49, 0x54, + 0x4e, 0x45, 0x53, 0x53, 0x20, 0x46, 0x4f, 0x52, 0x20, 0x41, 0x20, 0x50, + 0x41, 0x52, 0x54, 0x49, 0x43, 0x55, 0x4c, 0x41, 0x52, 0x20, 0x50, 0x55, + 0x52, 0x50, 0x4f, 0x53, 0x45, 0x20, 0x41, 0x4e, 0x44, 0x20, 0x4e, 0x4f, + 0x4e, 0x49, 0x4e, 0x46, 0x52, 0x49, 0x4e, 0x47, 0x45, 0x4d, 0x45, 0x4e, + 0x54, 0x0a, 0x4f, 0x46, 0x20, 0x43, 0x4f, 0x50, 0x59, 0x52, 0x49, 0x47, + 0x48, 0x54, 0x2c, 0x20, 0x50, 0x41, 0x54, 0x45, 0x4e, 0x54, 0x2c, 0x20, + 0x54, 0x52, 0x41, 0x44, 0x45, 0x4d, 0x41, 0x52, 0x4b, 0x2c, 0x20, 0x4f, + 0x52, 0x20, 0x4f, 0x54, 0x48, 0x45, 0x52, 0x20, 0x52, 0x49, 0x47, 0x48, + 0x54, 0x2e, 0x20, 0x49, 0x4e, 0x20, 0x4e, 0x4f, 0x20, 0x45, 0x56, 0x45, + 0x4e, 0x54, 0x20, 0x53, 0x48, 0x41, 0x4c, 0x4c, 0x20, 0x54, 0x48, 0x45, + 0x0a, 0x43, 0x4f, 0x50, 0x59, 0x52, 0x49, 0x47, 0x48, 0x54, 0x20, 0x48, + 0x4f, 0x4c, 0x44, 0x45, 0x52, 0x20, 0x42, 0x45, 0x20, 0x4c, 0x49, 0x41, + 0x42, 0x4c, 0x45, 0x20, 0x46, 0x4f, 0x52, 0x20, 0x41, 0x4e, 0x59, 0x20, + 0x43, 0x4c, 0x41, 0x49, 0x4d, 0x2c, 0x20, 0x44, 0x41, 0x4d, 0x41, 0x47, + 0x45, 0x53, 0x20, 0x4f, 0x52, 0x20, 0x4f, 0x54, 0x48, 0x45, 0x52, 0x20, + 0x4c, 
0x49, 0x41, 0x42, 0x49, 0x4c, 0x49, 0x54, 0x59, 0x2c, 0x0a, 0x49, + 0x4e, 0x43, 0x4c, 0x55, 0x44, 0x49, 0x4e, 0x47, 0x20, 0x41, 0x4e, 0x59, + 0x20, 0x47, 0x45, 0x4e, 0x45, 0x52, 0x41, 0x4c, 0x2c, 0x20, 0x53, 0x50, + 0x45, 0x43, 0x49, 0x41, 0x4c, 0x2c, 0x20, 0x49, 0x4e, 0x44, 0x49, 0x52, + 0x45, 0x43, 0x54, 0x2c, 0x20, 0x49, 0x4e, 0x43, 0x49, 0x44, 0x45, 0x4e, + 0x54, 0x41, 0x4c, 0x2c, 0x20, 0x4f, 0x52, 0x20, 0x43, 0x4f, 0x4e, 0x53, + 0x45, 0x51, 0x55, 0x45, 0x4e, 0x54, 0x49, 0x41, 0x4c, 0x0a, 0x44, 0x41, + 0x4d, 0x41, 0x47, 0x45, 0x53, 0x2c, 0x20, 0x57, 0x48, 0x45, 0x54, 0x48, + 0x45, 0x52, 0x20, 0x49, 0x4e, 0x20, 0x41, 0x4e, 0x20, 0x41, 0x43, 0x54, + 0x49, 0x4f, 0x4e, 0x20, 0x4f, 0x46, 0x20, 0x43, 0x4f, 0x4e, 0x54, 0x52, + 0x41, 0x43, 0x54, 0x2c, 0x20, 0x54, 0x4f, 0x52, 0x54, 0x20, 0x4f, 0x52, + 0x20, 0x4f, 0x54, 0x48, 0x45, 0x52, 0x57, 0x49, 0x53, 0x45, 0x2c, 0x20, + 0x41, 0x52, 0x49, 0x53, 0x49, 0x4e, 0x47, 0x0a, 0x46, 0x52, 0x4f, 0x4d, + 0x2c, 0x20, 0x4f, 0x55, 0x54, 0x20, 0x4f, 0x46, 0x20, 0x54, 0x48, 0x45, + 0x20, 0x55, 0x53, 0x45, 0x20, 0x4f, 0x52, 0x20, 0x49, 0x4e, 0x41, 0x42, + 0x49, 0x4c, 0x49, 0x54, 0x59, 0x20, 0x54, 0x4f, 0x20, 0x55, 0x53, 0x45, + 0x20, 0x54, 0x48, 0x45, 0x20, 0x46, 0x4f, 0x4e, 0x54, 0x20, 0x53, 0x4f, + 0x46, 0x54, 0x57, 0x41, 0x52, 0x45, 0x20, 0x4f, 0x52, 0x20, 0x46, 0x52, + 0x4f, 0x4d, 0x0a, 0x4f, 0x54, 0x48, 0x45, 0x52, 0x20, 0x44, 0x45, 0x41, + 0x4c, 0x49, 0x4e, 0x47, 0x53, 0x20, 0x49, 0x4e, 0x20, 0x54, 0x48, 0x45, + 0x20, 0x46, 0x4f, 0x4e, 0x54, 0x20, 0x53, 0x4f, 0x46, 0x54, 0x57, 0x41, + 0x52, 0x45, 0x2e, 0x00, 0x00, 0x68, 0x00, 0x74, 0x00, 0x74, 0x00, 0x70, + 0x00, 0x3a, 0x00, 0x2f, 0x00, 0x2f, 0x00, 0x73, 0x00, 0x63, 0x00, 0x72, + 0x00, 0x69, 0x00, 0x70, 0x00, 0x74, 0x00, 0x73, 0x00, 0x2e, 0x00, 0x73, + 0x00, 0x69, 0x00, 0x6c, 0x00, 0x2e, 0x00, 0x6f, 0x00, 0x72, 0x00, 0x67, + 0x00, 0x2f, 0x00, 0x4f, 0x00, 0x46, 0x00, 0x4c, 0x00, 0x00, 0x68, 0x74, + 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x73, + 0x2e, 0x73, 0x69, 0x6c, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x4f, 0x46, 0x4c, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x32, 0x00, 0x64, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x06, 0x00, 0x00, 0x01, 0x02, 0x01, 0x03, 0x01, 0x04, 0x01, 0x05, + 0x01, 0x06, 0x01, 0x07, 0x01, 0x08, 0x01, 0x09, 0x01, 0x0a, 0x01, 0x0b, + 0x01, 0x0c, 0x01, 0x0d, 0x01, 0x0e, 0x01, 0x0f, 0x01, 0x10, 0x01, 0x11, + 0x01, 0x12, 0x01, 0x13, 0x01, 0x14, 0x01, 0x15, 0x01, 0x16, 0x01, 0x17, + 0x01, 0x18, 0x01, 0x19, 0x01, 0x1a, 0x01, 0x1b, 0x01, 0x1c, 0x01, 0x1d, + 0x01, 0x1e, 0x01, 0x1f, 0x01, 0x20, 0x01, 0x21, 0x00, 0x03, 0x00, 0x04, + 0x00, 0x05, 0x00, 0x06, 0x00, 0x07, 0x00, 0x08, 0x00, 0x09, 0x00, 0x0a, + 0x00, 0x0b, 0x00, 0x0c, 0x00, 0x0d, 0x00, 0x0e, 0x00, 0x0f, 0x00, 0x10, + 0x00, 0x11, 0x00, 0x12, 0x00, 0x13, 0x00, 0x14, 0x00, 0x15, 0x00, 0x16, + 0x00, 0x17, 0x00, 0x18, 0x00, 0x19, 0x00, 0x1a, 0x00, 0x1b, 0x00, 0x1c, + 0x00, 0x1d, 0x00, 0x1e, 0x00, 0x1f, 0x00, 0x20, 0x00, 0x21, 0x00, 0x22, + 0x00, 0x23, 0x00, 0x24, 0x00, 0x25, 0x00, 0x26, 0x00, 0x27, 0x00, 0x28, + 0x00, 0x29, 0x00, 0x2a, 0x00, 0x2b, 0x00, 0x2c, 0x00, 0x2d, 0x00, 0x2e, + 0x00, 0x2f, 0x00, 0x30, 0x00, 0x31, 0x00, 0x32, 0x00, 0x33, 0x00, 0x34, + 0x00, 0x35, 0x00, 0x36, 0x00, 0x37, 0x00, 0x38, 0x00, 0x39, 0x00, 0x3a, + 0x00, 0x3b, 0x00, 0x3c, 0x00, 0x3d, 0x00, 0x3e, 0x00, 0x3f, 0x00, 0x40, + 0x00, 0x41, 0x00, 0x42, 0x00, 0x43, 0x00, 0x44, 0x00, 0x45, 0x00, 0x46, + 0x00, 
0x47, 0x00, 0x48, 0x00, 0x49, 0x00, 0x4a, 0x00, 0x4b, 0x00, 0x4c, + 0x00, 0x4d, 0x00, 0x4e, 0x00, 0x4f, 0x00, 0x50, 0x00, 0x51, 0x00, 0x52, + 0x00, 0x53, 0x00, 0x54, 0x00, 0x55, 0x00, 0x56, 0x00, 0x57, 0x00, 0x58, + 0x00, 0x59, 0x00, 0x5a, 0x00, 0x5b, 0x00, 0x5c, 0x00, 0x5d, 0x00, 0x5e, + 0x00, 0x5f, 0x00, 0x60, 0x00, 0x61, 0x01, 0x22, 0x01, 0x23, 0x01, 0x24, + 0x00, 0xc4, 0x00, 0xa6, 0x00, 0xc5, 0x00, 0xab, 0x00, 0x82, 0x00, 0xc2, + 0x00, 0xd8, 0x00, 0xc6, 0x00, 0xe4, 0x00, 0xbe, 0x00, 0xb0, 0x01, 0x25, + 0x00, 0xe6, 0x01, 0x26, 0x01, 0x27, 0x00, 0xb6, 0x00, 0xb7, 0x00, 0xb4, + 0x00, 0xb5, 0x00, 0x87, 0x00, 0xb2, 0x00, 0xb3, 0x00, 0xd9, 0x00, 0x8c, + 0x00, 0xe5, 0x00, 0xbf, 0x00, 0xb1, 0x01, 0x28, 0x00, 0xe7, 0x00, 0xbb, + 0x01, 0x29, 0x00, 0xa3, 0x00, 0x84, 0x00, 0x85, 0x00, 0xbd, 0x00, 0x96, + 0x00, 0xe8, 0x00, 0x86, 0x00, 0x8e, 0x00, 0x8b, 0x00, 0x9d, 0x00, 0xa9, + 0x00, 0xa4, 0x01, 0x2a, 0x00, 0x8a, 0x00, 0xda, 0x00, 0x83, 0x00, 0x93, + 0x01, 0x2b, 0x01, 0x2c, 0x00, 0x8d, 0x00, 0x97, 0x00, 0x88, 0x00, 0xc3, + 0x00, 0xde, 0x01, 0x2d, 0x00, 0x9e, 0x00, 0xaa, 0x00, 0xf5, 0x00, 0xf4, + 0x00, 0xf6, 0x00, 0xa2, 0x00, 0xad, 0x00, 0xc9, 0x00, 0xc7, 0x00, 0xae, + 0x00, 0x62, 0x00, 0x63, 0x00, 0x90, 0x00, 0x64, 0x00, 0xcb, 0x00, 0x65, + 0x00, 0xc8, 0x00, 0xca, 0x00, 0xcf, 0x00, 0xcc, 0x00, 0xcd, 0x00, 0xce, + 0x00, 0xe9, 0x00, 0x66, 0x00, 0xd3, 0x00, 0xd0, 0x00, 0xd1, 0x00, 0xaf, + 0x00, 0x67, 0x00, 0xf0, 0x00, 0x91, 0x00, 0xd6, 0x00, 0xd4, 0x00, 0xd5, + 0x00, 0x68, 0x00, 0xeb, 0x00, 0xed, 0x00, 0x89, 0x00, 0x6a, 0x00, 0x69, + 0x00, 0x6b, 0x00, 0x6d, 0x00, 0x6c, 0x00, 0x6e, 0x00, 0xa0, 0x00, 0x6f, + 0x00, 0x71, 0x00, 0x70, 0x00, 0x72, 0x00, 0x73, 0x00, 0x75, 0x00, 0x74, + 0x00, 0x76, 0x00, 0x77, 0x00, 0xea, 0x00, 0x78, 0x00, 0x7a, 0x00, 0x79, + 0x00, 0x7b, 0x00, 0x7d, 0x00, 0x7c, 0x00, 0xb8, 0x00, 0xa1, 0x00, 0x7f, + 0x00, 0x7e, 0x00, 0x80, 0x00, 0x81, 0x00, 0xec, 0x00, 0xee, 0x00, 0xba, + 0x01, 0x2e, 0x01, 0x2f, 0x01, 0x30, 0x01, 0x31, 0x01, 0x32, 0x07, 0x75, + 0x6e, 0x69, 0x30, 0x30, 0x30, 0x30, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, + 0x30, 0x44, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, 0x30, 0x31, 0x07, 0x75, + 0x6e, 0x69, 0x30, 0x30, 0x30, 0x32, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, + 0x30, 0x33, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, 0x30, 0x34, 0x07, 0x75, + 0x6e, 0x69, 0x30, 0x30, 0x30, 0x35, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, + 0x30, 0x36, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, 0x30, 0x37, 0x07, 0x75, + 0x6e, 0x69, 0x30, 0x30, 0x30, 0x38, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, + 0x30, 0x39, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, 0x30, 0x41, 0x07, 0x75, + 0x6e, 0x69, 0x30, 0x30, 0x30, 0x42, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, + 0x30, 0x43, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, 0x30, 0x45, 0x07, 0x75, + 0x6e, 0x69, 0x30, 0x30, 0x30, 0x46, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, + 0x31, 0x30, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, 0x31, 0x31, 0x07, 0x75, + 0x6e, 0x69, 0x30, 0x30, 0x31, 0x32, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, + 0x31, 0x33, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, 0x31, 0x34, 0x07, 0x75, + 0x6e, 0x69, 0x30, 0x30, 0x31, 0x35, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, + 0x31, 0x36, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, 0x31, 0x37, 0x07, 0x75, + 0x6e, 0x69, 0x30, 0x30, 0x31, 0x38, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, + 0x31, 0x39, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, 0x31, 0x41, 0x07, 0x75, + 0x6e, 0x69, 0x30, 0x30, 0x31, 0x42, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, + 0x31, 0x43, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, 0x31, 0x44, 0x07, 0x75, + 0x6e, 0x69, 0x30, 0x30, 0x31, 0x45, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, + 0x31, 
0x46, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, 0x37, 0x46, 0x04, 0x45, + 0x75, 0x72, 0x6f, 0x0a, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x65, 0x2e, 0x31, + 0x32, 0x39, 0x0a, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x65, 0x2e, 0x31, 0x34, + 0x31, 0x0a, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x65, 0x2e, 0x31, 0x34, 0x33, + 0x0a, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x65, 0x2e, 0x31, 0x34, 0x34, 0x0a, + 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x65, 0x2e, 0x31, 0x35, 0x37, 0x07, 0x75, + 0x6e, 0x69, 0x30, 0x30, 0x41, 0x30, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, + 0x41, 0x44, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, 0x42, 0x32, 0x07, 0x75, + 0x6e, 0x69, 0x30, 0x30, 0x42, 0x33, 0x07, 0x75, 0x6e, 0x69, 0x30, 0x30, + 0x42, 0x39, 0x0b, 0x6e, 0x61, 0x70, 0x6f, 0x73, 0x74, 0x72, 0x6f, 0x70, + 0x68, 0x65, 0x04, 0x4c, 0x64, 0x6f, 0x74, 0x04, 0x6c, 0x64, 0x6f, 0x74, + 0x02, 0x49, 0x4a, 0x0a, 0x4e, 0x61, 0x6d, 0x65, 0x4d, 0x65, 0x2e, 0x32, + 0x36, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xff, 0xff, 0x00, 0x02, + 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x14, + 0x00, 0x04, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, + 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, + 0xda, 0x92, 0x03, 0xf7, 0x00, 0x00, 0x00, 0x00, 0xda, 0x6d, 0x86, 0xe0, + 0x00, 0x00, 0x00, 0x00, 0xda, 0xe1, 0xd0, 0x88 +}; + +#endif diff --git a/MacroLibX/src/utils/icon_mlx.h b/MacroLibX/src/utils/icon_mlx.h new file mode 100644 index 0000000..a5450fb --- /dev/null +++ b/MacroLibX/src/utils/icon_mlx.h @@ -0,0 +1,525 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* icon_mlx.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/11/25 11:23:16 by maldavid #+# #+# */ +/* Updated: 2023/11/25 11:55:51 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __ICON_MLX__ +#define __ICON_MLX__ + +#include + +constexpr const int logo_mlx_height = 125; +constexpr const int logo_mlx_width = 125; +constexpr const int logo_mlx_size = logo_mlx_height * logo_mlx_width * 4; + +inline static uint8_t logo_mlx[logo_mlx_size] = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
[... RGBA pixel rows of the 125x125 MLX logo elided: transparent padding (0x00, 0x00, 0x00, 0x00)
framing the image, with the logo itself drawn in opaque dark grey (0x37, 0x37, 0x37, 0xff),
light grey (0x6b, 0x6b, 0x6b, 0xff) and black (0x00, 0x00, 0x00, 0xff) pixels ...]
0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, + 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 
0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, + 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 
0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 
0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 
0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 
0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 
0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 
0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 
0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 
0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 
0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 
0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, + 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, + 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, + 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 
0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, + 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, + 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, + 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 
+	/* ... remainder of the auto-generated embedded image data: rows of RGBA pixel
+	   bytes using the grayscale values 0x00, 0x37, 0x6b and 0xe4, each with 0xff
+	   alpha (generated bitmap dump, not hand-edited) ... */
0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 
0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 
0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 
0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 
0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 
0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, + 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 
0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, + 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 
0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, + 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe3, 0xe3, 0xe3, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, + 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 
0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, + 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 
0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, + 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 
0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, + 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 
0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, + 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, + 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 
0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, + 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, + 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, + 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 
+ [ ... remainder of added file: a long RGBA byte array (grayscale values 0x00, 0x37, 0x6b, 0xe4, each with 0xff alpha), apparently an embedded icon bitmap; the repeating pixel data is omitted here ... ]
0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, + 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, + 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 
0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, + 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, + 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 
0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, + 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, + 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, + 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 
0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, + 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, + 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, + 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 
0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, + 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, + 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 
0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, + 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, + 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 
0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, + 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, + 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 
0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, + 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 
0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, + 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, + 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, + 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 
0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, + 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0xe4, 0xe4, 0xe4, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, + 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, + 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, + 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 
0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, + 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, + 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, + 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 
0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, + 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, + 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, + 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 
0x6b, 0x6b, 0xff, 0x6b, + 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, + 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, + 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x6b, 0x6b, 0x6b, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 
0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, + 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 
0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, + 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, + 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 
0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, + 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, + 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, + 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 0x37, 0xff, 0x37, 0x37, 
0x37, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, + 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, + 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 
0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, + 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, + 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, + 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 
0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, + 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, + 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, + 0x00, 0xff, 
0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, + 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0xff, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 +}; + +#endif diff --git a/MacroLibX/src/utils/non_copyable.h b/MacroLibX/src/utils/non_copyable.h new file mode 100644 index 0000000..f805a99 --- /dev/null +++ b/MacroLibX/src/utils/non_copyable.h @@ -0,0 +1,33 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* non_copyable.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/08 19:20:13 by maldavid #+# #+# */ +/* Updated: 2022/10/08 19:21:22 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_NON_COPYABLE__ +#define __MLX_NON_COPYABLE__ + +namespace mlx +{ + class non_copyable + { + protected: + non_copyable() = default; + virtual ~non_copyable() = default; + + public: + non_copyable(const non_copyable&) = delete; + non_copyable(non_copyable&&) noexcept = default; + non_copyable &operator=(const non_copyable&) = delete; + non_copyable &operator=(non_copyable&&) noexcept = default; + }; + +} + +#endif // __MLX_NON_COPYABLE__ diff --git a/MacroLibX/src/utils/singleton.h b/MacroLibX/src/utils/singleton.h new file mode 100644 index 0000000..3cb925e --- /dev/null +++ b/MacroLibX/src/utils/singleton.h @@ -0,0 +1,32 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* singleton.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/08 19:18:46 by maldavid #+# #+# */ +/* Updated: 2022/10/08 19:22:07 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_SINGLETON__ +#define __MLX_SINGLETON__ + 
+#include "non_copyable.h" + +namespace mlx +{ + template + class Singleton : public non_copyable + { + public: + inline static T& get() + { + static T instance; + return instance; + } + }; +} + +#endif // __MLX_SINGLETON__ diff --git a/MacroLibX/third_party/glm/common.hpp b/MacroLibX/third_party/glm/common.hpp new file mode 100644 index 0000000..0328dc9 --- /dev/null +++ b/MacroLibX/third_party/glm/common.hpp @@ -0,0 +1,539 @@ +/// @ref core +/// @file glm/common.hpp +/// +/// @see GLSL 4.20.8 specification, section 8.3 Common Functions +/// +/// @defgroup core_func_common Common functions +/// @ingroup core +/// +/// Provides GLSL common functions +/// +/// These all operate component-wise. The description is per component. +/// +/// Include to use these core features. + +#pragma once + +#include "detail/qualifier.hpp" +#include "detail/_fixes.hpp" + +namespace glm +{ + /// @addtogroup core_func_common + /// @{ + + /// Returns x if x >= 0; otherwise, it returns -x. + /// + /// @tparam genType floating-point or signed integer; scalar or vector types. + /// + /// @see GLSL abs man page + /// @see GLSL 4.20.8 specification, section 8.3 Common Functions + template + GLM_FUNC_DECL GLM_CONSTEXPR genType abs(genType x); + + /// Returns x if x >= 0; otherwise, it returns -x. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point or signed integer scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see GLSL abs man page + /// @see GLSL 4.20.8 specification, section 8.3 Common Functions + template + GLM_FUNC_DECL GLM_CONSTEXPR vec abs(vec const& x); + + /// Returns 1.0 if x > 0, 0.0 if x == 0, or -1.0 if x < 0. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see GLSL sign man page + /// @see GLSL 4.20.8 specification, section 8.3 Common Functions + template + GLM_FUNC_DECL vec sign(vec const& x); + + /// Returns a value equal to the nearest integer that is less then or equal to x. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see GLSL floor man page + /// @see GLSL 4.20.8 specification, section 8.3 Common Functions + template + GLM_FUNC_DECL vec floor(vec const& x); + + /// Returns a value equal to the nearest integer to x + /// whose absolute value is not larger than the absolute value of x. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see GLSL trunc man page + /// @see GLSL 4.20.8 specification, section 8.3 Common Functions + template + GLM_FUNC_DECL vec trunc(vec const& x); + + /// Returns a value equal to the nearest integer to x. + /// The fraction 0.5 will round in a direction chosen by the + /// implementation, presumably the direction that is fastest. + /// This includes the possibility that round(x) returns the + /// same value as roundEven(x) for all values of x. 
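
A minimal usage sketch of the floor/trunc contrast above, assuming <glm/glm.hpp> is available; both round to integers, but they disagree on negative inputs (illustrative, not part of the vendored file):

    #include <glm/glm.hpp>
    #include <cstdio>

    int main()
    {
        glm::vec2 v(-1.5f, 1.5f);
        glm::vec2 fl = glm::floor(v); // (-2.0, 1.0): nearest integer <= x
        glm::vec2 tr = glm::trunc(v); // (-1.0, 1.0): fractional part dropped toward zero
        std::printf("floor: %g %g, trunc: %g %g\n", fl.x, fl.y, tr.x, tr.y);
        return 0;
    }
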
+
+    /// Returns a value equal to the nearest integer to x.
+    /// The fraction 0.5 will round in a direction chosen by the
+    /// implementation, presumably the direction that is fastest.
+    /// This includes the possibility that round(x) returns the
+    /// same value as roundEven(x) for all values of x.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam T Floating-point scalar types
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL round man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL vec<L, T, Q> round(vec<L, T, Q> const& x);
+
+    /// Returns a value equal to the nearest integer to x.
+    /// A fractional part of 0.5 will round toward the nearest even
+    /// integer. (Both 3.5 and 4.5 for x will return 4.0.)
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam T Floating-point scalar types
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL roundEven man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    /// @see New round to even technique
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL vec<L, T, Q> roundEven(vec<L, T, Q> const& x);
+
+    /// Returns a value equal to the nearest integer
+    /// that is greater than or equal to x.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam T Floating-point scalar types
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL ceil man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL vec<L, T, Q> ceil(vec<L, T, Q> const& x);
+
+    /// Return x - floor(x).
+    ///
+    /// @tparam genType Floating-point scalar or vector types.
+    ///
+    /// @see GLSL fract man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<typename genType>
+    GLM_FUNC_DECL genType fract(genType x);
+
+    /// Return x - floor(x).
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam T Floating-point scalar types
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL fract man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL vec<L, T, Q> fract(vec<L, T, Q> const& x);
+
+    template<typename genType>
+    GLM_FUNC_DECL genType mod(genType x, genType y);
+
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL vec<L, T, Q> mod(vec<L, T, Q> const& x, T y);
+
+    /// Modulus. Returns x - y * floor(x / y)
+    /// for each component in x using the floating point value y.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam T Floating-point scalar types, include glm/gtc/integer for integer scalar types support
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL mod man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL vec<L, T, Q> mod(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
+
+    /// Returns the fractional part of x and sets i to the integer
+    /// part (as a whole number floating point value). Both the
+    /// return value and the output parameter will have the same
+    /// sign as x.
+    ///
+    /// @tparam genType Floating-point scalar or vector types.
+    ///
+    /// @see GLSL modf man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<typename genType>
+    GLM_FUNC_DECL genType modf(genType x, genType& i);
+
+    /// Returns y if y < x; otherwise, it returns x.
+    ///
+    /// @tparam genType Floating-point or integer; scalar or vector types.
+    ///
+    /// @see GLSL min man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<typename genType>
+    GLM_FUNC_DECL GLM_CONSTEXPR genType min(genType x, genType y);
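
The mod declared above is the GLSL floored modulus, x - y * floor(x / y), which differs from C's truncated fmod whenever the operand signs differ; a small sketch assuming <glm/glm.hpp> (illustrative only):

    #include <glm/glm.hpp>
    #include <cmath>
    #include <cstdio>

    int main()
    {
        float m = glm::mod(-1.0f, 3.0f);  // -1 - 3*floor(-1/3) = 2.0 (takes the sign of y)
        float f = std::fmod(-1.0f, 3.0f); // -1.0 (truncated modulus takes the sign of x)
        std::printf("glm::mod = %g, std::fmod = %g\n", m, f);
        return 0;
    }
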
+
+    /// Returns y if y < x; otherwise, it returns x.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam T Floating-point or integer scalar types
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL min man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL GLM_CONSTEXPR vec<L, T, Q> min(vec<L, T, Q> const& x, T y);
+
+    /// Returns y if y < x; otherwise, it returns x.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam T Floating-point or integer scalar types
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL min man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL GLM_CONSTEXPR vec<L, T, Q> min(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
+
+    /// Returns y if x < y; otherwise, it returns x.
+    ///
+    /// @tparam genType Floating-point or integer; scalar or vector types.
+    ///
+    /// @see GLSL max man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<typename genType>
+    GLM_FUNC_DECL GLM_CONSTEXPR genType max(genType x, genType y);
+
+    /// Returns y if x < y; otherwise, it returns x.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam T Floating-point or integer scalar types
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL max man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL GLM_CONSTEXPR vec<L, T, Q> max(vec<L, T, Q> const& x, T y);
+
+    /// Returns y if x < y; otherwise, it returns x.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam T Floating-point or integer scalar types
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL max man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL GLM_CONSTEXPR vec<L, T, Q> max(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
+
+    /// Returns min(max(x, minVal), maxVal) for each component in x
+    /// using the floating-point values minVal and maxVal.
+    ///
+    /// @tparam genType Floating-point or integer; scalar or vector types.
+    ///
+    /// @see GLSL clamp man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<typename genType>
+    GLM_FUNC_DECL GLM_CONSTEXPR genType clamp(genType x, genType minVal, genType maxVal);
+
+    /// Returns min(max(x, minVal), maxVal) for each component in x
+    /// using the floating-point values minVal and maxVal.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam T Floating-point or integer scalar types
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL clamp man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL GLM_CONSTEXPR vec<L, T, Q> clamp(vec<L, T, Q> const& x, T minVal, T maxVal);
+
+    /// Returns min(max(x, minVal), maxVal) for each component in x
+    /// using the floating-point values minVal and maxVal.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam T Floating-point or integer scalar types
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL clamp man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL GLM_CONSTEXPR vec<L, T, Q> clamp(vec<L, T, Q> const& x, vec<L, T, Q> const& minVal, vec<L, T, Q> const& maxVal);
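
The scalar-bounds and vector-bounds clamp overloads above behave identically per component; a short sketch assuming <glm/glm.hpp> (illustrative only):

    #include <glm/glm.hpp>
    #include <cstdio>

    int main()
    {
        glm::vec3 v(-0.5f, 0.25f, 2.0f);
        glm::vec3 a = glm::clamp(v, 0.0f, 1.0f);            // (0, 0.25, 1): one bound pair for all components
        glm::vec3 b = glm::clamp(v, glm::vec3(0.0f),
                                    glm::vec3(1.0f, 1.0f, 1.5f)); // per-component bounds: (0, 0.25, 1.5)
        std::printf("a = (%g %g %g), b = (%g %g %g)\n", a.x, a.y, a.z, b.x, b.y, b.z);
        return 0;
    }
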
+
+    /// If genTypeU is a floating scalar or vector:
+    /// Returns x * (1.0 - a) + y * a, i.e., the linear blend of
+    /// x and y using the floating-point value a.
+    /// The value for a is not restricted to the range [0, 1].
+    ///
+    /// If genTypeU is a boolean scalar or vector:
+    /// Selects which vector each returned component comes
+    /// from. For a component of 'a' that is false, the
+    /// corresponding component of 'x' is returned. For a
+    /// component of 'a' that is true, the corresponding
+    /// component of 'y' is returned. Components of 'x' and 'y' that
+    /// are not selected are allowed to be invalid floating point
+    /// values and will have no effect on the results. Thus, this
+    /// provides different functionality than
+    /// genType mix(genType x, genType y, genType(a))
+    /// where a is a Boolean vector.
+    ///
+    /// @see GLSL mix man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    ///
+    /// @param[in] x Value to interpolate.
+    /// @param[in] y Value to interpolate.
+    /// @param[in] a Interpolant.
+    ///
+    /// @tparam genTypeT Floating point scalar or vector.
+    /// @tparam genTypeU Floating point or boolean scalar or vector. It can't be a vector unless it has the same length as genTypeT.
+    ///
+    /// @code
+    /// #include <glm/glm.hpp>
+    /// ...
+    /// float a;
+    /// bool b;
+    /// glm::dvec3 e;
+    /// glm::dvec3 f;
+    /// glm::vec4 g;
+    /// glm::vec4 h;
+    /// ...
+    /// glm::vec4 r = glm::mix(g, h, a); // Interpolate two vectors with a floating-point scalar.
+    /// glm::vec4 s = glm::mix(g, h, b); // Returns g or h;
+    /// glm::dvec3 t = glm::mix(e, f, a); // The type of the third parameter is not required to match the first and the second.
+    /// glm::vec4 u = glm::mix(g, h, r); // Interpolations can be performed per component with a vector for the last parameter.
+    /// @endcode
+    template<typename genTypeT, typename genTypeU>
+    GLM_FUNC_DECL genTypeT mix(genTypeT x, genTypeT y, genTypeU a);
+
+    template<length_t L, typename T, typename U, qualifier Q>
+    GLM_FUNC_DECL vec<L, T, Q> mix(vec<L, T, Q> const& x, vec<L, T, Q> const& y, vec<L, U, Q> const& a);
+
+    template<length_t L, typename T, typename U, qualifier Q>
+    GLM_FUNC_DECL vec<L, T, Q> mix(vec<L, T, Q> const& x, vec<L, T, Q> const& y, U a);
+
+    /// Returns 0.0 if x < edge, otherwise it returns 1.0 for each component of a genType.
+    ///
+    /// @see GLSL step man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<typename genType>
+    GLM_FUNC_DECL genType step(genType edge, genType x);
+
+    /// Returns 0.0 if x < edge, otherwise it returns 1.0.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam T Floating-point scalar types
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL step man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL vec<L, T, Q> step(T edge, vec<L, T, Q> const& x);
+
+    /// Returns 0.0 if x < edge, otherwise it returns 1.0.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam T Floating-point scalar types
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL step man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL vec<L, T, Q> step(vec<L, T, Q> const& edge, vec<L, T, Q> const& x);
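
The boolean-interpolant form of mix documented above is a per-component select rather than a blend; a minimal sketch assuming <glm/glm.hpp> (illustrative only):

    #include <glm/glm.hpp>
    #include <cstdio>

    int main()
    {
        glm::vec2 x(0.0f, 0.0f);
        glm::vec2 y(1.0f, 1.0f);
        glm::vec2 blend  = glm::mix(x, y, 0.25f);                   // (0.25, 0.25): linear blend
        glm::vec2 select = glm::mix(x, y, glm::bvec2(false, true)); // (0.0, 1.0): component-wise select
        std::printf("blend = (%g %g), select = (%g %g)\n", blend.x, blend.y, select.x, select.y);
        return 0;
    }
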
+
+    /// Returns 0.0 if x <= edge0 and 1.0 if x >= edge1 and
+    /// performs smooth Hermite interpolation between 0 and 1
+    /// when edge0 < x < edge1. This is useful in cases where
+    /// you would want a threshold function with a smooth
+    /// transition. This is equivalent to:
+    /// genType t;
+    /// t = clamp ((x - edge0) / (edge1 - edge0), 0, 1);
+    /// return t * t * (3 - 2 * t);
+    /// Results are undefined if edge0 >= edge1.
+    ///
+    /// @tparam genType Floating-point scalar or vector types.
+    ///
+    /// @see GLSL smoothstep man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<typename genType>
+    GLM_FUNC_DECL genType smoothstep(genType edge0, genType edge1, genType x);
+
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL vec<L, T, Q> smoothstep(T edge0, T edge1, vec<L, T, Q> const& x);
+
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL vec<L, T, Q> smoothstep(vec<L, T, Q> const& edge0, vec<L, T, Q> const& edge1, vec<L, T, Q> const& x);
+
+    /// Returns true if x holds a NaN (not a number)
+    /// representation in the underlying implementation's set of
+    /// floating point representations. Returns false otherwise,
+    /// including for implementations with no NaN
+    /// representations.
+    ///
+    /// /!\ When using compiler fast math, this function may fail.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam T Floating-point scalar types
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL isnan man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL vec<L, bool, Q> isnan(vec<L, T, Q> const& x);
+
+    /// Returns true if x holds a positive infinity or negative
+    /// infinity representation in the underlying implementation's
+    /// set of floating point representations. Returns false
+    /// otherwise, including for implementations with no infinity
+    /// representations.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam T Floating-point scalar types
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL isinf man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL vec<L, bool, Q> isinf(vec<L, T, Q> const& x);
+
+    /// Returns a signed integer value representing
+    /// the encoding of a floating-point value. The floating-point
+    /// value's bit-level representation is preserved.
+    ///
+    /// @see GLSL floatBitsToInt man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    GLM_FUNC_DECL int floatBitsToInt(float const& v);
+
+    /// Returns a signed integer value representing
+    /// the encoding of a floating-point value. The floating-point
+    /// value's bit-level representation is preserved.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL floatBitsToInt man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, qualifier Q>
+    GLM_FUNC_DECL vec<L, int, Q> floatBitsToInt(vec<L, float, Q> const& v);
+
+    /// Returns an unsigned integer value representing
+    /// the encoding of a floating-point value. The floating-point
+    /// value's bit-level representation is preserved.
+    ///
+    /// @see GLSL floatBitsToUint man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    GLM_FUNC_DECL uint floatBitsToUint(float const& v);
+
+    /// Returns an unsigned integer value representing
+    /// the encoding of a floating-point value. The floating-point
+    /// value's bit-level representation is preserved.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL floatBitsToUint man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, qualifier Q>
+    GLM_FUNC_DECL vec<L, uint, Q> floatBitsToUint(vec<L, float, Q> const& v);
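
The floatBitsTo*/ *BitsToFloat pairs above are bit-level casts, so a round trip preserves the exact encoding; a small sketch assuming <glm/glm.hpp> and IEEE-754 single precision for the printed pattern (illustrative only):

    #include <glm/glm.hpp>
    #include <cstdio>

    int main()
    {
        int bits = glm::floatBitsToInt(1.0f);   // 0x3f800000 on IEEE-754 platforms
        float back = glm::intBitsToFloat(bits); // exact bit pattern restored: 1.0f
        std::printf("bits = 0x%08x, back = %g\n", (unsigned)bits, back);
        return 0;
    }
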
+
+    /// Returns a floating-point value corresponding to a signed
+    /// integer encoding of a floating-point value.
+    /// If an inf or NaN is passed in, it will not signal, and the
+    /// resulting floating point value is unspecified. Otherwise,
+    /// the bit-level representation is preserved.
+    ///
+    /// @see GLSL intBitsToFloat man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    GLM_FUNC_DECL float intBitsToFloat(int const& v);
+
+    /// Returns a floating-point value corresponding to a signed
+    /// integer encoding of a floating-point value.
+    /// If an inf or NaN is passed in, it will not signal, and the
+    /// resulting floating point value is unspecified. Otherwise,
+    /// the bit-level representation is preserved.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL intBitsToFloat man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, qualifier Q>
+    GLM_FUNC_DECL vec<L, float, Q> intBitsToFloat(vec<L, int, Q> const& v);
+
+    /// Returns a floating-point value corresponding to an
+    /// unsigned integer encoding of a floating-point value.
+    /// If an inf or NaN is passed in, it will not signal, and the
+    /// resulting floating point value is unspecified. Otherwise,
+    /// the bit-level representation is preserved.
+    ///
+    /// @see GLSL uintBitsToFloat man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    GLM_FUNC_DECL float uintBitsToFloat(uint const& v);
+
+    /// Returns a floating-point value corresponding to an
+    /// unsigned integer encoding of a floating-point value.
+    /// If an inf or NaN is passed in, it will not signal, and the
+    /// resulting floating point value is unspecified. Otherwise,
+    /// the bit-level representation is preserved.
+    ///
+    /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+    /// @tparam Q Value from qualifier enum
+    ///
+    /// @see GLSL uintBitsToFloat man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<length_t L, qualifier Q>
+    GLM_FUNC_DECL vec<L, float, Q> uintBitsToFloat(vec<L, uint, Q> const& v);
+
+    /// Computes and returns a * b + c.
+    ///
+    /// @tparam genType Floating-point scalar or vector types.
+    ///
+    /// @see GLSL fma man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<typename genType>
+    GLM_FUNC_DECL genType fma(genType const& a, genType const& b, genType const& c);
+
+    /// Splits x into a floating-point significand in the range
+    /// [0.5, 1.0) and an integral exponent of two, such that:
+    /// x = significand * exp(2, exponent)
+    ///
+    /// The significand is returned by the function and the
+    /// exponent is returned in the parameter exp. For a
+    /// floating-point value of zero, the significand and exponent
+    /// are both zero. For a floating-point value that is an
+    /// infinity or is not a number, the results are undefined.
+    ///
+    /// @tparam genType Floating-point scalar or vector types.
+    ///
+    /// @see GLSL frexp man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<typename genType>
+    GLM_FUNC_DECL genType frexp(genType x, int& exp);
+
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL vec<L, T, Q> frexp(vec<L, T, Q> const& v, vec<L, int, Q>& exp);
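
frexp and the ldexp declared just below are inverses: frexp factors x into a significand and a power-of-two exponent, and ldexp rebuilds it; a short sketch assuming <glm/glm.hpp> (illustrative only):

    #include <glm/glm.hpp>
    #include <cstdio>

    int main()
    {
        int e = 0;
        float s = glm::frexp(24.0f, e); // s = 0.75, e = 5, since 24 = 0.75 * 2^5
        float x = glm::ldexp(s, e);     // rebuilds 24.0f from significand and exponent
        std::printf("%g * 2^%d = %g\n", s, e, x);
        return 0;
    }
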
+
+    /// Builds a floating-point number from x and the
+    /// corresponding integral exponent of two in exp, returning:
+    /// significand * exp(2, exponent)
+    ///
+    /// If this product is too large to be represented in the
+    /// floating-point type, the result is undefined.
+    ///
+    /// @tparam genType Floating-point scalar or vector types.
+    ///
+    /// @see GLSL ldexp man page
+    /// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+    template<typename genType>
+    GLM_FUNC_DECL genType ldexp(genType const& x, int const& exp);
+
+    template<length_t L, typename T, qualifier Q>
+    GLM_FUNC_DECL vec<L, T, Q> ldexp(vec<L, T, Q> const& v, vec<L, int, Q> const& exp);
+
+    /// @}
+}//namespace glm
+
+#include "detail/func_common.inl"
+
diff --git a/MacroLibX/third_party/glm/detail/_features.hpp b/MacroLibX/third_party/glm/detail/_features.hpp
new file mode 100644
index 0000000..b0cbe9f
--- /dev/null
+++ b/MacroLibX/third_party/glm/detail/_features.hpp
@@ -0,0 +1,394 @@
+#pragma once
+
+// #define GLM_CXX98_EXCEPTIONS
+// #define GLM_CXX98_RTTI
+
+// #define GLM_CXX11_RVALUE_REFERENCES
+// Rvalue references - GCC 4.3
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2006/n2118.html
+
+// GLM_CXX11_TRAILING_RETURN
+// Rvalue references for *this - GCC not supported
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2439.htm
+
+// GLM_CXX11_NONSTATIC_MEMBER_INIT
+// Initialization of class objects by rvalues - GCC any
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2004/n1610.html
+
+// GLM_CXX11_NONSTATIC_MEMBER_INIT
+// Non-static data member initializers - GCC 4.7
+// http://www.open-std.org/JTC1/SC22/WG21/docs/papers/2008/n2756.htm
+
+// #define GLM_CXX11_VARIADIC_TEMPLATE
+// Variadic templates - GCC 4.3
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2242.pdf
+
+//
+// Extending variadic template template parameters - GCC 4.4
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2555.pdf
+
+// #define GLM_CXX11_GENERALIZED_INITIALIZERS
+// Initializer lists - GCC 4.4
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2672.htm
+
+// #define GLM_CXX11_STATIC_ASSERT
+// Static assertions - GCC 4.3
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2004/n1720.html
+
+// #define GLM_CXX11_AUTO_TYPE
+// auto-typed variables - GCC 4.4
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2006/n1984.pdf
+
+// #define GLM_CXX11_AUTO_TYPE
+// Multi-declarator auto - GCC 4.4
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2004/n1737.pdf
+
+// #define GLM_CXX11_AUTO_TYPE
+// Removal of auto as a storage-class specifier - GCC 4.4
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2546.htm
+
+// #define GLM_CXX11_AUTO_TYPE
+// New function declarator syntax - GCC 4.4
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2541.htm
+
+// #define GLM_CXX11_LAMBDAS
+// New wording for C++0x lambdas - GCC 4.5
+// http://www.open-std.org/JTC1/SC22/WG21/docs/papers/2009/n2927.pdf
+
+// #define GLM_CXX11_DECLTYPE
+// Declared type of an expression - GCC 4.3
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2343.pdf
+
+//
+// Right angle brackets - GCC 4.3
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2005/n1757.html
+
+//
+// Default template arguments for function templates DR226 GCC 4.3
+// http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_defects.html#226
+
+//
+// Solving the SFINAE problem for expressions DR339 GCC 4.4
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2634.html
+
+// #define GLM_CXX11_ALIAS_TEMPLATE
+// Template aliases N2258 GCC 4.7
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2258.pdf
+
+//
+// Extern templates N1987 Yes
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2006/n1987.htm
+
+// #define GLM_CXX11_NULLPTR
+// Null pointer constant N2431 GCC 4.6
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2431.pdf
+
Strongly-typed enums N2347 GCC 4.4 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2347.pdf + +// +// Forward declarations for enums N2764 GCC 4.6 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2764.pdf + +// +// Generalized attributes N2761 GCC 4.8 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2761.pdf + +// +// Generalized constant expressions N2235 GCC 4.6 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2235.pdf + +// +// Alignment support N2341 GCC 4.8 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2341.pdf + +// #define GLM_CXX11_DELEGATING_CONSTRUCTORS +// Delegating constructors N1986 GCC 4.7 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2006/n1986.pdf + +// +// Inheriting constructors N2540 GCC 4.8 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2540.htm + +// #define GLM_CXX11_EXPLICIT_CONVERSIONS +// Explicit conversion operators N2437 GCC 4.5 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2437.pdf + +// +// New character types N2249 GCC 4.4 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2249.html + +// +// Unicode string literals N2442 GCC 4.5 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2442.htm + +// +// Raw string literals N2442 GCC 4.5 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2442.htm + +// +// Universal character name literals N2170 GCC 4.5 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2170.html + +// #define GLM_CXX11_USER_LITERALS +// User-defined literals N2765 GCC 4.7 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2765.pdf + +// +// Standard Layout Types N2342 GCC 4.5 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2342.htm + +// #define GLM_CXX11_DEFAULTED_FUNCTIONS +// #define GLM_CXX11_DELETED_FUNCTIONS +// Defaulted and deleted functions N2346 GCC 4.4 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2346.htm + +// +// Extended friend declarations N1791 GCC 4.7 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2005/n1791.pdf + +// +// Extending sizeof N2253 GCC 4.4 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2253.html + +// #define GLM_CXX11_INLINE_NAMESPACES +// Inline namespaces N2535 GCC 4.4 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2535.htm + +// #define GLM_CXX11_UNRESTRICTED_UNIONS +// Unrestricted unions N2544 GCC 4.6 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2544.pdf + +// #define GLM_CXX11_LOCAL_TYPE_TEMPLATE_ARGS +// Local and unnamed types as template arguments N2657 GCC 4.5 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2657.htm + +// #define GLM_CXX11_RANGE_FOR +// Range-based for N2930 GCC 4.6 +// http://www.open-std.org/JTC1/SC22/WG21/docs/papers/2009/n2930.html + +// #define GLM_CXX11_OVERRIDE_CONTROL +// Explicit virtual overrides N2928 N3206 N3272 GCC 4.7 +// http://www.open-std.org/JTC1/SC22/WG21/docs/papers/2009/n2928.htm +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2010/n3206.htm +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2011/n3272.htm + +// +// Minimal support for garbage collection and reachability-based leak detection N2670 No +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2670.htm + +// #define GLM_CXX11_NOEXCEPT +// Allowing move constructors to throw [noexcept] N3050 GCC 4.6 (core language only) +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2010/n3050.html + +// +// Defining move special member functions N3053 GCC 4.6 
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2010/n3053.html + +// +// Sequence points N2239 Yes +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2239.html + +// +// Atomic operations N2427 GCC 4.4 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2239.html + +// +// Strong Compare and Exchange N2748 GCC 4.5 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html + +// +// Bidirectional Fences N2752 GCC 4.8 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2752.htm + +// +// Memory model N2429 GCC 4.8 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2429.htm + +// +// Data-dependency ordering: atomics and memory model N2664 GCC 4.4 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2664.htm + +// +// Propagating exceptions N2179 GCC 4.4 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2179.html + +// +// Abandoning a process and at_quick_exit N2440 GCC 4.8 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2440.htm + +// +// Allow atomics use in signal handlers N2547 Yes +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2547.htm + +// +// Thread-local storage N2659 GCC 4.8 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2659.htm + +// +// Dynamic initialization and destruction with concurrency N2660 GCC 4.3 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2660.htm + +// +// __func__ predefined identifier N2340 GCC 4.3 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2340.htm + +// +// C99 preprocessor N1653 GCC 4.3 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2004/n1653.htm + +// +// long long N1811 GCC 4.3 +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2005/n1811.pdf + +// +// Extended integral types N1988 Yes +// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2006/n1988.pdf + +#if(GLM_COMPILER & GLM_COMPILER_GCC) + +# define GLM_CXX11_STATIC_ASSERT + +#elif(GLM_COMPILER & GLM_COMPILER_CLANG) +# if(__has_feature(cxx_exceptions)) +# define GLM_CXX98_EXCEPTIONS +# endif + +# if(__has_feature(cxx_rtti)) +# define GLM_CXX98_RTTI +# endif + +# if(__has_feature(cxx_access_control_sfinae)) +# define GLM_CXX11_ACCESS_CONTROL_SFINAE +# endif + +# if(__has_feature(cxx_alias_templates)) +# define GLM_CXX11_ALIAS_TEMPLATE +# endif + +# if(__has_feature(cxx_alignas)) +# define GLM_CXX11_ALIGNAS +# endif + +# if(__has_feature(cxx_attributes)) +# define GLM_CXX11_ATTRIBUTES +# endif + +# if(__has_feature(cxx_constexpr)) +# define GLM_CXX11_CONSTEXPR +# endif + +# if(__has_feature(cxx_decltype)) +# define GLM_CXX11_DECLTYPE +# endif + +# if(__has_feature(cxx_default_function_template_args)) +# define GLM_CXX11_DEFAULT_FUNCTION_TEMPLATE_ARGS +# endif + +# if(__has_feature(cxx_defaulted_functions)) +# define GLM_CXX11_DEFAULTED_FUNCTIONS +# endif + +# if(__has_feature(cxx_delegating_constructors)) +# define GLM_CXX11_DELEGATING_CONSTRUCTORS +# endif + +# if(__has_feature(cxx_deleted_functions)) +# define GLM_CXX11_DELETED_FUNCTIONS +# endif + +# if(__has_feature(cxx_explicit_conversions)) +# define GLM_CXX11_EXPLICIT_CONVERSIONS +# endif + +# if(__has_feature(cxx_generalized_initializers)) +# define GLM_CXX11_GENERALIZED_INITIALIZERS +# endif + +# if(__has_feature(cxx_implicit_moves)) +# define GLM_CXX11_IMPLICIT_MOVES +# endif + +# if(__has_feature(cxx_inheriting_constructors)) +# define GLM_CXX11_INHERITING_CONSTRUCTORS +# endif + +# if(__has_feature(cxx_inline_namespaces)) +# define GLM_CXX11_INLINE_NAMESPACES +# endif + +# 
if(__has_feature(cxx_lambdas)) +# define GLM_CXX11_LAMBDAS +# endif + +# if(__has_feature(cxx_local_type_template_args)) +# define GLM_CXX11_LOCAL_TYPE_TEMPLATE_ARGS +# endif + +# if(__has_feature(cxx_noexcept)) +# define GLM_CXX11_NOEXCEPT +# endif + +# if(__has_feature(cxx_nonstatic_member_init)) +# define GLM_CXX11_NONSTATIC_MEMBER_INIT +# endif + +# if(__has_feature(cxx_nullptr)) +# define GLM_CXX11_NULLPTR +# endif + +# if(__has_feature(cxx_override_control)) +# define GLM_CXX11_OVERRIDE_CONTROL +# endif + +# if(__has_feature(cxx_reference_qualified_functions)) +# define GLM_CXX11_REFERENCE_QUALIFIED_FUNCTIONS +# endif + +# if(__has_feature(cxx_range_for)) +# define GLM_CXX11_RANGE_FOR +# endif + +# if(__has_feature(cxx_raw_string_literals)) +# define GLM_CXX11_RAW_STRING_LITERALS +# endif + +# if(__has_feature(cxx_rvalue_references)) +# define GLM_CXX11_RVALUE_REFERENCES +# endif + +# if(__has_feature(cxx_static_assert)) +# define GLM_CXX11_STATIC_ASSERT +# endif + +# if(__has_feature(cxx_auto_type)) +# define GLM_CXX11_AUTO_TYPE +# endif + +# if(__has_feature(cxx_strong_enums)) +# define GLM_CXX11_STRONG_ENUMS +# endif + +# if(__has_feature(cxx_trailing_return)) +# define GLM_CXX11_TRAILING_RETURN +# endif + +# if(__has_feature(cxx_unicode_literals)) +# define GLM_CXX11_UNICODE_LITERALS +# endif + +# if(__has_feature(cxx_unrestricted_unions)) +# define GLM_CXX11_UNRESTRICTED_UNIONS +# endif + +# if(__has_feature(cxx_user_literals)) +# define GLM_CXX11_USER_LITERALS +# endif + +# if(__has_feature(cxx_variadic_templates)) +# define GLM_CXX11_VARIADIC_TEMPLATES +# endif + +#endif//(GLM_COMPILER & GLM_COMPILER_CLANG) diff --git a/MacroLibX/third_party/glm/detail/_fixes.hpp b/MacroLibX/third_party/glm/detail/_fixes.hpp new file mode 100644 index 0000000..a503c7c --- /dev/null +++ b/MacroLibX/third_party/glm/detail/_fixes.hpp @@ -0,0 +1,27 @@ +#include + +//! Workaround for compatibility with other libraries +#ifdef max +#undef max +#endif + +//! Workaround for compatibility with other libraries +#ifdef min +#undef min +#endif + +//! Workaround for Android +#ifdef isnan +#undef isnan +#endif + +//! Workaround for Android +#ifdef isinf +#undef isinf +#endif + +//! 
Workaround for Chrome Native Client
+#ifdef log2
+#undef log2
+#endif
+
diff --git a/MacroLibX/third_party/glm/detail/_noise.hpp b/MacroLibX/third_party/glm/detail/_noise.hpp
new file mode 100644
index 0000000..5a874a0
--- /dev/null
+++ b/MacroLibX/third_party/glm/detail/_noise.hpp
@@ -0,0 +1,81 @@
+#pragma once
+
+#include "../common.hpp"
+
+namespace glm{
+namespace detail
+{
+ template<typename T>
+ GLM_FUNC_QUALIFIER T mod289(T const& x)
+ {
+  return x - floor(x * (static_cast<T>(1.0) / static_cast<T>(289.0))) * static_cast<T>(289.0);
+ }
+
+ template<typename T>
+ GLM_FUNC_QUALIFIER T permute(T const& x)
+ {
+  return mod289(((x * static_cast<T>(34)) + static_cast<T>(1)) * x);
+ }
+
+ template<typename T, qualifier Q>
+ GLM_FUNC_QUALIFIER vec<2, T, Q> permute(vec<2, T, Q> const& x)
+ {
+  return mod289(((x * static_cast<T>(34)) + static_cast<T>(1)) * x);
+ }
+
+ template<typename T, qualifier Q>
+ GLM_FUNC_QUALIFIER vec<3, T, Q> permute(vec<3, T, Q> const& x)
+ {
+  return mod289(((x * static_cast<T>(34)) + static_cast<T>(1)) * x);
+ }
+
+ template<typename T, qualifier Q>
+ GLM_FUNC_QUALIFIER vec<4, T, Q> permute(vec<4, T, Q> const& x)
+ {
+  return mod289(((x * static_cast<T>(34)) + static_cast<T>(1)) * x);
+ }
+
+ template<typename T>
+ GLM_FUNC_QUALIFIER T taylorInvSqrt(T const& r)
+ {
+  return static_cast<T>(1.79284291400159) - static_cast<T>(0.85373472095314) * r;
+ }
+
+ template<typename T, qualifier Q>
+ GLM_FUNC_QUALIFIER vec<2, T, Q> taylorInvSqrt(vec<2, T, Q> const& r)
+ {
+  return static_cast<T>(1.79284291400159) - static_cast<T>(0.85373472095314) * r;
+ }
+
+ template<typename T, qualifier Q>
+ GLM_FUNC_QUALIFIER vec<3, T, Q> taylorInvSqrt(vec<3, T, Q> const& r)
+ {
+  return static_cast<T>(1.79284291400159) - static_cast<T>(0.85373472095314) * r;
+ }
+
+ template<typename T, qualifier Q>
+ GLM_FUNC_QUALIFIER vec<4, T, Q> taylorInvSqrt(vec<4, T, Q> const& r)
+ {
+  return static_cast<T>(1.79284291400159) - static_cast<T>(0.85373472095314) * r;
+ }
+
+ template<typename T, qualifier Q>
+ GLM_FUNC_QUALIFIER vec<2, T, Q> fade(vec<2, T, Q> const& t)
+ {
+  return (t * t * t) * (t * (t * static_cast<T>(6) - static_cast<T>(15)) + static_cast<T>(10));
+ }
+
+ template<typename T, qualifier Q>
+ GLM_FUNC_QUALIFIER vec<3, T, Q> fade(vec<3, T, Q> const& t)
+ {
+  return (t * t * t) * (t * (t * static_cast<T>(6) - static_cast<T>(15)) + static_cast<T>(10));
+ }
+
+ template<typename T, qualifier Q>
+ GLM_FUNC_QUALIFIER vec<4, T, Q> fade(vec<4, T, Q> const& t)
+ {
+  return (t * t * t) * (t * (t * static_cast<T>(6) - static_cast<T>(15)) + static_cast<T>(10));
+ }
+}//namespace detail
+}//namespace glm
+
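The detail helpers above are the standard Perlin-noise building blocks: mod289 and permute form a cheap permutation-polynomial hash, taylorInvSqrt is a linear approximation of inversesqrt, and fade is Perlin's quintic smootherstep 6t^5 - 15t^4 + 10t^3, whose first and second derivatives vanish at both endpoints. A minimal standalone sketch of that curve's behaviour (an illustrative example, not part of the vendored sources; fade_scalar is a hypothetical name):

#include <cassert>

// Same quintic polynomial as glm::detail::fade, specialised to a float scalar.
static float fade_scalar(float t)
{
    return (t * t * t) * (t * (t * 6.0f - 15.0f) + 10.0f);
}

int main()
{
    assert(fade_scalar(0.0f) == 0.0f); // starts at 0 with zero slope
    assert(fade_scalar(1.0f) == 1.0f); // ends at 1 with zero slope
    assert(fade_scalar(0.5f) == 0.5f); // symmetric about the midpoint (all three values are exact in binary floating point)
    return 0;
}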
diff --git a/MacroLibX/third_party/glm/detail/_swizzle.hpp b/MacroLibX/third_party/glm/detail/_swizzle.hpp
new file mode 100644
index 0000000..87896ef
--- /dev/null
+++ b/MacroLibX/third_party/glm/detail/_swizzle.hpp
@@ -0,0 +1,804 @@
+#pragma once
+
+namespace glm{
+namespace detail
+{
+ // Internal class for implementing swizzle operators
+ template<typename T, int N>
+ struct _swizzle_base0
+ {
+ protected:
+  GLM_FUNC_QUALIFIER T& elem(size_t i){ return (reinterpret_cast<T*>(_buffer))[i]; }
+  GLM_FUNC_QUALIFIER T const& elem(size_t i) const{ return (reinterpret_cast<const T*>(_buffer))[i]; }
+
+  // Use an opaque buffer to *ensure* the compiler doesn't call a constructor.
+  // The size 1 buffer is assumed to be aligned with the actual vector members,
+  // so that elem() can index into them.
+  char _buffer[1];
+ };
+
+ template<int N, typename T, qualifier Q, int E0, int E1, int E2, int E3, bool Aligned>
+ struct _swizzle_base1 : public _swizzle_base0<T, N>
+ {
+ };
+
+ template<typename T, qualifier Q, int E0, int E1, bool Aligned>
+ struct _swizzle_base1<2, T, Q, E0,E1,-1,-2, Aligned> : public _swizzle_base0<T, 2>
+ {
+  GLM_FUNC_QUALIFIER vec<2, T, Q> operator ()() const { return vec<2, T, Q>(this->elem(E0), this->elem(E1)); }
+ };
+
+ template<typename T, qualifier Q, int E0, int E1, int E2, bool Aligned>
+ struct _swizzle_base1<3, T, Q, E0,E1,E2,-1, Aligned> : public _swizzle_base0<T, 3>
+ {
+  GLM_FUNC_QUALIFIER vec<3, T, Q> operator ()() const { return vec<3, T, Q>(this->elem(E0), this->elem(E1), this->elem(E2)); }
+ };
+
+ template<typename T, qualifier Q, int E0, int E1, int E2, int E3, bool Aligned>
+ struct _swizzle_base1<4, T, Q, E0,E1,E2,E3, Aligned> : public _swizzle_base0<T, 4>
+ {
+  GLM_FUNC_QUALIFIER vec<4, T, Q> operator ()() const { return vec<4, T, Q>(this->elem(E0), this->elem(E1), this->elem(E2), this->elem(E3)); }
+ };
+
+ // Internal class for implementing swizzle operators
+ /*
+  Template parameters:
+
+  T = type of scalar values (e.g. float, double)
+  N = number of components in the vector (e.g. 3)
+  E0...3 = what index the n-th element of this swizzle refers to in the unswizzled vec
+
+  DUPLICATE_ELEMENTS = 1 if there is a repeated element, 0 otherwise (used to specialize swizzles
+   containing duplicate elements so that they cannot be used as l-values).
+ */
+ template<int N, typename T, qualifier Q, int E0, int E1, int E2, int E3, int DUPLICATE_ELEMENTS>
+ struct _swizzle_base2 : public _swizzle_base1<N, T, Q, E0,E1,E2,E3, detail::is_aligned<Q>::value>
+ {
+  struct op_equal
+  {
+   GLM_FUNC_QUALIFIER void operator() (T& e, T& t) const{ e = t; }
+  };
+
+  struct op_minus
+  {
+   GLM_FUNC_QUALIFIER void operator() (T& e, T& t) const{ e -= t; }
+  };
+
+  struct op_plus
+  {
+   GLM_FUNC_QUALIFIER void operator() (T& e, T& t) const{ e += t; }
+  };
+
+  struct op_mul
+  {
+   GLM_FUNC_QUALIFIER void operator() (T& e, T& t) const{ e *= t; }
+  };
+
+  struct op_div
+  {
+   GLM_FUNC_QUALIFIER void operator() (T& e, T& t) const{ e /= t; }
+  };
+
+ public:
+  GLM_FUNC_QUALIFIER _swizzle_base2& operator= (const T& t)
+  {
+   for (int i = 0; i < N; ++i)
+    (*this)[i] = t;
+   return *this;
+  }
+
+  GLM_FUNC_QUALIFIER _swizzle_base2& operator= (vec<N, T, Q> const& that)
+  {
+   _apply_op(that, op_equal());
+   return *this;
+  }
+
+  GLM_FUNC_QUALIFIER void operator -= (vec<N, T, Q> const& that)
+  {
+   _apply_op(that, op_minus());
+  }
+
+  GLM_FUNC_QUALIFIER void operator += (vec<N, T, Q> const& that)
+  {
+   _apply_op(that, op_plus());
+  }
+
+  GLM_FUNC_QUALIFIER void operator *= (vec<N, T, Q> const& that)
+  {
+   _apply_op(that, op_mul());
+  }
+
+  GLM_FUNC_QUALIFIER void operator /= (vec<N, T, Q> const& that)
+  {
+   _apply_op(that, op_div());
+  }
+
+  GLM_FUNC_QUALIFIER T& operator[](size_t i)
+  {
+   const int offset_dst[4] = { E0, E1, E2, E3 };
+   return this->elem(offset_dst[i]);
+  }
+  GLM_FUNC_QUALIFIER T operator[](size_t i) const
+  {
+   const int offset_dst[4] = { E0, E1, E2, E3 };
+   return this->elem(offset_dst[i]);
+  }
+
+ protected:
+  template<typename U>
+  GLM_FUNC_QUALIFIER void _apply_op(vec<N, T, Q> const& that, const U& op)
+  {
+   // Make a copy of the data in case this == &that.
+   // The copier should optimize out the copy in cases where the function is
+   // properly inlined and the copy is not necessary.
+   T t[N];
+   for (int i = 0; i < N; ++i)
+    t[i] = that[i];
+   for (int i = 0; i < N; ++i)
+    op( (*this)[i], t[i] );
+  }
+ };
+
+ // Specialization for swizzles containing duplicate elements. These cannot be modified.
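+ // (For example, with GLM_FORCE_SWIZZLE enabled, v.yx = v.xy; assigns through the
+ // swizzle, whereas v.xx is read-only: writing through it would store into element 0
+ // twice, so this specialization omits the mutating operators.)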
+ template + struct _swizzle_base2 : public _swizzle_base1::value> + { + struct Stub {}; + + GLM_FUNC_QUALIFIER _swizzle_base2& operator= (Stub const&) { return *this; } + + GLM_FUNC_QUALIFIER T operator[] (size_t i) const + { + const int offset_dst[4] = { E0, E1, E2, E3 }; + return this->elem(offset_dst[i]); + } + }; + + template + struct _swizzle : public _swizzle_base2 + { + typedef _swizzle_base2 base_type; + + using base_type::operator=; + + GLM_FUNC_QUALIFIER operator vec () const { return (*this)(); } + }; + +// +// To prevent the C++ syntax from getting entirely overwhelming, define some alias macros +// +#define GLM_SWIZZLE_TEMPLATE1 template +#define GLM_SWIZZLE_TEMPLATE2 template +#define GLM_SWIZZLE_TYPE1 _swizzle +#define GLM_SWIZZLE_TYPE2 _swizzle + +// +// Wrapper for a binary operator (e.g. u.yy + v.zy) +// +#define GLM_SWIZZLE_VECTOR_BINARY_OPERATOR_IMPLEMENTATION(OPERAND) \ + GLM_SWIZZLE_TEMPLATE2 \ + GLM_FUNC_QUALIFIER vec operator OPERAND ( const GLM_SWIZZLE_TYPE1& a, const GLM_SWIZZLE_TYPE2& b) \ + { \ + return a() OPERAND b(); \ + } \ + GLM_SWIZZLE_TEMPLATE1 \ + GLM_FUNC_QUALIFIER vec operator OPERAND ( const GLM_SWIZZLE_TYPE1& a, const vec& b) \ + { \ + return a() OPERAND b; \ + } \ + GLM_SWIZZLE_TEMPLATE1 \ + GLM_FUNC_QUALIFIER vec operator OPERAND ( const vec& a, const GLM_SWIZZLE_TYPE1& b) \ + { \ + return a OPERAND b(); \ + } + +// +// Wrapper for a operand between a swizzle and a binary (e.g. 1.0f - u.xyz) +// +#define GLM_SWIZZLE_SCALAR_BINARY_OPERATOR_IMPLEMENTATION(OPERAND) \ + GLM_SWIZZLE_TEMPLATE1 \ + GLM_FUNC_QUALIFIER vec operator OPERAND ( const GLM_SWIZZLE_TYPE1& a, const T& b) \ + { \ + return a() OPERAND b; \ + } \ + GLM_SWIZZLE_TEMPLATE1 \ + GLM_FUNC_QUALIFIER vec operator OPERAND ( const T& a, const GLM_SWIZZLE_TYPE1& b) \ + { \ + return a OPERAND b(); \ + } + +// +// Macro for wrapping a function taking one argument (e.g. abs()) +// +#define GLM_SWIZZLE_FUNCTION_1_ARGS(RETURN_TYPE,FUNCTION) \ + GLM_SWIZZLE_TEMPLATE1 \ + GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const GLM_SWIZZLE_TYPE1& a) \ + { \ + return FUNCTION(a()); \ + } + +// +// Macro for wrapping a function taking two vector arguments (e.g. dot()). +// +#define GLM_SWIZZLE_FUNCTION_2_ARGS(RETURN_TYPE,FUNCTION) \ + GLM_SWIZZLE_TEMPLATE2 \ + GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const GLM_SWIZZLE_TYPE1& a, const GLM_SWIZZLE_TYPE2& b) \ + { \ + return FUNCTION(a(), b()); \ + } \ + GLM_SWIZZLE_TEMPLATE1 \ + GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const GLM_SWIZZLE_TYPE1& a, const GLM_SWIZZLE_TYPE1& b) \ + { \ + return FUNCTION(a(), b()); \ + } \ + GLM_SWIZZLE_TEMPLATE1 \ + GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const GLM_SWIZZLE_TYPE1& a, const typename V& b) \ + { \ + return FUNCTION(a(), b); \ + } \ + GLM_SWIZZLE_TEMPLATE1 \ + GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const V& a, const GLM_SWIZZLE_TYPE1& b) \ + { \ + return FUNCTION(a, b()); \ + } + +// +// Macro for wrapping a function take 2 vec arguments followed by a scalar (e.g. mix()). 
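+// For instance, once GLM_SWIZZLE_FUNCTION_2_ARGS_SCALAR(vec_type, mix) has been
+// invoked, a call such as mix(a.xy, b.yx, T(0.5)) compiles directly: each swizzle
+// operand is unpacked into a plain vec via its operator()() before forwarding.
+// (Illustrative note: these wrappers are left commented out further below, so by
+// default a swizzle must be converted explicitly, e.g. mix(a.xy(), b.yx(), 0.5f).)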
+// +#define GLM_SWIZZLE_FUNCTION_2_ARGS_SCALAR(RETURN_TYPE,FUNCTION) \ + GLM_SWIZZLE_TEMPLATE2 \ + GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const GLM_SWIZZLE_TYPE1& a, const GLM_SWIZZLE_TYPE2& b, const T& c) \ + { \ + return FUNCTION(a(), b(), c); \ + } \ + GLM_SWIZZLE_TEMPLATE1 \ + GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const GLM_SWIZZLE_TYPE1& a, const GLM_SWIZZLE_TYPE1& b, const T& c) \ + { \ + return FUNCTION(a(), b(), c); \ + } \ + GLM_SWIZZLE_TEMPLATE1 \ + GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const GLM_SWIZZLE_TYPE1& a, const typename S0::vec_type& b, const T& c)\ + { \ + return FUNCTION(a(), b, c); \ + } \ + GLM_SWIZZLE_TEMPLATE1 \ + GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const typename V& a, const GLM_SWIZZLE_TYPE1& b, const T& c) \ + { \ + return FUNCTION(a, b(), c); \ + } + +}//namespace detail +}//namespace glm + +namespace glm +{ + namespace detail + { + GLM_SWIZZLE_SCALAR_BINARY_OPERATOR_IMPLEMENTATION(-) + GLM_SWIZZLE_SCALAR_BINARY_OPERATOR_IMPLEMENTATION(*) + GLM_SWIZZLE_VECTOR_BINARY_OPERATOR_IMPLEMENTATION(+) + GLM_SWIZZLE_VECTOR_BINARY_OPERATOR_IMPLEMENTATION(-) + GLM_SWIZZLE_VECTOR_BINARY_OPERATOR_IMPLEMENTATION(*) + GLM_SWIZZLE_VECTOR_BINARY_OPERATOR_IMPLEMENTATION(/) + } + + // + // Swizzles are distinct types from the unswizzled type. The below macros will + // provide template specializations for the swizzle types for the given functions + // so that the compiler does not have any ambiguity to choosing how to handle + // the function. + // + // The alternative is to use the operator()() when calling the function in order + // to explicitly convert the swizzled type to the unswizzled type. + // + + //GLM_SWIZZLE_FUNCTION_1_ARGS(vec_type, abs); + //GLM_SWIZZLE_FUNCTION_1_ARGS(vec_type, acos); + //GLM_SWIZZLE_FUNCTION_1_ARGS(vec_type, acosh); + //GLM_SWIZZLE_FUNCTION_1_ARGS(vec_type, all); + //GLM_SWIZZLE_FUNCTION_1_ARGS(vec_type, any); + + //GLM_SWIZZLE_FUNCTION_2_ARGS(value_type, dot); + //GLM_SWIZZLE_FUNCTION_2_ARGS(vec_type, cross); + //GLM_SWIZZLE_FUNCTION_2_ARGS(vec_type, step); + //GLM_SWIZZLE_FUNCTION_2_ARGS_SCALAR(vec_type, mix); +} + +#define GLM_SWIZZLE2_2_MEMBERS(T, Q, E0,E1) \ + struct { detail::_swizzle<2, T, Q, 0,0,-1,-2> E0 ## E0; }; \ + struct { detail::_swizzle<2, T, Q, 0,1,-1,-2> E0 ## E1; }; \ + struct { detail::_swizzle<2, T, Q, 1,0,-1,-2> E1 ## E0; }; \ + struct { detail::_swizzle<2, T, Q, 1,1,-1,-2> E1 ## E1; }; + +#define GLM_SWIZZLE2_3_MEMBERS(T, Q, E0,E1) \ + struct { detail::_swizzle<3,T, Q, 0,0,0,-1> E0 ## E0 ## E0; }; \ + struct { detail::_swizzle<3,T, Q, 0,0,1,-1> E0 ## E0 ## E1; }; \ + struct { detail::_swizzle<3,T, Q, 0,1,0,-1> E0 ## E1 ## E0; }; \ + struct { detail::_swizzle<3,T, Q, 0,1,1,-1> E0 ## E1 ## E1; }; \ + struct { detail::_swizzle<3,T, Q, 1,0,0,-1> E1 ## E0 ## E0; }; \ + struct { detail::_swizzle<3,T, Q, 1,0,1,-1> E1 ## E0 ## E1; }; \ + struct { detail::_swizzle<3,T, Q, 1,1,0,-1> E1 ## E1 ## E0; }; \ + struct { detail::_swizzle<3,T, Q, 1,1,1,-1> E1 ## E1 ## E1; }; + +#define GLM_SWIZZLE2_4_MEMBERS(T, Q, E0,E1) \ + struct { detail::_swizzle<4,T, Q, 0,0,0,0> E0 ## E0 ## E0 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 0,0,0,1> E0 ## E0 ## E0 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 0,0,1,0> E0 ## E0 ## E1 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 0,0,1,1> E0 ## E0 ## E1 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 0,1,0,0> E0 ## E1 ## E0 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 0,1,0,1> E0 
## E1 ## E0 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 0,1,1,0> E0 ## E1 ## E1 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 0,1,1,1> E0 ## E1 ## E1 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 1,0,0,0> E1 ## E0 ## E0 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 1,0,0,1> E1 ## E0 ## E0 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 1,0,1,0> E1 ## E0 ## E1 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 1,0,1,1> E1 ## E0 ## E1 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 1,1,0,0> E1 ## E1 ## E0 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 1,1,0,1> E1 ## E1 ## E0 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 1,1,1,0> E1 ## E1 ## E1 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 1,1,1,1> E1 ## E1 ## E1 ## E1; }; + +#define GLM_SWIZZLE3_2_MEMBERS(T, Q, E0,E1,E2) \ + struct { detail::_swizzle<2,T, Q, 0,0,-1,-2> E0 ## E0; }; \ + struct { detail::_swizzle<2,T, Q, 0,1,-1,-2> E0 ## E1; }; \ + struct { detail::_swizzle<2,T, Q, 0,2,-1,-2> E0 ## E2; }; \ + struct { detail::_swizzle<2,T, Q, 1,0,-1,-2> E1 ## E0; }; \ + struct { detail::_swizzle<2,T, Q, 1,1,-1,-2> E1 ## E1; }; \ + struct { detail::_swizzle<2,T, Q, 1,2,-1,-2> E1 ## E2; }; \ + struct { detail::_swizzle<2,T, Q, 2,0,-1,-2> E2 ## E0; }; \ + struct { detail::_swizzle<2,T, Q, 2,1,-1,-2> E2 ## E1; }; \ + struct { detail::_swizzle<2,T, Q, 2,2,-1,-2> E2 ## E2; }; + +#define GLM_SWIZZLE3_3_MEMBERS(T, Q ,E0,E1,E2) \ + struct { detail::_swizzle<3, T, Q, 0,0,0,-1> E0 ## E0 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 0,0,1,-1> E0 ## E0 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 0,0,2,-1> E0 ## E0 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 0,1,0,-1> E0 ## E1 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 0,1,1,-1> E0 ## E1 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 0,1,2,-1> E0 ## E1 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 0,2,0,-1> E0 ## E2 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 0,2,1,-1> E0 ## E2 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 0,2,2,-1> E0 ## E2 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 1,0,0,-1> E1 ## E0 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 1,0,1,-1> E1 ## E0 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 1,0,2,-1> E1 ## E0 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 1,1,0,-1> E1 ## E1 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 1,1,1,-1> E1 ## E1 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 1,1,2,-1> E1 ## E1 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 1,2,0,-1> E1 ## E2 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 1,2,1,-1> E1 ## E2 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 1,2,2,-1> E1 ## E2 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 2,0,0,-1> E2 ## E0 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 2,0,1,-1> E2 ## E0 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 2,0,2,-1> E2 ## E0 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 2,1,0,-1> E2 ## E1 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 2,1,1,-1> E2 ## E1 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 2,1,2,-1> E2 ## E1 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 2,2,0,-1> E2 ## E2 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 2,2,1,-1> E2 ## E2 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 2,2,2,-1> E2 ## E2 ## E2; }; + +#define GLM_SWIZZLE3_4_MEMBERS(T, Q, E0,E1,E2) \ + struct { detail::_swizzle<4,T, Q, 0,0,0,0> E0 ## E0 ## E0 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 0,0,0,1> E0 ## E0 ## E0 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 0,0,0,2> E0 ## E0 ## E0 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 0,0,1,0> E0 ## 
E0 ## E1 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 0,0,1,1> E0 ## E0 ## E1 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 0,0,1,2> E0 ## E0 ## E1 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 0,0,2,0> E0 ## E0 ## E2 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 0,0,2,1> E0 ## E0 ## E2 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 0,0,2,2> E0 ## E0 ## E2 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 0,1,0,0> E0 ## E1 ## E0 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 0,1,0,1> E0 ## E1 ## E0 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 0,1,0,2> E0 ## E1 ## E0 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 0,1,1,0> E0 ## E1 ## E1 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 0,1,1,1> E0 ## E1 ## E1 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 0,1,1,2> E0 ## E1 ## E1 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 0,1,2,0> E0 ## E1 ## E2 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 0,1,2,1> E0 ## E1 ## E2 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 0,1,2,2> E0 ## E1 ## E2 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 0,2,0,0> E0 ## E2 ## E0 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 0,2,0,1> E0 ## E2 ## E0 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 0,2,0,2> E0 ## E2 ## E0 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 0,2,1,0> E0 ## E2 ## E1 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 0,2,1,1> E0 ## E2 ## E1 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 0,2,1,2> E0 ## E2 ## E1 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 0,2,2,0> E0 ## E2 ## E2 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 0,2,2,1> E0 ## E2 ## E2 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 0,2,2,2> E0 ## E2 ## E2 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 1,0,0,0> E1 ## E0 ## E0 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 1,0,0,1> E1 ## E0 ## E0 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 1,0,0,2> E1 ## E0 ## E0 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 1,0,1,0> E1 ## E0 ## E1 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 1,0,1,1> E1 ## E0 ## E1 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 1,0,1,2> E1 ## E0 ## E1 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 1,0,2,0> E1 ## E0 ## E2 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 1,0,2,1> E1 ## E0 ## E2 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 1,0,2,2> E1 ## E0 ## E2 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 1,1,0,0> E1 ## E1 ## E0 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 1,1,0,1> E1 ## E1 ## E0 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 1,1,0,2> E1 ## E1 ## E0 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 1,1,1,0> E1 ## E1 ## E1 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 1,1,1,1> E1 ## E1 ## E1 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 1,1,1,2> E1 ## E1 ## E1 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 1,1,2,0> E1 ## E1 ## E2 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 1,1,2,1> E1 ## E1 ## E2 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 1,1,2,2> E1 ## E1 ## E2 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 1,2,0,0> E1 ## E2 ## E0 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 1,2,0,1> E1 ## E2 ## E0 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 1,2,0,2> E1 ## E2 ## E0 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 1,2,1,0> E1 ## E2 ## E1 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 1,2,1,1> E1 ## E2 ## E1 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 1,2,1,2> E1 ## E2 ## E1 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 1,2,2,0> E1 ## E2 ## E2 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 1,2,2,1> E1 ## E2 ## E2 ## E1; }; \ + 
struct { detail::_swizzle<4,T, Q, 1,2,2,2> E1 ## E2 ## E2 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 2,0,0,0> E2 ## E0 ## E0 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 2,0,0,1> E2 ## E0 ## E0 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 2,0,0,2> E2 ## E0 ## E0 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 2,0,1,0> E2 ## E0 ## E1 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 2,0,1,1> E2 ## E0 ## E1 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 2,0,1,2> E2 ## E0 ## E1 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 2,0,2,0> E2 ## E0 ## E2 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 2,0,2,1> E2 ## E0 ## E2 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 2,0,2,2> E2 ## E0 ## E2 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 2,1,0,0> E2 ## E1 ## E0 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 2,1,0,1> E2 ## E1 ## E0 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 2,1,0,2> E2 ## E1 ## E0 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 2,1,1,0> E2 ## E1 ## E1 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 2,1,1,1> E2 ## E1 ## E1 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 2,1,1,2> E2 ## E1 ## E1 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 2,1,2,0> E2 ## E1 ## E2 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 2,1,2,1> E2 ## E1 ## E2 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 2,1,2,2> E2 ## E1 ## E2 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 2,2,0,0> E2 ## E2 ## E0 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 2,2,0,1> E2 ## E2 ## E0 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 2,2,0,2> E2 ## E2 ## E0 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 2,2,1,0> E2 ## E2 ## E1 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 2,2,1,1> E2 ## E2 ## E1 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 2,2,1,2> E2 ## E2 ## E1 ## E2; }; \ + struct { detail::_swizzle<4,T, Q, 2,2,2,0> E2 ## E2 ## E2 ## E0; }; \ + struct { detail::_swizzle<4,T, Q, 2,2,2,1> E2 ## E2 ## E2 ## E1; }; \ + struct { detail::_swizzle<4,T, Q, 2,2,2,2> E2 ## E2 ## E2 ## E2; }; + +#define GLM_SWIZZLE4_2_MEMBERS(T, Q, E0,E1,E2,E3) \ + struct { detail::_swizzle<2,T, Q, 0,0,-1,-2> E0 ## E0; }; \ + struct { detail::_swizzle<2,T, Q, 0,1,-1,-2> E0 ## E1; }; \ + struct { detail::_swizzle<2,T, Q, 0,2,-1,-2> E0 ## E2; }; \ + struct { detail::_swizzle<2,T, Q, 0,3,-1,-2> E0 ## E3; }; \ + struct { detail::_swizzle<2,T, Q, 1,0,-1,-2> E1 ## E0; }; \ + struct { detail::_swizzle<2,T, Q, 1,1,-1,-2> E1 ## E1; }; \ + struct { detail::_swizzle<2,T, Q, 1,2,-1,-2> E1 ## E2; }; \ + struct { detail::_swizzle<2,T, Q, 1,3,-1,-2> E1 ## E3; }; \ + struct { detail::_swizzle<2,T, Q, 2,0,-1,-2> E2 ## E0; }; \ + struct { detail::_swizzle<2,T, Q, 2,1,-1,-2> E2 ## E1; }; \ + struct { detail::_swizzle<2,T, Q, 2,2,-1,-2> E2 ## E2; }; \ + struct { detail::_swizzle<2,T, Q, 2,3,-1,-2> E2 ## E3; }; \ + struct { detail::_swizzle<2,T, Q, 3,0,-1,-2> E3 ## E0; }; \ + struct { detail::_swizzle<2,T, Q, 3,1,-1,-2> E3 ## E1; }; \ + struct { detail::_swizzle<2,T, Q, 3,2,-1,-2> E3 ## E2; }; \ + struct { detail::_swizzle<2,T, Q, 3,3,-1,-2> E3 ## E3; }; + +#define GLM_SWIZZLE4_3_MEMBERS(T, Q, E0,E1,E2,E3) \ + struct { detail::_swizzle<3, T, Q, 0,0,0,-1> E0 ## E0 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 0,0,1,-1> E0 ## E0 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 0,0,2,-1> E0 ## E0 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 0,0,3,-1> E0 ## E0 ## E3; }; \ + struct { detail::_swizzle<3, T, Q, 0,1,0,-1> E0 ## E1 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 0,1,1,-1> E0 ## E1 ## E1; }; \ + struct { detail::_swizzle<3, T, 
Q, 0,1,2,-1> E0 ## E1 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 0,1,3,-1> E0 ## E1 ## E3; }; \ + struct { detail::_swizzle<3, T, Q, 0,2,0,-1> E0 ## E2 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 0,2,1,-1> E0 ## E2 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 0,2,2,-1> E0 ## E2 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 0,2,3,-1> E0 ## E2 ## E3; }; \ + struct { detail::_swizzle<3, T, Q, 0,3,0,-1> E0 ## E3 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 0,3,1,-1> E0 ## E3 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 0,3,2,-1> E0 ## E3 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 0,3,3,-1> E0 ## E3 ## E3; }; \ + struct { detail::_swizzle<3, T, Q, 1,0,0,-1> E1 ## E0 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 1,0,1,-1> E1 ## E0 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 1,0,2,-1> E1 ## E0 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 1,0,3,-1> E1 ## E0 ## E3; }; \ + struct { detail::_swizzle<3, T, Q, 1,1,0,-1> E1 ## E1 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 1,1,1,-1> E1 ## E1 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 1,1,2,-1> E1 ## E1 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 1,1,3,-1> E1 ## E1 ## E3; }; \ + struct { detail::_swizzle<3, T, Q, 1,2,0,-1> E1 ## E2 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 1,2,1,-1> E1 ## E2 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 1,2,2,-1> E1 ## E2 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 1,2,3,-1> E1 ## E2 ## E3; }; \ + struct { detail::_swizzle<3, T, Q, 1,3,0,-1> E1 ## E3 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 1,3,1,-1> E1 ## E3 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 1,3,2,-1> E1 ## E3 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 1,3,3,-1> E1 ## E3 ## E3; }; \ + struct { detail::_swizzle<3, T, Q, 2,0,0,-1> E2 ## E0 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 2,0,1,-1> E2 ## E0 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 2,0,2,-1> E2 ## E0 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 2,0,3,-1> E2 ## E0 ## E3; }; \ + struct { detail::_swizzle<3, T, Q, 2,1,0,-1> E2 ## E1 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 2,1,1,-1> E2 ## E1 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 2,1,2,-1> E2 ## E1 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 2,1,3,-1> E2 ## E1 ## E3; }; \ + struct { detail::_swizzle<3, T, Q, 2,2,0,-1> E2 ## E2 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 2,2,1,-1> E2 ## E2 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 2,2,2,-1> E2 ## E2 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 2,2,3,-1> E2 ## E2 ## E3; }; \ + struct { detail::_swizzle<3, T, Q, 2,3,0,-1> E2 ## E3 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 2,3,1,-1> E2 ## E3 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 2,3,2,-1> E2 ## E3 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 2,3,3,-1> E2 ## E3 ## E3; }; \ + struct { detail::_swizzle<3, T, Q, 3,0,0,-1> E3 ## E0 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 3,0,1,-1> E3 ## E0 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 3,0,2,-1> E3 ## E0 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 3,0,3,-1> E3 ## E0 ## E3; }; \ + struct { detail::_swizzle<3, T, Q, 3,1,0,-1> E3 ## E1 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 3,1,1,-1> E3 ## E1 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 3,1,2,-1> E3 ## E1 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 3,1,3,-1> E3 ## E1 ## E3; }; \ + struct { detail::_swizzle<3, T, Q, 3,2,0,-1> E3 ## E2 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 3,2,1,-1> E3 ## E2 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 3,2,2,-1> E3 ## 
E2 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 3,2,3,-1> E3 ## E2 ## E3; }; \ + struct { detail::_swizzle<3, T, Q, 3,3,0,-1> E3 ## E3 ## E0; }; \ + struct { detail::_swizzle<3, T, Q, 3,3,1,-1> E3 ## E3 ## E1; }; \ + struct { detail::_swizzle<3, T, Q, 3,3,2,-1> E3 ## E3 ## E2; }; \ + struct { detail::_swizzle<3, T, Q, 3,3,3,-1> E3 ## E3 ## E3; }; + +#define GLM_SWIZZLE4_4_MEMBERS(T, Q, E0,E1,E2,E3) \ + struct { detail::_swizzle<4, T, Q, 0,0,0,0> E0 ## E0 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,0,0,1> E0 ## E0 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,0,0,2> E0 ## E0 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 0,0,0,3> E0 ## E0 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 0,0,1,0> E0 ## E0 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,0,1,1> E0 ## E0 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,0,1,2> E0 ## E0 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 0,0,1,3> E0 ## E0 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 0,0,2,0> E0 ## E0 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,0,2,1> E0 ## E0 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,0,2,2> E0 ## E0 ## E2 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 0,0,2,3> E0 ## E0 ## E2 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 0,0,3,0> E0 ## E0 ## E3 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,0,3,1> E0 ## E0 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,0,3,2> E0 ## E0 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 0,0,3,3> E0 ## E0 ## E3 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,0,0> E0 ## E1 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,0,1> E0 ## E1 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,0,2> E0 ## E1 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,0,3> E0 ## E1 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,1,0> E0 ## E1 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,1,1> E0 ## E1 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,1,2> E0 ## E1 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,1,3> E0 ## E1 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,2,0> E0 ## E1 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,2,1> E0 ## E1 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,2,2> E0 ## E1 ## E2 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,2,3> E0 ## E1 ## E2 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,3,0> E0 ## E1 ## E3 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,3,1> E0 ## E1 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,3,2> E0 ## E1 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 0,1,3,3> E0 ## E1 ## E3 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 0,2,0,0> E0 ## E2 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,2,0,1> E0 ## E2 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,2,0,2> E0 ## E2 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 0,2,0,3> E0 ## E2 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 0,2,1,0> E0 ## E2 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,2,1,1> E0 ## E2 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,2,1,2> E0 ## E2 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 0,2,1,3> E0 ## E2 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 0,2,2,0> E0 ## E2 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,2,2,1> E0 ## E2 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,2,2,2> E0 ## E2 ## E2 ## E2; }; \ + 
struct { detail::_swizzle<4, T, Q, 0,2,2,3> E0 ## E2 ## E2 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 0,2,3,0> E0 ## E2 ## E3 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,2,3,1> E0 ## E2 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,2,3,2> E0 ## E2 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 0,2,3,3> E0 ## E2 ## E3 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,0,0> E0 ## E3 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,0,1> E0 ## E3 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,0,2> E0 ## E3 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,0,3> E0 ## E3 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,1,0> E0 ## E3 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,1,1> E0 ## E3 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,1,2> E0 ## E3 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,1,3> E0 ## E3 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,2,0> E0 ## E3 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,2,1> E0 ## E3 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,2,2> E0 ## E3 ## E2 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,2,3> E0 ## E3 ## E2 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,3,0> E0 ## E3 ## E3 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,3,1> E0 ## E3 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,3,2> E0 ## E3 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 0,3,3,3> E0 ## E3 ## E3 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,0,0> E1 ## E0 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,0,1> E1 ## E0 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,0,2> E1 ## E0 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,0,3> E1 ## E0 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,1,0> E1 ## E0 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,1,1> E1 ## E0 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,1,2> E1 ## E0 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,1,3> E1 ## E0 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,2,0> E1 ## E0 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,2,1> E1 ## E0 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,2,2> E1 ## E0 ## E2 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,2,3> E1 ## E0 ## E2 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,3,0> E1 ## E0 ## E3 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,3,1> E1 ## E0 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,3,2> E1 ## E0 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,0,3,3> E1 ## E0 ## E3 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,0,0> E1 ## E1 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,0,1> E1 ## E1 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,0,2> E1 ## E1 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,0,3> E1 ## E1 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,1,0> E1 ## E1 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,1,1> E1 ## E1 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,1,2> E1 ## E1 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,1,3> E1 ## E1 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,2,0> E1 ## E1 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,2,1> E1 ## E1 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,2,2> E1 ## E1 ## E2 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,2,3> E1 ## 
E1 ## E2 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,3,0> E1 ## E1 ## E3 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,3,1> E1 ## E1 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,3,2> E1 ## E1 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,1,3,3> E1 ## E1 ## E3 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,0,0> E1 ## E2 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,0,1> E1 ## E2 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,0,2> E1 ## E2 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,0,3> E1 ## E2 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,1,0> E1 ## E2 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,1,1> E1 ## E2 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,1,2> E1 ## E2 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,1,3> E1 ## E2 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,2,0> E1 ## E2 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,2,1> E1 ## E2 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,2,2> E1 ## E2 ## E2 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,2,3> E1 ## E2 ## E2 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,3,0> E1 ## E2 ## E3 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,3,1> E1 ## E2 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,3,2> E1 ## E2 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,2,3,3> E1 ## E2 ## E3 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,0,0> E1 ## E3 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,0,1> E1 ## E3 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,0,2> E1 ## E3 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,0,3> E1 ## E3 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,1,0> E1 ## E3 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,1,1> E1 ## E3 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,1,2> E1 ## E3 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,1,3> E1 ## E3 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,2,0> E1 ## E3 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,2,1> E1 ## E3 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,2,2> E1 ## E3 ## E2 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,2,3> E1 ## E3 ## E2 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,3,0> E1 ## E3 ## E3 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,3,1> E1 ## E3 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,3,2> E1 ## E3 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 1,3,3,3> E1 ## E3 ## E3 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 2,0,0,0> E2 ## E0 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,0,0,1> E2 ## E0 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,0,0,2> E2 ## E0 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,0,0,3> E2 ## E0 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 2,0,1,0> E2 ## E0 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,0,1,1> E2 ## E0 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,0,1,2> E2 ## E0 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,0,1,3> E2 ## E0 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 2,0,2,0> E2 ## E0 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,0,2,1> E2 ## E0 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,0,2,2> E2 ## E0 ## E2 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,0,2,3> E2 ## E0 ## E2 ## E3; }; \ + struct { 
detail::_swizzle<4, T, Q, 2,0,3,0> E2 ## E0 ## E3 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,0,3,1> E2 ## E0 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,0,3,2> E2 ## E0 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,0,3,3> E2 ## E0 ## E3 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,0,0> E2 ## E1 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,0,1> E2 ## E1 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,0,2> E2 ## E1 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,0,3> E2 ## E1 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,1,0> E2 ## E1 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,1,1> E2 ## E1 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,1,2> E2 ## E1 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,1,3> E2 ## E1 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,2,0> E2 ## E1 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,2,1> E2 ## E1 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,2,2> E2 ## E1 ## E2 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,2,3> E2 ## E1 ## E2 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,3,0> E2 ## E1 ## E3 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,3,1> E2 ## E1 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,3,2> E2 ## E1 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,1,3,3> E2 ## E1 ## E3 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,0,0> E2 ## E2 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,0,1> E2 ## E2 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,0,2> E2 ## E2 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,0,3> E2 ## E2 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,1,0> E2 ## E2 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,1,1> E2 ## E2 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,1,2> E2 ## E2 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,1,3> E2 ## E2 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,2,0> E2 ## E2 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,2,1> E2 ## E2 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,2,2> E2 ## E2 ## E2 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,2,3> E2 ## E2 ## E2 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,3,0> E2 ## E2 ## E3 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,3,1> E2 ## E2 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,3,2> E2 ## E2 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,2,3,3> E2 ## E2 ## E3 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,0,0> E2 ## E3 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,0,1> E2 ## E3 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,0,2> E2 ## E3 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,0,3> E2 ## E3 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,1,0> E2 ## E3 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,1,1> E2 ## E3 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,1,2> E2 ## E3 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,1,3> E2 ## E3 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,2,0> E2 ## E3 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,2,1> E2 ## E3 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,2,2> E2 ## E3 ## E2 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,2,3> E2 ## E3 ## E2 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,3,0> E2 ## E3 ## E3 
## E0; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,3,1> E2 ## E3 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,3,2> E2 ## E3 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 2,3,3,3> E2 ## E3 ## E3 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,0,0> E3 ## E0 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,0,1> E3 ## E0 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,0,2> E3 ## E0 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,0,3> E3 ## E0 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,1,0> E3 ## E0 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,1,1> E3 ## E0 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,1,2> E3 ## E0 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,1,3> E3 ## E0 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,2,0> E3 ## E0 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,2,1> E3 ## E0 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,2,2> E3 ## E0 ## E2 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,2,3> E3 ## E0 ## E2 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,3,0> E3 ## E0 ## E3 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,3,1> E3 ## E0 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,3,2> E3 ## E0 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,0,3,3> E3 ## E0 ## E3 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,0,0> E3 ## E1 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,0,1> E3 ## E1 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,0,2> E3 ## E1 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,0,3> E3 ## E1 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,1,0> E3 ## E1 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,1,1> E3 ## E1 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,1,2> E3 ## E1 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,1,3> E3 ## E1 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,2,0> E3 ## E1 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,2,1> E3 ## E1 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,2,2> E3 ## E1 ## E2 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,2,3> E3 ## E1 ## E2 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,3,0> E3 ## E1 ## E3 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,3,1> E3 ## E1 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,3,2> E3 ## E1 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,1,3,3> E3 ## E1 ## E3 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,2,0,0> E3 ## E2 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 3,2,0,1> E3 ## E2 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,2,0,2> E3 ## E2 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,2,0,3> E3 ## E2 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,2,1,0> E3 ## E2 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 3,2,1,1> E3 ## E2 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,2,1,2> E3 ## E2 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,2,1,3> E3 ## E2 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,2,2,0> E3 ## E2 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 3,2,2,1> E3 ## E2 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,2,2,2> E3 ## E2 ## E2 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,2,2,3> E3 ## E2 ## E2 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,2,3,0> E3 ## E2 ## E3 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 
3,2,3,1> E3 ## E2 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,2,3,2> E3 ## E2 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,2,3,3> E3 ## E2 ## E3 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,0,0> E3 ## E3 ## E0 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,0,1> E3 ## E3 ## E0 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,0,2> E3 ## E3 ## E0 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,0,3> E3 ## E3 ## E0 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,1,0> E3 ## E3 ## E1 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,1,1> E3 ## E3 ## E1 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,1,2> E3 ## E3 ## E1 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,1,3> E3 ## E3 ## E1 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,2,0> E3 ## E3 ## E2 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,2,1> E3 ## E3 ## E2 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,2,2> E3 ## E3 ## E2 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,2,3> E3 ## E3 ## E2 ## E3; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,3,0> E3 ## E3 ## E3 ## E0; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,3,1> E3 ## E3 ## E3 ## E1; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,3,2> E3 ## E3 ## E3 ## E2; }; \ + struct { detail::_swizzle<4, T, Q, 3,3,3,3> E3 ## E3 ## E3 ## E3; }; diff --git a/MacroLibX/third_party/glm/detail/_swizzle_func.hpp b/MacroLibX/third_party/glm/detail/_swizzle_func.hpp new file mode 100644 index 0000000..d93c6af --- /dev/null +++ b/MacroLibX/third_party/glm/detail/_swizzle_func.hpp @@ -0,0 +1,682 @@ +#pragma once + +#define GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, CONST, A, B) \ + vec<2, T, Q> A ## B() CONST \ + { \ + return vec<2, T, Q>(this->A, this->B); \ + } + +#define GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, CONST, A, B, C) \ + vec<3, T, Q> A ## B ## C() CONST \ + { \ + return vec<3, T, Q>(this->A, this->B, this->C); \ + } + +#define GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, CONST, A, B, C, D) \ + vec<4, T, Q> A ## B ## C ## D() CONST \ + { \ + return vec<4, T, Q>(this->A, this->B, this->C, this->D); \ + } + +#define GLM_SWIZZLE_GEN_VEC2_ENTRY_DEF(T, P, L, CONST, A, B) \ + template \ + vec vec::A ## B() CONST \ + { \ + return vec<2, T, Q>(this->A, this->B); \ + } + +#define GLM_SWIZZLE_GEN_VEC3_ENTRY_DEF(T, P, L, CONST, A, B, C) \ + template \ + vec<3, T, Q> vec::A ## B ## C() CONST \ + { \ + return vec<3, T, Q>(this->A, this->B, this->C); \ + } + +#define GLM_SWIZZLE_GEN_VEC4_ENTRY_DEF(T, P, L, CONST, A, B, C, D) \ + template \ + vec<4, T, Q> vec::A ## B ## C ## D() CONST \ + { \ + return vec<4, T, Q>(this->A, this->B, this->C, this->D); \ + } + +#define GLM_MUTABLE + +#define GLM_SWIZZLE_GEN_REF2_FROM_VEC2_SWIZZLE(T, P, A, B) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, 2, GLM_MUTABLE, A, B) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, 2, GLM_MUTABLE, B, A) + +#define GLM_SWIZZLE_GEN_REF_FROM_VEC2(T, P) \ + GLM_SWIZZLE_GEN_REF2_FROM_VEC2_SWIZZLE(T, P, x, y) \ + GLM_SWIZZLE_GEN_REF2_FROM_VEC2_SWIZZLE(T, P, r, g) \ + GLM_SWIZZLE_GEN_REF2_FROM_VEC2_SWIZZLE(T, P, s, t) + +#define GLM_SWIZZLE_GEN_REF2_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, A, B) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, A, C) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, B, A) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, B, C) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, C, A) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, C, B) + +#define GLM_SWIZZLE_GEN_REF3_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ + 
GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, GLM_MUTABLE, A, B, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, GLM_MUTABLE, A, C, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, GLM_MUTABLE, B, A, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, GLM_MUTABLE, B, C, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, GLM_MUTABLE, C, A, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, GLM_MUTABLE, C, B, A) + +#define GLM_SWIZZLE_GEN_REF_FROM_VEC3_COMP(T, P, A, B, C) \ + GLM_SWIZZLE_GEN_REF3_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ + GLM_SWIZZLE_GEN_REF2_FROM_VEC3_SWIZZLE(T, P, A, B, C) + +#define GLM_SWIZZLE_GEN_REF_FROM_VEC3(T, P) \ + GLM_SWIZZLE_GEN_REF_FROM_VEC3_COMP(T, P, x, y, z) \ + GLM_SWIZZLE_GEN_REF_FROM_VEC3_COMP(T, P, r, g, b) \ + GLM_SWIZZLE_GEN_REF_FROM_VEC3_COMP(T, P, s, t, p) + +#define GLM_SWIZZLE_GEN_REF2_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, A, B) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, A, C) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, A, D) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, B, A) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, B, C) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, B, D) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, C, A) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, C, B) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, C, D) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, D, A) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, D, B) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, D, C) + +#define GLM_SWIZZLE_GEN_REF3_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , A, B, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , A, B, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , A, C, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , A, C, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , A, D, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , A, D, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , B, A, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , B, A, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , B, C, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , B, C, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , B, D, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , B, D, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , C, A, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , C, A, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , C, B, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , C, B, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , C, D, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , C, D, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , D, A, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , D, A, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , D, B, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , D, B, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , D, C, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , D, C, B) + +#define GLM_SWIZZLE_GEN_REF4_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , A, C, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , A, C, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , A, D, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , A, D, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , A, B, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , A, B, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , B, C, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , B, C, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , B, D, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , B, D, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , B, A, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , B, A, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , C, B, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , C, B, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , C, D, A, B) \ + 
GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , C, D, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , C, A, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , C, A, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , D, C, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , D, C, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , D, A, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , D, A, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , D, B, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , D, B, C, A) + +#define GLM_SWIZZLE_GEN_REF_FROM_VEC4_COMP(T, P, A, B, C, D) \ + GLM_SWIZZLE_GEN_REF2_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ + GLM_SWIZZLE_GEN_REF3_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ + GLM_SWIZZLE_GEN_REF4_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) + +#define GLM_SWIZZLE_GEN_REF_FROM_VEC4(T, P) \ + GLM_SWIZZLE_GEN_REF_FROM_VEC4_COMP(T, P, x, y, z, w) \ + GLM_SWIZZLE_GEN_REF_FROM_VEC4_COMP(T, P, r, g, b, a) \ + GLM_SWIZZLE_GEN_REF_FROM_VEC4_COMP(T, P, s, t, p, q) + +#define GLM_SWIZZLE_GEN_VEC2_FROM_VEC2_SWIZZLE(T, P, A, B) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, A) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, B) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, A) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, B) + +#define GLM_SWIZZLE_GEN_VEC3_FROM_VEC2_SWIZZLE(T, P, A, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, B) + +#define GLM_SWIZZLE_GEN_VEC4_FROM_VEC2_SWIZZLE(T, P, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, B) + +#define GLM_SWIZZLE_GEN_VEC_FROM_VEC2_COMP(T, P, A, B) \ + GLM_SWIZZLE_GEN_VEC2_FROM_VEC2_SWIZZLE(T, P, A, B) \ + GLM_SWIZZLE_GEN_VEC3_FROM_VEC2_SWIZZLE(T, P, A, B) \ + GLM_SWIZZLE_GEN_VEC4_FROM_VEC2_SWIZZLE(T, P, A, B) + +#define GLM_SWIZZLE_GEN_VEC_FROM_VEC2(T, P) \ + GLM_SWIZZLE_GEN_VEC_FROM_VEC2_COMP(T, P, x, y) \ + GLM_SWIZZLE_GEN_VEC_FROM_VEC2_COMP(T, P, r, g) \ + GLM_SWIZZLE_GEN_VEC_FROM_VEC2_COMP(T, P, s, t) + +#define GLM_SWIZZLE_GEN_VEC2_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, A) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, B) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, C) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, A) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, B) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, C) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, C, A) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, C, B) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, 
const, C, C) + +#define GLM_SWIZZLE_GEN_VEC3_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, C, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, C, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, C, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, C, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, C, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, C, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, A, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, A, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, A, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, B, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, B, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, B, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, C, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, C, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, C, C) + +#define GLM_SWIZZLE_GEN_VEC4_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, C, B) \ + 
GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, C, C) + +#define GLM_SWIZZLE_GEN_VEC_FROM_VEC3_COMP(T, P, A, B, C) \ + GLM_SWIZZLE_GEN_VEC2_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ + GLM_SWIZZLE_GEN_VEC3_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ + GLM_SWIZZLE_GEN_VEC4_FROM_VEC3_SWIZZLE(T, P, A, B, C) + +#define GLM_SWIZZLE_GEN_VEC_FROM_VEC3(T, P) \ + GLM_SWIZZLE_GEN_VEC_FROM_VEC3_COMP(T, P, x, y, z) \ + GLM_SWIZZLE_GEN_VEC_FROM_VEC3_COMP(T, P, r, g, b) \ + GLM_SWIZZLE_GEN_VEC_FROM_VEC3_COMP(T, P, s, t, p) + +#define GLM_SWIZZLE_GEN_VEC2_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, A) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, B) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, C) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, D) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, A) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, B) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, C) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, D) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, C, A) \ + 
GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, C, B) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, C, C) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, C, D) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, D, A) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, D, B) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, D, C) \ + GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, D, D) + +#define GLM_SWIZZLE_GEN_VEC3_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, C, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, C, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, C, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, C, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, D, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, D, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, D, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, D, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, C, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, C, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, C, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, C, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, D, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, D, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, D, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, D, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, A, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, A, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, A, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, A, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, B, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, B, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, B, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, B, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, C, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, C, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, C, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, C, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, D, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, D, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, D, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, D, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, A, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, A, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, A, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, A, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, B, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, B, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, B, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, B, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, C, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, C, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, C, C) \ + 
GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, C, D) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, D, A) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, D, B) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, D, C) \ + GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, D, D) + +#define GLM_SWIZZLE_GEN_VEC4_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, D, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, D, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, D, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, 
P, const, A, D, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, D, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, D, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, D, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, D, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, C, A) \ + 
GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, D, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, D, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, D, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, D, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, B, D) \ + 
GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, D, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, D, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, D, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, B, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, B, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, C, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, C, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, C, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, C, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, D, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, D, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, D, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, D, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, A, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, A, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, A, C) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, A, D) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, B, A) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, B, B) \ + GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, B, C) \ + 
GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, B, D) \
+	GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, C, A) \
+	GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, C, B) \
+	GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, C, C) \
+	GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, C, D) \
+	GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, D, A) \
+	GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, D, B) \
+	GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, D, C) \
+	GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, D, D)
+
+#define GLM_SWIZZLE_GEN_VEC_FROM_VEC4_COMP(T, P, A, B, C, D) \
+	GLM_SWIZZLE_GEN_VEC2_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \
+	GLM_SWIZZLE_GEN_VEC3_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \
+	GLM_SWIZZLE_GEN_VEC4_FROM_VEC4_SWIZZLE(T, P, A, B, C, D)
+
+#define GLM_SWIZZLE_GEN_VEC_FROM_VEC4(T, P) \
+	GLM_SWIZZLE_GEN_VEC_FROM_VEC4_COMP(T, P, x, y, z, w) \
+	GLM_SWIZZLE_GEN_VEC_FROM_VEC4_COMP(T, P, r, g, b, a) \
+	GLM_SWIZZLE_GEN_VEC_FROM_VEC4_COMP(T, P, s, t, p, q)
+
diff --git a/MacroLibX/third_party/glm/detail/_vectorize.hpp b/MacroLibX/third_party/glm/detail/_vectorize.hpp
new file mode 100644
index 0000000..1fcaec3
--- /dev/null
+++ b/MacroLibX/third_party/glm/detail/_vectorize.hpp
@@ -0,0 +1,162 @@
+#pragma once
+
+namespace glm{
+namespace detail
+{
+	template<template<length_t L, typename T, qualifier Q> class vec, length_t L, typename R, typename T, qualifier Q>
+	struct functor1{};
+
+	template<template<length_t L, typename T, qualifier Q> class vec, typename R, typename T, qualifier Q>
+	struct functor1<vec, 1, R, T, Q>
+	{
+		GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<1, R, Q> call(R (*Func) (T x), vec<1, T, Q> const& v)
+		{
+			return vec<1, R, Q>(Func(v.x));
+		}
+	};
+
+	template<template<length_t L, typename T, qualifier Q> class vec, typename R, typename T, qualifier Q>
+	struct functor1<vec, 2, R, T, Q>
+	{
+		GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<2, R, Q> call(R (*Func) (T x), vec<2, T, Q> const& v)
+		{
+			return vec<2, R, Q>(Func(v.x), Func(v.y));
+		}
+	};
+
+	template<template<length_t L, typename T, qualifier Q> class vec, typename R, typename T, qualifier Q>
+	struct functor1<vec, 3, R, T, Q>
+	{
+		GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<3, R, Q> call(R (*Func) (T x), vec<3, T, Q> const& v)
+		{
+			return vec<3, R, Q>(Func(v.x), Func(v.y), Func(v.z));
+		}
+	};
+
+	template<template<length_t L, typename T, qualifier Q> class vec, typename R, typename T, qualifier Q>
+	struct functor1<vec, 4, R, T, Q>
+	{
+		GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, R, Q> call(R (*Func) (T x), vec<4, T, Q> const& v)
+		{
+			return vec<4, R, Q>(Func(v.x), Func(v.y), Func(v.z), Func(v.w));
+		}
+	};
+
+	template<template<length_t L, typename T, qualifier Q> class vec, length_t L, typename T, qualifier Q>
+	struct functor2{};
+
+	template<template<length_t L, typename T, qualifier Q> class vec, typename T, qualifier Q>
+	struct functor2<vec, 1, T, Q>
+	{
+		GLM_FUNC_QUALIFIER static vec<1, T, Q> call(T (*Func) (T x, T y), vec<1, T, Q> const& a, vec<1, T, Q> const& b)
+		{
+			return vec<1, T, Q>(Func(a.x, b.x));
+		}
+	};
+
+	template<template<length_t L, typename T, qualifier Q> class vec, typename T, qualifier Q>
+	struct functor2<vec, 2, T, Q>
+	{
+		GLM_FUNC_QUALIFIER static vec<2, T, Q> call(T (*Func) (T x, T y), vec<2, T, Q> const& a, vec<2, T, Q> const& b)
+		{
+			return vec<2, T, Q>(Func(a.x, b.x), Func(a.y, b.y));
+		}
+	};
+
+	template<template<length_t L, typename T, qualifier Q> class vec, typename T, qualifier Q>
+	struct functor2<vec, 3, T, Q>
+	{
+		GLM_FUNC_QUALIFIER static vec<3, T, Q> call(T (*Func) (T x, T y), vec<3, T, Q> const& a, vec<3, T, Q> const& b)
+		{
+			return vec<3, T, Q>(Func(a.x, b.x), Func(a.y, b.y), Func(a.z, b.z));
+		}
+	};
+
+	template<template<length_t L, typename T, qualifier Q> class vec, typename T, qualifier Q>
+	struct functor2<vec, 4, T, Q>
+	{
+		GLM_FUNC_QUALIFIER static vec<4, T, Q> call(T (*Func) (T x, T y), vec<4, T, Q> const& a, vec<4, T, Q> const& b)
+		{
+			return vec<4, T, Q>(Func(a.x, b.x), Func(a.y, b.y), Func(a.z, b.z), Func(a.w, b.w));
+		}
+	};
+
+	template<template<length_t L, typename T, qualifier Q> class vec, length_t L, typename T, qualifier Q>
+	struct functor2_vec_sca{};
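+	// NOTE (editorial annotation, not part of the upstream GLM sources):
+	// functor1/functor2 are GLM's "vectorize" helpers. A scalar function is
+	// lifted to a component-wise vector function by selecting, at compile
+	// time, the specialization that matches the vector length and applying
+	// the function to each component. A minimal sketch of a hypothetical
+	// caller follows, kept out of the build with #if 0; func_common.inl
+	// uses exactly this dispatch for floor(), ceil(), trunc() and round():
+#if 0
+	template<length_t L, typename T, qualifier Q>
+	vec<L, T, Q> floor_all(vec<L, T, Q> const& v)
+	{
+		// applies std::floor to every component of v (floor_all is a
+		// hypothetical name used for illustration only)
+		return detail::functor1<vec, L, T, T, Q>::call(std::floor, v);
+	}
+#endif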
+
+	template<template<length_t L, typename T, qualifier Q> class vec, typename T, qualifier Q>
+	struct functor2_vec_sca<vec, 1, T, Q>
+	{
+		GLM_FUNC_QUALIFIER static vec<1, T, Q> call(T (*Func) (T x, T y), vec<1, T, Q> const& a, T b)
+		{
+			return vec<1, T, Q>(Func(a.x, b));
+		}
+	};
+
+	template<template<length_t L, typename T, qualifier Q> class vec, typename T, qualifier Q>
+	struct functor2_vec_sca<vec, 2, T, Q>
+	{
+		GLM_FUNC_QUALIFIER static vec<2, T, Q> call(T (*Func) (T x, T y), vec<2, T, Q> const& a, T b)
+		{
+			return vec<2, T, Q>(Func(a.x, b), Func(a.y, b));
+		}
+	};
+
+	template<template<length_t L, typename T, qualifier Q> class vec, typename T, qualifier Q>
+	struct functor2_vec_sca<vec, 3, T, Q>
+	{
+		GLM_FUNC_QUALIFIER static vec<3, T, Q> call(T (*Func) (T x, T y), vec<3, T, Q> const& a, T b)
+		{
+			return vec<3, T, Q>(Func(a.x, b), Func(a.y, b), Func(a.z, b));
+		}
+	};
+
+	template<template<length_t L, typename T, qualifier Q> class vec, typename T, qualifier Q>
+	struct functor2_vec_sca<vec, 4, T, Q>
+	{
+		GLM_FUNC_QUALIFIER static vec<4, T, Q> call(T (*Func) (T x, T y), vec<4, T, Q> const& a, T b)
+		{
+			return vec<4, T, Q>(Func(a.x, b), Func(a.y, b), Func(a.z, b), Func(a.w, b));
+		}
+	};
+
+	template<length_t L, typename T, qualifier Q>
+	struct functor2_vec_int {};
+
+	template<typename T, qualifier Q>
+	struct functor2_vec_int<1, T, Q>
+	{
+		GLM_FUNC_QUALIFIER static vec<1, int, Q> call(int (*Func) (T x, int y), vec<1, T, Q> const& a, vec<1, int, Q> const& b)
+		{
+			return vec<1, int, Q>(Func(a.x, b.x));
+		}
+	};
+
+	template<typename T, qualifier Q>
+	struct functor2_vec_int<2, T, Q>
+	{
+		GLM_FUNC_QUALIFIER static vec<2, int, Q> call(int (*Func) (T x, int y), vec<2, T, Q> const& a, vec<2, int, Q> const& b)
+		{
+			return vec<2, int, Q>(Func(a.x, b.x), Func(a.y, b.y));
+		}
+	};
+
+	template<typename T, qualifier Q>
+	struct functor2_vec_int<3, T, Q>
+	{
+		GLM_FUNC_QUALIFIER static vec<3, int, Q> call(int (*Func) (T x, int y), vec<3, T, Q> const& a, vec<3, int, Q> const& b)
+		{
+			return vec<3, int, Q>(Func(a.x, b.x), Func(a.y, b.y), Func(a.z, b.z));
+		}
+	};
+
+	template<typename T, qualifier Q>
+	struct functor2_vec_int<4, T, Q>
+	{
+		GLM_FUNC_QUALIFIER static vec<4, int, Q> call(int (*Func) (T x, int y), vec<4, T, Q> const& a, vec<4, int, Q> const& b)
+		{
+			return vec<4, int, Q>(Func(a.x, b.x), Func(a.y, b.y), Func(a.z, b.z), Func(a.w, b.w));
+		}
+	};
+}//namespace detail
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/detail/compute_common.hpp b/MacroLibX/third_party/glm/detail/compute_common.hpp
new file mode 100644
index 0000000..cc24b9e
--- /dev/null
+++ b/MacroLibX/third_party/glm/detail/compute_common.hpp
@@ -0,0 +1,50 @@
+#pragma once
+
+#include "setup.hpp"
+#include <limits>
+
+namespace glm{
+namespace detail
+{
+	template<typename genFIType, bool /*is_signed*/>
+	struct compute_abs
+	{};
+
+	template<typename genFIType>
+	struct compute_abs<genFIType, true>
+	{
+		GLM_FUNC_QUALIFIER GLM_CONSTEXPR static genFIType call(genFIType x)
+		{
+			GLM_STATIC_ASSERT(
+				std::numeric_limits<genFIType>::is_iec559 || std::numeric_limits<genFIType>::is_signed,
+				"'abs' only accept floating-point and integer scalar or vector inputs");
+
+			return x >= genFIType(0) ? x : -x;
+			// TODO, perf comp with: *(((int *) &x) + 1) &= 0x7fffffff;
+		}
+	};
+
+#if GLM_COMPILER & GLM_COMPILER_CUDA
+	template<>
+	struct compute_abs<float, true>
+	{
+		GLM_FUNC_QUALIFIER GLM_CONSTEXPR static float call(float x)
+		{
+			return fabsf(x);
+		}
+	};
+#endif
+
+	template<typename genFIType>
+	struct compute_abs<genFIType, false>
+	{
+		GLM_FUNC_QUALIFIER GLM_CONSTEXPR static genFIType call(genFIType x)
+		{
+			GLM_STATIC_ASSERT(
+				(!std::numeric_limits<genFIType>::is_signed && std::numeric_limits<genFIType>::is_integer),
+				"'abs' only accept floating-point and integer scalar or vector inputs");
+			return x;
+		}
+	};
+}//namespace detail
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/detail/compute_vector_relational.hpp b/MacroLibX/third_party/glm/detail/compute_vector_relational.hpp
new file mode 100644
index 0000000..167b634
--- /dev/null
+++ b/MacroLibX/third_party/glm/detail/compute_vector_relational.hpp
@@ -0,0 +1,30 @@
+#pragma once
+
+//#include "compute_common.hpp"
+#include "setup.hpp"
+#include <limits>
+
+namespace glm{
+namespace detail
+{
+	template<typename T, bool isFloat>
+	struct compute_equal
+	{
+		GLM_FUNC_QUALIFIER GLM_CONSTEXPR static bool call(T a, T b)
+		{
+			return a == b;
+		}
+	};
+/*
+	template<typename T>
+	struct compute_equal<T, true>
+	{
+		GLM_FUNC_QUALIFIER GLM_CONSTEXPR static bool call(T a, T b)
+		{
+			return detail::compute_abs<T, std::numeric_limits<T>::is_signed>::call(b - a) <= static_cast<T>(0);
+			//return std::memcmp(&a, &b, sizeof(T)) == 0;
+		}
+	};
+*/
+}//namespace detail
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/detail/func_common.inl b/MacroLibX/third_party/glm/detail/func_common.inl
new file mode 100644
index 0000000..4b5f144
--- /dev/null
+++ b/MacroLibX/third_party/glm/detail/func_common.inl
@@ -0,0 +1,792 @@
+/// @ref core
+/// @file glm/detail/func_common.inl
+
+#include "../vector_relational.hpp"
+#include "compute_common.hpp"
+#include "type_vec1.hpp"
+#include "type_vec2.hpp"
+#include "type_vec3.hpp"
+#include "type_vec4.hpp"
+#include "_vectorize.hpp"
+#include <limits>
+
+namespace glm
+{
+	// min
+	template<typename genType>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType min(genType x, genType y)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<genType>::is_iec559 || std::numeric_limits<genType>::is_integer, "'min' only accept floating-point or integer inputs");
+		return (y < x) ? y : x;
+	}
+
+	// max
+	template<typename genType>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType max(genType x, genType y)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<genType>::is_iec559 || std::numeric_limits<genType>::is_integer, "'max' only accept floating-point or integer inputs");
+
+		return (x < y) ? y : x;
+	}
+
+	// abs
+	template<>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR int abs(int x)
+	{
+		int const y = x >> (sizeof(int) * 8 - 1);
+		return (x ^ y) - y;
+	}
+
+	// round
+# if GLM_HAS_CXX11_STL
+	using ::std::round;
+# else
+	template<typename genType>
+	GLM_FUNC_QUALIFIER genType round(genType x)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<genType>::is_iec559, "'round' only accept floating-point inputs");
+
+		return x < static_cast<genType>(0) ? static_cast<genType>(int(x - static_cast<genType>(0.5))) : static_cast<genType>(int(x + static_cast<genType>(0.5)));
+	}
+# endif
+
+	// trunc
+# if GLM_HAS_CXX11_STL
+	using ::std::trunc;
+# else
+	template<typename genType>
+	GLM_FUNC_QUALIFIER genType trunc(genType x)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<genType>::is_iec559, "'trunc' only accept floating-point inputs");
+
+		return x < static_cast<genType>(0) ?
-std::floor(-x) : std::floor(x); + } +# endif + +}//namespace glm + +namespace glm{ +namespace detail +{ + template + struct compute_abs_vector + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec call(vec const& x) + { + return detail::functor1::call(abs, x); + } + }; + + template + struct compute_mix_vector + { + GLM_FUNC_QUALIFIER static vec call(vec const& x, vec const& y, vec const& a) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'mix' only accept floating-point inputs for the interpolator a"); + + return vec(vec(x) * (static_cast(1) - a) + vec(y) * a); + } + }; + + template + struct compute_mix_vector + { + GLM_FUNC_QUALIFIER static vec call(vec const& x, vec const& y, vec const& a) + { + vec Result; + for(length_t i = 0; i < x.length(); ++i) + Result[i] = a[i] ? y[i] : x[i]; + return Result; + } + }; + + template + struct compute_mix_scalar + { + GLM_FUNC_QUALIFIER static vec call(vec const& x, vec const& y, U const& a) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'mix' only accept floating-point inputs for the interpolator a"); + + return vec(vec(x) * (static_cast(1) - a) + vec(y) * a); + } + }; + + template + struct compute_mix_scalar + { + GLM_FUNC_QUALIFIER static vec call(vec const& x, vec const& y, bool const& a) + { + return a ? y : x; + } + }; + + template + struct compute_mix + { + GLM_FUNC_QUALIFIER static T call(T const& x, T const& y, U const& a) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'mix' only accept floating-point inputs for the interpolator a"); + + return static_cast(static_cast(x) * (static_cast(1) - a) + static_cast(y) * a); + } + }; + + template + struct compute_mix + { + GLM_FUNC_QUALIFIER static T call(T const& x, T const& y, bool const& a) + { + return a ? y : x; + } + }; + + template + struct compute_sign + { + GLM_FUNC_QUALIFIER static vec call(vec const& x) + { + return vec(glm::lessThan(vec(0), x)) - vec(glm::lessThan(x, vec(0))); + } + }; + +# if GLM_ARCH == GLM_ARCH_X86 + template + struct compute_sign + { + GLM_FUNC_QUALIFIER static vec call(vec const& x) + { + T const Shift(static_cast(sizeof(T) * 8 - 1)); + vec const y(vec::type, Q>(-x) >> typename detail::make_unsigned::type(Shift)); + + return (x >> Shift) | y; + } + }; +# endif + + template + struct compute_floor + { + GLM_FUNC_QUALIFIER static vec call(vec const& x) + { + return detail::functor1::call(std::floor, x); + } + }; + + template + struct compute_ceil + { + GLM_FUNC_QUALIFIER static vec call(vec const& x) + { + return detail::functor1::call(std::ceil, x); + } + }; + + template + struct compute_fract + { + GLM_FUNC_QUALIFIER static vec call(vec const& x) + { + return x - floor(x); + } + }; + + template + struct compute_trunc + { + GLM_FUNC_QUALIFIER static vec call(vec const& x) + { + return detail::functor1::call(trunc, x); + } + }; + + template + struct compute_round + { + GLM_FUNC_QUALIFIER static vec call(vec const& x) + { + return detail::functor1::call(round, x); + } + }; + + template + struct compute_mod + { + GLM_FUNC_QUALIFIER static vec call(vec const& a, vec const& b) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'mod' only accept floating-point inputs. 
Include for integer inputs."); + return a - b * floor(a / b); + } + }; + + template + struct compute_min_vector + { + GLM_FUNC_QUALIFIER static vec call(vec const& x, vec const& y) + { + return detail::functor2::call(min, x, y); + } + }; + + template + struct compute_max_vector + { + GLM_FUNC_QUALIFIER static vec call(vec const& x, vec const& y) + { + return detail::functor2::call(max, x, y); + } + }; + + template + struct compute_clamp_vector + { + GLM_FUNC_QUALIFIER static vec call(vec const& x, vec const& minVal, vec const& maxVal) + { + return min(max(x, minVal), maxVal); + } + }; + + template + struct compute_step_vector + { + GLM_FUNC_QUALIFIER static vec call(vec const& edge, vec const& x) + { + return mix(vec(1), vec(0), glm::lessThan(x, edge)); + } + }; + + template + struct compute_smoothstep_vector + { + GLM_FUNC_QUALIFIER static vec call(vec const& edge0, vec const& edge1, vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'smoothstep' only accept floating-point inputs"); + vec const tmp(clamp((x - edge0) / (edge1 - edge0), static_cast(0), static_cast(1))); + return tmp * tmp * (static_cast(3) - static_cast(2) * tmp); + } + }; +}//namespace detail + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genFIType abs(genFIType x) + { + return detail::compute_abs::is_signed>::call(x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec abs(vec const& x) + { + return detail::compute_abs_vector::value>::call(x); + } + + // sign + // fast and works for any type + template + GLM_FUNC_QUALIFIER genFIType sign(genFIType x) + { + GLM_STATIC_ASSERT( + std::numeric_limits::is_iec559 || (std::numeric_limits::is_signed && std::numeric_limits::is_integer), + "'sign' only accept signed inputs"); + + return detail::compute_sign<1, genFIType, defaultp, + std::numeric_limits::is_iec559, detail::is_aligned::value>::call(vec<1, genFIType>(x)).x; + } + + template + GLM_FUNC_QUALIFIER vec sign(vec const& x) + { + GLM_STATIC_ASSERT( + std::numeric_limits::is_iec559 || (std::numeric_limits::is_signed && std::numeric_limits::is_integer), + "'sign' only accept signed inputs"); + + return detail::compute_sign::is_iec559, detail::is_aligned::value>::call(x); + } + + // floor + using ::std::floor; + template + GLM_FUNC_QUALIFIER vec floor(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'floor' only accept floating-point inputs."); + return detail::compute_floor::value>::call(x); + } + + template + GLM_FUNC_QUALIFIER vec trunc(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'trunc' only accept floating-point inputs"); + return detail::compute_trunc::value>::call(x); + } + + template + GLM_FUNC_QUALIFIER vec round(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'round' only accept floating-point inputs"); + return detail::compute_round::value>::call(x); + } + +/* + // roundEven + template + GLM_FUNC_QUALIFIER genType roundEven(genType const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'roundEven' only accept floating-point inputs"); + + return genType(int(x + genType(int(x) % 2))); + } +*/ + + // roundEven + template + GLM_FUNC_QUALIFIER genType roundEven(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'roundEven' only accept floating-point inputs"); + + int Integer = static_cast(x); + genType IntegerPart = static_cast(Integer); + genType FractionalPart = fract(x); + + if(FractionalPart > static_cast(0.5) || FractionalPart < static_cast(0.5)) + { + return 
round(x); + } + else if((Integer % 2) == 0) + { + return IntegerPart; + } + else if(x <= static_cast(0)) // Work around... + { + return IntegerPart - static_cast(1); + } + else + { + return IntegerPart + static_cast(1); + } + //else // Bug on MinGW 4.5.2 + //{ + // return mix(IntegerPart + genType(-1), IntegerPart + genType(1), x <= genType(0)); + //} + } + + template + GLM_FUNC_QUALIFIER vec roundEven(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'roundEven' only accept floating-point inputs"); + return detail::functor1::call(roundEven, x); + } + + // ceil + using ::std::ceil; + template + GLM_FUNC_QUALIFIER vec ceil(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'ceil' only accept floating-point inputs"); + return detail::compute_ceil::value>::call(x); + } + + // fract + template + GLM_FUNC_QUALIFIER genType fract(genType x) + { + return fract(vec<1, genType>(x)).x; + } + + template + GLM_FUNC_QUALIFIER vec fract(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fract' only accept floating-point inputs"); + return detail::compute_fract::value>::call(x); + } + + // mod + template + GLM_FUNC_QUALIFIER genType mod(genType x, genType y) + { +# if GLM_COMPILER & GLM_COMPILER_CUDA + // Another Cuda compiler bug https://github.com/g-truc/glm/issues/530 + vec<1, genType, defaultp> Result(mod(vec<1, genType, defaultp>(x), y)); + return Result.x; +# else + return mod(vec<1, genType, defaultp>(x), y).x; +# endif + } + + template + GLM_FUNC_QUALIFIER vec mod(vec const& x, T y) + { + return detail::compute_mod::value>::call(x, vec(y)); + } + + template + GLM_FUNC_QUALIFIER vec mod(vec const& x, vec const& y) + { + return detail::compute_mod::value>::call(x, y); + } + + // modf + template + GLM_FUNC_QUALIFIER genType modf(genType x, genType & i) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'modf' only accept floating-point inputs"); + return std::modf(x, &i); + } + + template + GLM_FUNC_QUALIFIER vec<1, T, Q> modf(vec<1, T, Q> const& x, vec<1, T, Q> & i) + { + return vec<1, T, Q>( + modf(x.x, i.x)); + } + + template + GLM_FUNC_QUALIFIER vec<2, T, Q> modf(vec<2, T, Q> const& x, vec<2, T, Q> & i) + { + return vec<2, T, Q>( + modf(x.x, i.x), + modf(x.y, i.y)); + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> modf(vec<3, T, Q> const& x, vec<3, T, Q> & i) + { + return vec<3, T, Q>( + modf(x.x, i.x), + modf(x.y, i.y), + modf(x.z, i.z)); + } + + template + GLM_FUNC_QUALIFIER vec<4, T, Q> modf(vec<4, T, Q> const& x, vec<4, T, Q> & i) + { + return vec<4, T, Q>( + modf(x.x, i.x), + modf(x.y, i.y), + modf(x.z, i.z), + modf(x.w, i.w)); + } + + //// Only valid if (INT_MIN <= x-y <= INT_MAX) + //// min(x,y) + //r = y + ((x - y) & ((x - y) >> (sizeof(int) * + //CHAR_BIT - 1))); + //// max(x,y) + //r = x - ((x - y) & ((x - y) >> (sizeof(int) * + //CHAR_BIT - 1))); + + // min + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec min(vec const& a, T b) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer, "'min' only accept floating-point or integer inputs"); + return detail::compute_min_vector::value>::call(a, vec(b)); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec min(vec const& a, vec const& b) + { + return detail::compute_min_vector::value>::call(a, b); + } + + // max + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec max(vec const& a, T b) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer, "'max' only accept floating-point or integer inputs"); + 
return detail::compute_max_vector::value>::call(a, vec(b)); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec max(vec const& a, vec const& b) + { + return detail::compute_max_vector::value>::call(a, b); + } + + // clamp + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType clamp(genType x, genType minVal, genType maxVal) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer, "'clamp' only accept floating-point or integer inputs"); + return min(max(x, minVal), maxVal); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec clamp(vec const& x, T minVal, T maxVal) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer, "'clamp' only accept floating-point or integer inputs"); + return detail::compute_clamp_vector::value>::call(x, vec(minVal), vec(maxVal)); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec clamp(vec const& x, vec const& minVal, vec const& maxVal) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer, "'clamp' only accept floating-point or integer inputs"); + return detail::compute_clamp_vector::value>::call(x, minVal, maxVal); + } + + template + GLM_FUNC_QUALIFIER genTypeT mix(genTypeT x, genTypeT y, genTypeU a) + { + return detail::compute_mix::call(x, y, a); + } + + template + GLM_FUNC_QUALIFIER vec mix(vec const& x, vec const& y, U a) + { + return detail::compute_mix_scalar::value>::call(x, y, a); + } + + template + GLM_FUNC_QUALIFIER vec mix(vec const& x, vec const& y, vec const& a) + { + return detail::compute_mix_vector::value>::call(x, y, a); + } + + // step + template + GLM_FUNC_QUALIFIER genType step(genType edge, genType x) + { + return mix(static_cast(1), static_cast(0), x < edge); + } + + template + GLM_FUNC_QUALIFIER vec step(T edge, vec const& x) + { + return detail::compute_step_vector::value>::call(vec(edge), x); + } + + template + GLM_FUNC_QUALIFIER vec step(vec const& edge, vec const& x) + { + return detail::compute_step_vector::value>::call(edge, x); + } + + // smoothstep + template + GLM_FUNC_QUALIFIER genType smoothstep(genType edge0, genType edge1, genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'smoothstep' only accept floating-point inputs"); + + genType const tmp(clamp((x - edge0) / (edge1 - edge0), genType(0), genType(1))); + return tmp * tmp * (genType(3) - genType(2) * tmp); + } + + template + GLM_FUNC_QUALIFIER vec smoothstep(T edge0, T edge1, vec const& x) + { + return detail::compute_smoothstep_vector::value>::call(vec(edge0), vec(edge1), x); + } + + template + GLM_FUNC_QUALIFIER vec smoothstep(vec const& edge0, vec const& edge1, vec const& x) + { + return detail::compute_smoothstep_vector::value>::call(edge0, edge1, x); + } + +# if GLM_HAS_CXX11_STL + using std::isnan; +# else + template + GLM_FUNC_QUALIFIER bool isnan(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isnan' only accept floating-point inputs"); + +# if GLM_HAS_CXX11_STL + return std::isnan(x); +# elif GLM_COMPILER & GLM_COMPILER_VC + return _isnan(x) != 0; +# elif GLM_COMPILER & GLM_COMPILER_INTEL +# if GLM_PLATFORM & GLM_PLATFORM_WINDOWS + return _isnan(x) != 0; +# else + return ::isnan(x) != 0; +# endif +# elif (GLM_COMPILER & (GLM_COMPILER_GCC | GLM_COMPILER_CLANG)) && (GLM_PLATFORM & GLM_PLATFORM_ANDROID) && __cplusplus < 201103L + return _isnan(x) != 0; +# elif GLM_COMPILER & GLM_COMPILER_CUDA + return ::isnan(x) != 0; +# else + return std::isnan(x); +# endif + } +# endif + + template 
+ GLM_FUNC_QUALIFIER vec isnan(vec const& v) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isnan' only accept floating-point inputs"); + + vec Result; + for (length_t l = 0; l < v.length(); ++l) + Result[l] = glm::isnan(v[l]); + return Result; + } + +# if GLM_HAS_CXX11_STL + using std::isinf; +# else + template + GLM_FUNC_QUALIFIER bool isinf(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isinf' only accept floating-point inputs"); + +# if GLM_HAS_CXX11_STL + return std::isinf(x); +# elif GLM_COMPILER & (GLM_COMPILER_INTEL | GLM_COMPILER_VC) +# if(GLM_PLATFORM & GLM_PLATFORM_WINDOWS) + return _fpclass(x) == _FPCLASS_NINF || _fpclass(x) == _FPCLASS_PINF; +# else + return ::isinf(x); +# endif +# elif GLM_COMPILER & (GLM_COMPILER_GCC | GLM_COMPILER_CLANG) +# if(GLM_PLATFORM & GLM_PLATFORM_ANDROID && __cplusplus < 201103L) + return _isinf(x) != 0; +# else + return std::isinf(x); +# endif +# elif GLM_COMPILER & GLM_COMPILER_CUDA + // http://developer.download.nvidia.com/compute/cuda/4_2/rel/toolkit/docs/online/group__CUDA__MATH__DOUBLE_g13431dd2b40b51f9139cbb7f50c18fab.html#g13431dd2b40b51f9139cbb7f50c18fab + return ::isinf(double(x)) != 0; +# else + return std::isinf(x); +# endif + } +# endif + + template + GLM_FUNC_QUALIFIER vec isinf(vec const& v) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isinf' only accept floating-point inputs"); + + vec Result; + for (length_t l = 0; l < v.length(); ++l) + Result[l] = glm::isinf(v[l]); + return Result; + } + + GLM_FUNC_QUALIFIER int floatBitsToInt(float const& v) + { + union + { + float in; + int out; + } u; + + u.in = v; + + return u.out; + } + + template + GLM_FUNC_QUALIFIER vec floatBitsToInt(vec const& v) + { + return reinterpret_cast&>(const_cast&>(v)); + } + + GLM_FUNC_QUALIFIER uint floatBitsToUint(float const& v) + { + union + { + float in; + uint out; + } u; + + u.in = v; + + return u.out; + } + + template + GLM_FUNC_QUALIFIER vec floatBitsToUint(vec const& v) + { + return reinterpret_cast&>(const_cast&>(v)); + } + + GLM_FUNC_QUALIFIER float intBitsToFloat(int const& v) + { + union + { + int in; + float out; + } u; + + u.in = v; + + return u.out; + } + + template + GLM_FUNC_QUALIFIER vec intBitsToFloat(vec const& v) + { + return reinterpret_cast&>(const_cast&>(v)); + } + + GLM_FUNC_QUALIFIER float uintBitsToFloat(uint const& v) + { + union + { + uint in; + float out; + } u; + + u.in = v; + + return u.out; + } + + template + GLM_FUNC_QUALIFIER vec uintBitsToFloat(vec const& v) + { + return reinterpret_cast&>(const_cast&>(v)); + } + +# if GLM_HAS_CXX11_STL + using std::fma; +# else + template + GLM_FUNC_QUALIFIER genType fma(genType const& a, genType const& b, genType const& c) + { + return a * b + c; + } +# endif + + template + GLM_FUNC_QUALIFIER genType frexp(genType x, int& exp) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'frexp' only accept floating-point inputs"); + + return std::frexp(x, &exp); + } + + template + GLM_FUNC_QUALIFIER vec frexp(vec const& v, vec& exp) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'frexp' only accept floating-point inputs"); + + vec Result; + for (length_t l = 0; l < v.length(); ++l) + Result[l] = std::frexp(v[l], &exp[l]); + return Result; + } + + template + GLM_FUNC_QUALIFIER genType ldexp(genType const& x, int const& exp) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'ldexp' only accept floating-point inputs"); + + return std::ldexp(x, exp); + } + + template + GLM_FUNC_QUALIFIER vec ldexp(vec const& v, vec const& exp) + { + 
GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'ldexp' only accept floating-point inputs"); + + vec Result; + for (length_t l = 0; l < v.length(); ++l) + Result[l] = std::ldexp(v[l], exp[l]); + return Result; + } +}//namespace glm + +#if GLM_CONFIG_SIMD == GLM_ENABLE +# include "func_common_simd.inl" +#endif diff --git a/MacroLibX/third_party/glm/detail/func_common_simd.inl b/MacroLibX/third_party/glm/detail/func_common_simd.inl new file mode 100644 index 0000000..ce0032d --- /dev/null +++ b/MacroLibX/third_party/glm/detail/func_common_simd.inl @@ -0,0 +1,231 @@ +/// @ref core +/// @file glm/detail/func_common_simd.inl + +#if GLM_ARCH & GLM_ARCH_SSE2_BIT + +#include "../simd/common.h" + +#include + +namespace glm{ +namespace detail +{ + template + struct compute_abs_vector<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) + { + vec<4, float, Q> result; + result.data = glm_vec4_abs(v.data); + return result; + } + }; + + template + struct compute_abs_vector<4, int, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, int, Q> call(vec<4, int, Q> const& v) + { + vec<4, int, Q> result; + result.data = glm_ivec4_abs(v.data); + return result; + } + }; + + template + struct compute_floor<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) + { + vec<4, float, Q> result; + result.data = glm_vec4_floor(v.data); + return result; + } + }; + + template + struct compute_ceil<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) + { + vec<4, float, Q> result; + result.data = glm_vec4_ceil(v.data); + return result; + } + }; + + template + struct compute_fract<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) + { + vec<4, float, Q> result; + result.data = glm_vec4_fract(v.data); + return result; + } + }; + + template + struct compute_round<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) + { + vec<4, float, Q> result; + result.data = glm_vec4_round(v.data); + return result; + } + }; + + template + struct compute_mod<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& x, vec<4, float, Q> const& y) + { + vec<4, float, Q> result; + result.data = glm_vec4_mod(x.data, y.data); + return result; + } + }; + + template + struct compute_min_vector<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v1, vec<4, float, Q> const& v2) + { + vec<4, float, Q> result; + result.data = _mm_min_ps(v1.data, v2.data); + return result; + } + }; + + template + struct compute_min_vector<4, int, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, int, Q> call(vec<4, int, Q> const& v1, vec<4, int, Q> const& v2) + { + vec<4, int, Q> result; + result.data = _mm_min_epi32(v1.data, v2.data); + return result; + } + }; + + template + struct compute_min_vector<4, uint, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, uint, Q> call(vec<4, uint, Q> const& v1, vec<4, uint, Q> const& v2) + { + vec<4, uint, Q> result; + result.data = _mm_min_epu32(v1.data, v2.data); + return result; + } + }; + + template + struct compute_max_vector<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v1, vec<4, float, Q> const& v2) + { + vec<4, float, Q> result; + result.data = _mm_max_ps(v1.data, v2.data); + return result; + } + }; + + template + struct compute_max_vector<4, int, Q, true> + { + 
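/* Reviewer note (flagging, not fixing): _mm_min_epi32/_mm_max_epi32 and their
   epu32 counterparts used by these int/uint specializations are SSE4.1
   intrinsics, yet this file is guarded only by GLM_ARCH_SSE2_BIT; a strict
   SSE2-only build may reject them. Worth checking against upstream GLM before
   relying on the integer min/max/clamp SIMD paths. */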
GLM_FUNC_QUALIFIER static vec<4, int, Q> call(vec<4, int, Q> const& v1, vec<4, int, Q> const& v2) + { + vec<4, int, Q> result; + result.data = _mm_max_epi32(v1.data, v2.data); + return result; + } + }; + + template + struct compute_max_vector<4, uint, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, uint, Q> call(vec<4, uint, Q> const& v1, vec<4, uint, Q> const& v2) + { + vec<4, uint, Q> result; + result.data = _mm_max_epu32(v1.data, v2.data); + return result; + } + }; + + template + struct compute_clamp_vector<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& x, vec<4, float, Q> const& minVal, vec<4, float, Q> const& maxVal) + { + vec<4, float, Q> result; + result.data = _mm_min_ps(_mm_max_ps(x.data, minVal.data), maxVal.data); + return result; + } + }; + + template + struct compute_clamp_vector<4, int, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, int, Q> call(vec<4, int, Q> const& x, vec<4, int, Q> const& minVal, vec<4, int, Q> const& maxVal) + { + vec<4, int, Q> result; + result.data = _mm_min_epi32(_mm_max_epi32(x.data, minVal.data), maxVal.data); + return result; + } + }; + + template + struct compute_clamp_vector<4, uint, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, uint, Q> call(vec<4, uint, Q> const& x, vec<4, uint, Q> const& minVal, vec<4, uint, Q> const& maxVal) + { + vec<4, uint, Q> result; + result.data = _mm_min_epu32(_mm_max_epu32(x.data, minVal.data), maxVal.data); + return result; + } + }; + + template + struct compute_mix_vector<4, float, bool, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& x, vec<4, float, Q> const& y, vec<4, bool, Q> const& a) + { + __m128i const Load = _mm_set_epi32(-static_cast(a.w), -static_cast(a.z), -static_cast(a.y), -static_cast(a.x)); + __m128 const Mask = _mm_castsi128_ps(Load); + + vec<4, float, Q> Result; +# if 0 && GLM_ARCH & GLM_ARCH_AVX + Result.data = _mm_blendv_ps(x.data, y.data, Mask); +# else + Result.data = _mm_or_ps(_mm_and_ps(Mask, y.data), _mm_andnot_ps(Mask, x.data)); +# endif + return Result; + } + }; +/* FIXME + template + struct compute_step_vector + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& edge, vec<4, float, Q> const& x) + { + vec<4, float, Q> Result; + result.data = glm_vec4_step(edge.data, x.data); + return result; + } + }; +*/ + template + struct compute_smoothstep_vector<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& edge0, vec<4, float, Q> const& edge1, vec<4, float, Q> const& x) + { + vec<4, float, Q> Result; + Result.data = glm_vec4_smoothstep(edge0.data, edge1.data, x.data); + return Result; + } + }; +}//namespace detail +}//namespace glm + +#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/MacroLibX/third_party/glm/detail/func_exponential.inl b/MacroLibX/third_party/glm/detail/func_exponential.inl new file mode 100644 index 0000000..2040d41 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/func_exponential.inl @@ -0,0 +1,152 @@ +/// @ref core +/// @file glm/detail/func_exponential.inl + +#include "../vector_relational.hpp" +#include "_vectorize.hpp" +#include +#include +#include + +namespace glm{ +namespace detail +{ +# if GLM_HAS_CXX11_STL + using std::log2; +# else + template + genType log2(genType Value) + { + return std::log(Value) * static_cast(1.4426950408889634073599246810019); + } +# endif + + template + struct compute_log2 + { + GLM_FUNC_QUALIFIER static vec call(vec const& v) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, 
"'log2' only accept floating-point inputs. Include for integer inputs."); + + return detail::functor1::call(log2, v); + } + }; + + template + struct compute_sqrt + { + GLM_FUNC_QUALIFIER static vec call(vec const& x) + { + return detail::functor1::call(std::sqrt, x); + } + }; + + template + struct compute_inversesqrt + { + GLM_FUNC_QUALIFIER static vec call(vec const& x) + { + return static_cast(1) / sqrt(x); + } + }; + + template + struct compute_inversesqrt + { + GLM_FUNC_QUALIFIER static vec call(vec const& x) + { + vec tmp(x); + vec xhalf(tmp * 0.5f); + vec* p = reinterpret_cast*>(const_cast*>(&x)); + vec i = vec(0x5f375a86) - (*p >> vec(1)); + vec* ptmp = reinterpret_cast*>(&i); + tmp = *ptmp; + tmp = tmp * (1.5f - xhalf * tmp * tmp); + return tmp; + } + }; +}//namespace detail + + // pow + using std::pow; + template + GLM_FUNC_QUALIFIER vec pow(vec const& base, vec const& exponent) + { + return detail::functor2::call(pow, base, exponent); + } + + // exp + using std::exp; + template + GLM_FUNC_QUALIFIER vec exp(vec const& x) + { + return detail::functor1::call(exp, x); + } + + // log + using std::log; + template + GLM_FUNC_QUALIFIER vec log(vec const& x) + { + return detail::functor1::call(log, x); + } + +# if GLM_HAS_CXX11_STL + using std::exp2; +# else + //exp2, ln2 = 0.69314718055994530941723212145818f + template + GLM_FUNC_QUALIFIER genType exp2(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'exp2' only accept floating-point inputs"); + + return std::exp(static_cast(0.69314718055994530941723212145818) * x); + } +# endif + + template + GLM_FUNC_QUALIFIER vec exp2(vec const& x) + { + return detail::functor1::call(exp2, x); + } + + // log2, ln2 = 0.69314718055994530941723212145818f + template + GLM_FUNC_QUALIFIER genType log2(genType x) + { + return log2(vec<1, genType>(x)).x; + } + + template + GLM_FUNC_QUALIFIER vec log2(vec const& x) + { + return detail::compute_log2::is_iec559, detail::is_aligned::value>::call(x); + } + + // sqrt + using std::sqrt; + template + GLM_FUNC_QUALIFIER vec sqrt(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'sqrt' only accept floating-point inputs"); + return detail::compute_sqrt::value>::call(x); + } + + // inversesqrt + template + GLM_FUNC_QUALIFIER genType inversesqrt(genType x) + { + return static_cast(1) / sqrt(x); + } + + template + GLM_FUNC_QUALIFIER vec inversesqrt(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'inversesqrt' only accept floating-point inputs"); + return detail::compute_inversesqrt::value>::call(x); + } +}//namespace glm + +#if GLM_CONFIG_SIMD == GLM_ENABLE +# include "func_exponential_simd.inl" +#endif + diff --git a/MacroLibX/third_party/glm/detail/func_exponential_simd.inl b/MacroLibX/third_party/glm/detail/func_exponential_simd.inl new file mode 100644 index 0000000..fb78951 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/func_exponential_simd.inl @@ -0,0 +1,37 @@ +/// @ref core +/// @file glm/detail/func_exponential_simd.inl + +#include "../simd/exponential.h" + +#if GLM_ARCH & GLM_ARCH_SSE2_BIT + +namespace glm{ +namespace detail +{ + template + struct compute_sqrt<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) + { + vec<4, float, Q> Result; + Result.data = _mm_sqrt_ps(v.data); + return Result; + } + }; + +# if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE + template<> + struct compute_sqrt<4, float, aligned_lowp, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, aligned_lowp> call(vec<4, float, 
aligned_lowp> const& v) + { + vec<4, float, aligned_lowp> Result; + Result.data = glm_vec4_sqrt_lowp(v.data); + return Result; + } + }; +# endif +}//namespace detail +}//namespace glm + +#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/MacroLibX/third_party/glm/detail/func_geometric.inl b/MacroLibX/third_party/glm/detail/func_geometric.inl new file mode 100644 index 0000000..9cde28f --- /dev/null +++ b/MacroLibX/third_party/glm/detail/func_geometric.inl @@ -0,0 +1,243 @@ +#include "../exponential.hpp" +#include "../common.hpp" + +namespace glm{ +namespace detail +{ + template + struct compute_length + { + GLM_FUNC_QUALIFIER static T call(vec const& v) + { + return sqrt(dot(v, v)); + } + }; + + template + struct compute_distance + { + GLM_FUNC_QUALIFIER static T call(vec const& p0, vec const& p1) + { + return length(p1 - p0); + } + }; + + template + struct compute_dot{}; + + template + struct compute_dot, T, Aligned> + { + GLM_FUNC_QUALIFIER static T call(vec<1, T, Q> const& a, vec<1, T, Q> const& b) + { + return a.x * b.x; + } + }; + + template + struct compute_dot, T, Aligned> + { + GLM_FUNC_QUALIFIER static T call(vec<2, T, Q> const& a, vec<2, T, Q> const& b) + { + vec<2, T, Q> tmp(a * b); + return tmp.x + tmp.y; + } + }; + + template + struct compute_dot, T, Aligned> + { + GLM_FUNC_QUALIFIER static T call(vec<3, T, Q> const& a, vec<3, T, Q> const& b) + { + vec<3, T, Q> tmp(a * b); + return tmp.x + tmp.y + tmp.z; + } + }; + + template + struct compute_dot, T, Aligned> + { + GLM_FUNC_QUALIFIER static T call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + vec<4, T, Q> tmp(a * b); + return (tmp.x + tmp.y) + (tmp.z + tmp.w); + } + }; + + template + struct compute_cross + { + GLM_FUNC_QUALIFIER static vec<3, T, Q> call(vec<3, T, Q> const& x, vec<3, T, Q> const& y) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'cross' accepts only floating-point inputs"); + + return vec<3, T, Q>( + x.y * y.z - y.y * x.z, + x.z * y.x - y.z * x.x, + x.x * y.y - y.x * x.y); + } + }; + + template + struct compute_normalize + { + GLM_FUNC_QUALIFIER static vec call(vec const& v) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'normalize' accepts only floating-point inputs"); + + return v * inversesqrt(dot(v, v)); + } + }; + + template + struct compute_faceforward + { + GLM_FUNC_QUALIFIER static vec call(vec const& N, vec const& I, vec const& Nref) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'normalize' accepts only floating-point inputs"); + + return dot(Nref, I) < static_cast(0) ? N : -N; + } + }; + + template + struct compute_reflect + { + GLM_FUNC_QUALIFIER static vec call(vec const& I, vec const& N) + { + return I - N * dot(N, I) * static_cast(2); + } + }; + + template + struct compute_refract + { + GLM_FUNC_QUALIFIER static vec call(vec const& I, vec const& N, T eta) + { + T const dotValue(dot(N, I)); + T const k(static_cast(1) - eta * eta * (static_cast(1) - dotValue * dotValue)); + vec const Result = + (k >= static_cast(0)) ? 
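/* k = 1 - eta^2 * (1 - dot(N, I)^2) per Snell's law; a negative k means total
   internal reflection, for which GLSL's refract() is specified to return the
   zero vector — hence the vec(0) else branch of this conditional. */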
(eta * I - (eta * dotValue + std::sqrt(k)) * N) : vec(0); + return Result; + } + }; +}//namespace detail + + // length + template + GLM_FUNC_QUALIFIER genType length(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'length' accepts only floating-point inputs"); + + return abs(x); + } + + template + GLM_FUNC_QUALIFIER T length(vec const& v) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'length' accepts only floating-point inputs"); + + return detail::compute_length::value>::call(v); + } + + // distance + template + GLM_FUNC_QUALIFIER genType distance(genType const& p0, genType const& p1) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'distance' accepts only floating-point inputs"); + + return length(p1 - p0); + } + + template + GLM_FUNC_QUALIFIER T distance(vec const& p0, vec const& p1) + { + return detail::compute_distance::value>::call(p0, p1); + } + + // dot + template + GLM_FUNC_QUALIFIER T dot(T x, T y) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'dot' accepts only floating-point inputs"); + return x * y; + } + + template + GLM_FUNC_QUALIFIER T dot(vec const& x, vec const& y) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'dot' accepts only floating-point inputs"); + return detail::compute_dot, T, detail::is_aligned::value>::call(x, y); + } + + // cross + template + GLM_FUNC_QUALIFIER vec<3, T, Q> cross(vec<3, T, Q> const& x, vec<3, T, Q> const& y) + { + return detail::compute_cross::value>::call(x, y); + } +/* + // normalize + template + GLM_FUNC_QUALIFIER genType normalize(genType const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'normalize' accepts only floating-point inputs"); + + return x < genType(0) ? genType(-1) : genType(1); + } +*/ + template + GLM_FUNC_QUALIFIER vec normalize(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'normalize' accepts only floating-point inputs"); + + return detail::compute_normalize::value>::call(x); + } + + // faceforward + template + GLM_FUNC_QUALIFIER genType faceforward(genType const& N, genType const& I, genType const& Nref) + { + return dot(Nref, I) < static_cast(0) ? 
N : -N; + } + + template + GLM_FUNC_QUALIFIER vec faceforward(vec const& N, vec const& I, vec const& Nref) + { + return detail::compute_faceforward::value>::call(N, I, Nref); + } + + // reflect + template + GLM_FUNC_QUALIFIER genType reflect(genType const& I, genType const& N) + { + return I - N * dot(N, I) * genType(2); + } + + template + GLM_FUNC_QUALIFIER vec reflect(vec const& I, vec const& N) + { + return detail::compute_reflect::value>::call(I, N); + } + + // refract + template + GLM_FUNC_QUALIFIER genType refract(genType const& I, genType const& N, genType eta) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'refract' accepts only floating-point inputs"); + genType const dotValue(dot(N, I)); + genType const k(static_cast(1) - eta * eta * (static_cast(1) - dotValue * dotValue)); + return (eta * I - (eta * dotValue + sqrt(k)) * N) * static_cast(k >= static_cast(0)); + } + + template + GLM_FUNC_QUALIFIER vec refract(vec const& I, vec const& N, T eta) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'refract' accepts only floating-point inputs"); + return detail::compute_refract::value>::call(I, N, eta); + } +}//namespace glm + +#if GLM_CONFIG_SIMD == GLM_ENABLE +# include "func_geometric_simd.inl" +#endif diff --git a/MacroLibX/third_party/glm/detail/func_geometric_simd.inl b/MacroLibX/third_party/glm/detail/func_geometric_simd.inl new file mode 100644 index 0000000..dfe3f4c --- /dev/null +++ b/MacroLibX/third_party/glm/detail/func_geometric_simd.inl @@ -0,0 +1,165 @@ +/// @ref core +/// @file glm/detail/func_geometric_simd.inl + +#include "../simd/geometric.h" + +#if GLM_ARCH & GLM_ARCH_SSE2_BIT + +namespace glm{ +namespace detail +{ + template + struct compute_length<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static float call(vec<4, float, Q> const& v) + { + return _mm_cvtss_f32(glm_vec4_length(v.data)); + } + }; + + template + struct compute_distance<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static float call(vec<4, float, Q> const& p0, vec<4, float, Q> const& p1) + { + return _mm_cvtss_f32(glm_vec4_distance(p0.data, p1.data)); + } + }; + + template + struct compute_dot, float, true> + { + GLM_FUNC_QUALIFIER static float call(vec<4, float, Q> const& x, vec<4, float, Q> const& y) + { + return _mm_cvtss_f32(glm_vec1_dot(x.data, y.data)); + } + }; + + template + struct compute_cross + { + GLM_FUNC_QUALIFIER static vec<3, float, Q> call(vec<3, float, Q> const& a, vec<3, float, Q> const& b) + { + __m128 const set0 = _mm_set_ps(0.0f, a.z, a.y, a.x); + __m128 const set1 = _mm_set_ps(0.0f, b.z, b.y, b.x); + __m128 const xpd0 = glm_vec4_cross(set0, set1); + + vec<4, float, Q> Result; + Result.data = xpd0; + return vec<3, float, Q>(Result); + } + }; + + template + struct compute_normalize<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) + { + vec<4, float, Q> Result; + Result.data = glm_vec4_normalize(v.data); + return Result; + } + }; + + template + struct compute_faceforward<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& N, vec<4, float, Q> const& I, vec<4, float, Q> const& Nref) + { + vec<4, float, Q> Result; + Result.data = glm_vec4_faceforward(N.data, I.data, Nref.data); + return Result; + } + }; + + template + struct compute_reflect<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& I, vec<4, float, Q> const& N) + { + vec<4, float, Q> Result; + Result.data = glm_vec4_reflect(I.data, N.data); + return Result; + } + }; + + 
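/* Reviewer note (illustrative, not part of the upstream change): reflect()
   computes I - 2 * dot(N, I) * N, and this SSE specialization forwards the
   whole vec4 to glm_vec4_reflect. A minimal usage sketch, assuming GLM's
   public headers are on the include path:

       #include <glm/glm.hpp>
       glm::vec3 I = glm::normalize(glm::vec3(1.0f, -1.0f, 0.0f)); // incident ray
       glm::vec3 N(0.0f, 1.0f, 0.0f);                              // unit normal
       glm::vec3 R = glm::reflect(I, N); // ~(0.707, 0.707, 0)

   Note that reflect() assumes N is already normalized; it does not normalize
   its arguments. */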
template + struct compute_refract<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& I, vec<4, float, Q> const& N, float eta) + { + vec<4, float, Q> Result; + Result.data = glm_vec4_refract(I.data, N.data, _mm_set1_ps(eta)); + return Result; + } + }; +}//namespace detail +}//namespace glm + +#elif GLM_ARCH & GLM_ARCH_NEON_BIT +namespace glm{ +namespace detail +{ + template + struct compute_length<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static float call(vec<4, float, Q> const& v) + { + return compute_dot, float, true>::call(v, v); + } + }; + + template + struct compute_distance<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static float call(vec<4, float, Q> const& p0, vec<4, float, Q> const& p1) + { + return compute_length<4, float, Q, true>::call(p1 - p0); + } + }; + + + template + struct compute_dot, float, true> + { + GLM_FUNC_QUALIFIER static float call(vec<4, float, Q> const& x, vec<4, float, Q> const& y) + { +#if GLM_ARCH & GLM_ARCH_ARMV8_BIT + float32x4_t v = vmulq_f32(x.data, y.data); + v = vpaddq_f32(v, v); + v = vpaddq_f32(v, v); + return vgetq_lane_f32(v, 0); +#else // Armv7a with Neon + float32x4_t p = vmulq_f32(x.data, y.data); + float32x2_t v = vpadd_f32(vget_low_f32(p), vget_high_f32(p)); + v = vpadd_f32(v, v); + return vget_lane_f32(v, 0); +#endif + } + }; + + template + struct compute_normalize<4, float, Q, true> + { + GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) + { + float32x4_t p = vmulq_f32(v.data, v.data); +#if GLM_ARCH & GLM_ARCH_ARMV8_BIT + p = vpaddq_f32(p, p); + p = vpaddq_f32(p, p); +#else + float32x2_t t = vpadd_f32(vget_low_f32(p), vget_high_f32(p)); + t = vpadd_f32(t, t); + p = vcombine_f32(t, t); +#endif + + float32x4_t vd = vrsqrteq_f32(p); + vec<4, float, Q> Result; + Result.data = vmulq_f32(v.data, vd); + return Result; + } + }; +}//namespace detail +}//namespace glm + +#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/MacroLibX/third_party/glm/detail/func_integer.inl b/MacroLibX/third_party/glm/detail/func_integer.inl new file mode 100644 index 0000000..091e1e0 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/func_integer.inl @@ -0,0 +1,372 @@ +/// @ref core + +#include "_vectorize.hpp" +#if(GLM_ARCH & GLM_ARCH_X86 && GLM_COMPILER & GLM_COMPILER_VC) +# include +# pragma intrinsic(_BitScanReverse) +#endif//(GLM_ARCH & GLM_ARCH_X86 && GLM_COMPILER & GLM_COMPILER_VC) +#include + +#if !GLM_HAS_EXTENDED_INTEGER_TYPE +# if GLM_COMPILER & GLM_COMPILER_GCC +# pragma GCC diagnostic ignored "-Wlong-long" +# endif +# if (GLM_COMPILER & GLM_COMPILER_CLANG) +# pragma clang diagnostic ignored "-Wc++11-long-long" +# endif +#endif + +namespace glm{ +namespace detail +{ + template + GLM_FUNC_QUALIFIER T mask(T Bits) + { + return Bits >= static_cast(sizeof(T) * 8) ? 
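/* Shifting a value by its full bit width is undefined behavior in C++, so an
   all-ones mask is returned explicitly whenever Bits covers the whole of T;
   only the else branch performs the usual (1 << Bits) - 1 construction. */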
~static_cast(0) : (static_cast(1) << Bits) - static_cast(1); + } + + template + struct compute_bitfieldReverseStep + { + GLM_FUNC_QUALIFIER static vec call(vec const& v, T, T) + { + return v; + } + }; + + template + struct compute_bitfieldReverseStep + { + GLM_FUNC_QUALIFIER static vec call(vec const& v, T Mask, T Shift) + { + return (v & Mask) << Shift | (v & (~Mask)) >> Shift; + } + }; + + template + struct compute_bitfieldBitCountStep + { + GLM_FUNC_QUALIFIER static vec call(vec const& v, T, T) + { + return v; + } + }; + + template + struct compute_bitfieldBitCountStep + { + GLM_FUNC_QUALIFIER static vec call(vec const& v, T Mask, T Shift) + { + return (v & Mask) + ((v >> Shift) & Mask); + } + }; + + template + struct compute_findLSB + { + GLM_FUNC_QUALIFIER static int call(genIUType Value) + { + if(Value == 0) + return -1; + + return glm::bitCount(~Value & (Value - static_cast(1))); + } + }; + +# if GLM_HAS_BITSCAN_WINDOWS + template + struct compute_findLSB + { + GLM_FUNC_QUALIFIER static int call(genIUType Value) + { + unsigned long Result(0); + unsigned char IsNotNull = _BitScanForward(&Result, *reinterpret_cast(&Value)); + return IsNotNull ? int(Result) : -1; + } + }; + +# if !((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_MODEL == GLM_MODEL_32)) + template + struct compute_findLSB + { + GLM_FUNC_QUALIFIER static int call(genIUType Value) + { + unsigned long Result(0); + unsigned char IsNotNull = _BitScanForward64(&Result, *reinterpret_cast(&Value)); + return IsNotNull ? int(Result) : -1; + } + }; +# endif +# endif//GLM_HAS_BITSCAN_WINDOWS + + template + struct compute_findMSB_step_vec + { + GLM_FUNC_QUALIFIER static vec call(vec const& x, T Shift) + { + return x | (x >> Shift); + } + }; + + template + struct compute_findMSB_step_vec + { + GLM_FUNC_QUALIFIER static vec call(vec const& x, T) + { + return x; + } + }; + + template + struct compute_findMSB_vec + { + GLM_FUNC_QUALIFIER static vec call(vec const& v) + { + vec x(v); + x = compute_findMSB_step_vec= 8>::call(x, static_cast( 1)); + x = compute_findMSB_step_vec= 8>::call(x, static_cast( 2)); + x = compute_findMSB_step_vec= 8>::call(x, static_cast( 4)); + x = compute_findMSB_step_vec= 16>::call(x, static_cast( 8)); + x = compute_findMSB_step_vec= 32>::call(x, static_cast(16)); + x = compute_findMSB_step_vec= 64>::call(x, static_cast(32)); + return vec(sizeof(T) * 8 - 1) - glm::bitCount(~x); + } + }; + +# if GLM_HAS_BITSCAN_WINDOWS + template + GLM_FUNC_QUALIFIER int compute_findMSB_32(genIUType Value) + { + unsigned long Result(0); + unsigned char IsNotNull = _BitScanReverse(&Result, *reinterpret_cast(&Value)); + return IsNotNull ? int(Result) : -1; + } + + template + struct compute_findMSB_vec + { + GLM_FUNC_QUALIFIER static vec call(vec const& x) + { + return detail::functor1::call(compute_findMSB_32, x); + } + }; + +# if !((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_MODEL == GLM_MODEL_32)) + template + GLM_FUNC_QUALIFIER int compute_findMSB_64(genIUType Value) + { + unsigned long Result(0); + unsigned char IsNotNull = _BitScanReverse64(&Result, *reinterpret_cast(&Value)); + return IsNotNull ? 
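/* _BitScanForward64 signals "no bit set" (a zero input) through its return
   value; GLSL's findLSB() is specified to return -1 in that case, hence the
   fallback below. */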
int(Result) : -1; + } + + template + struct compute_findMSB_vec + { + GLM_FUNC_QUALIFIER static vec call(vec const& x) + { + return detail::functor1::call(compute_findMSB_64, x); + } + }; +# endif +# endif//GLM_HAS_BITSCAN_WINDOWS +}//namespace detail + + // uaddCarry + GLM_FUNC_QUALIFIER uint uaddCarry(uint const& x, uint const& y, uint & Carry) + { + detail::uint64 const Value64(static_cast(x) + static_cast(y)); + detail::uint64 const Max32((static_cast(1) << static_cast(32)) - static_cast(1)); + Carry = Value64 > Max32 ? 1u : 0u; + return static_cast(Value64 % (Max32 + static_cast(1))); + } + + template + GLM_FUNC_QUALIFIER vec uaddCarry(vec const& x, vec const& y, vec& Carry) + { + vec Value64(vec(x) + vec(y)); + vec Max32((static_cast(1) << static_cast(32)) - static_cast(1)); + Carry = mix(vec(0), vec(1), greaterThan(Value64, Max32)); + return vec(Value64 % (Max32 + static_cast(1))); + } + + // usubBorrow + GLM_FUNC_QUALIFIER uint usubBorrow(uint const& x, uint const& y, uint & Borrow) + { + Borrow = x >= y ? static_cast(0) : static_cast(1); + if(y >= x) + return y - x; + else + return static_cast((static_cast(1) << static_cast(32)) + (static_cast(y) - static_cast(x))); + } + + template + GLM_FUNC_QUALIFIER vec usubBorrow(vec const& x, vec const& y, vec& Borrow) + { + Borrow = mix(vec(1), vec(0), greaterThanEqual(x, y)); + vec const YgeX(y - x); + vec const XgeY(vec((static_cast(1) << static_cast(32)) + (vec(y) - vec(x)))); + return mix(XgeY, YgeX, greaterThanEqual(y, x)); + } + + // umulExtended + GLM_FUNC_QUALIFIER void umulExtended(uint const& x, uint const& y, uint & msb, uint & lsb) + { + detail::uint64 Value64 = static_cast(x) * static_cast(y); + msb = static_cast(Value64 >> static_cast(32)); + lsb = static_cast(Value64); + } + + template + GLM_FUNC_QUALIFIER void umulExtended(vec const& x, vec const& y, vec& msb, vec& lsb) + { + vec Value64(vec(x) * vec(y)); + msb = vec(Value64 >> static_cast(32)); + lsb = vec(Value64); + } + + // imulExtended + GLM_FUNC_QUALIFIER void imulExtended(int x, int y, int& msb, int& lsb) + { + detail::int64 Value64 = static_cast(x) * static_cast(y); + msb = static_cast(Value64 >> static_cast(32)); + lsb = static_cast(Value64); + } + + template + GLM_FUNC_QUALIFIER void imulExtended(vec const& x, vec const& y, vec& msb, vec& lsb) + { + vec Value64(vec(x) * vec(y)); + lsb = vec(Value64 & static_cast(0xFFFFFFFF)); + msb = vec((Value64 >> static_cast(32)) & static_cast(0xFFFFFFFF)); + } + + // bitfieldExtract + template + GLM_FUNC_QUALIFIER genIUType bitfieldExtract(genIUType Value, int Offset, int Bits) + { + return bitfieldExtract(vec<1, genIUType>(Value), Offset, Bits).x; + } + + template + GLM_FUNC_QUALIFIER vec bitfieldExtract(vec const& Value, int Offset, int Bits) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitfieldExtract' only accept integer inputs"); + + return (Value >> static_cast(Offset)) & static_cast(detail::mask(Bits)); + } + + // bitfieldInsert + template + GLM_FUNC_QUALIFIER genIUType bitfieldInsert(genIUType const& Base, genIUType const& Insert, int Offset, int Bits) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitfieldInsert' only accept integer values"); + + return bitfieldInsert(vec<1, genIUType>(Base), vec<1, genIUType>(Insert), Offset, Bits).x; + } + + template + GLM_FUNC_QUALIFIER vec bitfieldInsert(vec const& Base, vec const& Insert, int Offset, int Bits) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitfieldInsert' only accept integer values"); + + T const Mask = 
static_cast(detail::mask(Bits) << Offset); + return (Base & ~Mask) | ((Insert << static_cast(Offset)) & Mask); + } + + // bitfieldReverse + template + GLM_FUNC_QUALIFIER genIUType bitfieldReverse(genIUType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitfieldReverse' only accept integer values"); + + return bitfieldReverse(glm::vec<1, genIUType, glm::defaultp>(x)).x; + } + + template + GLM_FUNC_QUALIFIER vec bitfieldReverse(vec const& v) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitfieldReverse' only accept integer values"); + + vec x(v); + x = detail::compute_bitfieldReverseStep::value, sizeof(T) * 8>= 2>::call(x, static_cast(0x5555555555555555ull), static_cast( 1)); + x = detail::compute_bitfieldReverseStep::value, sizeof(T) * 8>= 4>::call(x, static_cast(0x3333333333333333ull), static_cast( 2)); + x = detail::compute_bitfieldReverseStep::value, sizeof(T) * 8>= 8>::call(x, static_cast(0x0F0F0F0F0F0F0F0Full), static_cast( 4)); + x = detail::compute_bitfieldReverseStep::value, sizeof(T) * 8>= 16>::call(x, static_cast(0x00FF00FF00FF00FFull), static_cast( 8)); + x = detail::compute_bitfieldReverseStep::value, sizeof(T) * 8>= 32>::call(x, static_cast(0x0000FFFF0000FFFFull), static_cast(16)); + x = detail::compute_bitfieldReverseStep::value, sizeof(T) * 8>= 64>::call(x, static_cast(0x00000000FFFFFFFFull), static_cast(32)); + return x; + } + + // bitCount + template + GLM_FUNC_QUALIFIER int bitCount(genIUType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitCount' only accept integer values"); + + return bitCount(glm::vec<1, genIUType, glm::defaultp>(x)).x; + } + + template + GLM_FUNC_QUALIFIER vec bitCount(vec const& v) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitCount' only accept integer values"); + +# if GLM_COMPILER & GLM_COMPILER_VC +# pragma warning(push) +# pragma warning(disable : 4310) //cast truncates constant value +# endif + + vec::type, Q> x(*reinterpret_cast::type, Q> const *>(&v)); + x = detail::compute_bitfieldBitCountStep::type, Q, detail::is_aligned::value, sizeof(T) * 8>= 2>::call(x, typename detail::make_unsigned::type(0x5555555555555555ull), typename detail::make_unsigned::type( 1)); + x = detail::compute_bitfieldBitCountStep::type, Q, detail::is_aligned::value, sizeof(T) * 8>= 4>::call(x, typename detail::make_unsigned::type(0x3333333333333333ull), typename detail::make_unsigned::type( 2)); + x = detail::compute_bitfieldBitCountStep::type, Q, detail::is_aligned::value, sizeof(T) * 8>= 8>::call(x, typename detail::make_unsigned::type(0x0F0F0F0F0F0F0F0Full), typename detail::make_unsigned::type( 4)); + x = detail::compute_bitfieldBitCountStep::type, Q, detail::is_aligned::value, sizeof(T) * 8>= 16>::call(x, typename detail::make_unsigned::type(0x00FF00FF00FF00FFull), typename detail::make_unsigned::type( 8)); + x = detail::compute_bitfieldBitCountStep::type, Q, detail::is_aligned::value, sizeof(T) * 8>= 32>::call(x, typename detail::make_unsigned::type(0x0000FFFF0000FFFFull), typename detail::make_unsigned::type(16)); + x = detail::compute_bitfieldBitCountStep::type, Q, detail::is_aligned::value, sizeof(T) * 8>= 64>::call(x, typename detail::make_unsigned::type(0x00000000FFFFFFFFull), typename detail::make_unsigned::type(32)); + return vec(x); + +# if GLM_COMPILER & GLM_COMPILER_VC +# pragma warning(pop) +# endif + } + + // findLSB + template + GLM_FUNC_QUALIFIER int findLSB(genIUType Value) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'findLSB' only accept integer values"); + + return 
detail::compute_findLSB::call(Value); + } + + template + GLM_FUNC_QUALIFIER vec findLSB(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'findLSB' only accept integer values"); + + return detail::functor1::call(findLSB, x); + } + + // findMSB + template + GLM_FUNC_QUALIFIER int findMSB(genIUType v) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'findMSB' only accept integer values"); + + return findMSB(vec<1, genIUType>(v)).x; + } + + template + GLM_FUNC_QUALIFIER vec findMSB(vec const& v) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'findMSB' only accept integer values"); + + return detail::compute_findMSB_vec::call(v); + } +}//namespace glm + +#if GLM_CONFIG_SIMD == GLM_ENABLE +# include "func_integer_simd.inl" +#endif + diff --git a/MacroLibX/third_party/glm/detail/func_integer_simd.inl b/MacroLibX/third_party/glm/detail/func_integer_simd.inl new file mode 100644 index 0000000..8be6c9c --- /dev/null +++ b/MacroLibX/third_party/glm/detail/func_integer_simd.inl @@ -0,0 +1,65 @@ +#include "../simd/integer.h" + +#if GLM_ARCH & GLM_ARCH_SSE2_BIT + +namespace glm{ +namespace detail +{ + template + struct compute_bitfieldReverseStep<4, uint, Q, true, true> + { + GLM_FUNC_QUALIFIER static vec<4, uint, Q> call(vec<4, uint, Q> const& v, uint Mask, uint Shift) + { + __m128i const set0 = v.data; + + __m128i const set1 = _mm_set1_epi32(static_cast(Mask)); + __m128i const and1 = _mm_and_si128(set0, set1); + __m128i const sft1 = _mm_slli_epi32(and1, Shift); + + __m128i const set2 = _mm_andnot_si128(set0, _mm_set1_epi32(-1)); + __m128i const and2 = _mm_and_si128(set0, set2); + __m128i const sft2 = _mm_srai_epi32(and2, Shift); + + __m128i const or0 = _mm_or_si128(sft1, sft2); + + return or0; + } + }; + + template + struct compute_bitfieldBitCountStep<4, uint, Q, true, true> + { + GLM_FUNC_QUALIFIER static vec<4, uint, Q> call(vec<4, uint, Q> const& v, uint Mask, uint Shift) + { + __m128i const set0 = v.data; + + __m128i const set1 = _mm_set1_epi32(static_cast(Mask)); + __m128i const and0 = _mm_and_si128(set0, set1); + __m128i const sft0 = _mm_slli_epi32(set0, Shift); + __m128i const and1 = _mm_and_si128(sft0, set1); + __m128i const add0 = _mm_add_epi32(and0, and1); + + return add0; + } + }; +}//namespace detail + +# if GLM_ARCH & GLM_ARCH_AVX_BIT + template<> + GLM_FUNC_QUALIFIER int bitCount(uint x) + { + return _mm_popcnt_u32(x); + } + +# if(GLM_MODEL == GLM_MODEL_64) + template<> + GLM_FUNC_QUALIFIER int bitCount(detail::uint64 x) + { + return static_cast(_mm_popcnt_u64(x)); + } +# endif//GLM_MODEL +# endif//GLM_ARCH + +}//namespace glm + +#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/MacroLibX/third_party/glm/detail/func_matrix.inl b/MacroLibX/third_party/glm/detail/func_matrix.inl new file mode 100644 index 0000000..d980c6d --- /dev/null +++ b/MacroLibX/third_party/glm/detail/func_matrix.inl @@ -0,0 +1,398 @@ +#include "../geometric.hpp" +#include + +namespace glm{ +namespace detail +{ + template + struct compute_matrixCompMult + { + GLM_FUNC_QUALIFIER static mat call(mat const& x, mat const& y) + { + mat Result; + for(length_t i = 0; i < Result.length(); ++i) + Result[i] = x[i] * y[i]; + return Result; + } + }; + + template + struct compute_transpose{}; + + template + struct compute_transpose<2, 2, T, Q, Aligned> + { + GLM_FUNC_QUALIFIER static mat<2, 2, T, Q> call(mat<2, 2, T, Q> const& m) + { + mat<2, 2, T, Q> Result; + Result[0][0] = m[0][0]; + Result[0][1] = m[1][0]; + Result[1][0] = m[0][1]; + Result[1][1] = m[1][1]; + return Result; + } + }; 
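/* Reviewer note (illustrative, not part of the upstream change): each
   compute_transpose specialization in this file unrolls one matrix shape by
   hand, mapping mat<C, R> to mat<R, C>. A minimal usage sketch, assuming
   GLM's public headers are on the include path:

       #include <glm/glm.hpp>
       glm::mat2x3 m(1.0f, 2.0f, 3.0f,   // column 0
                     4.0f, 5.0f, 6.0f);  // column 1
       glm::mat3x2 t = glm::transpose(m);
       // t[0] == glm::vec2(1, 4), t[1] == glm::vec2(2, 5), t[2] == glm::vec2(3, 6)
*/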
+ + template + struct compute_transpose<2, 3, T, Q, Aligned> + { + GLM_FUNC_QUALIFIER static mat<3, 2, T, Q> call(mat<2, 3, T, Q> const& m) + { + mat<3,2, T, Q> Result; + Result[0][0] = m[0][0]; + Result[0][1] = m[1][0]; + Result[1][0] = m[0][1]; + Result[1][1] = m[1][1]; + Result[2][0] = m[0][2]; + Result[2][1] = m[1][2]; + return Result; + } + }; + + template + struct compute_transpose<2, 4, T, Q, Aligned> + { + GLM_FUNC_QUALIFIER static mat<4, 2, T, Q> call(mat<2, 4, T, Q> const& m) + { + mat<4, 2, T, Q> Result; + Result[0][0] = m[0][0]; + Result[0][1] = m[1][0]; + Result[1][0] = m[0][1]; + Result[1][1] = m[1][1]; + Result[2][0] = m[0][2]; + Result[2][1] = m[1][2]; + Result[3][0] = m[0][3]; + Result[3][1] = m[1][3]; + return Result; + } + }; + + template + struct compute_transpose<3, 2, T, Q, Aligned> + { + GLM_FUNC_QUALIFIER static mat<2, 3, T, Q> call(mat<3, 2, T, Q> const& m) + { + mat<2, 3, T, Q> Result; + Result[0][0] = m[0][0]; + Result[0][1] = m[1][0]; + Result[0][2] = m[2][0]; + Result[1][0] = m[0][1]; + Result[1][1] = m[1][1]; + Result[1][2] = m[2][1]; + return Result; + } + }; + + template + struct compute_transpose<3, 3, T, Q, Aligned> + { + GLM_FUNC_QUALIFIER static mat<3, 3, T, Q> call(mat<3, 3, T, Q> const& m) + { + mat<3, 3, T, Q> Result; + Result[0][0] = m[0][0]; + Result[0][1] = m[1][0]; + Result[0][2] = m[2][0]; + + Result[1][0] = m[0][1]; + Result[1][1] = m[1][1]; + Result[1][2] = m[2][1]; + + Result[2][0] = m[0][2]; + Result[2][1] = m[1][2]; + Result[2][2] = m[2][2]; + return Result; + } + }; + + template + struct compute_transpose<3, 4, T, Q, Aligned> + { + GLM_FUNC_QUALIFIER static mat<4, 3, T, Q> call(mat<3, 4, T, Q> const& m) + { + mat<4, 3, T, Q> Result; + Result[0][0] = m[0][0]; + Result[0][1] = m[1][0]; + Result[0][2] = m[2][0]; + Result[1][0] = m[0][1]; + Result[1][1] = m[1][1]; + Result[1][2] = m[2][1]; + Result[2][0] = m[0][2]; + Result[2][1] = m[1][2]; + Result[2][2] = m[2][2]; + Result[3][0] = m[0][3]; + Result[3][1] = m[1][3]; + Result[3][2] = m[2][3]; + return Result; + } + }; + + template + struct compute_transpose<4, 2, T, Q, Aligned> + { + GLM_FUNC_QUALIFIER static mat<2, 4, T, Q> call(mat<4, 2, T, Q> const& m) + { + mat<2, 4, T, Q> Result; + Result[0][0] = m[0][0]; + Result[0][1] = m[1][0]; + Result[0][2] = m[2][0]; + Result[0][3] = m[3][0]; + Result[1][0] = m[0][1]; + Result[1][1] = m[1][1]; + Result[1][2] = m[2][1]; + Result[1][3] = m[3][1]; + return Result; + } + }; + + template + struct compute_transpose<4, 3, T, Q, Aligned> + { + GLM_FUNC_QUALIFIER static mat<3, 4, T, Q> call(mat<4, 3, T, Q> const& m) + { + mat<3, 4, T, Q> Result; + Result[0][0] = m[0][0]; + Result[0][1] = m[1][0]; + Result[0][2] = m[2][0]; + Result[0][3] = m[3][0]; + Result[1][0] = m[0][1]; + Result[1][1] = m[1][1]; + Result[1][2] = m[2][1]; + Result[1][3] = m[3][1]; + Result[2][0] = m[0][2]; + Result[2][1] = m[1][2]; + Result[2][2] = m[2][2]; + Result[2][3] = m[3][2]; + return Result; + } + }; + + template + struct compute_transpose<4, 4, T, Q, Aligned> + { + GLM_FUNC_QUALIFIER static mat<4, 4, T, Q> call(mat<4, 4, T, Q> const& m) + { + mat<4, 4, T, Q> Result; + Result[0][0] = m[0][0]; + Result[0][1] = m[1][0]; + Result[0][2] = m[2][0]; + Result[0][3] = m[3][0]; + + Result[1][0] = m[0][1]; + Result[1][1] = m[1][1]; + Result[1][2] = m[2][1]; + Result[1][3] = m[3][1]; + + Result[2][0] = m[0][2]; + Result[2][1] = m[1][2]; + Result[2][2] = m[2][2]; + Result[2][3] = m[3][2]; + + Result[3][0] = m[0][3]; + Result[3][1] = m[1][3]; + Result[3][2] = m[2][3]; + Result[3][3] = m[3][3]; + 
return Result; + } + }; + + template + struct compute_determinant{}; + + template + struct compute_determinant<2, 2, T, Q, Aligned> + { + GLM_FUNC_QUALIFIER static T call(mat<2, 2, T, Q> const& m) + { + return m[0][0] * m[1][1] - m[1][0] * m[0][1]; + } + }; + + template + struct compute_determinant<3, 3, T, Q, Aligned> + { + GLM_FUNC_QUALIFIER static T call(mat<3, 3, T, Q> const& m) + { + return + + m[0][0] * (m[1][1] * m[2][2] - m[2][1] * m[1][2]) + - m[1][0] * (m[0][1] * m[2][2] - m[2][1] * m[0][2]) + + m[2][0] * (m[0][1] * m[1][2] - m[1][1] * m[0][2]); + } + }; + + template + struct compute_determinant<4, 4, T, Q, Aligned> + { + GLM_FUNC_QUALIFIER static T call(mat<4, 4, T, Q> const& m) + { + T SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; + T SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; + T SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; + T SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; + T SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; + T SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; + + vec<4, T, Q> DetCof( + + (m[1][1] * SubFactor00 - m[1][2] * SubFactor01 + m[1][3] * SubFactor02), + - (m[1][0] * SubFactor00 - m[1][2] * SubFactor03 + m[1][3] * SubFactor04), + + (m[1][0] * SubFactor01 - m[1][1] * SubFactor03 + m[1][3] * SubFactor05), + - (m[1][0] * SubFactor02 - m[1][1] * SubFactor04 + m[1][2] * SubFactor05)); + + return + m[0][0] * DetCof[0] + m[0][1] * DetCof[1] + + m[0][2] * DetCof[2] + m[0][3] * DetCof[3]; + } + }; + + template + struct compute_inverse{}; + + template + struct compute_inverse<2, 2, T, Q, Aligned> + { + GLM_FUNC_QUALIFIER static mat<2, 2, T, Q> call(mat<2, 2, T, Q> const& m) + { + T OneOverDeterminant = static_cast(1) / ( + + m[0][0] * m[1][1] + - m[1][0] * m[0][1]); + + mat<2, 2, T, Q> Inverse( + + m[1][1] * OneOverDeterminant, + - m[0][1] * OneOverDeterminant, + - m[1][0] * OneOverDeterminant, + + m[0][0] * OneOverDeterminant); + + return Inverse; + } + }; + + template + struct compute_inverse<3, 3, T, Q, Aligned> + { + GLM_FUNC_QUALIFIER static mat<3, 3, T, Q> call(mat<3, 3, T, Q> const& m) + { + T OneOverDeterminant = static_cast(1) / ( + + m[0][0] * (m[1][1] * m[2][2] - m[2][1] * m[1][2]) + - m[1][0] * (m[0][1] * m[2][2] - m[2][1] * m[0][2]) + + m[2][0] * (m[0][1] * m[1][2] - m[1][1] * m[0][2])); + + mat<3, 3, T, Q> Inverse; + Inverse[0][0] = + (m[1][1] * m[2][2] - m[2][1] * m[1][2]) * OneOverDeterminant; + Inverse[1][0] = - (m[1][0] * m[2][2] - m[2][0] * m[1][2]) * OneOverDeterminant; + Inverse[2][0] = + (m[1][0] * m[2][1] - m[2][0] * m[1][1]) * OneOverDeterminant; + Inverse[0][1] = - (m[0][1] * m[2][2] - m[2][1] * m[0][2]) * OneOverDeterminant; + Inverse[1][1] = + (m[0][0] * m[2][2] - m[2][0] * m[0][2]) * OneOverDeterminant; + Inverse[2][1] = - (m[0][0] * m[2][1] - m[2][0] * m[0][1]) * OneOverDeterminant; + Inverse[0][2] = + (m[0][1] * m[1][2] - m[1][1] * m[0][2]) * OneOverDeterminant; + Inverse[1][2] = - (m[0][0] * m[1][2] - m[1][0] * m[0][2]) * OneOverDeterminant; + Inverse[2][2] = + (m[0][0] * m[1][1] - m[1][0] * m[0][1]) * OneOverDeterminant; + + return Inverse; + } + }; + + template + struct compute_inverse<4, 4, T, Q, Aligned> + { + GLM_FUNC_QUALIFIER static mat<4, 4, T, Q> call(mat<4, 4, T, Q> const& m) + { + T Coef00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; + T Coef02 = m[1][2] * m[3][3] - m[3][2] * m[1][3]; + T Coef03 = m[1][2] * m[2][3] - m[2][2] * m[1][3]; + + T Coef04 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; + T Coef06 = m[1][1] * m[3][3] - m[3][1] * m[1][3]; + T Coef07 = m[1][1] * m[2][3] - m[2][1] * 
m[1][3]; + + T Coef08 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; + T Coef10 = m[1][1] * m[3][2] - m[3][1] * m[1][2]; + T Coef11 = m[1][1] * m[2][2] - m[2][1] * m[1][2]; + + T Coef12 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; + T Coef14 = m[1][0] * m[3][3] - m[3][0] * m[1][3]; + T Coef15 = m[1][0] * m[2][3] - m[2][0] * m[1][3]; + + T Coef16 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; + T Coef18 = m[1][0] * m[3][2] - m[3][0] * m[1][2]; + T Coef19 = m[1][0] * m[2][2] - m[2][0] * m[1][2]; + + T Coef20 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; + T Coef22 = m[1][0] * m[3][1] - m[3][0] * m[1][1]; + T Coef23 = m[1][0] * m[2][1] - m[2][0] * m[1][1]; + + vec<4, T, Q> Fac0(Coef00, Coef00, Coef02, Coef03); + vec<4, T, Q> Fac1(Coef04, Coef04, Coef06, Coef07); + vec<4, T, Q> Fac2(Coef08, Coef08, Coef10, Coef11); + vec<4, T, Q> Fac3(Coef12, Coef12, Coef14, Coef15); + vec<4, T, Q> Fac4(Coef16, Coef16, Coef18, Coef19); + vec<4, T, Q> Fac5(Coef20, Coef20, Coef22, Coef23); + + vec<4, T, Q> Vec0(m[1][0], m[0][0], m[0][0], m[0][0]); + vec<4, T, Q> Vec1(m[1][1], m[0][1], m[0][1], m[0][1]); + vec<4, T, Q> Vec2(m[1][2], m[0][2], m[0][2], m[0][2]); + vec<4, T, Q> Vec3(m[1][3], m[0][3], m[0][3], m[0][3]); + + vec<4, T, Q> Inv0(Vec1 * Fac0 - Vec2 * Fac1 + Vec3 * Fac2); + vec<4, T, Q> Inv1(Vec0 * Fac0 - Vec2 * Fac3 + Vec3 * Fac4); + vec<4, T, Q> Inv2(Vec0 * Fac1 - Vec1 * Fac3 + Vec3 * Fac5); + vec<4, T, Q> Inv3(Vec0 * Fac2 - Vec1 * Fac4 + Vec2 * Fac5); + + vec<4, T, Q> SignA(+1, -1, +1, -1); + vec<4, T, Q> SignB(-1, +1, -1, +1); + mat<4, 4, T, Q> Inverse(Inv0 * SignA, Inv1 * SignB, Inv2 * SignA, Inv3 * SignB); + + vec<4, T, Q> Row0(Inverse[0][0], Inverse[1][0], Inverse[2][0], Inverse[3][0]); + + vec<4, T, Q> Dot0(m[0] * Row0); + T Dot1 = (Dot0.x + Dot0.y) + (Dot0.z + Dot0.w); + + T OneOverDeterminant = static_cast(1) / Dot1; + + return Inverse * OneOverDeterminant; + } + }; +}//namespace detail + + template + GLM_FUNC_QUALIFIER mat matrixCompMult(mat const& x, mat const& y) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'matrixCompMult' only accept floating-point inputs"); + return detail::compute_matrixCompMult::value>::call(x, y); + } + + template + GLM_FUNC_QUALIFIER typename detail::outerProduct_trait::type outerProduct(vec const& c, vec const& r) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'outerProduct' only accept floating-point inputs"); + + typename detail::outerProduct_trait::type m; + for(length_t i = 0; i < m.length(); ++i) + m[i] = c * r[i]; + return m; + } + + template + GLM_FUNC_QUALIFIER typename mat::transpose_type transpose(mat const& m) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'transpose' only accept floating-point inputs"); + return detail::compute_transpose::value>::call(m); + } + + template + GLM_FUNC_QUALIFIER T determinant(mat const& m) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'determinant' only accept floating-point inputs"); + return detail::compute_determinant::value>::call(m); + } + + template + GLM_FUNC_QUALIFIER mat inverse(mat const& m) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'inverse' only accept floating-point inputs"); + return detail::compute_inverse::value>::call(m); + } +}//namespace glm + +#if GLM_CONFIG_SIMD == GLM_ENABLE +# include "func_matrix_simd.inl" +#endif + diff --git a/MacroLibX/third_party/glm/detail/func_matrix_simd.inl 
b/MacroLibX/third_party/glm/detail/func_matrix_simd.inl new file mode 100644 index 0000000..f67ac66 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/func_matrix_simd.inl @@ -0,0 +1,249 @@ +#if GLM_ARCH & GLM_ARCH_SSE2_BIT + +#include "type_mat4x4.hpp" +#include "../geometric.hpp" +#include "../simd/matrix.h" +#include + +namespace glm{ +namespace detail +{ +# if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE + template + struct compute_matrixCompMult<4, 4, float, Q, true> + { + GLM_STATIC_ASSERT(detail::is_aligned::value, "Specialization requires aligned"); + + GLM_FUNC_QUALIFIER static mat<4, 4, float, Q> call(mat<4, 4, float, Q> const& x, mat<4, 4, float, Q> const& y) + { + mat<4, 4, float, Q> Result; + glm_mat4_matrixCompMult( + *static_cast(&x[0].data), + *static_cast(&y[0].data), + *static_cast(&Result[0].data)); + return Result; + } + }; +# endif + + template + struct compute_transpose<4, 4, float, Q, true> + { + GLM_FUNC_QUALIFIER static mat<4, 4, float, Q> call(mat<4, 4, float, Q> const& m) + { + mat<4, 4, float, Q> Result; + glm_mat4_transpose(&m[0].data, &Result[0].data); + return Result; + } + }; + + template + struct compute_determinant<4, 4, float, Q, true> + { + GLM_FUNC_QUALIFIER static float call(mat<4, 4, float, Q> const& m) + { + return _mm_cvtss_f32(glm_mat4_determinant(&m[0].data)); + } + }; + + template + struct compute_inverse<4, 4, float, Q, true> + { + GLM_FUNC_QUALIFIER static mat<4, 4, float, Q> call(mat<4, 4, float, Q> const& m) + { + mat<4, 4, float, Q> Result; + glm_mat4_inverse(&m[0].data, &Result[0].data); + return Result; + } + }; +}//namespace detail + +# if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE + template<> + GLM_FUNC_QUALIFIER mat<4, 4, float, aligned_lowp> outerProduct<4, 4, float, aligned_lowp>(vec<4, float, aligned_lowp> const& c, vec<4, float, aligned_lowp> const& r) + { + __m128 NativeResult[4]; + glm_mat4_outerProduct(c.data, r.data, NativeResult); + mat<4, 4, float, aligned_lowp> Result; + std::memcpy(&Result[0], &NativeResult[0], sizeof(Result)); + return Result; + } + + template<> + GLM_FUNC_QUALIFIER mat<4, 4, float, aligned_mediump> outerProduct<4, 4, float, aligned_mediump>(vec<4, float, aligned_mediump> const& c, vec<4, float, aligned_mediump> const& r) + { + __m128 NativeResult[4]; + glm_mat4_outerProduct(c.data, r.data, NativeResult); + mat<4, 4, float, aligned_mediump> Result; + std::memcpy(&Result[0], &NativeResult[0], sizeof(Result)); + return Result; + } + + template<> + GLM_FUNC_QUALIFIER mat<4, 4, float, aligned_highp> outerProduct<4, 4, float, aligned_highp>(vec<4, float, aligned_highp> const& c, vec<4, float, aligned_highp> const& r) + { + __m128 NativeResult[4]; + glm_mat4_outerProduct(c.data, r.data, NativeResult); + mat<4, 4, float, aligned_highp> Result; + std::memcpy(&Result[0], &NativeResult[0], sizeof(Result)); + return Result; + } +# endif +}//namespace glm + +#elif GLM_ARCH & GLM_ARCH_NEON_BIT + +namespace glm { +#if GLM_LANG & GLM_LANG_CXX11_FLAG + template + GLM_FUNC_QUALIFIER + typename std::enable_if::value, mat<4, 4, float, Q>>::type + operator*(mat<4, 4, float, Q> const & m1, mat<4, 4, float, Q> const & m2) + { + auto MulRow = [&](int l) { + float32x4_t const SrcA = m2[l].data; + + float32x4_t r = neon::mul_lane(m1[0].data, SrcA, 0); + r = neon::madd_lane(r, m1[1].data, SrcA, 1); + r = neon::madd_lane(r, m1[2].data, SrcA, 2); + r = neon::madd_lane(r, m1[3].data, SrcA, 3); + + return r; + }; + + mat<4, 4, float, aligned_highp> Result; + Result[0].data = MulRow(0); + Result[1].data = MulRow(1); + Result[2].data = 
MulRow(2); + Result[3].data = MulRow(3); + + return Result; + } +#endif // CXX11 + + template + struct detail::compute_inverse<4, 4, float, Q, true> + { + GLM_FUNC_QUALIFIER static mat<4, 4, float, Q> call(mat<4, 4, float, Q> const& m) + { + float32x4_t const& m0 = m[0].data; + float32x4_t const& m1 = m[1].data; + float32x4_t const& m2 = m[2].data; + float32x4_t const& m3 = m[3].data; + + // m[2][2] * m[3][3] - m[3][2] * m[2][3]; + // m[2][2] * m[3][3] - m[3][2] * m[2][3]; + // m[1][2] * m[3][3] - m[3][2] * m[1][3]; + // m[1][2] * m[2][3] - m[2][2] * m[1][3]; + + float32x4_t Fac0; + { + float32x4_t w0 = vcombine_f32(neon::dup_lane(m2, 2), neon::dup_lane(m1, 2)); + float32x4_t w1 = neon::copy_lane(neon::dupq_lane(m3, 3), 3, m2, 3); + float32x4_t w2 = neon::copy_lane(neon::dupq_lane(m3, 2), 3, m2, 2); + float32x4_t w3 = vcombine_f32(neon::dup_lane(m2, 3), neon::dup_lane(m1, 3)); + Fac0 = w0 * w1 - w2 * w3; + } + + // m[2][1] * m[3][3] - m[3][1] * m[2][3]; + // m[2][1] * m[3][3] - m[3][1] * m[2][3]; + // m[1][1] * m[3][3] - m[3][1] * m[1][3]; + // m[1][1] * m[2][3] - m[2][1] * m[1][3]; + + float32x4_t Fac1; + { + float32x4_t w0 = vcombine_f32(neon::dup_lane(m2, 1), neon::dup_lane(m1, 1)); + float32x4_t w1 = neon::copy_lane(neon::dupq_lane(m3, 3), 3, m2, 3); + float32x4_t w2 = neon::copy_lane(neon::dupq_lane(m3, 1), 3, m2, 1); + float32x4_t w3 = vcombine_f32(neon::dup_lane(m2, 3), neon::dup_lane(m1, 3)); + Fac1 = w0 * w1 - w2 * w3; + } + + // m[2][1] * m[3][2] - m[3][1] * m[2][2]; + // m[2][1] * m[3][2] - m[3][1] * m[2][2]; + // m[1][1] * m[3][2] - m[3][1] * m[1][2]; + // m[1][1] * m[2][2] - m[2][1] * m[1][2]; + + float32x4_t Fac2; + { + float32x4_t w0 = vcombine_f32(neon::dup_lane(m2, 1), neon::dup_lane(m1, 1)); + float32x4_t w1 = neon::copy_lane(neon::dupq_lane(m3, 2), 3, m2, 2); + float32x4_t w2 = neon::copy_lane(neon::dupq_lane(m3, 1), 3, m2, 1); + float32x4_t w3 = vcombine_f32(neon::dup_lane(m2, 2), neon::dup_lane(m1, 2)); + Fac2 = w0 * w1 - w2 * w3; + } + + // m[2][0] * m[3][3] - m[3][0] * m[2][3]; + // m[2][0] * m[3][3] - m[3][0] * m[2][3]; + // m[1][0] * m[3][3] - m[3][0] * m[1][3]; + // m[1][0] * m[2][3] - m[2][0] * m[1][3]; + + float32x4_t Fac3; + { + float32x4_t w0 = vcombine_f32(neon::dup_lane(m2, 0), neon::dup_lane(m1, 0)); + float32x4_t w1 = neon::copy_lane(neon::dupq_lane(m3, 3), 3, m2, 3); + float32x4_t w2 = neon::copy_lane(neon::dupq_lane(m3, 0), 3, m2, 0); + float32x4_t w3 = vcombine_f32(neon::dup_lane(m2, 3), neon::dup_lane(m1, 3)); + Fac3 = w0 * w1 - w2 * w3; + } + + // m[2][0] * m[3][2] - m[3][0] * m[2][2]; + // m[2][0] * m[3][2] - m[3][0] * m[2][2]; + // m[1][0] * m[3][2] - m[3][0] * m[1][2]; + // m[1][0] * m[2][2] - m[2][0] * m[1][2]; + + float32x4_t Fac4; + { + float32x4_t w0 = vcombine_f32(neon::dup_lane(m2, 0), neon::dup_lane(m1, 0)); + float32x4_t w1 = neon::copy_lane(neon::dupq_lane(m3, 2), 3, m2, 2); + float32x4_t w2 = neon::copy_lane(neon::dupq_lane(m3, 0), 3, m2, 0); + float32x4_t w3 = vcombine_f32(neon::dup_lane(m2, 2), neon::dup_lane(m1, 2)); + Fac4 = w0 * w1 - w2 * w3; + } + + // m[2][0] * m[3][1] - m[3][0] * m[2][1]; + // m[2][0] * m[3][1] - m[3][0] * m[2][1]; + // m[1][0] * m[3][1] - m[3][0] * m[1][1]; + // m[1][0] * m[2][1] - m[2][0] * m[1][1]; + + float32x4_t Fac5; + { + float32x4_t w0 = vcombine_f32(neon::dup_lane(m2, 0), neon::dup_lane(m1, 0)); + float32x4_t w1 = neon::copy_lane(neon::dupq_lane(m3, 1), 3, m2, 1); + float32x4_t w2 = neon::copy_lane(neon::dupq_lane(m3, 0), 3, m2, 0); + float32x4_t w3 = vcombine_f32(neon::dup_lane(m2, 1), neon::dup_lane(m1, 
1)); + Fac5 = w0 * w1 - w2 * w3; + } + + float32x4_t Vec0 = neon::copy_lane(neon::dupq_lane(m0, 0), 0, m1, 0); // (m[1][0], m[0][0], m[0][0], m[0][0]); + float32x4_t Vec1 = neon::copy_lane(neon::dupq_lane(m0, 1), 0, m1, 1); // (m[1][1], m[0][1], m[0][1], m[0][1]); + float32x4_t Vec2 = neon::copy_lane(neon::dupq_lane(m0, 2), 0, m1, 2); // (m[1][2], m[0][2], m[0][2], m[0][2]); + float32x4_t Vec3 = neon::copy_lane(neon::dupq_lane(m0, 3), 0, m1, 3); // (m[1][3], m[0][3], m[0][3], m[0][3]); + + float32x4_t Inv0 = Vec1 * Fac0 - Vec2 * Fac1 + Vec3 * Fac2; + float32x4_t Inv1 = Vec0 * Fac0 - Vec2 * Fac3 + Vec3 * Fac4; + float32x4_t Inv2 = Vec0 * Fac1 - Vec1 * Fac3 + Vec3 * Fac5; + float32x4_t Inv3 = Vec0 * Fac2 - Vec1 * Fac4 + Vec2 * Fac5; + + float32x4_t r0 = float32x4_t{-1, +1, -1, +1} * Inv0; + float32x4_t r1 = float32x4_t{+1, -1, +1, -1} * Inv1; + float32x4_t r2 = float32x4_t{-1, +1, -1, +1} * Inv2; + float32x4_t r3 = float32x4_t{+1, -1, +1, -1} * Inv3; + + float32x4_t det = neon::mul_lane(r0, m0, 0); + det = neon::madd_lane(det, r1, m0, 1); + det = neon::madd_lane(det, r2, m0, 2); + det = neon::madd_lane(det, r3, m0, 3); + + float32x4_t rdet = vdupq_n_f32(1 / vgetq_lane_f32(det, 0)); + + mat<4, 4, float, Q> r; + r[0].data = vmulq_f32(r0, rdet); + r[1].data = vmulq_f32(r1, rdet); + r[2].data = vmulq_f32(r2, rdet); + r[3].data = vmulq_f32(r3, rdet); + return r; + } + }; +}//namespace glm +#endif diff --git a/MacroLibX/third_party/glm/detail/func_packing.inl b/MacroLibX/third_party/glm/detail/func_packing.inl new file mode 100644 index 0000000..234b093 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/func_packing.inl @@ -0,0 +1,189 @@ +/// @ref core +/// @file glm/detail/func_packing.inl + +#include "../common.hpp" +#include "type_half.hpp" + +namespace glm +{ + GLM_FUNC_QUALIFIER uint packUnorm2x16(vec2 const& v) + { + union + { + unsigned short in[2]; + uint out; + } u; + + vec<2, unsigned short, defaultp> result(round(clamp(v, 0.0f, 1.0f) * 65535.0f)); + + u.in[0] = result[0]; + u.in[1] = result[1]; + + return u.out; + } + + GLM_FUNC_QUALIFIER vec2 unpackUnorm2x16(uint p) + { + union + { + uint in; + unsigned short out[2]; + } u; + + u.in = p; + + return vec2(u.out[0], u.out[1]) * 1.5259021896696421759365224689097e-5f; + } + + GLM_FUNC_QUALIFIER uint packSnorm2x16(vec2 const& v) + { + union + { + signed short in[2]; + uint out; + } u; + + vec<2, short, defaultp> result(round(clamp(v, -1.0f, 1.0f) * 32767.0f)); + + u.in[0] = result[0]; + u.in[1] = result[1]; + + return u.out; + } + + GLM_FUNC_QUALIFIER vec2 unpackSnorm2x16(uint p) + { + union + { + uint in; + signed short out[2]; + } u; + + u.in = p; + + return clamp(vec2(u.out[0], u.out[1]) * 3.0518509475997192297128208258309e-5f, -1.0f, 1.0f); + } + + GLM_FUNC_QUALIFIER uint packUnorm4x8(vec4 const& v) + { + union + { + unsigned char in[4]; + uint out; + } u; + + vec<4, unsigned char, defaultp> result(round(clamp(v, 0.0f, 1.0f) * 255.0f)); + + u.in[0] = result[0]; + u.in[1] = result[1]; + u.in[2] = result[2]; + u.in[3] = result[3]; + + return u.out; + } + + GLM_FUNC_QUALIFIER vec4 unpackUnorm4x8(uint p) + { + union + { + uint in; + unsigned char out[4]; + } u; + + u.in = p; + + return vec4(u.out[0], u.out[1], u.out[2], u.out[3]) * 0.0039215686274509803921568627451f; + } + + GLM_FUNC_QUALIFIER uint packSnorm4x8(vec4 const& v) + { + union + { + signed char in[4]; + uint out; + } u; + + vec<4, signed char, defaultp> result(round(clamp(v, -1.0f, 1.0f) * 127.0f)); + + u.in[0] = result[0]; + u.in[1] = result[1]; + u.in[2] = result[2]; + u.in[3] 
= result[3]; + + return u.out; + } + + GLM_FUNC_QUALIFIER glm::vec4 unpackSnorm4x8(uint p) + { + union + { + uint in; + signed char out[4]; + } u; + + u.in = p; + + return clamp(vec4(u.out[0], u.out[1], u.out[2], u.out[3]) * 0.0078740157480315f, -1.0f, 1.0f); + } + + GLM_FUNC_QUALIFIER double packDouble2x32(uvec2 const& v) + { + union + { + uint in[2]; + double out; + } u; + + u.in[0] = v[0]; + u.in[1] = v[1]; + + return u.out; + } + + GLM_FUNC_QUALIFIER uvec2 unpackDouble2x32(double v) + { + union + { + double in; + uint out[2]; + } u; + + u.in = v; + + return uvec2(u.out[0], u.out[1]); + } + + GLM_FUNC_QUALIFIER uint packHalf2x16(vec2 const& v) + { + union + { + signed short in[2]; + uint out; + } u; + + u.in[0] = detail::toFloat16(v.x); + u.in[1] = detail::toFloat16(v.y); + + return u.out; + } + + GLM_FUNC_QUALIFIER vec2 unpackHalf2x16(uint v) + { + union + { + uint in; + signed short out[2]; + } u; + + u.in = v; + + return vec2( + detail::toFloat32(u.out[0]), + detail::toFloat32(u.out[1])); + } +}//namespace glm + +#if GLM_CONFIG_SIMD == GLM_ENABLE +# include "func_packing_simd.inl" +#endif + diff --git a/MacroLibX/third_party/glm/detail/func_packing_simd.inl b/MacroLibX/third_party/glm/detail/func_packing_simd.inl new file mode 100644 index 0000000..fd0fe8b --- /dev/null +++ b/MacroLibX/third_party/glm/detail/func_packing_simd.inl @@ -0,0 +1,6 @@ +namespace glm{ +namespace detail +{ + +}//namespace detail +}//namespace glm diff --git a/MacroLibX/third_party/glm/detail/func_trigonometric.inl b/MacroLibX/third_party/glm/detail/func_trigonometric.inl new file mode 100644 index 0000000..e129dce --- /dev/null +++ b/MacroLibX/third_party/glm/detail/func_trigonometric.inl @@ -0,0 +1,197 @@ +#include "_vectorize.hpp" +#include +#include + +namespace glm +{ + // radians + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType radians(genType degrees) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'radians' only accept floating-point input"); + + return degrees * static_cast(0.01745329251994329576923690768489); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec radians(vec const& v) + { + return detail::functor1::call(radians, v); + } + + // degrees + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType degrees(genType radians) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'degrees' only accept floating-point input"); + + return radians * static_cast(57.295779513082320876798154814105); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec degrees(vec const& v) + { + return detail::functor1::call(degrees, v); + } + + // sin + using ::std::sin; + + template + GLM_FUNC_QUALIFIER vec sin(vec const& v) + { + return detail::functor1::call(sin, v); + } + + // cos + using std::cos; + + template + GLM_FUNC_QUALIFIER vec cos(vec const& v) + { + return detail::functor1::call(cos, v); + } + + // tan + using std::tan; + + template + GLM_FUNC_QUALIFIER vec tan(vec const& v) + { + return detail::functor1::call(tan, v); + } + + // asin + using std::asin; + + template + GLM_FUNC_QUALIFIER vec asin(vec const& v) + { + return detail::functor1::call(asin, v); + } + + // acos + using std::acos; + + template + GLM_FUNC_QUALIFIER vec acos(vec const& v) + { + return detail::functor1::call(acos, v); + } + + // atan + template + GLM_FUNC_QUALIFIER genType atan(genType y, genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'atan' only accept floating-point input"); + + return ::std::atan2(y, x); + } + + template + GLM_FUNC_QUALIFIER vec atan(vec const& a, vec const& b) + { + return 
detail::functor2::call(::std::atan2, a, b); + } + + using std::atan; + + template + GLM_FUNC_QUALIFIER vec atan(vec const& v) + { + return detail::functor1::call(atan, v); + } + + // sinh + using std::sinh; + + template + GLM_FUNC_QUALIFIER vec sinh(vec const& v) + { + return detail::functor1::call(sinh, v); + } + + // cosh + using std::cosh; + + template + GLM_FUNC_QUALIFIER vec cosh(vec const& v) + { + return detail::functor1::call(cosh, v); + } + + // tanh + using std::tanh; + + template + GLM_FUNC_QUALIFIER vec tanh(vec const& v) + { + return detail::functor1::call(tanh, v); + } + + // asinh +# if GLM_HAS_CXX11_STL + using std::asinh; +# else + template + GLM_FUNC_QUALIFIER genType asinh(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'asinh' only accept floating-point input"); + + return (x < static_cast(0) ? static_cast(-1) : (x > static_cast(0) ? static_cast(1) : static_cast(0))) * log(std::abs(x) + sqrt(static_cast(1) + x * x)); + } +# endif + + template + GLM_FUNC_QUALIFIER vec asinh(vec const& v) + { + return detail::functor1::call(asinh, v); + } + + // acosh +# if GLM_HAS_CXX11_STL + using std::acosh; +# else + template + GLM_FUNC_QUALIFIER genType acosh(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acosh' only accept floating-point input"); + + if(x < static_cast(1)) + return static_cast(0); + return log(x + sqrt(x * x - static_cast(1))); + } +# endif + + template + GLM_FUNC_QUALIFIER vec acosh(vec const& v) + { + return detail::functor1::call(acosh, v); + } + + // atanh +# if GLM_HAS_CXX11_STL + using std::atanh; +# else + template + GLM_FUNC_QUALIFIER genType atanh(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'atanh' only accept floating-point input"); + + if(std::abs(x) >= static_cast(1)) + return 0; + return static_cast(0.5) * log((static_cast(1) + x) / (static_cast(1) - x)); + } +# endif + + template + GLM_FUNC_QUALIFIER vec atanh(vec const& v) + { + return detail::functor1::call(atanh, v); + } +}//namespace glm + +#if GLM_CONFIG_SIMD == GLM_ENABLE +# include "func_trigonometric_simd.inl" +#endif + diff --git a/MacroLibX/third_party/glm/detail/func_trigonometric_simd.inl b/MacroLibX/third_party/glm/detail/func_trigonometric_simd.inl new file mode 100644 index 0000000..e69de29 diff --git a/MacroLibX/third_party/glm/detail/func_vector_relational.inl b/MacroLibX/third_party/glm/detail/func_vector_relational.inl new file mode 100644 index 0000000..80c9e87 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/func_vector_relational.inl @@ -0,0 +1,87 @@ +namespace glm +{ + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec lessThan(vec const& x, vec const& y) + { + vec Result(true); + for(length_t i = 0; i < L; ++i) + Result[i] = x[i] < y[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec lessThanEqual(vec const& x, vec const& y) + { + vec Result(true); + for(length_t i = 0; i < L; ++i) + Result[i] = x[i] <= y[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec greaterThan(vec const& x, vec const& y) + { + vec Result(true); + for(length_t i = 0; i < L; ++i) + Result[i] = x[i] > y[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec greaterThanEqual(vec const& x, vec const& y) + { + vec Result(true); + for(length_t i = 0; i < L; ++i) + Result[i] = x[i] >= y[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec equal(vec const& x, vec const& y) + { + vec Result(true); + for(length_t i = 0; i < L; ++i) + Result[i] = x[i] == 
y[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec notEqual(vec const& x, vec const& y) + { + vec Result(true); + for(length_t i = 0; i < L; ++i) + Result[i] = x[i] != y[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool any(vec const& v) + { + bool Result = false; + for(length_t i = 0; i < L; ++i) + Result = Result || v[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool all(vec const& v) + { + bool Result = true; + for(length_t i = 0; i < L; ++i) + Result = Result && v[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec not_(vec const& v) + { + vec Result(true); + for(length_t i = 0; i < L; ++i) + Result[i] = !v[i]; + return Result; + } +}//namespace glm + +#if GLM_CONFIG_SIMD == GLM_ENABLE +# include "func_vector_relational_simd.inl" +#endif diff --git a/MacroLibX/third_party/glm/detail/func_vector_relational_simd.inl b/MacroLibX/third_party/glm/detail/func_vector_relational_simd.inl new file mode 100644 index 0000000..fd0fe8b --- /dev/null +++ b/MacroLibX/third_party/glm/detail/func_vector_relational_simd.inl @@ -0,0 +1,6 @@ +namespace glm{ +namespace detail +{ + +}//namespace detail +}//namespace glm diff --git a/MacroLibX/third_party/glm/detail/glm.cpp b/MacroLibX/third_party/glm/detail/glm.cpp new file mode 100644 index 0000000..e0755bd --- /dev/null +++ b/MacroLibX/third_party/glm/detail/glm.cpp @@ -0,0 +1,263 @@ +/// @ref core +/// @file glm/glm.cpp + +#ifndef GLM_ENABLE_EXPERIMENTAL +#define GLM_ENABLE_EXPERIMENTAL +#endif +#include +#include +#include +#include +#include +#include + +namespace glm +{ +// tvec1 type explicit instantiation +template struct vec<1, uint8, lowp>; +template struct vec<1, uint16, lowp>; +template struct vec<1, uint32, lowp>; +template struct vec<1, uint64, lowp>; +template struct vec<1, int8, lowp>; +template struct vec<1, int16, lowp>; +template struct vec<1, int32, lowp>; +template struct vec<1, int64, lowp>; +template struct vec<1, float32, lowp>; +template struct vec<1, float64, lowp>; + +template struct vec<1, uint8, mediump>; +template struct vec<1, uint16, mediump>; +template struct vec<1, uint32, mediump>; +template struct vec<1, uint64, mediump>; +template struct vec<1, int8, mediump>; +template struct vec<1, int16, mediump>; +template struct vec<1, int32, mediump>; +template struct vec<1, int64, mediump>; +template struct vec<1, float32, mediump>; +template struct vec<1, float64, mediump>; + +template struct vec<1, uint8, highp>; +template struct vec<1, uint16, highp>; +template struct vec<1, uint32, highp>; +template struct vec<1, uint64, highp>; +template struct vec<1, int8, highp>; +template struct vec<1, int16, highp>; +template struct vec<1, int32, highp>; +template struct vec<1, int64, highp>; +template struct vec<1, float32, highp>; +template struct vec<1, float64, highp>; + +// tvec2 type explicit instantiation +template struct vec<2, uint8, lowp>; +template struct vec<2, uint16, lowp>; +template struct vec<2, uint32, lowp>; +template struct vec<2, uint64, lowp>; +template struct vec<2, int8, lowp>; +template struct vec<2, int16, lowp>; +template struct vec<2, int32, lowp>; +template struct vec<2, int64, lowp>; +template struct vec<2, float32, lowp>; +template struct vec<2, float64, lowp>; + +template struct vec<2, uint8, mediump>; +template struct vec<2, uint16, mediump>; +template struct vec<2, uint32, mediump>; +template struct vec<2, uint64, mediump>; +template struct vec<2, int8, mediump>; +template struct vec<2, int16, 
mediump>; +template struct vec<2, int32, mediump>; +template struct vec<2, int64, mediump>; +template struct vec<2, float32, mediump>; +template struct vec<2, float64, mediump>; + +template struct vec<2, uint8, highp>; +template struct vec<2, uint16, highp>; +template struct vec<2, uint32, highp>; +template struct vec<2, uint64, highp>; +template struct vec<2, int8, highp>; +template struct vec<2, int16, highp>; +template struct vec<2, int32, highp>; +template struct vec<2, int64, highp>; +template struct vec<2, float32, highp>; +template struct vec<2, float64, highp>; + +// tvec3 type explicit instantiation +template struct vec<3, uint8, lowp>; +template struct vec<3, uint16, lowp>; +template struct vec<3, uint32, lowp>; +template struct vec<3, uint64, lowp>; +template struct vec<3, int8, lowp>; +template struct vec<3, int16, lowp>; +template struct vec<3, int32, lowp>; +template struct vec<3, int64, lowp>; +template struct vec<3, float32, lowp>; +template struct vec<3, float64, lowp>; + +template struct vec<3, uint8, mediump>; +template struct vec<3, uint16, mediump>; +template struct vec<3, uint32, mediump>; +template struct vec<3, uint64, mediump>; +template struct vec<3, int8, mediump>; +template struct vec<3, int16, mediump>; +template struct vec<3, int32, mediump>; +template struct vec<3, int64, mediump>; +template struct vec<3, float32, mediump>; +template struct vec<3, float64, mediump>; + +template struct vec<3, uint8, highp>; +template struct vec<3, uint16, highp>; +template struct vec<3, uint32, highp>; +template struct vec<3, uint64, highp>; +template struct vec<3, int8, highp>; +template struct vec<3, int16, highp>; +template struct vec<3, int32, highp>; +template struct vec<3, int64, highp>; +template struct vec<3, float32, highp>; +template struct vec<3, float64, highp>; + +// tvec4 type explicit instantiation +template struct vec<4, uint8, lowp>; +template struct vec<4, uint16, lowp>; +template struct vec<4, uint32, lowp>; +template struct vec<4, uint64, lowp>; +template struct vec<4, int8, lowp>; +template struct vec<4, int16, lowp>; +template struct vec<4, int32, lowp>; +template struct vec<4, int64, lowp>; +template struct vec<4, float32, lowp>; +template struct vec<4, float64, lowp>; + +template struct vec<4, uint8, mediump>; +template struct vec<4, uint16, mediump>; +template struct vec<4, uint32, mediump>; +template struct vec<4, uint64, mediump>; +template struct vec<4, int8, mediump>; +template struct vec<4, int16, mediump>; +template struct vec<4, int32, mediump>; +template struct vec<4, int64, mediump>; +template struct vec<4, float32, mediump>; +template struct vec<4, float64, mediump>; + +template struct vec<4, uint8, highp>; +template struct vec<4, uint16, highp>; +template struct vec<4, uint32, highp>; +template struct vec<4, uint64, highp>; +template struct vec<4, int8, highp>; +template struct vec<4, int16, highp>; +template struct vec<4, int32, highp>; +template struct vec<4, int64, highp>; +template struct vec<4, float32, highp>; +template struct vec<4, float64, highp>; + +// tmat2x2 type explicit instantiation +template struct mat<2, 2, float32, lowp>; +template struct mat<2, 2, float64, lowp>; + +template struct mat<2, 2, float32, mediump>; +template struct mat<2, 2, float64, mediump>; + +template struct mat<2, 2, float32, highp>; +template struct mat<2, 2, float64, highp>; + +// tmat2x3 type explicit instantiation +template struct mat<2, 3, float32, lowp>; +template struct mat<2, 3, float64, lowp>; + +template struct mat<2, 3, float32, mediump>; 
+template struct mat<2, 3, float64, mediump>; + +template struct mat<2, 3, float32, highp>; +template struct mat<2, 3, float64, highp>; + +// tmat2x4 type explicit instantiation +template struct mat<2, 4, float32, lowp>; +template struct mat<2, 4, float64, lowp>; + +template struct mat<2, 4, float32, mediump>; +template struct mat<2, 4, float64, mediump>; + +template struct mat<2, 4, float32, highp>; +template struct mat<2, 4, float64, highp>; + +// tmat3x2 type explicit instantiation +template struct mat<3, 2, float32, lowp>; +template struct mat<3, 2, float64, lowp>; + +template struct mat<3, 2, float32, mediump>; +template struct mat<3, 2, float64, mediump>; + +template struct mat<3, 2, float32, highp>; +template struct mat<3, 2, float64, highp>; + +// tmat3x3 type explicit instantiation +template struct mat<3, 3, float32, lowp>; +template struct mat<3, 3, float64, lowp>; + +template struct mat<3, 3, float32, mediump>; +template struct mat<3, 3, float64, mediump>; + +template struct mat<3, 3, float32, highp>; +template struct mat<3, 3, float64, highp>; + +// tmat3x4 type explicit instantiation +template struct mat<3, 4, float32, lowp>; +template struct mat<3, 4, float64, lowp>; + +template struct mat<3, 4, float32, mediump>; +template struct mat<3, 4, float64, mediump>; + +template struct mat<3, 4, float32, highp>; +template struct mat<3, 4, float64, highp>; + +// tmat4x2 type explicit instantiation +template struct mat<4, 2, float32, lowp>; +template struct mat<4, 2, float64, lowp>; + +template struct mat<4, 2, float32, mediump>; +template struct mat<4, 2, float64, mediump>; + +template struct mat<4, 2, float32, highp>; +template struct mat<4, 2, float64, highp>; + +// tmat4x3 type explicit instantiation +template struct mat<4, 3, float32, lowp>; +template struct mat<4, 3, float64, lowp>; + +template struct mat<4, 3, float32, mediump>; +template struct mat<4, 3, float64, mediump>; + +template struct mat<4, 3, float32, highp>; +template struct mat<4, 3, float64, highp>; + +// tmat4x4 type explicit instantiation +template struct mat<4, 4, float32, lowp>; +template struct mat<4, 4, float64, lowp>; + +template struct mat<4, 4, float32, mediump>; +template struct mat<4, 4, float64, mediump>; + +template struct mat<4, 4, float32, highp>; +template struct mat<4, 4, float64, highp>; + +// tquat type explicit instantiation +template struct qua; +template struct qua; + +template struct qua; +template struct qua; + +template struct qua; +template struct qua; + +//tdualquat type explicit instantiation +template struct tdualquat; +template struct tdualquat; + +template struct tdualquat; +template struct tdualquat; + +template struct tdualquat; +template struct tdualquat; + +}//namespace glm + diff --git a/MacroLibX/third_party/glm/detail/qualifier.hpp b/MacroLibX/third_party/glm/detail/qualifier.hpp new file mode 100644 index 0000000..b6c9df0 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/qualifier.hpp @@ -0,0 +1,230 @@ +#pragma once + +#include "setup.hpp" + +namespace glm +{ + /// Qualify GLM types in term of alignment (packed, aligned) and precision in term of ULPs (lowp, mediump, highp) + enum qualifier + { + packed_highp, ///< Typed data is tightly packed in memory and operations are executed with high precision in term of ULPs + packed_mediump, ///< Typed data is tightly packed in memory and operations are executed with medium precision in term of ULPs for higher performance + packed_lowp, ///< Typed data is tightly packed in memory and operations are executed with low precision in term 
of ULPs to maximize performance + +# if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE + aligned_highp, ///< Typed data is aligned in memory allowing SIMD optimizations and operations are executed with high precision in term of ULPs + aligned_mediump, ///< Typed data is aligned in memory allowing SIMD optimizations and operations are executed with high precision in term of ULPs for higher performance + aligned_lowp, // ///< Typed data is aligned in memory allowing SIMD optimizations and operations are executed with high precision in term of ULPs to maximize performance + aligned = aligned_highp, ///< By default aligned qualifier is also high precision +# endif + + highp = packed_highp, ///< By default highp qualifier is also packed + mediump = packed_mediump, ///< By default mediump qualifier is also packed + lowp = packed_lowp, ///< By default lowp qualifier is also packed + packed = packed_highp, ///< By default packed qualifier is also high precision + +# if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE && defined(GLM_FORCE_DEFAULT_ALIGNED_GENTYPES) + defaultp = aligned_highp +# else + defaultp = highp +# endif + }; + + typedef qualifier precision; + + template struct vec; + template struct mat; + template struct qua; + +# if GLM_HAS_TEMPLATE_ALIASES + template using tvec1 = vec<1, T, Q>; + template using tvec2 = vec<2, T, Q>; + template using tvec3 = vec<3, T, Q>; + template using tvec4 = vec<4, T, Q>; + template using tmat2x2 = mat<2, 2, T, Q>; + template using tmat2x3 = mat<2, 3, T, Q>; + template using tmat2x4 = mat<2, 4, T, Q>; + template using tmat3x2 = mat<3, 2, T, Q>; + template using tmat3x3 = mat<3, 3, T, Q>; + template using tmat3x4 = mat<3, 4, T, Q>; + template using tmat4x2 = mat<4, 2, T, Q>; + template using tmat4x3 = mat<4, 3, T, Q>; + template using tmat4x4 = mat<4, 4, T, Q>; + template using tquat = qua; +# endif + +namespace detail +{ + template + struct is_aligned + { + static const bool value = false; + }; + +# if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE + template<> + struct is_aligned + { + static const bool value = true; + }; + + template<> + struct is_aligned + { + static const bool value = true; + }; + + template<> + struct is_aligned + { + static const bool value = true; + }; +# endif + + template + struct storage + { + typedef struct type { + T data[L]; + } type; + }; + +# if GLM_HAS_ALIGNOF + template + struct storage + { + typedef struct alignas(L * sizeof(T)) type { + T data[L]; + } type; + }; + + template + struct storage<3, T, true> + { + typedef struct alignas(4 * sizeof(T)) type { + T data[4]; + } type; + }; +# endif + +# if GLM_ARCH & GLM_ARCH_SSE2_BIT + template<> + struct storage<4, float, true> + { + typedef glm_f32vec4 type; + }; + + template<> + struct storage<4, int, true> + { + typedef glm_i32vec4 type; + }; + + template<> + struct storage<4, unsigned int, true> + { + typedef glm_u32vec4 type; + }; + + template<> + struct storage<2, double, true> + { + typedef glm_f64vec2 type; + }; + + template<> + struct storage<2, detail::int64, true> + { + typedef glm_i64vec2 type; + }; + + template<> + struct storage<2, detail::uint64, true> + { + typedef glm_u64vec2 type; + }; +# endif + +# if (GLM_ARCH & GLM_ARCH_AVX_BIT) + template<> + struct storage<4, double, true> + { + typedef glm_f64vec4 type; + }; +# endif + +# if (GLM_ARCH & GLM_ARCH_AVX2_BIT) + template<> + struct storage<4, detail::int64, true> + { + typedef glm_i64vec4 type; + }; + + template<> + struct storage<4, detail::uint64, true> + { + typedef glm_u64vec4 type; + }; +# endif + +# if GLM_ARCH & 
GLM_ARCH_NEON_BIT + template<> + struct storage<4, float, true> + { + typedef glm_f32vec4 type; + }; + + template<> + struct storage<4, int, true> + { + typedef glm_i32vec4 type; + }; + + template<> + struct storage<4, unsigned int, true> + { + typedef glm_u32vec4 type; + }; +# endif + + enum genTypeEnum + { + GENTYPE_VEC, + GENTYPE_MAT, + GENTYPE_QUAT + }; + + template + struct genTypeTrait + {}; + + template + struct genTypeTrait > + { + static const genTypeEnum GENTYPE = GENTYPE_MAT; + }; + + template + struct init_gentype + { + }; + + template + struct init_gentype + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static genType identity() + { + return genType(1, 0, 0, 0); + } + }; + + template + struct init_gentype + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static genType identity() + { + return genType(1); + } + }; +}//namespace detail +}//namespace glm diff --git a/MacroLibX/third_party/glm/detail/setup.hpp b/MacroLibX/third_party/glm/detail/setup.hpp new file mode 100644 index 0000000..07db656 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/setup.hpp @@ -0,0 +1,1135 @@ +#ifndef GLM_SETUP_INCLUDED + +#include +#include + +#define GLM_VERSION_MAJOR 0 +#define GLM_VERSION_MINOR 9 +#define GLM_VERSION_PATCH 9 +#define GLM_VERSION_REVISION 7 +#define GLM_VERSION 997 +#define GLM_VERSION_MESSAGE "GLM: version 0.9.9.7" + +#define GLM_SETUP_INCLUDED GLM_VERSION + +/////////////////////////////////////////////////////////////////////////////////// +// Active states + +#define GLM_DISABLE 0 +#define GLM_ENABLE 1 + +/////////////////////////////////////////////////////////////////////////////////// +// Messages + +#if defined(GLM_FORCE_MESSAGES) +# define GLM_MESSAGES GLM_ENABLE +#else +# define GLM_MESSAGES GLM_DISABLE +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// Detect the platform + +#include "../simd/platform.h" + +/////////////////////////////////////////////////////////////////////////////////// +// Build model + +#if defined(_M_ARM64) || defined(__LP64__) || defined(_M_X64) || defined(__ppc64__) || defined(__x86_64__) +# define GLM_MODEL GLM_MODEL_64 +#elif defined(__i386__) || defined(__ppc__) || defined(__ILP32__) || defined(_M_ARM) +# define GLM_MODEL GLM_MODEL_32 +#else +# define GLM_MODEL GLM_MODEL_32 +#endif// + +#if !defined(GLM_MODEL) && GLM_COMPILER != 0 +# error "GLM_MODEL undefined, your compiler may not be supported by GLM. Add #define GLM_MODEL 0 to ignore this message." 
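+// Illustrative cross-reference (not from the upstream sources): the GLM_MODEL
+// value selected above is consumed by the build-target report near the end of
+// this header, e.g.
+//   #if (GLM_ARCH & GLM_ARCH_SSE2_BIT) && (GLM_MODEL == GLM_MODEL_64)
+//   #pragma message("GLM: x86 64 bits with SSE2 instruction set build target")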
+#endif//GLM_MODEL + +/////////////////////////////////////////////////////////////////////////////////// +// C++ Version + +// User defines: GLM_FORCE_CXX98, GLM_FORCE_CXX03, GLM_FORCE_CXX11, GLM_FORCE_CXX14, GLM_FORCE_CXX17, GLM_FORCE_CXX2A + +#define GLM_LANG_CXX98_FLAG (1 << 1) +#define GLM_LANG_CXX03_FLAG (1 << 2) +#define GLM_LANG_CXX0X_FLAG (1 << 3) +#define GLM_LANG_CXX11_FLAG (1 << 4) +#define GLM_LANG_CXX14_FLAG (1 << 5) +#define GLM_LANG_CXX17_FLAG (1 << 6) +#define GLM_LANG_CXX2A_FLAG (1 << 7) +#define GLM_LANG_CXXMS_FLAG (1 << 8) +#define GLM_LANG_CXXGNU_FLAG (1 << 9) + +#define GLM_LANG_CXX98 GLM_LANG_CXX98_FLAG +#define GLM_LANG_CXX03 (GLM_LANG_CXX98 | GLM_LANG_CXX03_FLAG) +#define GLM_LANG_CXX0X (GLM_LANG_CXX03 | GLM_LANG_CXX0X_FLAG) +#define GLM_LANG_CXX11 (GLM_LANG_CXX0X | GLM_LANG_CXX11_FLAG) +#define GLM_LANG_CXX14 (GLM_LANG_CXX11 | GLM_LANG_CXX14_FLAG) +#define GLM_LANG_CXX17 (GLM_LANG_CXX14 | GLM_LANG_CXX17_FLAG) +#define GLM_LANG_CXX2A (GLM_LANG_CXX17 | GLM_LANG_CXX2A_FLAG) +#define GLM_LANG_CXXMS GLM_LANG_CXXMS_FLAG +#define GLM_LANG_CXXGNU GLM_LANG_CXXGNU_FLAG + +#if (defined(_MSC_EXTENSIONS)) +# define GLM_LANG_EXT GLM_LANG_CXXMS_FLAG +#elif ((GLM_COMPILER & (GLM_COMPILER_CLANG | GLM_COMPILER_GCC)) && (GLM_ARCH & GLM_ARCH_SIMD_BIT)) +# define GLM_LANG_EXT GLM_LANG_CXXMS_FLAG +#else +# define GLM_LANG_EXT 0 +#endif + +#if (defined(GLM_FORCE_CXX_UNKNOWN)) +# define GLM_LANG 0 +#elif defined(GLM_FORCE_CXX2A) +# define GLM_LANG (GLM_LANG_CXX2A | GLM_LANG_EXT) +# define GLM_LANG_STL11_FORCED +#elif defined(GLM_FORCE_CXX17) +# define GLM_LANG (GLM_LANG_CXX17 | GLM_LANG_EXT) +# define GLM_LANG_STL11_FORCED +#elif defined(GLM_FORCE_CXX14) +# define GLM_LANG (GLM_LANG_CXX14 | GLM_LANG_EXT) +# define GLM_LANG_STL11_FORCED +#elif defined(GLM_FORCE_CXX11) +# define GLM_LANG (GLM_LANG_CXX11 | GLM_LANG_EXT) +# define GLM_LANG_STL11_FORCED +#elif defined(GLM_FORCE_CXX03) +# define GLM_LANG (GLM_LANG_CXX03 | GLM_LANG_EXT) +#elif defined(GLM_FORCE_CXX98) +# define GLM_LANG (GLM_LANG_CXX98 | GLM_LANG_EXT) +#else +# if GLM_COMPILER & GLM_COMPILER_VC && defined(_MSVC_LANG) +# if GLM_COMPILER >= GLM_COMPILER_VC15_7 +# define GLM_LANG_PLATFORM _MSVC_LANG +# elif GLM_COMPILER >= GLM_COMPILER_VC15 +# if _MSVC_LANG > 201402L +# define GLM_LANG_PLATFORM 201402L +# else +# define GLM_LANG_PLATFORM _MSVC_LANG +# endif +# else +# define GLM_LANG_PLATFORM 0 +# endif +# else +# define GLM_LANG_PLATFORM 0 +# endif + +# if __cplusplus > 201703L || GLM_LANG_PLATFORM > 201703L +# define GLM_LANG (GLM_LANG_CXX2A | GLM_LANG_EXT) +# elif __cplusplus == 201703L || GLM_LANG_PLATFORM == 201703L +# define GLM_LANG (GLM_LANG_CXX17 | GLM_LANG_EXT) +# elif __cplusplus == 201402L || __cplusplus == 201500L || GLM_LANG_PLATFORM == 201402L +# define GLM_LANG (GLM_LANG_CXX14 | GLM_LANG_EXT) +# elif __cplusplus == 201103L || GLM_LANG_PLATFORM == 201103L +# define GLM_LANG (GLM_LANG_CXX11 | GLM_LANG_EXT) +# elif defined(__INTEL_CXX11_MODE__) || defined(_MSC_VER) || defined(__GXX_EXPERIMENTAL_CXX0X__) +# define GLM_LANG (GLM_LANG_CXX0X | GLM_LANG_EXT) +# elif __cplusplus == 199711L +# define GLM_LANG (GLM_LANG_CXX98 | GLM_LANG_EXT) +# else +# define GLM_LANG (0 | GLM_LANG_EXT) +# endif +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// Has of C++ features + +// http://clang.llvm.org/cxx_status.html +// http://gcc.gnu.org/projects/cxx0x.html +// http://msdn.microsoft.com/en-us/library/vstudio/hh567368(v=vs.120).aspx + +// Android has multiple STLs but C++11 STL 
detection doesn't always work #284 #564 +#if GLM_PLATFORM == GLM_PLATFORM_ANDROID && !defined(GLM_LANG_STL11_FORCED) +# define GLM_HAS_CXX11_STL 0 +#elif GLM_COMPILER & GLM_COMPILER_CLANG +# if (defined(_LIBCPP_VERSION) || (GLM_LANG & GLM_LANG_CXX11_FLAG) || defined(GLM_LANG_STL11_FORCED)) +# define GLM_HAS_CXX11_STL 1 +# else +# define GLM_HAS_CXX11_STL 0 +# endif +#elif GLM_LANG & GLM_LANG_CXX11_FLAG +# define GLM_HAS_CXX11_STL 1 +#else +# define GLM_HAS_CXX11_STL ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ + ((GLM_COMPILER & GLM_COMPILER_GCC) && (GLM_COMPILER >= GLM_COMPILER_GCC48)) || \ + ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC12)) || \ + ((GLM_PLATFORM != GLM_PLATFORM_WINDOWS) && (GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_COMPILER >= GLM_COMPILER_INTEL15)))) +#endif + +// N1720 +#if GLM_COMPILER & GLM_COMPILER_CLANG +# define GLM_HAS_STATIC_ASSERT __has_feature(cxx_static_assert) +#elif GLM_LANG & GLM_LANG_CXX11_FLAG +# define GLM_HAS_STATIC_ASSERT 1 +#else +# define GLM_HAS_STATIC_ASSERT ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ + ((GLM_COMPILER & GLM_COMPILER_CUDA)) || \ + ((GLM_COMPILER & GLM_COMPILER_VC)))) +#endif + +// N1988 +#if GLM_LANG & GLM_LANG_CXX11_FLAG +# define GLM_HAS_EXTENDED_INTEGER_TYPE 1 +#else +# define GLM_HAS_EXTENDED_INTEGER_TYPE (\ + ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (GLM_COMPILER & GLM_COMPILER_VC)) || \ + ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (GLM_COMPILER & GLM_COMPILER_CUDA)) || \ + ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (GLM_COMPILER & GLM_COMPILER_CLANG))) +#endif + +// N2672 Initializer lists http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2672.htm +#if GLM_COMPILER & GLM_COMPILER_CLANG +# define GLM_HAS_INITIALIZER_LISTS __has_feature(cxx_generalized_initializers) +#elif GLM_LANG & GLM_LANG_CXX11_FLAG +# define GLM_HAS_INITIALIZER_LISTS 1 +#else +# define GLM_HAS_INITIALIZER_LISTS ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ + ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC15)) || \ + ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_COMPILER >= GLM_COMPILER_INTEL14)) || \ + ((GLM_COMPILER & GLM_COMPILER_CUDA)))) +#endif + +// N2544 Unrestricted unions http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2544.pdf +#if GLM_COMPILER & GLM_COMPILER_CLANG +# define GLM_HAS_UNRESTRICTED_UNIONS __has_feature(cxx_unrestricted_unions) +#elif GLM_LANG & GLM_LANG_CXX11_FLAG +# define GLM_HAS_UNRESTRICTED_UNIONS 1 +#else +# define GLM_HAS_UNRESTRICTED_UNIONS (GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ + (GLM_COMPILER & GLM_COMPILER_VC) || \ + ((GLM_COMPILER & GLM_COMPILER_CUDA))) +#endif + +// N2346 +#if GLM_COMPILER & GLM_COMPILER_CLANG +# define GLM_HAS_DEFAULTED_FUNCTIONS __has_feature(cxx_defaulted_functions) +#elif GLM_LANG & GLM_LANG_CXX11_FLAG +# define GLM_HAS_DEFAULTED_FUNCTIONS 1 +#else +# define GLM_HAS_DEFAULTED_FUNCTIONS ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ + ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC12)) || \ + ((GLM_COMPILER & GLM_COMPILER_INTEL)) || \ + (GLM_COMPILER & GLM_COMPILER_CUDA))) +#endif + +// N2118 +#if GLM_COMPILER & GLM_COMPILER_CLANG +# define GLM_HAS_RVALUE_REFERENCES __has_feature(cxx_rvalue_references) +#elif GLM_LANG & GLM_LANG_CXX11_FLAG +# define GLM_HAS_RVALUE_REFERENCES 1 +#else +# define GLM_HAS_RVALUE_REFERENCES ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ + ((GLM_COMPILER & GLM_COMPILER_VC)) || \ + ((GLM_COMPILER & GLM_COMPILER_CUDA)))) +#endif + +// N2437 http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2437.pdf +#if GLM_COMPILER & 
GLM_COMPILER_CLANG +# define GLM_HAS_EXPLICIT_CONVERSION_OPERATORS __has_feature(cxx_explicit_conversions) +#elif GLM_LANG & GLM_LANG_CXX11_FLAG +# define GLM_HAS_EXPLICIT_CONVERSION_OPERATORS 1 +#else +# define GLM_HAS_EXPLICIT_CONVERSION_OPERATORS ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ + ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_COMPILER >= GLM_COMPILER_INTEL14)) || \ + ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC12)) || \ + ((GLM_COMPILER & GLM_COMPILER_CUDA)))) +#endif + +// N2258 http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2258.pdf +#if GLM_COMPILER & GLM_COMPILER_CLANG +# define GLM_HAS_TEMPLATE_ALIASES __has_feature(cxx_alias_templates) +#elif GLM_LANG & GLM_LANG_CXX11_FLAG +# define GLM_HAS_TEMPLATE_ALIASES 1 +#else +# define GLM_HAS_TEMPLATE_ALIASES ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ + ((GLM_COMPILER & GLM_COMPILER_INTEL)) || \ + ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC12)) || \ + ((GLM_COMPILER & GLM_COMPILER_CUDA)))) +#endif + +// N2930 http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2009/n2930.html +#if GLM_COMPILER & GLM_COMPILER_CLANG +# define GLM_HAS_RANGE_FOR __has_feature(cxx_range_for) +#elif GLM_LANG & GLM_LANG_CXX11_FLAG +# define GLM_HAS_RANGE_FOR 1 +#else +# define GLM_HAS_RANGE_FOR ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ + ((GLM_COMPILER & GLM_COMPILER_INTEL)) || \ + ((GLM_COMPILER & GLM_COMPILER_VC)) || \ + ((GLM_COMPILER & GLM_COMPILER_CUDA)))) +#endif + +// N2341 http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2341.pdf +#if GLM_COMPILER & GLM_COMPILER_CLANG +# define GLM_HAS_ALIGNOF __has_feature(cxx_alignas) +#elif GLM_LANG & GLM_LANG_CXX11_FLAG +# define GLM_HAS_ALIGNOF 1 +#else +# define GLM_HAS_ALIGNOF ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ + ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_COMPILER >= GLM_COMPILER_INTEL15)) || \ + ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC14)) || \ + ((GLM_COMPILER & GLM_COMPILER_CUDA)))) +#endif + +// N2235 Generalized Constant Expressions http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2235.pdf +// N3652 Extended Constant Expressions http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2013/n3652.html +#if (GLM_ARCH & GLM_ARCH_SIMD_BIT) // Compiler SIMD intrinsics don't support constexpr... 
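+// Illustrative consequence (not from the upstream sources): when this branch is
+// taken, GLM_CONSTEXPR (defined just below) expands to nothing, so user code such as
+//   constexpr glm::vec4 v(1.0f); // fine on non-SIMD builds, ill-formed with SIMD storage
+// has to fall back to runtime construction.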
+# define GLM_HAS_CONSTEXPR 0 +#elif (GLM_COMPILER & GLM_COMPILER_CLANG) +# define GLM_HAS_CONSTEXPR __has_feature(cxx_relaxed_constexpr) +#elif (GLM_LANG & GLM_LANG_CXX14_FLAG) +# define GLM_HAS_CONSTEXPR 1 +#else +# define GLM_HAS_CONSTEXPR ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && GLM_HAS_INITIALIZER_LISTS && (\ + ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_COMPILER >= GLM_COMPILER_INTEL17)) || \ + ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC15)))) +#endif + +#if GLM_HAS_CONSTEXPR +# define GLM_CONSTEXPR constexpr +#else +# define GLM_CONSTEXPR +#endif + +// +#if GLM_HAS_CONSTEXPR +# if (GLM_COMPILER & GLM_COMPILER_CLANG) +# if __has_feature(cxx_if_constexpr) +# define GLM_HAS_IF_CONSTEXPR 1 +# else +# define GLM_HAS_IF_CONSTEXPR 0 +# endif +# elif (GLM_LANG & GLM_LANG_CXX17_FLAG) +# define GLM_HAS_IF_CONSTEXPR 1 +# else +# define GLM_HAS_IF_CONSTEXPR 0 +# endif +#else +# define GLM_HAS_IF_CONSTEXPR 0 +#endif + +#if GLM_HAS_IF_CONSTEXPR +# define GLM_IF_CONSTEXPR if constexpr +#else +# define GLM_IF_CONSTEXPR if +#endif + +// +#if GLM_LANG & GLM_LANG_CXX11_FLAG +# define GLM_HAS_ASSIGNABLE 1 +#else +# define GLM_HAS_ASSIGNABLE ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ + ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC15)) || \ + ((GLM_COMPILER & GLM_COMPILER_GCC) && (GLM_COMPILER >= GLM_COMPILER_GCC49)))) +#endif + +// +#define GLM_HAS_TRIVIAL_QUERIES 0 + +// +#if GLM_LANG & GLM_LANG_CXX11_FLAG +# define GLM_HAS_MAKE_SIGNED 1 +#else +# define GLM_HAS_MAKE_SIGNED ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ + ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC12)) || \ + ((GLM_COMPILER & GLM_COMPILER_CUDA)))) +#endif + +// +#if defined(GLM_FORCE_INTRINSICS) +# define GLM_HAS_BITSCAN_WINDOWS ((GLM_PLATFORM & GLM_PLATFORM_WINDOWS) && (\ + ((GLM_COMPILER & GLM_COMPILER_INTEL)) || \ + ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC14) && (GLM_ARCH & GLM_ARCH_X86_BIT)))) +#else +# define GLM_HAS_BITSCAN_WINDOWS 0 +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// OpenMP +#ifdef _OPENMP +# if GLM_COMPILER & GLM_COMPILER_GCC +# if GLM_COMPILER >= GLM_COMPILER_GCC61 +# define GLM_HAS_OPENMP 45 +# elif GLM_COMPILER >= GLM_COMPILER_GCC49 +# define GLM_HAS_OPENMP 40 +# elif GLM_COMPILER >= GLM_COMPILER_GCC47 +# define GLM_HAS_OPENMP 31 +# else +# define GLM_HAS_OPENMP 0 +# endif +# elif GLM_COMPILER & GLM_COMPILER_CLANG +# if GLM_COMPILER >= GLM_COMPILER_CLANG38 +# define GLM_HAS_OPENMP 31 +# else +# define GLM_HAS_OPENMP 0 +# endif +# elif GLM_COMPILER & GLM_COMPILER_VC +# define GLM_HAS_OPENMP 20 +# elif GLM_COMPILER & GLM_COMPILER_INTEL +# if GLM_COMPILER >= GLM_COMPILER_INTEL16 +# define GLM_HAS_OPENMP 40 +# else +# define GLM_HAS_OPENMP 0 +# endif +# else +# define GLM_HAS_OPENMP 0 +# endif +#else +# define GLM_HAS_OPENMP 0 +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// nullptr + +#if GLM_LANG & GLM_LANG_CXX0X_FLAG +# define GLM_CONFIG_NULLPTR GLM_ENABLE +#else +# define GLM_CONFIG_NULLPTR GLM_DISABLE +#endif + +#if GLM_CONFIG_NULLPTR == GLM_ENABLE +# define GLM_NULLPTR nullptr +#else +# define GLM_NULLPTR 0 +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// Static assert + +#if GLM_HAS_STATIC_ASSERT +# define GLM_STATIC_ASSERT(x, message) static_assert(x, message) +#elif GLM_COMPILER & GLM_COMPILER_VC +# define GLM_STATIC_ASSERT(x, message) typedef char 
__CASSERT__##__LINE__[(x) ? 1 : -1] +#else +# define GLM_STATIC_ASSERT(x, message) assert(x) +#endif//GLM_LANG + +/////////////////////////////////////////////////////////////////////////////////// +// Qualifiers + +#if GLM_COMPILER & GLM_COMPILER_CUDA +# define GLM_CUDA_FUNC_DEF __device__ __host__ +# define GLM_CUDA_FUNC_DECL __device__ __host__ +#else +# define GLM_CUDA_FUNC_DEF +# define GLM_CUDA_FUNC_DECL +#endif + +#if defined(GLM_FORCE_INLINE) +# if GLM_COMPILER & GLM_COMPILER_VC +# define GLM_INLINE __forceinline +# define GLM_NEVER_INLINE __declspec((noinline)) +# elif GLM_COMPILER & (GLM_COMPILER_GCC | GLM_COMPILER_CLANG) +# define GLM_INLINE inline __attribute__((__always_inline__)) +# define GLM_NEVER_INLINE __attribute__((__noinline__)) +# elif GLM_COMPILER & GLM_COMPILER_CUDA +# define GLM_INLINE __forceinline__ +# define GLM_NEVER_INLINE __noinline__ +# else +# define GLM_INLINE inline +# define GLM_NEVER_INLINE +# endif//GLM_COMPILER +#else +# define GLM_INLINE inline +# define GLM_NEVER_INLINE +#endif//defined(GLM_FORCE_INLINE) + +#define GLM_FUNC_DECL GLM_CUDA_FUNC_DECL +#define GLM_FUNC_QUALIFIER GLM_CUDA_FUNC_DEF GLM_INLINE + +/////////////////////////////////////////////////////////////////////////////////// +// Swizzle operators + +// User defines: GLM_FORCE_SWIZZLE + +#define GLM_SWIZZLE_DISABLED 0 +#define GLM_SWIZZLE_OPERATOR 1 +#define GLM_SWIZZLE_FUNCTION 2 + +#if defined(GLM_FORCE_XYZW_ONLY) +# undef GLM_FORCE_SWIZZLE +#endif + +#if defined(GLM_SWIZZLE) +# pragma message("GLM: GLM_SWIZZLE is deprecated, use GLM_FORCE_SWIZZLE instead.") +# define GLM_FORCE_SWIZZLE +#endif + +#if defined(GLM_FORCE_SWIZZLE) && (GLM_LANG & GLM_LANG_CXXMS_FLAG) +# define GLM_CONFIG_SWIZZLE GLM_SWIZZLE_OPERATOR +#elif defined(GLM_FORCE_SWIZZLE) +# define GLM_CONFIG_SWIZZLE GLM_SWIZZLE_FUNCTION +#else +# define GLM_CONFIG_SWIZZLE GLM_SWIZZLE_DISABLED +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// Allows using not basic types as genType + +// #define GLM_FORCE_UNRESTRICTED_GENTYPE + +#ifdef GLM_FORCE_UNRESTRICTED_GENTYPE +# define GLM_CONFIG_UNRESTRICTED_GENTYPE GLM_ENABLE +#else +# define GLM_CONFIG_UNRESTRICTED_GENTYPE GLM_DISABLE +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// Clip control, define GLM_FORCE_DEPTH_ZERO_TO_ONE before including GLM +// to use a clip space between 0 to 1. +// Coordinate system, define GLM_FORCE_LEFT_HANDED before including GLM +// to use left handed coordinate system by default. 
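+// Illustrative usage (not from the upstream sources): a Vulkan-style setup would
+// define both toggles before the first GLM include, e.g.
+//   #define GLM_FORCE_DEPTH_ZERO_TO_ONE
+//   #define GLM_FORCE_LEFT_HANDED
+//   #include <glm/glm.hpp> // GLM_CONFIG_CLIP_CONTROL becomes GLM_CLIP_CONTROL_LH_ZO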
+ +#define GLM_CLIP_CONTROL_ZO_BIT (1 << 0) // ZERO_TO_ONE +#define GLM_CLIP_CONTROL_NO_BIT (1 << 1) // NEGATIVE_ONE_TO_ONE +#define GLM_CLIP_CONTROL_LH_BIT (1 << 2) // LEFT_HANDED, For DirectX, Metal, Vulkan +#define GLM_CLIP_CONTROL_RH_BIT (1 << 3) // RIGHT_HANDED, For OpenGL, default in GLM + +#define GLM_CLIP_CONTROL_LH_ZO (GLM_CLIP_CONTROL_LH_BIT | GLM_CLIP_CONTROL_ZO_BIT) +#define GLM_CLIP_CONTROL_LH_NO (GLM_CLIP_CONTROL_LH_BIT | GLM_CLIP_CONTROL_NO_BIT) +#define GLM_CLIP_CONTROL_RH_ZO (GLM_CLIP_CONTROL_RH_BIT | GLM_CLIP_CONTROL_ZO_BIT) +#define GLM_CLIP_CONTROL_RH_NO (GLM_CLIP_CONTROL_RH_BIT | GLM_CLIP_CONTROL_NO_BIT) + +#ifdef GLM_FORCE_DEPTH_ZERO_TO_ONE +# ifdef GLM_FORCE_LEFT_HANDED +# define GLM_CONFIG_CLIP_CONTROL GLM_CLIP_CONTROL_LH_ZO +# else +# define GLM_CONFIG_CLIP_CONTROL GLM_CLIP_CONTROL_RH_ZO +# endif +#else +# ifdef GLM_FORCE_LEFT_HANDED +# define GLM_CONFIG_CLIP_CONTROL GLM_CLIP_CONTROL_LH_NO +# else +# define GLM_CONFIG_CLIP_CONTROL GLM_CLIP_CONTROL_RH_NO +# endif +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// Qualifiers + +#if (GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS)) +# define GLM_DEPRECATED __declspec(deprecated) +# define GLM_ALIGNED_TYPEDEF(type, name, alignment) typedef __declspec(align(alignment)) type name +#elif GLM_COMPILER & (GLM_COMPILER_GCC | GLM_COMPILER_CLANG | GLM_COMPILER_INTEL) +# define GLM_DEPRECATED __attribute__((__deprecated__)) +# define GLM_ALIGNED_TYPEDEF(type, name, alignment) typedef type name __attribute__((aligned(alignment))) +#elif GLM_COMPILER & GLM_COMPILER_CUDA +# define GLM_DEPRECATED +# define GLM_ALIGNED_TYPEDEF(type, name, alignment) typedef type name __align__(x) +#else +# define GLM_DEPRECATED +# define GLM_ALIGNED_TYPEDEF(type, name, alignment) typedef type name +#endif + +/////////////////////////////////////////////////////////////////////////////////// + +#ifdef GLM_FORCE_EXPLICIT_CTOR +# define GLM_EXPLICIT explicit +#else +# define GLM_EXPLICIT +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// SYCL + +#if GLM_COMPILER==GLM_COMPILER_SYCL + +#include +#include + +namespace glm { +namespace std { + // Import SYCL's functions into the namespace glm::std to force their usages. + // It's important to use the math built-in function (sin, exp, ...) + // of SYCL instead the std ones. + using namespace cl::sycl; + + /////////////////////////////////////////////////////////////////////////////// + // Import some "harmless" std's stuffs used by glm into + // the new glm::std namespace. + template + using numeric_limits = ::std::numeric_limits; + + using ::std::size_t; + + using ::std::uint8_t; + using ::std::uint16_t; + using ::std::uint32_t; + using ::std::uint64_t; + + using ::std::int8_t; + using ::std::int16_t; + using ::std::int32_t; + using ::std::int64_t; + + using ::std::make_unsigned; + /////////////////////////////////////////////////////////////////////////////// +} //namespace std +} //namespace glm + +#endif + +/////////////////////////////////////////////////////////////////////////////////// + +/////////////////////////////////////////////////////////////////////////////////// +// Length type: all length functions returns a length_t type. +// When GLM_FORCE_SIZE_T_LENGTH is defined, length_t is a typedef of size_t otherwise +// length_t is a typedef of int like GLSL defines it. 
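+// Illustrative usage (not from the upstream sources): loops indexed with length_t
+// stay correct under either configuration, e.g.
+//   for(glm::length_t i = 0; i < v.length(); ++i) { /* use v[i] */ }
+// compiles unchanged whether length_t is int (the default) or size_t
+// (GLM_FORCE_SIZE_T_LENGTH).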
+ +#define GLM_LENGTH_INT 1 +#define GLM_LENGTH_SIZE_T 2 + +#ifdef GLM_FORCE_SIZE_T_LENGTH +# define GLM_CONFIG_LENGTH_TYPE GLM_LENGTH_SIZE_T +#else +# define GLM_CONFIG_LENGTH_TYPE GLM_LENGTH_INT +#endif + +namespace glm +{ + using std::size_t; +# if GLM_CONFIG_LENGTH_TYPE == GLM_LENGTH_SIZE_T + typedef size_t length_t; +# else + typedef int length_t; +# endif +}//namespace glm + +/////////////////////////////////////////////////////////////////////////////////// +// constexpr + +#if GLM_HAS_CONSTEXPR +# define GLM_CONFIG_CONSTEXP GLM_ENABLE + + namespace glm + { + template + constexpr std::size_t countof(T const (&)[N]) + { + return N; + } + }//namespace glm +# define GLM_COUNTOF(arr) glm::countof(arr) +#elif defined(_MSC_VER) +# define GLM_CONFIG_CONSTEXP GLM_DISABLE + +# define GLM_COUNTOF(arr) _countof(arr) +#else +# define GLM_CONFIG_CONSTEXP GLM_DISABLE + +# define GLM_COUNTOF(arr) sizeof(arr) / sizeof(arr[0]) +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// uint + +namespace glm{ +namespace detail +{ + template + struct is_int + { + enum test {value = 0}; + }; + + template<> + struct is_int + { + enum test {value = ~0}; + }; + + template<> + struct is_int + { + enum test {value = ~0}; + }; +}//namespace detail + + typedef unsigned int uint; +}//namespace glm + +/////////////////////////////////////////////////////////////////////////////////// +// 64-bit int + +#if GLM_HAS_EXTENDED_INTEGER_TYPE +# include +#endif + +namespace glm{ +namespace detail +{ +# if GLM_HAS_EXTENDED_INTEGER_TYPE + typedef std::uint64_t uint64; + typedef std::int64_t int64; +# elif (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) // C99 detected, 64 bit types available + typedef uint64_t uint64; + typedef int64_t int64; +# elif GLM_COMPILER & GLM_COMPILER_VC + typedef unsigned __int64 uint64; + typedef signed __int64 int64; +# elif GLM_COMPILER & GLM_COMPILER_GCC +# pragma GCC diagnostic ignored "-Wlong-long" + __extension__ typedef unsigned long long uint64; + __extension__ typedef signed long long int64; +# elif (GLM_COMPILER & GLM_COMPILER_CLANG) +# pragma clang diagnostic ignored "-Wc++11-long-long" + typedef unsigned long long uint64; + typedef signed long long int64; +# else//unknown compiler + typedef unsigned long long uint64; + typedef signed long long int64; +# endif +}//namespace detail +}//namespace glm + +/////////////////////////////////////////////////////////////////////////////////// +// make_unsigned + +#if GLM_HAS_MAKE_SIGNED +# include + +namespace glm{ +namespace detail +{ + using std::make_unsigned; +}//namespace detail +}//namespace glm + +#else + +namespace glm{ +namespace detail +{ + template + struct make_unsigned + {}; + + template<> + struct make_unsigned + { + typedef unsigned char type; + }; + + template<> + struct make_unsigned + { + typedef unsigned char type; + }; + + template<> + struct make_unsigned + { + typedef unsigned short type; + }; + + template<> + struct make_unsigned + { + typedef unsigned int type; + }; + + template<> + struct make_unsigned + { + typedef unsigned long type; + }; + + template<> + struct make_unsigned + { + typedef uint64 type; + }; + + template<> + struct make_unsigned + { + typedef unsigned char type; + }; + + template<> + struct make_unsigned + { + typedef unsigned short type; + }; + + template<> + struct make_unsigned + { + typedef unsigned int type; + }; + + template<> + struct make_unsigned + { + typedef unsigned long type; + }; + + template<> + struct make_unsigned + { + typedef 
uint64 type; + }; +}//namespace detail +}//namespace glm +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// Only use x, y, z, w as vector type components + +#ifdef GLM_FORCE_XYZW_ONLY +# define GLM_CONFIG_XYZW_ONLY GLM_ENABLE +#else +# define GLM_CONFIG_XYZW_ONLY GLM_DISABLE +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// Configure the use of defaulted initialized types + +#define GLM_CTOR_INIT_DISABLE 0 +#define GLM_CTOR_INITIALIZER_LIST 1 +#define GLM_CTOR_INITIALISATION 2 + +#if defined(GLM_FORCE_CTOR_INIT) && GLM_HAS_INITIALIZER_LISTS +# define GLM_CONFIG_CTOR_INIT GLM_CTOR_INITIALIZER_LIST +#elif defined(GLM_FORCE_CTOR_INIT) && !GLM_HAS_INITIALIZER_LISTS +# define GLM_CONFIG_CTOR_INIT GLM_CTOR_INITIALISATION +#else +# define GLM_CONFIG_CTOR_INIT GLM_CTOR_INIT_DISABLE +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// Use SIMD instruction sets + +#if GLM_HAS_ALIGNOF && (GLM_LANG & GLM_LANG_CXXMS_FLAG) && (GLM_ARCH & GLM_ARCH_SIMD_BIT) +# define GLM_CONFIG_SIMD GLM_ENABLE +#else +# define GLM_CONFIG_SIMD GLM_DISABLE +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// Configure the use of defaulted function + +#if GLM_HAS_DEFAULTED_FUNCTIONS && GLM_CONFIG_CTOR_INIT == GLM_CTOR_INIT_DISABLE +# define GLM_CONFIG_DEFAULTED_FUNCTIONS GLM_ENABLE +# define GLM_DEFAULT = default +#else +# define GLM_CONFIG_DEFAULTED_FUNCTIONS GLM_DISABLE +# define GLM_DEFAULT +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// Configure the use of aligned gentypes + +#ifdef GLM_FORCE_ALIGNED // Legacy define +# define GLM_FORCE_DEFAULT_ALIGNED_GENTYPES +#endif + +#ifdef GLM_FORCE_DEFAULT_ALIGNED_GENTYPES +# define GLM_FORCE_ALIGNED_GENTYPES +#endif + +#if GLM_HAS_ALIGNOF && (GLM_LANG & GLM_LANG_CXXMS_FLAG) && (defined(GLM_FORCE_ALIGNED_GENTYPES) || (GLM_CONFIG_SIMD == GLM_ENABLE)) +# define GLM_CONFIG_ALIGNED_GENTYPES GLM_ENABLE +#else +# define GLM_CONFIG_ALIGNED_GENTYPES GLM_DISABLE +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// Configure the use of anonymous structure as implementation detail + +#if ((GLM_CONFIG_SIMD == GLM_ENABLE) || (GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR) || (GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE)) +# define GLM_CONFIG_ANONYMOUS_STRUCT GLM_ENABLE +#else +# define GLM_CONFIG_ANONYMOUS_STRUCT GLM_DISABLE +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// Silent warnings + +#ifdef GLM_FORCE_SILENT_WARNINGS +# define GLM_SILENT_WARNINGS GLM_ENABLE +#else +# define GLM_SILENT_WARNINGS GLM_DISABLE +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// Precision + +#define GLM_HIGHP 1 +#define GLM_MEDIUMP 2 +#define GLM_LOWP 3 + +#if defined(GLM_FORCE_PRECISION_HIGHP_BOOL) || defined(GLM_PRECISION_HIGHP_BOOL) +# define GLM_CONFIG_PRECISION_BOOL GLM_HIGHP +#elif defined(GLM_FORCE_PRECISION_MEDIUMP_BOOL) || defined(GLM_PRECISION_MEDIUMP_BOOL) +# define GLM_CONFIG_PRECISION_BOOL GLM_MEDIUMP +#elif defined(GLM_FORCE_PRECISION_LOWP_BOOL) || defined(GLM_PRECISION_LOWP_BOOL) +# define GLM_CONFIG_PRECISION_BOOL GLM_LOWP +#else +# define GLM_CONFIG_PRECISION_BOOL GLM_HIGHP +#endif + +#if defined(GLM_FORCE_PRECISION_HIGHP_INT) || defined(GLM_PRECISION_HIGHP_INT) +# define GLM_CONFIG_PRECISION_INT GLM_HIGHP +#elif 
defined(GLM_FORCE_PRECISION_MEDIUMP_INT) || defined(GLM_PRECISION_MEDIUMP_INT) +# define GLM_CONFIG_PRECISION_INT GLM_MEDIUMP +#elif defined(GLM_FORCE_PRECISION_LOWP_INT) || defined(GLM_PRECISION_LOWP_INT) +# define GLM_CONFIG_PRECISION_INT GLM_LOWP +#else +# define GLM_CONFIG_PRECISION_INT GLM_HIGHP +#endif + +#if defined(GLM_FORCE_PRECISION_HIGHP_UINT) || defined(GLM_PRECISION_HIGHP_UINT) +# define GLM_CONFIG_PRECISION_UINT GLM_HIGHP +#elif defined(GLM_FORCE_PRECISION_MEDIUMP_UINT) || defined(GLM_PRECISION_MEDIUMP_UINT) +# define GLM_CONFIG_PRECISION_UINT GLM_MEDIUMP +#elif defined(GLM_FORCE_PRECISION_LOWP_UINT) || defined(GLM_PRECISION_LOWP_UINT) +# define GLM_CONFIG_PRECISION_UINT GLM_LOWP +#else +# define GLM_CONFIG_PRECISION_UINT GLM_HIGHP +#endif + +#if defined(GLM_FORCE_PRECISION_HIGHP_FLOAT) || defined(GLM_PRECISION_HIGHP_FLOAT) +# define GLM_CONFIG_PRECISION_FLOAT GLM_HIGHP +#elif defined(GLM_FORCE_PRECISION_MEDIUMP_FLOAT) || defined(GLM_PRECISION_MEDIUMP_FLOAT) +# define GLM_CONFIG_PRECISION_FLOAT GLM_MEDIUMP +#elif defined(GLM_FORCE_PRECISION_LOWP_FLOAT) || defined(GLM_PRECISION_LOWP_FLOAT) +# define GLM_CONFIG_PRECISION_FLOAT GLM_LOWP +#else +# define GLM_CONFIG_PRECISION_FLOAT GLM_HIGHP +#endif + +#if defined(GLM_FORCE_PRECISION_HIGHP_DOUBLE) || defined(GLM_PRECISION_HIGHP_DOUBLE) +# define GLM_CONFIG_PRECISION_DOUBLE GLM_HIGHP +#elif defined(GLM_FORCE_PRECISION_MEDIUMP_DOUBLE) || defined(GLM_PRECISION_MEDIUMP_DOUBLE) +# define GLM_CONFIG_PRECISION_DOUBLE GLM_MEDIUMP +#elif defined(GLM_FORCE_PRECISION_LOWP_DOUBLE) || defined(GLM_PRECISION_LOWP_DOUBLE) +# define GLM_CONFIG_PRECISION_DOUBLE GLM_LOWP +#else +# define GLM_CONFIG_PRECISION_DOUBLE GLM_HIGHP +#endif + +/////////////////////////////////////////////////////////////////////////////////// +// Check inclusions of different versions of GLM + +#elif ((GLM_SETUP_INCLUDED != GLM_VERSION) && !defined(GLM_FORCE_IGNORE_VERSION)) +# error "GLM error: A different version of GLM is already included. Define GLM_FORCE_IGNORE_VERSION before including GLM headers to ignore this error." 
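+// Illustrative usage (not from the upstream sources): if two different GLM copies
+// are reachable on the include path, defining the escape hatch named above,
+//   #define GLM_FORCE_IGNORE_VERSION
+//   #include <glm/glm.hpp>
+// before any GLM header downgrades the hard error; the first-included version's
+// configuration then stays in effect.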
+#elif GLM_SETUP_INCLUDED == GLM_VERSION + +/////////////////////////////////////////////////////////////////////////////////// +// Messages + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_MESSAGE_DISPLAYED) +# define GLM_MESSAGE_DISPLAYED +# define GLM_STR_HELPER(x) #x +# define GLM_STR(x) GLM_STR_HELPER(x) + + // Report GLM version +# pragma message (GLM_STR(GLM_VERSION_MESSAGE)) + + // Report C++ language +# if (GLM_LANG & GLM_LANG_CXX2A_FLAG) && (GLM_LANG & GLM_LANG_EXT) +# pragma message("GLM: C++ 2A with extensions") +# elif (GLM_LANG & GLM_LANG_CXX2A_FLAG) +# pragma message("GLM: C++ 2A") +# elif (GLM_LANG & GLM_LANG_CXX17_FLAG) && (GLM_LANG & GLM_LANG_EXT) +# pragma message("GLM: C++ 17 with extensions") +# elif (GLM_LANG & GLM_LANG_CXX17_FLAG) +# pragma message("GLM: C++ 17") +# elif (GLM_LANG & GLM_LANG_CXX14_FLAG) && (GLM_LANG & GLM_LANG_EXT) +# pragma message("GLM: C++ 14 with extensions") +# elif (GLM_LANG & GLM_LANG_CXX14_FLAG) +# pragma message("GLM: C++ 14") +# elif (GLM_LANG & GLM_LANG_CXX11_FLAG) && (GLM_LANG & GLM_LANG_EXT) +# pragma message("GLM: C++ 11 with extensions") +# elif (GLM_LANG & GLM_LANG_CXX11_FLAG) +# pragma message("GLM: C++ 11") +# elif (GLM_LANG & GLM_LANG_CXX0X_FLAG) && (GLM_LANG & GLM_LANG_EXT) +# pragma message("GLM: C++ 0x with extensions") +# elif (GLM_LANG & GLM_LANG_CXX0X_FLAG) +# pragma message("GLM: C++ 0x") +# elif (GLM_LANG & GLM_LANG_CXX03_FLAG) && (GLM_LANG & GLM_LANG_EXT) +# pragma message("GLM: C++ 03 with extensions") +# elif (GLM_LANG & GLM_LANG_CXX03_FLAG) +# pragma message("GLM: C++ 03") +# elif (GLM_LANG & GLM_LANG_CXX98_FLAG) && (GLM_LANG & GLM_LANG_EXT) +# pragma message("GLM: C++ 98 with extensions") +# elif (GLM_LANG & GLM_LANG_CXX98_FLAG) +# pragma message("GLM: C++ 98") +# else +# pragma message("GLM: C++ language undetected") +# endif//GLM_LANG + + // Report compiler detection +# if GLM_COMPILER & GLM_COMPILER_CUDA +# pragma message("GLM: CUDA compiler detected") +# elif GLM_COMPILER & GLM_COMPILER_VC +# pragma message("GLM: Visual C++ compiler detected") +# elif GLM_COMPILER & GLM_COMPILER_CLANG +# pragma message("GLM: Clang compiler detected") +# elif GLM_COMPILER & GLM_COMPILER_INTEL +# pragma message("GLM: Intel Compiler detected") +# elif GLM_COMPILER & GLM_COMPILER_GCC +# pragma message("GLM: GCC compiler detected") +# else +# pragma message("GLM: Compiler not detected") +# endif + + // Report build target +# if (GLM_ARCH & GLM_ARCH_AVX2_BIT) && (GLM_MODEL == GLM_MODEL_64) +# pragma message("GLM: x86 64 bits with AVX2 instruction set build target") +# elif (GLM_ARCH & GLM_ARCH_AVX2_BIT) && (GLM_MODEL == GLM_MODEL_32) +# pragma message("GLM: x86 32 bits with AVX2 instruction set build target") + +# elif (GLM_ARCH & GLM_ARCH_AVX_BIT) && (GLM_MODEL == GLM_MODEL_64) +# pragma message("GLM: x86 64 bits with AVX instruction set build target") +# elif (GLM_ARCH & GLM_ARCH_AVX_BIT) && (GLM_MODEL == GLM_MODEL_32) +# pragma message("GLM: x86 32 bits with AVX instruction set build target") + +# elif (GLM_ARCH & GLM_ARCH_SSE42_BIT) && (GLM_MODEL == GLM_MODEL_64) +# pragma message("GLM: x86 64 bits with SSE4.2 instruction set build target") +# elif (GLM_ARCH & GLM_ARCH_SSE42_BIT) && (GLM_MODEL == GLM_MODEL_32) +# pragma message("GLM: x86 32 bits with SSE4.2 instruction set build target") + +# elif (GLM_ARCH & GLM_ARCH_SSE41_BIT) && (GLM_MODEL == GLM_MODEL_64) +# pragma message("GLM: x86 64 bits with SSE4.1 instruction set build target") +# elif (GLM_ARCH & GLM_ARCH_SSE41_BIT) && (GLM_MODEL == GLM_MODEL_32) +# pragma message("GLM: 
x86 32 bits with SSE4.1 instruction set build target") + +# elif (GLM_ARCH & GLM_ARCH_SSSE3_BIT) && (GLM_MODEL == GLM_MODEL_64) +# pragma message("GLM: x86 64 bits with SSSE3 instruction set build target") +# elif (GLM_ARCH & GLM_ARCH_SSSE3_BIT) && (GLM_MODEL == GLM_MODEL_32) +# pragma message("GLM: x86 32 bits with SSSE3 instruction set build target") + +# elif (GLM_ARCH & GLM_ARCH_SSE3_BIT) && (GLM_MODEL == GLM_MODEL_64) +# pragma message("GLM: x86 64 bits with SSE3 instruction set build target") +# elif (GLM_ARCH & GLM_ARCH_SSE3_BIT) && (GLM_MODEL == GLM_MODEL_32) +# pragma message("GLM: x86 32 bits with SSE3 instruction set build target") + +# elif (GLM_ARCH & GLM_ARCH_SSE2_BIT) && (GLM_MODEL == GLM_MODEL_64) +# pragma message("GLM: x86 64 bits with SSE2 instruction set build target") +# elif (GLM_ARCH & GLM_ARCH_SSE2_BIT) && (GLM_MODEL == GLM_MODEL_32) +# pragma message("GLM: x86 32 bits with SSE2 instruction set build target") + +# elif (GLM_ARCH & GLM_ARCH_X86_BIT) && (GLM_MODEL == GLM_MODEL_64) +# pragma message("GLM: x86 64 bits build target") +# elif (GLM_ARCH & GLM_ARCH_X86_BIT) && (GLM_MODEL == GLM_MODEL_32) +# pragma message("GLM: x86 32 bits build target") + +# elif (GLM_ARCH & GLM_ARCH_NEON_BIT) && (GLM_MODEL == GLM_MODEL_64) +# pragma message("GLM: ARM 64 bits with Neon instruction set build target") +# elif (GLM_ARCH & GLM_ARCH_NEON_BIT) && (GLM_MODEL == GLM_MODEL_32) +# pragma message("GLM: ARM 32 bits with Neon instruction set build target") + +# elif (GLM_ARCH & GLM_ARCH_ARM_BIT) && (GLM_MODEL == GLM_MODEL_64) +# pragma message("GLM: ARM 64 bits build target") +# elif (GLM_ARCH & GLM_ARCH_ARM_BIT) && (GLM_MODEL == GLM_MODEL_32) +# pragma message("GLM: ARM 32 bits build target") + +# elif (GLM_ARCH & GLM_ARCH_MIPS_BIT) && (GLM_MODEL == GLM_MODEL_64) +# pragma message("GLM: MIPS 64 bits build target") +# elif (GLM_ARCH & GLM_ARCH_MIPS_BIT) && (GLM_MODEL == GLM_MODEL_32) +# pragma message("GLM: MIPS 32 bits build target") + +# elif (GLM_ARCH & GLM_ARCH_PPC_BIT) && (GLM_MODEL == GLM_MODEL_64) +# pragma message("GLM: PowerPC 64 bits build target") +# elif (GLM_ARCH & GLM_ARCH_PPC_BIT) && (GLM_MODEL == GLM_MODEL_32) +# pragma message("GLM: PowerPC 32 bits build target") +# else +# pragma message("GLM: Unknown build target") +# endif//GLM_ARCH + + // Report platform name +# if(GLM_PLATFORM & GLM_PLATFORM_QNXNTO) +# pragma message("GLM: QNX platform detected") +//# elif(GLM_PLATFORM & GLM_PLATFORM_IOS) +//# pragma message("GLM: iOS platform detected") +# elif(GLM_PLATFORM & GLM_PLATFORM_APPLE) +# pragma message("GLM: Apple platform detected") +# elif(GLM_PLATFORM & GLM_PLATFORM_WINCE) +# pragma message("GLM: WinCE platform detected") +# elif(GLM_PLATFORM & GLM_PLATFORM_WINDOWS) +# pragma message("GLM: Windows platform detected") +# elif(GLM_PLATFORM & GLM_PLATFORM_CHROME_NACL) +# pragma message("GLM: Native Client detected") +# elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID) +# pragma message("GLM: Android platform detected") +# elif(GLM_PLATFORM & GLM_PLATFORM_LINUX) +# pragma message("GLM: Linux platform detected") +# elif(GLM_PLATFORM & GLM_PLATFORM_UNIX) +# pragma message("GLM: UNIX platform detected") +# elif(GLM_PLATFORM & GLM_PLATFORM_UNKNOWN) +# pragma message("GLM: platform unknown") +# else +# pragma message("GLM: platform not detected") +# endif + + // Report whether only xyzw component are used +# if defined GLM_FORCE_XYZW_ONLY +# pragma message("GLM: GLM_FORCE_XYZW_ONLY is defined. Only x, y, z and w component are available in vector type. 
This define disables swizzle operators and SIMD instruction sets.") +# endif + + // Report swizzle operator support +# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR +# pragma message("GLM: GLM_FORCE_SWIZZLE is defined, swizzling operators enabled.") +# elif GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION +# pragma message("GLM: GLM_FORCE_SWIZZLE is defined, swizzling functions enabled. Enable compiler C++ language extensions to enable swizzle operators.") +# else +# pragma message("GLM: GLM_FORCE_SWIZZLE is undefined. swizzling functions or operators are disabled.") +# endif + + // Report .length() type +# if GLM_CONFIG_LENGTH_TYPE == GLM_LENGTH_SIZE_T +# pragma message("GLM: GLM_FORCE_SIZE_T_LENGTH is defined. .length() returns a glm::length_t, a typedef of std::size_t.") +# else +# pragma message("GLM: GLM_FORCE_SIZE_T_LENGTH is undefined. .length() returns a glm::length_t, a typedef of int following GLSL.") +# endif + +# if GLM_CONFIG_UNRESTRICTED_GENTYPE == GLM_ENABLE +# pragma message("GLM: GLM_FORCE_UNRESTRICTED_GENTYPE is defined. Removes GLSL restrictions on valid function genTypes.") +# else +# pragma message("GLM: GLM_FORCE_UNRESTRICTED_GENTYPE is undefined. Follows strictly GLSL on valid function genTypes.") +# endif + +# if GLM_SILENT_WARNINGS == GLM_ENABLE +# pragma message("GLM: GLM_FORCE_SILENT_WARNINGS is defined. Ignores C++ warnings from using C++ language extensions.") +# else +# pragma message("GLM: GLM_FORCE_SILENT_WARNINGS is undefined. Shows C++ warnings from using C++ language extensions.") +# endif + +# ifdef GLM_FORCE_SINGLE_ONLY +# pragma message("GLM: GLM_FORCE_SINGLE_ONLY is defined. Using only single precision floating-point types.") +# endif + +# if defined(GLM_FORCE_ALIGNED_GENTYPES) && (GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE) +# undef GLM_FORCE_ALIGNED_GENTYPES +# pragma message("GLM: GLM_FORCE_ALIGNED_GENTYPES is defined, allowing aligned types. This prevents the use of C++ constexpr.") +# elif defined(GLM_FORCE_ALIGNED_GENTYPES) && (GLM_CONFIG_ALIGNED_GENTYPES == GLM_DISABLE) +# undef GLM_FORCE_ALIGNED_GENTYPES +# pragma message("GLM: GLM_FORCE_ALIGNED_GENTYPES is defined but is disabled. It requires C++11 and language extensions.") +# endif + +# if defined(GLM_FORCE_DEFAULT_ALIGNED_GENTYPES) +# if GLM_CONFIG_ALIGNED_GENTYPES == GLM_DISABLE +# undef GLM_FORCE_DEFAULT_ALIGNED_GENTYPES +# pragma message("GLM: GLM_FORCE_DEFAULT_ALIGNED_GENTYPES is defined but is disabled. It requires C++11 and language extensions.") +# elif GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE +# pragma message("GLM: GLM_FORCE_DEFAULT_ALIGNED_GENTYPES is defined. All gentypes (e.g. vec3) will be aligned and padded by default.") +# endif +# endif + +# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT +# pragma message("GLM: GLM_FORCE_DEPTH_ZERO_TO_ONE is defined. Using zero to one depth clip space.") +# else +# pragma message("GLM: GLM_FORCE_DEPTH_ZERO_TO_ONE is undefined. Using negative one to one depth clip space.") +# endif + +# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT +# pragma message("GLM: GLM_FORCE_LEFT_HANDED is defined. Using left handed coordinate system.") +# else +# pragma message("GLM: GLM_FORCE_LEFT_HANDED is undefined. 
Using right handed coordinate system.") +# endif +#endif//GLM_MESSAGES + +#endif//GLM_SETUP_INCLUDED diff --git a/MacroLibX/third_party/glm/detail/type_float.hpp b/MacroLibX/third_party/glm/detail/type_float.hpp new file mode 100644 index 0000000..c8037eb --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_float.hpp @@ -0,0 +1,68 @@ +#pragma once + +#include "setup.hpp" + +#if GLM_COMPILER == GLM_COMPILER_VC12 +# pragma warning(push) +# pragma warning(disable: 4512) // assignment operator could not be generated +#endif + +namespace glm{ +namespace detail +{ + template + union float_t + {}; + + // https://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/ + template <> + union float_t + { + typedef int int_type; + typedef float float_type; + + GLM_CONSTEXPR float_t(float_type Num = 0.0f) : f(Num) {} + + GLM_CONSTEXPR float_t& operator=(float_t const& x) + { + f = x.f; + return *this; + } + + // Portable extraction of components. + GLM_CONSTEXPR bool negative() const { return i < 0; } + GLM_CONSTEXPR int_type mantissa() const { return i & ((1 << 23) - 1); } + GLM_CONSTEXPR int_type exponent() const { return (i >> 23) & ((1 << 8) - 1); } + + int_type i; + float_type f; + }; + + template <> + union float_t + { + typedef detail::int64 int_type; + typedef double float_type; + + GLM_CONSTEXPR float_t(float_type Num = static_cast(0)) : f(Num) {} + + GLM_CONSTEXPR float_t& operator=(float_t const& x) + { + f = x.f; + return *this; + } + + // Portable extraction of components. + GLM_CONSTEXPR bool negative() const { return i < 0; } + GLM_CONSTEXPR int_type mantissa() const { return i & ((int_type(1) << 52) - 1); } + GLM_CONSTEXPR int_type exponent() const { return (i >> 52) & ((int_type(1) << 11) - 1); } + + int_type i; + float_type f; + }; +}//namespace detail +}//namespace glm + +#if GLM_COMPILER == GLM_COMPILER_VC12 +# pragma warning(pop) +#endif diff --git a/MacroLibX/third_party/glm/detail/type_half.hpp b/MacroLibX/third_party/glm/detail/type_half.hpp new file mode 100644 index 0000000..40b8bec --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_half.hpp @@ -0,0 +1,16 @@ +#pragma once + +#include "setup.hpp" + +namespace glm{ +namespace detail +{ + typedef short hdata; + + GLM_FUNC_DECL float toFloat32(hdata value); + GLM_FUNC_DECL hdata toFloat16(float const& value); + +}//namespace detail +}//namespace glm + +#include "type_half.inl" diff --git a/MacroLibX/third_party/glm/detail/type_half.inl b/MacroLibX/third_party/glm/detail/type_half.inl new file mode 100644 index 0000000..b0723e3 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_half.inl @@ -0,0 +1,241 @@ +namespace glm{ +namespace detail +{ + GLM_FUNC_QUALIFIER float overflow() + { + volatile float f = 1e10; + + for(int i = 0; i < 10; ++i) + f *= f; // this will overflow before the for loop terminates + return f; + } + + union uif32 + { + GLM_FUNC_QUALIFIER uif32() : + i(0) + {} + + GLM_FUNC_QUALIFIER uif32(float f_) : + f(f_) + {} + + GLM_FUNC_QUALIFIER uif32(unsigned int i_) : + i(i_) + {} + + float f; + unsigned int i; + }; + + GLM_FUNC_QUALIFIER float toFloat32(hdata value) + { + int s = (value >> 15) & 0x00000001; + int e = (value >> 10) & 0x0000001f; + int m = value & 0x000003ff; + + if(e == 0) + { + if(m == 0) + { + // + // Plus or minus zero + // + + detail::uif32 result; + result.i = static_cast(s << 31); + return result.f; + } + else + { + // + // Denormalized number -- renormalize it + // + + while(!(m & 0x00000400)) + { + m <<= 1; + e -= 1; + } + + e += 1; + m &= 
~0x00000400; + } + } + else if(e == 31) + { + if(m == 0) + { + // + // Positive or negative infinity + // + + uif32 result; + result.i = static_cast((s << 31) | 0x7f800000); + return result.f; + } + else + { + // + // Nan -- preserve sign and significand bits + // + + uif32 result; + result.i = static_cast((s << 31) | 0x7f800000 | (m << 13)); + return result.f; + } + } + + // + // Normalized number + // + + e = e + (127 - 15); + m = m << 13; + + // + // Assemble s, e and m. + // + + uif32 Result; + Result.i = static_cast((s << 31) | (e << 23) | m); + return Result.f; + } + + GLM_FUNC_QUALIFIER hdata toFloat16(float const& f) + { + uif32 Entry; + Entry.f = f; + int i = static_cast(Entry.i); + + // + // Our floating point number, f, is represented by the bit + // pattern in integer i. Disassemble that bit pattern into + // the sign, s, the exponent, e, and the significand, m. + // Shift s into the position where it will go in the + // resulting half number. + // Adjust e, accounting for the different exponent bias + // of float and half (127 versus 15). + // + + int s = (i >> 16) & 0x00008000; + int e = ((i >> 23) & 0x000000ff) - (127 - 15); + int m = i & 0x007fffff; + + // + // Now reassemble s, e and m into a half: + // + + if(e <= 0) + { + if(e < -10) + { + // + // E is less than -10. The absolute value of f is + // less than half_MIN (f may be a small normalized + // float, a denormalized float or a zero). + // + // We convert f to a half zero. + // + + return hdata(s); + } + + // + // E is between -10 and 0. F is a normalized float, + // whose magnitude is less than __half_NRM_MIN. + // + // We convert f to a denormalized half. + // + + m = (m | 0x00800000) >> (1 - e); + + // + // Round to nearest, round "0.5" up. + // + // Rounding may cause the significand to overflow and make + // our number normalized. Because of the way a half's bits + // are laid out, we don't have to treat this case separately; + // the code below will handle it correctly. + // + + if(m & 0x00001000) + m += 0x00002000; + + // + // Assemble the half from s, e (zero) and m. + // + + return hdata(s | (m >> 13)); + } + else if(e == 0xff - (127 - 15)) + { + if(m == 0) + { + // + // F is an infinity; convert f to a half + // infinity with the same sign as f. + // + + return hdata(s | 0x7c00); + } + else + { + // + // F is a NAN; we produce a half NAN that preserves + // the sign bit and the 10 leftmost bits of the + // significand of f, with one exception: If the 10 + // leftmost bits are all zero, the NAN would turn + // into an infinity, so we have to set at least one + // bit in the significand. + // + + m >>= 13; + + return hdata(s | 0x7c00 | m | (m == 0)); + } + } + else + { + // + // E is greater than zero. F is a normalized float. + // We try to convert f to a normalized half. + // + + // + // Round to nearest, round "0.5" up + // + + if(m & 0x00001000) + { + m += 0x00002000; + + if(m & 0x00800000) + { + m = 0; // overflow in significand, + e += 1; // adjust exponent + } + } + + // + // Handle exponent overflow + // + + if (e > 30) + { + overflow(); // Cause a hardware floating point overflow; + + return hdata(s | 0x7c00); + // if this returns, the half becomes an + } // infinity with the same sign as f. + + // + // Assemble the half from s, e and m. 
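+	//
+	// Worked example (assuming IEEE-754 single precision): f = 1.0f
+	// has bit pattern 0x3f800000, giving s = 0, e = 127 - 112 = 15
+	// and m = 0, so the half assembled below is (15 << 10) = 0x3c00,
+	// the half-precision encoding of 1.0.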
+ // + + return hdata(s | (e << 10) | (m >> 13)); + } + } + +}//namespace detail +}//namespace glm diff --git a/MacroLibX/third_party/glm/detail/type_mat2x2.hpp b/MacroLibX/third_party/glm/detail/type_mat2x2.hpp new file mode 100644 index 0000000..033908f --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat2x2.hpp @@ -0,0 +1,177 @@ +/// @ref core +/// @file glm/detail/type_mat2x2.hpp + +#pragma once + +#include "type_vec2.hpp" +#include +#include + +namespace glm +{ + template + struct mat<2, 2, T, Q> + { + typedef vec<2, T, Q> col_type; + typedef vec<2, T, Q> row_type; + typedef mat<2, 2, T, Q> type; + typedef mat<2, 2, T, Q> transpose_type; + typedef T value_type; + + private: + col_type value[2]; + + public: + // -- Accesses -- + + typedef length_t length_type; + GLM_FUNC_DECL static GLM_CONSTEXPR length_type length() { return 2; } + + GLM_FUNC_DECL col_type & operator[](length_type i); + GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; + + // -- Constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; + template + GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<2, 2, T, P> const& m); + + GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T scalar); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + T const& x1, T const& y1, + T const& x2, T const& y2); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + col_type const& v1, + col_type const& v2); + + // -- Conversions -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR mat( + U const& x1, V const& y1, + M const& x2, N const& y2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR mat( + vec<2, U, Q> const& v1, + vec<2, V, Q> const& v2); + + // -- Matrix conversions -- + + template + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, U, P> const& m); + + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, T, Q> const& x); + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_DECL mat<2, 2, T, Q> & operator=(mat<2, 2, U, Q> const& m); + template + GLM_FUNC_DECL mat<2, 2, T, Q> & operator+=(U s); + template + GLM_FUNC_DECL mat<2, 2, T, Q> & operator+=(mat<2, 2, U, Q> const& m); + template + GLM_FUNC_DECL mat<2, 2, T, Q> & operator-=(U s); + template + GLM_FUNC_DECL mat<2, 2, T, Q> & operator-=(mat<2, 2, U, Q> const& m); + template + GLM_FUNC_DECL mat<2, 2, T, Q> & operator*=(U s); + template + GLM_FUNC_DECL mat<2, 2, T, Q> & operator*=(mat<2, 2, U, Q> const& m); + template + GLM_FUNC_DECL mat<2, 2, T, Q> & operator/=(U s); + template + GLM_FUNC_DECL mat<2, 2, T, Q> & operator/=(mat<2, 2, U, Q> const& m); + + // -- Increment and decrement operators -- + + GLM_FUNC_DECL mat<2, 2, T, Q> & operator++ (); + GLM_FUNC_DECL mat<2, 2, T, Q> & operator-- (); + GLM_FUNC_DECL mat<2, 2, T, Q> operator++(int); + GLM_FUNC_DECL mat<2, 2, T, Q> operator--(int); + }; + + // -- Unary operators -- + + template + GLM_FUNC_DECL mat<2, 2, T, Q> operator+(mat<2, 2, T, Q> const& m); + + template + GLM_FUNC_DECL mat<2, 2, T, Q> operator-(mat<2, 2, T, Q> const& m); + + // -- Binary operators -- + + template + GLM_FUNC_DECL mat<2, 2, T, Q> operator+(mat<2, 2, 
T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<2, 2, T, Q> operator+(T scalar, mat<2, 2, T, Q> const& m); + + template + GLM_FUNC_DECL mat<2, 2, T, Q> operator+(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<2, 2, T, Q> operator-(mat<2, 2, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<2, 2, T, Q> operator-(T scalar, mat<2, 2, T, Q> const& m); + + template + GLM_FUNC_DECL mat<2, 2, T, Q> operator-(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<2, 2, T, Q> operator*(mat<2, 2, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<2, 2, T, Q> operator*(T scalar, mat<2, 2, T, Q> const& m); + + template + GLM_FUNC_DECL typename mat<2, 2, T, Q>::col_type operator*(mat<2, 2, T, Q> const& m, typename mat<2, 2, T, Q>::row_type const& v); + + template + GLM_FUNC_DECL typename mat<2, 2, T, Q>::row_type operator*(typename mat<2, 2, T, Q>::col_type const& v, mat<2, 2, T, Q> const& m); + + template + GLM_FUNC_DECL mat<2, 2, T, Q> operator*(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 2, T, Q> operator*(mat<2, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 2, T, Q> operator*(mat<2, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<2, 2, T, Q> operator/(mat<2, 2, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<2, 2, T, Q> operator/(T scalar, mat<2, 2, T, Q> const& m); + + template + GLM_FUNC_DECL typename mat<2, 2, T, Q>::col_type operator/(mat<2, 2, T, Q> const& m, typename mat<2, 2, T, Q>::row_type const& v); + + template + GLM_FUNC_DECL typename mat<2, 2, T, Q>::row_type operator/(typename mat<2, 2, T, Q>::col_type const& v, mat<2, 2, T, Q> const& m); + + template + GLM_FUNC_DECL mat<2, 2, T, Q> operator/(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2); + + // -- Boolean operators -- + + template + GLM_FUNC_DECL bool operator==(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL bool operator!=(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2); +} //namespace glm + +#ifndef GLM_EXTERNAL_TEMPLATE +#include "type_mat2x2.inl" +#endif diff --git a/MacroLibX/third_party/glm/detail/type_mat2x2.inl b/MacroLibX/third_party/glm/detail/type_mat2x2.inl new file mode 100644 index 0000000..fe5d1aa --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat2x2.inl @@ -0,0 +1,536 @@ +#include "../matrix.hpp" + +namespace glm +{ + // -- Constructors -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat() +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST + : value{col_type(1, 0), col_type(0, 1)} +# endif + { +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION + this->value[0] = col_type(1, 0); + this->value[1] = col_type(0, 1); +# endif + } +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<2, 2, T, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{m[0], m[1]} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = m[0]; + this->value[1] = m[1]; +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(T scalar) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(scalar, 0), col_type(0, scalar)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(scalar, 0); + this->value[1] = col_type(0, scalar); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 
2, T, Q>::mat + ( + T const& x0, T const& y0, + T const& x1, T const& y1 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(x0, y0), col_type(x1, y1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x0, y0); + this->value[1] = col_type(x1, y1); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(col_type const& v0, col_type const& v1) +# if GLM_HAS_INITIALIZER_LISTS + : value{v0, v1} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = v0; + this->value[1] = v1; +# endif + } + + // -- Conversion constructors -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat + ( + X1 const& x1, Y1 const& y1, + X2 const& x2, Y2 const& y2 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(static_cast(x1), value_type(y1)), col_type(static_cast(x2), value_type(y2)) } +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(static_cast(x1), value_type(y1)); + this->value[1] = col_type(static_cast(x2), value_type(y2)); +# endif + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(vec<2, V1, Q> const& v1, vec<2, V2, Q> const& v2) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v1), col_type(v2)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(v1); + this->value[1] = col_type(v2); +# endif + } + + // -- mat2x2 matrix conversions -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<2, 2, U, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<3, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<4, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<2, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<3, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<2, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<4, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER 
GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<3, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<4, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + // -- Accesses -- + + template + GLM_FUNC_QUALIFIER typename mat<2, 2, T, Q>::col_type& mat<2, 2, T, Q>::operator[](typename mat<2, 2, T, Q>::length_type i) + { + assert(i < this->length()); + return this->value[i]; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<2, 2, T, Q>::col_type const& mat<2, 2, T, Q>::operator[](typename mat<2, 2, T, Q>::length_type i) const + { + assert(i < this->length()); + return this->value[i]; + } + + // -- Unary updatable operators -- + + template + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator=(mat<2, 2, U, Q> const& m) + { + this->value[0] = m[0]; + this->value[1] = m[1]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator+=(U scalar) + { + this->value[0] += scalar; + this->value[1] += scalar; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator+=(mat<2, 2, U, Q> const& m) + { + this->value[0] += m[0]; + this->value[1] += m[1]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator-=(U scalar) + { + this->value[0] -= scalar; + this->value[1] -= scalar; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator-=(mat<2, 2, U, Q> const& m) + { + this->value[0] -= m[0]; + this->value[1] -= m[1]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator*=(U scalar) + { + this->value[0] *= scalar; + this->value[1] *= scalar; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator*=(mat<2, 2, U, Q> const& m) + { + return (*this = *this * m); + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator/=(U scalar) + { + this->value[0] /= scalar; + this->value[1] /= scalar; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator/=(mat<2, 2, U, Q> const& m) + { + return *this *= inverse(m); + } + + // -- Increment and decrement operators -- + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator++() + { + ++this->value[0]; + ++this->value[1]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator--() + { + --this->value[0]; + --this->value[1]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> mat<2, 2, T, Q>::operator++(int) + { + mat<2, 2, T, Q> Result(*this); + ++*this; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> mat<2, 2, T, Q>::operator--(int) + { + mat<2, 2, T, Q> Result(*this); + --*this; + return Result; + } + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator+(mat<2, 2, T, Q> const& m) + { + return m; + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator-(mat<2, 2, T, Q> const& m) + { + return mat<2, 2, T, 
Q>( + -m[0], + -m[1]); + } + + // -- Binary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator+(mat<2, 2, T, Q> const& m, T scalar) + { + return mat<2, 2, T, Q>( + m[0] + scalar, + m[1] + scalar); + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator+(T scalar, mat<2, 2, T, Q> const& m) + { + return mat<2, 2, T, Q>( + m[0] + scalar, + m[1] + scalar); + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator+(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2) + { + return mat<2, 2, T, Q>( + m1[0] + m2[0], + m1[1] + m2[1]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator-(mat<2, 2, T, Q> const& m, T scalar) + { + return mat<2, 2, T, Q>( + m[0] - scalar, + m[1] - scalar); + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator-(T scalar, mat<2, 2, T, Q> const& m) + { + return mat<2, 2, T, Q>( + scalar - m[0], + scalar - m[1]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator-(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2) + { + return mat<2, 2, T, Q>( + m1[0] - m2[0], + m1[1] - m2[1]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator*(mat<2, 2, T, Q> const& m, T scalar) + { + return mat<2, 2, T, Q>( + m[0] * scalar, + m[1] * scalar); + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator*(T scalar, mat<2, 2, T, Q> const& m) + { + return mat<2, 2, T, Q>( + m[0] * scalar, + m[1] * scalar); + } + + template + GLM_FUNC_QUALIFIER typename mat<2, 2, T, Q>::col_type operator* + ( + mat<2, 2, T, Q> const& m, + typename mat<2, 2, T, Q>::row_type const& v + ) + { + return vec<2, T, Q>( + m[0][0] * v.x + m[1][0] * v.y, + m[0][1] * v.x + m[1][1] * v.y); + } + + template + GLM_FUNC_QUALIFIER typename mat<2, 2, T, Q>::row_type operator* + ( + typename mat<2, 2, T, Q>::col_type const& v, + mat<2, 2, T, Q> const& m + ) + { + return vec<2, T, Q>( + v.x * m[0][0] + v.y * m[0][1], + v.x * m[1][0] + v.y * m[1][1]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator*(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2) + { + return mat<2, 2, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator*(mat<2, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2) + { + return mat<3, 2, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1], + m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1], + m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator*(mat<2, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2) + { + return mat<4, 2, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1], + m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1], + m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1], + m1[0][0] * m2[3][0] + m1[1][0] * m2[3][1], + m1[0][1] * m2[3][0] + m1[1][1] * m2[3][1]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator/(mat<2, 2, T, Q> const& m, T scalar) + { + return mat<2, 2, T, Q>( + m[0] / scalar, + m[1] / scalar); + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator/(T scalar, mat<2, 2, T, Q> const& m) + { + return mat<2, 2, T, Q>( + scalar / m[0], + scalar / m[1]); + } + + template + 
GLM_FUNC_QUALIFIER typename mat<2, 2, T, Q>::col_type operator/(mat<2, 2, T, Q> const& m, typename mat<2, 2, T, Q>::row_type const& v) + { + return inverse(m) * v; + } + + template + GLM_FUNC_QUALIFIER typename mat<2, 2, T, Q>::row_type operator/(typename mat<2, 2, T, Q>::col_type const& v, mat<2, 2, T, Q> const& m) + { + return v * inverse(m); + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator/(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2) + { + mat<2, 2, T, Q> m1_copy(m1); + return m1_copy /= m2; + } + + // -- Boolean operators -- + + template + GLM_FUNC_QUALIFIER bool operator==(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2) + { + return (m1[0] == m2[0]) && (m1[1] == m2[1]); + } + + template + GLM_FUNC_QUALIFIER bool operator!=(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2) + { + return (m1[0] != m2[0]) || (m1[1] != m2[1]); + } +} //namespace glm diff --git a/MacroLibX/third_party/glm/detail/type_mat2x3.hpp b/MacroLibX/third_party/glm/detail/type_mat2x3.hpp new file mode 100644 index 0000000..d6596e4 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat2x3.hpp @@ -0,0 +1,159 @@ +/// @ref core +/// @file glm/detail/type_mat2x3.hpp + +#pragma once + +#include "type_vec2.hpp" +#include "type_vec3.hpp" +#include +#include + +namespace glm +{ + template + struct mat<2, 3, T, Q> + { + typedef vec<3, T, Q> col_type; + typedef vec<2, T, Q> row_type; + typedef mat<2, 3, T, Q> type; + typedef mat<3, 2, T, Q> transpose_type; + typedef T value_type; + + private: + col_type value[2]; + + public: + // -- Accesses -- + + typedef length_t length_type; + GLM_FUNC_DECL static GLM_CONSTEXPR length_type length() { return 2; } + + GLM_FUNC_DECL col_type & operator[](length_type i); + GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; + + // -- Constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; + template + GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<2, 3, T, P> const& m); + + GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T scalar); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + T x0, T y0, T z0, + T x1, T y1, T z1); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + col_type const& v0, + col_type const& v1); + + // -- Conversions -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR mat( + X1 x1, Y1 y1, Z1 z1, + X2 x2, Y2 y2, Z2 z2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR mat( + vec<3, U, Q> const& v1, + vec<3, V, Q> const& v2); + + // -- Matrix conversions -- + + template + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, U, P> const& m); + + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, T, Q> const& x); + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_DECL mat<2, 3, T, Q> & operator=(mat<2, 3, U, Q> const& m); + template + GLM_FUNC_DECL mat<2, 3, T, Q> & operator+=(U s); + template + GLM_FUNC_DECL mat<2, 3, T, Q> & operator+=(mat<2, 3, U, Q> const& m); + template + GLM_FUNC_DECL mat<2, 3, T, Q> & operator-=(U s); + template + GLM_FUNC_DECL mat<2, 3, T, Q> & operator-=(mat<2, 3, U, Q> const& m); + template + GLM_FUNC_DECL 
mat<2, 3, T, Q> & operator*=(U s); + template + GLM_FUNC_DECL mat<2, 3, T, Q> & operator/=(U s); + + // -- Increment and decrement operators -- + + GLM_FUNC_DECL mat<2, 3, T, Q> & operator++ (); + GLM_FUNC_DECL mat<2, 3, T, Q> & operator-- (); + GLM_FUNC_DECL mat<2, 3, T, Q> operator++(int); + GLM_FUNC_DECL mat<2, 3, T, Q> operator--(int); + }; + + // -- Unary operators -- + + template + GLM_FUNC_DECL mat<2, 3, T, Q> operator+(mat<2, 3, T, Q> const& m); + + template + GLM_FUNC_DECL mat<2, 3, T, Q> operator-(mat<2, 3, T, Q> const& m); + + // -- Binary operators -- + + template + GLM_FUNC_DECL mat<2, 3, T, Q> operator+(mat<2, 3, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<2, 3, T, Q> operator+(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<2, 3, T, Q> operator-(mat<2, 3, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<2, 3, T, Q> operator-(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<2, 3, T, Q> operator*(mat<2, 3, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<2, 3, T, Q> operator*(T scalar, mat<2, 3, T, Q> const& m); + + template + GLM_FUNC_DECL typename mat<2, 3, T, Q>::col_type operator*(mat<2, 3, T, Q> const& m, typename mat<2, 3, T, Q>::row_type const& v); + + template + GLM_FUNC_DECL typename mat<2, 3, T, Q>::row_type operator*(typename mat<2, 3, T, Q>::col_type const& v, mat<2, 3, T, Q> const& m); + + template + GLM_FUNC_DECL mat<2, 3, T, Q> operator*(mat<2, 3, T, Q> const& m1, mat<2, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator*(mat<2, 3, T, Q> const& m1, mat<3, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 3, T, Q> operator*(mat<2, 3, T, Q> const& m1, mat<4, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<2, 3, T, Q> operator/(mat<2, 3, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<2, 3, T, Q> operator/(T scalar, mat<2, 3, T, Q> const& m); + + // -- Boolean operators -- + + template + GLM_FUNC_DECL bool operator==(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL bool operator!=(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2); +}//namespace glm + +#ifndef GLM_EXTERNAL_TEMPLATE +#include "type_mat2x3.inl" +#endif diff --git a/MacroLibX/third_party/glm/detail/type_mat2x3.inl b/MacroLibX/third_party/glm/detail/type_mat2x3.inl new file mode 100644 index 0000000..5fec17e --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat2x3.inl @@ -0,0 +1,510 @@ +namespace glm +{ + // -- Constructors -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat() +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST + : value{col_type(1, 0, 0), col_type(0, 1, 0)} +# endif + { +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION + this->value[0] = col_type(1, 0, 0); + this->value[1] = col_type(0, 1, 0); +# endif + } +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<2, 3, T, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{m.value[0], m.value[1]} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = m.value[0]; + this->value[1] = m.value[1]; +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(T scalar) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(scalar, 0, 0), col_type(0, scalar, 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(scalar, 0, 0); + this->value[1] = col_type(0, scalar, 0); 
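+	// Either way the scalar lands on the main diagonal: mat<2, 3>(s)
+	// has columns (s, 0, 0) and (0, s, 0), zero elsewhere, matching
+	// GLSL's diagonal construction from a single scalar.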
+# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat + ( + T x0, T y0, T z0, + T x1, T y1, T z1 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(x0, y0, z0), col_type(x1, y1, z1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x0, y0, z0); + this->value[1] = col_type(x1, y1, z1); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(col_type const& v0, col_type const& v1) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v0), col_type(v1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(v0); + this->value[1] = col_type(v1); +# endif + } + + // -- Conversion constructors -- + + template + template< + typename X1, typename Y1, typename Z1, + typename X2, typename Y2, typename Z2> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat + ( + X1 x1, Y1 y1, Z1 z1, + X2 x2, Y2 y2, Z2 z2 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(x1, y1, z1), col_type(x2, y2, z2)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x1, y1, z1); + this->value[1] = col_type(x2, y2, z2); +# endif + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(vec<3, V1, Q> const& v1, vec<3, V2, Q> const& v2) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v1), col_type(v2)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(v1); + this->value[1] = col_type(v2); +# endif + } + + // -- Matrix conversions -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<2, 3, U, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<2, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<3, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<4, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<2, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<3, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<3, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = 
col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<4, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<4, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + // -- Accesses -- + + template + GLM_FUNC_QUALIFIER typename mat<2, 3, T, Q>::col_type & mat<2, 3, T, Q>::operator[](typename mat<2, 3, T, Q>::length_type i) + { + assert(i < this->length()); + return this->value[i]; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<2, 3, T, Q>::col_type const& mat<2, 3, T, Q>::operator[](typename mat<2, 3, T, Q>::length_type i) const + { + assert(i < this->length()); + return this->value[i]; + } + + // -- Unary updatable operators -- + + template + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q>& mat<2, 3, T, Q>::operator=(mat<2, 3, U, Q> const& m) + { + this->value[0] = m[0]; + this->value[1] = m[1]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> & mat<2, 3, T, Q>::operator+=(U s) + { + this->value[0] += s; + this->value[1] += s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q>& mat<2, 3, T, Q>::operator+=(mat<2, 3, U, Q> const& m) + { + this->value[0] += m[0]; + this->value[1] += m[1]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q>& mat<2, 3, T, Q>::operator-=(U s) + { + this->value[0] -= s; + this->value[1] -= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q>& mat<2, 3, T, Q>::operator-=(mat<2, 3, U, Q> const& m) + { + this->value[0] -= m[0]; + this->value[1] -= m[1]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q>& mat<2, 3, T, Q>::operator*=(U s) + { + this->value[0] *= s; + this->value[1] *= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> & mat<2, 3, T, Q>::operator/=(U s) + { + this->value[0] /= s; + this->value[1] /= s; + return *this; + } + + // -- Increment and decrement operators -- + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> & mat<2, 3, T, Q>::operator++() + { + ++this->value[0]; + ++this->value[1]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> & mat<2, 3, T, Q>::operator--() + { + --this->value[0]; + --this->value[1]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> mat<2, 3, T, Q>::operator++(int) + { + mat<2, 3, T, Q> Result(*this); + ++*this; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> mat<2, 3, T, Q>::operator--(int) + { + mat<2, 3, T, Q> Result(*this); + --*this; + return Result; + } + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator+(mat<2, 3, T, Q> const& m) + { + return m; + } + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator-(mat<2, 3, T, Q> const& m) + { + return mat<2, 3, T, Q>( + -m[0], + -m[1]); + } + + // -- Binary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator+(mat<2, 3, T, Q> const& m, T scalar) + { + return mat<2, 3, T, Q>( + m[0] + scalar, + m[1] + scalar); + } + + template + 
GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator+(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2) + { + return mat<2, 3, T, Q>( + m1[0] + m2[0], + m1[1] + m2[1]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator-(mat<2, 3, T, Q> const& m, T scalar) + { + return mat<2, 3, T, Q>( + m[0] - scalar, + m[1] - scalar); + } + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator-(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2) + { + return mat<2, 3, T, Q>( + m1[0] - m2[0], + m1[1] - m2[1]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator*(mat<2, 3, T, Q> const& m, T scalar) + { + return mat<2, 3, T, Q>( + m[0] * scalar, + m[1] * scalar); + } + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator*(T scalar, mat<2, 3, T, Q> const& m) + { + return mat<2, 3, T, Q>( + m[0] * scalar, + m[1] * scalar); + } + + template + GLM_FUNC_QUALIFIER typename mat<2, 3, T, Q>::col_type operator* + ( + mat<2, 3, T, Q> const& m, + typename mat<2, 3, T, Q>::row_type const& v) + { + return typename mat<2, 3, T, Q>::col_type( + m[0][0] * v.x + m[1][0] * v.y, + m[0][1] * v.x + m[1][1] * v.y, + m[0][2] * v.x + m[1][2] * v.y); + } + + template + GLM_FUNC_QUALIFIER typename mat<2, 3, T, Q>::row_type operator* + ( + typename mat<2, 3, T, Q>::col_type const& v, + mat<2, 3, T, Q> const& m) + { + return typename mat<2, 3, T, Q>::row_type( + v.x * m[0][0] + v.y * m[0][1] + v.z * m[0][2], + v.x * m[1][0] + v.y * m[1][1] + v.z * m[1][2]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator*(mat<2, 3, T, Q> const& m1, mat<2, 2, T, Q> const& m2) + { + return mat<2, 3, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1], + m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1], + m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator*(mat<2, 3, T, Q> const& m1, mat<3, 2, T, Q> const& m2) + { + T SrcA00 = m1[0][0]; + T SrcA01 = m1[0][1]; + T SrcA02 = m1[0][2]; + T SrcA10 = m1[1][0]; + T SrcA11 = m1[1][1]; + T SrcA12 = m1[1][2]; + + T SrcB00 = m2[0][0]; + T SrcB01 = m2[0][1]; + T SrcB10 = m2[1][0]; + T SrcB11 = m2[1][1]; + T SrcB20 = m2[2][0]; + T SrcB21 = m2[2][1]; + + mat<3, 3, T, Q> Result; + Result[0][0] = SrcA00 * SrcB00 + SrcA10 * SrcB01; + Result[0][1] = SrcA01 * SrcB00 + SrcA11 * SrcB01; + Result[0][2] = SrcA02 * SrcB00 + SrcA12 * SrcB01; + Result[1][0] = SrcA00 * SrcB10 + SrcA10 * SrcB11; + Result[1][1] = SrcA01 * SrcB10 + SrcA11 * SrcB11; + Result[1][2] = SrcA02 * SrcB10 + SrcA12 * SrcB11; + Result[2][0] = SrcA00 * SrcB20 + SrcA10 * SrcB21; + Result[2][1] = SrcA01 * SrcB20 + SrcA11 * SrcB21; + Result[2][2] = SrcA02 * SrcB20 + SrcA12 * SrcB21; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator*(mat<2, 3, T, Q> const& m1, mat<4, 2, T, Q> const& m2) + { + return mat<4, 3, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1], + m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1], + m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1], + m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1], + m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1], + m1[0][2] * m2[2][0] + m1[1][2] * m2[2][1], + m1[0][0] * m2[3][0] + m1[1][0] * m2[3][1], + m1[0][1] * m2[3][0] + m1[1][1] * m2[3][1], + m1[0][2] * m2[3][0] + m1[1][2] * m2[3][1]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator/(mat<2, 
3, T, Q> const& m, T scalar) + { + return mat<2, 3, T, Q>( + m[0] / scalar, + m[1] / scalar); + } + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator/(T scalar, mat<2, 3, T, Q> const& m) + { + return mat<2, 3, T, Q>( + scalar / m[0], + scalar / m[1]); + } + + // -- Boolean operators -- + + template + GLM_FUNC_QUALIFIER bool operator==(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2) + { + return (m1[0] == m2[0]) && (m1[1] == m2[1]); + } + + template + GLM_FUNC_QUALIFIER bool operator!=(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2) + { + return (m1[0] != m2[0]) || (m1[1] != m2[1]); + } +} //namespace glm diff --git a/MacroLibX/third_party/glm/detail/type_mat2x4.hpp b/MacroLibX/third_party/glm/detail/type_mat2x4.hpp new file mode 100644 index 0000000..ff03e21 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat2x4.hpp @@ -0,0 +1,161 @@ +/// @ref core +/// @file glm/detail/type_mat2x4.hpp + +#pragma once + +#include "type_vec2.hpp" +#include "type_vec4.hpp" +#include +#include + +namespace glm +{ + template + struct mat<2, 4, T, Q> + { + typedef vec<4, T, Q> col_type; + typedef vec<2, T, Q> row_type; + typedef mat<2, 4, T, Q> type; + typedef mat<4, 2, T, Q> transpose_type; + typedef T value_type; + + private: + col_type value[2]; + + public: + // -- Accesses -- + + typedef length_t length_type; + GLM_FUNC_DECL static GLM_CONSTEXPR length_type length() { return 2; } + + GLM_FUNC_DECL col_type & operator[](length_type i); + GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; + + // -- Constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; + template + GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<2, 4, T, P> const& m); + + GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T scalar); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + T x0, T y0, T z0, T w0, + T x1, T y1, T z1, T w1); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + col_type const& v0, + col_type const& v1); + + // -- Conversions -- + + template< + typename X1, typename Y1, typename Z1, typename W1, + typename X2, typename Y2, typename Z2, typename W2> + GLM_FUNC_DECL GLM_CONSTEXPR mat( + X1 x1, Y1 y1, Z1 z1, W1 w1, + X2 x2, Y2 y2, Z2 z2, W2 w2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR mat( + vec<4, U, Q> const& v1, + vec<4, V, Q> const& v2); + + // -- Matrix conversions -- + + template + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, U, P> const& m); + + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, T, Q> const& x); + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_DECL mat<2, 4, T, Q> & operator=(mat<2, 4, U, Q> const& m); + template + GLM_FUNC_DECL mat<2, 4, T, Q> & operator+=(U s); + template + GLM_FUNC_DECL mat<2, 4, T, Q> & operator+=(mat<2, 4, U, Q> const& m); + template + GLM_FUNC_DECL mat<2, 4, T, Q> & operator-=(U s); + template + GLM_FUNC_DECL mat<2, 4, T, Q> & operator-=(mat<2, 4, U, Q> const& m); + template + GLM_FUNC_DECL mat<2, 4, T, Q> & operator*=(U s); + template + GLM_FUNC_DECL mat<2, 4, T, Q> & operator/=(U s); + + // -- Increment and decrement 
operators -- + + GLM_FUNC_DECL mat<2, 4, T, Q> & operator++ (); + GLM_FUNC_DECL mat<2, 4, T, Q> & operator-- (); + GLM_FUNC_DECL mat<2, 4, T, Q> operator++(int); + GLM_FUNC_DECL mat<2, 4, T, Q> operator--(int); + }; + + // -- Unary operators -- + + template + GLM_FUNC_DECL mat<2, 4, T, Q> operator+(mat<2, 4, T, Q> const& m); + + template + GLM_FUNC_DECL mat<2, 4, T, Q> operator-(mat<2, 4, T, Q> const& m); + + // -- Binary operators -- + + template + GLM_FUNC_DECL mat<2, 4, T, Q> operator+(mat<2, 4, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<2, 4, T, Q> operator+(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<2, 4, T, Q> operator-(mat<2, 4, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<2, 4, T, Q> operator-(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<2, 4, T, Q> operator*(mat<2, 4, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<2, 4, T, Q> operator*(T scalar, mat<2, 4, T, Q> const& m); + + template + GLM_FUNC_DECL typename mat<2, 4, T, Q>::col_type operator*(mat<2, 4, T, Q> const& m, typename mat<2, 4, T, Q>::row_type const& v); + + template + GLM_FUNC_DECL typename mat<2, 4, T, Q>::row_type operator*(typename mat<2, 4, T, Q>::col_type const& v, mat<2, 4, T, Q> const& m); + + template + GLM_FUNC_DECL mat<4, 4, T, Q> operator*(mat<2, 4, T, Q> const& m1, mat<4, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<2, 4, T, Q> operator*(mat<2, 4, T, Q> const& m1, mat<2, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 4, T, Q> operator*(mat<2, 4, T, Q> const& m1, mat<3, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<2, 4, T, Q> operator/(mat<2, 4, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<2, 4, T, Q> operator/(T scalar, mat<2, 4, T, Q> const& m); + + // -- Boolean operators -- + + template + GLM_FUNC_DECL bool operator==(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL bool operator!=(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2); +}//namespace glm + +#ifndef GLM_EXTERNAL_TEMPLATE +#include "type_mat2x4.inl" +#endif diff --git a/MacroLibX/third_party/glm/detail/type_mat2x4.inl b/MacroLibX/third_party/glm/detail/type_mat2x4.inl new file mode 100644 index 0000000..b6d2b9d --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat2x4.inl @@ -0,0 +1,520 @@ +namespace glm +{ + // -- Constructors -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat() +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST + : value{col_type(1, 0, 0, 0), col_type(0, 1, 0, 0)} +# endif + { +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION + this->value[0] = col_type(1, 0, 0, 0); + this->value[1] = col_type(0, 1, 0, 0); +# endif + } +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<2, 4, T, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{m[0], m[1]} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = m[0]; + this->value[1] = m[1]; +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(T s) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(s, 0, 0, 0), col_type(0, s, 0, 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(s, 0, 0, 0); + this->value[1] = col_type(0, s, 0, 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat + ( + T x0, T y0, T z0, T w0, + T x1, T y1, T z1, T w1 + ) +# if 
GLM_HAS_INITIALIZER_LISTS + : value{col_type(x0, y0, z0, w0), col_type(x1, y1, z1, w1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x0, y0, z0, w0); + this->value[1] = col_type(x1, y1, z1, w1); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(col_type const& v0, col_type const& v1) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v0), col_type(v1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = v0; + this->value[1] = v1; +# endif + } + + // -- Conversion constructors -- + + template + template< + typename X1, typename Y1, typename Z1, typename W1, + typename X2, typename Y2, typename Z2, typename W2> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat + ( + X1 x1, Y1 y1, Z1 z1, W1 w1, + X2 x2, Y2 y2, Z2 z2, W2 w2 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{ + col_type(x1, y1, z1, w1), + col_type(x2, y2, z2, w2)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x1, y1, z1, w1); + this->value[1] = col_type(x2, y2, z2, w2); +# endif + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(vec<4, V1, Q> const& v1, vec<4, V2, Q> const& v2) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v1), col_type(v2)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(v1); + this->value[1] = col_type(v2); +# endif + } + + // -- Matrix conversions -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<2, 4, U, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<2, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0, 0); + this->value[1] = col_type(m[1], 0, 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<3, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<4, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<2, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<3, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0, 0); + this->value[1] = col_type(m[1], 0, 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<3, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + 
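+	// This conversion keeps only the first two columns of the 3x4
+	// source; its third column is discarded.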
this->value[1] = col_type(m[1]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<4, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0, 0); + this->value[1] = col_type(m[1], 0, 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<4, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); +# endif + } + + // -- Accesses -- + + template + GLM_FUNC_QUALIFIER typename mat<2, 4, T, Q>::col_type & mat<2, 4, T, Q>::operator[](typename mat<2, 4, T, Q>::length_type i) + { + assert(i < this->length()); + return this->value[i]; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<2, 4, T, Q>::col_type const& mat<2, 4, T, Q>::operator[](typename mat<2, 4, T, Q>::length_type i) const + { + assert(i < this->length()); + return this->value[i]; + } + + // -- Unary updatable operators -- + + template + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator=(mat<2, 4, U, Q> const& m) + { + this->value[0] = m[0]; + this->value[1] = m[1]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator+=(U s) + { + this->value[0] += s; + this->value[1] += s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator+=(mat<2, 4, U, Q> const& m) + { + this->value[0] += m[0]; + this->value[1] += m[1]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator-=(U s) + { + this->value[0] -= s; + this->value[1] -= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator-=(mat<2, 4, U, Q> const& m) + { + this->value[0] -= m[0]; + this->value[1] -= m[1]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator*=(U s) + { + this->value[0] *= s; + this->value[1] *= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> & mat<2, 4, T, Q>::operator/=(U s) + { + this->value[0] /= s; + this->value[1] /= s; + return *this; + } + + // -- Increment and decrement operators -- + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator++() + { + ++this->value[0]; + ++this->value[1]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator--() + { + --this->value[0]; + --this->value[1]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> mat<2, 4, T, Q>::operator++(int) + { + mat<2, 4, T, Q> Result(*this); + ++*this; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> mat<2, 4, T, Q>::operator--(int) + { + mat<2, 4, T, Q> Result(*this); + --*this; + return Result; + } + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator+(mat<2, 4, T, Q> const& m) + { + return m; + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator-(mat<2, 4, T, Q> const& m) + { + return mat<2, 4, T, Q>( + -m[0], + -m[1]); + } + + // -- Binary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator+(mat<2, 4, T, Q> const& m, T scalar) + { + return mat<2, 4, T, Q>( + m[0] + scalar, + m[1] + scalar); + } + + template + 
GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator+(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2) + { + return mat<2, 4, T, Q>( + m1[0] + m2[0], + m1[1] + m2[1]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator-(mat<2, 4, T, Q> const& m, T scalar) + { + return mat<2, 4, T, Q>( + m[0] - scalar, + m[1] - scalar); + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator-(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2) + { + return mat<2, 4, T, Q>( + m1[0] - m2[0], + m1[1] - m2[1]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator*(mat<2, 4, T, Q> const& m, T scalar) + { + return mat<2, 4, T, Q>( + m[0] * scalar, + m[1] * scalar); + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator*(T scalar, mat<2, 4, T, Q> const& m) + { + return mat<2, 4, T, Q>( + m[0] * scalar, + m[1] * scalar); + } + + template + GLM_FUNC_QUALIFIER typename mat<2, 4, T, Q>::col_type operator*(mat<2, 4, T, Q> const& m, typename mat<2, 4, T, Q>::row_type const& v) + { + return typename mat<2, 4, T, Q>::col_type( + m[0][0] * v.x + m[1][0] * v.y, + m[0][1] * v.x + m[1][1] * v.y, + m[0][2] * v.x + m[1][2] * v.y, + m[0][3] * v.x + m[1][3] * v.y); + } + + template + GLM_FUNC_QUALIFIER typename mat<2, 4, T, Q>::row_type operator*(typename mat<2, 4, T, Q>::col_type const& v, mat<2, 4, T, Q> const& m) + { + return typename mat<2, 4, T, Q>::row_type( + v.x * m[0][0] + v.y * m[0][1] + v.z * m[0][2] + v.w * m[0][3], + v.x * m[1][0] + v.y * m[1][1] + v.z * m[1][2] + v.w * m[1][3]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator*(mat<2, 4, T, Q> const& m1, mat<4, 2, T, Q> const& m2) + { + T SrcA00 = m1[0][0]; + T SrcA01 = m1[0][1]; + T SrcA02 = m1[0][2]; + T SrcA03 = m1[0][3]; + T SrcA10 = m1[1][0]; + T SrcA11 = m1[1][1]; + T SrcA12 = m1[1][2]; + T SrcA13 = m1[1][3]; + + T SrcB00 = m2[0][0]; + T SrcB01 = m2[0][1]; + T SrcB10 = m2[1][0]; + T SrcB11 = m2[1][1]; + T SrcB20 = m2[2][0]; + T SrcB21 = m2[2][1]; + T SrcB30 = m2[3][0]; + T SrcB31 = m2[3][1]; + + mat<4, 4, T, Q> Result; + Result[0][0] = SrcA00 * SrcB00 + SrcA10 * SrcB01; + Result[0][1] = SrcA01 * SrcB00 + SrcA11 * SrcB01; + Result[0][2] = SrcA02 * SrcB00 + SrcA12 * SrcB01; + Result[0][3] = SrcA03 * SrcB00 + SrcA13 * SrcB01; + Result[1][0] = SrcA00 * SrcB10 + SrcA10 * SrcB11; + Result[1][1] = SrcA01 * SrcB10 + SrcA11 * SrcB11; + Result[1][2] = SrcA02 * SrcB10 + SrcA12 * SrcB11; + Result[1][3] = SrcA03 * SrcB10 + SrcA13 * SrcB11; + Result[2][0] = SrcA00 * SrcB20 + SrcA10 * SrcB21; + Result[2][1] = SrcA01 * SrcB20 + SrcA11 * SrcB21; + Result[2][2] = SrcA02 * SrcB20 + SrcA12 * SrcB21; + Result[2][3] = SrcA03 * SrcB20 + SrcA13 * SrcB21; + Result[3][0] = SrcA00 * SrcB30 + SrcA10 * SrcB31; + Result[3][1] = SrcA01 * SrcB30 + SrcA11 * SrcB31; + Result[3][2] = SrcA02 * SrcB30 + SrcA12 * SrcB31; + Result[3][3] = SrcA03 * SrcB30 + SrcA13 * SrcB31; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator*(mat<2, 4, T, Q> const& m1, mat<2, 2, T, Q> const& m2) + { + return mat<2, 4, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1], + m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1], + m1[0][3] * m2[0][0] + m1[1][3] * m2[0][1], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1], + m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1], + m1[0][3] * m2[1][0] + m1[1][3] * m2[1][1]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator*(mat<2, 4, T, Q> const& m1, mat<3, 2, T, Q> const& m2) + { + return mat<3, 4, T, Q>( + m1[0][0] * 
m2[0][0] + m1[1][0] * m2[0][1], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1], + m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1], + m1[0][3] * m2[0][0] + m1[1][3] * m2[0][1], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1], + m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1], + m1[0][3] * m2[1][0] + m1[1][3] * m2[1][1], + m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1], + m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1], + m1[0][2] * m2[2][0] + m1[1][2] * m2[2][1], + m1[0][3] * m2[2][0] + m1[1][3] * m2[2][1]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator/(mat<2, 4, T, Q> const& m, T scalar) + { + return mat<2, 4, T, Q>( + m[0] / scalar, + m[1] / scalar); + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator/(T scalar, mat<2, 4, T, Q> const& m) + { + return mat<2, 4, T, Q>( + scalar / m[0], + scalar / m[1]); + } + + // -- Boolean operators -- + + template + GLM_FUNC_QUALIFIER bool operator==(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2) + { + return (m1[0] == m2[0]) && (m1[1] == m2[1]); + } + + template + GLM_FUNC_QUALIFIER bool operator!=(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2) + { + return (m1[0] != m2[0]) || (m1[1] != m2[1]); + } +} //namespace glm diff --git a/MacroLibX/third_party/glm/detail/type_mat3x2.hpp b/MacroLibX/third_party/glm/detail/type_mat3x2.hpp new file mode 100644 index 0000000..e166581 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat3x2.hpp @@ -0,0 +1,167 @@ +/// @ref core +/// @file glm/detail/type_mat3x2.hpp + +#pragma once + +#include "type_vec2.hpp" +#include "type_vec3.hpp" +#include +#include + +namespace glm +{ + template + struct mat<3, 2, T, Q> + { + typedef vec<2, T, Q> col_type; + typedef vec<3, T, Q> row_type; + typedef mat<3, 2, T, Q> type; + typedef mat<2, 3, T, Q> transpose_type; + typedef T value_type; + + private: + col_type value[3]; + + public: + // -- Accesses -- + + typedef length_t length_type; + GLM_FUNC_DECL static GLM_CONSTEXPR length_type length() { return 3; } + + GLM_FUNC_DECL col_type & operator[](length_type i); + GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; + + // -- Constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; + template + GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<3, 2, T, P> const& m); + + GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T scalar); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + T x0, T y0, + T x1, T y1, + T x2, T y2); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + col_type const& v0, + col_type const& v1, + col_type const& v2); + + // -- Conversions -- + + template< + typename X1, typename Y1, + typename X2, typename Y2, + typename X3, typename Y3> + GLM_FUNC_DECL GLM_CONSTEXPR mat( + X1 x1, Y1 y1, + X2 x2, Y2 y2, + X3 x3, Y3 y3); + + template + GLM_FUNC_DECL GLM_CONSTEXPR mat( + vec<2, V1, Q> const& v1, + vec<2, V2, Q> const& v2, + vec<2, V3, Q> const& v3); + + // -- Matrix conversions -- + + template + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, U, P> const& m); + + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR 
mat(mat<4, 3, T, Q> const& x); + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_DECL mat<3, 2, T, Q> & operator=(mat<3, 2, U, Q> const& m); + template + GLM_FUNC_DECL mat<3, 2, T, Q> & operator+=(U s); + template + GLM_FUNC_DECL mat<3, 2, T, Q> & operator+=(mat<3, 2, U, Q> const& m); + template + GLM_FUNC_DECL mat<3, 2, T, Q> & operator-=(U s); + template + GLM_FUNC_DECL mat<3, 2, T, Q> & operator-=(mat<3, 2, U, Q> const& m); + template + GLM_FUNC_DECL mat<3, 2, T, Q> & operator*=(U s); + template + GLM_FUNC_DECL mat<3, 2, T, Q> & operator/=(U s); + + // -- Increment and decrement operators -- + + GLM_FUNC_DECL mat<3, 2, T, Q> & operator++ (); + GLM_FUNC_DECL mat<3, 2, T, Q> & operator-- (); + GLM_FUNC_DECL mat<3, 2, T, Q> operator++(int); + GLM_FUNC_DECL mat<3, 2, T, Q> operator--(int); + }; + + // -- Unary operators -- + + template + GLM_FUNC_DECL mat<3, 2, T, Q> operator+(mat<3, 2, T, Q> const& m); + + template + GLM_FUNC_DECL mat<3, 2, T, Q> operator-(mat<3, 2, T, Q> const& m); + + // -- Binary operators -- + + template + GLM_FUNC_DECL mat<3, 2, T, Q> operator+(mat<3, 2, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<3, 2, T, Q> operator+(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 2, T, Q> operator-(mat<3, 2, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<3, 2, T, Q> operator-(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 2, T, Q> operator*(mat<3, 2, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<3, 2, T, Q> operator*(T scalar, mat<3, 2, T, Q> const& m); + + template + GLM_FUNC_DECL typename mat<3, 2, T, Q>::col_type operator*(mat<3, 2, T, Q> const& m, typename mat<3, 2, T, Q>::row_type const& v); + + template + GLM_FUNC_DECL typename mat<3, 2, T, Q>::row_type operator*(typename mat<3, 2, T, Q>::col_type const& v, mat<3, 2, T, Q> const& m); + + template + GLM_FUNC_DECL mat<2, 2, T, Q> operator*(mat<3, 2, T, Q> const& m1, mat<2, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 2, T, Q> operator*(mat<3, 2, T, Q> const& m1, mat<3, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 2, T, Q> operator*(mat<3, 2, T, Q> const& m1, mat<4, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 2, T, Q> operator/(mat<3, 2, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<3, 2, T, Q> operator/(T scalar, mat<3, 2, T, Q> const& m); + + // -- Boolean operators -- + + template + GLM_FUNC_DECL bool operator==(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL bool operator!=(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2); + +}//namespace glm + +#ifndef GLM_EXTERNAL_TEMPLATE +#include "type_mat3x2.inl" +#endif diff --git a/MacroLibX/third_party/glm/detail/type_mat3x2.inl b/MacroLibX/third_party/glm/detail/type_mat3x2.inl new file mode 100644 index 0000000..b4b948b --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat3x2.inl @@ -0,0 +1,532 @@ +namespace glm +{ + // -- Constructors -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat() +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST + : value{col_type(1, 0), col_type(0, 1), col_type(0, 0)} +# endif + { +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION + this->value[0] = col_type(1, 0); + this->value[1] = col_type(0, 1); + this->value[2] = col_type(0, 0); +# endif + } +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, 
Q>::mat(mat<3, 2, T, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = m[0]; + this->value[1] = m[1]; + this->value[2] = m[2]; +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(T s) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(s, 0), col_type(0, s), col_type(0, 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(s, 0); + this->value[1] = col_type(0, s); + this->value[2] = col_type(0, 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat + ( + T x0, T y0, + T x1, T y1, + T x2, T y2 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(x0, y0), col_type(x1, y1), col_type(x2, y2)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x0, y0); + this->value[1] = col_type(x1, y1); + this->value[2] = col_type(x2, y2); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(col_type const& v0, col_type const& v1, col_type const& v2) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v0), col_type(v1), col_type(v2)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = v0; + this->value[1] = v1; + this->value[2] = v2; +# endif + } + + // -- Conversion constructors -- + + template + template< + typename X0, typename Y0, + typename X1, typename Y1, + typename X2, typename Y2> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat + ( + X0 x0, Y0 y0, + X1 x1, Y1 y1, + X2 x2, Y2 y2 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(x0, y0), col_type(x1, y1), col_type(x2, y2)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x0, y0); + this->value[1] = col_type(x1, y1); + this->value[2] = col_type(x2, y2); +# endif + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(vec<2, V0, Q> const& v0, vec<2, V1, Q> const& v1, vec<2, V2, Q> const& v2) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v0), col_type(v1), col_type(v2)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(v0); + this->value[1] = col_type(v1); + this->value[2] = col_type(v2); +# endif + } + + // -- Matrix conversions -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<3, 2, U, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<2, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = m[0]; + this->value[1] = m[1]; + this->value[2] = col_type(0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<3, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<4, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = 
col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<2, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<2, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<3, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<4, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = m[0]; + this->value[1] = m[1]; + this->value[2] = m[2]; +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<4, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); +# endif + } + + // -- Accesses -- + + template + GLM_FUNC_QUALIFIER typename mat<3, 2, T, Q>::col_type & mat<3, 2, T, Q>::operator[](typename mat<3, 2, T, Q>::length_type i) + { + assert(i < this->length()); + return this->value[i]; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<3, 2, T, Q>::col_type const& mat<3, 2, T, Q>::operator[](typename mat<3, 2, T, Q>::length_type i) const + { + assert(i < this->length()); + return this->value[i]; + } + + // -- Unary updatable operators -- + + template + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator=(mat<3, 2, U, Q> const& m) + { + this->value[0] = m[0]; + this->value[1] = m[1]; + this->value[2] = m[2]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator+=(U s) + { + this->value[0] += s; + this->value[1] += s; + this->value[2] += s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator+=(mat<3, 2, U, Q> const& m) + { + this->value[0] += m[0]; + this->value[1] += m[1]; + this->value[2] += m[2]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator-=(U s) + { + this->value[0] -= s; + this->value[1] -= s; + this->value[2] -= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator-=(mat<3, 2, U, Q> const& m) + { + this->value[0] -= m[0]; + this->value[1] -= m[1]; + this->value[2] -= m[2]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator*=(U s) + { + this->value[0] *= s; + this->value[1] *= s; + this->value[2] *= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER 
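+	// Only the scalar form of operator/= exists for this non-square type:
+	// dividing by another matrix would require inverse(), which is defined
+	// for square matrices only. Each column is divided by the scalar.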
mat<3, 2, T, Q> & mat<3, 2, T, Q>::operator/=(U s) + { + this->value[0] /= s; + this->value[1] /= s; + this->value[2] /= s; + return *this; + } + + // -- Increment and decrement operators -- + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator++() + { + ++this->value[0]; + ++this->value[1]; + ++this->value[2]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator--() + { + --this->value[0]; + --this->value[1]; + --this->value[2]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> mat<3, 2, T, Q>::operator++(int) + { + mat<3, 2, T, Q> Result(*this); + ++*this; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> mat<3, 2, T, Q>::operator--(int) + { + mat<3, 2, T, Q> Result(*this); + --*this; + return Result; + } + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator+(mat<3, 2, T, Q> const& m) + { + return m; + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator-(mat<3, 2, T, Q> const& m) + { + return mat<3, 2, T, Q>( + -m[0], + -m[1], + -m[2]); + } + + // -- Binary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator+(mat<3, 2, T, Q> const& m, T scalar) + { + return mat<3, 2, T, Q>( + m[0] + scalar, + m[1] + scalar, + m[2] + scalar); + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator+(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2) + { + return mat<3, 2, T, Q>( + m1[0] + m2[0], + m1[1] + m2[1], + m1[2] + m2[2]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator-(mat<3, 2, T, Q> const& m, T scalar) + { + return mat<3, 2, T, Q>( + m[0] - scalar, + m[1] - scalar, + m[2] - scalar); + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator-(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2) + { + return mat<3, 2, T, Q>( + m1[0] - m2[0], + m1[1] - m2[1], + m1[2] - m2[2]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator*(mat<3, 2, T, Q> const& m, T scalar) + { + return mat<3, 2, T, Q>( + m[0] * scalar, + m[1] * scalar, + m[2] * scalar); + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator*(T scalar, mat<3, 2, T, Q> const& m) + { + return mat<3, 2, T, Q>( + m[0] * scalar, + m[1] * scalar, + m[2] * scalar); + } + + template + GLM_FUNC_QUALIFIER typename mat<3, 2, T, Q>::col_type operator*(mat<3, 2, T, Q> const& m, typename mat<3, 2, T, Q>::row_type const& v) + { + return typename mat<3, 2, T, Q>::col_type( + m[0][0] * v.x + m[1][0] * v.y + m[2][0] * v.z, + m[0][1] * v.x + m[1][1] * v.y + m[2][1] * v.z); + } + + template + GLM_FUNC_QUALIFIER typename mat<3, 2, T, Q>::row_type operator*(typename mat<3, 2, T, Q>::col_type const& v, mat<3, 2, T, Q> const& m) + { + return typename mat<3, 2, T, Q>::row_type( + v.x * m[0][0] + v.y * m[0][1], + v.x * m[1][0] + v.y * m[1][1], + v.x * m[2][0] + v.y * m[2][1]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator*(mat<3, 2, T, Q> const& m1, mat<2, 3, T, Q> const& m2) + { + const T SrcA00 = m1[0][0]; + const T SrcA01 = m1[0][1]; + const T SrcA10 = m1[1][0]; + const T SrcA11 = m1[1][1]; + const T SrcA20 = m1[2][0]; + const T SrcA21 = m1[2][1]; + + const T SrcB00 = m2[0][0]; + const T SrcB01 = m2[0][1]; + const T SrcB02 = m2[0][2]; + const T SrcB10 = m2[1][0]; + const T SrcB11 = m2[1][1]; + const T SrcB12 = m2[1][2]; + + mat<2, 2, T, Q> Result; + Result[0][0] = SrcA00 * SrcB00 + SrcA10 * SrcB01 + SrcA20 * SrcB02; + Result[0][1] = SrcA01 * SrcB00 + SrcA11 * SrcB01 + SrcA21 * SrcB02; + Result[1][0] = SrcA00 * 
SrcB10 + SrcA10 * SrcB11 + SrcA20 * SrcB12; + Result[1][1] = SrcA01 * SrcB10 + SrcA11 * SrcB11 + SrcA21 * SrcB12; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator*(mat<3, 2, T, Q> const& m1, mat<3, 3, T, Q> const& m2) + { + return mat<3, 2, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2], + m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2], + m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator*(mat<3, 2, T, Q> const& m1, mat<4, 3, T, Q> const& m2) + { + return mat<4, 2, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2], + m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2], + m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2], + m1[0][0] * m2[3][0] + m1[1][0] * m2[3][1] + m1[2][0] * m2[3][2], + m1[0][1] * m2[3][0] + m1[1][1] * m2[3][1] + m1[2][1] * m2[3][2]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator/(mat<3, 2, T, Q> const& m, T scalar) + { + return mat<3, 2, T, Q>( + m[0] / scalar, + m[1] / scalar, + m[2] / scalar); + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator/(T scalar, mat<3, 2, T, Q> const& m) + { + return mat<3, 2, T, Q>( + scalar / m[0], + scalar / m[1], + scalar / m[2]); + } + + // -- Boolean operators -- + + template + GLM_FUNC_QUALIFIER bool operator==(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2) + { + return (m1[0] == m2[0]) && (m1[1] == m2[1]) && (m1[2] == m2[2]); + } + + template + GLM_FUNC_QUALIFIER bool operator!=(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2) + { + return (m1[0] != m2[0]) || (m1[1] != m2[1]) || (m1[2] != m2[2]); + } +} //namespace glm diff --git a/MacroLibX/third_party/glm/detail/type_mat3x3.hpp b/MacroLibX/third_party/glm/detail/type_mat3x3.hpp new file mode 100644 index 0000000..3174872 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat3x3.hpp @@ -0,0 +1,184 @@ +/// @ref core +/// @file glm/detail/type_mat3x3.hpp + +#pragma once + +#include "type_vec3.hpp" +#include +#include + +namespace glm +{ + template + struct mat<3, 3, T, Q> + { + typedef vec<3, T, Q> col_type; + typedef vec<3, T, Q> row_type; + typedef mat<3, 3, T, Q> type; + typedef mat<3, 3, T, Q> transpose_type; + typedef T value_type; + + private: + col_type value[3]; + + public: + // -- Accesses -- + + typedef length_t length_type; + GLM_FUNC_DECL static GLM_CONSTEXPR length_type length() { return 3; } + + GLM_FUNC_DECL col_type & operator[](length_type i); + GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; + + // -- Constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; + template + GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<3, 3, T, P> const& m); + + GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T scalar); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + T x0, T y0, T z0, + T x1, T y1, T z1, + T x2, T y2, T z2); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + col_type const& v0, + col_type const& v1, + col_type const& v2); + + // -- Conversions -- + + template< + typename X1, typename Y1, typename Z1, + typename X2, typename Y2, typename 
Z2, + typename X3, typename Y3, typename Z3> + GLM_FUNC_DECL GLM_CONSTEXPR mat( + X1 x1, Y1 y1, Z1 z1, + X2 x2, Y2 y2, Z2 z2, + X3 x3, Y3 y3, Z3 z3); + + template + GLM_FUNC_DECL GLM_CONSTEXPR mat( + vec<3, V1, Q> const& v1, + vec<3, V2, Q> const& v2, + vec<3, V3, Q> const& v3); + + // -- Matrix conversions -- + + template + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, U, P> const& m); + + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, T, Q> const& x); + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_DECL mat<3, 3, T, Q> & operator=(mat<3, 3, U, Q> const& m); + template + GLM_FUNC_DECL mat<3, 3, T, Q> & operator+=(U s); + template + GLM_FUNC_DECL mat<3, 3, T, Q> & operator+=(mat<3, 3, U, Q> const& m); + template + GLM_FUNC_DECL mat<3, 3, T, Q> & operator-=(U s); + template + GLM_FUNC_DECL mat<3, 3, T, Q> & operator-=(mat<3, 3, U, Q> const& m); + template + GLM_FUNC_DECL mat<3, 3, T, Q> & operator*=(U s); + template + GLM_FUNC_DECL mat<3, 3, T, Q> & operator*=(mat<3, 3, U, Q> const& m); + template + GLM_FUNC_DECL mat<3, 3, T, Q> & operator/=(U s); + template + GLM_FUNC_DECL mat<3, 3, T, Q> & operator/=(mat<3, 3, U, Q> const& m); + + // -- Increment and decrement operators -- + + GLM_FUNC_DECL mat<3, 3, T, Q> & operator++(); + GLM_FUNC_DECL mat<3, 3, T, Q> & operator--(); + GLM_FUNC_DECL mat<3, 3, T, Q> operator++(int); + GLM_FUNC_DECL mat<3, 3, T, Q> operator--(int); + }; + + // -- Unary operators -- + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator+(mat<3, 3, T, Q> const& m); + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator-(mat<3, 3, T, Q> const& m); + + // -- Binary operators -- + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator+(mat<3, 3, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator+(T scalar, mat<3, 3, T, Q> const& m); + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator+(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator-(mat<3, 3, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator-(T scalar, mat<3, 3, T, Q> const& m); + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator-(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator*(mat<3, 3, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator*(T scalar, mat<3, 3, T, Q> const& m); + + template + GLM_FUNC_DECL typename mat<3, 3, T, Q>::col_type operator*(mat<3, 3, T, Q> const& m, typename mat<3, 3, T, Q>::row_type const& v); + + template + GLM_FUNC_DECL typename mat<3, 3, T, Q>::row_type operator*(typename mat<3, 3, T, Q>::col_type const& v, mat<3, 3, T, Q> const& m); + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator*(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<2, 3, T, Q> operator*(mat<3, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 3, T, Q> operator*(mat<3, 3, T, Q> const& m1, mat<4, 3, T, Q> 
const& m2); + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator/(mat<3, 3, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator/(T scalar, mat<3, 3, T, Q> const& m); + + template + GLM_FUNC_DECL typename mat<3, 3, T, Q>::col_type operator/(mat<3, 3, T, Q> const& m, typename mat<3, 3, T, Q>::row_type const& v); + + template + GLM_FUNC_DECL typename mat<3, 3, T, Q>::row_type operator/(typename mat<3, 3, T, Q>::col_type const& v, mat<3, 3, T, Q> const& m); + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator/(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2); + + // -- Boolean operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR bool operator==(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL bool operator!=(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2); +}//namespace glm + +#ifndef GLM_EXTERNAL_TEMPLATE +#include "type_mat3x3.inl" +#endif diff --git a/MacroLibX/third_party/glm/detail/type_mat3x3.inl b/MacroLibX/third_party/glm/detail/type_mat3x3.inl new file mode 100644 index 0000000..1ddaf99 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat3x3.inl @@ -0,0 +1,601 @@ +#include "../matrix.hpp" + +namespace glm +{ + // -- Constructors -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat() +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST + : value{col_type(1, 0, 0), col_type(0, 1, 0), col_type(0, 0, 1)} +# endif + { +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION + this->value[0] = col_type(1, 0, 0); + this->value[1] = col_type(0, 1, 0); + this->value[2] = col_type(0, 0, 1); +# endif + } +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<3, 3, T, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(T s) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(s, 0, 0), col_type(0, s, 0), col_type(0, 0, s)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(s, 0, 0); + this->value[1] = col_type(0, s, 0); + this->value[2] = col_type(0, 0, s); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat + ( + T x0, T y0, T z0, + T x1, T y1, T z1, + T x2, T y2, T z2 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(x0, y0, z0), col_type(x1, y1, z1), col_type(x2, y2, z2)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x0, y0, z0); + this->value[1] = col_type(x1, y1, z1); + this->value[2] = col_type(x2, y2, z2); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(col_type const& v0, col_type const& v1, col_type const& v2) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v0), col_type(v1), col_type(v2)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(v0); + this->value[1] = col_type(v1); + this->value[2] = col_type(v2); +# endif + } + + // -- Conversion constructors -- + + template + template< + typename X1, typename Y1, typename Z1, + typename X2, typename Y2, typename Z2, + typename X3, typename Y3, typename Z3> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat + ( + X1 x1, Y1 y1, Z1 z1, + X2 x2, Y2 y2, Z2 z2, + X3 x3, Y3 y3, Z3 z3 + ) +# if GLM_HAS_INITIALIZER_LISTS + : 
value{col_type(x1, y1, z1), col_type(x2, y2, z2), col_type(x3, y3, z3)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x1, y1, z1); + this->value[1] = col_type(x2, y2, z2); + this->value[2] = col_type(x3, y3, z3); +# endif + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(vec<3, V1, Q> const& v1, vec<3, V2, Q> const& v2, vec<3, V3, Q> const& v3) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v1), col_type(v2), col_type(v3)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(v1); + this->value[1] = col_type(v2); + this->value[2] = col_type(v3); +# endif + } + + // -- Matrix conversions -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<3, 3, U, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<2, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0), col_type(0, 0, 1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); + this->value[2] = col_type(0, 0, 1); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<4, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<2, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(0, 0, 1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(0, 0, 1); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<3, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); + this->value[2] = col_type(m[2], 1); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<2, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(0, 0, 1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(0, 0, 1); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<4, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); + this->value[2] = col_type(m[2], 1); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<3, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); +# endif + } + 
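+	// These shape conversions keep the top-left 3x3 block: columns beyond
+	// the third are dropped and vec4 columns are truncated to vec3, since
+	// GLM matrices are column-major and mat<C, R> holds C columns of R rows.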
+ template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<4, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); +# endif + } + + // -- Accesses -- + + template + GLM_FUNC_QUALIFIER typename mat<3, 3, T, Q>::col_type & mat<3, 3, T, Q>::operator[](typename mat<3, 3, T, Q>::length_type i) + { + assert(i < this->length()); + return this->value[i]; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<3, 3, T, Q>::col_type const& mat<3, 3, T, Q>::operator[](typename mat<3, 3, T, Q>::length_type i) const + { + assert(i < this->length()); + return this->value[i]; + } + + // -- Unary updatable operators -- + + template + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator=(mat<3, 3, U, Q> const& m) + { + this->value[0] = m[0]; + this->value[1] = m[1]; + this->value[2] = m[2]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator+=(U s) + { + this->value[0] += s; + this->value[1] += s; + this->value[2] += s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator+=(mat<3, 3, U, Q> const& m) + { + this->value[0] += m[0]; + this->value[1] += m[1]; + this->value[2] += m[2]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator-=(U s) + { + this->value[0] -= s; + this->value[1] -= s; + this->value[2] -= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator-=(mat<3, 3, U, Q> const& m) + { + this->value[0] -= m[0]; + this->value[1] -= m[1]; + this->value[2] -= m[2]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator*=(U s) + { + this->value[0] *= s; + this->value[1] *= s; + this->value[2] *= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator*=(mat<3, 3, U, Q> const& m) + { + return (*this = *this * m); + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator/=(U s) + { + this->value[0] /= s; + this->value[1] /= s; + this->value[2] /= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator/=(mat<3, 3, U, Q> const& m) + { + return *this *= inverse(m); + } + + // -- Increment and decrement operators -- + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator++() + { + ++this->value[0]; + ++this->value[1]; + ++this->value[2]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator--() + { + --this->value[0]; + --this->value[1]; + --this->value[2]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> mat<3, 3, T, Q>::operator++(int) + { + mat<3, 3, T, Q> Result(*this); + ++*this; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> mat<3, 3, T, Q>::operator--(int) + { + mat<3, 3, T, Q> Result(*this); + --*this; + return Result; + } + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator+(mat<3, 3, T, Q> const& m) + { + return m; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator-(mat<3, 3, T, Q> const& m) + { + return mat<3, 3, T, Q>( + -m[0], + -m[1], + -m[2]); + } + + // -- Binary 
arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator+(mat<3, 3, T, Q> const& m, T scalar) + { + return mat<3, 3, T, Q>( + m[0] + scalar, + m[1] + scalar, + m[2] + scalar); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator+(T scalar, mat<3, 3, T, Q> const& m) + { + return mat<3, 3, T, Q>( + m[0] + scalar, + m[1] + scalar, + m[2] + scalar); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator+(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2) + { + return mat<3, 3, T, Q>( + m1[0] + m2[0], + m1[1] + m2[1], + m1[2] + m2[2]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator-(mat<3, 3, T, Q> const& m, T scalar) + { + return mat<3, 3, T, Q>( + m[0] - scalar, + m[1] - scalar, + m[2] - scalar); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator-(T scalar, mat<3, 3, T, Q> const& m) + { + return mat<3, 3, T, Q>( + scalar - m[0], + scalar - m[1], + scalar - m[2]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator-(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2) + { + return mat<3, 3, T, Q>( + m1[0] - m2[0], + m1[1] - m2[1], + m1[2] - m2[2]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator*(mat<3, 3, T, Q> const& m, T scalar) + { + return mat<3, 3, T, Q>( + m[0] * scalar, + m[1] * scalar, + m[2] * scalar); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator*(T scalar, mat<3, 3, T, Q> const& m) + { + return mat<3, 3, T, Q>( + m[0] * scalar, + m[1] * scalar, + m[2] * scalar); + } + + template + GLM_FUNC_QUALIFIER typename mat<3, 3, T, Q>::col_type operator*(mat<3, 3, T, Q> const& m, typename mat<3, 3, T, Q>::row_type const& v) + { + return typename mat<3, 3, T, Q>::col_type( + m[0][0] * v.x + m[1][0] * v.y + m[2][0] * v.z, + m[0][1] * v.x + m[1][1] * v.y + m[2][1] * v.z, + m[0][2] * v.x + m[1][2] * v.y + m[2][2] * v.z); + } + + template + GLM_FUNC_QUALIFIER typename mat<3, 3, T, Q>::row_type operator*(typename mat<3, 3, T, Q>::col_type const& v, mat<3, 3, T, Q> const& m) + { + return typename mat<3, 3, T, Q>::row_type( + m[0][0] * v.x + m[0][1] * v.y + m[0][2] * v.z, + m[1][0] * v.x + m[1][1] * v.y + m[1][2] * v.z, + m[2][0] * v.x + m[2][1] * v.y + m[2][2] * v.z); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator*(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2) + { + T const SrcA00 = m1[0][0]; + T const SrcA01 = m1[0][1]; + T const SrcA02 = m1[0][2]; + T const SrcA10 = m1[1][0]; + T const SrcA11 = m1[1][1]; + T const SrcA12 = m1[1][2]; + T const SrcA20 = m1[2][0]; + T const SrcA21 = m1[2][1]; + T const SrcA22 = m1[2][2]; + + T const SrcB00 = m2[0][0]; + T const SrcB01 = m2[0][1]; + T const SrcB02 = m2[0][2]; + T const SrcB10 = m2[1][0]; + T const SrcB11 = m2[1][1]; + T const SrcB12 = m2[1][2]; + T const SrcB20 = m2[2][0]; + T const SrcB21 = m2[2][1]; + T const SrcB22 = m2[2][2]; + + mat<3, 3, T, Q> Result; + Result[0][0] = SrcA00 * SrcB00 + SrcA10 * SrcB01 + SrcA20 * SrcB02; + Result[0][1] = SrcA01 * SrcB00 + SrcA11 * SrcB01 + SrcA21 * SrcB02; + Result[0][2] = SrcA02 * SrcB00 + SrcA12 * SrcB01 + SrcA22 * SrcB02; + Result[1][0] = SrcA00 * SrcB10 + SrcA10 * SrcB11 + SrcA20 * SrcB12; + Result[1][1] = SrcA01 * SrcB10 + SrcA11 * SrcB11 + SrcA21 * SrcB12; + Result[1][2] = SrcA02 * SrcB10 + SrcA12 * SrcB11 + SrcA22 * SrcB12; + Result[2][0] = SrcA00 * SrcB20 + SrcA10 * SrcB21 + SrcA20 * SrcB22; + Result[2][1] = SrcA01 * SrcB20 + SrcA11 * SrcB21 + SrcA21 * SrcB22; + Result[2][2] = SrcA02 * SrcB20 + SrcA12 * SrcB21 + SrcA22 * SrcB22; + return Result; + } + + template + 
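+	// In GLM, mat<C, R> has C columns of R components, so a mat<3, 3> times
+	// a mat<2, 3> (2 columns, 3 rows) yields a mat<2, 3>; each result column
+	// below is simply m1 * m2[col].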
GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator*(mat<3, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2) + { + return mat<2, 3, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2], + m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2], + m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator*(mat<3, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2) + { + return mat<4, 3, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2], + m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2], + m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2], + m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2], + m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2], + m1[0][2] * m2[2][0] + m1[1][2] * m2[2][1] + m1[2][2] * m2[2][2], + m1[0][0] * m2[3][0] + m1[1][0] * m2[3][1] + m1[2][0] * m2[3][2], + m1[0][1] * m2[3][0] + m1[1][1] * m2[3][1] + m1[2][1] * m2[3][2], + m1[0][2] * m2[3][0] + m1[1][2] * m2[3][1] + m1[2][2] * m2[3][2]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator/(mat<3, 3, T, Q> const& m, T scalar) + { + return mat<3, 3, T, Q>( + m[0] / scalar, + m[1] / scalar, + m[2] / scalar); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator/(T scalar, mat<3, 3, T, Q> const& m) + { + return mat<3, 3, T, Q>( + scalar / m[0], + scalar / m[1], + scalar / m[2]); + } + + template + GLM_FUNC_QUALIFIER typename mat<3, 3, T, Q>::col_type operator/(mat<3, 3, T, Q> const& m, typename mat<3, 3, T, Q>::row_type const& v) + { + return inverse(m) * v; + } + + template + GLM_FUNC_QUALIFIER typename mat<3, 3, T, Q>::row_type operator/(typename mat<3, 3, T, Q>::col_type const& v, mat<3, 3, T, Q> const& m) + { + return v * inverse(m); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator/(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2) + { + mat<3, 3, T, Q> m1_copy(m1); + return m1_copy /= m2; + } + + // -- Boolean operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator==(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2) + { + return (m1[0] == m2[0]) && (m1[1] == m2[1]) && (m1[2] == m2[2]); + } + + template + GLM_FUNC_QUALIFIER bool operator!=(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2) + { + return (m1[0] != m2[0]) || (m1[1] != m2[1]) || (m1[2] != m2[2]); + } +} //namespace glm diff --git a/MacroLibX/third_party/glm/detail/type_mat3x4.hpp b/MacroLibX/third_party/glm/detail/type_mat3x4.hpp new file mode 100644 index 0000000..6e40b90 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat3x4.hpp @@ -0,0 +1,166 @@ +/// @ref core +/// @file glm/detail/type_mat3x4.hpp + +#pragma once + +#include "type_vec3.hpp" +#include "type_vec4.hpp" +#include +#include + +namespace glm +{ + template + struct mat<3, 4, T, Q> + { + typedef vec<4, T, Q> col_type; + typedef vec<3, T, Q> row_type; + typedef mat<3, 4, T, Q> type; + typedef mat<4, 3, T, Q> transpose_type; + typedef T value_type; + + private: + col_type value[3]; + + public: + // -- Accesses -- + + typedef length_t length_type; + GLM_FUNC_DECL static 
GLM_CONSTEXPR length_type length() { return 3; } + + GLM_FUNC_DECL col_type & operator[](length_type i); + GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; + + // -- Constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; + template + GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<3, 4, T, P> const& m); + + GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T scalar); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + T x0, T y0, T z0, T w0, + T x1, T y1, T z1, T w1, + T x2, T y2, T z2, T w2); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + col_type const& v0, + col_type const& v1, + col_type const& v2); + + // -- Conversions -- + + template< + typename X1, typename Y1, typename Z1, typename W1, + typename X2, typename Y2, typename Z2, typename W2, + typename X3, typename Y3, typename Z3, typename W3> + GLM_FUNC_DECL GLM_CONSTEXPR mat( + X1 x1, Y1 y1, Z1 z1, W1 w1, + X2 x2, Y2 y2, Z2 z2, W2 w2, + X3 x3, Y3 y3, Z3 z3, W3 w3); + + template + GLM_FUNC_DECL GLM_CONSTEXPR mat( + vec<4, V1, Q> const& v1, + vec<4, V2, Q> const& v2, + vec<4, V3, Q> const& v3); + + // -- Matrix conversions -- + + template + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, U, P> const& m); + + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, T, Q> const& x); + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_DECL mat<3, 4, T, Q> & operator=(mat<3, 4, U, Q> const& m); + template + GLM_FUNC_DECL mat<3, 4, T, Q> & operator+=(U s); + template + GLM_FUNC_DECL mat<3, 4, T, Q> & operator+=(mat<3, 4, U, Q> const& m); + template + GLM_FUNC_DECL mat<3, 4, T, Q> & operator-=(U s); + template + GLM_FUNC_DECL mat<3, 4, T, Q> & operator-=(mat<3, 4, U, Q> const& m); + template + GLM_FUNC_DECL mat<3, 4, T, Q> & operator*=(U s); + template + GLM_FUNC_DECL mat<3, 4, T, Q> & operator/=(U s); + + // -- Increment and decrement operators -- + + GLM_FUNC_DECL mat<3, 4, T, Q> & operator++(); + GLM_FUNC_DECL mat<3, 4, T, Q> & operator--(); + GLM_FUNC_DECL mat<3, 4, T, Q> operator++(int); + GLM_FUNC_DECL mat<3, 4, T, Q> operator--(int); + }; + + // -- Unary operators -- + + template + GLM_FUNC_DECL mat<3, 4, T, Q> operator+(mat<3, 4, T, Q> const& m); + + template + GLM_FUNC_DECL mat<3, 4, T, Q> operator-(mat<3, 4, T, Q> const& m); + + // -- Binary operators -- + + template + GLM_FUNC_DECL mat<3, 4, T, Q> operator+(mat<3, 4, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<3, 4, T, Q> operator+(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 4, T, Q> operator-(mat<3, 4, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<3, 4, T, Q> operator-(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 4, T, Q> operator*(mat<3, 4, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<3, 4, T, Q> operator*(T scalar, mat<3, 4, T, Q> const& m); + + template + GLM_FUNC_DECL typename mat<3, 4, T, Q>::col_type operator*(mat<3, 4, T, Q> const& m, typename mat<3, 4, T, Q>::row_type const& v); + + template + GLM_FUNC_DECL 
typename mat<3, 4, T, Q>::row_type operator*(typename mat<3, 4, T, Q>::col_type const& v, mat<3, 4, T, Q> const& m); + + template + GLM_FUNC_DECL mat<4, 4, T, Q> operator*(mat<3, 4, T, Q> const& m1, mat<4, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<2, 4, T, Q> operator*(mat<3, 4, T, Q> const& m1, mat<2, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 4, T, Q> operator*(mat<3, 4, T, Q> const& m1, mat<3, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 4, T, Q> operator/(mat<3, 4, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<3, 4, T, Q> operator/(T scalar, mat<3, 4, T, Q> const& m); + + // -- Boolean operators -- + + template + GLM_FUNC_DECL bool operator==(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL bool operator!=(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2); +}//namespace glm + +#ifndef GLM_EXTERNAL_TEMPLATE +#include "type_mat3x4.inl" +#endif diff --git a/MacroLibX/third_party/glm/detail/type_mat3x4.inl b/MacroLibX/third_party/glm/detail/type_mat3x4.inl new file mode 100644 index 0000000..6ee416c --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat3x4.inl @@ -0,0 +1,578 @@ +namespace glm +{ + // -- Constructors -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat() +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST + : value{col_type(1, 0, 0, 0), col_type(0, 1, 0, 0), col_type(0, 0, 1, 0)} +# endif + { +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION + this->value[0] = col_type(1, 0, 0, 0); + this->value[1] = col_type(0, 1, 0, 0); + this->value[2] = col_type(0, 0, 1, 0); +# endif + } +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<3, 4, T, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = m[0]; + this->value[1] = m[1]; + this->value[2] = m[2]; +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(T s) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(s, 0, 0, 0), col_type(0, s, 0, 0), col_type(0, 0, s, 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(s, 0, 0, 0); + this->value[1] = col_type(0, s, 0, 0); + this->value[2] = col_type(0, 0, s, 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat + ( + T x0, T y0, T z0, T w0, + T x1, T y1, T z1, T w1, + T x2, T y2, T z2, T w2 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{ + col_type(x0, y0, z0, w0), + col_type(x1, y1, z1, w1), + col_type(x2, y2, z2, w2)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x0, y0, z0, w0); + this->value[1] = col_type(x1, y1, z1, w1); + this->value[2] = col_type(x2, y2, z2, w2); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(col_type const& v0, col_type const& v1, col_type const& v2) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v0), col_type(v1), col_type(v2)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = v0; + this->value[1] = v1; + this->value[2] = v2; +# endif + } + + // -- Conversion constructors -- + + template + template< + typename X0, typename Y0, typename Z0, typename W0, + typename X1, typename Y1, typename Z1, typename W1, + typename X2, typename Y2, typename Z2, typename W2> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat + ( + X0 x0, Y0 y0, Z0 z0, W0 w0, + X1 x1, Y1 y1, Z1 z1, W1 
w1, + X2 x2, Y2 y2, Z2 z2, W2 w2 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{ + col_type(x0, y0, z0, w0), + col_type(x1, y1, z1, w1), + col_type(x2, y2, z2, w2)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x0, y0, z0, w0); + this->value[1] = col_type(x1, y1, z1, w1); + this->value[2] = col_type(x2, y2, z2, w2); +# endif + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(vec<4, V1, Q> const& v0, vec<4, V2, Q> const& v1, vec<4, V3, Q> const& v2) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v0), col_type(v1), col_type(v2)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(v0); + this->value[1] = col_type(v1); + this->value[2] = col_type(v2); +# endif + } + + // -- Matrix conversions -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<3, 4, U, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<2, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0), col_type(0, 0, 1, 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0, 0); + this->value[1] = col_type(m[1], 0, 0); + this->value[2] = col_type(0, 0, 1, 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<3, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); + this->value[2] = col_type(m[2], 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<4, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<2, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0), col_type(0, 0, 1, 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); + this->value[2] = col_type(0, 0, 1, 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<3, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0), col_type(m[2], 1, 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0, 0); + this->value[1] = col_type(m[1], 0, 0); + this->value[2] = col_type(m[2], 1, 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<2, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(0, 0, 1, 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(0, 0, 1, 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<4, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0), 
col_type(m[2], 1, 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0, 0); + this->value[1] = col_type(m[1], 0, 0); + this->value[2] = col_type(m[2], 1, 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<4, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); + this->value[2] = col_type(m[2], 0); +# endif + } + + // -- Accesses -- + + template + GLM_FUNC_QUALIFIER typename mat<3, 4, T, Q>::col_type & mat<3, 4, T, Q>::operator[](typename mat<3, 4, T, Q>::length_type i) + { + assert(i < this->length()); + return this->value[i]; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<3, 4, T, Q>::col_type const& mat<3, 4, T, Q>::operator[](typename mat<3, 4, T, Q>::length_type i) const + { + assert(i < this->length()); + return this->value[i]; + } + + // -- Unary updatable operators -- + + template + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator=(mat<3, 4, U, Q> const& m) + { + this->value[0] = m[0]; + this->value[1] = m[1]; + this->value[2] = m[2]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator+=(U s) + { + this->value[0] += s; + this->value[1] += s; + this->value[2] += s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator+=(mat<3, 4, U, Q> const& m) + { + this->value[0] += m[0]; + this->value[1] += m[1]; + this->value[2] += m[2]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator-=(U s) + { + this->value[0] -= s; + this->value[1] -= s; + this->value[2] -= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator-=(mat<3, 4, U, Q> const& m) + { + this->value[0] -= m[0]; + this->value[1] -= m[1]; + this->value[2] -= m[2]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator*=(U s) + { + this->value[0] *= s; + this->value[1] *= s; + this->value[2] *= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> & mat<3, 4, T, Q>::operator/=(U s) + { + this->value[0] /= s; + this->value[1] /= s; + this->value[2] /= s; + return *this; + } + + // -- Increment and decrement operators -- + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator++() + { + ++this->value[0]; + ++this->value[1]; + ++this->value[2]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator--() + { + --this->value[0]; + --this->value[1]; + --this->value[2]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> mat<3, 4, T, Q>::operator++(int) + { + mat<3, 4, T, Q> Result(*this); + ++*this; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> mat<3, 4, T, Q>::operator--(int) + { + mat<3, 4, T, Q> Result(*this); + --*this; + return Result; + } + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator+(mat<3, 4, T, Q> const& m) + { + return m; + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator-(mat<3, 4, T, Q> const& m) + { + return mat<3, 4, T, Q>( + -m[0], + -m[1], + -m[2]); + } + + // -- Binary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator+(mat<3, 4, T, 
Q> const& m, T scalar) + { + return mat<3, 4, T, Q>( + m[0] + scalar, + m[1] + scalar, + m[2] + scalar); + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator+(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2) + { + return mat<3, 4, T, Q>( + m1[0] + m2[0], + m1[1] + m2[1], + m1[2] + m2[2]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator-(mat<3, 4, T, Q> const& m, T scalar) + { + return mat<3, 4, T, Q>( + m[0] - scalar, + m[1] - scalar, + m[2] - scalar); + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator-(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2) + { + return mat<3, 4, T, Q>( + m1[0] - m2[0], + m1[1] - m2[1], + m1[2] - m2[2]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator*(mat<3, 4, T, Q> const& m, T scalar) + { + return mat<3, 4, T, Q>( + m[0] * scalar, + m[1] * scalar, + m[2] * scalar); + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator*(T scalar, mat<3, 4, T, Q> const& m) + { + return mat<3, 4, T, Q>( + m[0] * scalar, + m[1] * scalar, + m[2] * scalar); + } + + template + GLM_FUNC_QUALIFIER typename mat<3, 4, T, Q>::col_type operator* + ( + mat<3, 4, T, Q> const& m, + typename mat<3, 4, T, Q>::row_type const& v + ) + { + return typename mat<3, 4, T, Q>::col_type( + m[0][0] * v.x + m[1][0] * v.y + m[2][0] * v.z, + m[0][1] * v.x + m[1][1] * v.y + m[2][1] * v.z, + m[0][2] * v.x + m[1][2] * v.y + m[2][2] * v.z, + m[0][3] * v.x + m[1][3] * v.y + m[2][3] * v.z); + } + + template + GLM_FUNC_QUALIFIER typename mat<3, 4, T, Q>::row_type operator* + ( + typename mat<3, 4, T, Q>::col_type const& v, + mat<3, 4, T, Q> const& m + ) + { + return typename mat<3, 4, T, Q>::row_type( + v.x * m[0][0] + v.y * m[0][1] + v.z * m[0][2] + v.w * m[0][3], + v.x * m[1][0] + v.y * m[1][1] + v.z * m[1][2] + v.w * m[1][3], + v.x * m[2][0] + v.y * m[2][1] + v.z * m[2][2] + v.w * m[2][3]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator*(mat<3, 4, T, Q> const& m1, mat<4, 3, T, Q> const& m2) + { + const T SrcA00 = m1[0][0]; + const T SrcA01 = m1[0][1]; + const T SrcA02 = m1[0][2]; + const T SrcA03 = m1[0][3]; + const T SrcA10 = m1[1][0]; + const T SrcA11 = m1[1][1]; + const T SrcA12 = m1[1][2]; + const T SrcA13 = m1[1][3]; + const T SrcA20 = m1[2][0]; + const T SrcA21 = m1[2][1]; + const T SrcA22 = m1[2][2]; + const T SrcA23 = m1[2][3]; + + const T SrcB00 = m2[0][0]; + const T SrcB01 = m2[0][1]; + const T SrcB02 = m2[0][2]; + const T SrcB10 = m2[1][0]; + const T SrcB11 = m2[1][1]; + const T SrcB12 = m2[1][2]; + const T SrcB20 = m2[2][0]; + const T SrcB21 = m2[2][1]; + const T SrcB22 = m2[2][2]; + const T SrcB30 = m2[3][0]; + const T SrcB31 = m2[3][1]; + const T SrcB32 = m2[3][2]; + + mat<4, 4, T, Q> Result; + Result[0][0] = SrcA00 * SrcB00 + SrcA10 * SrcB01 + SrcA20 * SrcB02; + Result[0][1] = SrcA01 * SrcB00 + SrcA11 * SrcB01 + SrcA21 * SrcB02; + Result[0][2] = SrcA02 * SrcB00 + SrcA12 * SrcB01 + SrcA22 * SrcB02; + Result[0][3] = SrcA03 * SrcB00 + SrcA13 * SrcB01 + SrcA23 * SrcB02; + Result[1][0] = SrcA00 * SrcB10 + SrcA10 * SrcB11 + SrcA20 * SrcB12; + Result[1][1] = SrcA01 * SrcB10 + SrcA11 * SrcB11 + SrcA21 * SrcB12; + Result[1][2] = SrcA02 * SrcB10 + SrcA12 * SrcB11 + SrcA22 * SrcB12; + Result[1][3] = SrcA03 * SrcB10 + SrcA13 * SrcB11 + SrcA23 * SrcB12; + Result[2][0] = SrcA00 * SrcB20 + SrcA10 * SrcB21 + SrcA20 * SrcB22; + Result[2][1] = SrcA01 * SrcB20 + SrcA11 * SrcB21 + SrcA21 * SrcB22; + Result[2][2] = SrcA02 * SrcB20 + SrcA12 * SrcB21 + SrcA22 * SrcB22; + Result[2][3] = SrcA03 * SrcB20 + SrcA13 * SrcB21 + SrcA23 * 
SrcB22; + Result[3][0] = SrcA00 * SrcB30 + SrcA10 * SrcB31 + SrcA20 * SrcB32; + Result[3][1] = SrcA01 * SrcB30 + SrcA11 * SrcB31 + SrcA21 * SrcB32; + Result[3][2] = SrcA02 * SrcB30 + SrcA12 * SrcB31 + SrcA22 * SrcB32; + Result[3][3] = SrcA03 * SrcB30 + SrcA13 * SrcB31 + SrcA23 * SrcB32; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator*(mat<3, 4, T, Q> const& m1, mat<2, 3, T, Q> const& m2) + { + return mat<2, 4, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2], + m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2], + m1[0][3] * m2[0][0] + m1[1][3] * m2[0][1] + m1[2][3] * m2[0][2], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2], + m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2], + m1[0][3] * m2[1][0] + m1[1][3] * m2[1][1] + m1[2][3] * m2[1][2]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator*(mat<3, 4, T, Q> const& m1, mat<3, 3, T, Q> const& m2) + { + return mat<3, 4, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2], + m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2], + m1[0][3] * m2[0][0] + m1[1][3] * m2[0][1] + m1[2][3] * m2[0][2], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2], + m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2], + m1[0][3] * m2[1][0] + m1[1][3] * m2[1][1] + m1[2][3] * m2[1][2], + m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2], + m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2], + m1[0][2] * m2[2][0] + m1[1][2] * m2[2][1] + m1[2][2] * m2[2][2], + m1[0][3] * m2[2][0] + m1[1][3] * m2[2][1] + m1[2][3] * m2[2][2]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator/(mat<3, 4, T, Q> const& m, T scalar) + { + return mat<3, 4, T, Q>( + m[0] / scalar, + m[1] / scalar, + m[2] / scalar); + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator/(T scalar, mat<3, 4, T, Q> const& m) + { + return mat<3, 4, T, Q>( + scalar / m[0], + scalar / m[1], + scalar / m[2]); + } + + // -- Boolean operators -- + + template + GLM_FUNC_QUALIFIER bool operator==(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2) + { + return (m1[0] == m2[0]) && (m1[1] == m2[1]) && (m1[2] == m2[2]); + } + + template + GLM_FUNC_QUALIFIER bool operator!=(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2) + { + return (m1[0] != m2[0]) || (m1[1] != m2[1]) || (m1[2] != m2[2]); + } +} //namespace glm diff --git a/MacroLibX/third_party/glm/detail/type_mat4x2.hpp b/MacroLibX/third_party/glm/detail/type_mat4x2.hpp new file mode 100644 index 0000000..8d34352 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat4x2.hpp @@ -0,0 +1,171 @@ +/// @ref core +/// @file glm/detail/type_mat4x2.hpp + +#pragma once + +#include "type_vec2.hpp" +#include "type_vec4.hpp" +#include +#include + +namespace glm +{ + template + struct mat<4, 2, T, Q> + { + typedef vec<2, T, Q> col_type; + typedef vec<4, T, Q> row_type; + typedef mat<4, 2, T, Q> type; + typedef mat<2, 4, T, Q> transpose_type; + typedef T value_type; + + private: + col_type value[4]; + + public: + // -- Accesses -- + + typedef length_t length_type; + GLM_FUNC_DECL static GLM_CONSTEXPR length_type length() { return 4; } + + GLM_FUNC_DECL col_type & operator[](length_type 
i); + GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; + + // -- Constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; + template + GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<4, 2, T, P> const& m); + + GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T scalar); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + T x0, T y0, + T x1, T y1, + T x2, T y2, + T x3, T y3); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + col_type const& v0, + col_type const& v1, + col_type const& v2, + col_type const& v3); + + // -- Conversions -- + + template< + typename X0, typename Y0, + typename X1, typename Y1, + typename X2, typename Y2, + typename X3, typename Y3> + GLM_FUNC_DECL GLM_CONSTEXPR mat( + X0 x0, Y0 y0, + X1 x1, Y1 y1, + X2 x2, Y2 y2, + X3 x3, Y3 y3); + + template + GLM_FUNC_DECL GLM_CONSTEXPR mat( + vec<2, V1, Q> const& v1, + vec<2, V2, Q> const& v2, + vec<2, V3, Q> const& v3, + vec<2, V4, Q> const& v4); + + // -- Matrix conversions -- + + template + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, U, P> const& m); + + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_DECL mat<4, 2, T, Q> & operator=(mat<4, 2, U, Q> const& m); + template + GLM_FUNC_DECL mat<4, 2, T, Q> & operator+=(U s); + template + GLM_FUNC_DECL mat<4, 2, T, Q> & operator+=(mat<4, 2, U, Q> const& m); + template + GLM_FUNC_DECL mat<4, 2, T, Q> & operator-=(U s); + template + GLM_FUNC_DECL mat<4, 2, T, Q> & operator-=(mat<4, 2, U, Q> const& m); + template + GLM_FUNC_DECL mat<4, 2, T, Q> & operator*=(U s); + template + GLM_FUNC_DECL mat<4, 2, T, Q> & operator/=(U s); + + // -- Increment and decrement operators -- + + GLM_FUNC_DECL mat<4, 2, T, Q> & operator++ (); + GLM_FUNC_DECL mat<4, 2, T, Q> & operator-- (); + GLM_FUNC_DECL mat<4, 2, T, Q> operator++(int); + GLM_FUNC_DECL mat<4, 2, T, Q> operator--(int); + }; + + // -- Unary operators -- + + template + GLM_FUNC_DECL mat<4, 2, T, Q> operator+(mat<4, 2, T, Q> const& m); + + template + GLM_FUNC_DECL mat<4, 2, T, Q> operator-(mat<4, 2, T, Q> const& m); + + // -- Binary operators -- + + template + GLM_FUNC_DECL mat<4, 2, T, Q> operator+(mat<4, 2, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<4, 2, T, Q> operator+(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 2, T, Q> operator-(mat<4, 2, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<4, 2, T, Q> operator-(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 2, T, Q> operator*(mat<4, 2, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<4, 2, T, Q> operator*(T scalar, mat<4, 2, T, Q> const& m); + + template + GLM_FUNC_DECL typename mat<4, 2, T, Q>::col_type operator*(mat<4, 2, T, Q> const& m, typename mat<4, 2, T, Q>::row_type const& v); + + template + GLM_FUNC_DECL typename mat<4, 2, T, Q>::row_type operator*(typename mat<4, 2, T, Q>::col_type const& v, mat<4, 2, T, Q> const& m); + + template + GLM_FUNC_DECL mat<2, 2, 
T, Q> operator*(mat<4, 2, T, Q> const& m1, mat<2, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 2, T, Q> operator*(mat<4, 2, T, Q> const& m1, mat<3, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 2, T, Q> operator*(mat<4, 2, T, Q> const& m1, mat<4, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 2, T, Q> operator/(mat<4, 2, T, Q> const& m, T scalar); + + template + GLM_FUNC_DECL mat<4, 2, T, Q> operator/(T scalar, mat<4, 2, T, Q> const& m); + + // -- Boolean operators -- + + template + GLM_FUNC_DECL bool operator==(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2); + + template + GLM_FUNC_DECL bool operator!=(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2); +}//namespace glm + +#ifndef GLM_EXTERNAL_TEMPLATE +#include "type_mat4x2.inl" +#endif diff --git a/MacroLibX/third_party/glm/detail/type_mat4x2.inl b/MacroLibX/third_party/glm/detail/type_mat4x2.inl new file mode 100644 index 0000000..419c80c --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat4x2.inl @@ -0,0 +1,574 @@ +namespace glm +{ + // -- Constructors -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat() +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST + : value{col_type(1, 0), col_type(0, 1), col_type(0, 0), col_type(0, 0)} +# endif + { +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION + this->value[0] = col_type(1, 0); + this->value[1] = col_type(0, 1); + this->value[2] = col_type(0, 0); + this->value[3] = col_type(0, 0); +# endif + } +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<4, 2, T, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = m[0]; + this->value[1] = m[1]; + this->value[2] = m[2]; + this->value[3] = m[3]; +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(T s) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(s, 0), col_type(0, s), col_type(0, 0), col_type(0, 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(s, 0); + this->value[1] = col_type(0, s); + this->value[2] = col_type(0, 0); + this->value[3] = col_type(0, 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat + ( + T x0, T y0, + T x1, T y1, + T x2, T y2, + T x3, T y3 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(x0, y0), col_type(x1, y1), col_type(x2, y2), col_type(x3, y3)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x0, y0); + this->value[1] = col_type(x1, y1); + this->value[2] = col_type(x2, y2); + this->value[3] = col_type(x3, y3); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(col_type const& v0, col_type const& v1, col_type const& v2, col_type const& v3) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v0), col_type(v1), col_type(v2), col_type(v3)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = v0; + this->value[1] = v1; + this->value[2] = v2; + this->value[3] = v3; +# endif + } + + // -- Conversion constructors -- + + template + template< + typename X0, typename Y0, + typename X1, typename Y1, + typename X2, typename Y2, + typename X3, typename Y3> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat + ( + X0 x0, Y0 y0, + X1 x1, Y1 y1, + X2 x2, Y2 y2, + X3 x3, Y3 y3 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(x0, y0), col_type(x1, y1), col_type(x2, 
y2), col_type(x3, y3)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x0, y0); + this->value[1] = col_type(x1, y1); + this->value[2] = col_type(x2, y2); + this->value[3] = col_type(x3, y3); +# endif + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(vec<2, V0, Q> const& v0, vec<2, V1, Q> const& v1, vec<2, V2, Q> const& v2, vec<2, V3, Q> const& v3) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v0), col_type(v1), col_type(v2), col_type(v3)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(v0); + this->value[1] = col_type(v1); + this->value[2] = col_type(v2); + this->value[3] = col_type(v3); +# endif + } + + // -- Conversion -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<4, 2, U, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); + this->value[3] = col_type(m[3]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<2, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(0), col_type(0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(0); + this->value[3] = col_type(0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<3, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); + this->value[3] = col_type(0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<4, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); + this->value[3] = col_type(m[3]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<2, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(0), col_type(0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(0); + this->value[3] = col_type(0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<3, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); + this->value[3] = col_type(0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<2, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(0), col_type(0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(0); + this->value[3] = col_type(0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, 
Q>::mat(mat<4, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); + this->value[3] = col_type(m[3]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<3, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); + this->value[3] = col_type(0); +# endif + } + + // -- Accesses -- + + template + GLM_FUNC_QUALIFIER typename mat<4, 2, T, Q>::col_type & mat<4, 2, T, Q>::operator[](typename mat<4, 2, T, Q>::length_type i) + { + assert(i < this->length()); + return this->value[i]; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<4, 2, T, Q>::col_type const& mat<4, 2, T, Q>::operator[](typename mat<4, 2, T, Q>::length_type i) const + { + assert(i < this->length()); + return this->value[i]; + } + + // -- Unary updatable operators -- + + template + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q>& mat<4, 2, T, Q>::operator=(mat<4, 2, U, Q> const& m) + { + this->value[0] = m[0]; + this->value[1] = m[1]; + this->value[2] = m[2]; + this->value[3] = m[3]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator+=(U s) + { + this->value[0] += s; + this->value[1] += s; + this->value[2] += s; + this->value[3] += s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator+=(mat<4, 2, U, Q> const& m) + { + this->value[0] += m[0]; + this->value[1] += m[1]; + this->value[2] += m[2]; + this->value[3] += m[3]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator-=(U s) + { + this->value[0] -= s; + this->value[1] -= s; + this->value[2] -= s; + this->value[3] -= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator-=(mat<4, 2, U, Q> const& m) + { + this->value[0] -= m[0]; + this->value[1] -= m[1]; + this->value[2] -= m[2]; + this->value[3] -= m[3]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator*=(U s) + { + this->value[0] *= s; + this->value[1] *= s; + this->value[2] *= s; + this->value[3] *= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator/=(U s) + { + this->value[0] /= s; + this->value[1] /= s; + this->value[2] /= s; + this->value[3] /= s; + return *this; + } + + // -- Increment and decrement operators -- + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator++() + { + ++this->value[0]; + ++this->value[1]; + ++this->value[2]; + ++this->value[3]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator--() + { + --this->value[0]; + --this->value[1]; + --this->value[2]; + --this->value[3]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> mat<4, 2, T, Q>::operator++(int) + { + mat<4, 2, T, Q> Result(*this); + ++*this; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> mat<4, 2, T, Q>::operator--(int) + { + mat<4, 2, T, Q> Result(*this); + --*this; + return Result; + } + + // -- Unary arithmetic 
operators -- + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator+(mat<4, 2, T, Q> const& m) + { + return m; + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator-(mat<4, 2, T, Q> const& m) + { + return mat<4, 2, T, Q>( + -m[0], + -m[1], + -m[2], + -m[3]); + } + + // -- Binary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator+(mat<4, 2, T, Q> const& m, T scalar) + { + return mat<4, 2, T, Q>( + m[0] + scalar, + m[1] + scalar, + m[2] + scalar, + m[3] + scalar); + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator+(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2) + { + return mat<4, 2, T, Q>( + m1[0] + m2[0], + m1[1] + m2[1], + m1[2] + m2[2], + m1[3] + m2[3]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator-(mat<4, 2, T, Q> const& m, T scalar) + { + return mat<4, 2, T, Q>( + m[0] - scalar, + m[1] - scalar, + m[2] - scalar, + m[3] - scalar); + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator-(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2) + { + return mat<4, 2, T, Q>( + m1[0] - m2[0], + m1[1] - m2[1], + m1[2] - m2[2], + m1[3] - m2[3]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator*(mat<4, 2, T, Q> const& m, T scalar) + { + return mat<4, 2, T, Q>( + m[0] * scalar, + m[1] * scalar, + m[2] * scalar, + m[3] * scalar); + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator*(T scalar, mat<4, 2, T, Q> const& m) + { + return mat<4, 2, T, Q>( + m[0] * scalar, + m[1] * scalar, + m[2] * scalar, + m[3] * scalar); + } + + template + GLM_FUNC_QUALIFIER typename mat<4, 2, T, Q>::col_type operator*(mat<4, 2, T, Q> const& m, typename mat<4, 2, T, Q>::row_type const& v) + { + return typename mat<4, 2, T, Q>::col_type( + m[0][0] * v.x + m[1][0] * v.y + m[2][0] * v.z + m[3][0] * v.w, + m[0][1] * v.x + m[1][1] * v.y + m[2][1] * v.z + m[3][1] * v.w); + } + + template + GLM_FUNC_QUALIFIER typename mat<4, 2, T, Q>::row_type operator*(typename mat<4, 2, T, Q>::col_type const& v, mat<4, 2, T, Q> const& m) + { + return typename mat<4, 2, T, Q>::row_type( + v.x * m[0][0] + v.y * m[0][1], + v.x * m[1][0] + v.y * m[1][1], + v.x * m[2][0] + v.y * m[2][1], + v.x * m[3][0] + v.y * m[3][1]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator*(mat<4, 2, T, Q> const& m1, mat<2, 4, T, Q> const& m2) + { + T const SrcA00 = m1[0][0]; + T const SrcA01 = m1[0][1]; + T const SrcA10 = m1[1][0]; + T const SrcA11 = m1[1][1]; + T const SrcA20 = m1[2][0]; + T const SrcA21 = m1[2][1]; + T const SrcA30 = m1[3][0]; + T const SrcA31 = m1[3][1]; + + T const SrcB00 = m2[0][0]; + T const SrcB01 = m2[0][1]; + T const SrcB02 = m2[0][2]; + T const SrcB03 = m2[0][3]; + T const SrcB10 = m2[1][0]; + T const SrcB11 = m2[1][1]; + T const SrcB12 = m2[1][2]; + T const SrcB13 = m2[1][3]; + + mat<2, 2, T, Q> Result; + Result[0][0] = SrcA00 * SrcB00 + SrcA10 * SrcB01 + SrcA20 * SrcB02 + SrcA30 * SrcB03; + Result[0][1] = SrcA01 * SrcB00 + SrcA11 * SrcB01 + SrcA21 * SrcB02 + SrcA31 * SrcB03; + Result[1][0] = SrcA00 * SrcB10 + SrcA10 * SrcB11 + SrcA20 * SrcB12 + SrcA30 * SrcB13; + Result[1][1] = SrcA01 * SrcB10 + SrcA11 * SrcB11 + SrcA21 * SrcB12 + SrcA31 * SrcB13; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator*(mat<4, 2, T, Q> const& m1, mat<3, 4, T, Q> const& m2) + { + return mat<3, 2, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2] + m1[3][0] * m2[0][3], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2] + m1[3][1] * m2[0][3], + m1[0][0] * m2[1][0] + 
m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2] + m1[3][0] * m2[1][3], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2] + m1[3][1] * m2[1][3], + m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2] + m1[3][0] * m2[2][3], + m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2] + m1[3][1] * m2[2][3]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator*(mat<4, 2, T, Q> const& m1, mat<4, 4, T, Q> const& m2) + { + return mat<4, 2, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2] + m1[3][0] * m2[0][3], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2] + m1[3][1] * m2[0][3], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2] + m1[3][0] * m2[1][3], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2] + m1[3][1] * m2[1][3], + m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2] + m1[3][0] * m2[2][3], + m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2] + m1[3][1] * m2[2][3], + m1[0][0] * m2[3][0] + m1[1][0] * m2[3][1] + m1[2][0] * m2[3][2] + m1[3][0] * m2[3][3], + m1[0][1] * m2[3][0] + m1[1][1] * m2[3][1] + m1[2][1] * m2[3][2] + m1[3][1] * m2[3][3]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator/(mat<4, 2, T, Q> const& m, T scalar) + { + return mat<4, 2, T, Q>( + m[0] / scalar, + m[1] / scalar, + m[2] / scalar, + m[3] / scalar); + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator/(T scalar, mat<4, 2, T, Q> const& m) + { + return mat<4, 2, T, Q>( + scalar / m[0], + scalar / m[1], + scalar / m[2], + scalar / m[3]); + } + + // -- Boolean operators -- + + template + GLM_FUNC_QUALIFIER bool operator==(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2) + { + return (m1[0] == m2[0]) && (m1[1] == m2[1]) && (m1[2] == m2[2]) && (m1[3] == m2[3]); + } + + template + GLM_FUNC_QUALIFIER bool operator!=(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2) + { + return (m1[0] != m2[0]) || (m1[1] != m2[1]) || (m1[2] != m2[2]) || (m1[3] != m2[3]); + } +} //namespace glm diff --git a/MacroLibX/third_party/glm/detail/type_mat4x3.hpp b/MacroLibX/third_party/glm/detail/type_mat4x3.hpp new file mode 100644 index 0000000..16e4270 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat4x3.hpp @@ -0,0 +1,171 @@ +/// @ref core +/// @file glm/detail/type_mat4x3.hpp + +#pragma once + +#include "type_vec3.hpp" +#include "type_vec4.hpp" +#include +#include + +namespace glm +{ + template + struct mat<4, 3, T, Q> + { + typedef vec<3, T, Q> col_type; + typedef vec<4, T, Q> row_type; + typedef mat<4, 3, T, Q> type; + typedef mat<3, 4, T, Q> transpose_type; + typedef T value_type; + + private: + col_type value[4]; + + public: + // -- Accesses -- + + typedef length_t length_type; + GLM_FUNC_DECL static GLM_CONSTEXPR length_type length() { return 4; } + + GLM_FUNC_DECL col_type & operator[](length_type i); + GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; + + // -- Constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; + template + GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<4, 3, T, P> const& m); + + GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T const& x); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + T const& x0, T const& y0, T const& z0, + T const& x1, T const& y1, T const& z1, + T const& x2, T const& y2, T const& z2, + T const& x3, T const& y3, T const& z3); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + col_type const& v0, + col_type const& v1, + col_type const& v2, + col_type const& v3); + + // -- Conversions -- + + template< + typename X1, 
typename Y1, typename Z1, + typename X2, typename Y2, typename Z2, + typename X3, typename Y3, typename Z3, + typename X4, typename Y4, typename Z4> + GLM_FUNC_DECL GLM_CONSTEXPR mat( + X1 const& x1, Y1 const& y1, Z1 const& z1, + X2 const& x2, Y2 const& y2, Z2 const& z2, + X3 const& x3, Y3 const& y3, Z3 const& z3, + X4 const& x4, Y4 const& y4, Z4 const& z4); + + template + GLM_FUNC_DECL GLM_CONSTEXPR mat( + vec<3, V1, Q> const& v1, + vec<3, V2, Q> const& v2, + vec<3, V3, Q> const& v3, + vec<3, V4, Q> const& v4); + + // -- Matrix conversions -- + + template + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, U, P> const& m); + + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_DECL mat<4, 3, T, Q> & operator=(mat<4, 3, U, Q> const& m); + template + GLM_FUNC_DECL mat<4, 3, T, Q> & operator+=(U s); + template + GLM_FUNC_DECL mat<4, 3, T, Q> & operator+=(mat<4, 3, U, Q> const& m); + template + GLM_FUNC_DECL mat<4, 3, T, Q> & operator-=(U s); + template + GLM_FUNC_DECL mat<4, 3, T, Q> & operator-=(mat<4, 3, U, Q> const& m); + template + GLM_FUNC_DECL mat<4, 3, T, Q> & operator*=(U s); + template + GLM_FUNC_DECL mat<4, 3, T, Q> & operator/=(U s); + + // -- Increment and decrement operators -- + + GLM_FUNC_DECL mat<4, 3, T, Q>& operator++(); + GLM_FUNC_DECL mat<4, 3, T, Q>& operator--(); + GLM_FUNC_DECL mat<4, 3, T, Q> operator++(int); + GLM_FUNC_DECL mat<4, 3, T, Q> operator--(int); + }; + + // -- Unary operators -- + + template + GLM_FUNC_DECL mat<4, 3, T, Q> operator+(mat<4, 3, T, Q> const& m); + + template + GLM_FUNC_DECL mat<4, 3, T, Q> operator-(mat<4, 3, T, Q> const& m); + + // -- Binary operators -- + + template + GLM_FUNC_DECL mat<4, 3, T, Q> operator+(mat<4, 3, T, Q> const& m, T const& s); + + template + GLM_FUNC_DECL mat<4, 3, T, Q> operator+(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 3, T, Q> operator-(mat<4, 3, T, Q> const& m, T const& s); + + template + GLM_FUNC_DECL mat<4, 3, T, Q> operator-(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 3, T, Q> operator*(mat<4, 3, T, Q> const& m, T const& s); + + template + GLM_FUNC_DECL mat<4, 3, T, Q> operator*(T const& s, mat<4, 3, T, Q> const& m); + + template + GLM_FUNC_DECL typename mat<4, 3, T, Q>::col_type operator*(mat<4, 3, T, Q> const& m, typename mat<4, 3, T, Q>::row_type const& v); + + template + GLM_FUNC_DECL typename mat<4, 3, T, Q>::row_type operator*(typename mat<4, 3, T, Q>::col_type const& v, mat<4, 3, T, Q> const& m); + + template + GLM_FUNC_DECL mat<2, 3, T, Q> operator*(mat<4, 3, T, Q> const& m1, mat<2, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 3, T, Q> operator*(mat<4, 3, T, Q> const& m1, mat<3, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 3, T, Q> operator*(mat<4, 3, T, Q> const& m1, mat<4, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 3, T, Q> operator/(mat<4, 3, T, Q> const& m, T const& s); + + 
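The operator* overloads declared just above encode GLM's column-major shape rule: mat<C, R> stores C columns of R components, so a mat<C1, R> multiplied by a mat<C2, C1> yields a mat<C2, R>. A minimal sketch of what these declarations permit, assuming the vendored umbrella header <glm/glm.hpp> is on the include path (the variable names are illustrative, not from the source):

#include <glm/glm.hpp>

int main()
{
    glm::mat4x3 a(1.0f);   // mat<4, 3>: 4 columns, 3 rows; the scalar ctor fills the diagonal
    glm::mat2x4 b(1.0f);   // mat<2, 4>: 2 columns, 4 rows

    glm::mat2x3 c = a * b;                                // mat<4,3> * mat<2,4> -> mat<2,3>
    glm::vec3 col1 = c[1];                                // operator[] returns a column (col_type)
    glm::vec3 v = a * glm::vec4(1.0f, 0.0f, 0.0f, 1.0f);  // m * row_type -> col_type
    glm::mat4x3 half = a / 2.0f;                          // scalar ops apply per column

    (void)col1; (void)v; (void)half;
    return 0;
}

Note the asymmetry: with the vector on the left, v * m treats v as a column and returns a row_type, which is why the two vector overloads of operator* are declared separately above.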
template + GLM_FUNC_DECL mat<4, 3, T, Q> operator/(T const& s, mat<4, 3, T, Q> const& m); + + // -- Boolean operators -- + + template + GLM_FUNC_DECL bool operator==(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2); + + template + GLM_FUNC_DECL bool operator!=(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2); +}//namespace glm + +#ifndef GLM_EXTERNAL_TEMPLATE +#include "type_mat4x3.inl" +#endif //GLM_EXTERNAL_TEMPLATE diff --git a/MacroLibX/third_party/glm/detail/type_mat4x3.inl b/MacroLibX/third_party/glm/detail/type_mat4x3.inl new file mode 100644 index 0000000..11b1ee3 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat4x3.inl @@ -0,0 +1,598 @@ +namespace glm +{ + // -- Constructors -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat() +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST + : value{col_type(1, 0, 0), col_type(0, 1, 0), col_type(0, 0, 1), col_type(0, 0, 0)} +# endif + { +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION + this->value[0] = col_type(1, 0, 0); + this->value[1] = col_type(0, 1, 0); + this->value[2] = col_type(0, 0, 1); + this->value[3] = col_type(0, 0, 0); +# endif + } +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<4, 3, T, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = m[0]; + this->value[1] = m[1]; + this->value[2] = m[2]; + this->value[3] = m[3]; +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(T const& s) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(s, 0, 0), col_type(0, s, 0), col_type(0, 0, s), col_type(0, 0, 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(s, 0, 0); + this->value[1] = col_type(0, s, 0); + this->value[2] = col_type(0, 0, s); + this->value[3] = col_type(0, 0, 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat + ( + T const& x0, T const& y0, T const& z0, + T const& x1, T const& y1, T const& z1, + T const& x2, T const& y2, T const& z2, + T const& x3, T const& y3, T const& z3 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(x0, y0, z0), col_type(x1, y1, z1), col_type(x2, y2, z2), col_type(x3, y3, z3)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x0, y0, z0); + this->value[1] = col_type(x1, y1, z1); + this->value[2] = col_type(x2, y2, z2); + this->value[3] = col_type(x3, y3, z3); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(col_type const& v0, col_type const& v1, col_type const& v2, col_type const& v3) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v0), col_type(v1), col_type(v2), col_type(v3)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = v0; + this->value[1] = v1; + this->value[2] = v2; + this->value[3] = v3; +# endif + } + + // -- Conversion constructors -- + + template + template< + typename X0, typename Y0, typename Z0, + typename X1, typename Y1, typename Z1, + typename X2, typename Y2, typename Z2, + typename X3, typename Y3, typename Z3> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat + ( + X0 const& x0, Y0 const& y0, Z0 const& z0, + X1 const& x1, Y1 const& y1, Z1 const& z1, + X2 const& x2, Y2 const& y2, Z2 const& z2, + X3 const& x3, Y3 const& y3, Z3 const& z3 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(x0, y0, z0), col_type(x1, y1, z1), 
col_type(x2, y2, z2), col_type(x3, y3, z3)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x0, y0, z0); + this->value[1] = col_type(x1, y1, z1); + this->value[2] = col_type(x2, y2, z2); + this->value[3] = col_type(x3, y3, z3); +# endif + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(vec<3, V1, Q> const& v1, vec<3, V2, Q> const& v2, vec<3, V3, Q> const& v3, vec<3, V4, Q> const& v4) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v1), col_type(v2), col_type(v3), col_type(v4)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(v1); + this->value[1] = col_type(v2); + this->value[2] = col_type(v3); + this->value[3] = col_type(v4); +# endif + } + + // -- Matrix conversions -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<4, 3, U, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); + this->value[3] = col_type(m[3]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<2, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0), col_type(0, 0, 1), col_type(0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); + this->value[2] = col_type(0, 0, 1); + this->value[3] = col_type(0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<3, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); + this->value[3] = col_type(0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<4, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); + this->value[3] = col_type(m[3]); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<2, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(0, 0, 1), col_type(0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(0, 0, 1); + this->value[3] = col_type(0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<3, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 1), col_type(0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); + this->value[2] = col_type(m[2], 1); + this->value[3] = col_type(0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<2, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(0, 0, 1), col_type(0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(0, 0, 1); + 
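+		// the fourth column has no source data in the 2x4 matrix, so it is zero-filled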
this->value[3] = col_type(0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<4, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 1), col_type(m[3], 0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); + this->value[2] = col_type(m[2], 1); + this->value[3] = col_type(m[3], 0); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<3, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(0)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); + this->value[3] = col_type(0); +# endif + } + + // -- Accesses -- + + template + GLM_FUNC_QUALIFIER typename mat<4, 3, T, Q>::col_type & mat<4, 3, T, Q>::operator[](typename mat<4, 3, T, Q>::length_type i) + { + assert(i < this->length()); + return this->value[i]; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<4, 3, T, Q>::col_type const& mat<4, 3, T, Q>::operator[](typename mat<4, 3, T, Q>::length_type i) const + { + assert(i < this->length()); + return this->value[i]; + } + + // -- Unary updatable operators -- + + template + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q>& mat<4, 3, T, Q>::operator=(mat<4, 3, U, Q> const& m) + { + this->value[0] = m[0]; + this->value[1] = m[1]; + this->value[2] = m[2]; + this->value[3] = m[3]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator+=(U s) + { + this->value[0] += s; + this->value[1] += s; + this->value[2] += s; + this->value[3] += s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator+=(mat<4, 3, U, Q> const& m) + { + this->value[0] += m[0]; + this->value[1] += m[1]; + this->value[2] += m[2]; + this->value[3] += m[3]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator-=(U s) + { + this->value[0] -= s; + this->value[1] -= s; + this->value[2] -= s; + this->value[3] -= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator-=(mat<4, 3, U, Q> const& m) + { + this->value[0] -= m[0]; + this->value[1] -= m[1]; + this->value[2] -= m[2]; + this->value[3] -= m[3]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator*=(U s) + { + this->value[0] *= s; + this->value[1] *= s; + this->value[2] *= s; + this->value[3] *= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator/=(U s) + { + this->value[0] /= s; + this->value[1] /= s; + this->value[2] /= s; + this->value[3] /= s; + return *this; + } + + // -- Increment and decrement operators -- + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator++() + { + ++this->value[0]; + ++this->value[1]; + ++this->value[2]; + ++this->value[3]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator--() + { + --this->value[0]; + --this->value[1]; + --this->value[2]; + --this->value[3]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> mat<4, 3, T, Q>::operator++(int) + { + mat<4, 3, T, Q> Result(*this); + ++*this; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> 
mat<4, 3, T, Q>::operator--(int) + { + mat<4, 3, T, Q> Result(*this); + --*this; + return Result; + } + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator+(mat<4, 3, T, Q> const& m) + { + return m; + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator-(mat<4, 3, T, Q> const& m) + { + return mat<4, 3, T, Q>( + -m[0], + -m[1], + -m[2], + -m[3]); + } + + // -- Binary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator+(mat<4, 3, T, Q> const& m, T const& s) + { + return mat<4, 3, T, Q>( + m[0] + s, + m[1] + s, + m[2] + s, + m[3] + s); + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator+(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2) + { + return mat<4, 3, T, Q>( + m1[0] + m2[0], + m1[1] + m2[1], + m1[2] + m2[2], + m1[3] + m2[3]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator-(mat<4, 3, T, Q> const& m, T const& s) + { + return mat<4, 3, T, Q>( + m[0] - s, + m[1] - s, + m[2] - s, + m[3] - s); + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator-(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2) + { + return mat<4, 3, T, Q>( + m1[0] - m2[0], + m1[1] - m2[1], + m1[2] - m2[2], + m1[3] - m2[3]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator*(mat<4, 3, T, Q> const& m, T const& s) + { + return mat<4, 3, T, Q>( + m[0] * s, + m[1] * s, + m[2] * s, + m[3] * s); + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator*(T const& s, mat<4, 3, T, Q> const& m) + { + return mat<4, 3, T, Q>( + m[0] * s, + m[1] * s, + m[2] * s, + m[3] * s); + } + + template + GLM_FUNC_QUALIFIER typename mat<4, 3, T, Q>::col_type operator* + ( + mat<4, 3, T, Q> const& m, + typename mat<4, 3, T, Q>::row_type const& v) + { + return typename mat<4, 3, T, Q>::col_type( + m[0][0] * v.x + m[1][0] * v.y + m[2][0] * v.z + m[3][0] * v.w, + m[0][1] * v.x + m[1][1] * v.y + m[2][1] * v.z + m[3][1] * v.w, + m[0][2] * v.x + m[1][2] * v.y + m[2][2] * v.z + m[3][2] * v.w); + } + + template + GLM_FUNC_QUALIFIER typename mat<4, 3, T, Q>::row_type operator* + ( + typename mat<4, 3, T, Q>::col_type const& v, + mat<4, 3, T, Q> const& m) + { + return typename mat<4, 3, T, Q>::row_type( + v.x * m[0][0] + v.y * m[0][1] + v.z * m[0][2], + v.x * m[1][0] + v.y * m[1][1] + v.z * m[1][2], + v.x * m[2][0] + v.y * m[2][1] + v.z * m[2][2], + v.x * m[3][0] + v.y * m[3][1] + v.z * m[3][2]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator*(mat<4, 3, T, Q> const& m1, mat<2, 4, T, Q> const& m2) + { + return mat<2, 3, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2] + m1[3][0] * m2[0][3], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2] + m1[3][1] * m2[0][3], + m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2] + m1[3][2] * m2[0][3], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2] + m1[3][0] * m2[1][3], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2] + m1[3][1] * m2[1][3], + m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2] + m1[3][2] * m2[1][3]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator*(mat<4, 3, T, Q> const& m1, mat<3, 4, T, Q> const& m2) + { + T const SrcA00 = m1[0][0]; + T const SrcA01 = m1[0][1]; + T const SrcA02 = m1[0][2]; + T const SrcA10 = m1[1][0]; + T const SrcA11 = m1[1][1]; + T const SrcA12 = m1[1][2]; + T const SrcA20 = m1[2][0]; + T const SrcA21 = m1[2][1]; + T const SrcA22 = m1[2][2]; + T const SrcA30 = m1[3][0]; + T const SrcA31 = m1[3][1]; + T const 
SrcA32 = m1[3][2]; + + T const SrcB00 = m2[0][0]; + T const SrcB01 = m2[0][1]; + T const SrcB02 = m2[0][2]; + T const SrcB03 = m2[0][3]; + T const SrcB10 = m2[1][0]; + T const SrcB11 = m2[1][1]; + T const SrcB12 = m2[1][2]; + T const SrcB13 = m2[1][3]; + T const SrcB20 = m2[2][0]; + T const SrcB21 = m2[2][1]; + T const SrcB22 = m2[2][2]; + T const SrcB23 = m2[2][3]; + + mat<3, 3, T, Q> Result; + Result[0][0] = SrcA00 * SrcB00 + SrcA10 * SrcB01 + SrcA20 * SrcB02 + SrcA30 * SrcB03; + Result[0][1] = SrcA01 * SrcB00 + SrcA11 * SrcB01 + SrcA21 * SrcB02 + SrcA31 * SrcB03; + Result[0][2] = SrcA02 * SrcB00 + SrcA12 * SrcB01 + SrcA22 * SrcB02 + SrcA32 * SrcB03; + Result[1][0] = SrcA00 * SrcB10 + SrcA10 * SrcB11 + SrcA20 * SrcB12 + SrcA30 * SrcB13; + Result[1][1] = SrcA01 * SrcB10 + SrcA11 * SrcB11 + SrcA21 * SrcB12 + SrcA31 * SrcB13; + Result[1][2] = SrcA02 * SrcB10 + SrcA12 * SrcB11 + SrcA22 * SrcB12 + SrcA32 * SrcB13; + Result[2][0] = SrcA00 * SrcB20 + SrcA10 * SrcB21 + SrcA20 * SrcB22 + SrcA30 * SrcB23; + Result[2][1] = SrcA01 * SrcB20 + SrcA11 * SrcB21 + SrcA21 * SrcB22 + SrcA31 * SrcB23; + Result[2][2] = SrcA02 * SrcB20 + SrcA12 * SrcB21 + SrcA22 * SrcB22 + SrcA32 * SrcB23; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator*(mat<4, 3, T, Q> const& m1, mat<4, 4, T, Q> const& m2) + { + return mat<4, 3, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2] + m1[3][0] * m2[0][3], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2] + m1[3][1] * m2[0][3], + m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2] + m1[3][2] * m2[0][3], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2] + m1[3][0] * m2[1][3], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2] + m1[3][1] * m2[1][3], + m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2] + m1[3][2] * m2[1][3], + m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2] + m1[3][0] * m2[2][3], + m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2] + m1[3][1] * m2[2][3], + m1[0][2] * m2[2][0] + m1[1][2] * m2[2][1] + m1[2][2] * m2[2][2] + m1[3][2] * m2[2][3], + m1[0][0] * m2[3][0] + m1[1][0] * m2[3][1] + m1[2][0] * m2[3][2] + m1[3][0] * m2[3][3], + m1[0][1] * m2[3][0] + m1[1][1] * m2[3][1] + m1[2][1] * m2[3][2] + m1[3][1] * m2[3][3], + m1[0][2] * m2[3][0] + m1[1][2] * m2[3][1] + m1[2][2] * m2[3][2] + m1[3][2] * m2[3][3]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator/(mat<4, 3, T, Q> const& m, T const& s) + { + return mat<4, 3, T, Q>( + m[0] / s, + m[1] / s, + m[2] / s, + m[3] / s); + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator/(T const& s, mat<4, 3, T, Q> const& m) + { + return mat<4, 3, T, Q>( + s / m[0], + s / m[1], + s / m[2], + s / m[3]); + } + + // -- Boolean operators -- + + template + GLM_FUNC_QUALIFIER bool operator==(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2) + { + return (m1[0] == m2[0]) && (m1[1] == m2[1]) && (m1[2] == m2[2]) && (m1[3] == m2[3]); + } + + template + GLM_FUNC_QUALIFIER bool operator!=(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2) + { + return (m1[0] != m2[0]) || (m1[1] != m2[1]) || (m1[2] != m2[2]) || (m1[3] != m2[3]); + } +} //namespace glm diff --git a/MacroLibX/third_party/glm/detail/type_mat4x4.hpp b/MacroLibX/third_party/glm/detail/type_mat4x4.hpp new file mode 100644 index 0000000..3517f9f --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat4x4.hpp @@ -0,0 +1,189 @@ +/// @ref core +/// @file glm/detail/type_mat4x4.hpp + +#pragma once + 
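Before the declarations that follow: mat<4, 4> is the one square case in this family, so on top of the elementwise operators it also declares matrix–matrix compound assignment (operator*=, operator/=) and matrix division, which GLM core defines as multiplication by the inverse. A short usage sketch under that reading, assuming <glm/glm.hpp>; `model` and `offset` are illustrative names:

#include <glm/glm.hpp>

int main()
{
    glm::mat4 model(1.0f);                          // 4x4 identity
    glm::mat4 offset(1.0f);
    offset[3] = glm::vec4(1.0f, 2.0f, 3.0f, 1.0f);  // write the translation column

    model *= offset;                                // operator*=(mat<4, 4, U, Q> const&)
    glm::mat4 back = model / offset;                // m1 / m2 == m1 * inverse(m2), identity here

    glm::vec4 p = back * glm::vec4(0.0f, 0.0f, 0.0f, 1.0f);  // maps the origin to itself
    (void)p;
    return 0;
}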
+#include "type_vec4.hpp" +#include +#include + +namespace glm +{ + template + struct mat<4, 4, T, Q> + { + typedef vec<4, T, Q> col_type; + typedef vec<4, T, Q> row_type; + typedef mat<4, 4, T, Q> type; + typedef mat<4, 4, T, Q> transpose_type; + typedef T value_type; + + private: + col_type value[4]; + + public: + // -- Accesses -- + + typedef length_t length_type; + GLM_FUNC_DECL static GLM_CONSTEXPR length_type length(){return 4;} + + GLM_FUNC_DECL col_type & operator[](length_type i); + GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; + + // -- Constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; + template + GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<4, 4, T, P> const& m); + + GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T const& x); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + T const& x0, T const& y0, T const& z0, T const& w0, + T const& x1, T const& y1, T const& z1, T const& w1, + T const& x2, T const& y2, T const& z2, T const& w2, + T const& x3, T const& y3, T const& z3, T const& w3); + GLM_FUNC_DECL GLM_CONSTEXPR mat( + col_type const& v0, + col_type const& v1, + col_type const& v2, + col_type const& v3); + + // -- Conversions -- + + template< + typename X1, typename Y1, typename Z1, typename W1, + typename X2, typename Y2, typename Z2, typename W2, + typename X3, typename Y3, typename Z3, typename W3, + typename X4, typename Y4, typename Z4, typename W4> + GLM_FUNC_DECL GLM_CONSTEXPR mat( + X1 const& x1, Y1 const& y1, Z1 const& z1, W1 const& w1, + X2 const& x2, Y2 const& y2, Z2 const& z2, W2 const& w2, + X3 const& x3, Y3 const& y3, Z3 const& z3, W3 const& w3, + X4 const& x4, Y4 const& y4, Z4 const& z4, W4 const& w4); + + template + GLM_FUNC_DECL GLM_CONSTEXPR mat( + vec<4, V1, Q> const& v1, + vec<4, V2, Q> const& v2, + vec<4, V3, Q> const& v3, + vec<4, V4, Q> const& v4); + + // -- Matrix conversions -- + + template + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, U, P> const& m); + + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, T, Q> const& x); + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_DECL mat<4, 4, T, Q> & operator=(mat<4, 4, U, Q> const& m); + template + GLM_FUNC_DECL mat<4, 4, T, Q> & operator+=(U s); + template + GLM_FUNC_DECL mat<4, 4, T, Q> & operator+=(mat<4, 4, U, Q> const& m); + template + GLM_FUNC_DECL mat<4, 4, T, Q> & operator-=(U s); + template + GLM_FUNC_DECL mat<4, 4, T, Q> & operator-=(mat<4, 4, U, Q> const& m); + template + GLM_FUNC_DECL mat<4, 4, T, Q> & operator*=(U s); + template + GLM_FUNC_DECL mat<4, 4, T, Q> & operator*=(mat<4, 4, U, Q> const& m); + template + GLM_FUNC_DECL mat<4, 4, T, Q> & operator/=(U s); + template + GLM_FUNC_DECL mat<4, 4, T, Q> & operator/=(mat<4, 4, U, Q> const& m); + + // -- Increment and decrement operators -- + + GLM_FUNC_DECL mat<4, 4, T, Q> & operator++(); + GLM_FUNC_DECL mat<4, 4, T, Q> & operator--(); + GLM_FUNC_DECL mat<4, 4, T, Q> operator++(int); + GLM_FUNC_DECL mat<4, 4, T, Q> operator--(int); + }; + + // -- Unary operators -- + + template + 
GLM_FUNC_DECL mat<4, 4, T, Q> operator+(mat<4, 4, T, Q> const& m); + + template + GLM_FUNC_DECL mat<4, 4, T, Q> operator-(mat<4, 4, T, Q> const& m); + + // -- Binary operators -- + + template + GLM_FUNC_DECL mat<4, 4, T, Q> operator+(mat<4, 4, T, Q> const& m, T const& s); + + template + GLM_FUNC_DECL mat<4, 4, T, Q> operator+(T const& s, mat<4, 4, T, Q> const& m); + + template + GLM_FUNC_DECL mat<4, 4, T, Q> operator+(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 4, T, Q> operator-(mat<4, 4, T, Q> const& m, T const& s); + + template + GLM_FUNC_DECL mat<4, 4, T, Q> operator-(T const& s, mat<4, 4, T, Q> const& m); + + template + GLM_FUNC_DECL mat<4, 4, T, Q> operator-(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 4, T, Q> operator*(mat<4, 4, T, Q> const& m, T const& s); + + template + GLM_FUNC_DECL mat<4, 4, T, Q> operator*(T const& s, mat<4, 4, T, Q> const& m); + + template + GLM_FUNC_DECL typename mat<4, 4, T, Q>::col_type operator*(mat<4, 4, T, Q> const& m, typename mat<4, 4, T, Q>::row_type const& v); + + template + GLM_FUNC_DECL typename mat<4, 4, T, Q>::row_type operator*(typename mat<4, 4, T, Q>::col_type const& v, mat<4, 4, T, Q> const& m); + + template + GLM_FUNC_DECL mat<2, 4, T, Q> operator*(mat<4, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<3, 4, T, Q> operator*(mat<4, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 4, T, Q> operator*(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL mat<4, 4, T, Q> operator/(mat<4, 4, T, Q> const& m, T const& s); + + template + GLM_FUNC_DECL mat<4, 4, T, Q> operator/(T const& s, mat<4, 4, T, Q> const& m); + + template + GLM_FUNC_DECL typename mat<4, 4, T, Q>::col_type operator/(mat<4, 4, T, Q> const& m, typename mat<4, 4, T, Q>::row_type const& v); + + template + GLM_FUNC_DECL typename mat<4, 4, T, Q>::row_type operator/(typename mat<4, 4, T, Q>::col_type const& v, mat<4, 4, T, Q> const& m); + + template + GLM_FUNC_DECL mat<4, 4, T, Q> operator/(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2); + + // -- Boolean operators -- + + template + GLM_FUNC_DECL bool operator==(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2); + + template + GLM_FUNC_DECL bool operator!=(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2); +}//namespace glm + +#ifndef GLM_EXTERNAL_TEMPLATE +#include "type_mat4x4.inl" +#endif//GLM_EXTERNAL_TEMPLATE diff --git a/MacroLibX/third_party/glm/detail/type_mat4x4.inl b/MacroLibX/third_party/glm/detail/type_mat4x4.inl new file mode 100644 index 0000000..e38b87f --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat4x4.inl @@ -0,0 +1,706 @@ +#include "../matrix.hpp" + +namespace glm +{ + // -- Constructors -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat() +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST + : value{col_type(1, 0, 0, 0), col_type(0, 1, 0, 0), col_type(0, 0, 1, 0), col_type(0, 0, 0, 1)} +# endif + { +# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION + this->value[0] = col_type(1, 0, 0, 0); + this->value[1] = col_type(0, 1, 0, 0); + this->value[2] = col_type(0, 0, 1, 0); + this->value[3] = col_type(0, 0, 0, 1); +# endif + } +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<4, 4, T, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), 
col_type(m[2]), col_type(m[3])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = m[0]; + this->value[1] = m[1]; + this->value[2] = m[2]; + this->value[3] = m[3]; +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(T const& s) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(s, 0, 0, 0), col_type(0, s, 0, 0), col_type(0, 0, s, 0), col_type(0, 0, 0, s)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(s, 0, 0, 0); + this->value[1] = col_type(0, s, 0, 0); + this->value[2] = col_type(0, 0, s, 0); + this->value[3] = col_type(0, 0, 0, s); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat + ( + T const& x0, T const& y0, T const& z0, T const& w0, + T const& x1, T const& y1, T const& z1, T const& w1, + T const& x2, T const& y2, T const& z2, T const& w2, + T const& x3, T const& y3, T const& z3, T const& w3 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{ + col_type(x0, y0, z0, w0), + col_type(x1, y1, z1, w1), + col_type(x2, y2, z2, w2), + col_type(x3, y3, z3, w3)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x0, y0, z0, w0); + this->value[1] = col_type(x1, y1, z1, w1); + this->value[2] = col_type(x2, y2, z2, w2); + this->value[3] = col_type(x3, y3, z3, w3); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(col_type const& v0, col_type const& v1, col_type const& v2, col_type const& v3) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v0), col_type(v1), col_type(v2), col_type(v3)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = v0; + this->value[1] = v1; + this->value[2] = v2; + this->value[3] = v3; +# endif + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<4, 4, U, P> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0]); + this->value[1] = col_type(m[1]); + this->value[2] = col_type(m[2]); + this->value[3] = col_type(m[3]); +# endif + } + + // -- Conversions -- + + template + template< + typename X1, typename Y1, typename Z1, typename W1, + typename X2, typename Y2, typename Z2, typename W2, + typename X3, typename Y3, typename Z3, typename W3, + typename X4, typename Y4, typename Z4, typename W4> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat + ( + X1 const& x1, Y1 const& y1, Z1 const& z1, W1 const& w1, + X2 const& x2, Y2 const& y2, Z2 const& z2, W2 const& w2, + X3 const& x3, Y3 const& y3, Z3 const& z3, W3 const& w3, + X4 const& x4, Y4 const& y4, Z4 const& z4, W4 const& w4 + ) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(x1, y1, z1, w1), col_type(x2, y2, z2, w2), col_type(x3, y3, z3, w3), col_type(x4, y4, z4, w4)} +# endif + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 1st parameter type invalid."); + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 2nd parameter type invalid."); + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 3rd parameter type invalid."); + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || 
std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 4th parameter type invalid."); + + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 5th parameter type invalid."); + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 6th parameter type invalid."); + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 7th parameter type invalid."); + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 8th parameter type invalid."); + + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 9th parameter type invalid."); + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 10th parameter type invalid."); + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 11th parameter type invalid."); + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 12th parameter type invalid."); + + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 13th parameter type invalid."); + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 14th parameter type invalid."); + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 15th parameter type invalid."); + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 16th parameter type invalid."); + +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(x1, y1, z1, w1); + this->value[1] = col_type(x2, y2, z2, w2); + this->value[2] = col_type(x3, y3, z3, w3); + this->value[3] = col_type(x4, y4, z4, w4); +# endif + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(vec<4, V1, Q> const& v1, vec<4, V2, Q> const& v2, vec<4, V3, Q> const& v3, vec<4, V4, Q> const& v4) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(v1), col_type(v2), col_type(v3), col_type(v4)} +# endif + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 1st parameter type invalid."); + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 
constructor only takes float and integer types, 2nd parameter type invalid."); + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 3rd parameter type invalid."); + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 4th parameter type invalid."); + +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(v1); + this->value[1] = col_type(v2); + this->value[2] = col_type(v3); + this->value[3] = col_type(v4); +# endif + } + + // -- Matrix conversions -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<2, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0), col_type(0, 0, 1, 0), col_type(0, 0, 0, 1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0, 0); + this->value[1] = col_type(m[1], 0, 0); + this->value[2] = col_type(0, 0, 1, 0); + this->value[3] = col_type(0, 0, 0, 1); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<3, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 0), col_type(0, 0, 0, 1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); + this->value[2] = col_type(m[2], 0); + this->value[3] = col_type(0, 0, 0, 1); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<2, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0), col_type(0, 0, 1, 0), col_type(0, 0, 0, 1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); + this->value[2] = col_type(0, 0, 1, 0); + this->value[3] = col_type(0, 0, 0, 1); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<3, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0), col_type(m[2], 1, 0), col_type(0, 0, 0, 1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0, 0); + this->value[1] = col_type(m[1], 0, 0); + this->value[2] = col_type(m[2], 1, 0); + this->value[3] = col_type(0, 0, 0, 1); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<2, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(0, 0, 1, 0), col_type(0, 0, 0, 1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = m[0]; + this->value[1] = m[1]; + this->value[2] = col_type(0, 0, 1, 0); + this->value[3] = col_type(0, 0, 0, 1); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<4, 2, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0), col_type(0, 0, 1, 0), col_type(0, 0, 0, 1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0, 0); + this->value[1] = col_type(m[1], 0, 0); + this->value[2] = col_type(0, 0, 1, 0); + this->value[3] = col_type(0, 0, 0, 1); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<3, 4, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(0, 0, 0, 1)} +# endif + { +# if 
!GLM_HAS_INITIALIZER_LISTS + this->value[0] = m[0]; + this->value[1] = m[1]; + this->value[2] = m[2]; + this->value[3] = col_type(0, 0, 0, 1); +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<4, 3, T, Q> const& m) +# if GLM_HAS_INITIALIZER_LISTS + : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 0), col_type(m[3], 1)} +# endif + { +# if !GLM_HAS_INITIALIZER_LISTS + this->value[0] = col_type(m[0], 0); + this->value[1] = col_type(m[1], 0); + this->value[2] = col_type(m[2], 0); + this->value[3] = col_type(m[3], 1); +# endif + } + + // -- Accesses -- + + template + GLM_FUNC_QUALIFIER typename mat<4, 4, T, Q>::col_type & mat<4, 4, T, Q>::operator[](typename mat<4, 4, T, Q>::length_type i) + { + assert(i < this->length()); + return this->value[i]; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<4, 4, T, Q>::col_type const& mat<4, 4, T, Q>::operator[](typename mat<4, 4, T, Q>::length_type i) const + { + assert(i < this->length()); + return this->value[i]; + } + + // -- Unary arithmetic operators -- + + template + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q>& mat<4, 4, T, Q>::operator=(mat<4, 4, U, Q> const& m) + { + //memcpy could be faster + //memcpy(&this->value, &m.value, 16 * sizeof(valType)); + this->value[0] = m[0]; + this->value[1] = m[1]; + this->value[2] = m[2]; + this->value[3] = m[3]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q>& mat<4, 4, T, Q>::operator+=(U s) + { + this->value[0] += s; + this->value[1] += s; + this->value[2] += s; + this->value[3] += s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q>& mat<4, 4, T, Q>::operator+=(mat<4, 4, U, Q> const& m) + { + this->value[0] += m[0]; + this->value[1] += m[1]; + this->value[2] += m[2]; + this->value[3] += m[3]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator-=(U s) + { + this->value[0] -= s; + this->value[1] -= s; + this->value[2] -= s; + this->value[3] -= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator-=(mat<4, 4, U, Q> const& m) + { + this->value[0] -= m[0]; + this->value[1] -= m[1]; + this->value[2] -= m[2]; + this->value[3] -= m[3]; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator*=(U s) + { + this->value[0] *= s; + this->value[1] *= s; + this->value[2] *= s; + this->value[3] *= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator*=(mat<4, 4, U, Q> const& m) + { + return (*this = *this * m); + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator/=(U s) + { + this->value[0] /= s; + this->value[1] /= s; + this->value[2] /= s; + this->value[3] /= s; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator/=(mat<4, 4, U, Q> const& m) + { + return *this *= inverse(m); + } + + // -- Increment and decrement operators -- + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator++() + { + ++this->value[0]; + ++this->value[1]; + ++this->value[2]; + ++this->value[3]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator--() + { + --this->value[0]; + --this->value[1]; + --this->value[2]; + --this->value[3]; + return *this; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> mat<4, 4, T, Q>::operator++(int) + { + 
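+		// Editor's note (not part of GLM): the post-increment operators follow the
+		// canonical copy-then-mutate pattern. A minimal usage sketch, illustrative only:
+		//
+		//     glm::mat4 m(1.0f);   // identity
+		//     glm::mat4 old = m++; // 'old' keeps the identity; every column of m is incremented
+		//     --m;                 // componentwise decrement restores the identity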
mat<4, 4, T, Q> Result(*this); + ++*this; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> mat<4, 4, T, Q>::operator--(int) + { + mat<4, 4, T, Q> Result(*this); + --*this; + return Result; + } + + // -- Unary constant operators -- + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator+(mat<4, 4, T, Q> const& m) + { + return m; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator-(mat<4, 4, T, Q> const& m) + { + return mat<4, 4, T, Q>( + -m[0], + -m[1], + -m[2], + -m[3]); + } + + // -- Binary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator+(mat<4, 4, T, Q> const& m, T const& s) + { + return mat<4, 4, T, Q>( + m[0] + s, + m[1] + s, + m[2] + s, + m[3] + s); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator+(T const& s, mat<4, 4, T, Q> const& m) + { + return mat<4, 4, T, Q>( + m[0] + s, + m[1] + s, + m[2] + s, + m[3] + s); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator+(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2) + { + return mat<4, 4, T, Q>( + m1[0] + m2[0], + m1[1] + m2[1], + m1[2] + m2[2], + m1[3] + m2[3]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator-(mat<4, 4, T, Q> const& m, T const& s) + { + return mat<4, 4, T, Q>( + m[0] - s, + m[1] - s, + m[2] - s, + m[3] - s); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator-(T const& s, mat<4, 4, T, Q> const& m) + { + return mat<4, 4, T, Q>( + s - m[0], + s - m[1], + s - m[2], + s - m[3]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator-(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2) + { + return mat<4, 4, T, Q>( + m1[0] - m2[0], + m1[1] - m2[1], + m1[2] - m2[2], + m1[3] - m2[3]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator*(mat<4, 4, T, Q> const& m, T const & s) + { + return mat<4, 4, T, Q>( + m[0] * s, + m[1] * s, + m[2] * s, + m[3] * s); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator*(T const& s, mat<4, 4, T, Q> const& m) + { + return mat<4, 4, T, Q>( + m[0] * s, + m[1] * s, + m[2] * s, + m[3] * s); + } + + template + GLM_FUNC_QUALIFIER typename mat<4, 4, T, Q>::col_type operator* + ( + mat<4, 4, T, Q> const& m, + typename mat<4, 4, T, Q>::row_type const& v + ) + { +/* + __m128 v0 = _mm_shuffle_ps(v.data, v.data, _MM_SHUFFLE(0, 0, 0, 0)); + __m128 v1 = _mm_shuffle_ps(v.data, v.data, _MM_SHUFFLE(1, 1, 1, 1)); + __m128 v2 = _mm_shuffle_ps(v.data, v.data, _MM_SHUFFLE(2, 2, 2, 2)); + __m128 v3 = _mm_shuffle_ps(v.data, v.data, _MM_SHUFFLE(3, 3, 3, 3)); + + __m128 m0 = _mm_mul_ps(m[0].data, v0); + __m128 m1 = _mm_mul_ps(m[1].data, v1); + __m128 a0 = _mm_add_ps(m0, m1); + + __m128 m2 = _mm_mul_ps(m[2].data, v2); + __m128 m3 = _mm_mul_ps(m[3].data, v3); + __m128 a1 = _mm_add_ps(m2, m3); + + __m128 a2 = _mm_add_ps(a0, a1); + + return typename mat<4, 4, T, Q>::col_type(a2); +*/ + + typename mat<4, 4, T, Q>::col_type const Mov0(v[0]); + typename mat<4, 4, T, Q>::col_type const Mov1(v[1]); + typename mat<4, 4, T, Q>::col_type const Mul0 = m[0] * Mov0; + typename mat<4, 4, T, Q>::col_type const Mul1 = m[1] * Mov1; + typename mat<4, 4, T, Q>::col_type const Add0 = Mul0 + Mul1; + typename mat<4, 4, T, Q>::col_type const Mov2(v[2]); + typename mat<4, 4, T, Q>::col_type const Mov3(v[3]); + typename mat<4, 4, T, Q>::col_type const Mul2 = m[2] * Mov2; + typename mat<4, 4, T, Q>::col_type const Mul3 = m[3] * Mov3; + typename mat<4, 4, T, Q>::col_type const Add1 = Mul2 + Mul3; + typename mat<4, 4, T, Q>::col_type const Add2 = Add0 + Add1; + return Add2; + +/* + return 
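+	Editor's note (not part of GLM): the written-out form kept below documents the
+	column-major convention -- m[j] is the j-th column and m[j][i] its i-th row entry,
+	so result_i = sum_j m[j][i] * v[j], i.e. v is treated as a column vector.
+	A small usage sketch, illustrative only:
+	    glm::mat4 Tr(1.0f); Tr[3] = glm::vec4(1, 2, 3, 1); // translation lives in the 4th column
+	    glm::vec4 p = Tr * glm::vec4(0, 0, 0, 1);          // p == (1, 2, 3, 1)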
typename mat<4, 4, T, Q>::col_type( + m[0][0] * v[0] + m[1][0] * v[1] + m[2][0] * v[2] + m[3][0] * v[3], + m[0][1] * v[0] + m[1][1] * v[1] + m[2][1] * v[2] + m[3][1] * v[3], + m[0][2] * v[0] + m[1][2] * v[1] + m[2][2] * v[2] + m[3][2] * v[3], + m[0][3] * v[0] + m[1][3] * v[1] + m[2][3] * v[2] + m[3][3] * v[3]); +*/ + } + + template + GLM_FUNC_QUALIFIER typename mat<4, 4, T, Q>::row_type operator* + ( + typename mat<4, 4, T, Q>::col_type const& v, + mat<4, 4, T, Q> const& m + ) + { + return typename mat<4, 4, T, Q>::row_type( + m[0][0] * v[0] + m[0][1] * v[1] + m[0][2] * v[2] + m[0][3] * v[3], + m[1][0] * v[0] + m[1][1] * v[1] + m[1][2] * v[2] + m[1][3] * v[3], + m[2][0] * v[0] + m[2][1] * v[1] + m[2][2] * v[2] + m[2][3] * v[3], + m[3][0] * v[0] + m[3][1] * v[1] + m[3][2] * v[2] + m[3][3] * v[3]); + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator*(mat<4, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2) + { + return mat<2, 4, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2] + m1[3][0] * m2[0][3], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2] + m1[3][1] * m2[0][3], + m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2] + m1[3][2] * m2[0][3], + m1[0][3] * m2[0][0] + m1[1][3] * m2[0][1] + m1[2][3] * m2[0][2] + m1[3][3] * m2[0][3], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2] + m1[3][0] * m2[1][3], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2] + m1[3][1] * m2[1][3], + m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2] + m1[3][2] * m2[1][3], + m1[0][3] * m2[1][0] + m1[1][3] * m2[1][1] + m1[2][3] * m2[1][2] + m1[3][3] * m2[1][3]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator*(mat<4, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2) + { + return mat<3, 4, T, Q>( + m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2] + m1[3][0] * m2[0][3], + m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2] + m1[3][1] * m2[0][3], + m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2] + m1[3][2] * m2[0][3], + m1[0][3] * m2[0][0] + m1[1][3] * m2[0][1] + m1[2][3] * m2[0][2] + m1[3][3] * m2[0][3], + m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2] + m1[3][0] * m2[1][3], + m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2] + m1[3][1] * m2[1][3], + m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2] + m1[3][2] * m2[1][3], + m1[0][3] * m2[1][0] + m1[1][3] * m2[1][1] + m1[2][3] * m2[1][2] + m1[3][3] * m2[1][3], + m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2] + m1[3][0] * m2[2][3], + m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2] + m1[3][1] * m2[2][3], + m1[0][2] * m2[2][0] + m1[1][2] * m2[2][1] + m1[2][2] * m2[2][2] + m1[3][2] * m2[2][3], + m1[0][3] * m2[2][0] + m1[1][3] * m2[2][1] + m1[2][3] * m2[2][2] + m1[3][3] * m2[2][3]); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator*(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2) + { + typename mat<4, 4, T, Q>::col_type const SrcA0 = m1[0]; + typename mat<4, 4, T, Q>::col_type const SrcA1 = m1[1]; + typename mat<4, 4, T, Q>::col_type const SrcA2 = m1[2]; + typename mat<4, 4, T, Q>::col_type const SrcA3 = m1[3]; + + typename mat<4, 4, T, Q>::col_type const SrcB0 = m2[0]; + typename mat<4, 4, T, Q>::col_type const SrcB1 = m2[1]; + typename mat<4, 4, T, Q>::col_type const SrcB2 = m2[2]; + typename mat<4, 4, T, Q>::col_type const SrcB3 = m2[3]; + + mat<4, 4, T, Q> Result; + Result[0] = SrcA0 * SrcB0[0] + SrcA1 * SrcB0[1] + 
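+		// Editor's note (not part of GLM): each result column is a linear combination of
+		// m1's columns weighted by the entries of the matching m2 column -- the column-major
+		// equivalent of the classic triple loop. Sketch, illustrative only:
+		//
+		//     for (int c = 0; c < 4; ++c)
+		//         for (int r = 0; r < 4; ++r)
+		//         {
+		//             T sum = T(0);
+		//             for (int k = 0; k < 4; ++k)
+		//                 sum += m1[k][r] * m2[c][k];
+		//             Result[c][r] = sum;
+		//         }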
SrcA2 * SrcB0[2] + SrcA3 * SrcB0[3]; + Result[1] = SrcA0 * SrcB1[0] + SrcA1 * SrcB1[1] + SrcA2 * SrcB1[2] + SrcA3 * SrcB1[3]; + Result[2] = SrcA0 * SrcB2[0] + SrcA1 * SrcB2[1] + SrcA2 * SrcB2[2] + SrcA3 * SrcB2[3]; + Result[3] = SrcA0 * SrcB3[0] + SrcA1 * SrcB3[1] + SrcA2 * SrcB3[2] + SrcA3 * SrcB3[3]; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator/(mat<4, 4, T, Q> const& m, T const& s) + { + return mat<4, 4, T, Q>( + m[0] / s, + m[1] / s, + m[2] / s, + m[3] / s); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator/(T const& s, mat<4, 4, T, Q> const& m) + { + return mat<4, 4, T, Q>( + s / m[0], + s / m[1], + s / m[2], + s / m[3]); + } + + template + GLM_FUNC_QUALIFIER typename mat<4, 4, T, Q>::col_type operator/(mat<4, 4, T, Q> const& m, typename mat<4, 4, T, Q>::row_type const& v) + { + return inverse(m) * v; + } + + template + GLM_FUNC_QUALIFIER typename mat<4, 4, T, Q>::row_type operator/(typename mat<4, 4, T, Q>::col_type const& v, mat<4, 4, T, Q> const& m) + { + return v * inverse(m); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator/(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2) + { + mat<4, 4, T, Q> m1_copy(m1); + return m1_copy /= m2; + } + + // -- Boolean operators -- + + template + GLM_FUNC_QUALIFIER bool operator==(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2) + { + return (m1[0] == m2[0]) && (m1[1] == m2[1]) && (m1[2] == m2[2]) && (m1[3] == m2[3]); + } + + template + GLM_FUNC_QUALIFIER bool operator!=(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2) + { + return (m1[0] != m2[0]) || (m1[1] != m2[1]) || (m1[2] != m2[2]) || (m1[3] != m2[3]); + } +}//namespace glm + +#if GLM_CONFIG_SIMD == GLM_ENABLE +# include "type_mat4x4_simd.inl" +#endif diff --git a/MacroLibX/third_party/glm/detail/type_mat4x4_simd.inl b/MacroLibX/third_party/glm/detail/type_mat4x4_simd.inl new file mode 100644 index 0000000..fb3a16f --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_mat4x4_simd.inl @@ -0,0 +1,6 @@ +/// @ref core + +namespace glm +{ + +}//namespace glm diff --git a/MacroLibX/third_party/glm/detail/type_quat.hpp b/MacroLibX/third_party/glm/detail/type_quat.hpp new file mode 100644 index 0000000..0e60bc3 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_quat.hpp @@ -0,0 +1,186 @@ +/// @ref core +/// @file glm/detail/type_quat.hpp + +#pragma once + +// Dependency: +#include "../detail/type_mat3x3.hpp" +#include "../detail/type_mat4x4.hpp" +#include "../detail/type_vec3.hpp" +#include "../detail/type_vec4.hpp" +#include "../ext/vector_relational.hpp" +#include "../ext/quaternion_relational.hpp" +#include "../gtc/constants.hpp" +#include "../gtc/matrix_transform.hpp" + +namespace glm +{ + template + struct qua + { + // -- Implementation detail -- + + typedef qua type; + typedef T value_type; + + // -- Data -- + +# if GLM_SILENT_WARNINGS == GLM_ENABLE +# if GLM_COMPILER & GLM_COMPILER_GCC +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Wpedantic" +# elif GLM_COMPILER & GLM_COMPILER_CLANG +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wgnu-anonymous-struct" +# pragma clang diagnostic ignored "-Wnested-anon-types" +# elif GLM_COMPILER & GLM_COMPILER_VC +# pragma warning(push) +# pragma warning(disable: 4201) // nonstandard extension used : nameless struct/union +# endif +# endif + +# if GLM_LANG & GLM_LANG_CXXMS_FLAG + union + { +# ifdef GLM_FORCE_QUAT_DATA_WXYZ + struct { T w, x, y, z; }; +# else + struct { T x, y, z, w; }; +# endif + + typename detail::storage<4, T, 
detail::is_aligned::value>::type data; + }; +# else +# ifdef GLM_FORCE_QUAT_DATA_WXYZ + T w, x, y, z; +# else + T x, y, z, w; +# endif +# endif + +# if GLM_SILENT_WARNINGS == GLM_ENABLE +# if GLM_COMPILER & GLM_COMPILER_CLANG +# pragma clang diagnostic pop +# elif GLM_COMPILER & GLM_COMPILER_GCC +# pragma GCC diagnostic pop +# elif GLM_COMPILER & GLM_COMPILER_VC +# pragma warning(pop) +# endif +# endif + + // -- Component accesses -- + + typedef length_t length_type; + + /// Return the count of components of a quaternion + GLM_FUNC_DECL static GLM_CONSTEXPR length_type length(){return 4;} + + GLM_FUNC_DECL GLM_CONSTEXPR T & operator[](length_type i); + GLM_FUNC_DECL GLM_CONSTEXPR T const& operator[](length_type i) const; + + // -- Implicit basic constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR qua() GLM_DEFAULT; + GLM_FUNC_DECL GLM_CONSTEXPR qua(qua const& q) GLM_DEFAULT; + template + GLM_FUNC_DECL GLM_CONSTEXPR qua(qua const& q); + + // -- Explicit basic constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR qua(T s, vec<3, T, Q> const& v); + GLM_FUNC_DECL GLM_CONSTEXPR qua(T w, T x, T y, T z); + + // -- Conversion constructors -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT qua(qua const& q); + + /// Explicit conversion operators +# if GLM_HAS_EXPLICIT_CONVERSION_OPERATORS + GLM_FUNC_DECL explicit operator mat<3, 3, T, Q>() const; + GLM_FUNC_DECL explicit operator mat<4, 4, T, Q>() const; +# endif + + /// Create a quaternion from two normalized axis + /// + /// @param u A first normalized axis + /// @param v A second normalized axis + /// @see gtc_quaternion + /// @see http://lolengine.net/blog/2013/09/18/beautiful-maths-quaternion-from-vectors + GLM_FUNC_DECL qua(vec<3, T, Q> const& u, vec<3, T, Q> const& v); + + /// Build a quaternion from euler angles (pitch, yaw, roll), in radians. 
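+		// Editor's note (not part of GLM): the definition in type_quat.inl composes the three
+		// half-angle rotations directly; with c = cos(eulerAngles * 0.5) and s = sin(eulerAngles * 0.5):
+		//     w = c.x*c.y*c.z + s.x*s.y*s.z
+		//     x = s.x*c.y*c.z - c.x*s.y*s.z
+		//     y = c.x*s.y*c.z + s.x*c.y*s.z
+		//     z = c.x*c.y*s.z - s.x*s.y*c.z
+		// Usage sketch, illustrative only:
+		//     glm::quat q(glm::vec3(glm::radians(30.0f), 0.0f, 0.0f)); // 30 degree pitch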
+ GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT qua(vec<3, T, Q> const& eulerAngles); + GLM_FUNC_DECL GLM_EXPLICIT qua(mat<3, 3, T, Q> const& q); + GLM_FUNC_DECL GLM_EXPLICIT qua(mat<4, 4, T, Q> const& q); + + // -- Unary arithmetic operators -- + + GLM_FUNC_DECL GLM_CONSTEXPR qua& operator=(qua const& q) GLM_DEFAULT; + + template + GLM_FUNC_DECL GLM_CONSTEXPR qua& operator=(qua const& q); + template + GLM_FUNC_DECL GLM_CONSTEXPR qua& operator+=(qua const& q); + template + GLM_FUNC_DECL GLM_CONSTEXPR qua& operator-=(qua const& q); + template + GLM_FUNC_DECL GLM_CONSTEXPR qua& operator*=(qua const& q); + template + GLM_FUNC_DECL GLM_CONSTEXPR qua& operator*=(U s); + template + GLM_FUNC_DECL GLM_CONSTEXPR qua& operator/=(U s); + }; + + // -- Unary bit operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR qua operator+(qua const& q); + + template + GLM_FUNC_DECL GLM_CONSTEXPR qua operator-(qua const& q); + + // -- Binary operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR qua operator+(qua const& q, qua const& p); + + template + GLM_FUNC_DECL GLM_CONSTEXPR qua operator-(qua const& q, qua const& p); + + template + GLM_FUNC_DECL GLM_CONSTEXPR qua operator*(qua const& q, qua const& p); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator*(qua const& q, vec<3, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v, qua const& q); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator*(qua const& q, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v, qua const& q); + + template + GLM_FUNC_DECL GLM_CONSTEXPR qua operator*(qua const& q, T const& s); + + template + GLM_FUNC_DECL GLM_CONSTEXPR qua operator*(T const& s, qua const& q); + + template + GLM_FUNC_DECL GLM_CONSTEXPR qua operator/(qua const& q, T const& s); + + // -- Boolean operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR bool operator==(qua const& q1, qua const& q2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR bool operator!=(qua const& q1, qua const& q2); +} //namespace glm + +#ifndef GLM_EXTERNAL_TEMPLATE +#include "type_quat.inl" +#endif//GLM_EXTERNAL_TEMPLATE diff --git a/MacroLibX/third_party/glm/detail/type_quat.inl b/MacroLibX/third_party/glm/detail/type_quat.inl new file mode 100644 index 0000000..67b9310 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_quat.inl @@ -0,0 +1,408 @@ +#include "../trigonometric.hpp" +#include "../exponential.hpp" +#include "../ext/quaternion_geometric.hpp" +#include + +namespace glm{ +namespace detail +{ + template + struct genTypeTrait > + { + static const genTypeEnum GENTYPE = GENTYPE_QUAT; + }; + + template + struct compute_dot, T, Aligned> + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static T call(qua const& a, qua const& b) + { + vec<4, T, Q> tmp(a.w * b.w, a.x * b.x, a.y * b.y, a.z * b.z); + return (tmp.x + tmp.y) + (tmp.z + tmp.w); + } + }; + + template + struct compute_quat_add + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static qua call(qua const& q, qua const& p) + { + return qua(q.w + p.w, q.x + p.x, q.y + p.y, q.z + p.z); + } + }; + + template + struct compute_quat_sub + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static qua call(qua const& q, qua const& p) + { + return qua(q.w - p.w, q.x - p.x, q.y - p.y, q.z - p.z); + } + }; + + template + struct compute_quat_mul_scalar + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static qua call(qua const& q, T s) + { + return qua(q.w * s, q.x * s, q.y * s, q.z * s); + } + }; + + template + struct compute_quat_div_scalar + { + 
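+		// Editor's note (not part of GLM): these compute_quat_* structs are dispatch points;
+		// the public operators route through detail::compute_quat_*<T, Q, detail::is_aligned<Q>::value>,
+		// and type_quat_simd.inl re-specializes them for aligned float (and, with AVX, double)
+		// so SIMD paths can replace this scalar fallback. The pattern, reduced to a sketch:
+		//
+		//     template<typename T, qualifier Q, bool Aligned>
+		//     struct compute_quat_add { /* scalar path */ };
+		//     template<qualifier Q>
+		//     struct compute_quat_add<float, Q, true> { /* SSE path */ };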
GLM_FUNC_QUALIFIER GLM_CONSTEXPR static qua call(qua const& q, T s) + { + return qua(q.w / s, q.x / s, q.y / s, q.z / s); + } + }; + + template + struct compute_quat_mul_vec4 + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(qua const& q, vec<4, T, Q> const& v) + { + return vec<4, T, Q>(q * vec<3, T, Q>(v), v.w); + } + }; +}//namespace detail + + // -- Component accesses -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR T & qua::operator[](typename qua::length_type i) + { + assert(i >= 0 && i < this->length()); +# ifdef GLM_FORCE_QUAT_DATA_WXYZ + return (&w)[i]; +# else + return (&x)[i]; +# endif + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR T const& qua::operator[](typename qua::length_type i) const + { + assert(i >= 0 && i < this->length()); +# ifdef GLM_FORCE_QUAT_DATA_WXYZ + return (&w)[i]; +# else + return (&x)[i]; +# endif + } + + // -- Implicit basic constructors -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua::qua() +# if GLM_CONFIG_CTOR_INIT != GLM_CTOR_INIT_DISABLE +# ifdef GLM_FORCE_QUAT_DATA_WXYZ + : w(1), x(0), y(0), z(0) +# else + : x(0), y(0), z(0), w(1) +# endif +# endif + {} + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua::qua(qua const& q) +# ifdef GLM_FORCE_QUAT_DATA_WXYZ + : w(q.w), x(q.x), y(q.y), z(q.z) +# else + : x(q.x), y(q.y), z(q.z), w(q.w) +# endif + {} +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua::qua(qua const& q) +# ifdef GLM_FORCE_QUAT_DATA_WXYZ + : w(q.w), x(q.x), y(q.y), z(q.z) +# else + : x(q.x), y(q.y), z(q.z), w(q.w) +# endif + {} + + // -- Explicit basic constructors -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua::qua(T s, vec<3, T, Q> const& v) +# ifdef GLM_FORCE_QUAT_DATA_WXYZ + : w(s), x(v.x), y(v.y), z(v.z) +# else + : x(v.x), y(v.y), z(v.z), w(s) +# endif + {} + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua::qua(T _w, T _x, T _y, T _z) +# ifdef GLM_FORCE_QUAT_DATA_WXYZ + : w(_w), x(_x), y(_y), z(_z) +# else + : x(_x), y(_y), z(_z), w(_w) +# endif + {} + + // -- Conversion constructors -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua::qua(qua const& q) +# ifdef GLM_FORCE_QUAT_DATA_WXYZ + : w(static_cast(q.w)), x(static_cast(q.x)), y(static_cast(q.y)), z(static_cast(q.z)) +# else + : x(static_cast(q.x)), y(static_cast(q.y)), z(static_cast(q.z)), w(static_cast(q.w)) +# endif + {} + + //template + //GLM_FUNC_QUALIFIER qua::qua + //( + // valType const& pitch, + // valType const& yaw, + // valType const& roll + //) + //{ + // vec<3, valType> eulerAngle(pitch * valType(0.5), yaw * valType(0.5), roll * valType(0.5)); + // vec<3, valType> c = glm::cos(eulerAngle * valType(0.5)); + // vec<3, valType> s = glm::sin(eulerAngle * valType(0.5)); + // + // this->w = c.x * c.y * c.z + s.x * s.y * s.z; + // this->x = s.x * c.y * c.z - c.x * s.y * s.z; + // this->y = c.x * s.y * c.z + s.x * c.y * s.z; + // this->z = c.x * c.y * s.z - s.x * s.y * c.z; + //} + + template + GLM_FUNC_QUALIFIER qua::qua(vec<3, T, Q> const& u, vec<3, T, Q> const& v) + { + T norm_u_norm_v = sqrt(dot(u, u) * dot(v, v)); + T real_part = norm_u_norm_v + dot(u, v); + vec<3, T, Q> t; + + if(real_part < static_cast(1.e-6f) * norm_u_norm_v) + { + // If u and v are exactly opposite, rotate 180 degrees + // around an arbitrary orthogonal axis. Axis normalisation + // can happen later, when we normalise the quaternion. + real_part = static_cast(0); + t = abs(u.x) > abs(u.z) ? 
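+			// Editor's note (not part of GLM): both candidate axes below are orthogonal to u; the
+			// comparison just avoids picking a near-zero axis when one component of u dominates.
+			// In the non-degenerate case, normalize(qua(|u||v| + dot(u, v), cross(u, v))) is the
+			// half-angle identity at work: for unit u and v the pair (1 + cos(a), sin(a) * axis)
+			// normalizes to (cos(a/2), sin(a/2) * axis).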
vec<3, T, Q>(-u.y, u.x, static_cast(0)) : vec<3, T, Q>(static_cast(0), -u.z, u.y); + } + else + { + // Otherwise, build quaternion the standard way. + t = cross(u, v); + } + + *this = normalize(qua(real_part, t.x, t.y, t.z)); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua::qua(vec<3, T, Q> const& eulerAngle) + { + vec<3, T, Q> c = glm::cos(eulerAngle * T(0.5)); + vec<3, T, Q> s = glm::sin(eulerAngle * T(0.5)); + + this->w = c.x * c.y * c.z + s.x * s.y * s.z; + this->x = s.x * c.y * c.z - c.x * s.y * s.z; + this->y = c.x * s.y * c.z + s.x * c.y * s.z; + this->z = c.x * c.y * s.z - s.x * s.y * c.z; + } + + template + GLM_FUNC_QUALIFIER qua::qua(mat<3, 3, T, Q> const& m) + { + *this = quat_cast(m); + } + + template + GLM_FUNC_QUALIFIER qua::qua(mat<4, 4, T, Q> const& m) + { + *this = quat_cast(m); + } + +# if GLM_HAS_EXPLICIT_CONVERSION_OPERATORS + template + GLM_FUNC_QUALIFIER qua::operator mat<3, 3, T, Q>() const + { + return mat3_cast(*this); + } + + template + GLM_FUNC_QUALIFIER qua::operator mat<4, 4, T, Q>() const + { + return mat4_cast(*this); + } +# endif//GLM_HAS_EXPLICIT_CONVERSION_OPERATORS + + // -- Unary arithmetic operators -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua & qua::operator=(qua const& q) + { + this->w = q.w; + this->x = q.x; + this->y = q.y; + this->z = q.z; + return *this; + } +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua & qua::operator=(qua const& q) + { + this->w = static_cast(q.w); + this->x = static_cast(q.x); + this->y = static_cast(q.y); + this->z = static_cast(q.z); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua & qua::operator+=(qua const& q) + { + return (*this = detail::compute_quat_add::value>::call(*this, qua(q))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua & qua::operator-=(qua const& q) + { + return (*this = detail::compute_quat_sub::value>::call(*this, qua(q))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua & qua::operator*=(qua const& r) + { + qua const p(*this); + qua const q(r); + + this->w = p.w * q.w - p.x * q.x - p.y * q.y - p.z * q.z; + this->x = p.w * q.x + p.x * q.w + p.y * q.z - p.z * q.y; + this->y = p.w * q.y + p.y * q.w + p.z * q.x - p.x * q.z; + this->z = p.w * q.z + p.z * q.w + p.x * q.y - p.y * q.x; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua & qua::operator*=(U s) + { + return (*this = detail::compute_quat_mul_scalar::value>::call(*this, static_cast(s))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua & qua::operator/=(U s) + { + return (*this = detail::compute_quat_div_scalar::value>::call(*this, static_cast(s))); + } + + // -- Unary bit operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator+(qua const& q) + { + return q; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator-(qua const& q) + { + return qua(-q.w, -q.x, -q.y, -q.z); + } + + // -- Binary operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator+(qua const& q, qua const& p) + { + return qua(q) += p; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator-(qua const& q, qua const& p) + { + return qua(q) -= p; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator*(qua const& q, qua const& p) + { + return qua(q) *= p; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator*(qua const& q, vec<3, T, Q> const& v) + { + vec<3, T, Q> const QuatVector(q.x, q.y, 
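+		// Editor's note (not part of GLM): this is the standard optimized expansion of
+		// q * v * conjugate(q) for a unit quaternion: with u = (q.x, q.y, q.z),
+		//     v' = v + 2w * (u x v) + 2 * (u x (u x v))
+		// two cross products instead of two full quaternion multiplies.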
q.z); + vec<3, T, Q> const uv(glm::cross(QuatVector, v)); + vec<3, T, Q> const uuv(glm::cross(QuatVector, uv)); + + return v + ((uv * q.w) + uuv) * static_cast(2); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v, qua const& q) + { + return glm::inverse(q) * v; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator*(qua const& q, vec<4, T, Q> const& v) + { + return detail::compute_quat_mul_vec4::value>::call(q, v); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v, qua const& q) + { + return glm::inverse(q) * v; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator*(qua const& q, T const& s) + { + return qua( + q.w * s, q.x * s, q.y * s, q.z * s); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator*(T const& s, qua const& q) + { + return q * s; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator/(qua const& q, T const& s) + { + return qua( + q.w / s, q.x / s, q.y / s, q.z / s); + } + + // -- Boolean operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator==(qua const& q1, qua const& q2) + { + return q1.x == q2.x && q1.y == q2.y && q1.z == q2.z && q1.w == q2.w; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator!=(qua const& q1, qua const& q2) + { + return q1.x != q2.x || q1.y != q2.y || q1.z != q2.z || q1.w != q2.w; + } +}//namespace glm + +#if GLM_CONFIG_SIMD == GLM_ENABLE +# include "type_quat_simd.inl" +#endif + diff --git a/MacroLibX/third_party/glm/detail/type_quat_simd.inl b/MacroLibX/third_party/glm/detail/type_quat_simd.inl new file mode 100644 index 0000000..3333e59 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_quat_simd.inl @@ -0,0 +1,188 @@ +/// @ref core + +#if GLM_ARCH & GLM_ARCH_SSE2_BIT + +namespace glm{ +namespace detail +{ +/* + template + struct compute_quat_mul + { + static qua call(qua const& q1, qua const& q2) + { + // SSE2 STATS: 11 shuffle, 8 mul, 8 add + // SSE4 STATS: 3 shuffle, 4 mul, 4 dpps + + __m128 const mul0 = _mm_mul_ps(q1.Data, _mm_shuffle_ps(q2.Data, q2.Data, _MM_SHUFFLE(0, 1, 2, 3))); + __m128 const mul1 = _mm_mul_ps(q1.Data, _mm_shuffle_ps(q2.Data, q2.Data, _MM_SHUFFLE(1, 0, 3, 2))); + __m128 const mul2 = _mm_mul_ps(q1.Data, _mm_shuffle_ps(q2.Data, q2.Data, _MM_SHUFFLE(2, 3, 0, 1))); + __m128 const mul3 = _mm_mul_ps(q1.Data, q2.Data); + +# if GLM_ARCH & GLM_ARCH_SSE41_BIT + __m128 const add0 = _mm_dp_ps(mul0, _mm_set_ps(1.0f, -1.0f, 1.0f, 1.0f), 0xff); + __m128 const add1 = _mm_dp_ps(mul1, _mm_set_ps(1.0f, 1.0f, 1.0f, -1.0f), 0xff); + __m128 const add2 = _mm_dp_ps(mul2, _mm_set_ps(1.0f, 1.0f, -1.0f, 1.0f), 0xff); + __m128 const add3 = _mm_dp_ps(mul3, _mm_set_ps(1.0f, -1.0f, -1.0f, -1.0f), 0xff); +# else + __m128 const mul4 = _mm_mul_ps(mul0, _mm_set_ps(1.0f, -1.0f, 1.0f, 1.0f)); + __m128 const add0 = _mm_add_ps(mul0, _mm_movehl_ps(mul4, mul4)); + __m128 const add4 = _mm_add_ss(add0, _mm_shuffle_ps(add0, add0, 1)); + + __m128 const mul5 = _mm_mul_ps(mul1, _mm_set_ps(1.0f, 1.0f, 1.0f, -1.0f)); + __m128 const add1 = _mm_add_ps(mul1, _mm_movehl_ps(mul5, mul5)); + __m128 const add5 = _mm_add_ss(add1, _mm_shuffle_ps(add1, add1, 1)); + + __m128 const mul6 = _mm_mul_ps(mul2, _mm_set_ps(1.0f, 1.0f, -1.0f, 1.0f)); + __m128 const add2 = _mm_add_ps(mul6, _mm_movehl_ps(mul6, mul6)); + __m128 const add6 = _mm_add_ss(add2, _mm_shuffle_ps(add2, add2, 1)); + + __m128 const mul7 = _mm_mul_ps(mul3, _mm_set_ps(1.0f, -1.0f, -1.0f, -1.0f)); + __m128 const add3 = _mm_add_ps(mul3, 
_mm_movehl_ps(mul7, mul7)); + __m128 const add7 = _mm_add_ss(add3, _mm_shuffle_ps(add3, add3, 1)); + #endif + + // This SIMD code is a politically correct way of doing this, but in every test I've tried it has been slower than + // the final code below. I'll keep this here for reference - maybe somebody else can do something better... + // + //__m128 xxyy = _mm_shuffle_ps(add4, add5, _MM_SHUFFLE(0, 0, 0, 0)); + //__m128 zzww = _mm_shuffle_ps(add6, add7, _MM_SHUFFLE(0, 0, 0, 0)); + // + //return _mm_shuffle_ps(xxyy, zzww, _MM_SHUFFLE(2, 0, 2, 0)); + + qua Result; + _mm_store_ss(&Result.x, add4); + _mm_store_ss(&Result.y, add5); + _mm_store_ss(&Result.z, add6); + _mm_store_ss(&Result.w, add7); + return Result; + } + }; +*/ + + template + struct compute_quat_add + { + static qua call(qua const& q, qua const& p) + { + qua Result; + Result.data = _mm_add_ps(q.data, p.data); + return Result; + } + }; + +# if GLM_ARCH & GLM_ARCH_AVX_BIT + template + struct compute_quat_add + { + static qua call(qua const& a, qua const& b) + { + qua Result; + Result.data = _mm256_add_pd(a.data, b.data); + return Result; + } + }; +# endif + + template + struct compute_quat_sub + { + static qua call(qua const& q, qua const& p) + { + vec<4, float, Q> Result; + Result.data = _mm_sub_ps(q.data, p.data); + return Result; + } + }; + +# if GLM_ARCH & GLM_ARCH_AVX_BIT + template + struct compute_quat_sub + { + static qua call(qua const& a, qua const& b) + { + qua Result; + Result.data = _mm256_sub_pd(a.data, b.data); + return Result; + } + }; +# endif + + template + struct compute_quat_mul_scalar + { + static qua call(qua const& q, float s) + { + vec<4, float, Q> Result; + Result.data = _mm_mul_ps(q.data, _mm_set_ps1(s)); + return Result; + } + }; + +# if GLM_ARCH & GLM_ARCH_AVX_BIT + template + struct compute_quat_mul_scalar + { + static qua call(qua const& q, double s) + { + qua Result; + Result.data = _mm256_mul_pd(q.data, _mm_set_ps1(s)); + return Result; + } + }; +# endif + + template + struct compute_quat_div_scalar + { + static qua call(qua const& q, float s) + { + vec<4, float, Q> Result; + Result.data = _mm_div_ps(q.data, _mm_set_ps1(s)); + return Result; + } + }; + +# if GLM_ARCH & GLM_ARCH_AVX_BIT + template + struct compute_quat_div_scalar + { + static qua call(qua const& q, double s) + { + qua Result; + Result.data = _mm256_div_pd(q.data, _mm_set_ps1(s)); + return Result; + } + }; +# endif + + template + struct compute_quat_mul_vec4 + { + static vec<4, float, Q> call(qua const& q, vec<4, float, Q> const& v) + { + __m128 const q_wwww = _mm_shuffle_ps(q.data, q.data, _MM_SHUFFLE(3, 3, 3, 3)); + __m128 const q_swp0 = _mm_shuffle_ps(q.data, q.data, _MM_SHUFFLE(3, 0, 2, 1)); + __m128 const q_swp1 = _mm_shuffle_ps(q.data, q.data, _MM_SHUFFLE(3, 1, 0, 2)); + __m128 const v_swp0 = _mm_shuffle_ps(v.data, v.data, _MM_SHUFFLE(3, 0, 2, 1)); + __m128 const v_swp1 = _mm_shuffle_ps(v.data, v.data, _MM_SHUFFLE(3, 1, 0, 2)); + + __m128 uv = _mm_sub_ps(_mm_mul_ps(q_swp0, v_swp1), _mm_mul_ps(q_swp1, v_swp0)); + __m128 uv_swp0 = _mm_shuffle_ps(uv, uv, _MM_SHUFFLE(3, 0, 2, 1)); + __m128 uv_swp1 = _mm_shuffle_ps(uv, uv, _MM_SHUFFLE(3, 1, 0, 2)); + __m128 uuv = _mm_sub_ps(_mm_mul_ps(q_swp0, uv_swp1), _mm_mul_ps(q_swp1, uv_swp0)); + + __m128 const two = _mm_set1_ps(2.0f); + uv = _mm_mul_ps(uv, _mm_mul_ps(q_wwww, two)); + uuv = _mm_mul_ps(uuv, two); + + vec<4, float, Q> Result; + Result.data = _mm_add_ps(v.Data, _mm_add_ps(uv, uuv)); + return Result; + } + }; +}//namespace detail +}//namespace glm + +#endif//GLM_ARCH & 
GLM_ARCH_SSE2_BIT + diff --git a/MacroLibX/third_party/glm/detail/type_vec1.hpp b/MacroLibX/third_party/glm/detail/type_vec1.hpp new file mode 100644 index 0000000..51163f1 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_vec1.hpp @@ -0,0 +1,308 @@ +/// @ref core +/// @file glm/detail/type_vec1.hpp + +#pragma once + +#include "qualifier.hpp" +#if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR +# include "_swizzle.hpp" +#elif GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION +# include "_swizzle_func.hpp" +#endif +#include + +namespace glm +{ + template + struct vec<1, T, Q> + { + // -- Implementation detail -- + + typedef T value_type; + typedef vec<1, T, Q> type; + typedef vec<1, bool, Q> bool_type; + + // -- Data -- + +# if GLM_SILENT_WARNINGS == GLM_ENABLE +# if GLM_COMPILER & GLM_COMPILER_GCC +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Wpedantic" +# elif GLM_COMPILER & GLM_COMPILER_CLANG +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wgnu-anonymous-struct" +# pragma clang diagnostic ignored "-Wnested-anon-types" +# elif GLM_COMPILER & GLM_COMPILER_VC +# pragma warning(push) +# pragma warning(disable: 4201) // nonstandard extension used : nameless struct/union +# endif +# endif + +# if GLM_CONFIG_XYZW_ONLY + T x; +# elif GLM_CONFIG_ANONYMOUS_STRUCT == GLM_ENABLE + union + { + T x; + T r; + T s; + + typename detail::storage<1, T, detail::is_aligned::value>::type data; +/* +# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR + _GLM_SWIZZLE1_2_MEMBERS(T, Q, x) + _GLM_SWIZZLE1_2_MEMBERS(T, Q, r) + _GLM_SWIZZLE1_2_MEMBERS(T, Q, s) + _GLM_SWIZZLE1_3_MEMBERS(T, Q, x) + _GLM_SWIZZLE1_3_MEMBERS(T, Q, r) + _GLM_SWIZZLE1_3_MEMBERS(T, Q, s) + _GLM_SWIZZLE1_4_MEMBERS(T, Q, x) + _GLM_SWIZZLE1_4_MEMBERS(T, Q, r) + _GLM_SWIZZLE1_4_MEMBERS(T, Q, s) +# endif +*/ + }; +# else + union {T x, r, s;}; +/* +# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION + GLM_SWIZZLE_GEN_VEC_FROM_VEC1(T, Q) +# endif +*/ +# endif + +# if GLM_SILENT_WARNINGS == GLM_ENABLE +# if GLM_COMPILER & GLM_COMPILER_CLANG +# pragma clang diagnostic pop +# elif GLM_COMPILER & GLM_COMPILER_GCC +# pragma GCC diagnostic pop +# elif GLM_COMPILER & GLM_COMPILER_VC +# pragma warning(pop) +# endif +# endif + + // -- Component accesses -- + + /// Return the count of components of the vector + typedef length_t length_type; + GLM_FUNC_DECL static GLM_CONSTEXPR length_type length(){return 1;} + + GLM_FUNC_DECL GLM_CONSTEXPR T & operator[](length_type i); + GLM_FUNC_DECL GLM_CONSTEXPR T const& operator[](length_type i) const; + + // -- Implicit basic constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR vec() GLM_DEFAULT; + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec const& v) GLM_DEFAULT; + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, T, P> const& v); + + // -- Explicit basic constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR explicit vec(T scalar); + + // -- Conversion vector constructors -- + + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<2, U, P> const& v); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<3, U, P> const& v); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<4, U, P> const& v); + + /// Explicit conversions (From section 5.4.1 
Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<1, U, P> const& v); + + // -- Swizzle constructors -- +/* +# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<1, T, Q, E0, -1,-2,-3> const& that) + { + *this = that(); + } +# endif//GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR +*/ + // -- Unary arithmetic operators -- + + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator=(vec const& v) GLM_DEFAULT; + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator+=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator+=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator-=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator-=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator*=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator*=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator/=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator/=(vec<1, U, Q> const& v); + + // -- Increment and decrement operators -- + + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator++(); + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator--(); + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator++(int); + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator--(int); + + // -- Unary bit operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator%=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator%=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator&=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator&=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator|=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator|=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator^=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator^=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator<<=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator<<=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator>>=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator>>=(vec<1, U, Q> const& v); + }; + + // -- Unary operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator+(vec<1, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator-(vec<1, T, Q> const& v); + + // -- Binary operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator+(vec<1, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator+(T scalar, vec<1, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator+(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator-(vec<1, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator-(T scalar, vec<1, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator-(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL 
GLM_CONSTEXPR vec<1, T, Q> operator*(vec<1, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator*(T scalar, vec<1, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator*(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator/(vec<1, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator/(T scalar, vec<1, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator/(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator%(vec<1, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator%(T scalar, vec<1, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator%(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator&(vec<1, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator&(T scalar, vec<1, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator&(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator|(vec<1, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator|(T scalar, vec<1, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator|(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator^(vec<1, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator^(T scalar, vec<1, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator^(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator<<(vec<1, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator<<(T scalar, vec<1, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator<<(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator>>(vec<1, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator>>(T scalar, vec<1, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator>>(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator~(vec<1, T, Q> const& v); + + // -- Boolean operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR bool operator==(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR bool operator!=(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, bool, Q> operator&&(vec<1, bool, Q> const& v1, vec<1, bool, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<1, bool, Q> operator||(vec<1, bool, Q> const& v1, vec<1, bool, Q> const& v2); +}//namespace glm + +#ifndef GLM_EXTERNAL_TEMPLATE +#include "type_vec1.inl" +#endif//GLM_EXTERNAL_TEMPLATE diff --git a/MacroLibX/third_party/glm/detail/type_vec1.inl b/MacroLibX/third_party/glm/detail/type_vec1.inl new file mode 100644 index 0000000..d0f49fd --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_vec1.inl @@ -0,0 +1,551 @@ +/// @ref core + +#include "./compute_vector_relational.hpp" + +namespace glm +{ + // -- Implicit basic constructors -- + +# 
if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec() +# if GLM_CONFIG_CTOR_INIT != GLM_CTOR_INIT_DISABLE + : x(0) +# endif + {} + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec(vec<1, T, Q> const& v) + : x(v.x) + {} +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec(vec<1, T, P> const& v) + : x(v.x) + {} + + // -- Explicit basic constructors -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec(T scalar) + : x(scalar) + {} + + // -- Conversion vector constructors -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec(vec<1, U, P> const& v) + : x(static_cast(v.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec(vec<2, U, P> const& v) + : x(static_cast(v.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec(vec<3, U, P> const& v) + : x(static_cast(v.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec(vec<4, U, P> const& v) + : x(static_cast(v.x)) + {} + + // -- Component accesses -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR T & vec<1, T, Q>::operator[](typename vec<1, T, Q>::length_type) + { + return x; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR T const& vec<1, T, Q>::operator[](typename vec<1, T, Q>::length_type) const + { + return x; + } + + // -- Unary arithmetic operators -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator=(vec<1, T, Q> const& v) + { + this->x = v.x; + return *this; + } +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator=(vec<1, U, Q> const& v) + { + this->x = static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator+=(U scalar) + { + this->x += static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator+=(vec<1, U, Q> const& v) + { + this->x += static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator-=(U scalar) + { + this->x -= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator-=(vec<1, U, Q> const& v) + { + this->x -= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator*=(U scalar) + { + this->x *= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator*=(vec<1, U, Q> const& v) + { + this->x *= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator/=(U scalar) + { + this->x /= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator/=(vec<1, U, Q> const& v) + { + this->x /= static_cast(v.x); + return *this; + } + + // -- Increment and decrement operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator++() + { + ++this->x; + return *this; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator--() + { + --this->x; + return *this; + } + + template + 
GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> vec<1, T, Q>::operator++(int) + { + vec<1, T, Q> Result(*this); + ++*this; + return Result; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> vec<1, T, Q>::operator--(int) + { + vec<1, T, Q> Result(*this); + --*this; + return Result; + } + + // -- Unary bit operators -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator%=(U scalar) + { + this->x %= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator%=(vec<1, U, Q> const& v) + { + this->x %= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator&=(U scalar) + { + this->x &= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator&=(vec<1, U, Q> const& v) + { + this->x &= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator|=(U scalar) + { + this->x |= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator|=(vec<1, U, Q> const& v) + { + this->x |= U(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator^=(U scalar) + { + this->x ^= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator^=(vec<1, U, Q> const& v) + { + this->x ^= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator<<=(U scalar) + { + this->x <<= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator<<=(vec<1, U, Q> const& v) + { + this->x <<= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator>>=(U scalar) + { + this->x >>= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator>>=(vec<1, U, Q> const& v) + { + this->x >>= static_cast(v.x); + return *this; + } + + // -- Unary constant operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator+(vec<1, T, Q> const& v) + { + return v; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator-(vec<1, T, Q> const& v) + { + return vec<1, T, Q>( + -v.x); + } + + // -- Binary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator+(vec<1, T, Q> const& v, T scalar) + { + return vec<1, T, Q>( + v.x + scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator+(T scalar, vec<1, T, Q> const& v) + { + return vec<1, T, Q>( + scalar + v.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator+(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<1, T, Q>( + v1.x + v2.x); + } + + //operator- + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator-(vec<1, T, Q> const& v, T scalar) + { + return vec<1, T, Q>( + v.x - scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator-(T scalar, vec<1, T, Q> const& v) + { + return vec<1, T, Q>( + scalar - v.x); + } + + template + GLM_FUNC_QUALIFIER 
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator-(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2)
+	{
+		return vec<1, T, Q>(
+			v1.x - v2.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator*(vec<1, T, Q> const& v, T scalar)
+	{
+		return vec<1, T, Q>(
+			v.x * scalar);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator*(T scalar, vec<1, T, Q> const& v)
+	{
+		return vec<1, T, Q>(
+			scalar * v.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator*(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2)
+	{
+		return vec<1, T, Q>(
+			v1.x * v2.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator/(vec<1, T, Q> const& v, T scalar)
+	{
+		return vec<1, T, Q>(
+			v.x / scalar);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator/(T scalar, vec<1, T, Q> const& v)
+	{
+		return vec<1, T, Q>(
+			scalar / v.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator/(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2)
+	{
+		return vec<1, T, Q>(
+			v1.x / v2.x);
+	}
+
+	// -- Binary bit operators --
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator%(vec<1, T, Q> const& v, T scalar)
+	{
+		return vec<1, T, Q>(
+			v.x % scalar);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator%(T scalar, vec<1, T, Q> const& v)
+	{
+		return vec<1, T, Q>(
+			scalar % v.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator%(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2)
+	{
+		return vec<1, T, Q>(
+			v1.x % v2.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator&(vec<1, T, Q> const& v, T scalar)
+	{
+		return vec<1, T, Q>(
+			v.x & scalar);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator&(T scalar, vec<1, T, Q> const& v)
+	{
+		return vec<1, T, Q>(
+			scalar & v.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator&(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2)
+	{
+		return vec<1, T, Q>(
+			v1.x & v2.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator|(vec<1, T, Q> const& v, T scalar)
+	{
+		return vec<1, T, Q>(
+			v.x | scalar);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator|(T scalar, vec<1, T, Q> const& v)
+	{
+		return vec<1, T, Q>(
+			scalar | v.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator|(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2)
+	{
+		return vec<1, T, Q>(
+			v1.x | v2.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator^(vec<1, T, Q> const& v, T scalar)
+	{
+		return vec<1, T, Q>(
+			v.x ^ scalar);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator^(T scalar, vec<1, T, Q> const& v)
+	{
+		return vec<1, T, Q>(
+			scalar ^ v.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator^(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2)
+	{
+		return vec<1, T, Q>(
+			v1.x ^ v2.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator<<(vec<1, T, Q> const& v, T scalar)
+	{
+		return vec<1, T, Q>(
+			static_cast<T>(v.x << scalar));
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator<<(T scalar, vec<1, T, Q> const& v)
+	{
+		return vec<1, T, Q>(
+			scalar << v.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator<<(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2)
+	{
+		return vec<1, T, Q>(
+			v1.x << v2.x);
+	}
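+
+	// [Illustrative sketch] These bit and shift operators are function templates
+	// instantiated only on use, so vec<1, float> stays valid as long as no %, &,
+	// |, ^ or shift is applied to it; with integer components they act per
+	// component:
+	//
+	//   glm::vec<1, unsigned> m(0x3u);
+	//   glm::vec<1, unsigned> s = m << 2u;  // s.x == 0xCu
+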
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator>>(vec<1, T, Q> const& v, T scalar)
+	{
+		return vec<1, T, Q>(
+			v.x >> scalar);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator>>(T scalar, vec<1, T, Q> const& v)
+	{
+		return vec<1, T, Q>(
+			scalar >> v.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator>>(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2)
+	{
+		return vec<1, T, Q>(
+			v1.x >> v2.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator~(vec<1, T, Q> const& v)
+	{
+		return vec<1, T, Q>(
+			~v.x);
+	}
+
+	// -- Boolean operators --
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator==(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2)
+	{
+		return detail::compute_equal<T, std::numeric_limits<T>::is_iec559>::call(v1.x, v2.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator!=(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2)
+	{
+		return !(v1 == v2);
+	}
+
+	template<qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, bool, Q> operator&&(vec<1, bool, Q> const& v1, vec<1, bool, Q> const& v2)
+	{
+		return vec<1, bool, Q>(v1.x && v2.x);
+	}
+
+	template<qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, bool, Q> operator||(vec<1, bool, Q> const& v1, vec<1, bool, Q> const& v2)
+	{
+		return vec<1, bool, Q>(v1.x || v2.x);
+	}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/detail/type_vec2.hpp b/MacroLibX/third_party/glm/detail/type_vec2.hpp
new file mode 100644
index 0000000..52ef408
--- /dev/null
+++ b/MacroLibX/third_party/glm/detail/type_vec2.hpp
@@ -0,0 +1,399 @@
+/// @ref core
+/// @file glm/detail/type_vec2.hpp
+
+#pragma once
+
+#include "qualifier.hpp"
+#if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR
+#	include "_swizzle.hpp"
+#elif GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION
+#	include "_swizzle_func.hpp"
+#endif
+#include <cstddef>
+
+namespace glm
+{
+	template<typename T, qualifier Q>
+	struct vec<2, T, Q>
+	{
+		// -- Implementation detail --
+
+		typedef T value_type;
+		typedef vec<2, T, Q> type;
+		typedef vec<2, bool, Q> bool_type;
+
+		// -- Data --
+
+#		if GLM_SILENT_WARNINGS == GLM_ENABLE
+#			if GLM_COMPILER & GLM_COMPILER_GCC
+#				pragma GCC diagnostic push
+#				pragma GCC diagnostic ignored "-Wpedantic"
+#			elif GLM_COMPILER & GLM_COMPILER_CLANG
+#				pragma clang diagnostic push
+#				pragma clang diagnostic ignored "-Wgnu-anonymous-struct"
+#				pragma clang diagnostic ignored "-Wnested-anon-types"
+#			elif GLM_COMPILER & GLM_COMPILER_VC
+#				pragma warning(push)
+#				pragma warning(disable: 4201)  // nonstandard extension used : nameless struct/union
+#			endif
+#		endif
+
+#		if GLM_CONFIG_XYZW_ONLY
+			T x, y;
+#		elif GLM_CONFIG_ANONYMOUS_STRUCT == GLM_ENABLE
+			union
+			{
+				struct{ T x, y; };
+				struct{ T r, g; };
+				struct{ T s, t; };
+
+				typename detail::storage<2, T, detail::is_aligned<Q>::value>::type data;
+
+#				if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR
+					GLM_SWIZZLE2_2_MEMBERS(T, Q, x, y)
+					GLM_SWIZZLE2_2_MEMBERS(T, Q, r, g)
+					GLM_SWIZZLE2_2_MEMBERS(T, Q, s, t)
+					GLM_SWIZZLE2_3_MEMBERS(T, Q, x, y)
+					GLM_SWIZZLE2_3_MEMBERS(T, Q, r, g)
+					GLM_SWIZZLE2_3_MEMBERS(T, Q, s, t)
+					GLM_SWIZZLE2_4_MEMBERS(T, Q, x, y)
+					GLM_SWIZZLE2_4_MEMBERS(T, Q, r, g)
+					GLM_SWIZZLE2_4_MEMBERS(T, Q, s, t)
+#				endif
+			};
+#		else
+			union {T x, r, s;};
+			union {T y, g, t;};
+
+#			if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION
+				GLM_SWIZZLE_GEN_VEC_FROM_VEC2(T, Q)
+#			endif//GLM_CONFIG_SWIZZLE
+#		endif
+
+#		if GLM_SILENT_WARNINGS == GLM_ENABLE
+#			if GLM_COMPILER & GLM_COMPILER_CLANG
+#				pragma clang diagnostic pop
+#			elif GLM_COMPILER & GLM_COMPILER_GCC
+#				pragma GCC diagnostic pop
+#			elif GLM_COMPILER & GLM_COMPILER_VC
+#				pragma warning(pop)
+#			endif
+#		endif
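+
+		// [Illustrative sketch] With GLM_CONFIG_ANONYMOUS_STRUCT enabled, x/y,
+		// r/g and s/t name the same two components through the union above:
+		//
+		//   glm::vec2 uv(0.25f, 0.75f);
+		//   bool same = (uv.x == uv.s) && (uv.y == uv.t);  // true
+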
+
+		// -- Component accesses --
+
+		/// Return the count of components of the vector
+		typedef length_t length_type;
+		GLM_FUNC_DECL static GLM_CONSTEXPR length_type length(){return 2;}
+
+		GLM_FUNC_DECL GLM_CONSTEXPR T& operator[](length_type i);
+		GLM_FUNC_DECL GLM_CONSTEXPR T const& operator[](length_type i) const;
+
+		// -- Implicit basic constructors --
+
+		GLM_FUNC_DECL GLM_CONSTEXPR vec() GLM_DEFAULT;
+		GLM_FUNC_DECL GLM_CONSTEXPR vec(vec const& v) GLM_DEFAULT;
+		template<qualifier P>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, T, P> const& v);
+
+		// -- Explicit basic constructors --
+
+		GLM_FUNC_DECL GLM_CONSTEXPR explicit vec(T scalar);
+		GLM_FUNC_DECL GLM_CONSTEXPR vec(T x, T y);
+
+		// -- Conversion constructors --
+
+		template<typename U, qualifier P>
+		GLM_FUNC_DECL GLM_CONSTEXPR explicit vec(vec<1, U, P> const& v);
+
+		/// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification)
+		template<typename A, typename B>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec(A x, B y);
+		template<typename A, typename B>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, Q> const& x, B y);
+		template<typename A, typename B>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec(A x, vec<1, B, Q> const& y);
+		template<typename A, typename B>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, Q> const& x, vec<1, B, Q> const& y);
+
+		// -- Conversion vector constructors --
+
+		/// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification)
+		template<typename U, qualifier P>
+		GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<3, U, P> const& v);
+		/// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification)
+		template<typename U, qualifier P>
+		GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<4, U, P> const& v);
+
+		/// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification)
+		template<typename U, qualifier P>
+		GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<2, U, P> const& v);
+
+		// -- Swizzle constructors --
+#	if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR
+		template<int E0, int E1>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<2, T, Q, E0, E1,-1,-2> const& that)
+		{
+			*this = that();
+		}
+#	endif//GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR
+
+		// -- Unary arithmetic operators --
+
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator=(vec const& v) GLM_DEFAULT;
+
+		template<typename U>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator=(vec<2, U, Q> const& v);
+		template<typename U>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator+=(U scalar);
+		template<typename U>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator+=(vec<1, U, Q> const& v);
+		template<typename U>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator+=(vec<2, U, Q> const& v);
+		template<typename U>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator-=(U scalar);
+		template<typename U>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator-=(vec<1, U, Q> const& v);
+		template<typename U>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator-=(vec<2, U, Q> const& v);
+		template<typename U>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator*=(U scalar);
+		template<typename U>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator*=(vec<1, U, Q> const& v);
+		template<typename U>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator*=(vec<2, U, Q> const& v);
+		template<typename U>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator/=(U scalar);
+		template<typename U>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator/=(vec<1, U, Q> const& v);
+		template<typename U>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator/=(vec<2, U, Q> const& v);
+
+		// -- Increment and decrement operators --
+
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator++();
+		GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator--();
+		GLM_FUNC_DECL
GLM_CONSTEXPR vec<2, T, Q> operator++(int); + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator--(int); + + // -- Unary bit operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator%=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator%=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator%=(vec<2, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator&=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator&=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator&=(vec<2, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator|=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator|=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator|=(vec<2, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator^=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator^=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator^=(vec<2, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator<<=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator<<=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator<<=(vec<2, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator>>=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator>>=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator>>=(vec<2, U, Q> const& v); + }; + + // -- Unary operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v); + + // -- Binary operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator+(T scalar, vec<2, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator+(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator-(T scalar, vec<2, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator-(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator*(vec<2, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator*(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator*(T scalar, vec<2, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator*(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator*(vec<2, T, Q> const& 
v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator/(vec<2, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator/(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator/(T scalar, vec<2, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator/(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator/(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator%(vec<2, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator%(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator%(T scalar, vec<2, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator%(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator%(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator&(vec<2, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator&(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator&(T scalar, vec<2, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator&(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator&(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator|(vec<2, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator|(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator|(T scalar, vec<2, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator|(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator|(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator^(vec<2, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator^(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator^(T scalar, vec<2, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator^(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator^(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<2, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator<<(T scalar, vec<2, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<2, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL 
GLM_CONSTEXPR vec<2, T, Q> operator>>(T scalar, vec<2, T, Q> const& v);
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2);
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2);
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator~(vec<2, T, Q> const& v);
+
+	// -- Boolean operators --
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR bool operator==(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2);
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR bool operator!=(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2);
+
+	template<qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<2, bool, Q> operator&&(vec<2, bool, Q> const& v1, vec<2, bool, Q> const& v2);
+
+	template<qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<2, bool, Q> operator||(vec<2, bool, Q> const& v1, vec<2, bool, Q> const& v2);
+}//namespace glm
+
+#ifndef GLM_EXTERNAL_TEMPLATE
+#include "type_vec2.inl"
+#endif//GLM_EXTERNAL_TEMPLATE
diff --git a/MacroLibX/third_party/glm/detail/type_vec2.inl b/MacroLibX/third_party/glm/detail/type_vec2.inl
new file mode 100644
index 0000000..8e65d6b
--- /dev/null
+++ b/MacroLibX/third_party/glm/detail/type_vec2.inl
@@ -0,0 +1,913 @@
+/// @ref core
+
+#include "./compute_vector_relational.hpp"
+
+namespace glm
+{
+	// -- Implicit basic constructors --
+
+#	if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE
+		template<typename T, qualifier Q>
+		GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec()
+#			if GLM_CONFIG_CTOR_INIT != GLM_CTOR_INIT_DISABLE
+				: x(0), y(0)
+#			endif
+		{}
+
+		template<typename T, qualifier Q>
+		GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<2, T, Q> const& v)
+			: x(v.x), y(v.y)
+		{}
+#	endif
+
+	template<typename T, qualifier Q>
+	template<qualifier P>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<2, T, P> const& v)
+		: x(v.x), y(v.y)
+	{}
+
+	// -- Explicit basic constructors --
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(T scalar)
+		: x(scalar), y(scalar)
+	{}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(T _x, T _y)
+		: x(_x), y(_y)
+	{}
+
+	// -- Conversion scalar constructors --
+
+	template<typename T, qualifier Q>
+	template<typename U, qualifier P>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<1, U, P> const& v)
+		: x(static_cast<T>(v.x))
+		, y(static_cast<T>(v.x))
+	{}
+
+	template<typename T, qualifier Q>
+	template<typename A, typename B>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(A _x, B _y)
+		: x(static_cast<T>(_x))
+		, y(static_cast<T>(_y))
+	{}
+
+	template<typename T, qualifier Q>
+	template<typename A, typename B>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<1, A, Q> const& _x, B _y)
+		: x(static_cast<T>(_x.x))
+		, y(static_cast<T>(_y))
+	{}
+
+	template<typename T, qualifier Q>
+	template<typename A, typename B>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(A _x, vec<1, B, Q> const& _y)
+		: x(static_cast<T>(_x))
+		, y(static_cast<T>(_y.x))
+	{}
+
+	template<typename T, qualifier Q>
+	template<typename A, typename B>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<1, A, Q> const& _x, vec<1, B, Q> const& _y)
+		: x(static_cast<T>(_x.x))
+		, y(static_cast<T>(_y.x))
+	{}
+
+	// -- Conversion vector constructors --
+
+	template<typename T, qualifier Q>
+	template<typename U, qualifier P>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<2, U, P> const& v)
+		: x(static_cast<T>(v.x))
+		, y(static_cast<T>(v.y))
+	{}
+
+	template<typename T, qualifier Q>
+	template<typename U, qualifier P>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<3, U, P> const& v)
+		: x(static_cast<T>(v.x))
+		, y(static_cast<T>(v.y))
+	{}
+
+	template<typename T, qualifier Q>
+	template<typename U, qualifier P>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<4, U, P> const& v)
+		: x(static_cast<T>(v.x))
+		, y(static_cast<T>(v.y))
+	{}
+
+	// -- Component accesses --
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR T & vec<2, T, Q>::operator[](typename vec<2, T, Q>::length_type i)
+	{
+		assert(i
>= 0 && i < this->length()); + switch(i) + { + default: + case 0: + return x; + case 1: + return y; + } + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR T const& vec<2, T, Q>::operator[](typename vec<2, T, Q>::length_type i) const + { + assert(i >= 0 && i < this->length()); + switch(i) + { + default: + case 0: + return x; + case 1: + return y; + } + } + + // -- Unary arithmetic operators -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator=(vec<2, T, Q> const& v) + { + this->x = v.x; + this->y = v.y; + return *this; + } +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator=(vec<2, U, Q> const& v) + { + this->x = static_cast(v.x); + this->y = static_cast(v.y); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator+=(U scalar) + { + this->x += static_cast(scalar); + this->y += static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator+=(vec<1, U, Q> const& v) + { + this->x += static_cast(v.x); + this->y += static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator+=(vec<2, U, Q> const& v) + { + this->x += static_cast(v.x); + this->y += static_cast(v.y); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator-=(U scalar) + { + this->x -= static_cast(scalar); + this->y -= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator-=(vec<1, U, Q> const& v) + { + this->x -= static_cast(v.x); + this->y -= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator-=(vec<2, U, Q> const& v) + { + this->x -= static_cast(v.x); + this->y -= static_cast(v.y); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator*=(U scalar) + { + this->x *= static_cast(scalar); + this->y *= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator*=(vec<1, U, Q> const& v) + { + this->x *= static_cast(v.x); + this->y *= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator*=(vec<2, U, Q> const& v) + { + this->x *= static_cast(v.x); + this->y *= static_cast(v.y); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator/=(U scalar) + { + this->x /= static_cast(scalar); + this->y /= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator/=(vec<1, U, Q> const& v) + { + this->x /= static_cast(v.x); + this->y /= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator/=(vec<2, U, Q> const& v) + { + this->x /= static_cast(v.x); + this->y /= static_cast(v.y); + return *this; + } + + // -- Increment and decrement operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator++() + { + ++this->x; + ++this->y; + return *this; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & 
vec<2, T, Q>::operator--() + { + --this->x; + --this->y; + return *this; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> vec<2, T, Q>::operator++(int) + { + vec<2, T, Q> Result(*this); + ++*this; + return Result; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> vec<2, T, Q>::operator--(int) + { + vec<2, T, Q> Result(*this); + --*this; + return Result; + } + + // -- Unary bit operators -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator%=(U scalar) + { + this->x %= static_cast(scalar); + this->y %= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator%=(vec<1, U, Q> const& v) + { + this->x %= static_cast(v.x); + this->y %= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator%=(vec<2, U, Q> const& v) + { + this->x %= static_cast(v.x); + this->y %= static_cast(v.y); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator&=(U scalar) + { + this->x &= static_cast(scalar); + this->y &= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator&=(vec<1, U, Q> const& v) + { + this->x &= static_cast(v.x); + this->y &= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator&=(vec<2, U, Q> const& v) + { + this->x &= static_cast(v.x); + this->y &= static_cast(v.y); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator|=(U scalar) + { + this->x |= static_cast(scalar); + this->y |= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator|=(vec<1, U, Q> const& v) + { + this->x |= static_cast(v.x); + this->y |= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator|=(vec<2, U, Q> const& v) + { + this->x |= static_cast(v.x); + this->y |= static_cast(v.y); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator^=(U scalar) + { + this->x ^= static_cast(scalar); + this->y ^= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator^=(vec<1, U, Q> const& v) + { + this->x ^= static_cast(v.x); + this->y ^= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator^=(vec<2, U, Q> const& v) + { + this->x ^= static_cast(v.x); + this->y ^= static_cast(v.y); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator<<=(U scalar) + { + this->x <<= static_cast(scalar); + this->y <<= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator<<=(vec<1, U, Q> const& v) + { + this->x <<= static_cast(v.x); + this->y <<= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator<<=(vec<2, U, Q> const& v) + { + this->x <<= static_cast(v.x); + this->y <<= static_cast(v.y); + return *this; + } + + template + template + 
GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator>>=(U scalar) + { + this->x >>= static_cast(scalar); + this->y >>= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator>>=(vec<1, U, Q> const& v) + { + this->x >>= static_cast(v.x); + this->y >>= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator>>=(vec<2, U, Q> const& v) + { + this->x >>= static_cast(v.x); + this->y >>= static_cast(v.y); + return *this; + } + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v) + { + return v; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v) + { + return vec<2, T, Q>( + -v.x, + -v.y); + } + + // -- Binary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v, T scalar) + { + return vec<2, T, Q>( + v.x + scalar, + v.y + scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x + v2.x, + v1.y + v2.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator+(T scalar, vec<2, T, Q> const& v) + { + return vec<2, T, Q>( + scalar + v.x, + scalar + v.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator+(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x + v2.x, + v1.x + v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x + v2.x, + v1.y + v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v, T scalar) + { + return vec<2, T, Q>( + v.x - scalar, + v.y - scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x - v2.x, + v1.y - v2.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator-(T scalar, vec<2, T, Q> const& v) + { + return vec<2, T, Q>( + scalar - v.x, + scalar - v.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator-(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x - v2.x, + v1.x - v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x - v2.x, + v1.y - v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator*(vec<2, T, Q> const& v, T scalar) + { + return vec<2, T, Q>( + v.x * scalar, + v.y * scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator*(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x * v2.x, + v1.y * v2.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator*(T scalar, vec<2, T, Q> const& v) + { + return vec<2, T, Q>( + scalar * v.x, + scalar * v.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator*(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x * v2.x, + v1.x * v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator*(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x * v2.x, 
+ v1.y * v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator/(vec<2, T, Q> const& v, T scalar) + { + return vec<2, T, Q>( + v.x / scalar, + v.y / scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator/(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x / v2.x, + v1.y / v2.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator/(T scalar, vec<2, T, Q> const& v) + { + return vec<2, T, Q>( + scalar / v.x, + scalar / v.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator/(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x / v2.x, + v1.x / v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator/(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x / v2.x, + v1.y / v2.y); + } + + // -- Binary bit operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator%(vec<2, T, Q> const& v, T scalar) + { + return vec<2, T, Q>( + v.x % scalar, + v.y % scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator%(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x % v2.x, + v1.y % v2.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator%(T scalar, vec<2, T, Q> const& v) + { + return vec<2, T, Q>( + scalar % v.x, + scalar % v.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator%(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x % v2.x, + v1.x % v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator%(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x % v2.x, + v1.y % v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator&(vec<2, T, Q> const& v, T scalar) + { + return vec<2, T, Q>( + v.x & scalar, + v.y & scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator&(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x & v2.x, + v1.y & v2.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator&(T scalar, vec<2, T, Q> const& v) + { + return vec<2, T, Q>( + scalar & v.x, + scalar & v.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator&(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x & v2.x, + v1.x & v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator&(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x & v2.x, + v1.y & v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator|(vec<2, T, Q> const& v, T scalar) + { + return vec<2, T, Q>( + v.x | scalar, + v.y | scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator|(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x | v2.x, + v1.y | v2.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator|(T scalar, vec<2, T, Q> const& v) + { + return vec<2, T, Q>( + scalar | v.x, + scalar | v.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator|(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x | v2.x, + v1.x | v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator|(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x | v2.x, + 
v1.y | v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator^(vec<2, T, Q> const& v, T scalar) + { + return vec<2, T, Q>( + v.x ^ scalar, + v.y ^ scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator^(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x ^ v2.x, + v1.y ^ v2.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator^(T scalar, vec<2, T, Q> const& v) + { + return vec<2, T, Q>( + scalar ^ v.x, + scalar ^ v.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator^(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x ^ v2.x, + v1.x ^ v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator^(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x ^ v2.x, + v1.y ^ v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<2, T, Q> const& v, T scalar) + { + return vec<2, T, Q>( + v.x << scalar, + v.y << scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x << v2.x, + v1.y << v2.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator<<(T scalar, vec<2, T, Q> const& v) + { + return vec<2, T, Q>( + scalar << v.x, + scalar << v.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x << v2.x, + v1.x << v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x << v2.x, + v1.y << v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<2, T, Q> const& v, T scalar) + { + return vec<2, T, Q>( + v.x >> scalar, + v.y >> scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x >> v2.x, + v1.y >> v2.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator>>(T scalar, vec<2, T, Q> const& v) + { + return vec<2, T, Q>( + scalar >> v.x, + scalar >> v.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x >> v2.x, + v1.x >> v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return vec<2, T, Q>( + v1.x >> v2.x, + v1.y >> v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator~(vec<2, T, Q> const& v) + { + return vec<2, T, Q>( + ~v.x, + ~v.y); + } + + // -- Boolean operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator==(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return + detail::compute_equal::is_iec559>::call(v1.x, v2.x) && + detail::compute_equal::is_iec559>::call(v1.y, v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator!=(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) + { + return !(v1 == v2); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, bool, Q> operator&&(vec<2, bool, Q> const& v1, vec<2, bool, Q> const& v2) + { + return vec<2, bool, Q>(v1.x && v2.x, v1.y && v2.y); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, bool, Q> operator||(vec<2, bool, Q> const& v1, vec<2, bool, Q> 
const& v2)
+	{
+		return vec<2, bool, Q>(v1.x || v2.x, v1.y || v2.y);
+	}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/detail/type_vec3.hpp b/MacroLibX/third_party/glm/detail/type_vec3.hpp
new file mode 100644
index 0000000..d83cde6
--- /dev/null
+++ b/MacroLibX/third_party/glm/detail/type_vec3.hpp
@@ -0,0 +1,432 @@
+/// @ref core
+/// @file glm/detail/type_vec3.hpp
+
+#pragma once
+
+#include "qualifier.hpp"
+#if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR
+#	include "_swizzle.hpp"
+#elif GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION
+#	include "_swizzle_func.hpp"
+#endif
+#include <cstddef>
+
+namespace glm
+{
+	template<typename T, qualifier Q>
+	struct vec<3, T, Q>
+	{
+		// -- Implementation detail --
+
+		typedef T value_type;
+		typedef vec<3, T, Q> type;
+		typedef vec<3, bool, Q> bool_type;
+
+		// -- Data --
+
+#		if GLM_SILENT_WARNINGS == GLM_ENABLE
+#			if GLM_COMPILER & GLM_COMPILER_GCC
+#				pragma GCC diagnostic push
+#				pragma GCC diagnostic ignored "-Wpedantic"
+#			elif GLM_COMPILER & GLM_COMPILER_CLANG
+#				pragma clang diagnostic push
+#				pragma clang diagnostic ignored "-Wgnu-anonymous-struct"
+#				pragma clang diagnostic ignored "-Wnested-anon-types"
+#			elif GLM_COMPILER & GLM_COMPILER_VC
+#				pragma warning(push)
+#				pragma warning(disable: 4201)  // nonstandard extension used : nameless struct/union
+#				if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE
+#					pragma warning(disable: 4324)  // structure was padded due to alignment specifier
+#				endif
+#			endif
+#		endif
+
+#		if GLM_CONFIG_XYZW_ONLY
+			T x, y, z;
+#		elif GLM_CONFIG_ANONYMOUS_STRUCT == GLM_ENABLE
+			union
+			{
+				struct{ T x, y, z; };
+				struct{ T r, g, b; };
+				struct{ T s, t, p; };
+
+				typename detail::storage<3, T, detail::is_aligned<Q>::value>::type data;
+
+#				if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR
+					GLM_SWIZZLE3_2_MEMBERS(T, Q, x, y, z)
+					GLM_SWIZZLE3_2_MEMBERS(T, Q, r, g, b)
+					GLM_SWIZZLE3_2_MEMBERS(T, Q, s, t, p)
+					GLM_SWIZZLE3_3_MEMBERS(T, Q, x, y, z)
+					GLM_SWIZZLE3_3_MEMBERS(T, Q, r, g, b)
+					GLM_SWIZZLE3_3_MEMBERS(T, Q, s, t, p)
+					GLM_SWIZZLE3_4_MEMBERS(T, Q, x, y, z)
+					GLM_SWIZZLE3_4_MEMBERS(T, Q, r, g, b)
+					GLM_SWIZZLE3_4_MEMBERS(T, Q, s, t, p)
+#				endif
+			};
+#		else
+			union { T x, r, s; };
+			union { T y, g, t; };
+			union { T z, b, p; };
+
+#			if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION
+				GLM_SWIZZLE_GEN_VEC_FROM_VEC3(T, Q)
+#			endif//GLM_CONFIG_SWIZZLE
+#		endif//GLM_LANG
+
+#		if GLM_SILENT_WARNINGS == GLM_ENABLE
+#			if GLM_COMPILER & GLM_COMPILER_CLANG
+#				pragma clang diagnostic pop
+#			elif GLM_COMPILER & GLM_COMPILER_GCC
+#				pragma GCC diagnostic pop
+#			elif GLM_COMPILER & GLM_COMPILER_VC
+#				pragma warning(pop)
+#			endif
+#		endif
+
+		// -- Component accesses --
+
+		/// Return the count of components of the vector
+		typedef length_t length_type;
+		GLM_FUNC_DECL static GLM_CONSTEXPR length_type length(){return 3;}
+
+		GLM_FUNC_DECL GLM_CONSTEXPR T & operator[](length_type i);
+		GLM_FUNC_DECL GLM_CONSTEXPR T const& operator[](length_type i) const;
+
+		// -- Implicit basic constructors --
+
+		GLM_FUNC_DECL GLM_CONSTEXPR vec() GLM_DEFAULT;
+		GLM_FUNC_DECL GLM_CONSTEXPR vec(vec const& v) GLM_DEFAULT;
+		template<qualifier P>
+		GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<3, T, P> const& v);
+
+		// -- Explicit basic constructors --
+
+		GLM_FUNC_DECL GLM_CONSTEXPR explicit vec(T scalar);
+		GLM_FUNC_DECL GLM_CONSTEXPR vec(T a, T b, T c);
+
+		// -- Conversion scalar constructors --
+
+		template<typename U, qualifier P>
+		GLM_FUNC_DECL GLM_CONSTEXPR explicit vec(vec<1, U, P> const& v);
+
+		/// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification)
+		template<typename X, typename Y, typename Z>
GLM_FUNC_DECL GLM_CONSTEXPR vec(X x, Y y, Z z); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, Y _y, Z _z); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, vec<1, Y, Q> const& _y, Z _z); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, Z _z); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, Y _y, vec<1, Z, Q> const& _z); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, Y _y, vec<1, Z, Q> const& _z); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z); + + // -- Conversion vector constructors -- + + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, A, P> const& _xy, B _z); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, A, P> const& _xy, vec<1, B, P> const& _z); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(A _x, vec<2, B, P> const& _yz); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, P> const& _x, vec<2, B, P> const& _yz); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<4, U, P> const& v); + + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<3, U, P> const& v); + + // -- Swizzle constructors -- +# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<3, T, Q, E0, E1, E2, -1> const& that) + { + *this = that(); + } + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<2, T, Q, E0, E1, -1, -2> const& v, T const& scalar) + { + *this = vec(v(), scalar); + } + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(T const& scalar, detail::_swizzle<2, T, Q, E0, E1, -1, -2> const& v) + { + *this = vec(scalar, v()); + } +# endif//GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR + + // -- Unary arithmetic operators -- + + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q>& operator=(vec<3, T, Q> const& v) GLM_DEFAULT; + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator=(vec<3, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator+=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator+=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator+=(vec<3, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator-=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator-=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator-=(vec<3, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator*=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator*=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator*=(vec<3, U, Q> const& v); + template + GLM_FUNC_DECL 
GLM_CONSTEXPR vec<3, T, Q> & operator/=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator/=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator/=(vec<3, U, Q> const& v); + + // -- Increment and decrement operators -- + + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator++(); + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator--(); + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator++(int); + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator--(int); + + // -- Unary bit operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator%=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator%=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator%=(vec<3, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator&=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator&=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator&=(vec<3, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator|=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator|=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator|=(vec<3, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator^=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator^=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator^=(vec<3, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator<<=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator<<=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator<<=(vec<3, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator>>=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator>>=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator>>=(vec<3, U, Q> const& v); + }; + + // -- Unary operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v); + + // -- Binary operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator+(T scalar, vec<3, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator+(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator-(T scalar, vec<3, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator-(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v, T 
scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator*(T scalar, vec<3, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator*(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator/(vec<3, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator/(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator/(T scalar, vec<3, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator/(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator/(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator%(vec<3, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator%(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator%(T scalar, vec<3, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator%(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator%(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator&(vec<3, T, Q> const& v1, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator&(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator&(T scalar, vec<3, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator&(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator&(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator|(vec<3, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator|(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator|(T scalar, vec<3, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator|(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator|(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator^(vec<3, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator^(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator^(T scalar, vec<3, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator^(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator^(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator<<(vec<3, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator<<(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator<<(T scalar, vec<3, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> 
operator<<(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator<<(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<3, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator>>(T scalar, vec<3, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator~(vec<3, T, Q> const& v); + + // -- Boolean operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR bool operator==(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR bool operator!=(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, bool, Q> operator&&(vec<3, bool, Q> const& v1, vec<3, bool, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<3, bool, Q> operator||(vec<3, bool, Q> const& v1, vec<3, bool, Q> const& v2); +}//namespace glm + +#ifndef GLM_EXTERNAL_TEMPLATE +#include "type_vec3.inl" +#endif//GLM_EXTERNAL_TEMPLATE diff --git a/MacroLibX/third_party/glm/detail/type_vec3.inl b/MacroLibX/third_party/glm/detail/type_vec3.inl new file mode 100644 index 0000000..6532c9e --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_vec3.inl @@ -0,0 +1,1068 @@ +/// @ref core + +#include "compute_vector_relational.hpp" + +namespace glm +{ + // -- Implicit basic constructors -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec() +# if GLM_CONFIG_CTOR_INIT != GLM_CTOR_INIT_DISABLE + : x(0), y(0), z(0) +# endif + {} + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<3, T, Q> const& v) + : x(v.x), y(v.y), z(v.z) + {} +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<3, T, P> const& v) + : x(v.x), y(v.y), z(v.z) + {} + + // -- Explicit basic constructors -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(T scalar) + : x(scalar), y(scalar), z(scalar) + {} + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(T _x, T _y, T _z) + : x(_x), y(_y), z(_z) + {} + + // -- Conversion scalar constructors -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<1, U, P> const& v) + : x(static_cast(v.x)) + , y(static_cast(v.x)) + , z(static_cast(v.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(X _x, Y _y, Z _z) + : x(static_cast(_x)) + , y(static_cast(_y)) + , z(static_cast(_z)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<1, X, Q> const& _x, Y _y, Z _z) + : x(static_cast(_x.x)) + , y(static_cast(_y)) + , z(static_cast(_z)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(X _x, vec<1, Y, Q> const& _y, Z _z) + : x(static_cast(_x)) + , y(static_cast(_y.x)) + , z(static_cast(_z)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, Z _z) + : x(static_cast(_x.x)) + , y(static_cast(_y.x)) + , z(static_cast(_z)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR 
vec<3, T, Q>::vec(X _x, Y _y, vec<1, Z, Q> const& _z) + : x(static_cast(_x)) + , y(static_cast(_y)) + , z(static_cast(_z.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<1, X, Q> const& _x, Y _y, vec<1, Z, Q> const& _z) + : x(static_cast(_x.x)) + , y(static_cast(_y)) + , z(static_cast(_z.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(X _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z) + : x(static_cast(_x)) + , y(static_cast(_y.x)) + , z(static_cast(_z.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z) + : x(static_cast(_x.x)) + , y(static_cast(_y.x)) + , z(static_cast(_z.x)) + {} + + // -- Conversion vector constructors -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<2, A, P> const& _xy, B _z) + : x(static_cast(_xy.x)) + , y(static_cast(_xy.y)) + , z(static_cast(_z)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<2, A, P> const& _xy, vec<1, B, P> const& _z) + : x(static_cast(_xy.x)) + , y(static_cast(_xy.y)) + , z(static_cast(_z.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(A _x, vec<2, B, P> const& _yz) + : x(static_cast(_x)) + , y(static_cast(_yz.x)) + , z(static_cast(_yz.y)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<1, A, P> const& _x, vec<2, B, P> const& _yz) + : x(static_cast(_x.x)) + , y(static_cast(_yz.x)) + , z(static_cast(_yz.y)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<3, U, P> const& v) + : x(static_cast(v.x)) + , y(static_cast(v.y)) + , z(static_cast(v.z)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<4, U, P> const& v) + : x(static_cast(v.x)) + , y(static_cast(v.y)) + , z(static_cast(v.z)) + {} + + // -- Component accesses -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR T & vec<3, T, Q>::operator[](typename vec<3, T, Q>::length_type i) + { + assert(i >= 0 && i < this->length()); + switch(i) + { + default: + case 0: + return x; + case 1: + return y; + case 2: + return z; + } + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR T const& vec<3, T, Q>::operator[](typename vec<3, T, Q>::length_type i) const + { + assert(i >= 0 && i < this->length()); + switch(i) + { + default: + case 0: + return x; + case 1: + return y; + case 2: + return z; + } + } + + // -- Unary arithmetic operators -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>& vec<3, T, Q>::operator=(vec<3, T, Q> const& v) + { + this->x = v.x; + this->y = v.y; + this->z = v.z; + return *this; + } +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>& vec<3, T, Q>::operator=(vec<3, U, Q> const& v) + { + this->x = static_cast(v.x); + this->y = static_cast(v.y); + this->z = static_cast(v.z); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator+=(U scalar) + { + this->x += static_cast(scalar); + this->y += static_cast(scalar); + this->z += static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator+=(vec<1, U, Q> const& v) + { + this->x += static_cast(v.x); + this->y += static_cast(v.x); + this->z += static_cast(v.x); + return *this; + } + + template + template + 
GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator+=(vec<3, U, Q> const& v) + { + this->x += static_cast(v.x); + this->y += static_cast(v.y); + this->z += static_cast(v.z); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator-=(U scalar) + { + this->x -= static_cast(scalar); + this->y -= static_cast(scalar); + this->z -= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator-=(vec<1, U, Q> const& v) + { + this->x -= static_cast(v.x); + this->y -= static_cast(v.x); + this->z -= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator-=(vec<3, U, Q> const& v) + { + this->x -= static_cast(v.x); + this->y -= static_cast(v.y); + this->z -= static_cast(v.z); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator*=(U scalar) + { + this->x *= static_cast(scalar); + this->y *= static_cast(scalar); + this->z *= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator*=(vec<1, U, Q> const& v) + { + this->x *= static_cast(v.x); + this->y *= static_cast(v.x); + this->z *= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator*=(vec<3, U, Q> const& v) + { + this->x *= static_cast(v.x); + this->y *= static_cast(v.y); + this->z *= static_cast(v.z); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator/=(U v) + { + this->x /= static_cast(v); + this->y /= static_cast(v); + this->z /= static_cast(v); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator/=(vec<1, U, Q> const& v) + { + this->x /= static_cast(v.x); + this->y /= static_cast(v.x); + this->z /= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator/=(vec<3, U, Q> const& v) + { + this->x /= static_cast(v.x); + this->y /= static_cast(v.y); + this->z /= static_cast(v.z); + return *this; + } + + // -- Increment and decrement operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator++() + { + ++this->x; + ++this->y; + ++this->z; + return *this; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator--() + { + --this->x; + --this->y; + --this->z; + return *this; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> vec<3, T, Q>::operator++(int) + { + vec<3, T, Q> Result(*this); + ++*this; + return Result; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> vec<3, T, Q>::operator--(int) + { + vec<3, T, Q> Result(*this); + --*this; + return Result; + } + + // -- Unary bit operators -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator%=(U scalar) + { + this->x %= scalar; + this->y %= scalar; + this->z %= scalar; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator%=(vec<1, U, Q> const& v) + { + this->x %= v.x; + this->y %= v.x; + this->z %= v.x; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator%=(vec<3, U, Q> const& v) + { 
+ this->x %= v.x; + this->y %= v.y; + this->z %= v.z; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator&=(U scalar) + { + this->x &= scalar; + this->y &= scalar; + this->z &= scalar; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator&=(vec<1, U, Q> const& v) + { + this->x &= v.x; + this->y &= v.x; + this->z &= v.x; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator&=(vec<3, U, Q> const& v) + { + this->x &= v.x; + this->y &= v.y; + this->z &= v.z; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator|=(U scalar) + { + this->x |= scalar; + this->y |= scalar; + this->z |= scalar; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator|=(vec<1, U, Q> const& v) + { + this->x |= v.x; + this->y |= v.x; + this->z |= v.x; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator|=(vec<3, U, Q> const& v) + { + this->x |= v.x; + this->y |= v.y; + this->z |= v.z; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator^=(U scalar) + { + this->x ^= scalar; + this->y ^= scalar; + this->z ^= scalar; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator^=(vec<1, U, Q> const& v) + { + this->x ^= v.x; + this->y ^= v.x; + this->z ^= v.x; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator^=(vec<3, U, Q> const& v) + { + this->x ^= v.x; + this->y ^= v.y; + this->z ^= v.z; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator<<=(U scalar) + { + this->x <<= scalar; + this->y <<= scalar; + this->z <<= scalar; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator<<=(vec<1, U, Q> const& v) + { + this->x <<= static_cast(v.x); + this->y <<= static_cast(v.x); + this->z <<= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator<<=(vec<3, U, Q> const& v) + { + this->x <<= static_cast(v.x); + this->y <<= static_cast(v.y); + this->z <<= static_cast(v.z); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator>>=(U scalar) + { + this->x >>= static_cast(scalar); + this->y >>= static_cast(scalar); + this->z >>= static_cast(scalar); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator>>=(vec<1, U, Q> const& v) + { + this->x >>= static_cast(v.x); + this->y >>= static_cast(v.x); + this->z >>= static_cast(v.x); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator>>=(vec<3, U, Q> const& v) + { + this->x >>= static_cast(v.x); + this->y >>= static_cast(v.y); + this->z >>= static_cast(v.z); + return *this; + } + + // -- Unary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& v) + { + return v; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + 
-v.x, + -v.y, + -v.z); + } + + // -- Binary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& v, T scalar) + { + return vec<3, T, Q>( + v.x + scalar, + v.y + scalar, + v.z + scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) + { + return vec<3, T, Q>( + v.x + scalar.x, + v.y + scalar.x, + v.z + scalar.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator+(T scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar + v.x, + scalar + v.y, + scalar + v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator+(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar.x + v.x, + scalar.x + v.y, + scalar.x + v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) + { + return vec<3, T, Q>( + v1.x + v2.x, + v1.y + v2.y, + v1.z + v2.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v, T scalar) + { + return vec<3, T, Q>( + v.x - scalar, + v.y - scalar, + v.z - scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) + { + return vec<3, T, Q>( + v.x - scalar.x, + v.y - scalar.x, + v.z - scalar.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator-(T scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar - v.x, + scalar - v.y, + scalar - v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator-(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar.x - v.x, + scalar.x - v.y, + scalar.x - v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) + { + return vec<3, T, Q>( + v1.x - v2.x, + v1.y - v2.y, + v1.z - v2.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v, T scalar) + { + return vec<3, T, Q>( + v.x * scalar, + v.y * scalar, + v.z * scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) + { + return vec<3, T, Q>( + v.x * scalar.x, + v.y * scalar.x, + v.z * scalar.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator*(T scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar * v.x, + scalar * v.y, + scalar * v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator*(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar.x * v.x, + scalar.x * v.y, + scalar.x * v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) + { + return vec<3, T, Q>( + v1.x * v2.x, + v1.y * v2.y, + v1.z * v2.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator/(vec<3, T, Q> const& v, T scalar) + { + return vec<3, T, Q>( + v.x / scalar, + v.y / scalar, + v.z / scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator/(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) + { + return vec<3, T, Q>( + v.x / scalar.x, + v.y / scalar.x, + v.z / scalar.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator/(T scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar / v.x, + 
scalar / v.y, + scalar / v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator/(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar.x / v.x, + scalar.x / v.y, + scalar.x / v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator/(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) + { + return vec<3, T, Q>( + v1.x / v2.x, + v1.y / v2.y, + v1.z / v2.z); + } + + // -- Binary bit operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator%(vec<3, T, Q> const& v, T scalar) + { + return vec<3, T, Q>( + v.x % scalar, + v.y % scalar, + v.z % scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator%(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) + { + return vec<3, T, Q>( + v.x % scalar.x, + v.y % scalar.x, + v.z % scalar.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator%(T scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar % v.x, + scalar % v.y, + scalar % v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator%(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar.x % v.x, + scalar.x % v.y, + scalar.x % v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator%(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) + { + return vec<3, T, Q>( + v1.x % v2.x, + v1.y % v2.y, + v1.z % v2.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator&(vec<3, T, Q> const& v, T scalar) + { + return vec<3, T, Q>( + v.x & scalar, + v.y & scalar, + v.z & scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator&(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) + { + return vec<3, T, Q>( + v.x & scalar.x, + v.y & scalar.x, + v.z & scalar.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator&(T scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar & v.x, + scalar & v.y, + scalar & v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator&(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar.x & v.x, + scalar.x & v.y, + scalar.x & v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator&(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) + { + return vec<3, T, Q>( + v1.x & v2.x, + v1.y & v2.y, + v1.z & v2.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator|(vec<3, T, Q> const& v, T scalar) + { + return vec<3, T, Q>( + v.x | scalar, + v.y | scalar, + v.z | scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator|(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) + { + return vec<3, T, Q>( + v.x | scalar.x, + v.y | scalar.x, + v.z | scalar.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator|(T scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar | v.x, + scalar | v.y, + scalar | v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator|(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar.x | v.x, + scalar.x | v.y, + scalar.x | v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator|(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) + { + return vec<3, T, Q>( + v1.x | v2.x, + v1.y | v2.y, + v1.z | v2.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator^(vec<3, T, Q> const& v, T scalar) + { + return vec<3, T, Q>( + v.x ^ 
scalar, + v.y ^ scalar, + v.z ^ scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator^(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) + { + return vec<3, T, Q>( + v.x ^ scalar.x, + v.y ^ scalar.x, + v.z ^ scalar.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator^(T scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar ^ v.x, + scalar ^ v.y, + scalar ^ v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator^(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar.x ^ v.x, + scalar.x ^ v.y, + scalar.x ^ v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator^(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) + { + return vec<3, T, Q>( + v1.x ^ v2.x, + v1.y ^ v2.y, + v1.z ^ v2.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator<<(vec<3, T, Q> const& v, T scalar) + { + return vec<3, T, Q>( + v.x << scalar, + v.y << scalar, + v.z << scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator<<(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) + { + return vec<3, T, Q>( + v.x << scalar.x, + v.y << scalar.x, + v.z << scalar.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator<<(T scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar << v.x, + scalar << v.y, + scalar << v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator<<(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar.x << v.x, + scalar.x << v.y, + scalar.x << v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator<<(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) + { + return vec<3, T, Q>( + v1.x << v2.x, + v1.y << v2.y, + v1.z << v2.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<3, T, Q> const& v, T scalar) + { + return vec<3, T, Q>( + v.x >> scalar, + v.y >> scalar, + v.z >> scalar); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) + { + return vec<3, T, Q>( + v.x >> scalar.x, + v.y >> scalar.x, + v.z >> scalar.x); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator>>(T scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar >> v.x, + scalar >> v.y, + scalar >> v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + scalar.x >> v.x, + scalar.x >> v.y, + scalar.x >> v.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) + { + return vec<3, T, Q>( + v1.x >> v2.x, + v1.y >> v2.y, + v1.z >> v2.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator~(vec<3, T, Q> const& v) + { + return vec<3, T, Q>( + ~v.x, + ~v.y, + ~v.z); + } + + // -- Boolean operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator==(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) + { + return + detail::compute_equal::is_iec559>::call(v1.x, v2.x) && + detail::compute_equal::is_iec559>::call(v1.y, v2.y) && + detail::compute_equal::is_iec559>::call(v1.z, v2.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator!=(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) + { + return !(v1 == v2); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, bool, Q> operator&&(vec<3, bool, Q> 
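+ // operator== above goes through detail::compute_equal so IEC 559 (IEEE 754)
+ // floating-point types can get a dedicated comparison while integral types
+ // use plain ==; it yields a single bool over all components. Component-wise
+ // comparison is the job of the vector relational functions instead. Sketch
+ // (illustrative values, assuming the public <glm/glm.hpp> header):
+ //
+ //   glm::vec3 a(1.0f, 2.0f, 3.0f), b(1.0f, 2.0f, 4.0f);
+ //   bool same = (a == b);               // false: one component differs
+ //   glm::bvec3 m = glm::equal(a, b);    // (true, true, false)
+ //   bool anyEq = glm::any(m);           // true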
const& v1, vec<3, bool, Q> const& v2) + { + return vec<3, bool, Q>(v1.x && v2.x, v1.y && v2.y, v1.z && v2.z); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, bool, Q> operator||(vec<3, bool, Q> const& v1, vec<3, bool, Q> const& v2) + { + return vec<3, bool, Q>(v1.x || v2.x, v1.y || v2.y, v1.z || v2.z); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/detail/type_vec4.hpp b/MacroLibX/third_party/glm/detail/type_vec4.hpp new file mode 100644 index 0000000..4a36434 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_vec4.hpp @@ -0,0 +1,505 @@ +/// @ref core +/// @file glm/detail/type_vec4.hpp + +#pragma once + +#include "qualifier.hpp" +#if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR +# include "_swizzle.hpp" +#elif GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION +# include "_swizzle_func.hpp" +#endif +#include + +namespace glm +{ + template + struct vec<4, T, Q> + { + // -- Implementation detail -- + + typedef T value_type; + typedef vec<4, T, Q> type; + typedef vec<4, bool, Q> bool_type; + + // -- Data -- + +# if GLM_SILENT_WARNINGS == GLM_ENABLE +# if GLM_COMPILER & GLM_COMPILER_GCC +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Wpedantic" +# elif GLM_COMPILER & GLM_COMPILER_CLANG +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wgnu-anonymous-struct" +# pragma clang diagnostic ignored "-Wnested-anon-types" +# elif GLM_COMPILER & GLM_COMPILER_VC +# pragma warning(push) +# pragma warning(disable: 4201) // nonstandard extension used : nameless struct/union +# endif +# endif + +# if GLM_CONFIG_XYZW_ONLY + T x, y, z, w; +# elif GLM_CONFIG_ANONYMOUS_STRUCT == GLM_ENABLE + union + { + struct { T x, y, z, w; }; + struct { T r, g, b, a; }; + struct { T s, t, p, q; }; + + typename detail::storage<4, T, detail::is_aligned::value>::type data; + +# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR + GLM_SWIZZLE4_2_MEMBERS(T, Q, x, y, z, w) + GLM_SWIZZLE4_2_MEMBERS(T, Q, r, g, b, a) + GLM_SWIZZLE4_2_MEMBERS(T, Q, s, t, p, q) + GLM_SWIZZLE4_3_MEMBERS(T, Q, x, y, z, w) + GLM_SWIZZLE4_3_MEMBERS(T, Q, r, g, b, a) + GLM_SWIZZLE4_3_MEMBERS(T, Q, s, t, p, q) + GLM_SWIZZLE4_4_MEMBERS(T, Q, x, y, z, w) + GLM_SWIZZLE4_4_MEMBERS(T, Q, r, g, b, a) + GLM_SWIZZLE4_4_MEMBERS(T, Q, s, t, p, q) +# endif + }; +# else + union { T x, r, s; }; + union { T y, g, t; }; + union { T z, b, p; }; + union { T w, a, q; }; + +# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION + GLM_SWIZZLE_GEN_VEC_FROM_VEC4(T, Q) +# endif +# endif + +# if GLM_SILENT_WARNINGS == GLM_ENABLE +# if GLM_COMPILER & GLM_COMPILER_CLANG +# pragma clang diagnostic pop +# elif GLM_COMPILER & GLM_COMPILER_GCC +# pragma GCC diagnostic pop +# elif GLM_COMPILER & GLM_COMPILER_VC +# pragma warning(pop) +# endif +# endif + + // -- Component accesses -- + + typedef length_t length_type; + + /// Return the count of components of the vector + GLM_FUNC_DECL static GLM_CONSTEXPR length_type length(){return 4;} + + GLM_FUNC_DECL GLM_CONSTEXPR T & operator[](length_type i); + GLM_FUNC_DECL GLM_CONSTEXPR T const& operator[](length_type i) const; + + // -- Implicit basic constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR vec() GLM_DEFAULT; + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<4, T, Q> const& v) GLM_DEFAULT; + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<4, T, P> const& v); + + // -- Explicit basic constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR explicit vec(T scalar); + GLM_FUNC_DECL GLM_CONSTEXPR vec(T x, T y, T z, T w); + + // -- Conversion scalar constructors -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR explicit 
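+ // The anonymous-union layout above lets the same storage be read as
+ // position (x, y, z, w), colour (r, g, b, a) or texture (s, t, p, q)
+ // coordinates, as in GLSL; only the x/y/z/w set exists when
+ // GLM_CONFIG_XYZW_ONLY is enabled. Illustrative sketch:
+ //
+ //   glm::vec4 c(0.2f, 0.4f, 0.6f, 1.0f);
+ //   float alpha = c.a;   // aliases c.w == 1.0f
+ //   float p     = c.p;   // aliases c.z == 0.6f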
vec(vec<1, U, P> const& v); + + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, Y _y, Z _z, W _w); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, Y _y, Z _z, W _w); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, vec<1, Y, Q> const& _y, Z _z, W _w); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, Z _z, W _w); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, Y _y, vec<1, Z, Q> const& _z, W _w); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, Y _y, vec<1, Z, Q> const& _z, W _w); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z, W _w); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z, W _w); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, Y _y, Z _z, vec<1, W, Q> const& _w); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, vec<1, Y, Q> const& _y, Z _z, vec<1, W, Q> const& _w); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, Z _z, vec<1, W, Q> const& _w); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, Y _y, vec<1, Z, Q> const& _z, vec<1, W, Q> const& _w); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, Y _y, vec<1, Z, Q> const& _z, vec<1, W, Q> const& _w); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z, vec<1, W, Q> const& _w); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _Y, vec<1, Z, Q> const& _z, vec<1, W, Q> const& _w); + + // -- Conversion vector constructors -- + + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, A, P> const& _xy, B _z, C _w); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, A, P> const& _xy, vec<1, B, P> const& _z, C _w); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, A, P> const& _xy, B _z, vec<1, C, P> const& _w); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, A, P> const& _xy, vec<1, B, P> const& _z, vec<1, C, P> const& _w); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(A _x, vec<2, B, P> const& _yz, C _w); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, P> const& _x, vec<2, B, P> const& _yz, C _w); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(A _x, vec<2, B, P> const& _yz, vec<1, C, P> const& _w); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, P> const& _x, vec<2, B, P> const& _yz, vec<1, C, P> const& _w); + /// Explicit conversions (From section 
5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(A _x, B _y, vec<2, C, P> const& _zw); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, P> const& _x, B _y, vec<2, C, P> const& _zw); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(A _x, vec<1, B, P> const& _y, vec<2, C, P> const& _zw); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, P> const& _x, vec<1, B, P> const& _y, vec<2, C, P> const& _zw); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<3, A, P> const& _xyz, B _w); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<3, A, P> const& _xyz, vec<1, B, P> const& _w); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(A _x, vec<3, B, P> const& _yzw); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, P> const& _x, vec<3, B, P> const& _yzw); + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, A, P> const& _xy, vec<2, B, P> const& _zw); + + /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) + template + GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<4, U, P> const& v); + + // -- Swizzle constructors -- +# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<4, T, Q, E0, E1, E2, E3> const& that) + { + *this = that(); + } + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<2, T, Q, E0, E1, -1, -2> const& v, detail::_swizzle<2, T, Q, F0, F1, -1, -2> const& u) + { + *this = vec<4, T, Q>(v(), u()); + } + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(T const& x, T const& y, detail::_swizzle<2, T, Q, E0, E1, -1, -2> const& v) + { + *this = vec<4, T, Q>(x, y, v()); + } + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(T const& x, detail::_swizzle<2, T, Q, E0, E1, -1, -2> const& v, T const& w) + { + *this = vec<4, T, Q>(x, v(), w); + } + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<2, T, Q, E0, E1, -1, -2> const& v, T const& z, T const& w) + { + *this = vec<4, T, Q>(v(), z, w); + } + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<3, T, Q, E0, E1, E2, -1> const& v, T const& w) + { + *this = vec<4, T, Q>(v(), w); + } + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec(T const& x, detail::_swizzle<3, T, Q, E0, E1, E2, -1> const& v) + { + *this = vec<4, T, Q>(x, v()); + } +# endif//GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR + + // -- Unary arithmetic operators -- + + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator=(vec<4, T, Q> const& v) GLM_DEFAULT; + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator=(vec<4, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& 
operator+=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator+=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator+=(vec<4, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator-=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator-=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator-=(vec<4, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator*=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator*=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator*=(vec<4, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator/=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator/=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator/=(vec<4, U, Q> const& v); + + // -- Increment and decrement operators -- + + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator++(); + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator--(); + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator++(int); + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator--(int); + + // -- Unary bit operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator%=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator%=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator%=(vec<4, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator&=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator&=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator&=(vec<4, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator|=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator|=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator|=(vec<4, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator^=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator^=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator^=(vec<4, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator<<=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator<<=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator<<=(vec<4, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator>>=(U scalar); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator>>=(vec<1, U, Q> const& v); + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator>>=(vec<4, U, Q> const& v); + }; + + // -- Unary operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v); + + // -- Binary operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v, T const & scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator+(T scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator+(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2); + + 
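+ // Every binary operator is declared in up to five forms -- vec4 op scalar,
+ // vec4 op vec1, scalar op vec4, vec1 op vec4 and vec4 op vec4 -- so mixed
+ // expressions work with the scalar on either side, as in GLSL. Sketch
+ // (illustrative values):
+ //
+ //   glm::vec4 a(1.0f, 2.0f, 3.0f, 4.0f);
+ //   glm::vec4 b = a + 1.0f;   // (2, 3, 4, 5)
+ //   glm::vec4 c = 1.0f + a;   // same result, scalar on the left
+ //   glm::vec4 d = a + b;      // component-wise: (3, 5, 7, 9)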
template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v, T const & scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator-(T scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator-(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v, T const & scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator*(T scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator*(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator/(vec<4, T, Q> const& v, T const & scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator/(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator/(T scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator/(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator/(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator%(vec<4, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator%(vec<4, T, Q> const& v, vec<1, T, Q> const& scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator%(T scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator%(vec<1, T, Q> const& scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator%(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator&(vec<4, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator&(vec<4, T, Q> const& v, vec<1, T, Q> const& scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator&(T scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator&(vec<1, T, Q> const& scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator&(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator|(vec<4, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator|(vec<4, T, Q> const& v, vec<1, T, Q> const& scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator|(T scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator|(vec<1, T, Q> const& scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator|(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator^(vec<4, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL 
GLM_CONSTEXPR vec<4, T, Q> operator^(vec<4, T, Q> const& v, vec<1, T, Q> const& scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator^(T scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator^(vec<1, T, Q> const& scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator^(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<4, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<4, T, Q> const& v, vec<1, T, Q> const& scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator<<(T scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<1, T, Q> const& scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator>>(vec<4, T, Q> const& v, T scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator>>(vec<4, T, Q> const& v, vec<1, T, Q> const& scalar); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator>>(T scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator>>(vec<1, T, Q> const& scalar, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator>>(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator~(vec<4, T, Q> const& v); + + // -- Boolean operators -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR bool operator==(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR bool operator!=(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, bool, Q> operator&&(vec<4, bool, Q> const& v1, vec<4, bool, Q> const& v2); + + template + GLM_FUNC_DECL GLM_CONSTEXPR vec<4, bool, Q> operator||(vec<4, bool, Q> const& v1, vec<4, bool, Q> const& v2); +}//namespace glm + +#ifndef GLM_EXTERNAL_TEMPLATE +#include "type_vec4.inl" +#endif//GLM_EXTERNAL_TEMPLATE diff --git a/MacroLibX/third_party/glm/detail/type_vec4.inl b/MacroLibX/third_party/glm/detail/type_vec4.inl new file mode 100644 index 0000000..3c212d9 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_vec4.inl @@ -0,0 +1,1140 @@ +/// @ref core + +#include "compute_vector_relational.hpp" + +namespace glm{ +namespace detail +{ + template + struct compute_vec4_add + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + return vec<4, T, Q>(a.x + b.x, a.y + b.y, a.z + b.z, a.w + b.w); + } + }; + + template + struct compute_vec4_sub + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + return vec<4, T, Q>(a.x - b.x, a.y - b.y, a.z - b.z, a.w - b.w); + } + }; + + template + struct compute_vec4_mul + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + return vec<4, T, Q>(a.x * b.x, a.y * b.y, a.z * b.z, a.w * b.w); + } + }; + + template + struct compute_vec4_div + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + return vec<4, T, Q>(a.x / b.x, a.y / b.y, a.z / b.z, a.w / b.w); + } + }; + + template + struct compute_vec4_mod + { + GLM_FUNC_QUALIFIER 
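+ // The detail::compute_vec4_* functors above are parameterised on the
+ // component type, the qualifier and an is-aligned flag, so aligned (SIMD)
+ // specialisations can be swapped in elsewhere without touching the operator
+ // definitions; the generic versions here just do per-component work. The
+ // member operators below then reduce to one-liners of the form (sketch):
+ //
+ //   // *this = detail::compute_vec4_add<T, Q,
+ //   //     detail::is_aligned<Q>::value>::call(*this, vec<4, T, Q>(scalar));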
GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + return vec<4, T, Q>(a.x % b.x, a.y % b.y, a.z % b.z, a.w % b.w); + } + }; + + template + struct compute_vec4_and + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + return vec<4, T, Q>(a.x & b.x, a.y & b.y, a.z & b.z, a.w & b.w); + } + }; + + template + struct compute_vec4_or + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + return vec<4, T, Q>(a.x | b.x, a.y | b.y, a.z | b.z, a.w | b.w); + } + }; + + template + struct compute_vec4_xor + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + return vec<4, T, Q>(a.x ^ b.x, a.y ^ b.y, a.z ^ b.z, a.w ^ b.w); + } + }; + + template + struct compute_vec4_shift_left + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + return vec<4, T, Q>(a.x << b.x, a.y << b.y, a.z << b.z, a.w << b.w); + } + }; + + template + struct compute_vec4_shift_right + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + return vec<4, T, Q>(a.x >> b.x, a.y >> b.y, a.z >> b.z, a.w >> b.w); + } + }; + + template + struct compute_vec4_equal + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static bool call(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return + detail::compute_equal::is_iec559>::call(v1.x, v2.x) && + detail::compute_equal::is_iec559>::call(v1.y, v2.y) && + detail::compute_equal::is_iec559>::call(v1.z, v2.z) && + detail::compute_equal::is_iec559>::call(v1.w, v2.w); + } + }; + + template + struct compute_vec4_nequal + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static bool call(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return !compute_vec4_equal::value, sizeof(T) * 8, detail::is_aligned::value>::call(v1, v2); + } + }; + + template + struct compute_vec4_bitwise_not + { + GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& v) + { + return vec<4, T, Q>(~v.x, ~v.y, ~v.z, ~v.w); + } + }; +}//namespace detail + + // -- Implicit basic constructors -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec() +# if GLM_CONFIG_CTOR_INIT != GLM_CTOR_INIT_DISABLE + : x(0), y(0), z(0), w(0) +# endif + {} + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<4, T, Q> const& v) + : x(v.x), y(v.y), z(v.z), w(v.w) + {} +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<4, T, P> const& v) + : x(v.x), y(v.y), z(v.z), w(v.w) + {} + + // -- Explicit basic constructors -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(T scalar) + : x(scalar), y(scalar), z(scalar), w(scalar) + {} + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(T _x, T _y, T _z, T _w) + : x(_x), y(_y), z(_z), w(_w) + {} + + // -- Conversion scalar constructors -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, U, P> const& v) + : x(static_cast(v.x)) + , y(static_cast(v.x)) + , z(static_cast(v.x)) + , w(static_cast(v.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(X _x, Y _y, Z _z, W _w) + : x(static_cast(_x)) + , y(static_cast(_y)) + , z(static_cast(_z)) + , w(static_cast(_w)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> 
const& _x, Y _y, Z _z, W _w) + : x(static_cast(_x.x)) + , y(static_cast(_y)) + , z(static_cast(_z)) + , w(static_cast(_w)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(X _x, vec<1, Y, Q> const& _y, Z _z, W _w) + : x(static_cast(_x)) + , y(static_cast(_y.x)) + , z(static_cast(_z)) + , w(static_cast(_w)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, Z _z, W _w) + : x(static_cast(_x.x)) + , y(static_cast(_y.x)) + , z(static_cast(_z)) + , w(static_cast(_w)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(X _x, Y _y, vec<1, Z, Q> const& _z, W _w) + : x(static_cast(_x)) + , y(static_cast(_y)) + , z(static_cast(_z.x)) + , w(static_cast(_w)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> const& _x, Y _y, vec<1, Z, Q> const& _z, W _w) + : x(static_cast(_x.x)) + , y(static_cast(_y)) + , z(static_cast(_z.x)) + , w(static_cast(_w)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(X _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z, W _w) + : x(static_cast(_x)) + , y(static_cast(_y.x)) + , z(static_cast(_z.x)) + , w(static_cast(_w)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z, W _w) + : x(static_cast(_x.x)) + , y(static_cast(_y.x)) + , z(static_cast(_z.x)) + , w(static_cast(_w)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> const& _x, Y _y, Z _z, vec<1, W, Q> const& _w) + : x(static_cast(_x.x)) + , y(static_cast(_y)) + , z(static_cast(_z)) + , w(static_cast(_w.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(X _x, vec<1, Y, Q> const& _y, Z _z, vec<1, W, Q> const& _w) + : x(static_cast(_x)) + , y(static_cast(_y.x)) + , z(static_cast(_z)) + , w(static_cast(_w.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, Z _z, vec<1, W, Q> const& _w) + : x(static_cast(_x.x)) + , y(static_cast(_y.x)) + , z(static_cast(_z)) + , w(static_cast(_w.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(X _x, Y _y, vec<1, Z, Q> const& _z, vec<1, W, Q> const& _w) + : x(static_cast(_x)) + , y(static_cast(_y)) + , z(static_cast(_z.x)) + , w(static_cast(_w.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> const& _x, Y _y, vec<1, Z, Q> const& _z, vec<1, W, Q> const& _w) + : x(static_cast(_x.x)) + , y(static_cast(_y)) + , z(static_cast(_z.x)) + , w(static_cast(_w.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(X _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z, vec<1, W, Q> const& _w) + : x(static_cast(_x)) + , y(static_cast(_y.x)) + , z(static_cast(_z.x)) + , w(static_cast(_w.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z, vec<1, W, Q> const& _w) + : x(static_cast(_x.x)) + , y(static_cast(_y.x)) + , z(static_cast(_z.x)) + , w(static_cast(_w.x)) + {} + + // -- Conversion vector constructors -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<2, A, P> const& _xy, B _z, C _w) + : x(static_cast(_xy.x)) + , y(static_cast(_xy.y)) + , z(static_cast(_z)) + , 
w(static_cast(_w)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<2, A, P> const& _xy, vec<1, B, P> const& _z, C _w) + : x(static_cast(_xy.x)) + , y(static_cast(_xy.y)) + , z(static_cast(_z.x)) + , w(static_cast(_w)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<2, A, P> const& _xy, B _z, vec<1, C, P> const& _w) + : x(static_cast(_xy.x)) + , y(static_cast(_xy.y)) + , z(static_cast(_z)) + , w(static_cast(_w.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<2, A, P> const& _xy, vec<1, B, P> const& _z, vec<1, C, P> const& _w) + : x(static_cast(_xy.x)) + , y(static_cast(_xy.y)) + , z(static_cast(_z.x)) + , w(static_cast(_w.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(A _x, vec<2, B, P> const& _yz, C _w) + : x(static_cast(_x)) + , y(static_cast(_yz.x)) + , z(static_cast(_yz.y)) + , w(static_cast(_w)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, A, P> const& _x, vec<2, B, P> const& _yz, C _w) + : x(static_cast(_x.x)) + , y(static_cast(_yz.x)) + , z(static_cast(_yz.y)) + , w(static_cast(_w)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(A _x, vec<2, B, P> const& _yz, vec<1, C, P> const& _w) + : x(static_cast(_x)) + , y(static_cast(_yz.x)) + , z(static_cast(_yz.y)) + , w(static_cast(_w.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, A, P> const& _x, vec<2, B, P> const& _yz, vec<1, C, P> const& _w) + : x(static_cast(_x.x)) + , y(static_cast(_yz.x)) + , z(static_cast(_yz.y)) + , w(static_cast(_w.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(A _x, B _y, vec<2, C, P> const& _zw) + : x(static_cast(_x)) + , y(static_cast(_y)) + , z(static_cast(_zw.x)) + , w(static_cast(_zw.y)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, A, P> const& _x, B _y, vec<2, C, P> const& _zw) + : x(static_cast(_x.x)) + , y(static_cast(_y)) + , z(static_cast(_zw.x)) + , w(static_cast(_zw.y)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(A _x, vec<1, B, P> const& _y, vec<2, C, P> const& _zw) + : x(static_cast(_x)) + , y(static_cast(_y.x)) + , z(static_cast(_zw.x)) + , w(static_cast(_zw.y)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, A, P> const& _x, vec<1, B, P> const& _y, vec<2, C, P> const& _zw) + : x(static_cast(_x.x)) + , y(static_cast(_y.x)) + , z(static_cast(_zw.x)) + , w(static_cast(_zw.y)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<3, A, P> const& _xyz, B _w) + : x(static_cast(_xyz.x)) + , y(static_cast(_xyz.y)) + , z(static_cast(_xyz.z)) + , w(static_cast(_w)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<3, A, P> const& _xyz, vec<1, B, P> const& _w) + : x(static_cast(_xyz.x)) + , y(static_cast(_xyz.y)) + , z(static_cast(_xyz.z)) + , w(static_cast(_w.x)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(A _x, vec<3, B, P> const& _yzw) + : x(static_cast(_x)) + , y(static_cast(_yzw.x)) + , z(static_cast(_yzw.y)) + , w(static_cast(_yzw.z)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, A, P> const& _x, vec<3, B, P> const& _yzw) + : x(static_cast(_x.x)) + , y(static_cast(_yzw.x)) + , z(static_cast(_yzw.y)) + , 
w(static_cast(_yzw.z)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<2, A, P> const& _xy, vec<2, B, P> const& _zw) + : x(static_cast(_xy.x)) + , y(static_cast(_xy.y)) + , z(static_cast(_zw.x)) + , w(static_cast(_zw.y)) + {} + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<4, U, P> const& v) + : x(static_cast(v.x)) + , y(static_cast(v.y)) + , z(static_cast(v.z)) + , w(static_cast(v.w)) + {} + + // -- Component accesses -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR T& vec<4, T, Q>::operator[](typename vec<4, T, Q>::length_type i) + { + assert(i >= 0 && i < this->length()); + switch(i) + { + default: + case 0: + return x; + case 1: + return y; + case 2: + return z; + case 3: + return w; + } + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR T const& vec<4, T, Q>::operator[](typename vec<4, T, Q>::length_type i) const + { + assert(i >= 0 && i < this->length()); + switch(i) + { + default: + case 0: + return x; + case 1: + return y; + case 2: + return z; + case 3: + return w; + } + } + + // -- Unary arithmetic operators -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>& vec<4, T, Q>::operator=(vec<4, T, Q> const& v) + { + this->x = v.x; + this->y = v.y; + this->z = v.z; + this->w = v.w; + return *this; + } +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>& vec<4, T, Q>::operator=(vec<4, U, Q> const& v) + { + this->x = static_cast(v.x); + this->y = static_cast(v.y); + this->z = static_cast(v.z); + this->w = static_cast(v.w); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator+=(U scalar) + { + return (*this = detail::compute_vec4_add::value>::call(*this, vec<4, T, Q>(scalar))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator+=(vec<1, U, Q> const& v) + { + return (*this = detail::compute_vec4_add::value>::call(*this, vec<4, T, Q>(v.x))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator+=(vec<4, U, Q> const& v) + { + return (*this = detail::compute_vec4_add::value>::call(*this, vec<4, T, Q>(v))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator-=(U scalar) + { + return (*this = detail::compute_vec4_sub::value>::call(*this, vec<4, T, Q>(scalar))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator-=(vec<1, U, Q> const& v) + { + return (*this = detail::compute_vec4_sub::value>::call(*this, vec<4, T, Q>(v.x))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator-=(vec<4, U, Q> const& v) + { + return (*this = detail::compute_vec4_sub::value>::call(*this, vec<4, T, Q>(v))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator*=(U scalar) + { + return (*this = detail::compute_vec4_mul::value>::call(*this, vec<4, T, Q>(scalar))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator*=(vec<1, U, Q> const& v) + { + return (*this = detail::compute_vec4_mul::value>::call(*this, vec<4, T, Q>(v.x))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator*=(vec<4, U, Q> const& v) + { + return (*this = detail::compute_vec4_mul::value>::call(*this, vec<4, T, Q>(v))); + } + + template + 
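+ // Every compound assignment is a member template over a second type U and
+ // funnels the right-hand side through vec<4, T, Q>(...), so mixed-type
+ // operands convert explicitly before the arithmetic. Sketch (illustrative):
+ //
+ //   glm::vec4 v(1.5f);
+ //   v *= 2;     // int promoted to float: v == (3, 3, 3, 3)
+ //   glm::ivec4 iv(7);
+ //   iv /= 2;    // per-component integer division: iv == (3, 3, 3, 3)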
template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator/=(U scalar) + { + return (*this = detail::compute_vec4_div::value>::call(*this, vec<4, T, Q>(scalar))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator/=(vec<1, U, Q> const& v) + { + return (*this = detail::compute_vec4_div::value>::call(*this, vec<4, T, Q>(v.x))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator/=(vec<4, U, Q> const& v) + { + return (*this = detail::compute_vec4_div::value>::call(*this, vec<4, T, Q>(v))); + } + + // -- Increment and decrement operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator++() + { + ++this->x; + ++this->y; + ++this->z; + ++this->w; + return *this; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator--() + { + --this->x; + --this->y; + --this->z; + --this->w; + return *this; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> vec<4, T, Q>::operator++(int) + { + vec<4, T, Q> Result(*this); + ++*this; + return Result; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> vec<4, T, Q>::operator--(int) + { + vec<4, T, Q> Result(*this); + --*this; + return Result; + } + + // -- Unary bit operators -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator%=(U scalar) + { + return (*this = detail::compute_vec4_mod::value>::call(*this, vec<4, T, Q>(scalar))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator%=(vec<1, U, Q> const& v) + { + return (*this = detail::compute_vec4_mod::value>::call(*this, vec<4, T, Q>(v))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator%=(vec<4, U, Q> const& v) + { + return (*this = detail::compute_vec4_mod::value>::call(*this, vec<4, T, Q>(v))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator&=(U scalar) + { + return (*this = detail::compute_vec4_and::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(scalar))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator&=(vec<1, U, Q> const& v) + { + return (*this = detail::compute_vec4_and::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator&=(vec<4, U, Q> const& v) + { + return (*this = detail::compute_vec4_and::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator|=(U scalar) + { + return (*this = detail::compute_vec4_or::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(scalar))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator|=(vec<1, U, Q> const& v) + { + return (*this = detail::compute_vec4_or::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator|=(vec<4, U, Q> const& v) + { + return (*this = detail::compute_vec4_or::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, 
Q> & vec<4, T, Q>::operator^=(U scalar) + { + return (*this = detail::compute_vec4_xor::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(scalar))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator^=(vec<1, U, Q> const& v) + { + return (*this = detail::compute_vec4_xor::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator^=(vec<4, U, Q> const& v) + { + return (*this = detail::compute_vec4_xor::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator<<=(U scalar) + { + return (*this = detail::compute_vec4_shift_left::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(scalar))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator<<=(vec<1, U, Q> const& v) + { + return (*this = detail::compute_vec4_shift_left::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator<<=(vec<4, U, Q> const& v) + { + return (*this = detail::compute_vec4_shift_left::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator>>=(U scalar) + { + return (*this = detail::compute_vec4_shift_right::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(scalar))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator>>=(vec<1, U, Q> const& v) + { + return (*this = detail::compute_vec4_shift_right::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); + } + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator>>=(vec<4, U, Q> const& v) + { + return (*this = detail::compute_vec4_shift_right::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); + } + + // -- Unary constant operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v) + { + return v; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v) + { + return vec<4, T, Q>(0) -= v; + } + + // -- Binary arithmetic operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v, T const & scalar) + { + return vec<4, T, Q>(v) += scalar; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<4, T, Q>(v1) += v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator+(T scalar, vec<4, T, Q> const& v) + { + return vec<4, T, Q>(v) += scalar; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator+(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v2) += v1; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1) += v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v, T const & scalar) + { + return vec<4, T, Q>(v) -= scalar; + } + + template + GLM_FUNC_QUALIFIER 
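+ // Unary minus above is written as vec<4, T, Q>(0) -= v, reusing the
+ // (potentially SIMD-specialised) subtraction path rather than negating each
+ // component by hand; the vec<3> implementation spells the negation out per
+ // component instead. Sketch:
+ //
+ //   glm::vec4 v(1.0f, -2.0f, 3.0f, -4.0f);
+ //   glm::vec4 n = -v;   // (-1, 2, -3, 4)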
GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<4, T, Q>(v1) -= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator-(T scalar, vec<4, T, Q> const& v) + { + return vec<4, T, Q>(scalar) -= v; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator-(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1.x) -= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1) -= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v, T const & scalar) + { + return vec<4, T, Q>(v) *= scalar; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<4, T, Q>(v1) *= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator*(T scalar, vec<4, T, Q> const& v) + { + return vec<4, T, Q>(v) *= scalar; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator*(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v2) *= v1; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1) *= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator/(vec<4, T, Q> const& v, T const & scalar) + { + return vec<4, T, Q>(v) /= scalar; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator/(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<4, T, Q>(v1) /= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator/(T scalar, vec<4, T, Q> const& v) + { + return vec<4, T, Q>(scalar) /= v; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator/(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1.x) /= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator/(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1) /= v2; + } + + // -- Binary bit operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator%(vec<4, T, Q> const& v, T scalar) + { + return vec<4, T, Q>(v) %= scalar; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator%(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<4, T, Q>(v1) %= v2.x; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator%(T scalar, vec<4, T, Q> const& v) + { + return vec<4, T, Q>(scalar) %= v; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator%(vec<1, T, Q> const& scalar, vec<4, T, Q> const& v) + { + return vec<4, T, Q>(scalar.x) %= v; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator%(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1) %= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator&(vec<4, T, Q> const& v, T scalar) + { + return vec<4, T, Q>(v) &= scalar; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator&(vec<4, T, Q> const& v, vec<1, T, Q> const& scalar) + { + return vec<4, T, Q>(v) &= scalar; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator&(T scalar, vec<4, T, Q> const& v) + { + return vec<4, T, Q>(scalar) &= v; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator&(vec<1, T, Q> const& v1, vec<4, T, 
Q> const& v2) + { + return vec<4, T, Q>(v1.x) &= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator&(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1) &= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator|(vec<4, T, Q> const& v, T scalar) + { + return vec<4, T, Q>(v) |= scalar; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator|(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<4, T, Q>(v1) |= v2.x; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator|(T scalar, vec<4, T, Q> const& v) + { + return vec<4, T, Q>(scalar) |= v; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator|(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1.x) |= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator|(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1) |= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator^(vec<4, T, Q> const& v, T scalar) + { + return vec<4, T, Q>(v) ^= scalar; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator^(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<4, T, Q>(v1) ^= v2.x; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator^(T scalar, vec<4, T, Q> const& v) + { + return vec<4, T, Q>(scalar) ^= v; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator^(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1.x) ^= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator^(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1) ^= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<4, T, Q> const& v, T scalar) + { + return vec<4, T, Q>(v) <<= scalar; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<4, T, Q>(v1) <<= v2.x; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator<<(T scalar, vec<4, T, Q> const& v) + { + return vec<4, T, Q>(scalar) <<= v; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1.x) <<= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1) <<= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator>>(vec<4, T, Q> const& v, T scalar) + { + return vec<4, T, Q>(v) >>= scalar; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator>>(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) + { + return vec<4, T, Q>(v1) >>= v2.x; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator>>(T scalar, vec<4, T, Q> const& v) + { + return vec<4, T, Q>(scalar) >>= v; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator>>(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1.x) >>= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator>>(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return vec<4, T, Q>(v1) >>= v2; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator~(vec<4, T, Q> const& v) + { + return detail::compute_vec4_bitwise_not::value, sizeof(T) * 8, 
detail::is_aligned::value>::call(v); + } + + // -- Boolean operators -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator==(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return detail::compute_vec4_equal::value, sizeof(T) * 8, detail::is_aligned::value>::call(v1, v2); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator!=(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) + { + return detail::compute_vec4_nequal::value, sizeof(T) * 8, detail::is_aligned::value>::call(v1, v2); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, bool, Q> operator&&(vec<4, bool, Q> const& v1, vec<4, bool, Q> const& v2) + { + return vec<4, bool, Q>(v1.x && v2.x, v1.y && v2.y, v1.z && v2.z, v1.w && v2.w); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, bool, Q> operator||(vec<4, bool, Q> const& v1, vec<4, bool, Q> const& v2) + { + return vec<4, bool, Q>(v1.x || v2.x, v1.y || v2.y, v1.z || v2.z, v1.w || v2.w); + } +}//namespace glm + +#if GLM_CONFIG_SIMD == GLM_ENABLE +# include "type_vec4_simd.inl" +#endif diff --git a/MacroLibX/third_party/glm/detail/type_vec4_simd.inl b/MacroLibX/third_party/glm/detail/type_vec4_simd.inl new file mode 100644 index 0000000..29559b5 --- /dev/null +++ b/MacroLibX/third_party/glm/detail/type_vec4_simd.inl @@ -0,0 +1,775 @@ +#if GLM_ARCH & GLM_ARCH_SSE2_BIT + +namespace glm{ +namespace detail +{ +# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR + template + struct _swizzle_base1<4, float, Q, E0,E1,E2,E3, true> : public _swizzle_base0 + { + GLM_FUNC_QUALIFIER vec<4, float, Q> operator ()() const + { + __m128 data = *reinterpret_cast<__m128 const*>(&this->_buffer); + + vec<4, float, Q> Result; +# if GLM_ARCH & GLM_ARCH_AVX_BIT + Result.data = _mm_permute_ps(data, _MM_SHUFFLE(E3, E2, E1, E0)); +# else + Result.data = _mm_shuffle_ps(data, data, _MM_SHUFFLE(E3, E2, E1, E0)); +# endif + return Result; + } + }; + + template + struct _swizzle_base1<4, int, Q, E0,E1,E2,E3, true> : public _swizzle_base0 + { + GLM_FUNC_QUALIFIER vec<4, int, Q> operator ()() const + { + __m128i data = *reinterpret_cast<__m128i const*>(&this->_buffer); + + vec<4, int, Q> Result; + Result.data = _mm_shuffle_epi32(data, _MM_SHUFFLE(E3, E2, E1, E0)); + return Result; + } + }; + + template + struct _swizzle_base1<4, uint, Q, E0,E1,E2,E3, true> : public _swizzle_base0 + { + GLM_FUNC_QUALIFIER vec<4, uint, Q> operator ()() const + { + __m128i data = *reinterpret_cast<__m128i const*>(&this->_buffer); + + vec<4, uint, Q> Result; + Result.data = _mm_shuffle_epi32(data, _MM_SHUFFLE(E3, E2, E1, E0)); + return Result; + } + }; +# endif// GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR + + template + struct compute_vec4_add + { + static vec<4, float, Q> call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) + { + vec<4, float, Q> Result; + Result.data = _mm_add_ps(a.data, b.data); + return Result; + } + }; + +# if GLM_ARCH & GLM_ARCH_AVX_BIT + template + struct compute_vec4_add + { + static vec<4, double, Q> call(vec<4, double, Q> const& a, vec<4, double, Q> const& b) + { + vec<4, double, Q> Result; + Result.data = _mm256_add_pd(a.data, b.data); + return Result; + } + }; +# endif + + template + struct compute_vec4_sub + { + static vec<4, float, Q> call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) + { + vec<4, float, Q> Result; + Result.data = _mm_sub_ps(a.data, b.data); + return Result; + } + }; + +# if GLM_ARCH & GLM_ARCH_AVX_BIT + template + struct compute_vec4_sub + { + static vec<4, double, Q> call(vec<4, double, Q> const& a, vec<4, double, Q> const& b) + { + 
vec<4, double, Q> Result; + Result.data = _mm256_sub_pd(a.data, b.data); + return Result; + } + }; +# endif + + template + struct compute_vec4_mul + { + static vec<4, float, Q> call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) + { + vec<4, float, Q> Result; + Result.data = _mm_mul_ps(a.data, b.data); + return Result; + } + }; + +# if GLM_ARCH & GLM_ARCH_AVX_BIT + template + struct compute_vec4_mul + { + static vec<4, double, Q> call(vec<4, double, Q> const& a, vec<4, double, Q> const& b) + { + vec<4, double, Q> Result; + Result.data = _mm256_mul_pd(a.data, b.data); + return Result; + } + }; +# endif + + template + struct compute_vec4_div + { + static vec<4, float, Q> call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) + { + vec<4, float, Q> Result; + Result.data = _mm_div_ps(a.data, b.data); + return Result; + } + }; + + # if GLM_ARCH & GLM_ARCH_AVX_BIT + template + struct compute_vec4_div + { + static vec<4, double, Q> call(vec<4, double, Q> const& a, vec<4, double, Q> const& b) + { + vec<4, double, Q> Result; + Result.data = _mm256_div_pd(a.data, b.data); + return Result; + } + }; +# endif + + template<> + struct compute_vec4_div + { + static vec<4, float, aligned_lowp> call(vec<4, float, aligned_lowp> const& a, vec<4, float, aligned_lowp> const& b) + { + vec<4, float, aligned_lowp> Result; + Result.data = _mm_mul_ps(a.data, _mm_rcp_ps(b.data)); + return Result; + } + }; + + template + struct compute_vec4_and + { + static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + vec<4, T, Q> Result; + Result.data = _mm_and_si128(a.data, b.data); + return Result; + } + }; + +# if GLM_ARCH & GLM_ARCH_AVX2_BIT + template + struct compute_vec4_and + { + static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + vec<4, T, Q> Result; + Result.data = _mm256_and_si256(a.data, b.data); + return Result; + } + }; +# endif + + template + struct compute_vec4_or + { + static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + vec<4, T, Q> Result; + Result.data = _mm_or_si128(a.data, b.data); + return Result; + } + }; + +# if GLM_ARCH & GLM_ARCH_AVX2_BIT + template + struct compute_vec4_or + { + static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + vec<4, T, Q> Result; + Result.data = _mm256_or_si256(a.data, b.data); + return Result; + } + }; +# endif + + template + struct compute_vec4_xor + { + static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + vec<4, T, Q> Result; + Result.data = _mm_xor_si128(a.data, b.data); + return Result; + } + }; + +# if GLM_ARCH & GLM_ARCH_AVX2_BIT + template + struct compute_vec4_xor + { + static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + vec<4, T, Q> Result; + Result.data = _mm256_xor_si256(a.data, b.data); + return Result; + } + }; +# endif + + template + struct compute_vec4_shift_left + { + static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + vec<4, T, Q> Result; + Result.data = _mm_sll_epi32(a.data, b.data); + return Result; + } + }; + +# if GLM_ARCH & GLM_ARCH_AVX2_BIT + template + struct compute_vec4_shift_left + { + static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + vec<4, T, Q> Result; + Result.data = _mm256_sll_epi64(a.data, b.data); + return Result; + } + }; +# endif + + template + struct compute_vec4_shift_right + { + static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + vec<4, T, Q> Result; + Result.data = _mm_srl_epi32(a.data, b.data); + return Result; + 
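+			// Note (editor's aside, not upstream code): _mm_srl_epi32 shifts every lane
+			// of a.data by a single count read from the low 64 bits of b.data; it is not
+			// a per-component shift (that would be AVX2's _mm_srlv_epi32). Because b
+			// carries the shift amounts in all four lanes, the combined 64-bit count seen
+			// here is not the per-lane value, so this path may not match the scalar
+			// semantics of operator>> for vector shift counts.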
} + }; + +# if GLM_ARCH & GLM_ARCH_AVX2_BIT + template + struct compute_vec4_shift_right + { + static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) + { + vec<4, T, Q> Result; + Result.data = _mm256_srl_epi64(a.data, b.data); + return Result; + } + }; +# endif + + template + struct compute_vec4_bitwise_not + { + static vec<4, T, Q> call(vec<4, T, Q> const& v) + { + vec<4, T, Q> Result; + Result.data = _mm_xor_si128(v.data, _mm_set1_epi32(-1)); + return Result; + } + }; + +# if GLM_ARCH & GLM_ARCH_AVX2_BIT + template + struct compute_vec4_bitwise_not + { + static vec<4, T, Q> call(vec<4, T, Q> const& v) + { + vec<4, T, Q> Result; + Result.data = _mm256_xor_si256(v.data, _mm_set1_epi32(-1)); + return Result; + } + }; +# endif + + template + struct compute_vec4_equal + { + static bool call(vec<4, float, Q> const& v1, vec<4, float, Q> const& v2) + { + return _mm_movemask_ps(_mm_cmpeq_ps(v1.data, v2.data)) != 0; + } + }; + +# if GLM_ARCH & GLM_ARCH_SSE41_BIT + template + struct compute_vec4_equal + { + static bool call(vec<4, int, Q> const& v1, vec<4, int, Q> const& v2) + { + //return _mm_movemask_epi8(_mm_cmpeq_epi32(v1.data, v2.data)) != 0; + __m128i neq = _mm_xor_si128(v1.data, v2.data); + return _mm_test_all_zeros(neq, neq) == 0; + } + }; +# endif + + template + struct compute_vec4_nequal + { + static bool call(vec<4, float, Q> const& v1, vec<4, float, Q> const& v2) + { + return _mm_movemask_ps(_mm_cmpneq_ps(v1.data, v2.data)) != 0; + } + }; + +# if GLM_ARCH & GLM_ARCH_SSE41_BIT + template + struct compute_vec4_nequal + { + static bool call(vec<4, int, Q> const& v1, vec<4, int, Q> const& v2) + { + //return _mm_movemask_epi8(_mm_cmpneq_epi32(v1.data, v2.data)) != 0; + __m128i neq = _mm_xor_si128(v1.data, v2.data); + return _mm_test_all_zeros(neq, neq) != 0; + } + }; +# endif +}//namespace detail + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_lowp>::vec(float _s) : + data(_mm_set1_ps(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_mediump>::vec(float _s) : + data(_mm_set1_ps(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(float _s) : + data(_mm_set1_ps(_s)) + {} + +# if GLM_ARCH & GLM_ARCH_AVX_BIT + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, double, aligned_lowp>::vec(double _s) : + data(_mm256_set1_pd(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, double, aligned_mediump>::vec(double _s) : + data(_mm256_set1_pd(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, double, aligned_highp>::vec(double _s) : + data(_mm256_set1_pd(_s)) + {} +# endif + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_lowp>::vec(int _s) : + data(_mm_set1_epi32(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_mediump>::vec(int _s) : + data(_mm_set1_epi32(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_highp>::vec(int _s) : + data(_mm_set1_epi32(_s)) + {} + +# if GLM_ARCH & GLM_ARCH_AVX2_BIT + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, detail::int64, aligned_lowp>::vec(detail::int64 _s) : + data(_mm256_set1_epi64x(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, detail::int64, aligned_mediump>::vec(detail::int64 _s) : + data(_mm256_set1_epi64x(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, detail::int64, aligned_highp>::vec(detail::int64 _s) : + data(_mm256_set1_epi64x(_s)) + {} +# endif + + template<> + GLM_FUNC_QUALIFIER 
GLM_CONSTEXPR vec<4, float, aligned_lowp>::vec(float _x, float _y, float _z, float _w) : + data(_mm_set_ps(_w, _z, _y, _x)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_mediump>::vec(float _x, float _y, float _z, float _w) : + data(_mm_set_ps(_w, _z, _y, _x)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(float _x, float _y, float _z, float _w) : + data(_mm_set_ps(_w, _z, _y, _x)) + {} + + template<> + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_lowp>::vec(int _x, int _y, int _z, int _w) : + data(_mm_set_epi32(_w, _z, _y, _x)) + {} + + template<> + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_mediump>::vec(int _x, int _y, int _z, int _w) : + data(_mm_set_epi32(_w, _z, _y, _x)) + {} + + template<> + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_highp>::vec(int _x, int _y, int _z, int _w) : + data(_mm_set_epi32(_w, _z, _y, _x)) + {} + + template<> + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_lowp>::vec(int _x, int _y, int _z, int _w) : + data(_mm_cvtepi32_ps(_mm_set_epi32(_w, _z, _y, _x))) + {} + + template<> + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_mediump>::vec(int _x, int _y, int _z, int _w) : + data(_mm_cvtepi32_ps(_mm_set_epi32(_w, _z, _y, _x))) + {} + + template<> + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(int _x, int _y, int _z, int _w) : + data(_mm_cvtepi32_ps(_mm_set_epi32(_w, _z, _y, _x))) + {} +}//namespace glm + +#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT + +#if GLM_ARCH & GLM_ARCH_NEON_BIT +namespace glm { +namespace detail { + + template + struct compute_vec4_add + { + static + vec<4, float, Q> + call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) + { + vec<4, float, Q> Result; + Result.data = vaddq_f32(a.data, b.data); + return Result; + } + }; + + template + struct compute_vec4_add + { + static + vec<4, uint, Q> + call(vec<4, uint, Q> const& a, vec<4, uint, Q> const& b) + { + vec<4, uint, Q> Result; + Result.data = vaddq_u32(a.data, b.data); + return Result; + } + }; + + template + struct compute_vec4_add + { + static + vec<4, int, Q> + call(vec<4, int, Q> const& a, vec<4, int, Q> const& b) + { + vec<4, uint, Q> Result; + Result.data = vaddq_s32(a.data, b.data); + return Result; + } + }; + + template + struct compute_vec4_sub + { + static vec<4, float, Q> call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) + { + vec<4, float, Q> Result; + Result.data = vsubq_f32(a.data, b.data); + return Result; + } + }; + + template + struct compute_vec4_sub + { + static vec<4, uint, Q> call(vec<4, uint, Q> const& a, vec<4, uint, Q> const& b) + { + vec<4, uint, Q> Result; + Result.data = vsubq_u32(a.data, b.data); + return Result; + } + }; + + template + struct compute_vec4_sub + { + static vec<4, int, Q> call(vec<4, int, Q> const& a, vec<4, int, Q> const& b) + { + vec<4, int, Q> Result; + Result.data = vsubq_s32(a.data, b.data); + return Result; + } + }; + + template + struct compute_vec4_mul + { + static vec<4, float, Q> call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) + { + vec<4, float, Q> Result; + Result.data = vmulq_f32(a.data, b.data); + return Result; + } + }; + + template + struct compute_vec4_mul + { + static vec<4, uint, Q> call(vec<4, uint, Q> const& a, vec<4, uint, Q> const& b) + { + vec<4, uint, Q> Result; + Result.data = vmulq_u32(a.data, b.data); + return Result; + } + }; + + template + struct compute_vec4_mul + { + static vec<4, 
int, Q> call(vec<4, int, Q> const& a, vec<4, int, Q> const& b) + { + vec<4, int, Q> Result; + Result.data = vmulq_s32(a.data, b.data); + return Result; + } + }; + + template + struct compute_vec4_div + { + static vec<4, float, Q> call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) + { + vec<4, float, Q> Result; + Result.data = vdivq_f32(a.data, b.data); + return Result; + } + }; + + template + struct compute_vec4_equal + { + static bool call(vec<4, float, Q> const& v1, vec<4, float, Q> const& v2) + { + uint32x4_t cmp = vceqq_f32(v1.data, v2.data); +#if GLM_ARCH & GLM_ARCH_ARMV8_BIT + cmp = vpminq_u32(cmp, cmp); + cmp = vpminq_u32(cmp, cmp); + uint32_t r = cmp[0]; +#else + uint32x2_t cmpx2 = vpmin_u32(vget_low_f32(cmp), vget_high_f32(cmp)); + cmpx2 = vpmin_u32(cmpx2, cmpx2); + uint32_t r = cmpx2[0]; +#endif + return r == ~0u; + } + }; + + template + struct compute_vec4_equal + { + static bool call(vec<4, uint, Q> const& v1, vec<4, uint, Q> const& v2) + { + uint32x4_t cmp = vceqq_u32(v1.data, v2.data); +#if GLM_ARCH & GLM_ARCH_ARMV8_BIT + cmp = vpminq_u32(cmp, cmp); + cmp = vpminq_u32(cmp, cmp); + uint32_t r = cmp[0]; +#else + uint32x2_t cmpx2 = vpmin_u32(vget_low_f32(cmp), vget_high_f32(cmp)); + cmpx2 = vpmin_u32(cmpx2, cmpx2); + uint32_t r = cmpx2[0]; +#endif + return r == ~0u; + } + }; + + template + struct compute_vec4_equal + { + static bool call(vec<4, int, Q> const& v1, vec<4, int, Q> const& v2) + { + uint32x4_t cmp = vceqq_s32(v1.data, v2.data); +#if GLM_ARCH & GLM_ARCH_ARMV8_BIT + cmp = vpminq_u32(cmp, cmp); + cmp = vpminq_u32(cmp, cmp); + uint32_t r = cmp[0]; +#else + uint32x2_t cmpx2 = vpmin_u32(vget_low_f32(cmp), vget_high_f32(cmp)); + cmpx2 = vpmin_u32(cmpx2, cmpx2); + uint32_t r = cmpx2[0]; +#endif + return r == ~0u; + } + }; + + template + struct compute_vec4_nequal + { + static bool call(vec<4, float, Q> const& v1, vec<4, float, Q> const& v2) + { + return !compute_vec4_equal::call(v1, v2); + } + }; + + template + struct compute_vec4_nequal + { + static bool call(vec<4, uint, Q> const& v1, vec<4, uint, Q> const& v2) + { + return !compute_vec4_equal::call(v1, v2); + } + }; + + template + struct compute_vec4_nequal + { + static bool call(vec<4, int, Q> const& v1, vec<4, int, Q> const& v2) + { + return !compute_vec4_equal::call(v1, v2); + } + }; + +}//namespace detail + +#if !GLM_CONFIG_XYZW_ONLY + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_lowp>::vec(float _s) : + data(vdupq_n_f32(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_mediump>::vec(float _s) : + data(vdupq_n_f32(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(float _s) : + data(vdupq_n_f32(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_lowp>::vec(int _s) : + data(vdupq_n_s32(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_mediump>::vec(int _s) : + data(vdupq_n_s32(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_highp>::vec(int _s) : + data(vdupq_n_s32(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, uint, aligned_lowp>::vec(uint _s) : + data(vdupq_n_u32(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, uint, aligned_mediump>::vec(uint _s) : + data(vdupq_n_u32(_s)) + {} + + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, uint, aligned_highp>::vec(uint _s) : + data(vdupq_n_u32(_s)) + {} + + template<> + template<> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, 
aligned_highp>::vec(const vec<4, float, aligned_highp>& rhs) :
+		data(rhs.data)
+	{}
+
+	template<>
+	template<>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(const vec<4, int, aligned_highp>& rhs) :
+		data(vcvtq_f32_s32(rhs.data))
+	{}
+
+	template<>
+	template<>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(const vec<4, uint, aligned_highp>& rhs) :
+		data(vcvtq_f32_u32(rhs.data))
+	{}
+
+	template<>
+	template<>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_lowp>::vec(int _x, int _y, int _z, int _w) :
+		data(vcvtq_f32_s32(vec<4, int, aligned_lowp>(_x, _y, _z, _w).data))
+	{}
+
+	template<>
+	template<>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_mediump>::vec(int _x, int _y, int _z, int _w) :
+		data(vcvtq_f32_s32(vec<4, int, aligned_mediump>(_x, _y, _z, _w).data))
+	{}
+
+	template<>
+	template<>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(int _x, int _y, int _z, int _w) :
+		data(vcvtq_f32_s32(vec<4, int, aligned_highp>(_x, _y, _z, _w).data))
+	{}
+
+	template<>
+	template<>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_lowp>::vec(uint _x, uint _y, uint _z, uint _w) :
+		data(vcvtq_f32_u32(vec<4, uint, aligned_lowp>(_x, _y, _z, _w).data))
+	{}
+
+	template<>
+	template<>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_mediump>::vec(uint _x, uint _y, uint _z, uint _w) :
+		data(vcvtq_f32_u32(vec<4, uint, aligned_mediump>(_x, _y, _z, _w).data))
+	{}
+
+	template<>
+	template<>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(uint _x, uint _y, uint _z, uint _w) :
+		data(vcvtq_f32_u32(vec<4, uint, aligned_highp>(_x, _y, _z, _w).data))
+	{}
+
+#endif
+}//namespace glm
+
+#endif
diff --git a/MacroLibX/third_party/glm/exponential.hpp b/MacroLibX/third_party/glm/exponential.hpp
new file mode 100644
index 0000000..f8fb886
--- /dev/null
+++ b/MacroLibX/third_party/glm/exponential.hpp
@@ -0,0 +1,110 @@
+/// @ref core
+/// @file glm/exponential.hpp
+///
+/// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions
+///
+/// @defgroup core_func_exponential Exponential functions
+/// @ingroup core
+///
+/// Provides GLSL exponential functions
+///
+/// These all operate component-wise. The description is per component.
+///
+/// Include <glm/exponential.hpp> to use these core features.
+
+#pragma once
+
+#include "detail/type_vec1.hpp"
+#include "detail/type_vec2.hpp"
+#include "detail/type_vec3.hpp"
+#include "detail/type_vec4.hpp"
+#include <cmath>
+
+namespace glm
+{
+	/// @addtogroup core_func_exponential
+	/// @{
+
+	/// Returns 'base' raised to the power 'exponent'.
+	///
+	/// @param base Floating point value. pow function is defined for input values of 'base' defined in the range (-inf, +inf) in the limit of the type qualifier.
+	/// @param exponent Floating point value representing the 'exponent'.
+	///
+	/// @see GLSL pow man page
+	/// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> pow(vec<L, T, Q> const& base, vec<L, T, Q> const& exponent);
+
+	/// Returns the natural exponentiation of x, i.e., e^x.
+	///
+	/// @param v exp function is defined for input values of v defined in the range (-inf, +inf) in the limit of the type qualifier.
+	/// @tparam L An integer between 1 and 4 inclusive that qualifies the dimension of the vector.
+	/// @tparam T Floating-point scalar types.
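+	///
+	/// Editor's sketch (not upstream documentation): pow and exp applied
+	/// component-wise:
+	/// @code
+	/// glm::vec3 p = glm::pow(glm::vec3(2.0f, 3.0f, 4.0f), glm::vec3(2.0f)); // (4, 9, 16)
+	/// glm::vec3 e = glm::exp(glm::vec3(0.0f, 1.0f, 2.0f)); // ~ (1, 2.718, 7.389)
+	/// @endcode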
+	///
+	/// @see GLSL exp man page
+	/// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> exp(vec<L, T, Q> const& v);
+
+	/// Returns the natural logarithm of v, i.e.,
+	/// returns the value y which satisfies the equation x = e^y.
+	/// Results are undefined if v <= 0.
+	///
+	/// @param v log function is defined for input values of v defined in the range (0, +inf) in the limit of the type qualifier.
+	/// @tparam L An integer between 1 and 4 inclusive that qualifies the dimension of the vector.
+	/// @tparam T Floating-point scalar types.
+	///
+	/// @see GLSL log man page
+	/// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> log(vec<L, T, Q> const& v);
+
+	/// Returns 2 raised to the v power.
+	///
+	/// @param v exp2 function is defined for input values of v defined in the range (-inf, +inf) in the limit of the type qualifier.
+	/// @tparam L An integer between 1 and 4 inclusive that qualifies the dimension of the vector.
+	/// @tparam T Floating-point scalar types.
+	///
+	/// @see GLSL exp2 man page
+	/// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> exp2(vec<L, T, Q> const& v);
+
+	/// Returns the base 2 log of x, i.e., returns the value y,
+	/// which satisfies the equation x = 2 ^ y.
+	///
+	/// @param v log2 function is defined for input values of v defined in the range (0, +inf) in the limit of the type qualifier.
+	/// @tparam L An integer between 1 and 4 inclusive that qualifies the dimension of the vector.
+	/// @tparam T Floating-point scalar types.
+	///
+	/// @see GLSL log2 man page
+	/// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> log2(vec<L, T, Q> const& v);
+
+	/// Returns the positive square root of v.
+	///
+	/// @param v sqrt function is defined for input values of v defined in the range [0, +inf) in the limit of the type qualifier.
+	/// @tparam L An integer between 1 and 4 inclusive that qualifies the dimension of the vector.
+	/// @tparam T Floating-point scalar types.
+	///
+	/// @see GLSL sqrt man page
+	/// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> sqrt(vec<L, T, Q> const& v);
+
+	/// Returns the reciprocal of the positive square root of v.
+	///
+	/// @param v inversesqrt function is defined for input values of v defined in the range [0, +inf) in the limit of the type qualifier.
+	/// @tparam L An integer between 1 and 4 inclusive that qualifies the dimension of the vector.
+	/// @tparam T Floating-point scalar types.
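+	///
+	/// Editor's sketch (not upstream documentation): inversesqrt can normalize a
+	/// vector without an explicit divide:
+	/// @code
+	/// glm::vec3 v(3.0f, 0.0f, 4.0f);
+	/// glm::vec3 n = v * glm::inversesqrt(glm::vec3(glm::dot(v, v))); // ~ (0.6, 0, 0.8)
+	/// @endcode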
+	///
+	/// @see GLSL inversesqrt man page
+	/// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> inversesqrt(vec<L, T, Q> const& v);
+
+	/// @}
+}//namespace glm
+
+#include "detail/func_exponential.inl"
diff --git a/MacroLibX/third_party/glm/ext.hpp b/MacroLibX/third_party/glm/ext.hpp
new file mode 100644
index 0000000..3bc8db2
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext.hpp
@@ -0,0 +1,196 @@
+/// @file glm/ext.hpp
+///
+/// @ref core (Dependence)
+
+#include "detail/setup.hpp"
+
+#pragma once
+
+#include "glm.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_MESSAGE_EXT_INCLUDED_DISPLAYED)
+#	define GLM_MESSAGE_EXT_INCLUDED_DISPLAYED
+#	pragma message("GLM: All extensions included (not recommended)")
+#endif//GLM_MESSAGES
+
+#include "./ext/matrix_double2x2.hpp"
+#include "./ext/matrix_double2x2_precision.hpp"
+#include "./ext/matrix_double2x3.hpp"
+#include "./ext/matrix_double2x3_precision.hpp"
+#include "./ext/matrix_double2x4.hpp"
+#include "./ext/matrix_double2x4_precision.hpp"
+#include "./ext/matrix_double3x2.hpp"
+#include "./ext/matrix_double3x2_precision.hpp"
+#include "./ext/matrix_double3x3.hpp"
+#include "./ext/matrix_double3x3_precision.hpp"
+#include "./ext/matrix_double3x4.hpp"
+#include "./ext/matrix_double3x4_precision.hpp"
+#include "./ext/matrix_double4x2.hpp"
+#include "./ext/matrix_double4x2_precision.hpp"
+#include "./ext/matrix_double4x3.hpp"
+#include "./ext/matrix_double4x3_precision.hpp"
+#include "./ext/matrix_double4x4.hpp"
+#include "./ext/matrix_double4x4_precision.hpp"
+
+#include "./ext/matrix_float2x2.hpp"
+#include "./ext/matrix_float2x2_precision.hpp"
+#include "./ext/matrix_float2x3.hpp"
+#include "./ext/matrix_float2x3_precision.hpp"
+#include "./ext/matrix_float2x4.hpp"
+#include "./ext/matrix_float2x4_precision.hpp"
+#include "./ext/matrix_float3x2.hpp"
+#include "./ext/matrix_float3x2_precision.hpp"
+#include "./ext/matrix_float3x3.hpp"
+#include "./ext/matrix_float3x3_precision.hpp"
+#include "./ext/matrix_float3x4.hpp"
+#include "./ext/matrix_float3x4_precision.hpp"
+#include "./ext/matrix_float4x2.hpp"
+#include "./ext/matrix_float4x2_precision.hpp"
+#include "./ext/matrix_float4x3.hpp"
+#include "./ext/matrix_float4x3_precision.hpp"
+#include "./ext/matrix_float4x4.hpp"
+#include "./ext/matrix_float4x4_precision.hpp"
+
+#include "./ext/matrix_relational.hpp"
+
+#include "./ext/quaternion_double.hpp"
+#include "./ext/quaternion_double_precision.hpp"
+#include "./ext/quaternion_float.hpp"
+#include "./ext/quaternion_float_precision.hpp"
+#include "./ext/quaternion_geometric.hpp"
+#include "./ext/quaternion_relational.hpp"
+
+#include "./ext/scalar_constants.hpp"
+#include "./ext/scalar_int_sized.hpp"
+#include "./ext/scalar_relational.hpp"
+
+#include "./ext/vector_bool1.hpp"
+#include "./ext/vector_bool1_precision.hpp"
+#include "./ext/vector_bool2.hpp"
+#include "./ext/vector_bool2_precision.hpp"
+#include "./ext/vector_bool3.hpp"
+#include "./ext/vector_bool3_precision.hpp"
+#include "./ext/vector_bool4.hpp"
+#include "./ext/vector_bool4_precision.hpp"
+
+#include "./ext/vector_double1.hpp"
+#include "./ext/vector_double1_precision.hpp"
+#include "./ext/vector_double2.hpp"
+#include "./ext/vector_double2_precision.hpp"
+#include "./ext/vector_double3.hpp"
+#include "./ext/vector_double3_precision.hpp"
+#include "./ext/vector_double4.hpp"
+#include "./ext/vector_double4_precision.hpp"
+
+#include "./ext/vector_float1.hpp"
+#include "./ext/vector_float1_precision.hpp"
+#include "./ext/vector_float2.hpp"
+#include "./ext/vector_float2_precision.hpp"
+#include "./ext/vector_float3.hpp"
+#include "./ext/vector_float3_precision.hpp"
+#include "./ext/vector_float4.hpp"
+#include "./ext/vector_float4_precision.hpp"
+
+#include "./ext/vector_int1.hpp"
+#include "./ext/vector_int1_precision.hpp"
+#include "./ext/vector_int2.hpp"
+#include "./ext/vector_int2_precision.hpp"
+#include "./ext/vector_int3.hpp"
+#include "./ext/vector_int3_precision.hpp"
+#include "./ext/vector_int4.hpp"
+#include "./ext/vector_int4_precision.hpp"
+
+#include "./ext/vector_relational.hpp"
+
+#include "./ext/vector_uint1.hpp"
+#include "./ext/vector_uint1_precision.hpp"
+#include "./ext/vector_uint2.hpp"
+#include "./ext/vector_uint2_precision.hpp"
+#include "./ext/vector_uint3.hpp"
+#include "./ext/vector_uint3_precision.hpp"
+#include "./ext/vector_uint4.hpp"
+#include "./ext/vector_uint4_precision.hpp"
+
+#include "./gtc/bitfield.hpp"
+#include "./gtc/color_space.hpp"
+#include "./gtc/constants.hpp"
+#include "./gtc/epsilon.hpp"
+#include "./gtc/integer.hpp"
+#include "./gtc/matrix_access.hpp"
+#include "./gtc/matrix_integer.hpp"
+#include "./gtc/matrix_inverse.hpp"
+#include "./gtc/matrix_transform.hpp"
+#include "./gtc/noise.hpp"
+#include "./gtc/packing.hpp"
+#include "./gtc/quaternion.hpp"
+#include "./gtc/random.hpp"
+#include "./gtc/reciprocal.hpp"
+#include "./gtc/round.hpp"
+#include "./gtc/type_precision.hpp"
+#include "./gtc/type_ptr.hpp"
+#include "./gtc/ulp.hpp"
+#include "./gtc/vec1.hpp"
+#if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE
+#	include "./gtc/type_aligned.hpp"
+#endif
+
+#ifdef GLM_ENABLE_EXPERIMENTAL
+#include "./gtx/associated_min_max.hpp"
+#include "./gtx/bit.hpp"
+#include "./gtx/closest_point.hpp"
+#include "./gtx/color_encoding.hpp"
+#include "./gtx/color_space.hpp"
+#include "./gtx/color_space_YCoCg.hpp"
+#include "./gtx/compatibility.hpp"
+#include "./gtx/component_wise.hpp"
+#include "./gtx/dual_quaternion.hpp"
+#include "./gtx/euler_angles.hpp"
+#include "./gtx/extend.hpp"
+#include "./gtx/extended_min_max.hpp"
+#include "./gtx/fast_exponential.hpp"
+#include "./gtx/fast_square_root.hpp"
+#include "./gtx/fast_trigonometry.hpp"
+#include "./gtx/functions.hpp"
+#include "./gtx/gradient_paint.hpp"
+#include "./gtx/handed_coordinate_space.hpp"
+#include "./gtx/integer.hpp"
+#include "./gtx/intersect.hpp"
+#include "./gtx/log_base.hpp"
+#include "./gtx/matrix_cross_product.hpp"
+#include "./gtx/matrix_interpolation.hpp"
+#include "./gtx/matrix_major_storage.hpp"
+#include "./gtx/matrix_operation.hpp"
+#include "./gtx/matrix_query.hpp"
+#include "./gtx/mixed_product.hpp"
+#include "./gtx/norm.hpp"
+#include "./gtx/normal.hpp"
+#include "./gtx/normalize_dot.hpp"
+#include "./gtx/number_precision.hpp"
+#include "./gtx/optimum_pow.hpp"
+#include "./gtx/orthonormalize.hpp"
+#include "./gtx/perpendicular.hpp"
+#include "./gtx/polar_coordinates.hpp"
+#include "./gtx/projection.hpp"
+#include "./gtx/quaternion.hpp"
+#include "./gtx/raw_data.hpp"
+#include "./gtx/rotate_vector.hpp"
+#include "./gtx/spline.hpp"
+#include "./gtx/std_based_type.hpp"
+#if !(GLM_COMPILER & GLM_COMPILER_CUDA)
+#	include "./gtx/string_cast.hpp"
+#endif
+#include "./gtx/transform.hpp"
+#include "./gtx/transform2.hpp"
+#include "./gtx/vec_swizzle.hpp"
+#include "./gtx/vector_angle.hpp"
+#include "./gtx/vector_query.hpp"
+#include "./gtx/wrap.hpp"
+
+#if GLM_HAS_TEMPLATE_ALIASES
+#	include "./gtx/scalar_multiplication.hpp"
+#endif
+
+#if GLM_HAS_RANGE_FOR
+#	include "./gtx/range.hpp"
+#endif
+#endif//GLM_ENABLE_EXPERIMENTAL
diff --git a/MacroLibX/third_party/glm/ext/matrix_clip_space.hpp b/MacroLibX/third_party/glm/ext/matrix_clip_space.hpp
new file mode 100644
index 0000000..c3874f2
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_clip_space.hpp
@@ -0,0 +1,522 @@
+/// @ref ext_matrix_clip_space
+/// @file glm/ext/matrix_clip_space.hpp
+///
+/// @defgroup ext_matrix_clip_space GLM_EXT_matrix_clip_space
+/// @ingroup ext
+///
+/// Defines functions that generate clip space transformation matrices.
+///
+/// The matrices generated by this extension use standard OpenGL fixed-function
+/// conventions. For example, the lookAt function generates a transform from world
+/// space into the specific eye space that the projective matrix functions
+/// (perspective, ortho, etc) are designed to expect. The OpenGL compatibility
+/// specification defines the particular layout of this eye space.
+///
+/// Include <glm/ext/matrix_clip_space.hpp> to use the features of this extension.
+///
+/// @see ext_matrix_transform
+/// @see ext_matrix_projection
+
+#pragma once
+
+// Dependencies
+#include "../ext/scalar_constants.hpp"
+#include "../geometric.hpp"
+#include "../trigonometric.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_EXT_matrix_clip_space extension included")
+#endif
+
+namespace glm
+{
+	/// @addtogroup ext_matrix_clip_space
+	/// @{
+
+	/// Creates a matrix for projecting two-dimensional coordinates onto the screen.
+	///
+	/// @tparam T A floating-point scalar type
+	///
+	/// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top, T const& zNear, T const& zFar)
+	/// @see gluOrtho2D man page
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> ortho(
+		T left, T right, T bottom, T top);
+
+	/// Creates a matrix for an orthographic parallel viewing volume, using left-handed coordinates.
+	/// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	///
+	/// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top)
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoLH_ZO(
+		T left, T right, T bottom, T top, T zNear, T zFar);
+
+	/// Creates a matrix for an orthographic parallel viewing volume, using left-handed coordinates.
+	/// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	///
+	/// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top)
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoLH_NO(
+		T left, T right, T bottom, T top, T zNear, T zFar);
+
+	/// Creates a matrix for an orthographic parallel viewing volume, using right-handed coordinates.
+	/// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	///
+	/// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top)
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoRH_ZO(
+		T left, T right, T bottom, T top, T zNear, T zFar);
+
+	/// Creates a matrix for an orthographic parallel viewing volume, using right-handed coordinates.
+	/// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	///
+	/// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top)
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoRH_NO(
+		T left, T right, T bottom, T top, T zNear, T zFar);
+
+	/// Creates a matrix for an orthographic parallel viewing volume, using left-handed coordinates if GLM_FORCE_LEFT_HANDED is defined, or right-handed coordinates otherwise.
+	/// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	///
+	/// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top)
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoZO(
+		T left, T right, T bottom, T top, T zNear, T zFar);
+
+	/// Creates a matrix for an orthographic parallel viewing volume, using left-handed coordinates if GLM_FORCE_LEFT_HANDED is defined, or right-handed coordinates otherwise.
+	/// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	///
+	/// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top)
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoNO(
+		T left, T right, T bottom, T top, T zNear, T zFar);
+
+	/// Creates a matrix for an orthographic parallel viewing volume, using left-handed coordinates.
+	/// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	/// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	///
+	/// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top)
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoLH(
+		T left, T right, T bottom, T top, T zNear, T zFar);
+
+	/// Creates a matrix for an orthographic parallel viewing volume, using right-handed coordinates.
+	/// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	/// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	///
+	/// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top)
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoRH(
+		T left, T right, T bottom, T top, T zNear, T zFar);
+
+	/// Creates a matrix for an orthographic parallel viewing volume, using the default handedness and default near and far clip planes definition.
+	/// To change default handedness use GLM_FORCE_LEFT_HANDED. To change default near and far clip planes definition use GLM_FORCE_DEPTH_ZERO_TO_ONE.
+	///
+	/// @tparam T A floating-point scalar type
+	///
+	/// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top)
+	/// @see glOrtho man page
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> ortho(
+		T left, T right, T bottom, T top, T zNear, T zFar);
+
+	/// Creates a left handed frustum matrix.
+	/// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumLH_ZO(
+		T left, T right, T bottom, T top, T near, T far);
+
+	/// Creates a left handed frustum matrix.
+	/// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumLH_NO(
+		T left, T right, T bottom, T top, T near, T far);
+
+	/// Creates a right handed frustum matrix.
+	/// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumRH_ZO(
+		T left, T right, T bottom, T top, T near, T far);
+
+	/// Creates a right handed frustum matrix.
+	/// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumRH_NO(
+		T left, T right, T bottom, T top, T near, T far);
+
+	/// Creates a frustum matrix using left-handed coordinates if GLM_FORCE_LEFT_HANDED is defined, or right-handed coordinates otherwise.
+	/// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumZO(
+		T left, T right, T bottom, T top, T near, T far);
+
+	/// Creates a frustum matrix using left-handed coordinates if GLM_FORCE_LEFT_HANDED is defined, or right-handed coordinates otherwise.
+	/// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumNO(
+		T left, T right, T bottom, T top, T near, T far);
+
+	/// Creates a left handed frustum matrix.
+	/// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	/// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumLH(
+		T left, T right, T bottom, T top, T near, T far);
+
+	/// Creates a right handed frustum matrix.
+	/// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	/// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumRH(
+		T left, T right, T bottom, T top, T near, T far);
+
+	/// Creates a frustum matrix using the default handedness and the default near and far clip plane definitions.
+	/// To change default handedness use GLM_FORCE_LEFT_HANDED. To change default near and far clip planes definition use GLM_FORCE_DEPTH_ZERO_TO_ONE.
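+	///
+	/// Editor's sketch (not upstream documentation; assumes the default right-handed,
+	/// -1..+1 clip conventions):
+	/// @code
+	/// // A symmetric frustum matching perspective(radians(45), 16/9, 0.1, 100):
+	/// float zNear = 0.1f, zFar = 100.0f;
+	/// float top = zNear * std::tan(glm::radians(45.0f) * 0.5f); // <cmath>
+	/// float right = top * (16.0f / 9.0f);
+	/// glm::mat4 Proj = glm::frustum(-right, right, -top, top, zNear, zFar);
+	/// @endcode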
+	///
+	/// @tparam T A floating-point scalar type
+	/// @see glFrustum man page
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> frustum(
+		T left, T right, T bottom, T top, T near, T far);
+
+	/// Creates a matrix for a right handed, symmetric perspective-view frustum.
+	/// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	///
+	/// @param fovy Specifies the field of view angle in the y direction. Expressed in radians.
+	/// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height).
+	/// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+	/// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveRH_ZO(
+		T fovy, T aspect, T near, T far);
+
+	/// Creates a matrix for a right handed, symmetric perspective-view frustum.
+	/// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @param fovy Specifies the field of view angle in the y direction. Expressed in radians.
+	/// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height).
+	/// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+	/// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveRH_NO(
+		T fovy, T aspect, T near, T far);
+
+	/// Creates a matrix for a left handed, symmetric perspective-view frustum.
+	/// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	///
+	/// @param fovy Specifies the field of view angle in the y direction. Expressed in radians.
+	/// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height).
+	/// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+	/// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveLH_ZO(
+		T fovy, T aspect, T near, T far);
+
+	/// Creates a matrix for a left handed, symmetric perspective-view frustum.
+	/// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @param fovy Specifies the field of view angle in the y direction. Expressed in radians.
+	/// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height).
+	/// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+	/// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveLH_NO(
+		T fovy, T aspect, T near, T far);
+
+	/// Creates a matrix for a symmetric perspective-view frustum using left-handed coordinates if GLM_FORCE_LEFT_HANDED is defined, or right-handed coordinates otherwise.
+	/// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	///
+	/// @param fovy Specifies the field of view angle in the y direction. Expressed in radians.
+	/// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height).
+	/// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+	/// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveZO(
+		T fovy, T aspect, T near, T far);
+
+	/// Creates a matrix for a symmetric perspective-view frustum using left-handed coordinates if GLM_FORCE_LEFT_HANDED is defined, or right-handed coordinates otherwise.
+	/// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @param fovy Specifies the field of view angle in the y direction. Expressed in radians.
+	/// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height).
+	/// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+	/// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveNO(
+		T fovy, T aspect, T near, T far);
+
+	/// Creates a matrix for a right handed, symmetric perspective-view frustum.
+	/// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	/// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @param fovy Specifies the field of view angle in the y direction. Expressed in radians.
+	/// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height).
+	/// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+	/// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveRH(
+		T fovy, T aspect, T near, T far);
+
+	/// Creates a matrix for a left handed, symmetric perspective-view frustum.
+	/// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	/// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @param fovy Specifies the field of view angle in the y direction. Expressed in radians.
+	/// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height).
+	/// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+	/// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveLH(
+		T fovy, T aspect, T near, T far);
+
+	/// Creates a matrix for a symmetric perspective-view frustum based on the default handedness and default near and far clip planes definition.
+	/// To change default handedness use GLM_FORCE_LEFT_HANDED. To change default near and far clip planes definition use GLM_FORCE_DEPTH_ZERO_TO_ONE.
+	///
+	/// @param fovy Specifies the field of view angle in the y direction. Expressed in radians.
+	/// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height).
+	/// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+	/// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+	///
+	/// @tparam T A floating-point scalar type
+	/// @see gluPerspective man page
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> perspective(
+		T fovy, T aspect, T near, T far);
+
+	/// Builds a perspective projection matrix based on a field of view using right-handed coordinates.
+	/// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	///
+	/// @param fov Expressed in radians.
+	/// @param width Width of the viewport
+	/// @param height Height of the viewport
+	/// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+	/// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovRH_ZO(
+		T fov, T width, T height, T near, T far);
+
+	/// Builds a perspective projection matrix based on a field of view using right-handed coordinates.
+	/// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @param fov Expressed in radians.
+	/// @param width Width of the viewport
+	/// @param height Height of the viewport
+	/// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+	/// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+	///
+	/// @tparam T A floating-point scalar type
+	template<typename T>
+	GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovRH_NO(
+		T fov, T width, T height, T near, T far);
+
+	/// Builds a perspective projection matrix based on a field of view using left-handed coordinates.
+	/// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	///
+	/// @param fov Expressed in radians.
+    /// Builds a perspective projection matrix based on a field of view using left-handed coordinates.
+    /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+    ///
+    /// @param fov Expressed in radians.
+    /// @param width Width of the viewport
+    /// @param height Height of the viewport
+    /// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+    /// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+    ///
+    /// @tparam T A floating-point scalar type
+    template<typename T>
+    GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovLH_ZO(
+        T fov, T width, T height, T near, T far);
+
+    /// Builds a perspective projection matrix based on a field of view using left-handed coordinates.
+    /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+    ///
+    /// @param fov Expressed in radians.
+    /// @param width Width of the viewport
+    /// @param height Height of the viewport
+    /// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+    /// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+    ///
+    /// @tparam T A floating-point scalar type
+    template<typename T>
+    GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovLH_NO(
+        T fov, T width, T height, T near, T far);
+
+    /// Builds a perspective projection matrix based on a field of view using left-handed coordinates if GLM_FORCE_LEFT_HANDED is defined or right-handed coordinates otherwise.
+    /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+    ///
+    /// @param fov Expressed in radians.
+    /// @param width Width of the viewport
+    /// @param height Height of the viewport
+    /// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+    /// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+    ///
+    /// @tparam T A floating-point scalar type
+    template<typename T>
+    GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovZO(
+        T fov, T width, T height, T near, T far);
+
+    /// Builds a perspective projection matrix based on a field of view using left-handed coordinates if GLM_FORCE_LEFT_HANDED is defined or right-handed coordinates otherwise.
+    /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+    ///
+    /// @param fov Expressed in radians.
+    /// @param width Width of the viewport
+    /// @param height Height of the viewport
+    /// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+    /// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+    ///
+    /// @tparam T A floating-point scalar type
+    template<typename T>
+    GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovNO(
+        T fov, T width, T height, T near, T far);
+    /// Builds a right handed perspective projection matrix based on a field of view.
+    /// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+    /// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+    ///
+    /// @param fov Expressed in radians.
+    /// @param width Width of the viewport
+    /// @param height Height of the viewport
+    /// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+    /// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+    ///
+    /// @tparam T A floating-point scalar type
+    template<typename T>
+    GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovRH(
+        T fov, T width, T height, T near, T far);
+
+    /// Builds a left handed perspective projection matrix based on a field of view.
+    /// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+    /// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+    ///
+    /// @param fov Expressed in radians.
+    /// @param width Width of the viewport
+    /// @param height Height of the viewport
+    /// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+    /// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+    ///
+    /// @tparam T A floating-point scalar type
+    template<typename T>
+    GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovLH(
+        T fov, T width, T height, T near, T far);
+
+    /// Builds a perspective projection matrix based on a field of view and the default handedness and default near and far clip planes definition.
+    /// To change default handedness use GLM_FORCE_LEFT_HANDED. To change default near and far clip planes definition use GLM_FORCE_DEPTH_ZERO_TO_ONE.
+    ///
+    /// @param fov Expressed in radians.
+    /// @param width Width of the viewport
+    /// @param height Height of the viewport
+    /// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+    /// @param far Specifies the distance from the viewer to the far clipping plane (always positive).
+    ///
+    /// @tparam T A floating-point scalar type
+    template<typename T>
+    GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFov(
+        T fov, T width, T height, T near, T far);
+
+    /// Creates a matrix for a left handed, symmetric perspective-view frustum with far plane at infinity.
+    ///
+    /// @param fovy Specifies the field of view angle in the y direction. Expressed in radians.
+    /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height).
+    /// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+    ///
+    /// @tparam T A floating-point scalar type
+    template<typename T>
+    GLM_FUNC_DECL mat<4, 4, T, defaultp> infinitePerspectiveLH(
+        T fovy, T aspect, T near);
+
+    /// Creates a matrix for a right handed, symmetric perspective-view frustum with far plane at infinity.
+    ///
+    /// @param fovy Specifies the field of view angle in the y direction. Expressed in radians.
+    /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height).
+    /// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+    ///
+    /// @tparam T A floating-point scalar type
+    template<typename T>
+    GLM_FUNC_DECL mat<4, 4, T, defaultp> infinitePerspectiveRH(
+        T fovy, T aspect, T near);
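A small numerical check (illustrative, not from the sources): `infinitePerspectiveRH` behaves like the far-to-infinity limit of `perspectiveRH_NO`, whose third-column entries `-(f+n)/(f-n)` and `-2fn/(f-n)` tend to `-1` and `-2n`.

```cpp
#include <cassert>
#include <cmath>
#include <glm/glm.hpp>
#include <glm/ext/matrix_clip_space.hpp>

void checkInfiniteLimit()
{
    const float fovy = glm::radians(45.0f), aspect = 1.5f, n = 0.1f;
    glm::mat4 inf = glm::infinitePerspectiveRH(fovy, aspect, n);
    glm::mat4 far = glm::perspectiveRH_NO(fovy, aspect, n, 1.0e7f);
    assert(std::fabs(inf[2][2] - far[2][2]) < 1e-4f); // both close to -1
    assert(std::fabs(inf[3][2] - far[3][2]) < 1e-4f); // both close to -2n
}
```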
+    /// Creates a matrix for a symmetric perspective-view frustum with far plane at infinity with default handedness.
+    ///
+    /// @param fovy Specifies the field of view angle in the y direction. Expressed in radians.
+    /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height).
+    /// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+    ///
+    /// @tparam T A floating-point scalar type
+    template<typename T>
+    GLM_FUNC_DECL mat<4, 4, T, defaultp> infinitePerspective(
+        T fovy, T aspect, T near);
+
+    /// Creates a matrix for a symmetric perspective-view frustum with far plane at infinity for graphics hardware that doesn't support depth clamping.
+    ///
+    /// @param fovy Specifies the field of view angle in the y direction. Expressed in radians.
+    /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height).
+    /// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+    ///
+    /// @tparam T A floating-point scalar type
+    template<typename T>
+    GLM_FUNC_DECL mat<4, 4, T, defaultp> tweakedInfinitePerspective(
+        T fovy, T aspect, T near);
+
+    /// Creates a matrix for a symmetric perspective-view frustum with far plane at infinity for graphics hardware that doesn't support depth clamping.
+    ///
+    /// @param fovy Specifies the field of view angle in the y direction. Expressed in radians.
+    /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height).
+    /// @param near Specifies the distance from the viewer to the near clipping plane (always positive).
+    /// @param ep Epsilon
+    ///
+    /// @tparam T A floating-point scalar type
+    template<typename T>
+    GLM_FUNC_DECL mat<4, 4, T, defaultp> tweakedInfinitePerspective(
+        T fovy, T aspect, T near, T ep);
+
+    /// @}
+}//namespace glm
+
+#include "matrix_clip_space.inl"
diff --git a/MacroLibX/third_party/glm/ext/matrix_clip_space.inl b/MacroLibX/third_party/glm/ext/matrix_clip_space.inl
new file mode 100644
index 0000000..7e4df33
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_clip_space.inl
@@ -0,0 +1,555 @@
+namespace glm
+{
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> ortho(T left, T right, T bottom, T top)
+    {
+        mat<4, 4, T, defaultp> Result(static_cast<T>(1));
+        Result[0][0] = static_cast<T>(2) / (right - left);
+        Result[1][1] = static_cast<T>(2) / (top - bottom);
+        Result[2][2] = - static_cast<T>(1);
+        Result[3][0] = - (right + left) / (right - left);
+        Result[3][1] = - (top + bottom) / (top - bottom);
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> orthoLH_ZO(T left, T right, T bottom, T top, T zNear, T zFar)
+    {
+        mat<4, 4, T, defaultp> Result(1);
+        Result[0][0] = static_cast<T>(2) / (right - left);
+        Result[1][1] = static_cast<T>(2) / (top - bottom);
+        Result[2][2] = static_cast<T>(1) / (zFar - zNear);
+        Result[3][0] = - (right + left) / (right - left);
+        Result[3][1] = - (top + bottom) / (top - bottom);
+        Result[3][2] = - zNear / (zFar - zNear);
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> orthoLH_NO(T left, T right, T bottom, T top, T zNear, T zFar)
+    {
+        mat<4, 4, T, defaultp> Result(1);
+        Result[0][0] = static_cast<T>(2) / (right - left);
+        Result[1][1] = static_cast<T>(2) / (top - bottom);
+        Result[2][2] = static_cast<T>(2) / (zFar - zNear);
+        Result[3][0] = - (right + left) / (right - left);
+        Result[3][1] = - (top + bottom) / (top - bottom);
+        Result[3][2] = - (zFar + zNear) / (zFar - zNear);
+        return Result;
+    }
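The four-argument `ortho` above is the classic 2D/UI projection. An illustrative sketch (helper name is not from the sources):

```cpp
#include <glm/glm.hpp>
#include <glm/ext/matrix_clip_space.hpp>

// Map window pixel coordinates with the origin at the top-left:
// left = 0, right = width, bottom = height, top = 0 flips the y axis so
// that y grows downwards, as in window coordinates.
glm::mat4 uiProjection(float screenWidth, float screenHeight)
{
    return glm::ortho(0.0f, screenWidth, screenHeight, 0.0f);
}
```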
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> orthoRH_ZO(T left, T right, T bottom, T top, T zNear, T zFar)
+    {
+        mat<4, 4, T, defaultp> Result(1);
+        Result[0][0] = static_cast<T>(2) / (right - left);
+        Result[1][1] = static_cast<T>(2) / (top - bottom);
+        Result[2][2] = - static_cast<T>(1) / (zFar - zNear);
+        Result[3][0] = - (right + left) / (right - left);
+        Result[3][1] = - (top + bottom) / (top - bottom);
+        Result[3][2] = - zNear / (zFar - zNear);
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> orthoRH_NO(T left, T right, T bottom, T top, T zNear, T zFar)
+    {
+        mat<4, 4, T, defaultp> Result(1);
+        Result[0][0] = static_cast<T>(2) / (right - left);
+        Result[1][1] = static_cast<T>(2) / (top - bottom);
+        Result[2][2] = - static_cast<T>(2) / (zFar - zNear);
+        Result[3][0] = - (right + left) / (right - left);
+        Result[3][1] = - (top + bottom) / (top - bottom);
+        Result[3][2] = - (zFar + zNear) / (zFar - zNear);
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> orthoZO(T left, T right, T bottom, T top, T zNear, T zFar)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT
+        return orthoLH_ZO(left, right, bottom, top, zNear, zFar);
+#   else
+        return orthoRH_ZO(left, right, bottom, top, zNear, zFar);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> orthoNO(T left, T right, T bottom, T top, T zNear, T zFar)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT
+        return orthoLH_NO(left, right, bottom, top, zNear, zFar);
+#   else
+        return orthoRH_NO(left, right, bottom, top, zNear, zFar);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> orthoLH(T left, T right, T bottom, T top, T zNear, T zFar)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT
+        return orthoLH_ZO(left, right, bottom, top, zNear, zFar);
+#   else
+        return orthoLH_NO(left, right, bottom, top, zNear, zFar);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> orthoRH(T left, T right, T bottom, T top, T zNear, T zFar)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT
+        return orthoRH_ZO(left, right, bottom, top, zNear, zFar);
+#   else
+        return orthoRH_NO(left, right, bottom, top, zNear, zFar);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> ortho(T left, T right, T bottom, T top, T zNear, T zFar)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_ZO
+        return orthoLH_ZO(left, right, bottom, top, zNear, zFar);
+#   elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_NO
+        return orthoLH_NO(left, right, bottom, top, zNear, zFar);
+#   elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_ZO
+        return orthoRH_ZO(left, right, bottom, top, zNear, zFar);
+#   elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_NO
+        return orthoRH_NO(left, right, bottom, top, zNear, zFar);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumLH_ZO(T left, T right, T bottom, T top, T nearVal, T farVal)
+    {
+        mat<4, 4, T, defaultp> Result(0);
+        Result[0][0] = (static_cast<T>(2) * nearVal) / (right - left);
+        Result[1][1] = (static_cast<T>(2) * nearVal) / (top - bottom);
+        Result[2][0] = (right + left) / (right - left);
+        Result[2][1] = (top + bottom) / (top - bottom);
+        Result[2][2] = farVal / (farVal - nearVal);
+        Result[2][3] = static_cast<T>(1);
+        Result[3][2] = -(farVal * nearVal) / (farVal - nearVal);
+        return Result;
+    }
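The `GLM_CONFIG_CLIP_CONTROL` dispatch above is steered by force-macros that must be defined before any GLM header is included. An illustrative configuration sketch:

```cpp
// With these two defines, plain glm::ortho/glm::perspective resolve to the
// LH_ZO variants (left-handed coordinates, depth in [0, 1]).
#define GLM_FORCE_DEPTH_ZERO_TO_ONE // select the ZO clip volume
#define GLM_FORCE_LEFT_HANDED       // select left-handed coordinates
#include <glm/glm.hpp>
#include <glm/ext/matrix_clip_space.hpp>
```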
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumLH_NO(T left, T right, T bottom, T top, T nearVal, T farVal)
+    {
+        mat<4, 4, T, defaultp> Result(0);
+        Result[0][0] = (static_cast<T>(2) * nearVal) / (right - left);
+        Result[1][1] = (static_cast<T>(2) * nearVal) / (top - bottom);
+        Result[2][0] = (right + left) / (right - left);
+        Result[2][1] = (top + bottom) / (top - bottom);
+        Result[2][2] = (farVal + nearVal) / (farVal - nearVal);
+        Result[2][3] = static_cast<T>(1);
+        Result[3][2] = - (static_cast<T>(2) * farVal * nearVal) / (farVal - nearVal);
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumRH_ZO(T left, T right, T bottom, T top, T nearVal, T farVal)
+    {
+        mat<4, 4, T, defaultp> Result(0);
+        Result[0][0] = (static_cast<T>(2) * nearVal) / (right - left);
+        Result[1][1] = (static_cast<T>(2) * nearVal) / (top - bottom);
+        Result[2][0] = (right + left) / (right - left);
+        Result[2][1] = (top + bottom) / (top - bottom);
+        Result[2][2] = farVal / (nearVal - farVal);
+        Result[2][3] = static_cast<T>(-1);
+        Result[3][2] = -(farVal * nearVal) / (farVal - nearVal);
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumRH_NO(T left, T right, T bottom, T top, T nearVal, T farVal)
+    {
+        mat<4, 4, T, defaultp> Result(0);
+        Result[0][0] = (static_cast<T>(2) * nearVal) / (right - left);
+        Result[1][1] = (static_cast<T>(2) * nearVal) / (top - bottom);
+        Result[2][0] = (right + left) / (right - left);
+        Result[2][1] = (top + bottom) / (top - bottom);
+        Result[2][2] = - (farVal + nearVal) / (farVal - nearVal);
+        Result[2][3] = static_cast<T>(-1);
+        Result[3][2] = - (static_cast<T>(2) * farVal * nearVal) / (farVal - nearVal);
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumZO(T left, T right, T bottom, T top, T nearVal, T farVal)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT
+        return frustumLH_ZO(left, right, bottom, top, nearVal, farVal);
+#   else
+        return frustumRH_ZO(left, right, bottom, top, nearVal, farVal);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumNO(T left, T right, T bottom, T top, T nearVal, T farVal)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT
+        return frustumLH_NO(left, right, bottom, top, nearVal, farVal);
+#   else
+        return frustumRH_NO(left, right, bottom, top, nearVal, farVal);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumLH(T left, T right, T bottom, T top, T nearVal, T farVal)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT
+        return frustumLH_ZO(left, right, bottom, top, nearVal, farVal);
+#   else
+        return frustumLH_NO(left, right, bottom, top, nearVal, farVal);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumRH(T left, T right, T bottom, T top, T nearVal, T farVal)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT
+        return frustumRH_ZO(left, right, bottom, top, nearVal, farVal);
+#   else
+        return frustumRH_NO(left, right, bottom, top, nearVal, farVal);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustum(T left, T right, T bottom, T top, T nearVal, T farVal)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_ZO
+        return frustumLH_ZO(left, right, bottom, top, nearVal, farVal);
+#   elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_NO
+        return frustumLH_NO(left, right, bottom, top, nearVal, farVal);
+#   elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_ZO
+        return frustumRH_ZO(left, right, bottom, top, nearVal, farVal);
+#   elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_NO
+        return frustumRH_NO(left, right, bottom, top, nearVal, farVal);
+#   endif
+    }
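Unlike `perspective()`, `frustum()` accepts an off-centre near rectangle, which is useful for sub-region or asymmetric projections; a symmetric call `frustum(-r, r, -t, t, n, f)` reproduces a regular perspective projection. A brief illustrative sketch:

```cpp
#include <glm/glm.hpp>
#include <glm/ext/matrix_clip_space.hpp>

// Shift the near-plane window to the right: left != -right.
glm::mat4 offCenterProjection(float n, float f)
{
    return glm::frustum(-0.5f, 1.5f, -1.0f, 1.0f, n, f);
}
```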
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveRH_ZO(T fovy, T aspect, T zNear, T zFar)
+    {
+        assert(abs(aspect - std::numeric_limits<T>::epsilon()) > static_cast<T>(0));
+
+        T const tanHalfFovy = tan(fovy / static_cast<T>(2));
+
+        mat<4, 4, T, defaultp> Result(static_cast<T>(0));
+        Result[0][0] = static_cast<T>(1) / (aspect * tanHalfFovy);
+        Result[1][1] = static_cast<T>(1) / (tanHalfFovy);
+        Result[2][2] = zFar / (zNear - zFar);
+        Result[2][3] = - static_cast<T>(1);
+        Result[3][2] = -(zFar * zNear) / (zFar - zNear);
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveRH_NO(T fovy, T aspect, T zNear, T zFar)
+    {
+        assert(abs(aspect - std::numeric_limits<T>::epsilon()) > static_cast<T>(0));
+
+        T const tanHalfFovy = tan(fovy / static_cast<T>(2));
+
+        mat<4, 4, T, defaultp> Result(static_cast<T>(0));
+        Result[0][0] = static_cast<T>(1) / (aspect * tanHalfFovy);
+        Result[1][1] = static_cast<T>(1) / (tanHalfFovy);
+        Result[2][2] = - (zFar + zNear) / (zFar - zNear);
+        Result[2][3] = - static_cast<T>(1);
+        Result[3][2] = - (static_cast<T>(2) * zFar * zNear) / (zFar - zNear);
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveLH_ZO(T fovy, T aspect, T zNear, T zFar)
+    {
+        assert(abs(aspect - std::numeric_limits<T>::epsilon()) > static_cast<T>(0));
+
+        T const tanHalfFovy = tan(fovy / static_cast<T>(2));
+
+        mat<4, 4, T, defaultp> Result(static_cast<T>(0));
+        Result[0][0] = static_cast<T>(1) / (aspect * tanHalfFovy);
+        Result[1][1] = static_cast<T>(1) / (tanHalfFovy);
+        Result[2][2] = zFar / (zFar - zNear);
+        Result[2][3] = static_cast<T>(1);
+        Result[3][2] = -(zFar * zNear) / (zFar - zNear);
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveLH_NO(T fovy, T aspect, T zNear, T zFar)
+    {
+        assert(abs(aspect - std::numeric_limits<T>::epsilon()) > static_cast<T>(0));
+
+        T const tanHalfFovy = tan(fovy / static_cast<T>(2));
+
+        mat<4, 4, T, defaultp> Result(static_cast<T>(0));
+        Result[0][0] = static_cast<T>(1) / (aspect * tanHalfFovy);
+        Result[1][1] = static_cast<T>(1) / (tanHalfFovy);
+        Result[2][2] = (zFar + zNear) / (zFar - zNear);
+        Result[2][3] = static_cast<T>(1);
+        Result[3][2] = - (static_cast<T>(2) * zFar * zNear) / (zFar - zNear);
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveZO(T fovy, T aspect, T zNear, T zFar)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT
+        return perspectiveLH_ZO(fovy, aspect, zNear, zFar);
+#   else
+        return perspectiveRH_ZO(fovy, aspect, zNear, zFar);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveNO(T fovy, T aspect, T zNear, T zFar)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT
+        return perspectiveLH_NO(fovy, aspect, zNear, zFar);
+#   else
+        return perspectiveRH_NO(fovy, aspect, zNear, zFar);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveLH(T fovy, T aspect, T zNear, T zFar)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT
+        return perspectiveLH_ZO(fovy, aspect, zNear, zFar);
+#   else
+        return perspectiveLH_NO(fovy, aspect, zNear, zFar);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveRH(T fovy, T aspect, T zNear, T zFar)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT
+        return perspectiveRH_ZO(fovy, aspect, zNear, zFar);
+#   else
+        return perspectiveRH_NO(fovy, aspect, zNear, zFar);
+#   endif
+    }
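An illustrative check of the two depth conventions implemented above: a point on the near plane (z = -zNear in right-handed view space) lands at NDC depth -1 with the `_NO` variant and at 0 with the `_ZO` variant.

```cpp
#include <cassert>
#include <cmath>
#include <glm/glm.hpp>
#include <glm/ext/matrix_clip_space.hpp>

void checkNearPlaneDepth()
{
    const float fovy = glm::radians(60.0f), aspect = 1.0f, n = 0.5f, f = 50.0f;
    glm::vec4 p(0.0f, 0.0f, -n, 1.0f); // point on the near plane

    glm::vec4 no = glm::perspectiveRH_NO(fovy, aspect, n, f) * p;
    glm::vec4 zo = glm::perspectiveRH_ZO(fovy, aspect, n, f) * p;

    assert(std::fabs(no.z / no.w - (-1.0f)) < 1e-5f); // OpenGL convention
    assert(std::fabs(zo.z / zo.w) < 1e-5f);           // Direct3D convention
}
```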
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspective(T fovy, T aspect, T zNear, T zFar)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_ZO
+        return perspectiveLH_ZO(fovy, aspect, zNear, zFar);
+#   elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_NO
+        return perspectiveLH_NO(fovy, aspect, zNear, zFar);
+#   elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_ZO
+        return perspectiveRH_ZO(fovy, aspect, zNear, zFar);
+#   elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_NO
+        return perspectiveRH_NO(fovy, aspect, zNear, zFar);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovRH_ZO(T fov, T width, T height, T zNear, T zFar)
+    {
+        assert(width > static_cast<T>(0));
+        assert(height > static_cast<T>(0));
+        assert(fov > static_cast<T>(0));
+
+        T const rad = fov;
+        T const h = glm::cos(static_cast<T>(0.5) * rad) / glm::sin(static_cast<T>(0.5) * rad);
+        T const w = h * height / width; ///todo max(width , Height) / min(width , Height)?
+
+        mat<4, 4, T, defaultp> Result(static_cast<T>(0));
+        Result[0][0] = w;
+        Result[1][1] = h;
+        Result[2][2] = zFar / (zNear - zFar);
+        Result[2][3] = - static_cast<T>(1);
+        Result[3][2] = -(zFar * zNear) / (zFar - zNear);
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovRH_NO(T fov, T width, T height, T zNear, T zFar)
+    {
+        assert(width > static_cast<T>(0));
+        assert(height > static_cast<T>(0));
+        assert(fov > static_cast<T>(0));
+
+        T const rad = fov;
+        T const h = glm::cos(static_cast<T>(0.5) * rad) / glm::sin(static_cast<T>(0.5) * rad);
+        T const w = h * height / width; ///todo max(width , Height) / min(width , Height)?
+
+        mat<4, 4, T, defaultp> Result(static_cast<T>(0));
+        Result[0][0] = w;
+        Result[1][1] = h;
+        Result[2][2] = - (zFar + zNear) / (zFar - zNear);
+        Result[2][3] = - static_cast<T>(1);
+        Result[3][2] = - (static_cast<T>(2) * zFar * zNear) / (zFar - zNear);
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovLH_ZO(T fov, T width, T height, T zNear, T zFar)
+    {
+        assert(width > static_cast<T>(0));
+        assert(height > static_cast<T>(0));
+        assert(fov > static_cast<T>(0));
+
+        T const rad = fov;
+        T const h = glm::cos(static_cast<T>(0.5) * rad) / glm::sin(static_cast<T>(0.5) * rad);
+        T const w = h * height / width; ///todo max(width , Height) / min(width , Height)?
+
+        mat<4, 4, T, defaultp> Result(static_cast<T>(0));
+        Result[0][0] = w;
+        Result[1][1] = h;
+        Result[2][2] = zFar / (zFar - zNear);
+        Result[2][3] = static_cast<T>(1);
+        Result[3][2] = -(zFar * zNear) / (zFar - zNear);
+        return Result;
+    }
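An illustrative note on the Fov variants: since the x-scale computed above is `w = cot(fov/2) * height / width`, which equals `1 / ((width/height) * tan(fov/2))`, the result should match `perspective(fov, width / height, ...)` up to floating-point rounding.

```cpp
#include <glm/glm.hpp>
#include <glm/ext/matrix_clip_space.hpp>

// Same projection expressed two ways: viewport size vs. aspect ratio.
void fovEquivalenceSketch()
{
    glm::mat4 a = glm::perspectiveFov(glm::radians(70.0f), 1280.0f, 720.0f, 0.1f, 100.0f);
    glm::mat4 b = glm::perspective(glm::radians(70.0f), 1280.0f / 720.0f, 0.1f, 100.0f);
    (void)a; (void)b; // a and b agree up to rounding
}
```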
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovLH_NO(T fov, T width, T height, T zNear, T zFar)
+    {
+        assert(width > static_cast<T>(0));
+        assert(height > static_cast<T>(0));
+        assert(fov > static_cast<T>(0));
+
+        T const rad = fov;
+        T const h = glm::cos(static_cast<T>(0.5) * rad) / glm::sin(static_cast<T>(0.5) * rad);
+        T const w = h * height / width; ///todo max(width , Height) / min(width , Height)?
+
+        mat<4, 4, T, defaultp> Result(static_cast<T>(0));
+        Result[0][0] = w;
+        Result[1][1] = h;
+        Result[2][2] = (zFar + zNear) / (zFar - zNear);
+        Result[2][3] = static_cast<T>(1);
+        Result[3][2] = - (static_cast<T>(2) * zFar * zNear) / (zFar - zNear);
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovZO(T fov, T width, T height, T zNear, T zFar)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT
+        return perspectiveFovLH_ZO(fov, width, height, zNear, zFar);
+#   else
+        return perspectiveFovRH_ZO(fov, width, height, zNear, zFar);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovNO(T fov, T width, T height, T zNear, T zFar)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT
+        return perspectiveFovLH_NO(fov, width, height, zNear, zFar);
+#   else
+        return perspectiveFovRH_NO(fov, width, height, zNear, zFar);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovLH(T fov, T width, T height, T zNear, T zFar)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT
+        return perspectiveFovLH_ZO(fov, width, height, zNear, zFar);
+#   else
+        return perspectiveFovLH_NO(fov, width, height, zNear, zFar);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovRH(T fov, T width, T height, T zNear, T zFar)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT
+        return perspectiveFovRH_ZO(fov, width, height, zNear, zFar);
+#   else
+        return perspectiveFovRH_NO(fov, width, height, zNear, zFar);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFov(T fov, T width, T height, T zNear, T zFar)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_ZO
+        return perspectiveFovLH_ZO(fov, width, height, zNear, zFar);
+#   elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_NO
+        return perspectiveFovLH_NO(fov, width, height, zNear, zFar);
+#   elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_ZO
+        return perspectiveFovRH_ZO(fov, width, height, zNear, zFar);
+#   elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_NO
+        return perspectiveFovRH_NO(fov, width, height, zNear, zFar);
+#   endif
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> infinitePerspectiveRH(T fovy, T aspect, T zNear)
+    {
+        T const range = tan(fovy / static_cast<T>(2)) * zNear;
+        T const left = -range * aspect;
+        T const right = range * aspect;
+        T const bottom = -range;
+        T const top = range;
+
+        mat<4, 4, T, defaultp> Result(static_cast<T>(0));
+        Result[0][0] = (static_cast<T>(2) * zNear) / (right - left);
+        Result[1][1] = (static_cast<T>(2) * zNear) / (top - bottom);
+        Result[2][2] = - static_cast<T>(1);
+        Result[2][3] = - static_cast<T>(1);
+        Result[3][2] = - static_cast<T>(2) * zNear;
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> infinitePerspectiveLH(T fovy, T aspect, T zNear)
+    {
+        T const range = tan(fovy / static_cast<T>(2)) * zNear;
+        T const left = -range * aspect;
+        T const right = range * aspect;
+        T const bottom = -range;
+        T const top = range;
+
+        mat<4, 4, T, defaultp> Result(T(0));
+        Result[0][0] = (static_cast<T>(2) * zNear) / (right - left);
+        Result[1][1] = (static_cast<T>(2) * zNear) / (top - bottom);
+        Result[2][2] = static_cast<T>(1);
+        Result[2][3] = static_cast<T>(1);
+        Result[3][2] = - static_cast<T>(2) * zNear;
+        return Result;
+    }
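A brief usage sketch for the `tweakedInfinitePerspective` overloads defined just below: the epsilon parameter nudges the matrix so that geometry at infinity still passes depth clipping on hardware without depth clamping (see the Lengyel reference cited in the code). The helper name and the epsilon value are illustrative, not from the sources.

```cpp
#include <glm/glm.hpp>
#include <glm/ext/matrix_clip_space.hpp>

glm::mat4 robustSkyboxProjection(float fovy, float aspect, float zNear)
{
    return glm::tweakedInfinitePerspective(fovy, aspect, zNear, 1.0e-6f);
}
```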
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> infinitePerspective(T fovy, T aspect, T zNear)
+    {
+#   if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT
+        return infinitePerspectiveLH(fovy, aspect, zNear);
+#   else
+        return infinitePerspectiveRH(fovy, aspect, zNear);
+#   endif
+    }
+
+    // Infinite projection matrix: http://www.terathon.com/gdc07_lengyel.pdf
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> tweakedInfinitePerspective(T fovy, T aspect, T zNear, T ep)
+    {
+        T const range = tan(fovy / static_cast<T>(2)) * zNear;
+        T const left = -range * aspect;
+        T const right = range * aspect;
+        T const bottom = -range;
+        T const top = range;
+
+        mat<4, 4, T, defaultp> Result(static_cast<T>(0));
+        Result[0][0] = (static_cast<T>(2) * zNear) / (right - left);
+        Result[1][1] = (static_cast<T>(2) * zNear) / (top - bottom);
+        Result[2][2] = ep - static_cast<T>(1);
+        Result[2][3] = static_cast<T>(-1);
+        Result[3][2] = (ep - static_cast<T>(2)) * zNear;
+        return Result;
+    }
+
+    template<typename T>
+    GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> tweakedInfinitePerspective(T fovy, T aspect, T zNear)
+    {
+        return tweakedInfinitePerspective(fovy, aspect, zNear, epsilon<T, defaultp>());
+    }
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_common.hpp b/MacroLibX/third_party/glm/ext/matrix_common.hpp
new file mode 100644
index 0000000..05c3799
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_common.hpp
@@ -0,0 +1,36 @@
+/// @ref ext_matrix_common
+/// @file glm/ext/matrix_common.hpp
+///
+/// @defgroup ext_matrix_common GLM_EXT_matrix_common
+/// @ingroup ext
+///
+/// Defines functions for common matrix operations.
+///
+/// Include <glm/ext/matrix_common.hpp> to use the features of this extension.
+///
+/// @see ext_matrix_common
+
+#pragma once
+
+#include "../detail/qualifier.hpp"
+#include "../detail/_fixes.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#   pragma message("GLM: GLM_EXT_matrix_common extension included")
+#endif
+
+namespace glm
+{
+    /// @addtogroup ext_matrix_common
+    /// @{
+
+    template<length_t C, length_t R, typename T, typename U, qualifier Q>
+    GLM_FUNC_DECL mat<C, R, T, Q> mix(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y, mat<C, R, U, Q> const& a);
+
+    template<length_t C, length_t R, typename T, typename U, qualifier Q>
+    GLM_FUNC_DECL mat<C, R, T, Q> mix(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y, U a);
+
+    /// @}
+}//namespace glm
+
+#include "matrix_common.inl"
diff --git a/MacroLibX/third_party/glm/ext/matrix_common.inl b/MacroLibX/third_party/glm/ext/matrix_common.inl
new file mode 100644
index 0000000..9d50848
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_common.inl
@@ -0,0 +1,16 @@
+#include "../matrix.hpp"
+
+namespace glm
+{
+    template<length_t C, length_t R, typename T, typename U, qualifier Q>
+    GLM_FUNC_QUALIFIER mat<C, R, T, Q> mix(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y, U a)
+    {
+        return mat<C, R, U, Q>(x) * (static_cast<U>(1) - a) + mat<C, R, U, Q>(y) * a;
+    }
+
+    template<length_t C, length_t R, typename T, typename U, qualifier Q>
+    GLM_FUNC_QUALIFIER mat<C, R, T, Q> mix(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y, mat<C, R, U, Q> const& a)
+    {
+        return matrixCompMult(mat<C, R, U, Q>(x), static_cast<U>(1) - a) + matrixCompMult(mat<C, R, U, Q>(y), a);
+    }
+}//namespace glm
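An illustrative sketch of the two `mix` overloads declared above: one scalar weight for the whole matrix, or a per-component weight matrix.

```cpp
#include <glm/glm.hpp>
#include <glm/ext/matrix_common.hpp>

void blendExample()
{
    glm::mat4 a(1.0f), b(3.0f);
    // Uniform weight: every component is blended with the same factor,
    // so the diagonal becomes 2.0 here.
    glm::mat4 uniform = glm::mix(a, b, 0.5f);
    // Matrix weight: each component gets its own blend factor
    // (glm::mat4(0.5f) has 0.5 on the diagonal and 0 elsewhere).
    glm::mat4 perComponent = glm::mix(a, b, glm::mat4(0.5f));
    (void)uniform; (void)perComponent;
}
```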
diff --git a/MacroLibX/third_party/glm/ext/matrix_double2x2.hpp b/MacroLibX/third_party/glm/ext/matrix_double2x2.hpp
new file mode 100644
index 0000000..94dca54
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double2x2.hpp
@@ -0,0 +1,23 @@
+/// @ref core
+/// @file glm/ext/matrix_double2x2.hpp
+
+#pragma once
+#include "../detail/type_mat2x2.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix
+    /// @{
+
+    /// 2 columns of 2 components matrix of double-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<2, 2, double, defaultp> dmat2x2;
+
+    /// 2 columns of 2 components matrix of double-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<2, 2, double, defaultp> dmat2;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double2x2_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_double2x2_precision.hpp
new file mode 100644
index 0000000..9e2c174
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double2x2_precision.hpp
@@ -0,0 +1,49 @@
+/// @ref core
+/// @file glm/ext/matrix_double2x2_precision.hpp
+
+#pragma once
+#include "../detail/type_mat2x2.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix_precision
+    /// @{
+
+    /// 2 columns of 2 components matrix of double-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 2, double, lowp> lowp_dmat2;
+
+    /// 2 columns of 2 components matrix of double-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 2, double, mediump> mediump_dmat2;
+
+    /// 2 columns of 2 components matrix of double-precision floating-point numbers using high precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 2, double, highp> highp_dmat2;
+
+    /// 2 columns of 2 components matrix of double-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 2, double, lowp> lowp_dmat2x2;
+
+    /// 2 columns of 2 components matrix of double-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 2, double, mediump> mediump_dmat2x2;
+
+    /// 2 columns of 2 components matrix of double-precision floating-point numbers using high precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 2, double, highp> highp_dmat2x2;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double2x3.hpp b/MacroLibX/third_party/glm/ext/matrix_double2x3.hpp
new file mode 100644
index 0000000..bfef87a
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double2x3.hpp
@@ -0,0 +1,18 @@
+/// @ref core
+/// @file glm/ext/matrix_double2x3.hpp
+
+#pragma once
+#include "../detail/type_mat2x3.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix
+    /// @{
+
+    /// 2 columns of 3 components matrix of double-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<2, 3, double, defaultp> dmat2x3;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double2x3_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_double2x3_precision.hpp
new file mode 100644
index 0000000..098fb60
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double2x3_precision.hpp
@@ -0,0 +1,31 @@
+/// @ref core
+/// @file glm/ext/matrix_double2x3_precision.hpp
+
+#pragma once
+#include "../detail/type_mat2x3.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix_precision
+    /// @{
+
+    /// 2 columns of 3 components matrix of double-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 3, double, lowp> lowp_dmat2x3;
+
+    /// 2 columns of 3 components matrix of double-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 3, double, mediump> mediump_dmat2x3;
+
+    /// 2 columns of 3 components matrix of double-precision floating-point numbers using high precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 3, double, highp> highp_dmat2x3;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double2x4.hpp b/MacroLibX/third_party/glm/ext/matrix_double2x4.hpp
new file mode 100644
index 0000000..499284b
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double2x4.hpp
@@ -0,0 +1,18 @@
+/// @ref core
+/// @file glm/ext/matrix_double2x4.hpp
+
+#pragma once
+#include "../detail/type_mat2x4.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix
+    /// @{
+
+    /// 2 columns of 4 components matrix of double-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<2, 4, double, defaultp> dmat2x4;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double2x4_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_double2x4_precision.hpp
new file mode 100644
index 0000000..9b61ebc
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double2x4_precision.hpp
@@ -0,0 +1,31 @@
+/// @ref core
+/// @file glm/ext/matrix_double2x4_precision.hpp
+
+#pragma once
+#include "../detail/type_mat2x4.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix_precision
+    /// @{
+
+    /// 2 columns of 4 components matrix of double-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 4, double, lowp> lowp_dmat2x4;
+
+    /// 2 columns of 4 components matrix of double-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 4, double, mediump> mediump_dmat2x4;
+
+    /// 2 columns of 4 components matrix of double-precision floating-point numbers using high precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 4, double, highp> highp_dmat2x4;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double3x2.hpp b/MacroLibX/third_party/glm/ext/matrix_double3x2.hpp
new file mode 100644
index 0000000..dd23f36
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double3x2.hpp
@@ -0,0 +1,18 @@
+/// @ref core
+/// @file glm/ext/matrix_double3x2.hpp
+
+#pragma once
+#include "../detail/type_mat3x2.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix
+    /// @{
+
+    /// 3 columns of 2 components matrix of double-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<3, 2, double, defaultp> dmat3x2;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double3x2_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_double3x2_precision.hpp
new file mode 100644
index 0000000..068d9e9
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double3x2_precision.hpp
@@ -0,0 +1,31 @@
+/// @ref core
+/// @file glm/ext/matrix_double3x2_precision.hpp
+
+#pragma once
+#include "../detail/type_mat3x2.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix_precision
+    /// @{
+
+    /// 3 columns of 2 components matrix of double-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<3, 2, double, lowp> lowp_dmat3x2;
+
+    /// 3 columns of 2 components matrix of double-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<3, 2, double, mediump> mediump_dmat3x2;
+
+    /// 3 columns of 2 components matrix of double-precision floating-point numbers using high precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<3, 2, double, highp> highp_dmat3x2;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double3x3.hpp b/MacroLibX/third_party/glm/ext/matrix_double3x3.hpp
new file mode 100644
index 0000000..53572b7
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double3x3.hpp
@@ -0,0 +1,23 @@
+/// @ref core
+/// @file glm/ext/matrix_double3x3.hpp
+
+#pragma once
+#include "../detail/type_mat3x3.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix
+    /// @{
+
+    /// 3 columns of 3 components matrix of double-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<3, 3, double, defaultp> dmat3x3;
+
+    /// 3 columns of 3 components matrix of double-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<3, 3, double, defaultp> dmat3;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double3x3_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_double3x3_precision.hpp
new file mode 100644
index 0000000..8691e78
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double3x3_precision.hpp
@@ -0,0 +1,49 @@
+/// @ref core
+/// @file glm/ext/matrix_double3x3_precision.hpp
+
+#pragma once
+#include "../detail/type_mat3x3.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix_precision
+    /// @{
+
+    /// 3 columns of 3 components matrix of double-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<3, 3, double, lowp> lowp_dmat3;
+
+    /// 3 columns of 3 components matrix of double-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<3, 3, double, mediump> mediump_dmat3;
+
+    /// 3 columns of 3 components matrix of double-precision floating-point numbers using high precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<3, 3, double, highp> highp_dmat3;
+
+    /// 3 columns of 3 components matrix of double-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<3, 3, double, lowp> lowp_dmat3x3;
+
+    /// 3 columns of 3 components matrix of double-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<3, 3, double, mediump> mediump_dmat3x3;
+
+    /// 3 columns of 3 components matrix of double-precision floating-point numbers using high precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<3, 3, double, highp> highp_dmat3x3;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double3x4.hpp b/MacroLibX/third_party/glm/ext/matrix_double3x4.hpp
new file mode 100644
index 0000000..c572d63
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double3x4.hpp
@@ -0,0 +1,18 @@
+/// @ref core
+/// @file glm/ext/matrix_double3x4.hpp
+
+#pragma once
+#include "../detail/type_mat3x4.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix
+    /// @{
+
+    /// 3 columns of 4 components matrix of double-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<3, 4, double, defaultp> dmat3x4;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double3x4_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_double3x4_precision.hpp
new file mode 100644
index 0000000..f040217
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double3x4_precision.hpp
@@ -0,0 +1,31 @@
+/// @ref core
+/// @file glm/ext/matrix_double3x4_precision.hpp
+
+#pragma once
+#include "../detail/type_mat3x4.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix_precision
+    /// @{
+
+    /// 3 columns of 4 components matrix of double-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<3, 4, double, lowp> lowp_dmat3x4;
+
+    /// 3 columns of 4 components matrix of double-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<3, 4, double, mediump> mediump_dmat3x4;
+
+    /// 3 columns of 4 components matrix of double-precision floating-point numbers using high precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<3, 4, double, highp> highp_dmat3x4;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double4x2.hpp b/MacroLibX/third_party/glm/ext/matrix_double4x2.hpp
new file mode 100644
index 0000000..9b229f4
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double4x2.hpp
@@ -0,0 +1,18 @@
+/// @ref core
+/// @file glm/ext/matrix_double4x2.hpp
+
+#pragma once
+#include "../detail/type_mat4x2.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix
+    /// @{
+
+    /// 4 columns of 2 components matrix of double-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<4, 2, double, defaultp> dmat4x2;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double4x2_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_double4x2_precision.hpp
new file mode 100644
index 0000000..6ad18ba
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double4x2_precision.hpp
@@ -0,0 +1,31 @@
+/// @ref core
+/// @file glm/ext/matrix_double4x2_precision.hpp
+
+#pragma once
+#include "../detail/type_mat4x2.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix_precision
+    /// @{
+
+    /// 4 columns of 2 components matrix of double-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<4, 2, double, lowp> lowp_dmat4x2;
+
+    /// 4 columns of 2 components matrix of double-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<4, 2, double, mediump> mediump_dmat4x2;
+
+    /// 4 columns of 2 components matrix of double-precision floating-point numbers using high precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<4, 2, double, highp> highp_dmat4x2;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double4x3.hpp b/MacroLibX/third_party/glm/ext/matrix_double4x3.hpp
new file mode 100644
index 0000000..dca4cf9
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double4x3.hpp
@@ -0,0 +1,18 @@
+/// @ref core
+/// @file glm/ext/matrix_double4x3.hpp
+
+#pragma once
+#include "../detail/type_mat4x3.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix
+    /// @{
+
+    /// 4 columns of 3 components matrix of double-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<4, 3, double, defaultp> dmat4x3;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double4x3_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_double4x3_precision.hpp
new file mode 100644
index 0000000..f7371de
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double4x3_precision.hpp
@@ -0,0 +1,31 @@
+/// @ref core
+/// @file glm/ext/matrix_double4x3_precision.hpp
+
+#pragma once
+#include "../detail/type_mat4x3.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix_precision
+    /// @{
+
+    /// 4 columns of 3 components matrix of double-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<4, 3, double, lowp> lowp_dmat4x3;
+
+    /// 4 columns of 3 components matrix of double-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<4, 3, double, mediump> mediump_dmat4x3;
+
+    /// 4 columns of 3 components matrix of double-precision floating-point numbers using high precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<4, 3, double, highp> highp_dmat4x3;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double4x4.hpp b/MacroLibX/third_party/glm/ext/matrix_double4x4.hpp
new file mode 100644
index 0000000..81e1bf6
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double4x4.hpp
@@ -0,0 +1,23 @@
+/// @ref core
+/// @file glm/ext/matrix_double4x4.hpp
+
+#pragma once
+#include "../detail/type_mat4x4.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix
+    /// @{
+
+    /// 4 columns of 4 components matrix of double-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<4, 4, double, defaultp> dmat4x4;
+
+    /// 4 columns of 4 components matrix of double-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<4, 4, double, defaultp> dmat4;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_double4x4_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_double4x4_precision.hpp
new file mode 100644
index 0000000..4c36a84
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_double4x4_precision.hpp
@@ -0,0 +1,49 @@
+/// @ref core
+/// @file glm/ext/matrix_double4x4_precision.hpp
+
+#pragma once
+#include "../detail/type_mat4x4.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix_precision
+    /// @{
+
+    /// 4 columns of 4 components matrix of double-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<4, 4, double, lowp> lowp_dmat4;
+
+    /// 4 columns of 4 components matrix of double-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<4, 4, double, mediump> mediump_dmat4;
+
+    /// 4 columns of 4 components matrix of double-precision floating-point numbers using high precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<4, 4, double, highp> highp_dmat4;
+
+    /// 4 columns of 4 components matrix of double-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<4, 4, double, lowp> lowp_dmat4x4;
+
+    /// 4 columns of 4 components matrix of double-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<4, 4, double, mediump> mediump_dmat4x4;
+
+    /// 4 columns of 4 components matrix of double-precision floating-point numbers using high precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<4, 4, double, highp> highp_dmat4x4;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_float2x2.hpp b/MacroLibX/third_party/glm/ext/matrix_float2x2.hpp
new file mode 100644
index 0000000..53df921
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_float2x2.hpp
@@ -0,0 +1,23 @@
+/// @ref core
+/// @file glm/ext/matrix_float2x2.hpp
+
+#pragma once
+#include "../detail/type_mat2x2.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix
+    /// @{
+
+    /// 2 columns of 2 components matrix of single-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<2, 2, float, defaultp> mat2x2;
+
+    /// 2 columns of 2 components matrix of single-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<2, 2, float, defaultp> mat2;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_float2x2_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_float2x2_precision.hpp
new file mode 100644
index 0000000..898b6db
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_float2x2_precision.hpp
@@ -0,0 +1,49 @@
+/// @ref core
+/// @file glm/ext/matrix_float2x2_precision.hpp
+
+#pragma once
+#include "../detail/type_mat2x2.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix_precision
+    /// @{
+
+    /// 2 columns of 2 components matrix of single-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 2, float, lowp> lowp_mat2;
+
+    /// 2 columns of 2 components matrix of single-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 2, float, mediump> mediump_mat2;
+
+    /// 2 columns of 2 components matrix of single-precision floating-point numbers using high precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 2, float, highp> highp_mat2;
+
+    /// 2 columns of 2 components matrix of single-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 2, float, lowp> lowp_mat2x2;
+
+    /// 2 columns of 2 components matrix of single-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 2, float, mediump> mediump_mat2x2;
+
+    /// 2 columns of 2 components matrix of single-precision floating-point numbers using high precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 2, float, highp> highp_mat2x2;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_float2x3.hpp b/MacroLibX/third_party/glm/ext/matrix_float2x3.hpp
new file mode 100644
index 0000000..6f68822
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_float2x3.hpp
@@ -0,0 +1,18 @@
+/// @ref core
+/// @file glm/ext/matrix_float2x3.hpp
+
+#pragma once
+#include "../detail/type_mat2x3.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix
+    /// @{
+
+    /// 2 columns of 3 components matrix of single-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<2, 3, float, defaultp> mat2x3;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_float2x3_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_float2x3_precision.hpp
new file mode 100644
index 0000000..50c1032
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_float2x3_precision.hpp
@@ -0,0 +1,31 @@
+/// @ref core
+/// @file glm/ext/matrix_float2x3_precision.hpp
+
+#pragma once
+#include "../detail/type_mat2x3.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix_precision
+    /// @{
+
+    /// 2 columns of 3 components matrix of single-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 3, float, lowp> lowp_mat2x3;
+
+    /// 2 columns of 3 components matrix of single-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 3, float, mediump> mediump_mat2x3;
+
+    /// 2 columns of 3 components matrix of single-precision floating-point numbers using high precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 3, float, highp> highp_mat2x3;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_float2x4.hpp b/MacroLibX/third_party/glm/ext/matrix_float2x4.hpp
new file mode 100644
index 0000000..30f30de
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_float2x4.hpp
@@ -0,0 +1,18 @@
+/// @ref core
+/// @file glm/ext/matrix_float2x4.hpp
+
+#pragma once
+#include "../detail/type_mat2x4.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix
+    /// @{
+
+    /// 2 columns of 4 components matrix of single-precision floating-point numbers.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    typedef mat<2, 4, float, defaultp> mat2x4;
+
+    /// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_float2x4_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_float2x4_precision.hpp
new file mode 100644
index 0000000..079d638
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_float2x4_precision.hpp
@@ -0,0 +1,31 @@
+/// @ref core
+/// @file glm/ext/matrix_float2x4_precision.hpp
+
+#pragma once
+#include "../detail/type_mat2x4.hpp"
+
+namespace glm
+{
+    /// @addtogroup core_matrix_precision
+    /// @{
+
+    /// 2 columns of 4 components matrix of single-precision floating-point numbers using low precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 4, float, lowp> lowp_mat2x4;
+
+    /// 2 columns of 4 components matrix of single-precision floating-point numbers using medium precision arithmetic in terms of ULPs.
+    ///
+    /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+    /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+    typedef mat<2, 4, float, mediump> mediump_mat2x4;
+
+ /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<2, 4, float, highp> highp_mat2x4; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/matrix_float3x2.hpp b/MacroLibX/third_party/glm/ext/matrix_float3x2.hpp new file mode 100644 index 0000000..d39dd2f --- /dev/null +++ b/MacroLibX/third_party/glm/ext/matrix_float3x2.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/matrix_float3x2.hpp + +#pragma once +#include "../detail/type_mat3x2.hpp" + +namespace glm +{ + /// @addtogroup core + /// @{ + + /// 3 columns of 2 components matrix of single-precision floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + typedef mat<3, 2, float, defaultp> mat3x2; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/matrix_float3x2_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_float3x2_precision.hpp new file mode 100644 index 0000000..8572c2a --- /dev/null +++ b/MacroLibX/third_party/glm/ext/matrix_float3x2_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/matrix_float3x2_precision.hpp + +#pragma once +#include "../detail/type_mat3x2.hpp" + +namespace glm +{ + /// @addtogroup core_matrix_precision + /// @{ + + /// 3 columns of 2 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<3, 2, float, lowp> lowp_mat3x2; + + /// 3 columns of 2 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<3, 2, float, mediump> mediump_mat3x2; + + /// 3 columns of 2 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<3, 2, float, highp> highp_mat3x2; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/matrix_float3x3.hpp b/MacroLibX/third_party/glm/ext/matrix_float3x3.hpp new file mode 100644 index 0000000..177d809 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/matrix_float3x3.hpp @@ -0,0 +1,23 @@ +/// @ref core +/// @file glm/ext/matrix_float3x3.hpp + +#pragma once +#include "../detail/type_mat3x3.hpp" + +namespace glm +{ + /// @addtogroup core_matrix + /// @{ + + /// 3 columns of 3 components matrix of single-precision floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + typedef mat<3, 3, float, defaultp> mat3x3; + + /// 3 columns of 3 components matrix of single-precision floating-point numbers. 
+ /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + typedef mat<3, 3, float, defaultp> mat3; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/matrix_float3x3_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_float3x3_precision.hpp new file mode 100644 index 0000000..8a900c1 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/matrix_float3x3_precision.hpp @@ -0,0 +1,49 @@ +/// @ref core +/// @file glm/ext/matrix_float3x3_precision.hpp + +#pragma once +#include "../detail/type_mat3x3.hpp" + +namespace glm +{ + /// @addtogroup core_matrix_precision + /// @{ + + /// 3 columns of 3 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<3, 3, float, lowp> lowp_mat3; + + /// 3 columns of 3 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<3, 3, float, mediump> mediump_mat3; + + /// 3 columns of 3 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<3, 3, float, highp> highp_mat3; + + /// 3 columns of 3 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<3, 3, float, lowp> lowp_mat3x3; + + /// 3 columns of 3 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<3, 3, float, mediump> mediump_mat3x3; + + /// 3 columns of 3 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<3, 3, float, highp> highp_mat3x3; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/matrix_float3x4.hpp b/MacroLibX/third_party/glm/ext/matrix_float3x4.hpp new file mode 100644 index 0000000..64b8459 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/matrix_float3x4.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/matrix_float3x4.hpp + +#pragma once +#include "../detail/type_mat3x4.hpp" + +namespace glm +{ + /// @addtogroup core_matrix + /// @{ + + /// 3 columns of 4 components matrix of single-precision floating-point numbers. 
+ /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + typedef mat<3, 4, float, defaultp> mat3x4; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/matrix_float3x4_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_float3x4_precision.hpp new file mode 100644 index 0000000..bc36bf1 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/matrix_float3x4_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/matrix_float3x4_precision.hpp + +#pragma once +#include "../detail/type_mat3x4.hpp" + +namespace glm +{ + /// @addtogroup core_matrix_precision + /// @{ + + /// 3 columns of 4 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<3, 4, float, lowp> lowp_mat3x4; + + /// 3 columns of 4 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<3, 4, float, mediump> mediump_mat3x4; + + /// 3 columns of 4 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<3, 4, float, highp> highp_mat3x4; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/matrix_float4x2.hpp b/MacroLibX/third_party/glm/ext/matrix_float4x2.hpp new file mode 100644 index 0000000..1ed5227 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/matrix_float4x2.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/matrix_float4x2.hpp + +#pragma once +#include "../detail/type_mat4x2.hpp" + +namespace glm +{ + /// @addtogroup core_matrix + /// @{ + + /// 4 columns of 2 components matrix of single-precision floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + typedef mat<4, 2, float, defaultp> mat4x2; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/matrix_float4x2_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_float4x2_precision.hpp new file mode 100644 index 0000000..88fd069 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/matrix_float4x2_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/matrix_float2x2_precision.hpp + +#pragma once +#include "../detail/type_mat2x2.hpp" + +namespace glm +{ + /// @addtogroup core_matrix_precision + /// @{ + + /// 4 columns of 2 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<4, 2, float, lowp> lowp_mat4x2; + + /// 4 columns of 2 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<4, 2, float, mediump> mediump_mat4x2; + + /// 4 columns of 2 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. 
+ /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<4, 2, float, highp> highp_mat4x2; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/matrix_float4x3.hpp b/MacroLibX/third_party/glm/ext/matrix_float4x3.hpp new file mode 100644 index 0000000..5dbe765 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/matrix_float4x3.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/matrix_float4x3.hpp + +#pragma once +#include "../detail/type_mat4x3.hpp" + +namespace glm +{ + /// @addtogroup core_matrix + /// @{ + + /// 4 columns of 3 components matrix of single-precision floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + typedef mat<4, 3, float, defaultp> mat4x3; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/matrix_float4x3_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_float4x3_precision.hpp new file mode 100644 index 0000000..846ed4f --- /dev/null +++ b/MacroLibX/third_party/glm/ext/matrix_float4x3_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/matrix_float4x3_precision.hpp + +#pragma once +#include "../detail/type_mat4x3.hpp" + +namespace glm +{ + /// @addtogroup core_matrix_precision + /// @{ + + /// 4 columns of 3 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<4, 3, float, lowp> lowp_mat4x3; + + /// 4 columns of 3 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<4, 3, float, mediump> mediump_mat4x3; + + /// 4 columns of 3 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef mat<4, 3, float, highp> highp_mat4x3; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/matrix_float4x4.hpp b/MacroLibX/third_party/glm/ext/matrix_float4x4.hpp new file mode 100644 index 0000000..5ba111d --- /dev/null +++ b/MacroLibX/third_party/glm/ext/matrix_float4x4.hpp @@ -0,0 +1,23 @@ +/// @ref core +/// @file glm/ext/matrix_float4x4.hpp + +#pragma once +#include "../detail/type_mat4x4.hpp" + +namespace glm +{ + /// @ingroup core_matrix + /// @{ + + /// 4 columns of 4 components matrix of single-precision floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices + typedef mat<4, 4, float, defaultp> mat4x4; + + /// 4 columns of 4 components matrix of single-precision floating-point numbers. 
+	///
+	/// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+	typedef mat<4, 4, float, defaultp> mat4;
+
+	/// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_float4x4_precision.hpp b/MacroLibX/third_party/glm/ext/matrix_float4x4_precision.hpp
new file mode 100644
index 0000000..597149b
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_float4x4_precision.hpp
@@ -0,0 +1,49 @@
+/// @ref core
+/// @file glm/ext/matrix_float4x4_precision.hpp
+
+#pragma once
+#include "../detail/type_mat4x4.hpp"
+
+namespace glm
+{
+	/// @addtogroup core_matrix_precision
+	/// @{
+
+	/// 4 columns of 4 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs.
+	///
+	/// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+	/// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+	typedef mat<4, 4, float, lowp> lowp_mat4;
+
+	/// 4 columns of 4 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs.
+	///
+	/// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+	/// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+	typedef mat<4, 4, float, mediump> mediump_mat4;
+
+	/// 4 columns of 4 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs.
+	///
+	/// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+	/// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+	typedef mat<4, 4, float, highp> highp_mat4;
+
+	/// 4 columns of 4 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs.
+	///
+	/// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+	/// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+	typedef mat<4, 4, float, lowp> lowp_mat4x4;
+
+	/// 4 columns of 4 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs.
+	///
+	/// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+	/// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+	typedef mat<4, 4, float, mediump> mediump_mat4x4;
+
+	/// 4 columns of 4 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs.
+	///
+	/// @see GLSL 4.20.8 specification, section 4.1.6 Matrices
+	/// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier
+	typedef mat<4, 4, float, highp> highp_mat4x4;
+
+	/// @}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/matrix_projection.hpp b/MacroLibX/third_party/glm/ext/matrix_projection.hpp
new file mode 100644
index 0000000..51fd01b
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_projection.hpp
@@ -0,0 +1,149 @@
+/// @ref ext_matrix_projection
+/// @file glm/ext/matrix_projection.hpp
+///
+/// @defgroup ext_matrix_projection GLM_EXT_matrix_projection
+/// @ingroup ext
+///
+/// Functions that generate common projection transformation matrices.
+///
+/// The matrices generated by this extension use standard OpenGL fixed-function
+/// conventions. For example, the lookAt function generates a transform from world
+/// space into the specific eye space that the projective matrix functions
+/// (perspective, ortho, etc) are designed to expect. The OpenGL compatibility
+/// specifications defines the particular layout of this eye space.
+///
+/// Include <glm/ext/matrix_projection.hpp> to use the features of this extension.
+///
+/// @see ext_matrix_transform
+/// @see ext_matrix_clip_space
+
+#pragma once
+
+// Dependencies
+#include "../gtc/constants.hpp"
+#include "../geometric.hpp"
+#include "../trigonometric.hpp"
+#include "../matrix.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_EXT_matrix_projection extension included")
+#endif
+
+namespace glm
+{
+	/// @addtogroup ext_matrix_projection
+	/// @{
+
+	/// Map the specified object coordinates (obj.x, obj.y, obj.z) into window coordinates.
+	/// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	///
+	/// @param obj Specify the object coordinates.
+	/// @param model Specifies the current modelview matrix
+	/// @param proj Specifies the current projection matrix
+	/// @param viewport Specifies the current viewport
+	/// @return Return the computed window coordinates.
+	/// @tparam T Native type used for the computation. Currently supported: half (not recommended), float or double.
+	/// @tparam U Currently supported: Floating-point types and integer types.
+	///
+	/// @see gluProject man page
+	template<typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<3, T, Q> projectZO(
+		vec<3, T, Q> const& obj, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport);
+
+	/// Map the specified object coordinates (obj.x, obj.y, obj.z) into window coordinates.
+	/// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @param obj Specify the object coordinates.
+	/// @param model Specifies the current modelview matrix
+	/// @param proj Specifies the current projection matrix
+	/// @param viewport Specifies the current viewport
+	/// @return Return the computed window coordinates.
+	/// @tparam T Native type used for the computation. Currently supported: half (not recommended), float or double.
+	/// @tparam U Currently supported: Floating-point types and integer types.
+	///
+	/// @see gluProject man page
+	template<typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<3, T, Q> projectNO(
+		vec<3, T, Q> const& obj, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport);
+
+	/// Map the specified object coordinates (obj.x, obj.y, obj.z) into window coordinates using default near and far clip planes definition.
+	/// To change default near and far clip planes definition use GLM_FORCE_DEPTH_ZERO_TO_ONE.
+	///
+	/// @param obj Specify the object coordinates.
+	/// @param model Specifies the current modelview matrix
+	/// @param proj Specifies the current projection matrix
+	/// @param viewport Specifies the current viewport
+	/// @return Return the computed window coordinates.
+	/// @tparam T Native type used for the computation. Currently supported: half (not recommended), float or double.
+	/// @tparam U Currently supported: Floating-point types and integer types.
+	///
+	/// @see gluProject man page
+	template<typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<3, T, Q> project(
+		vec<3, T, Q> const& obj, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport);
+
+	/// Map the specified window coordinates (win.x, win.y, win.z) into object coordinates.
+	/// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition)
+	///
+	/// @param win Specify the window coordinates to be mapped.
+	/// @param model Specifies the modelview matrix
+	/// @param proj Specifies the projection matrix
+	/// @param viewport Specifies the viewport
+	/// @return Returns the computed object coordinates.
+	/// @tparam T Native type used for the computation. Currently supported: half (not recommended), float or double.
+	/// @tparam U Currently supported: Floating-point types and integer types.
+	///
+	/// @see gluUnProject man page
+	template<typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<3, T, Q> unProjectZO(
+		vec<3, T, Q> const& win, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport);
+
+	/// Map the specified window coordinates (win.x, win.y, win.z) into object coordinates.
+	/// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition)
+	///
+	/// @param win Specify the window coordinates to be mapped.
+	/// @param model Specifies the modelview matrix
+	/// @param proj Specifies the projection matrix
+	/// @param viewport Specifies the viewport
+	/// @return Returns the computed object coordinates.
+	/// @tparam T Native type used for the computation. Currently supported: half (not recommended), float or double.
+	/// @tparam U Currently supported: Floating-point types and integer types.
+	///
+	/// @see gluUnProject man page
+	template<typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<3, T, Q> unProjectNO(
+		vec<3, T, Q> const& win, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport);
+
+	/// Map the specified window coordinates (win.x, win.y, win.z) into object coordinates using default near and far clip planes definition.
+	/// To change default near and far clip planes definition use GLM_FORCE_DEPTH_ZERO_TO_ONE.
+	///
+	/// @param win Specify the window coordinates to be mapped.
+	/// @param model Specifies the modelview matrix
+	/// @param proj Specifies the projection matrix
+	/// @param viewport Specifies the viewport
+	/// @return Returns the computed object coordinates.
+	/// @tparam T Native type used for the computation. Currently supported: half (not recommended), float or double.
+	/// @tparam U Currently supported: Floating-point types and integer types.
+	///
+	/// @see gluUnProject man page
+	template<typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<3, T, Q> unProject(
+		vec<3, T, Q> const& win, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport);
+
+	/// Define a picking region
+	///
+	/// @param center Specify the center of a picking region in window coordinates.
+	/// @param delta Specify the width and height, respectively, of the picking region in window coordinates.
+	/// @param viewport Rendering viewport
+	/// @tparam T Native type used for the computation. Currently supported: half (not recommended), float or double.
+	/// @tparam U Currently supported: Floating-point types and integer types.
+	///
+	/// @see gluPickMatrix man page
+	template<typename T, qualifier Q, typename U>
+	GLM_FUNC_DECL mat<4, 4, T, Q> pickMatrix(
+		vec<2, T, Q> const& center, vec<2, T, Q> const& delta, vec<4, U, Q> const& viewport);
+
+	/// @}
+}//namespace glm
+
+#include "matrix_projection.inl"
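As a quick orientation for readers of this vendored header, here is a minimal sketch of a project/unProject round trip using the declarations above. The camera setup, viewport size, and helper name are illustrative, not part of GLM:

```cpp
#include <glm/glm.hpp>
#include <glm/ext/matrix_projection.hpp>   // glm::project, glm::unProject
#include <glm/ext/matrix_clip_space.hpp>   // glm::perspective
#include <glm/ext/matrix_transform.hpp>    // glm::lookAt

// Project a world-space point into window coordinates, then map it back.
glm::vec3 worldToWindowAndBack()
{
    glm::mat4 const view = glm::lookAt(glm::vec3(0.0f, 0.0f, 3.0f),
                                       glm::vec3(0.0f), glm::vec3(0.0f, 1.0f, 0.0f));
    glm::mat4 const proj = glm::perspective(glm::radians(45.0f), 16.0f / 9.0f, 0.1f, 100.0f);
    glm::vec4 const viewport(0.0f, 0.0f, 1280.0f, 720.0f); // x, y, width, height

    glm::vec3 const world(0.5f, 0.25f, 0.0f);
    glm::vec3 const win = glm::project(world, view, proj, viewport);
    return glm::unProject(win, view, proj, viewport); // ~= world, up to rounding
}
```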
diff --git a/MacroLibX/third_party/glm/ext/matrix_projection.inl b/MacroLibX/third_party/glm/ext/matrix_projection.inl
new file mode 100644
index 0000000..8b4eea9
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_projection.inl
@@ -0,0 +1,104 @@
+namespace glm
+{
+	template<typename T, typename U, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<3, T, Q> projectZO(vec<3, T, Q> const& obj, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport)
+	{
+		vec<4, T, Q> tmp = vec<4, T, Q>(obj, static_cast<T>(1));
+		tmp = model * tmp;
+		tmp = proj * tmp;
+
+		tmp /= tmp.w;
+		tmp.x = tmp.x * static_cast<T>(0.5) + static_cast<T>(0.5);
+		tmp.y = tmp.y * static_cast<T>(0.5) + static_cast<T>(0.5);
+
+		tmp[0] = tmp[0] * T(viewport[2]) + T(viewport[0]);
+		tmp[1] = tmp[1] * T(viewport[3]) + T(viewport[1]);
+
+		return vec<3, T, Q>(tmp);
+	}
+
+	template<typename T, typename U, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<3, T, Q> projectNO(vec<3, T, Q> const& obj, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport)
+	{
+		vec<4, T, Q> tmp = vec<4, T, Q>(obj, static_cast<T>(1));
+		tmp = model * tmp;
+		tmp = proj * tmp;
+
+		tmp /= tmp.w;
+		tmp = tmp * static_cast<T>(0.5) + static_cast<T>(0.5);
+		tmp[0] = tmp[0] * T(viewport[2]) + T(viewport[0]);
+		tmp[1] = tmp[1] * T(viewport[3]) + T(viewport[1]);
+
+		return vec<3, T, Q>(tmp);
+	}
+
+	template<typename T, typename U, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<3, T, Q> project(vec<3, T, Q> const& obj, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport)
+	{
+		if(GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT)
+			return projectZO(obj, model, proj, viewport);
+		else
+			return projectNO(obj, model, proj, viewport);
+	}
+
+	template<typename T, typename U, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<3, T, Q> unProjectZO(vec<3, T, Q> const& win, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport)
+	{
+		mat<4, 4, T, Q> Inverse = inverse(proj * model);
+
+		vec<4, T, Q> tmp = vec<4, T, Q>(win, T(1));
+		tmp.x = (tmp.x - T(viewport[0])) / T(viewport[2]);
+		tmp.y = (tmp.y - T(viewport[1])) / T(viewport[3]);
+		tmp.x = tmp.x * static_cast<T>(2) - static_cast<T>(1);
+		tmp.y = tmp.y * static_cast<T>(2) - static_cast<T>(1);
+
+		vec<4, T, Q> obj = Inverse * tmp;
+		obj /= obj.w;
+
+		return vec<3, T, Q>(obj);
+	}
+
+	template<typename T, typename U, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<3, T, Q> unProjectNO(vec<3, T, Q> const& win, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport)
+	{
+		mat<4, 4, T, Q> Inverse = inverse(proj * model);
+
+		vec<4, T, Q> tmp = vec<4, T, Q>(win, T(1));
+		tmp.x = (tmp.x - T(viewport[0])) / T(viewport[2]);
+		tmp.y = (tmp.y - T(viewport[1])) / T(viewport[3]);
+		tmp = tmp * static_cast<T>(2) - static_cast<T>(1);
+
+		vec<4, T, Q> obj = Inverse * tmp;
+		obj /= obj.w;
+
+		return vec<3, T, Q>(obj);
+	}
+
+	template<typename T, typename U, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<3, T, Q> unProject(vec<3, T, Q> const& win, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport)
+	{
+		if(GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT)
+			return unProjectZO(win, model, proj, viewport);
+		else
+			return unProjectNO(win, model, proj, viewport);
+	}
+
+	template<typename T, qualifier Q, typename U>
+	GLM_FUNC_QUALIFIER mat<4, 4, T, Q> pickMatrix(vec<2, T, Q> const& center, vec<2, T, Q> const& delta, vec<4, U, Q> const& viewport)
+	{
+		assert(delta.x > static_cast<T>(0) && delta.y > static_cast<T>(0));
+		mat<4, 4, T, Q> Result(static_cast<T>(1));
+
+		if(!(delta.x > static_cast<T>(0) && delta.y > static_cast<T>(0)))
+			return Result; // Error
+
+		vec<3, T, Q> Temp(
+			(static_cast<T>(viewport[2]) - static_cast<T>(2) * (center.x - static_cast<T>(viewport[0]))) / delta.x,
+			(static_cast<T>(viewport[3]) - static_cast<T>(2) * (center.y - static_cast<T>(viewport[1]))) / delta.y,
+			static_cast<T>(0));
+
+		// Translate and scale the picked region to the entire window
+		Result = translate(Result, Temp);
+		return scale(Result, vec<3, T, Q>(static_cast<T>(viewport[2]) / delta.x, static_cast<T>(viewport[3]) / delta.y, static_cast<T>(1)));
+	}
+}//namespace glm
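A small sketch of how pickMatrix is typically combined with a projection matrix, in the spirit of gluPickMatrix. The helper name, cursor parameter, viewport, and 5x5 pixel picking region are illustrative:

```cpp
#include <glm/glm.hpp>
#include <glm/ext/matrix_projection.hpp>   // glm::pickMatrix

// Restrict rendering to a small region under the cursor, as used for
// selection/picking passes.
glm::mat4 pickRegion(glm::vec2 const& cursor, glm::mat4 const& proj)
{
    glm::vec4 const viewport(0.0f, 0.0f, 1280.0f, 720.0f);
    glm::mat4 const pick = glm::pickMatrix(cursor, glm::vec2(5.0f, 5.0f), viewport);
    return pick * proj; // the pick matrix is applied on top of the projection
}
```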
diff --git a/MacroLibX/third_party/glm/ext/matrix_relational.hpp b/MacroLibX/third_party/glm/ext/matrix_relational.hpp
new file mode 100644
index 0000000..20023ad
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_relational.hpp
@@ -0,0 +1,132 @@
+/// @ref ext_matrix_relational
+/// @file glm/ext/matrix_relational.hpp
+///
+/// @defgroup ext_matrix_relational GLM_EXT_matrix_relational
+/// @ingroup ext
+///
+/// Exposes comparison functions for matrix types that take a user defined epsilon values.
+///
+/// Include <glm/ext/matrix_relational.hpp> to use the features of this extension.
+///
+/// @see ext_vector_relational
+/// @see ext_scalar_relational
+/// @see ext_quaternion_relational
+
+#pragma once
+
+// Dependencies
+#include "../detail/qualifier.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_EXT_matrix_relational extension included")
+#endif
+
+namespace glm
+{
+	/// @addtogroup ext_matrix_relational
+	/// @{
+
+	/// Perform a component-wise equal-to comparison of two matrices.
+	/// Return a boolean vector which components value is True if this expression is satisfied per column of the matrices.
+	///
+	/// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix
+	/// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix
+	/// @tparam T Floating-point or integer scalar types
+	/// @tparam Q Value from qualifier enum
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<C, bool, Q> equal(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y);
+
+	/// Perform a component-wise not-equal-to comparison of two matrices.
+	/// Return a boolean vector which components value is True if this expression is satisfied per column of the matrices.
+	///
+	/// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix
+	/// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix
+	/// @tparam T Floating-point or integer scalar types
+	/// @tparam Q Value from qualifier enum
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<C, bool, Q> notEqual(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y);
+
+	/// Returns the component-wise comparison of |x - y| < epsilon.
+	/// True if this expression is satisfied.
+	///
+	/// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix
+	/// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix
+	/// @tparam T Floating-point or integer scalar types
+	/// @tparam Q Value from qualifier enum
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<C, bool, Q> equal(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y, T epsilon);
+
+	/// Returns the component-wise comparison of |x - y| < epsilon.
+	/// True if this expression is satisfied.
+	///
+	/// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix
+	/// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix
+	/// @tparam T Floating-point or integer scalar types
+	/// @tparam Q Value from qualifier enum
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<C, bool, Q> equal(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y, vec<C, T, Q> const& epsilon);
+
+	/// Returns the component-wise comparison of |x - y| < epsilon.
+	/// True if this expression is not satisfied.
+	///
+	/// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix
+	/// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix
+	/// @tparam T Floating-point or integer scalar types
+	/// @tparam Q Value from qualifier enum
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<C, bool, Q> notEqual(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y, T epsilon);
+
+	/// Returns the component-wise comparison of |x - y| >= epsilon.
+	/// True if this expression is not satisfied.
+	///
+	/// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix
+	/// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix
+	/// @tparam T Floating-point or integer scalar types
+	/// @tparam Q Value from qualifier enum
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<C, bool, Q> notEqual(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y, vec<C, T, Q> const& epsilon);
+
+	/// Returns the component-wise comparison between two vectors in term of ULPs.
+	/// True if this expression is satisfied.
+	///
+	/// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix
+	/// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix
+	/// @tparam T Floating-point
+	/// @tparam Q Value from qualifier enum
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<C, bool, Q> equal(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y, int ULPs);
+
+	/// Returns the component-wise comparison between two vectors in term of ULPs.
+	/// True if this expression is satisfied.
+	///
+	/// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix
+	/// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix
+	/// @tparam T Floating-point
+	/// @tparam Q Value from qualifier enum
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<C, bool, Q> equal(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y, vec<C, int, Q> const& ULPs);
+
+	/// Returns the component-wise comparison between two vectors in term of ULPs.
+	/// True if this expression is not satisfied.
+	///
+	/// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix
+	/// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix
+	/// @tparam T Floating-point
+	/// @tparam Q Value from qualifier enum
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<C, bool, Q> notEqual(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y, int ULPs);
+
+	/// Returns the component-wise comparison between two vectors in term of ULPs.
+	/// True if this expression is not satisfied.
+	///
+	/// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix
+	/// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix
+	/// @tparam T Floating-point
+	/// @tparam Q Value from qualifier enum
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<C, bool, Q> notEqual(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y, vec<C, int, Q> const& ULPs);
+
+	/// @}
+}//namespace glm
+
+#include "matrix_relational.inl"
diff --git a/MacroLibX/third_party/glm/ext/matrix_relational.inl b/MacroLibX/third_party/glm/ext/matrix_relational.inl
new file mode 100644
index 0000000..b2b8753
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_relational.inl
@@ -0,0 +1,82 @@
+/// @ref ext_vector_relational
+/// @file glm/ext/vector_relational.inl
+
+// Dependency:
+#include "../ext/vector_relational.hpp"
+#include "../common.hpp"
+
+namespace glm
+{
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<C, bool, Q> equal(mat<C, R, T, Q> const& a, mat<C, R, T, Q> const& b)
+	{
+		return equal(a, b, static_cast<T>(0));
+	}
+
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<C, bool, Q> equal(mat<C, R, T, Q> const& a, mat<C, R, T, Q> const& b, T Epsilon)
+	{
+		return equal(a, b, vec<C, T, Q>(Epsilon));
+	}
+
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<C, bool, Q> equal(mat<C, R, T, Q> const& a, mat<C, R, T, Q> const& b, vec<C, T, Q> const& Epsilon)
+	{
+		vec<C, bool, Q> Result(true);
+		for(length_t i = 0; i < C; ++i)
+			Result[i] = all(equal(a[i], b[i], Epsilon[i]));
+		return Result;
+	}
+
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<C, bool, Q> notEqual(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y)
+	{
+		return notEqual(x, y, static_cast<T>(0));
+	}
+
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<C, bool, Q> notEqual(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y, T Epsilon)
+	{
+		return notEqual(x, y, vec<C, T, Q>(Epsilon));
+	}
+
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<C, bool, Q> notEqual(mat<C, R, T, Q> const& a, mat<C, R, T, Q> const& b, vec<C, T, Q> const& Epsilon)
+	{
+		vec<C, bool, Q> Result(true);
+		for(length_t i = 0; i < C; ++i)
+			Result[i] = any(notEqual(a[i], b[i], Epsilon[i]));
+		return Result;
+	}
+
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<C, bool, Q> equal(mat<C, R, T, Q> const& a, mat<C, R, T, Q> const& b, int MaxULPs)
+	{
+		return equal(a, b, vec<C, int, Q>(MaxULPs));
+	}
+
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<C, bool, Q> equal(mat<C, R, T, Q> const& a, mat<C, R, T, Q> const& b, vec<C, int, Q> const& MaxULPs)
+	{
+		vec<C, bool, Q> Result(true);
+		for(length_t i = 0; i < C; ++i)
+			Result[i] = all(equal(a[i], b[i], MaxULPs[i]));
+		return Result;
+	}
+
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<C, bool, Q> notEqual(mat<C, R, T, Q> const& x, mat<C, R, T, Q> const& y, int MaxULPs)
+	{
+		return notEqual(x, y, vec<C, int, Q>(MaxULPs));
+	}
+
+	template<length_t C, length_t R, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<C, bool, Q> notEqual(mat<C, R, T, Q> const& a, mat<C, R, T, Q> const& b, vec<C, int, Q> const& MaxULPs)
+	{
+		vec<C, bool, Q> Result(true);
+		for(length_t i = 0; i < C; ++i)
+			Result[i] = any(notEqual(a[i], b[i], MaxULPs[i]));
+		return Result;
+	}
+
+}//namespace glm
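A minimal sketch of the epsilon-based comparison above, comparing two matrices with an absolute tolerance instead of exact floating-point equality. The helper name and the 1e-4 tolerance are illustrative:

```cpp
#include <glm/glm.hpp>
#include <glm/ext/matrix_relational.hpp>   // glm::equal for matrices

// equal() compares column by column and returns one bool per column.
bool nearlyEqual(glm::mat4 const& a, glm::mat4 const& b)
{
    glm::bvec4 const perColumn = glm::equal(a, b, 0.0001f);
    return glm::all(perColumn); // true only if every column matched
}
```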
diff --git a/MacroLibX/third_party/glm/ext/matrix_transform.hpp b/MacroLibX/third_party/glm/ext/matrix_transform.hpp
new file mode 100644
index 0000000..cbd187e
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_transform.hpp
@@ -0,0 +1,144 @@
+/// @ref ext_matrix_transform
+/// @file glm/ext/matrix_transform.hpp
+///
+/// @defgroup ext_matrix_transform GLM_EXT_matrix_transform
+/// @ingroup ext
+///
+/// Defines functions that generate common transformation matrices.
+///
+/// The matrices generated by this extension use standard OpenGL fixed-function
+/// conventions. For example, the lookAt function generates a transform from world
+/// space into the specific eye space that the projective matrix functions
+/// (perspective, ortho, etc) are designed to expect. The OpenGL compatibility
+/// specifications defines the particular layout of this eye space.
+///
+/// Include <glm/ext/matrix_transform.hpp> to use the features of this extension.
+///
+/// @see ext_matrix_projection
+/// @see ext_matrix_clip_space
+
+#pragma once
+
+// Dependencies
+#include "../gtc/constants.hpp"
+#include "../geometric.hpp"
+#include "../trigonometric.hpp"
+#include "../matrix.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_EXT_matrix_transform extension included")
+#endif
+
+namespace glm
+{
+	/// @addtogroup ext_matrix_transform
+	/// @{
+
+	/// Builds an identity matrix.
+	template<typename genType>
+	GLM_FUNC_DECL GLM_CONSTEXPR genType identity();
+
+	/// Builds a translation 4 * 4 matrix created from a vector of 3 components.
+	///
+	/// @param m Input matrix multiplied by this translation matrix.
+	/// @param v Coordinates of a translation vector.
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	///
+	/// @code
+	/// #include <glm/glm.hpp>
+	/// #include <glm/gtc/matrix_transform.hpp>
+	/// ...
+	/// glm::mat4 m = glm::translate(glm::mat4(1.0f), glm::vec3(1.0f));
+	/// // m[0][0] == 1.0f, m[0][1] == 0.0f, m[0][2] == 0.0f, m[0][3] == 0.0f
+	/// // m[1][0] == 0.0f, m[1][1] == 1.0f, m[1][2] == 0.0f, m[1][3] == 0.0f
+	/// // m[2][0] == 0.0f, m[2][1] == 0.0f, m[2][2] == 1.0f, m[2][3] == 0.0f
+	/// // m[3][0] == 1.0f, m[3][1] == 1.0f, m[3][2] == 1.0f, m[3][3] == 1.0f
+	/// @endcode
+	///
+	/// @see - translate(mat<4, 4, T, Q> const& m, T x, T y, T z)
+	/// @see - translate(vec<3, T, Q> const& v)
+	/// @see glTranslate man page
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL mat<4, 4, T, Q> translate(
+		mat<4, 4, T, Q> const& m, vec<3, T, Q> const& v);
+
+	/// Builds a rotation 4 * 4 matrix created from an axis vector and an angle.
+	///
+	/// @param m Input matrix multiplied by this rotation matrix.
+	/// @param angle Rotation angle expressed in radians.
+	/// @param axis Rotation axis, recommended to be normalized.
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	///
+	/// @see - rotate(mat<4, 4, T, Q> const& m, T angle, T x, T y, T z)
+	/// @see - rotate(T angle, vec<3, T, Q> const& v)
+	/// @see glRotate man page
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL mat<4, 4, T, Q> rotate(
+		mat<4, 4, T, Q> const& m, T angle, vec<3, T, Q> const& axis);
+
+	/// Builds a scale 4 * 4 matrix created from 3 scalars.
+	///
+	/// @param m Input matrix multiplied by this scale matrix.
+	/// @param v Ratio of scaling for each axis.
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	///
+	/// @see - scale(mat<4, 4, T, Q> const& m, T x, T y, T z)
+	/// @see - scale(vec<3, T, Q> const& v)
+	/// @see glScale man page
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL mat<4, 4, T, Q> scale(
+		mat<4, 4, T, Q> const& m, vec<3, T, Q> const& v);
+
+	/// Build a right handed look at view matrix.
+	///
+	/// @param eye Position of the camera
+	/// @param center Position where the camera is looking at
+	/// @param up Normalized up vector, how the camera is oriented. Typically (0, 0, 1)
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	///
+	/// @see - frustum(T const& left, T const& right, T const& bottom, T const& top, T const& nearVal, T const& farVal) frustum(T const& left, T const& right, T const& bottom, T const& top, T const& nearVal, T const& farVal)
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL mat<4, 4, T, Q> lookAtRH(
+		vec<3, T, Q> const& eye, vec<3, T, Q> const& center, vec<3, T, Q> const& up);
+
+	/// Build a left handed look at view matrix.
+	///
+	/// @param eye Position of the camera
+	/// @param center Position where the camera is looking at
+	/// @param up Normalized up vector, how the camera is oriented. Typically (0, 0, 1)
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	///
+	/// @see - frustum(T const& left, T const& right, T const& bottom, T const& top, T const& nearVal, T const& farVal) frustum(T const& left, T const& right, T const& bottom, T const& top, T const& nearVal, T const& farVal)
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL mat<4, 4, T, Q> lookAtLH(
+		vec<3, T, Q> const& eye, vec<3, T, Q> const& center, vec<3, T, Q> const& up);
+
+	/// Build a look at view matrix based on the default handedness.
+	///
+	/// @param eye Position of the camera
+	/// @param center Position where the camera is looking at
+	/// @param up Normalized up vector, how the camera is oriented. Typically (0, 0, 1)
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	///
+	/// @see - frustum(T const& left, T const& right, T const& bottom, T const& top, T const& nearVal, T const& farVal) frustum(T const& left, T const& right, T const& bottom, T const& top, T const& nearVal, T const& farVal)
+	/// @see gluLookAt man page
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL mat<4, 4, T, Q> lookAt(
+		vec<3, T, Q> const& eye, vec<3, T, Q> const& center, vec<3, T, Q> const& up);
+
+	/// @}
+}//namespace glm
+
+#include "matrix_transform.inl"
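Building on the @code example in the translate documentation above, a minimal sketch composing the three transform builders. Each call right-multiplies onto m, so the transforms apply to vertices in the reverse order of the calls; the helper name, angle, and offsets are illustrative:

```cpp
#include <glm/glm.hpp>
#include <glm/ext/matrix_transform.hpp>   // glm::identity, translate, rotate, scale

// Scale first, then rotate, then translate (in vertex order).
glm::mat4 makeModel()
{
    glm::mat4 m = glm::identity<glm::mat4>();
    m = glm::translate(m, glm::vec3(1.0f, 0.0f, 0.0f));
    m = glm::rotate(m, glm::radians(90.0f), glm::vec3(0.0f, 0.0f, 1.0f));
    m = glm::scale(m, glm::vec3(0.5f));
    return m;
}
```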
diff --git a/MacroLibX/third_party/glm/ext/matrix_transform.inl b/MacroLibX/third_party/glm/ext/matrix_transform.inl
new file mode 100644
index 0000000..a415157
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/matrix_transform.inl
@@ -0,0 +1,152 @@
+namespace glm
+{
+	template<typename genType>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType identity()
+	{
+		return detail::init_gentype<genType, detail::genTypeTrait<genType>::GENTYPE>::identity();
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER mat<4, 4, T, Q> translate(mat<4, 4, T, Q> const& m, vec<3, T, Q> const& v)
+	{
+		mat<4, 4, T, Q> Result(m);
+		Result[3] = m[0] * v[0] + m[1] * v[1] + m[2] * v[2] + m[3];
+		return Result;
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER mat<4, 4, T, Q> rotate(mat<4, 4, T, Q> const& m, T angle, vec<3, T, Q> const& v)
+	{
+		T const a = angle;
+		T const c = cos(a);
+		T const s = sin(a);
+
+		vec<3, T, Q> axis(normalize(v));
+		vec<3, T, Q> temp((T(1) - c) * axis);
+
+		mat<4, 4, T, Q> Rotate;
+		Rotate[0][0] = c + temp[0] * axis[0];
+		Rotate[0][1] = temp[0] * axis[1] + s * axis[2];
+		Rotate[0][2] = temp[0] * axis[2] - s * axis[1];
+
+		Rotate[1][0] = temp[1] * axis[0] - s * axis[2];
+		Rotate[1][1] = c + temp[1] * axis[1];
+		Rotate[1][2] = temp[1] * axis[2] + s * axis[0];
+
+		Rotate[2][0] = temp[2] * axis[0] + s * axis[1];
+		Rotate[2][1] = temp[2] * axis[1] - s * axis[0];
+		Rotate[2][2] = c + temp[2] * axis[2];
+
+		mat<4, 4, T, Q> Result;
+		Result[0] = m[0] * Rotate[0][0] + m[1] * Rotate[0][1] + m[2] * Rotate[0][2];
+		Result[1] = m[0] * Rotate[1][0] + m[1] * Rotate[1][1] + m[2] * Rotate[1][2];
+		Result[2] = m[0] * Rotate[2][0] + m[1] * Rotate[2][1] + m[2] * Rotate[2][2];
+		Result[3] = m[3];
+		return Result;
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER mat<4, 4, T, Q> rotate_slow(mat<4, 4, T, Q> const& m, T angle, vec<3, T, Q> const& v)
+	{
+		T const a = angle;
+		T const c = cos(a);
+		T const s = sin(a);
+		mat<4, 4, T, Q> Result;
+
+		vec<3, T, Q> axis = normalize(v);
+
+		Result[0][0] = c + (static_cast<T>(1) - c) * axis.x * axis.x;
+		Result[0][1] = (static_cast<T>(1) - c) * axis.x * axis.y + s * axis.z;
+		Result[0][2] = (static_cast<T>(1) - c) * axis.x * axis.z - s * axis.y;
+		Result[0][3] = static_cast<T>(0);
+
+		Result[1][0] = (static_cast<T>(1) - c) * axis.y * axis.x - s * axis.z;
+		Result[1][1] = c + (static_cast<T>(1) - c) * axis.y * axis.y;
+		Result[1][2] = (static_cast<T>(1) - c) * axis.y * axis.z + s * axis.x;
+		Result[1][3] = static_cast<T>(0);
+
+		Result[2][0] = (static_cast<T>(1) - c) * axis.z * axis.x + s * axis.y;
+		Result[2][1] = (static_cast<T>(1) - c) * axis.z * axis.y - s * axis.x;
+		Result[2][2] = c + (static_cast<T>(1) - c) * axis.z * axis.z;
+		Result[2][3] = static_cast<T>(0);
+
+		Result[3] = vec<4, T, Q>(0, 0, 0, 1);
+		return m * Result;
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER mat<4, 4, T, Q> scale(mat<4, 4, T, Q> const& m, vec<3, T, Q> const& v)
+	{
+		mat<4, 4, T, Q> Result;
+		Result[0] = m[0] * v[0];
+		Result[1] = m[1] * v[1];
+		Result[2] = m[2] * v[2];
+		Result[3] = m[3];
+		return Result;
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER mat<4, 4, T, Q> scale_slow(mat<4, 4, T, Q> const& m, vec<3, T, Q> const& v)
+	{
+		mat<4, 4, T, Q> Result(T(1));
+		Result[0][0] = v.x;
+		Result[1][1] = v.y;
+		Result[2][2] = v.z;
+		return m * Result;
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER mat<4, 4, T, Q> lookAtRH(vec<3, T, Q> const& eye, vec<3, T, Q> const& center, vec<3, T, Q> const& up)
+	{
+		vec<3, T, Q> const f(normalize(center - eye));
+		vec<3, T, Q> const s(normalize(cross(f, up)));
+		vec<3, T, Q> const u(cross(s, f));
+
+		mat<4, 4, T, Q> Result(1);
+		Result[0][0] = s.x;
+		Result[1][0] = s.y;
+		Result[2][0] = s.z;
+		Result[0][1] = u.x;
+		Result[1][1] = u.y;
+		Result[2][1] = u.z;
+		Result[0][2] =-f.x;
+		Result[1][2] =-f.y;
+		Result[2][2] =-f.z;
+		Result[3][0] =-dot(s, eye);
+		Result[3][1] =-dot(u, eye);
+		Result[3][2] = dot(f, eye);
+		return Result;
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER mat<4, 4, T, Q> lookAtLH(vec<3, T, Q> const& eye, vec<3, T, Q> const& center, vec<3, T, Q> const& up)
+	{
+		vec<3, T, Q> const f(normalize(center - eye));
+		vec<3, T, Q> const s(normalize(cross(up, f)));
+		vec<3, T, Q> const u(cross(f, s));
+
+		mat<4, 4, T, Q> Result(1);
+		Result[0][0] = s.x;
+		Result[1][0] = s.y;
+		Result[2][0] = s.z;
+		Result[0][1] = u.x;
+		Result[1][1] = u.y;
+		Result[2][1] = u.z;
+		Result[0][2] = f.x;
+		Result[1][2] = f.y;
+		Result[2][2] = f.z;
+		Result[3][0] = -dot(s, eye);
+		Result[3][1] = -dot(u, eye);
+		Result[3][2] = -dot(f, eye);
+		return Result;
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER mat<4, 4, T, Q> lookAt(vec<3, T, Q> const& eye, vec<3, T, Q> const& center, vec<3, T, Q> const& up)
+	{
+		GLM_IF_CONSTEXPR(GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT)
+			return lookAtLH(eye, center, up);
+		else
+			return lookAtRH(eye, center, up);
+	}
+}//namespace glm
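A short usage sketch for the lookAt family above. Note that lookAt() itself dispatches to lookAtRH or lookAtLH based on GLM_CONFIG_CLIP_CONTROL, so most code can call it directly; the helper name and up vector are illustrative:

```cpp
#include <glm/glm.hpp>
#include <glm/ext/matrix_transform.hpp>   // glm::lookAt

// A view matrix for a camera at `eye` looking at the origin, +Y up.
glm::mat4 makeView(glm::vec3 const& eye)
{
    return glm::lookAt(eye, glm::vec3(0.0f), glm::vec3(0.0f, 1.0f, 0.0f));
}
```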
diff --git a/MacroLibX/third_party/glm/ext/quaternion_common.hpp b/MacroLibX/third_party/glm/ext/quaternion_common.hpp
new file mode 100644
index 0000000..2980ed4
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/quaternion_common.hpp
@@ -0,0 +1,120 @@
+/// @ref ext_quaternion_common
+/// @file glm/ext/quaternion_common.hpp
+///
+/// @defgroup ext_quaternion_common GLM_EXT_quaternion_common
+/// @ingroup ext
+///
+/// Provides common functions for quaternion types
+///
+/// Include <glm/ext/quaternion_common.hpp> to use the features of this extension.
+///
+/// @see ext_scalar_common
+/// @see ext_vector_common
+/// @see ext_quaternion_float
+/// @see ext_quaternion_double
+/// @see ext_quaternion_exponential
+/// @see ext_quaternion_geometric
+/// @see ext_quaternion_relational
+/// @see ext_quaternion_trigonometric
+/// @see ext_quaternion_transform
+
+#pragma once
+
+// Dependency:
+#include "../ext/scalar_constants.hpp"
+#include "../ext/quaternion_geometric.hpp"
+#include "../common.hpp"
+#include "../trigonometric.hpp"
+#include "../exponential.hpp"
+#include <limits>
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_EXT_quaternion_common extension included")
+#endif
+
+namespace glm
+{
+	/// @addtogroup ext_quaternion_common
+	/// @{
+
+	/// Spherical linear interpolation of two quaternions.
+	/// The interpolation is oriented and the rotation is performed at constant speed.
+	/// For short path spherical linear interpolation, use the slerp function.
+	///
+	/// @param x A quaternion
+	/// @param y A quaternion
+	/// @param a Interpolation factor. The interpolation is defined beyond the range [0, 1].
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	///
+	/// @see - slerp(qua<T, Q> const& x, qua<T, Q> const& y, T const& a)
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL qua<T, Q> mix(qua<T, Q> const& x, qua<T, Q> const& y, T a);
+
+	/// Linear interpolation of two quaternions.
+	/// The interpolation is oriented.
+	///
+	/// @param x A quaternion
+	/// @param y A quaternion
+	/// @param a Interpolation factor. The interpolation is defined in the range [0, 1].
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL qua<T, Q> lerp(qua<T, Q> const& x, qua<T, Q> const& y, T a);
+
+	/// Spherical linear interpolation of two quaternions.
+	/// The interpolation always take the short path and the rotation is performed at constant speed.
+	///
+	/// @param x A quaternion
+	/// @param y A quaternion
+	/// @param a Interpolation factor. The interpolation is defined beyond the range [0, 1].
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL qua<T, Q> slerp(qua<T, Q> const& x, qua<T, Q> const& y, T a);
+
+	/// Returns the q conjugate.
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL qua<T, Q> conjugate(qua<T, Q> const& q);
+
+	/// Returns the q inverse.
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL qua<T, Q> inverse(qua<T, Q> const& q);
+
+	/// Returns true if x holds a NaN (not a number)
+	/// representation in the underlying implementation's set of
+	/// floating point representations. Returns false otherwise,
+	/// including for implementations with no NaN
+	/// representations.
+	///
+	/// /!\ When using compiler fast math, this function may fail.
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL vec<4, bool, Q> isnan(qua<T, Q> const& x);
+
+	/// Returns true if x holds a positive infinity or negative
+	/// infinity representation in the underlying implementation's
+	/// set of floating point representations. Returns false
+	/// otherwise, including for implementations with no infinity
+	/// representations.
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL vec<4, bool, Q> isinf(qua<T, Q> const& x);
+
+	/// @}
+} //namespace glm
+
+#include "quaternion_common.inl"
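A minimal sketch of the interpolation functions declared above. slerp() always takes the short arc, while mix() preserves the input orientation sense; the helper name is illustrative:

```cpp
#include <glm/ext/quaternion_float.hpp>    // glm::quat
#include <glm/ext/quaternion_common.hpp>   // glm::slerp, glm::mix

// Orientation halfway between `from` and `to`, along the short arc.
glm::quat halfway(glm::quat const& from, glm::quat const& to)
{
    return glm::slerp(from, to, 0.5f);
}
```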
diff --git a/MacroLibX/third_party/glm/ext/quaternion_common.inl b/MacroLibX/third_party/glm/ext/quaternion_common.inl
new file mode 100644
index 0000000..3b2846f
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/quaternion_common.inl
@@ -0,0 +1,107 @@
+namespace glm
+{
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER qua<T, Q> mix(qua<T, Q> const& x, qua<T, Q> const& y, T a)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'mix' only accept floating-point inputs");
+
+		T const cosTheta = dot(x, y);
+
+		// Perform a linear interpolation when cosTheta is close to 1 to avoid side effect of sin(angle) becoming a zero denominator
+		if(cosTheta > static_cast<T>(1) - epsilon<T>())
+		{
+			// Linear interpolation
+			return qua<T, Q>(
+				mix(x.w, y.w, a),
+				mix(x.x, y.x, a),
+				mix(x.y, y.y, a),
+				mix(x.z, y.z, a));
+		}
+		else
+		{
+			// Essential Mathematics, page 467
+			T angle = acos(cosTheta);
+			return (sin((static_cast<T>(1) - a) * angle) * x + sin(a * angle) * y) / sin(angle);
+		}
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER qua<T, Q> lerp(qua<T, Q> const& x, qua<T, Q> const& y, T a)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'lerp' only accept floating-point inputs");
+
+		// Lerp is only defined in [0, 1]
+		assert(a >= static_cast<T>(0));
+		assert(a <= static_cast<T>(1));
+
+		return x * (static_cast<T>(1) - a) + (y * a);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER qua<T, Q> slerp(qua<T, Q> const& x, qua<T, Q> const& y, T a)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'slerp' only accept floating-point inputs");
+
+		qua<T, Q> z = y;
+
+		T cosTheta = dot(x, y);
+
+		// If cosTheta < 0, the interpolation will take the long way around the sphere.
+		// To fix this, one quat must be negated.
+		if(cosTheta < static_cast<T>(0))
+		{
+			z = -y;
+			cosTheta = -cosTheta;
+		}
+
+		// Perform a linear interpolation when cosTheta is close to 1 to avoid side effect of sin(angle) becoming a zero denominator
+		if(cosTheta > static_cast<T>(1) - epsilon<T>())
+		{
+			// Linear interpolation
+			return qua<T, Q>(
+				mix(x.w, z.w, a),
+				mix(x.x, z.x, a),
+				mix(x.y, z.y, a),
+				mix(x.z, z.z, a));
+		}
+		else
+		{
+			// Essential Mathematics, page 467
+			T angle = acos(cosTheta);
+			return (sin((static_cast<T>(1) - a) * angle) * x + sin(a * angle) * z) / sin(angle);
+		}
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER qua<T, Q> conjugate(qua<T, Q> const& q)
+	{
+		return qua<T, Q>(q.w, -q.x, -q.y, -q.z);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER qua<T, Q> inverse(qua<T, Q> const& q)
+	{
+		return conjugate(q) / dot(q, q);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<4, bool, Q> isnan(qua<T, Q> const& q)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'isnan' only accept floating-point inputs");
+
+		return vec<4, bool, Q>(isnan(q.x), isnan(q.y), isnan(q.z), isnan(q.w));
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<4, bool, Q> isinf(qua<T, Q> const& q)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'isinf' only accept floating-point inputs");
+
+		return vec<4, bool, Q>(isinf(q.x), isinf(q.y), isinf(q.z), isinf(q.w));
+	}
+}//namespace glm
+
+#if GLM_CONFIG_SIMD == GLM_ENABLE
+#	include "quaternion_common_simd.inl"
+#endif
+
diff --git a/MacroLibX/third_party/glm/ext/quaternion_common_simd.inl b/MacroLibX/third_party/glm/ext/quaternion_common_simd.inl
new file mode 100644
index 0000000..ddfc8a4
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/quaternion_common_simd.inl
@@ -0,0 +1,18 @@
+#if GLM_ARCH & GLM_ARCH_SSE2_BIT
+
+namespace glm{
+namespace detail
+{
+	template<qualifier Q>
+	struct compute_dot<qua<float, Q>, float, true>
+	{
+		static GLM_FUNC_QUALIFIER float call(qua<float, Q> const& x, qua<float, Q> const& y)
+		{
+			return _mm_cvtss_f32(glm_vec1_dot(x.data, y.data));
+		}
+	};
+}//namespace detail
+}//namespace glm
+
+#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT
+
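A brief sketch exercising conjugate() and inverse() from the file above. For a unit quaternion the conjugate equals the inverse; inverse() also handles non-unit quaternions by dividing by the squared length. The helper name is illustrative:

```cpp
#include <glm/glm.hpp>
#include <glm/ext/quaternion_float.hpp>    // glm::quat
#include <glm/ext/quaternion_common.hpp>   // glm::inverse, glm::conjugate

// Undo the rotation that q applies to a vector.
glm::vec3 rotateBack(glm::quat const& q, glm::vec3 const& v)
{
    return glm::inverse(q) * v;
}
```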
diff --git a/MacroLibX/third_party/glm/ext/quaternion_double.hpp b/MacroLibX/third_party/glm/ext/quaternion_double.hpp
new file mode 100644
index 0000000..63b24de
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/quaternion_double.hpp
@@ -0,0 +1,39 @@
+/// @ref ext_quaternion_double
+/// @file glm/ext/quaternion_double.hpp
+///
+/// @defgroup ext_quaternion_double GLM_EXT_quaternion_double
+/// @ingroup ext
+///
+/// Exposes double-precision floating point quaternion type.
+///
+/// Include <glm/ext/quaternion_double.hpp> to use the features of this extension.
+///
+/// @see ext_quaternion_float
+/// @see ext_quaternion_double_precision
+/// @see ext_quaternion_common
+/// @see ext_quaternion_exponential
+/// @see ext_quaternion_geometric
+/// @see ext_quaternion_relational
+/// @see ext_quaternion_transform
+/// @see ext_quaternion_trigonometric
+
+#pragma once
+
+// Dependency:
+#include "../detail/type_quat.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_EXT_quaternion_double extension included")
+#endif
+
+namespace glm
+{
+	/// @addtogroup ext_quaternion_double
+	/// @{
+
+	/// Quaternion of double-precision floating-point numbers.
+	typedef qua<double, defaultp> dquat;
+
+	/// @}
+} //namespace glm
+
diff --git a/MacroLibX/third_party/glm/ext/quaternion_double_precision.hpp b/MacroLibX/third_party/glm/ext/quaternion_double_precision.hpp
new file mode 100644
index 0000000..8aa24a1
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/quaternion_double_precision.hpp
@@ -0,0 +1,42 @@
+/// @ref ext_quaternion_double_precision
+/// @file glm/ext/quaternion_double_precision.hpp
+///
+/// @defgroup ext_quaternion_double_precision GLM_EXT_quaternion_double_precision
+/// @ingroup ext
+///
+/// Exposes double-precision floating point quaternion type with various precision in term of ULPs.
+///
+/// Include <glm/ext/quaternion_double_precision.hpp> to use the features of this extension.
+
+#pragma once
+
+// Dependency:
+#include "../detail/type_quat.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_EXT_quaternion_double_precision extension included")
+#endif
+
+namespace glm
+{
+	/// @addtogroup ext_quaternion_double_precision
+	/// @{
+
+	/// Quaternion of double-precision floating-point numbers using high precision arithmetic in term of ULPs.
+	///
+	/// @see ext_quaternion_double_precision
+	typedef qua<double, lowp> lowp_dquat;
+
+	/// Quaternion of medium double-qualifier floating-point numbers using high precision arithmetic in term of ULPs.
+	///
+	/// @see ext_quaternion_double_precision
+	typedef qua<double, mediump> mediump_dquat;
+
+	/// Quaternion of high double-qualifier floating-point numbers using high precision arithmetic in term of ULPs.
+	///
+	/// @see ext_quaternion_double_precision
+	typedef qua<double, highp> highp_dquat;
+
+	/// @}
+} //namespace glm
+
diff --git a/MacroLibX/third_party/glm/ext/quaternion_exponential.hpp b/MacroLibX/third_party/glm/ext/quaternion_exponential.hpp
new file mode 100644
index 0000000..affe297
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/quaternion_exponential.hpp
@@ -0,0 +1,63 @@
+/// @ref ext_quaternion_exponential
+/// @file glm/ext/quaternion_exponential.hpp
+///
+/// @defgroup ext_quaternion_exponential GLM_EXT_quaternion_exponential
+/// @ingroup ext
+///
+/// Provides exponential functions for quaternion types
+///
+/// Include <glm/ext/quaternion_exponential.hpp> to use the features of this extension.
+///
+/// @see core_exponential
+/// @see ext_quaternion_float
+/// @see ext_quaternion_double
+
+#pragma once
+
+// Dependency:
+#include "../common.hpp"
+#include "../trigonometric.hpp"
+#include "../geometric.hpp"
+#include "../ext/scalar_constants.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_EXT_quaternion_exponential extension included")
+#endif
+
+namespace glm
+{
+	/// @addtogroup ext_quaternion_transform
+	/// @{
+
+	/// Returns a exponential of a quaternion.
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL qua<T, Q> exp(qua<T, Q> const& q);
+
+	/// Returns a logarithm of a quaternion
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL qua<T, Q> log(qua<T, Q> const& q);
+
+	/// Returns a quaternion raised to a power.
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL qua<T, Q> pow(qua<T, Q> const& q, T y);
+
+	/// Returns the square root of a quaternion
+	///
+	/// @tparam T A floating-point scalar type
+	/// @tparam Q A value from qualifier enum
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL qua<T, Q> sqrt(qua<T, Q> const& q);
+
+	/// @}
+} //namespace glm
+
+#include "quaternion_exponential.inl"
diff --git a/MacroLibX/third_party/glm/ext/quaternion_exponential.inl b/MacroLibX/third_party/glm/ext/quaternion_exponential.inl
new file mode 100644
index 0000000..8456c00
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/quaternion_exponential.inl
@@ -0,0 +1,85 @@
+#include "scalar_constants.hpp"
+
+namespace glm
+{
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER qua<T, Q> exp(qua<T, Q> const& q)
+	{
+		vec<3, T, Q> u(q.x, q.y, q.z);
+		T const Angle = glm::length(u);
+		if (Angle < epsilon<T>())
+			return qua<T, Q>();
+
+		vec<3, T, Q> const v(u / Angle);
+		return qua<T, Q>(cos(Angle), sin(Angle) * v);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER qua<T, Q> log(qua<T, Q> const& q)
+	{
+		vec<3, T, Q> u(q.x, q.y, q.z);
+		T Vec3Len = length(u);
+
+		if (Vec3Len < epsilon<T>())
+		{
+			if(q.w > static_cast<T>(0))
+				return qua<T, Q>(log(q.w), static_cast<T>(0), static_cast<T>(0), static_cast<T>(0));
+			else if(q.w < static_cast<T>(0))
+				return qua<T, Q>(log(-q.w), pi<T>(), static_cast<T>(0), static_cast<T>(0));
+			else
+				return qua<T, Q>(std::numeric_limits<T>::infinity(), std::numeric_limits<T>::infinity(), std::numeric_limits<T>::infinity(), std::numeric_limits<T>::infinity());
+		}
+		else
+		{
+			T t = atan(Vec3Len, T(q.w)) / Vec3Len;
+			T QuatLen2 = Vec3Len * Vec3Len + q.w * q.w;
+			return qua<T, Q>(static_cast<T>(0.5) * log(QuatLen2), t * q.x, t * q.y, t * q.z);
+		}
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER qua<T, Q> pow(qua<T, Q> const& x, T y)
+	{
+		//Raising to the power of 0 should yield 1
+		//Needed to prevent a division by 0 error later on
+		if(y > -epsilon<T>() && y < epsilon<T>())
+			return qua<T, Q>(1,0,0,0);
+
+		//To deal with non-unit quaternions
+		T magnitude = sqrt(x.x * x.x + x.y * x.y + x.z * x.z + x.w * x.w);
+
+		T Angle;
+		if(abs(x.w / magnitude) > cos_one_over_two<T>())
+		{
+			//Scalar component is close to 1; using it to recover angle would lose precision
+			//Instead, we use the non-scalar components since sin() is accurate around 0
+
+			//Prevent a division by 0 error later on
+			T VectorMagnitude = x.x * x.x + x.y * x.y + x.z * x.z;
+			if (glm::abs(VectorMagnitude - static_cast<T>(0)) < glm::epsilon<T>()) {
+				//Equivalent to raising a real number to a power
+				return qua<T, Q>(pow(x.w, y), 0, 0, 0);
+			}
+
+			Angle = asin(sqrt(VectorMagnitude) / magnitude);
+		}
+		else
+		{
+			//Scalar component is small, shouldn't cause loss of precision
+			Angle = acos(x.w / magnitude);
+		}
+
+		T NewAngle = Angle * y;
+		T Div = sin(NewAngle) / sin(Angle);
+		T Mag = pow(magnitude, y - static_cast<T>(1));
+		return qua<T, Q>(cos(NewAngle) * magnitude * Mag, x.x * Div * Mag, x.y * Div * Mag, x.z * Div * Mag);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER qua<T, Q> sqrt(qua<T, Q> const& x)
+	{
+		return pow(x, static_cast<T>(0.5));
+	}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/quaternion_float.hpp b/MacroLibX/third_party/glm/ext/quaternion_float.hpp
new file mode 100644
index 0000000..ca42a60
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/quaternion_float.hpp
@@ -0,0 +1,39 @@
+/// @ref ext_quaternion_float
+/// @file glm/ext/quaternion_float.hpp
+///
+/// @defgroup ext_quaternion_float GLM_EXT_quaternion_float
+/// @ingroup ext
+///
+/// Exposes single-precision floating point quaternion type.
+///
+/// Include <glm/ext/quaternion_float.hpp> to use the features of this extension.
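A short sketch of the exponential functions implemented above. Illustrative only, not part of the GLM sources; header paths and values are assumptions.

    // Illustrative sketch (editorial addition; not from the GLM sources).
    #include <cstdio>
    #include <glm/vec3.hpp>
    #include <glm/ext/quaternion_float.hpp>
    #include <glm/ext/quaternion_exponential.hpp>   // glm::exp, glm::log, glm::pow, glm::sqrt
    #include <glm/ext/quaternion_trigonometric.hpp> // glm::angleAxis
    #include <glm/ext/scalar_constants.hpp>

    int main()
    {
        glm::quat q = glm::angleAxis(glm::pi<float>() * 0.5f, glm::vec3(0.0f, 0.0f, 1.0f));

        glm::quat half     = glm::pow(q, 0.5f);      // same axis, half the rotation angle
        glm::quat alsoHalf = glm::sqrt(q);           // sqrt(q) is defined as pow(q, 0.5) above
        glm::quat back     = glm::exp(glm::log(q));  // round-trips up to float precision

        std::printf("half     = (%f, %f, %f, %f)\n", half.w, half.x, half.y, half.z);
        std::printf("alsoHalf = (%f, %f, %f, %f)\n", alsoHalf.w, alsoHalf.x, alsoHalf.y, alsoHalf.z);
        std::printf("back     = (%f, %f, %f, %f)\n", back.w, back.x, back.y, back.z);
    }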
+/// +/// @see ext_quaternion_double +/// @see ext_quaternion_float_precision +/// @see ext_quaternion_common +/// @see ext_quaternion_exponential +/// @see ext_quaternion_geometric +/// @see ext_quaternion_relational +/// @see ext_quaternion_transform +/// @see ext_quaternion_trigonometric + +#pragma once + +// Dependency: +#include "../detail/type_quat.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_quaternion_float extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_quaternion_float + /// @{ + + /// Quaternion of single-precision floating-point numbers. + typedef qua quat; + + /// @} +} //namespace glm + diff --git a/MacroLibX/third_party/glm/ext/quaternion_float_precision.hpp b/MacroLibX/third_party/glm/ext/quaternion_float_precision.hpp new file mode 100644 index 0000000..f9e4f5c --- /dev/null +++ b/MacroLibX/third_party/glm/ext/quaternion_float_precision.hpp @@ -0,0 +1,36 @@ +/// @ref ext_quaternion_float_precision +/// @file glm/ext/quaternion_float_precision.hpp +/// +/// @defgroup ext_quaternion_float_precision GLM_EXT_quaternion_float_precision +/// @ingroup ext +/// +/// Exposes single-precision floating point quaternion type with various precision in term of ULPs. +/// +/// Include to use the features of this extension. + +#pragma once + +// Dependency: +#include "../detail/type_quat.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_quaternion_float_precision extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_quaternion_float_precision + /// @{ + + /// Quaternion of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef qua lowp_quat; + + /// Quaternion of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef qua mediump_quat; + + /// Quaternion of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef qua highp_quat; + + /// @} +} //namespace glm + diff --git a/MacroLibX/third_party/glm/ext/quaternion_geometric.hpp b/MacroLibX/third_party/glm/ext/quaternion_geometric.hpp new file mode 100644 index 0000000..6d98bbe --- /dev/null +++ b/MacroLibX/third_party/glm/ext/quaternion_geometric.hpp @@ -0,0 +1,70 @@ +/// @ref ext_quaternion_geometric +/// @file glm/ext/quaternion_geometric.hpp +/// +/// @defgroup ext_quaternion_geometric GLM_EXT_quaternion_geometric +/// @ingroup ext +/// +/// Provides geometric functions for quaternion types +/// +/// Include to use the features of this extension. +/// +/// @see core_geometric +/// @see ext_quaternion_float +/// @see ext_quaternion_double + +#pragma once + +// Dependency: +#include "../geometric.hpp" +#include "../exponential.hpp" +#include "../ext/vector_relational.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_quaternion_geometric extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_quaternion_geometric + /// @{ + + /// Returns the norm of a quaternions + /// + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see ext_quaternion_geometric + template + GLM_FUNC_DECL T length(qua const& q); + + /// Returns the normalized quaternion. 
+	///
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see ext_quaternion_geometric
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL qua<T, Q> normalize(qua<T, Q> const& q);
+
+	/// Returns dot product of q1 and q2, i.e., q1[0] * q2[0] + q1[1] * q2[1] + ...
+	///
+	/// @tparam T Floating-point scalar types.
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see ext_quaternion_geometric
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL T dot(qua<T, Q> const& x, qua<T, Q> const& y);
+
+	/// Compute a cross product.
+	///
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see ext_quaternion_geometric
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER qua<T, Q> cross(qua<T, Q> const& q1, qua<T, Q> const& q2);
+
+	/// @}
+} //namespace glm
+
+#include "quaternion_geometric.inl"
diff --git a/MacroLibX/third_party/glm/ext/quaternion_geometric.inl b/MacroLibX/third_party/glm/ext/quaternion_geometric.inl
new file mode 100644
index 0000000..e155ac5
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/quaternion_geometric.inl
@@ -0,0 +1,36 @@
+namespace glm
+{
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER T dot(qua<T, Q> const& x, qua<T, Q> const& y)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'dot' accepts only floating-point inputs");
+		return detail::compute_dot<qua<T, Q>, T, detail::is_aligned<Q>::value>::call(x, y);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER T length(qua<T, Q> const& q)
+	{
+		return glm::sqrt(dot(q, q));
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER qua<T, Q> normalize(qua<T, Q> const& q)
+	{
+		T len = length(q);
+		if(len <= static_cast<T>(0)) // Problem
+			return qua<T, Q>(static_cast<T>(1), static_cast<T>(0), static_cast<T>(0), static_cast<T>(0));
+		T oneOverLen = static_cast<T>(1) / len;
+		return qua<T, Q>(q.w * oneOverLen, q.x * oneOverLen, q.y * oneOverLen, q.z * oneOverLen);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER qua<T, Q> cross(qua<T, Q> const& q1, qua<T, Q> const& q2)
+	{
+		return qua<T, Q>(
+			q1.w * q2.w - q1.x * q2.x - q1.y * q2.y - q1.z * q2.z,
+			q1.w * q2.x + q1.x * q2.w + q1.y * q2.z - q1.z * q2.y,
+			q1.w * q2.y + q1.y * q2.w + q1.z * q2.x - q1.x * q2.z,
+			q1.w * q2.z + q1.z * q2.w + q1.x * q2.y - q1.y * q2.x);
+	}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/quaternion_relational.hpp b/MacroLibX/third_party/glm/ext/quaternion_relational.hpp
new file mode 100644
index 0000000..7aa121d
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/quaternion_relational.hpp
@@ -0,0 +1,62 @@
+/// @ref ext_quaternion_relational
+/// @file glm/ext/quaternion_relational.hpp
+///
+/// @defgroup ext_quaternion_relational GLM_EXT_quaternion_relational
+/// @ingroup ext
+///
+/// Exposes comparison functions for quaternion types that take a user defined epsilon values.
+///
+/// Include <glm/ext/quaternion_relational.hpp> to use the features of this extension.
+///
+/// @see core_vector_relational
+/// @see ext_vector_relational
+/// @see ext_matrix_relational
+/// @see ext_quaternion_float
+/// @see ext_quaternion_double
+
+#pragma once
+
+// Dependency:
+#include "../vector_relational.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_EXT_quaternion_relational extension included")
+#endif
+
+namespace glm
+{
+	/// @addtogroup ext_quaternion_relational
+	/// @{
+
+	/// Returns the component-wise comparison of result x == y.
+	///
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL vec<4, bool, Q> equal(qua<T, Q> const& x, qua<T, Q> const& y);
+
+	/// Returns the component-wise comparison of |x - y| < epsilon.
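A quick sketch of the geometric helpers implemented above (dot, length, normalize). Illustrative only, not part of the GLM sources; values are hypothetical.

    // Illustrative sketch (editorial addition; not from the GLM sources).
    #include <cstdio>
    #include <glm/ext/quaternion_float.hpp>
    #include <glm/ext/quaternion_geometric.hpp> // glm::dot, glm::length, glm::normalize, glm::cross

    int main()
    {
        glm::quat q(2.0f, 0.0f, 0.0f, 0.0f);  // deliberately non-unit (w = 2)

        float len      = glm::length(q);       // sqrt(dot(q, q)) == 2
        glm::quat unit = glm::normalize(q);    // (1, 0, 0, 0)
        float d        = glm::dot(unit, unit); // 1 for a unit quaternion

        std::printf("len = %f, dot = %f, unit.w = %f\n", len, d, unit.w);
    }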
+ /// + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + template + GLM_FUNC_DECL vec<4, bool, Q> equal(qua const& x, qua const& y, T epsilon); + + /// Returns the component-wise comparison of result x != y. + /// + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + template + GLM_FUNC_DECL vec<4, bool, Q> notEqual(qua const& x, qua const& y); + + /// Returns the component-wise comparison of |x - y| >= epsilon. + /// + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + template + GLM_FUNC_DECL vec<4, bool, Q> notEqual(qua const& x, qua const& y, T epsilon); + + /// @} +} //namespace glm + +#include "quaternion_relational.inl" diff --git a/MacroLibX/third_party/glm/ext/quaternion_relational.inl b/MacroLibX/third_party/glm/ext/quaternion_relational.inl new file mode 100644 index 0000000..b1713e9 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/quaternion_relational.inl @@ -0,0 +1,35 @@ +namespace glm +{ + template + GLM_FUNC_QUALIFIER vec<4, bool, Q> equal(qua const& x, qua const& y) + { + vec<4, bool, Q> Result; + for(length_t i = 0; i < x.length(); ++i) + Result[i] = x[i] == y[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<4, bool, Q> equal(qua const& x, qua const& y, T epsilon) + { + vec<4, T, Q> v(x.x - y.x, x.y - y.y, x.z - y.z, x.w - y.w); + return lessThan(abs(v), vec<4, T, Q>(epsilon)); + } + + template + GLM_FUNC_QUALIFIER vec<4, bool, Q> notEqual(qua const& x, qua const& y) + { + vec<4, bool, Q> Result; + for(length_t i = 0; i < x.length(); ++i) + Result[i] = x[i] != y[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<4, bool, Q> notEqual(qua const& x, qua const& y, T epsilon) + { + vec<4, T, Q> v(x.x - y.x, x.y - y.y, x.z - y.z, x.w - y.w); + return greaterThanEqual(abs(v), vec<4, T, Q>(epsilon)); + } +}//namespace glm + diff --git a/MacroLibX/third_party/glm/ext/quaternion_transform.hpp b/MacroLibX/third_party/glm/ext/quaternion_transform.hpp new file mode 100644 index 0000000..a9cc5c2 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/quaternion_transform.hpp @@ -0,0 +1,47 @@ +/// @ref ext_quaternion_transform +/// @file glm/ext/quaternion_transform.hpp +/// +/// @defgroup ext_quaternion_transform GLM_EXT_quaternion_transform +/// @ingroup ext +/// +/// Provides transformation functions for quaternion types +/// +/// Include to use the features of this extension. +/// +/// @see ext_quaternion_float +/// @see ext_quaternion_double +/// @see ext_quaternion_exponential +/// @see ext_quaternion_geometric +/// @see ext_quaternion_relational +/// @see ext_quaternion_trigonometric + +#pragma once + +// Dependency: +#include "../common.hpp" +#include "../trigonometric.hpp" +#include "../geometric.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_quaternion_transform extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_quaternion_transform + /// @{ + + /// Rotates a quaternion from a vector of 3 components axis and an angle. + /// + /// @param q Source orientation + /// @param angle Angle expressed in radians. 
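A sketch of the epsilon-based quaternion comparisons implemented above. Illustrative only, not part of the GLM sources; the tolerance values are arbitrary, and glm::all comes from the core vector_relational header.

    // Illustrative sketch (editorial addition; not from the GLM sources).
    #include <cstdio>
    #include <glm/ext/quaternion_float.hpp>
    #include <glm/ext/quaternion_relational.hpp> // glm::equal, glm::notEqual for quaternions
    #include <glm/vector_relational.hpp>         // glm::all

    int main()
    {
        glm::quat a(1.0f, 0.0f, 0.0f, 0.0f);
        glm::quat b(1.0f, 1e-6f, 0.0f, 0.0f);

        bool exact  = glm::all(glm::equal(a, b));        // false: strict component-wise ==
        bool approx = glm::all(glm::equal(a, b, 1e-4f)); // true: |a - b| < epsilon per component

        std::printf("exact = %d, approx = %d\n", (int)exact, (int)approx);
    }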
+ /// @param axis Axis of the rotation + /// + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + template + GLM_FUNC_DECL qua rotate(qua const& q, T const& angle, vec<3, T, Q> const& axis); + /// @} +} //namespace glm + +#include "quaternion_transform.inl" diff --git a/MacroLibX/third_party/glm/ext/quaternion_transform.inl b/MacroLibX/third_party/glm/ext/quaternion_transform.inl new file mode 100644 index 0000000..b87ecb6 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/quaternion_transform.inl @@ -0,0 +1,24 @@ +namespace glm +{ + template + GLM_FUNC_QUALIFIER qua rotate(qua const& q, T const& angle, vec<3, T, Q> const& v) + { + vec<3, T, Q> Tmp = v; + + // Axis of rotation must be normalised + T len = glm::length(Tmp); + if(abs(len - static_cast(1)) > static_cast(0.001)) + { + T oneOverLen = static_cast(1) / len; + Tmp.x *= oneOverLen; + Tmp.y *= oneOverLen; + Tmp.z *= oneOverLen; + } + + T const AngleRad(angle); + T const Sin = sin(AngleRad * static_cast(0.5)); + + return q * qua(cos(AngleRad * static_cast(0.5)), Tmp.x * Sin, Tmp.y * Sin, Tmp.z * Sin); + } +}//namespace glm + diff --git a/MacroLibX/third_party/glm/ext/quaternion_trigonometric.hpp b/MacroLibX/third_party/glm/ext/quaternion_trigonometric.hpp new file mode 100644 index 0000000..76cea27 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/quaternion_trigonometric.hpp @@ -0,0 +1,63 @@ +/// @ref ext_quaternion_trigonometric +/// @file glm/ext/quaternion_trigonometric.hpp +/// +/// @defgroup ext_quaternion_trigonometric GLM_EXT_quaternion_trigonometric +/// @ingroup ext +/// +/// Provides trigonometric functions for quaternion types +/// +/// Include to use the features of this extension. +/// +/// @see ext_quaternion_float +/// @see ext_quaternion_double +/// @see ext_quaternion_exponential +/// @see ext_quaternion_geometric +/// @see ext_quaternion_relational +/// @see ext_quaternion_transform + +#pragma once + +// Dependency: +#include "../trigonometric.hpp" +#include "../exponential.hpp" +#include "scalar_constants.hpp" +#include "vector_relational.hpp" +#include + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_quaternion_trigonometric extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_quaternion_trigonometric + /// @{ + + /// Returns the quaternion rotation angle. + /// + /// @tparam T A floating-point scalar type + /// @tparam Q A value from qualifier enum + template + GLM_FUNC_DECL T angle(qua const& x); + + /// Returns the q rotation axis. + /// + /// @tparam T A floating-point scalar type + /// @tparam Q A value from qualifier enum + template + GLM_FUNC_DECL vec<3, T, Q> axis(qua const& x); + + /// Build a quaternion from an angle and a normalized axis. + /// + /// @param angle Angle expressed in radians. + /// @param axis Axis of the quaternion, must be normalized. 
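A usage sketch for the rotate overload implemented above; note that the implementation re-normalises the axis when it deviates noticeably from unit length. Illustrative only, not part of the GLM sources; values are hypothetical.

    // Illustrative sketch (editorial addition; not from the GLM sources).
    #include <cstdio>
    #include <glm/vec3.hpp>
    #include <glm/ext/quaternion_float.hpp>
    #include <glm/ext/quaternion_transform.hpp> // glm::rotate(quat, angle, axis)
    #include <glm/ext/scalar_constants.hpp>

    int main()
    {
        glm::quat orientation(1.0f, 0.0f, 0.0f, 0.0f); // identity

        // Accumulate a quarter-turn around Y (angle in radians).
        orientation = glm::rotate(orientation, glm::pi<float>() * 0.5f, glm::vec3(0.0f, 1.0f, 0.0f));

        std::printf("q = (%f, %f, %f, %f)\n",
            orientation.w, orientation.x, orientation.y, orientation.z);
    }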
+ /// + /// @tparam T A floating-point scalar type + /// @tparam Q A value from qualifier enum + template + GLM_FUNC_DECL qua angleAxis(T const& angle, vec<3, T, Q> const& axis); + + /// @} +} //namespace glm + +#include "quaternion_trigonometric.inl" diff --git a/MacroLibX/third_party/glm/ext/quaternion_trigonometric.inl b/MacroLibX/third_party/glm/ext/quaternion_trigonometric.inl new file mode 100644 index 0000000..06b7c4c --- /dev/null +++ b/MacroLibX/third_party/glm/ext/quaternion_trigonometric.inl @@ -0,0 +1,34 @@ +#include "scalar_constants.hpp" + +namespace glm +{ + template + GLM_FUNC_QUALIFIER T angle(qua const& x) + { + if (abs(x.w) > cos_one_over_two()) + { + return asin(sqrt(x.x * x.x + x.y * x.y + x.z * x.z)) * static_cast(2); + } + + return acos(x.w) * static_cast(2); + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> axis(qua const& x) + { + T const tmp1 = static_cast(1) - x.w * x.w; + if(tmp1 <= static_cast(0)) + return vec<3, T, Q>(0, 0, 1); + T const tmp2 = static_cast(1) / sqrt(tmp1); + return vec<3, T, Q>(x.x * tmp2, x.y * tmp2, x.z * tmp2); + } + + template + GLM_FUNC_QUALIFIER qua angleAxis(T const& angle, vec<3, T, Q> const& v) + { + T const a(angle); + T const s = glm::sin(a * static_cast(0.5)); + + return qua(glm::cos(a * static_cast(0.5)), v * s); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/scalar_common.hpp b/MacroLibX/third_party/glm/ext/scalar_common.hpp new file mode 100644 index 0000000..4ab0f88 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/scalar_common.hpp @@ -0,0 +1,103 @@ +/// @ref ext_scalar_common +/// @file glm/ext/scalar_common.hpp +/// +/// @defgroup ext_scalar_common GLM_EXT_scalar_common +/// @ingroup ext +/// +/// Exposes min and max functions for 3 to 4 scalar parameters. +/// +/// Include to use the features of this extension. +/// +/// @see core_func_common +/// @see ext_vector_common + +#pragma once + +// Dependency: +#include "../common.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_scalar_common extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_scalar_common + /// @{ + + /// Returns the minimum component-wise values of 3 inputs + /// + /// @tparam T A floating-point scalar type. + template + GLM_FUNC_DECL T min(T a, T b, T c); + + /// Returns the minimum component-wise values of 4 inputs + /// + /// @tparam T A floating-point scalar type. + template + GLM_FUNC_DECL T min(T a, T b, T c, T d); + + /// Returns the maximum component-wise values of 3 inputs + /// + /// @tparam T A floating-point scalar type. + template + GLM_FUNC_DECL T max(T a, T b, T c); + + /// Returns the maximum component-wise values of 4 inputs + /// + /// @tparam T A floating-point scalar type. + template + GLM_FUNC_DECL T max(T a, T b, T c, T d); + + /// Returns the minimum component-wise values of 2 inputs. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam T A floating-point scalar type. + /// + /// @see std::fmin documentation + template + GLM_FUNC_DECL T fmin(T a, T b); + + /// Returns the minimum component-wise values of 3 inputs. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam T A floating-point scalar type. + /// + /// @see std::fmin documentation + template + GLM_FUNC_DECL T fmin(T a, T b, T c); + + /// Returns the minimum component-wise values of 4 inputs. If one of the two arguments is NaN, the value of the other argument is returned. 
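A sketch of the angle/axis round trip implemented above; angle() switches to an asin-based path when |w| is close to 1, where acos would lose precision. Illustrative only, not part of the GLM sources; values are hypothetical.

    // Illustrative sketch (editorial addition; not from the GLM sources).
    #include <cstdio>
    #include <glm/vec3.hpp>
    #include <glm/ext/quaternion_float.hpp>
    #include <glm/ext/quaternion_trigonometric.hpp> // glm::angleAxis, glm::angle, glm::axis
    #include <glm/ext/scalar_constants.hpp>

    int main()
    {
        float angleIn = glm::pi<float>() / 3.0f;  // 60 degrees, in radians
        glm::vec3 axisIn(0.0f, 0.0f, 1.0f);       // must already be normalized

        glm::quat q = glm::angleAxis(angleIn, axisIn);

        std::printf("angle = %f, axis = (%f, %f, %f)\n",
            glm::angle(q), glm::axis(q).x, glm::axis(q).y, glm::axis(q).z);
    }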
+ /// + /// @tparam T A floating-point scalar type. + /// + /// @see std::fmin documentation + template + GLM_FUNC_DECL T fmin(T a, T b, T c, T d); + + /// Returns the maximum component-wise values of 2 inputs. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam T A floating-point scalar type. + /// + /// @see std::fmax documentation + template + GLM_FUNC_DECL T fmax(T a, T b); + + /// Returns the maximum component-wise values of 3 inputs. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam T A floating-point scalar type. + /// + /// @see std::fmax documentation + template + GLM_FUNC_DECL T fmax(T a, T b, T C); + + /// Returns the maximum component-wise values of 4 inputs. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam T A floating-point scalar type. + /// + /// @see std::fmax documentation + template + GLM_FUNC_DECL T fmax(T a, T b, T C, T D); + + /// @} +}//namespace glm + +#include "scalar_common.inl" diff --git a/MacroLibX/third_party/glm/ext/scalar_common.inl b/MacroLibX/third_party/glm/ext/scalar_common.inl new file mode 100644 index 0000000..118a670 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/scalar_common.inl @@ -0,0 +1,115 @@ +namespace glm +{ + template + GLM_FUNC_QUALIFIER T min(T a, T b, T c) + { + return glm::min(glm::min(a, b), c); + } + + template + GLM_FUNC_QUALIFIER T min(T a, T b, T c, T d) + { + return glm::min(glm::min(a, b), glm::min(c, d)); + } + + template + GLM_FUNC_QUALIFIER T max(T a, T b, T c) + { + return glm::max(glm::max(a, b), c); + } + + template + GLM_FUNC_QUALIFIER T max(T a, T b, T c, T d) + { + return glm::max(glm::max(a, b), glm::max(c, d)); + } + +# if GLM_HAS_CXX11_STL + using std::fmin; +# else + template + GLM_FUNC_QUALIFIER T fmin(T a, T b) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmin' only accept floating-point input"); + + if (isnan(a)) + return b; + return min(a, b); + } +# endif + + template + GLM_FUNC_QUALIFIER T fmin(T a, T b, T c) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmin' only accept floating-point input"); + + if (isnan(a)) + return fmin(b, c); + if (isnan(b)) + return fmin(a, c); + if (isnan(c)) + return min(a, b); + return min(a, b, c); + } + + template + GLM_FUNC_QUALIFIER T fmin(T a, T b, T c, T d) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmin' only accept floating-point input"); + + if (isnan(a)) + return fmin(b, c, d); + if (isnan(b)) + return min(a, fmin(c, d)); + if (isnan(c)) + return fmin(min(a, b), d); + if (isnan(d)) + return min(a, b, c); + return min(a, b, c, d); + } + + +# if GLM_HAS_CXX11_STL + using std::fmax; +# else + template + GLM_FUNC_QUALIFIER T fmax(T a, T b) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmax' only accept floating-point input"); + + if (isnan(a)) + return b; + return max(a, b); + } +# endif + + template + GLM_FUNC_QUALIFIER T fmax(T a, T b, T c) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmax' only accept floating-point input"); + + if (isnan(a)) + return fmax(b, c); + if (isnan(b)) + return fmax(a, c); + if (isnan(c)) + return max(a, b); + return max(a, b, c); + } + + template + GLM_FUNC_QUALIFIER T fmax(T a, T b, T c, T d) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmax' only accept floating-point input"); + + if (isnan(a)) + return fmax(b, c, d); + if (isnan(b)) + return max(a, fmax(c, d)); + if (isnan(c)) + return fmax(max(a, b), d); + if 
(isnan(d)) + return max(a, b, c); + return max(a, b, c, d); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/scalar_constants.hpp b/MacroLibX/third_party/glm/ext/scalar_constants.hpp new file mode 100644 index 0000000..74e210d --- /dev/null +++ b/MacroLibX/third_party/glm/ext/scalar_constants.hpp @@ -0,0 +1,40 @@ +/// @ref ext_scalar_constants +/// @file glm/ext/scalar_constants.hpp +/// +/// @defgroup ext_scalar_constants GLM_EXT_scalar_constants +/// @ingroup ext +/// +/// Provides a list of constants and precomputed useful values. +/// +/// Include to use the features of this extension. + +#pragma once + +// Dependencies +#include "../detail/setup.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_scalar_constants extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_scalar_constants + /// @{ + + /// Return the epsilon constant for floating point types. + template + GLM_FUNC_DECL GLM_CONSTEXPR genType epsilon(); + + /// Return the pi constant for floating point types. + template + GLM_FUNC_DECL GLM_CONSTEXPR genType pi(); + + /// Return the value of cos(1 / 2) for floating point types. + template + GLM_FUNC_DECL GLM_CONSTEXPR genType cos_one_over_two(); + + /// @} +} //namespace glm + +#include "scalar_constants.inl" diff --git a/MacroLibX/third_party/glm/ext/scalar_constants.inl b/MacroLibX/third_party/glm/ext/scalar_constants.inl new file mode 100644 index 0000000..b475adf --- /dev/null +++ b/MacroLibX/third_party/glm/ext/scalar_constants.inl @@ -0,0 +1,24 @@ +#include + +namespace glm +{ + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType epsilon() + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'epsilon' only accepts floating-point inputs"); + return std::numeric_limits::epsilon(); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType pi() + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'pi' only accepts floating-point inputs"); + return static_cast(3.14159265358979323846264338327950288); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType cos_one_over_two() + { + return genType(0.877582561890372716130286068203503191); + } +} //namespace glm diff --git a/MacroLibX/third_party/glm/ext/scalar_int_sized.hpp b/MacroLibX/third_party/glm/ext/scalar_int_sized.hpp new file mode 100644 index 0000000..8e9c511 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/scalar_int_sized.hpp @@ -0,0 +1,70 @@ +/// @ref ext_scalar_int_sized +/// @file glm/ext/scalar_int_sized.hpp +/// +/// @defgroup ext_scalar_int_sized GLM_EXT_scalar_int_sized +/// @ingroup ext +/// +/// Exposes sized signed integer scalar types. +/// +/// Include to use the features of this extension. +/// +/// @see ext_scalar_uint_sized + +#pragma once + +#include "../detail/setup.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_scalar_int_sized extension included") +#endif + +namespace glm{ +namespace detail +{ +# if GLM_HAS_EXTENDED_INTEGER_TYPE + typedef std::int8_t int8; + typedef std::int16_t int16; + typedef std::int32_t int32; +# else + typedef signed char int8; + typedef signed short int16; + typedef signed int int32; +#endif// + + template<> + struct is_int + { + enum test {value = ~0}; + }; + + template<> + struct is_int + { + enum test {value = ~0}; + }; + + template<> + struct is_int + { + enum test {value = ~0}; + }; +}//namespace detail + + + /// @addtogroup ext_scalar_int_sized + /// @{ + + /// 8 bit signed integer type. 
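A sketch of the 3/4-argument min/max and the NaN-skipping fmin/fmax defined above, together with the scalar constants. Illustrative only, not part of the GLM sources.

    // Illustrative sketch (editorial addition; not from the GLM sources).
    #include <cstdio>
    #include <cmath>                        // NAN
    #include <glm/ext/scalar_common.hpp>    // glm::min/max/fmin/fmax for 3-4 scalars
    #include <glm/ext/scalar_constants.hpp> // glm::epsilon, glm::pi

    int main()
    {
        float nan = NAN;

        // fmin/fmax skip NaN arguments instead of propagating them.
        std::printf("fmin = %f\n", glm::fmin(nan, 2.0f, 3.0f)); // 2.0
        std::printf("fmax = %f\n", glm::fmax(1.0f, nan, 3.0f)); // 3.0

        // min/max with three or four arguments simply nest the two-argument versions.
        std::printf("min3 = %f\n", glm::min(3.0f, 1.0f, 2.0f)); // 1.0
        std::printf("pi   = %f\n", glm::pi<float>());
    }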
+ typedef detail::int8 int8; + + /// 16 bit signed integer type. + typedef detail::int16 int16; + + /// 32 bit signed integer type. + typedef detail::int32 int32; + + /// 64 bit signed integer type. + typedef detail::int64 int64; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/scalar_integer.hpp b/MacroLibX/third_party/glm/ext/scalar_integer.hpp new file mode 100644 index 0000000..a2ca8a2 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/scalar_integer.hpp @@ -0,0 +1,92 @@ +/// @ref ext_scalar_integer +/// @file glm/ext/scalar_integer.hpp +/// +/// @see core (dependence) +/// +/// @defgroup ext_scalar_integer GLM_EXT_scalar_integer +/// @ingroup ext +/// +/// Include to use the features of this extension. + +#pragma once + +// Dependencies +#include "../detail/setup.hpp" +#include "../detail/qualifier.hpp" +#include "../detail/_vectorize.hpp" +#include "../detail/type_float.hpp" +#include "../vector_relational.hpp" +#include "../common.hpp" +#include + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_scalar_integer extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_scalar_integer + /// @{ + + /// Return true if the value is a power of two number. + /// + /// @see ext_scalar_integer + template + GLM_FUNC_DECL bool isPowerOfTwo(genIUType v); + + /// Return the power of two number which value is just higher the input value, + /// round up to a power of two. + /// + /// @see ext_scalar_integer + template + GLM_FUNC_DECL genIUType nextPowerOfTwo(genIUType v); + + /// Return the power of two number which value is just lower the input value, + /// round down to a power of two. + /// + /// @see ext_scalar_integer + template + GLM_FUNC_DECL genIUType prevPowerOfTwo(genIUType v); + + /// Return true if the 'Value' is a multiple of 'Multiple'. + /// + /// @see ext_scalar_integer + template + GLM_FUNC_DECL bool isMultiple(genIUType v, genIUType Multiple); + + /// Higher multiple number of Source. + /// + /// @tparam genIUType Integer scalar or vector types. + /// + /// @param v Source value to which is applied the function + /// @param Multiple Must be a null or positive value + /// + /// @see ext_scalar_integer + template + GLM_FUNC_DECL genIUType nextMultiple(genIUType v, genIUType Multiple); + + /// Lower multiple number of Source. + /// + /// @tparam genIUType Integer scalar or vector types. + /// + /// @param v Source value to which is applied the function + /// @param Multiple Must be a null or positive value + /// + /// @see ext_scalar_integer + template + GLM_FUNC_DECL genIUType prevMultiple(genIUType v, genIUType Multiple); + + /// Returns the bit number of the Nth significant bit set to + /// 1 in the binary representation of value. + /// If value bitcount is less than the Nth significant bit, -1 will be returned. + /// + /// @tparam genIUType Signed or unsigned integer scalar types. 
+ /// + /// @see ext_scalar_integer + template + GLM_FUNC_DECL int findNSB(genIUType x, int significantBitCount); + + /// @} +} //namespace glm + +#include "scalar_integer.inl" diff --git a/MacroLibX/third_party/glm/ext/scalar_integer.inl b/MacroLibX/third_party/glm/ext/scalar_integer.inl new file mode 100644 index 0000000..efba960 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/scalar_integer.inl @@ -0,0 +1,243 @@ +#include "../integer.hpp" + +namespace glm{ +namespace detail +{ + template + struct compute_ceilShift + { + GLM_FUNC_QUALIFIER static vec call(vec const& v, T) + { + return v; + } + }; + + template + struct compute_ceilShift + { + GLM_FUNC_QUALIFIER static vec call(vec const& v, T Shift) + { + return v | (v >> Shift); + } + }; + + template + struct compute_ceilPowerOfTwo + { + GLM_FUNC_QUALIFIER static vec call(vec const& x) + { + GLM_STATIC_ASSERT(!std::numeric_limits::is_iec559, "'ceilPowerOfTwo' only accept integer scalar or vector inputs"); + + vec const Sign(sign(x)); + + vec v(abs(x)); + + v = v - static_cast(1); + v = v | (v >> static_cast(1)); + v = v | (v >> static_cast(2)); + v = v | (v >> static_cast(4)); + v = compute_ceilShift= 2>::call(v, 8); + v = compute_ceilShift= 4>::call(v, 16); + v = compute_ceilShift= 8>::call(v, 32); + return (v + static_cast(1)) * Sign; + } + }; + + template + struct compute_ceilPowerOfTwo + { + GLM_FUNC_QUALIFIER static vec call(vec const& x) + { + GLM_STATIC_ASSERT(!std::numeric_limits::is_iec559, "'ceilPowerOfTwo' only accept integer scalar or vector inputs"); + + vec v(x); + + v = v - static_cast(1); + v = v | (v >> static_cast(1)); + v = v | (v >> static_cast(2)); + v = v | (v >> static_cast(4)); + v = compute_ceilShift= 2>::call(v, 8); + v = compute_ceilShift= 4>::call(v, 16); + v = compute_ceilShift= 8>::call(v, 32); + return v + static_cast(1); + } + }; + + template + struct compute_ceilMultiple{}; + + template<> + struct compute_ceilMultiple + { + template + GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple) + { + if(Source > genType(0)) + return Source + (Multiple - std::fmod(Source, Multiple)); + else + return Source + std::fmod(-Source, Multiple); + } + }; + + template<> + struct compute_ceilMultiple + { + template + GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple) + { + genType Tmp = Source - genType(1); + return Tmp + (Multiple - (Tmp % Multiple)); + } + }; + + template<> + struct compute_ceilMultiple + { + template + GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple) + { + assert(Multiple > genType(0)); + if(Source > genType(0)) + { + genType Tmp = Source - genType(1); + return Tmp + (Multiple - (Tmp % Multiple)); + } + else + return Source + (-Source % Multiple); + } + }; + + template + struct compute_floorMultiple{}; + + template<> + struct compute_floorMultiple + { + template + GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple) + { + if(Source >= genType(0)) + return Source - std::fmod(Source, Multiple); + else + return Source - std::fmod(Source, Multiple) - Multiple; + } + }; + + template<> + struct compute_floorMultiple + { + template + GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple) + { + if(Source >= genType(0)) + return Source - Source % Multiple; + else + { + genType Tmp = Source + genType(1); + return Tmp - Tmp % Multiple - Multiple; + } + } + }; + + template<> + struct compute_floorMultiple + { + template + GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple) + { + if(Source >= 
genType(0)) + return Source - Source % Multiple; + else + { + genType Tmp = Source + genType(1); + return Tmp - Tmp % Multiple - Multiple; + } + } + }; +}//namespace detail + + template + GLM_FUNC_QUALIFIER bool isPowerOfTwo(genIUType Value) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'isPowerOfTwo' only accept integer inputs"); + + genIUType const Result = glm::abs(Value); + return !(Result & (Result - 1)); + } + + template + GLM_FUNC_QUALIFIER genIUType nextPowerOfTwo(genIUType value) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'nextPowerOfTwo' only accept integer inputs"); + + return detail::compute_ceilPowerOfTwo<1, genIUType, defaultp, std::numeric_limits::is_signed>::call(vec<1, genIUType, defaultp>(value)).x; + } + + template + GLM_FUNC_QUALIFIER genIUType prevPowerOfTwo(genIUType value) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'prevPowerOfTwo' only accept integer inputs"); + + return isPowerOfTwo(value) ? value : static_cast(static_cast(1) << static_cast(findMSB(value))); + } + + template + GLM_FUNC_QUALIFIER bool isMultiple(genIUType Value, genIUType Multiple) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'isMultiple' only accept integer inputs"); + + return isMultiple(vec<1, genIUType>(Value), vec<1, genIUType>(Multiple)).x; + } + + template + GLM_FUNC_QUALIFIER genIUType nextMultiple(genIUType Source, genIUType Multiple) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'nextMultiple' only accept integer inputs"); + + return detail::compute_ceilMultiple::is_iec559, std::numeric_limits::is_signed>::call(Source, Multiple); + } + + template + GLM_FUNC_QUALIFIER genIUType prevMultiple(genIUType Source, genIUType Multiple) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'prevMultiple' only accept integer inputs"); + + return detail::compute_floorMultiple::is_iec559, std::numeric_limits::is_signed>::call(Source, Multiple); + } + + template + GLM_FUNC_QUALIFIER int findNSB(genIUType x, int significantBitCount) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'findNSB' only accept integer inputs"); + + if(bitCount(x) < significantBitCount) + return -1; + + genIUType const One = static_cast(1); + int bitPos = 0; + + genIUType key = x; + int nBitCount = significantBitCount; + int Step = sizeof(x) * 8 / 2; + while (key > One) + { + genIUType Mask = static_cast((One << Step) - One); + genIUType currentKey = key & Mask; + int currentBitCount = bitCount(currentKey); + if (nBitCount > currentBitCount) + { + nBitCount -= currentBitCount; + bitPos += Step; + key >>= static_cast(Step); + } + else + { + key = key & Mask; + } + + Step >>= 1; + } + + return static_cast(bitPos); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/scalar_relational.hpp b/MacroLibX/third_party/glm/ext/scalar_relational.hpp new file mode 100644 index 0000000..3076a5e --- /dev/null +++ b/MacroLibX/third_party/glm/ext/scalar_relational.hpp @@ -0,0 +1,65 @@ +/// @ref ext_scalar_relational +/// @file glm/ext/scalar_relational.hpp +/// +/// @defgroup ext_scalar_relational GLM_EXT_scalar_relational +/// @ingroup ext +/// +/// Exposes comparison functions for scalar types that take a user defined epsilon values. +/// +/// Include to use the features of this extension. 
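A sketch of the integer helpers implemented above, e.g. for rounding buffer sizes to power-of-two or alignment boundaries. Illustrative only, not part of the GLM sources.

    // Illustrative sketch (editorial addition; not from the GLM sources).
    #include <cstdio>
    #include <glm/ext/scalar_integer.hpp> // glm::isPowerOfTwo, next/prevPowerOfTwo, next/prevMultiple

    int main()
    {
        std::printf("isPowerOfTwo(64)    = %d\n", (int)glm::isPowerOfTwo(64));   // 1
        std::printf("nextPowerOfTwo(100) = %u\n", glm::nextPowerOfTwo(100u));    // 128
        std::printf("prevPowerOfTwo(100) = %u\n", glm::prevPowerOfTwo(100u));    // 64

        // Round a size up/down to a 16-byte alignment boundary.
        std::printf("nextMultiple(100, 16) = %u\n", glm::nextMultiple(100u, 16u)); // 112
        std::printf("prevMultiple(100, 16) = %u\n", glm::prevMultiple(100u, 16u)); // 96
    }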
+/// +/// @see core_vector_relational +/// @see ext_vector_relational +/// @see ext_matrix_relational + +#pragma once + +// Dependencies +#include "../detail/qualifier.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_scalar_relational extension included") +#endif + +namespace glm +{ + /// Returns the component-wise comparison of |x - y| < epsilon. + /// True if this expression is satisfied. + /// + /// @tparam genType Floating-point or integer scalar types + template + GLM_FUNC_DECL GLM_CONSTEXPR bool equal(genType const& x, genType const& y, genType const& epsilon); + + /// Returns the component-wise comparison of |x - y| >= epsilon. + /// True if this expression is not satisfied. + /// + /// @tparam genType Floating-point or integer scalar types + template + GLM_FUNC_DECL GLM_CONSTEXPR bool notEqual(genType const& x, genType const& y, genType const& epsilon); + + /// Returns the component-wise comparison between two scalars in term of ULPs. + /// True if this expression is satisfied. + /// + /// @param x First operand. + /// @param y Second operand. + /// @param ULPs Maximum difference in ULPs between the two operators to consider them equal. + /// + /// @tparam genType Floating-point or integer scalar types + template + GLM_FUNC_DECL GLM_CONSTEXPR bool equal(genType const& x, genType const& y, int ULPs); + + /// Returns the component-wise comparison between two scalars in term of ULPs. + /// True if this expression is not satisfied. + /// + /// @param x First operand. + /// @param y Second operand. + /// @param ULPs Maximum difference in ULPs between the two operators to consider them not equal. + /// + /// @tparam genType Floating-point or integer scalar types + template + GLM_FUNC_DECL GLM_CONSTEXPR bool notEqual(genType const& x, genType const& y, int ULPs); + + /// @} +}//namespace glm + +#include "scalar_relational.inl" diff --git a/MacroLibX/third_party/glm/ext/scalar_relational.inl b/MacroLibX/third_party/glm/ext/scalar_relational.inl new file mode 100644 index 0000000..c85583e --- /dev/null +++ b/MacroLibX/third_party/glm/ext/scalar_relational.inl @@ -0,0 +1,40 @@ +#include "../common.hpp" +#include "../ext/scalar_int_sized.hpp" +#include "../ext/scalar_uint_sized.hpp" +#include "../detail/type_float.hpp" + +namespace glm +{ + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool equal(genType const& x, genType const& y, genType const& epsilon) + { + return abs(x - y) <= epsilon; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool notEqual(genType const& x, genType const& y, genType const& epsilon) + { + return abs(x - y) > epsilon; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool equal(genType const& x, genType const& y, int MaxULPs) + { + detail::float_t const a(x); + detail::float_t const b(y); + + // Different signs means they do not match. + if(a.negative() != b.negative()) + return false; + + // Find the difference in ULPs. 
+ typename detail::float_t::int_type const DiffULPs = abs(a.i - b.i); + return DiffULPs <= MaxULPs; + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool notEqual(genType const& x, genType const& y, int ULPs) + { + return !equal(x, y, ULPs); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/scalar_uint_sized.hpp b/MacroLibX/third_party/glm/ext/scalar_uint_sized.hpp new file mode 100644 index 0000000..fd5267f --- /dev/null +++ b/MacroLibX/third_party/glm/ext/scalar_uint_sized.hpp @@ -0,0 +1,70 @@ +/// @ref ext_scalar_uint_sized +/// @file glm/ext/scalar_uint_sized.hpp +/// +/// @defgroup ext_scalar_uint_sized GLM_EXT_scalar_uint_sized +/// @ingroup ext +/// +/// Exposes sized unsigned integer scalar types. +/// +/// Include to use the features of this extension. +/// +/// @see ext_scalar_int_sized + +#pragma once + +#include "../detail/setup.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_scalar_uint_sized extension included") +#endif + +namespace glm{ +namespace detail +{ +# if GLM_HAS_EXTENDED_INTEGER_TYPE + typedef std::uint8_t uint8; + typedef std::uint16_t uint16; + typedef std::uint32_t uint32; +# else + typedef unsigned char uint8; + typedef unsigned short uint16; + typedef unsigned int uint32; +#endif + + template<> + struct is_int + { + enum test {value = ~0}; + }; + + template<> + struct is_int + { + enum test {value = ~0}; + }; + + template<> + struct is_int + { + enum test {value = ~0}; + }; +}//namespace detail + + + /// @addtogroup ext_scalar_uint_sized + /// @{ + + /// 8 bit unsigned integer type. + typedef detail::uint8 uint8; + + /// 16 bit unsigned integer type. + typedef detail::uint16 uint16; + + /// 32 bit unsigned integer type. + typedef detail::uint32 uint32; + + /// 64 bit unsigned integer type. + typedef detail::uint64 uint64; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/scalar_ulp.hpp b/MacroLibX/third_party/glm/ext/scalar_ulp.hpp new file mode 100644 index 0000000..941ada3 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/scalar_ulp.hpp @@ -0,0 +1,74 @@ +/// @ref ext_scalar_ulp +/// @file glm/ext/scalar_ulp.hpp +/// +/// @defgroup ext_scalar_ulp GLM_EXT_scalar_ulp +/// @ingroup ext +/// +/// Allow the measurement of the accuracy of a function against a reference +/// implementation. This extension works on floating-point data and provide results +/// in ULP. +/// +/// Include to use the features of this extension. +/// +/// @see ext_vector_ulp +/// @see ext_scalar_relational + +#pragma once + +// Dependencies +#include "../ext/scalar_int_sized.hpp" +#include "../common.hpp" +#include "../detail/qualifier.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_scalar_ulp extension included") +#endif + +namespace glm +{ + /// Return the next ULP value(s) after the input value(s). + /// + /// @tparam genType A floating-point scalar type. + /// + /// @see ext_scalar_ulp + template + GLM_FUNC_DECL genType nextFloat(genType x); + + /// Return the previous ULP value(s) before the input value(s). + /// + /// @tparam genType A floating-point scalar type. + /// + /// @see ext_scalar_ulp + template + GLM_FUNC_DECL genType prevFloat(genType x); + + /// Return the value(s) ULP distance after the input value(s). + /// + /// @tparam genType A floating-point scalar type. + /// + /// @see ext_scalar_ulp + template + GLM_FUNC_DECL genType nextFloat(genType x, int ULPs); + + /// Return the value(s) ULP distance before the input value(s). 
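A sketch contrasting the absolute-epsilon and ULP-based comparisons declared above: an absolute epsilon does not scale with magnitude, while a ULP tolerance does. Illustrative only, not part of the GLM sources.

    // Illustrative sketch (editorial addition; not from the GLM sources).
    #include <cstdio>
    #include <cmath>                         // std::nextafterf
    #include <glm/ext/scalar_relational.hpp> // glm::equal with absolute epsilon or ULPs
    #include <glm/ext/scalar_constants.hpp>  // glm::epsilon

    int main()
    {
        float a = 100.0f;
        float b = std::nextafterf(a, 200.0f); // exactly one ULP above a (~100.0000076)

        bool byEpsilon = glm::equal(a, b, glm::epsilon<float>()); // false: gap ~7.6e-6 > 1.2e-7
        bool byULPs    = glm::equal(a, b, 1);                     // true: 1 ULP apart

        std::printf("epsilon: %d, ULPs: %d\n", (int)byEpsilon, (int)byULPs);
    }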
+ /// + /// @tparam genType A floating-point scalar type. + /// + /// @see ext_scalar_ulp + template + GLM_FUNC_DECL genType prevFloat(genType x, int ULPs); + + /// Return the distance in the number of ULP between 2 single-precision floating-point scalars. + /// + /// @see ext_scalar_ulp + GLM_FUNC_DECL int floatDistance(float x, float y); + + /// Return the distance in the number of ULP between 2 double-precision floating-point scalars. + /// + /// @see ext_scalar_ulp + GLM_FUNC_DECL int64 floatDistance(double x, double y); + + /// @} +}//namespace glm + +#include "scalar_ulp.inl" diff --git a/MacroLibX/third_party/glm/ext/scalar_ulp.inl b/MacroLibX/third_party/glm/ext/scalar_ulp.inl new file mode 100644 index 0000000..308df15 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/scalar_ulp.inl @@ -0,0 +1,284 @@ +/// Copyright (C) 1993 by Sun Microsystems, Inc. All rights reserved. +/// +/// Developed at SunPro, a Sun Microsystems, Inc. business. +/// Permission to use, copy, modify, and distribute this +/// software is freely granted, provided that this notice +/// is preserved. + +#include "../detail/type_float.hpp" +#include "../ext/scalar_constants.hpp" +#include +#include + +#if(GLM_COMPILER & GLM_COMPILER_VC) +# pragma warning(push) +# pragma warning(disable : 4127) +#endif + +typedef union +{ + float value; + /* FIXME: Assumes 32 bit int. */ + unsigned int word; +} ieee_float_shape_type; + +typedef union +{ + double value; + struct + { + int lsw; + int msw; + } parts; +} ieee_double_shape_type; + +#define GLM_EXTRACT_WORDS(ix0,ix1,d) \ + do { \ + ieee_double_shape_type ew_u; \ + ew_u.value = (d); \ + (ix0) = ew_u.parts.msw; \ + (ix1) = ew_u.parts.lsw; \ + } while (0) + +#define GLM_GET_FLOAT_WORD(i,d) \ + do { \ + ieee_float_shape_type gf_u; \ + gf_u.value = (d); \ + (i) = gf_u.word; \ + } while (0) + +#define GLM_SET_FLOAT_WORD(d,i) \ + do { \ + ieee_float_shape_type sf_u; \ + sf_u.word = (i); \ + (d) = sf_u.value; \ + } while (0) + +#define GLM_INSERT_WORDS(d,ix0,ix1) \ + do { \ + ieee_double_shape_type iw_u; \ + iw_u.parts.msw = (ix0); \ + iw_u.parts.lsw = (ix1); \ + (d) = iw_u.value; \ + } while (0) + +namespace glm{ +namespace detail +{ + GLM_FUNC_QUALIFIER float nextafterf(float x, float y) + { + volatile float t; + int hx, hy, ix, iy; + + GLM_GET_FLOAT_WORD(hx, x); + GLM_GET_FLOAT_WORD(hy, y); + ix = hx & 0x7fffffff; // |x| + iy = hy & 0x7fffffff; // |y| + + if((ix > 0x7f800000) || // x is nan + (iy > 0x7f800000)) // y is nan + return x + y; + if(abs(y - x) <= epsilon()) + return y; // x=y, return y + if(ix == 0) + { // x == 0 + GLM_SET_FLOAT_WORD(x, (hy & 0x80000000) | 1);// return +-minsubnormal + t = x * x; + if(abs(t - x) <= epsilon()) + return t; + else + return x; // raise underflow flag + } + if(hx >= 0) + { // x > 0 + if(hx > hy) // x > y, x -= ulp + hx -= 1; + else // x < y, x += ulp + hx += 1; + } + else + { // x < 0 + if(hy >= 0 || hx > hy) // x < y, x -= ulp + hx -= 1; + else // x > y, x += ulp + hx += 1; + } + hy = hx & 0x7f800000; + if(hy >= 0x7f800000) + return x + x; // overflow + if(hy < 0x00800000) // underflow + { + t = x * x; + if(abs(t - x) > epsilon()) + { // raise underflow flag + GLM_SET_FLOAT_WORD(y, hx); + return y; + } + } + GLM_SET_FLOAT_WORD(x, hx); + return x; + } + + GLM_FUNC_QUALIFIER double nextafter(double x, double y) + { + volatile double t; + int hx, hy, ix, iy; + unsigned int lx, ly; + + GLM_EXTRACT_WORDS(hx, lx, x); + GLM_EXTRACT_WORDS(hy, ly, y); + ix = hx & 0x7fffffff; // |x| + iy = hy & 0x7fffffff; // |y| + + if(((ix >= 0x7ff00000) && 
((ix - 0x7ff00000) | lx) != 0) || // x is nan + ((iy >= 0x7ff00000) && ((iy - 0x7ff00000) | ly) != 0)) // y is nan + return x + y; + if(abs(y - x) <= epsilon()) + return y; // x=y, return y + if((ix | lx) == 0) + { // x == 0 + GLM_INSERT_WORDS(x, hy & 0x80000000, 1); // return +-minsubnormal + t = x * x; + if(abs(t - x) <= epsilon()) + return t; + else + return x; // raise underflow flag + } + if(hx >= 0) { // x > 0 + if(hx > hy || ((hx == hy) && (lx > ly))) { // x > y, x -= ulp + if(lx == 0) hx -= 1; + lx -= 1; + } + else { // x < y, x += ulp + lx += 1; + if(lx == 0) hx += 1; + } + } + else { // x < 0 + if(hy >= 0 || hx > hy || ((hx == hy) && (lx > ly))){// x < y, x -= ulp + if(lx == 0) hx -= 1; + lx -= 1; + } + else { // x > y, x += ulp + lx += 1; + if(lx == 0) hx += 1; + } + } + hy = hx & 0x7ff00000; + if(hy >= 0x7ff00000) + return x + x; // overflow + if(hy < 0x00100000) + { // underflow + t = x * x; + if(abs(t - x) > epsilon()) + { // raise underflow flag + GLM_INSERT_WORDS(y, hx, lx); + return y; + } + } + GLM_INSERT_WORDS(x, hx, lx); + return x; + } +}//namespace detail +}//namespace glm + +#if(GLM_COMPILER & GLM_COMPILER_VC) +# pragma warning(pop) +#endif + +namespace glm +{ + template<> + GLM_FUNC_QUALIFIER float nextFloat(float x) + { +# if GLM_HAS_CXX11_STL + return std::nextafter(x, std::numeric_limits::max()); +# elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS))) + return detail::nextafterf(x, FLT_MAX); +# elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID) + return __builtin_nextafterf(x, FLT_MAX); +# else + return nextafterf(x, FLT_MAX); +# endif + } + + template<> + GLM_FUNC_QUALIFIER double nextFloat(double x) + { +# if GLM_HAS_CXX11_STL + return std::nextafter(x, std::numeric_limits::max()); +# elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS))) + return detail::nextafter(x, std::numeric_limits::max()); +# elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID) + return __builtin_nextafter(x, DBL_MAX); +# else + return nextafter(x, DBL_MAX); +# endif + } + + template + GLM_FUNC_QUALIFIER T nextFloat(T x, int ULPs) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'next_float' only accept floating-point input"); + assert(ULPs >= 0); + + T temp = x; + for(int i = 0; i < ULPs; ++i) + temp = nextFloat(temp); + return temp; + } + + GLM_FUNC_QUALIFIER float prevFloat(float x) + { +# if GLM_HAS_CXX11_STL + return std::nextafter(x, std::numeric_limits::min()); +# elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS))) + return detail::nextafterf(x, FLT_MIN); +# elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID) + return __builtin_nextafterf(x, FLT_MIN); +# else + return nextafterf(x, FLT_MIN); +# endif + } + + GLM_FUNC_QUALIFIER double prevFloat(double x) + { +# if GLM_HAS_CXX11_STL + return std::nextafter(x, std::numeric_limits::min()); +# elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS))) + return _nextafter(x, DBL_MIN); +# elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID) + return __builtin_nextafter(x, DBL_MIN); +# else + return nextafter(x, DBL_MIN); +# endif + } + + template + GLM_FUNC_QUALIFIER T prevFloat(T x, int ULPs) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'prev_float' only accept floating-point input"); + assert(ULPs >= 0); + + T temp = x; + for(int i = 0; i < ULPs; ++i) + temp = prevFloat(temp); + return temp; + } 
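As a quick illustration of the ULP-stepping helpers above (nextFloat/prevFloat, plus the floatDistance declared in the header), here is a sketch. Illustrative only, not part of the GLM sources.

    // Illustrative sketch (editorial addition; not from the GLM sources).
    #include <cstdio>
    #include <glm/ext/scalar_ulp.hpp> // glm::nextFloat, glm::prevFloat, glm::floatDistance

    int main()
    {
        float x   = 1.0f;
        float up  = glm::nextFloat(x);    // smallest float greater than x
        float up4 = glm::nextFloat(x, 4); // four ULP steps up
        float dn  = glm::prevFloat(x);    // largest float smaller than x

        std::printf("up - x = %g\n", up - x);                               // ~1.19e-7 at x == 1
        std::printf("distance(x, up4) = %d\n", glm::floatDistance(x, up4)); // 4
        std::printf("dn < x = %d\n", (int)(dn < x));                        // 1
    }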
+ + GLM_FUNC_QUALIFIER int floatDistance(float x, float y) + { + detail::float_t const a(x); + detail::float_t const b(y); + + return abs(a.i - b.i); + } + + GLM_FUNC_QUALIFIER int64 floatDistance(double x, double y) + { + detail::float_t const a(x); + detail::float_t const b(y); + + return abs(a.i - b.i); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_bool1.hpp b/MacroLibX/third_party/glm/ext/vector_bool1.hpp new file mode 100644 index 0000000..002c320 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_bool1.hpp @@ -0,0 +1,30 @@ +/// @ref ext_vector_bool1 +/// @file glm/ext/vector_bool1.hpp +/// +/// @defgroup ext_vector_bool1 GLM_EXT_vector_bool1 +/// @ingroup ext +/// +/// Exposes bvec1 vector type. +/// +/// Include to use the features of this extension. +/// +/// @see ext_vector_bool1_precision extension. + +#pragma once + +#include "../detail/type_vec1.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_vector_bool1 extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_vector_bool1 + /// @{ + + /// 1 components vector of boolean. + typedef vec<1, bool, defaultp> bvec1; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_bool1_precision.hpp b/MacroLibX/third_party/glm/ext/vector_bool1_precision.hpp new file mode 100644 index 0000000..e62d3cf --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_bool1_precision.hpp @@ -0,0 +1,34 @@ +/// @ref ext_vector_bool1_precision +/// @file glm/ext/vector_bool1_precision.hpp +/// +/// @defgroup ext_vector_bool1_precision GLM_EXT_vector_bool1_precision +/// @ingroup ext +/// +/// Exposes highp_bvec1, mediump_bvec1 and lowp_bvec1 types. +/// +/// Include to use the features of this extension. + +#pragma once + +#include "../detail/type_vec1.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_vector_bool1_precision extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_vector_bool1_precision + /// @{ + + /// 1 component vector of bool values. + typedef vec<1, bool, highp> highp_bvec1; + + /// 1 component vector of bool values. + typedef vec<1, bool, mediump> mediump_bvec1; + + /// 1 component vector of bool values. + typedef vec<1, bool, lowp> lowp_bvec1; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_bool2.hpp b/MacroLibX/third_party/glm/ext/vector_bool2.hpp new file mode 100644 index 0000000..52288b7 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_bool2.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/vector_bool2.hpp + +#pragma once +#include "../detail/type_vec2.hpp" + +namespace glm +{ + /// @addtogroup core_vector + /// @{ + + /// 2 components vector of boolean. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + typedef vec<2, bool, defaultp> bvec2; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_bool2_precision.hpp b/MacroLibX/third_party/glm/ext/vector_bool2_precision.hpp new file mode 100644 index 0000000..4370933 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_bool2_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/vector_bool2_precision.hpp + +#pragma once +#include "../detail/type_vec2.hpp" + +namespace glm +{ + /// @addtogroup core_vector_precision + /// @{ + + /// 2 components vector of high qualifier bool numbers. 
+ /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<2, bool, highp> highp_bvec2; + + /// 2 components vector of medium qualifier bool numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<2, bool, mediump> mediump_bvec2; + + /// 2 components vector of low qualifier bool numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<2, bool, lowp> lowp_bvec2; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_bool3.hpp b/MacroLibX/third_party/glm/ext/vector_bool3.hpp new file mode 100644 index 0000000..90a0b7e --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_bool3.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/vector_bool3.hpp + +#pragma once +#include "../detail/type_vec3.hpp" + +namespace glm +{ + /// @addtogroup core_vector + /// @{ + + /// 3 components vector of boolean. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + typedef vec<3, bool, defaultp> bvec3; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_bool3_precision.hpp b/MacroLibX/third_party/glm/ext/vector_bool3_precision.hpp new file mode 100644 index 0000000..89cd2d3 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_bool3_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/vector_bool3_precision.hpp + +#pragma once +#include "../detail/type_vec3.hpp" + +namespace glm +{ + /// @addtogroup core_vector_precision + /// @{ + + /// 3 components vector of high qualifier bool numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<3, bool, highp> highp_bvec3; + + /// 3 components vector of medium qualifier bool numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<3, bool, mediump> mediump_bvec3; + + /// 3 components vector of low qualifier bool numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<3, bool, lowp> lowp_bvec3; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_bool4.hpp b/MacroLibX/third_party/glm/ext/vector_bool4.hpp new file mode 100644 index 0000000..18aa71b --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_bool4.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/vector_bool4.hpp + +#pragma once +#include "../detail/type_vec4.hpp" + +namespace glm +{ + /// @addtogroup core_vector + /// @{ + + /// 4 components vector of boolean. 
+ /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + typedef vec<4, bool, defaultp> bvec4; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_bool4_precision.hpp b/MacroLibX/third_party/glm/ext/vector_bool4_precision.hpp new file mode 100644 index 0000000..79786e5 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_bool4_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/vector_bool4_precision.hpp + +#pragma once +#include "../detail/type_vec4.hpp" + +namespace glm +{ + /// @addtogroup core_vector_precision + /// @{ + + /// 4 components vector of high qualifier bool numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<4, bool, highp> highp_bvec4; + + /// 4 components vector of medium qualifier bool numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<4, bool, mediump> mediump_bvec4; + + /// 4 components vector of low qualifier bool numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<4, bool, lowp> lowp_bvec4; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_common.hpp b/MacroLibX/third_party/glm/ext/vector_common.hpp new file mode 100644 index 0000000..324fe1c --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_common.hpp @@ -0,0 +1,144 @@ +/// @ref ext_vector_common +/// @file glm/ext/vector_common.hpp +/// +/// @defgroup ext_vector_common GLM_EXT_vector_common +/// @ingroup ext +/// +/// Exposes min and max functions for 3 to 4 vector parameters. +/// +/// Include to use the features of this extension. 
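A sketch of the 3-argument vector min/max this extension exposes (declarations follow below). Illustrative only, not part of the GLM sources; values are hypothetical.

    // Illustrative sketch (editorial addition; not from the GLM sources).
    #include <cstdio>
    #include <glm/vec3.hpp>
    #include <glm/ext/vector_common.hpp> // glm::min/max over 3-4 vectors, NaN-aware fmin/fmax

    int main()
    {
        glm::vec3 a(1.0f, 5.0f, 9.0f);
        glm::vec3 b(2.0f, 4.0f, 8.0f);
        glm::vec3 c(3.0f, 6.0f, 7.0f);

        glm::vec3 lo = glm::min(a, b, c); // component-wise: (1, 4, 7)
        glm::vec3 hi = glm::max(a, b, c); // component-wise: (3, 6, 9)

        std::printf("lo = (%f, %f, %f)\n", lo.x, lo.y, lo.z);
        std::printf("hi = (%f, %f, %f)\n", hi.x, hi.y, hi.z);
    }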
+/// +/// @see core_common +/// @see ext_scalar_common + +#pragma once + +// Dependency: +#include "../ext/scalar_common.hpp" +#include "../common.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_vector_common extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_vector_common + /// @{ + + /// Return the minimum component-wise values of 3 inputs + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point or integer scalar types + /// @tparam Q Value from qualifier enum + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL GLM_CONSTEXPR vec<L, T, Q> min(vec<L, T, Q> const& a, vec<L, T, Q> const& b, vec<L, T, Q> const& c); + + /// Return the minimum component-wise values of 4 inputs + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point or integer scalar types + /// @tparam Q Value from qualifier enum + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL GLM_CONSTEXPR vec<L, T, Q> min(vec<L, T, Q> const& a, vec<L, T, Q> const& b, vec<L, T, Q> const& c, vec<L, T, Q> const& d); + + /// Return the maximum component-wise values of 3 inputs + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point or integer scalar types + /// @tparam Q Value from qualifier enum + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL GLM_CONSTEXPR vec<L, T, Q> max(vec<L, T, Q> const& x, vec<L, T, Q> const& y, vec<L, T, Q> const& z); + + /// Return the maximum component-wise values of 4 inputs + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point or integer scalar types + /// @tparam Q Value from qualifier enum + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL GLM_CONSTEXPR vec<L, T, Q> max(vec<L, T, Q> const& x, vec<L, T, Q> const& y, vec<L, T, Q> const& z, vec<L, T, Q> const& w); + + /// Returns y if y < x; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see std::fmin documentation + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> fmin(vec<L, T, Q> const& x, T y); + + /// Returns y if y < x; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see std::fmin documentation + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> fmin(vec<L, T, Q> const& x, vec<L, T, Q> const& y); + + /// Returns y if y < x; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see std::fmin documentation + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> fmin(vec<L, T, Q> const& a, vec<L, T, Q> const& b, vec<L, T, Q> const& c); + + /// Returns y if y < x; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see std::fmin documentation + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> fmin(vec<L, T, Q> const& a, vec<L, T, Q> const& b, vec<L, T, Q> const& c, vec<L, T, Q> const& d); + + /// Returns y if x < y; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see std::fmax documentation + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> fmax(vec<L, T, Q> const& a, T b); + + /// Returns y if x < y; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see std::fmax documentation + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> fmax(vec<L, T, Q> const& a, vec<L, T, Q> const& b); + + /// Returns y if x < y; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see std::fmax documentation + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> fmax(vec<L, T, Q> const& a, vec<L, T, Q> const& b, vec<L, T, Q> const& c); + + /// Returns y if x < y; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see std::fmax documentation + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> fmax(vec<L, T, Q> const& a, vec<L, T, Q> const& b, vec<L, T, Q> const& c, vec<L, T, Q> const& d); + + /// @} +}//namespace glm + +#include "vector_common.inl"
diff --git a/MacroLibX/third_party/glm/ext/vector_common.inl b/MacroLibX/third_party/glm/ext/vector_common.inl new file mode 100644 index 0000000..71f3809 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_common.inl @@ -0,0 +1,88 @@ +#include "../detail/_vectorize.hpp" + +namespace glm +{ + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<L, T, Q> min(vec<L, T, Q> const& x, vec<L, T, Q> const& y, vec<L, T, Q> const& z) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'min' only accept floating-point or integer inputs"); + return glm::min(glm::min(x, y), z); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<L, T, Q> min(vec<L, T, Q> const& x, vec<L, T, Q> const& y, vec<L, T, Q> const& z, vec<L, T, Q> const& w) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'min' only accept floating-point or integer inputs"); + return glm::min(glm::min(x, y), glm::min(z, w)); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<L, T, Q> max(vec<L, T, Q> const& x, vec<L, T, Q> const& y, vec<L, T, Q> const& z) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'max' only accept floating-point or integer inputs"); + return glm::max(glm::max(x, y), z); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<L, T, Q> max(vec<L, T, Q> const& x, vec<L, T, Q> const& y, vec<L, T, Q> const& z, vec<L, T, Q> const& w) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'max' only accept floating-point or integer inputs"); + return glm::max(glm::max(x, y), glm::max(z, w)); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> fmin(vec<L, T, Q> const& a, T b) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'fmin' only accept floating-point inputs"); + return detail::functor2<vec, L, T, Q>::call(fmin, a, vec<L, T, Q>(b)); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> fmin(vec<L, T, Q> const& a, vec<L, T, Q> const& b) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'fmin' only accept floating-point inputs"); + return detail::functor2<vec, L, T, Q>::call(fmin, a, b); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> fmin(vec<L, T, Q> const& a, vec<L, T, Q> const& b, vec<L, T, Q> const& c) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'fmin' only accept floating-point inputs"); + return fmin(fmin(a, b), c); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> fmin(vec<L, T, Q> const& a, vec<L, T, Q> const& b, vec<L, T, Q> const& c, vec<L, T, Q> const& d) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'fmin' only accept floating-point inputs"); + return fmin(fmin(a, b), fmin(c, d)); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> fmax(vec<L, T, Q> const& a, T b) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'fmax' only accept floating-point inputs"); + return detail::functor2<vec, L, T, Q>::call(fmax, a, vec<L, T, Q>(b)); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> fmax(vec<L, T, Q> const& a, vec<L, T, Q> const& b) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'fmax' only accept floating-point inputs"); + return detail::functor2<vec, L, T, Q>::call(fmax, a, b); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> fmax(vec<L, T, Q> const& a, vec<L, T, Q> const& b, vec<L, T, Q> const& c) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'fmax' only accept floating-point inputs"); + return fmax(fmax(a, b), c); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> fmax(vec<L, T, Q> const& a, vec<L, T, Q> const& b, vec<L, T, Q> const& c, vec<L, T, Q> const& d) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'fmax' only accept floating-point inputs"); + return fmax(fmax(a, b), fmax(c, d)); + } +}//namespace glm
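// A minimal usage sketch for GLM_EXT_vector_common (hypothetical demo function
// and values, assuming GLM is on the include path; not taken from the diff):
#include <glm/glm.hpp>
#include <glm/ext/vector_common.hpp>
#include <limits>

static glm::vec3 vector_common_demo()
{
	glm::vec3 const a(1.0f, -2.0f, 3.0f);
	glm::vec3 const b(0.5f, 4.0f, -1.0f);
	glm::vec3 const c(2.0f, 0.0f, 0.0f);
	glm::vec3 const lo = glm::min(a, b, c); // component-wise: (0.5, -2, -1)
	glm::vec3 const hi = glm::max(a, b, c); // component-wise: (2, 4, 3)
	// fmax follows std::fmax semantics: a NaN lane yields the other argument.
	glm::vec3 const nan(std::numeric_limits<float>::quiet_NaN());
	glm::vec3 const safe = glm::fmax(nan, a); // equals a in every component
	return glm::fmin(lo, hi, safe);
}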
diff --git a/MacroLibX/third_party/glm/ext/vector_double1.hpp b/MacroLibX/third_party/glm/ext/vector_double1.hpp new file mode 100644 index 0000000..3882667 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_double1.hpp @@ -0,0 +1,31 @@ +/// @ref ext_vector_double1 +/// @file glm/ext/vector_double1.hpp +/// +/// @defgroup ext_vector_double1 GLM_EXT_vector_double1 +/// @ingroup ext +/// +/// Exposes double-precision floating point vector type with one component. +/// +/// Include to use the features of this extension. +/// +/// @see ext_vector_double1_precision extension. +/// @see ext_vector_float1 extension. + +#pragma once + +#include "../detail/type_vec1.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_vector_double1 extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_vector_double1 + /// @{ + + /// 1 components vector of double-precision floating-point numbers. + typedef vec<1, double, defaultp> dvec1; + + /// @} +}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/vector_double1_precision.hpp b/MacroLibX/third_party/glm/ext/vector_double1_precision.hpp new file mode 100644 index 0000000..1d47195 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_double1_precision.hpp @@ -0,0 +1,36 @@ +/// @ref ext_vector_double1_precision +/// @file glm/ext/vector_double1_precision.hpp +/// +/// @defgroup ext_vector_double1_precision GLM_EXT_vector_double1_precision +/// @ingroup ext +/// +/// Exposes highp_dvec1, mediump_dvec1 and lowp_dvec1 types. +/// +/// Include to use the features of this extension. +/// +/// @see ext_vector_double1 + +#pragma once + +#include "../detail/type_vec1.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_vector_double1_precision extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_vector_double1_precision + /// @{ + + /// 1 component vector of double-precision floating-point numbers using high precision arithmetic in term of ULPs.
+ typedef vec<1, double, highp> highp_dvec1; + + /// 1 component vector of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef vec<1, double, mediump> mediump_dvec1; + + /// 1 component vector of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef vec<1, double, lowp> lowp_dvec1; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_double2.hpp b/MacroLibX/third_party/glm/ext/vector_double2.hpp new file mode 100644 index 0000000..60e3577 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_double2.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/vector_double2.hpp + +#pragma once +#include "../detail/type_vec2.hpp" + +namespace glm +{ + /// @addtogroup core_vector + /// @{ + + /// 2 components vector of double-precision floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + typedef vec<2, double, defaultp> dvec2; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_double2_precision.hpp b/MacroLibX/third_party/glm/ext/vector_double2_precision.hpp new file mode 100644 index 0000000..fa53940 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_double2_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/vector_double2_precision.hpp + +#pragma once +#include "../detail/type_vec2.hpp" + +namespace glm +{ + /// @addtogroup core_vector_precision + /// @{ + + /// 2 components vector of high double-qualifier floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<2, double, highp> highp_dvec2; + + /// 2 components vector of medium double-qualifier floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<2, double, mediump> mediump_dvec2; + + /// 2 components vector of low double-qualifier floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<2, double, lowp> lowp_dvec2; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_double3.hpp b/MacroLibX/third_party/glm/ext/vector_double3.hpp new file mode 100644 index 0000000..6dfe4c6 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_double3.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/vector_double3.hpp + +#pragma once +#include "../detail/type_vec3.hpp" + +namespace glm +{ + /// @addtogroup core_vector + /// @{ + + /// 3 components vector of double-precision floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + typedef vec<3, double, defaultp> dvec3; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_double3_precision.hpp b/MacroLibX/third_party/glm/ext/vector_double3_precision.hpp new file mode 100644 index 0000000..a8cfa37 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_double3_precision.hpp @@ -0,0 +1,34 @@ +/// @ref core +/// @file glm/ext/vector_double3_precision.hpp + +#pragma once +#include "../detail/type_vec3.hpp" + +namespace glm +{ + /// @addtogroup core_vector_precision + /// @{ + + /// 3 components vector of high double-qualifier floating-point numbers. + /// There is no guarantee on the actual qualifier. 
+ /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<3, double, highp> highp_dvec3; + + /// 3 components vector of medium double-qualifier floating-point numbers. + /// There is no guarantee on the actual qualifier. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<3, double, mediump> mediump_dvec3; + + /// 3 components vector of low double-qualifier floating-point numbers. + /// There is no guarantee on the actual qualifier. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<3, double, lowp> lowp_dvec3; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_double4.hpp b/MacroLibX/third_party/glm/ext/vector_double4.hpp new file mode 100644 index 0000000..87f225f --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_double4.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/vector_double4.hpp + +#pragma once +#include "../detail/type_vec4.hpp" + +namespace glm +{ + /// @addtogroup core_vector + /// @{ + + /// 4 components vector of double-precision floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + typedef vec<4, double, defaultp> dvec4; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_double4_precision.hpp b/MacroLibX/third_party/glm/ext/vector_double4_precision.hpp new file mode 100644 index 0000000..09cafa1 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_double4_precision.hpp @@ -0,0 +1,35 @@ +/// @ref core +/// @file glm/ext/vector_double4_precision.hpp + +#pragma once +#include "../detail/setup.hpp" +#include "../detail/type_vec4.hpp" + +namespace glm +{ + /// @addtogroup core_vector_precision + /// @{ + + /// 4 components vector of high double-qualifier floating-point numbers. + /// There is no guarantee on the actual qualifier. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<4, double, highp> highp_dvec4; + + /// 4 components vector of medium double-qualifier floating-point numbers. + /// There is no guarantee on the actual qualifier. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<4, double, mediump> mediump_dvec4; + + /// 4 components vector of low double-qualifier floating-point numbers. + /// There is no guarantee on the actual qualifier. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<4, double, lowp> lowp_dvec4; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_float1.hpp b/MacroLibX/third_party/glm/ext/vector_float1.hpp new file mode 100644 index 0000000..28acc2c --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_float1.hpp @@ -0,0 +1,31 @@ +/// @ref ext_vector_float1 +/// @file glm/ext/vector_float1.hpp +/// +/// @defgroup ext_vector_float1 GLM_EXT_vector_float1 +/// @ingroup ext +/// +/// Exposes single-precision floating point vector type with one component. +/// +/// Include to use the features of this extension. +/// +/// @see ext_vector_float1_precision extension. +/// @see ext_vector_double1 extension. 
+ +#pragma once + +#include "../detail/type_vec1.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_vector_float1 extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_vector_float1 + /// @{ + + /// 1 components vector of single-precision floating-point numbers. + typedef vec<1, float, defaultp> vec1; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_float1_precision.hpp b/MacroLibX/third_party/glm/ext/vector_float1_precision.hpp new file mode 100644 index 0000000..6e8dad8 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_float1_precision.hpp @@ -0,0 +1,36 @@ +/// @ref ext_vector_float1_precision +/// @file glm/ext/vector_float1_precision.hpp +/// +/// @defgroup ext_vector_float1_precision GLM_EXT_vector_float1_precision +/// @ingroup ext +/// +/// Exposes highp_vec1, mediump_vec1 and lowp_vec1 types. +/// +/// Include to use the features of this extension. +/// +/// @see ext_vector_float1 extension. + +#pragma once + +#include "../detail/type_vec1.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_vector_float1_precision extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_vector_float1_precision + /// @{ + + /// 1 component vector of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef vec<1, float, highp> highp_vec1; + + /// 1 component vector of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef vec<1, float, mediump> mediump_vec1; + + /// 1 component vector of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef vec<1, float, lowp> lowp_vec1; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_float2.hpp b/MacroLibX/third_party/glm/ext/vector_float2.hpp new file mode 100644 index 0000000..d31545d --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_float2.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/vector_float2.hpp + +#pragma once +#include "../detail/type_vec2.hpp" + +namespace glm +{ + /// @addtogroup core_vector + /// @{ + + /// 2 components vector of single-precision floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + typedef vec<2, float, defaultp> vec2; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_float2_precision.hpp b/MacroLibX/third_party/glm/ext/vector_float2_precision.hpp new file mode 100644 index 0000000..23c0820 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_float2_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/vector_float2_precision.hpp + +#pragma once +#include "../detail/type_vec2.hpp" + +namespace glm +{ + /// @addtogroup core_vector_precision + /// @{ + + /// 2 components vector of high single-qualifier floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<2, float, highp> highp_vec2; + + /// 2 components vector of medium single-qualifier floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<2, float, mediump> mediump_vec2; + + /// 2 components vector of low single-qualifier floating-point numbers. 
+ /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<2, float, lowp> lowp_vec2; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_float3.hpp b/MacroLibX/third_party/glm/ext/vector_float3.hpp new file mode 100644 index 0000000..cd79a62 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_float3.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/vector_float3.hpp + +#pragma once +#include "../detail/type_vec3.hpp" + +namespace glm +{ + /// @addtogroup core_vector + /// @{ + + /// 3 components vector of single-precision floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + typedef vec<3, float, defaultp> vec3; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_float3_precision.hpp b/MacroLibX/third_party/glm/ext/vector_float3_precision.hpp new file mode 100644 index 0000000..be640b5 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_float3_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/vector_float3_precision.hpp + +#pragma once +#include "../detail/type_vec3.hpp" + +namespace glm +{ + /// @addtogroup core_vector_precision + /// @{ + + /// 3 components vector of high single-qualifier floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<3, float, highp> highp_vec3; + + /// 3 components vector of medium single-qualifier floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<3, float, mediump> mediump_vec3; + + /// 3 components vector of low single-qualifier floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<3, float, lowp> lowp_vec3; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_float4.hpp b/MacroLibX/third_party/glm/ext/vector_float4.hpp new file mode 100644 index 0000000..d84adcc --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_float4.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/vector_float4.hpp + +#pragma once +#include "../detail/type_vec4.hpp" + +namespace glm +{ + /// @addtogroup core_vector + /// @{ + + /// 4 components vector of single-precision floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + typedef vec<4, float, defaultp> vec4; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_float4_precision.hpp b/MacroLibX/third_party/glm/ext/vector_float4_precision.hpp new file mode 100644 index 0000000..aede838 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_float4_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/vector_float4_precision.hpp + +#pragma once +#include "../detail/type_vec4.hpp" + +namespace glm +{ + /// @addtogroup core_vector_precision + /// @{ + + /// 4 components vector of high single-qualifier floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<4, float, highp> highp_vec4; + + /// 4 components vector of medium single-qualifier floating-point numbers. 
+ /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<4, float, mediump> mediump_vec4; + + /// 4 components vector of low single-qualifier floating-point numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<4, float, lowp> lowp_vec4; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_int1.hpp b/MacroLibX/third_party/glm/ext/vector_int1.hpp new file mode 100644 index 0000000..dc86038 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_int1.hpp @@ -0,0 +1,32 @@ +/// @ref ext_vector_int1 +/// @file glm/ext/vector_int1.hpp +/// +/// @defgroup ext_vector_int1 GLM_EXT_vector_int1 +/// @ingroup ext +/// +/// Exposes ivec1 vector type. +/// +/// Include to use the features of this extension. +/// +/// @see ext_vector_uint1 extension. +/// @see ext_vector_int1_precision extension. + +#pragma once + +#include "../detail/type_vec1.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_vector_int1 extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_vector_int1 + /// @{ + + /// 1 component vector of signed integer numbers. + typedef vec<1, int, defaultp> ivec1; + + /// @} +}//namespace glm + diff --git a/MacroLibX/third_party/glm/ext/vector_int1_precision.hpp b/MacroLibX/third_party/glm/ext/vector_int1_precision.hpp new file mode 100644 index 0000000..3323954 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_int1_precision.hpp @@ -0,0 +1,34 @@ +/// @ref ext_vector_int1_precision +/// @file glm/ext/vector_int1_precision.hpp +/// +/// @defgroup ext_vector_int1_precision GLM_EXT_vector_int1_precision +/// @ingroup ext +/// +/// Exposes highp_ivec1, mediump_ivec1 and lowp_ivec1 types. +/// +/// Include to use the features of this extension. + +#pragma once + +#include "../detail/type_vec1.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_vector_int1_precision extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_vector_int1_precision + /// @{ + + /// 1 component vector of signed integer values. + typedef vec<1, int, highp> highp_ivec1; + + /// 1 component vector of signed integer values. + typedef vec<1, int, mediump> mediump_ivec1; + + /// 1 component vector of signed integer values. + typedef vec<1, int, lowp> lowp_ivec1; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_int2.hpp b/MacroLibX/third_party/glm/ext/vector_int2.hpp new file mode 100644 index 0000000..aef803e --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_int2.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/vector_int2.hpp + +#pragma once +#include "../detail/type_vec2.hpp" + +namespace glm +{ + /// @addtogroup core_vector + /// @{ + + /// 2 components vector of signed integer numbers. 
+ /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + typedef vec<2, int, defaultp> ivec2; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_int2_precision.hpp b/MacroLibX/third_party/glm/ext/vector_int2_precision.hpp new file mode 100644 index 0000000..97315fc --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_int2_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/vector_int2_precision.hpp + +#pragma once +#include "../detail/type_vec2.hpp" + +namespace glm +{ + /// @addtogroup core_vector_precision + /// @{ + + /// 2 components vector of high qualifier signed integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<2, int, highp> highp_ivec2; + + /// 2 components vector of medium qualifier signed integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<2, int, mediump> mediump_ivec2; + + /// 2 components vector of low qualifier signed integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<2, int, lowp> lowp_ivec2; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_int3.hpp b/MacroLibX/third_party/glm/ext/vector_int3.hpp new file mode 100644 index 0000000..4767e61 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_int3.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/vector_int3.hpp + +#pragma once +#include "../detail/type_vec3.hpp" + +namespace glm +{ + /// @addtogroup core_vector + /// @{ + + /// 3 components vector of signed integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + typedef vec<3, int, defaultp> ivec3; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_int3_precision.hpp b/MacroLibX/third_party/glm/ext/vector_int3_precision.hpp new file mode 100644 index 0000000..2cd3f5f --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_int3_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/vector_int3_precision.hpp + +#pragma once +#include "../detail/type_vec3.hpp" + +namespace glm +{ + /// @addtogroup core_vector_precision + /// @{ + + /// 3 components vector of high qualifier signed integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<3, int, highp> highp_ivec3; + + /// 3 components vector of medium qualifier signed integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<3, int, mediump> mediump_ivec3; + + /// 3 components vector of low qualifier signed integer numbers. 
+ /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<3, int, lowp> lowp_ivec3; + + /// @} +}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/vector_int4.hpp b/MacroLibX/third_party/glm/ext/vector_int4.hpp new file mode 100644 index 0000000..bb23adf --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_int4.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/vector_int4.hpp + +#pragma once +#include "../detail/type_vec4.hpp" + +namespace glm +{ + /// @addtogroup core_vector + /// @{ + + /// 4 components vector of signed integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + typedef vec<4, int, defaultp> ivec4; + + /// @} +}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/vector_int4_precision.hpp b/MacroLibX/third_party/glm/ext/vector_int4_precision.hpp new file mode 100644 index 0000000..4fcd791 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_int4_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/vector_int4_precision.hpp + +#pragma once +#include "../detail/type_vec4.hpp" + +namespace glm +{ + /// @addtogroup core_vector_precision + /// @{ + + /// 4 components vector of high qualifier signed integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<4, int, highp> highp_ivec4; + + /// 4 components vector of medium qualifier signed integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<4, int, mediump> mediump_ivec4; + + /// 4 components vector of low qualifier signed integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<4, int, lowp> lowp_ivec4; + + /// @} +}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/vector_integer.hpp b/MacroLibX/third_party/glm/ext/vector_integer.hpp new file mode 100644 index 0000000..1304dd8 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_integer.hpp @@ -0,0 +1,149 @@ +/// @ref ext_vector_integer +/// @file glm/ext/vector_integer.hpp +/// +/// @see core (dependence) +/// @see ext_vector_integer (dependence) +/// +/// @defgroup ext_vector_integer GLM_EXT_vector_integer +/// @ingroup ext +/// +/// Include to use the features of this extension. + +#pragma once + +// Dependencies +#include "../detail/setup.hpp" +#include "../detail/qualifier.hpp" +#include "../detail/_vectorize.hpp" +#include "../vector_relational.hpp" +#include "../common.hpp" +#include <limits> + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_vector_integer extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_vector_integer + /// @{ + + /// Return true if the value is a power of two number. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Signed or unsigned integer scalar types. + /// @tparam Q Value from qualifier enum + /// + /// @see ext_vector_integer + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, bool, Q> isPowerOfTwo(vec<L, T, Q> const& v); + + /// Return the power of two number which value is just higher the input value, + /// round up to a power of two. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Signed or unsigned integer scalar types. + /// @tparam Q Value from qualifier enum + /// + /// @see ext_vector_integer + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> nextPowerOfTwo(vec<L, T, Q> const& v); + + /// Return the power of two number which value is just lower the input value, + /// round down to a power of two. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Signed or unsigned integer scalar types. + /// @tparam Q Value from qualifier enum + /// + /// @see ext_vector_integer + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> prevPowerOfTwo(vec<L, T, Q> const& v); + + /// Return true if the 'Value' is a multiple of 'Multiple'. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Signed or unsigned integer scalar types. + /// @tparam Q Value from qualifier enum + /// + /// @see ext_vector_integer + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, bool, Q> isMultiple(vec<L, T, Q> const& v, T Multiple); + + /// Return true if the 'Value' is a multiple of 'Multiple'. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Signed or unsigned integer scalar types. + /// @tparam Q Value from qualifier enum + /// + /// @see ext_vector_integer + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, bool, Q> isMultiple(vec<L, T, Q> const& v, vec<L, T, Q> const& Multiple); + + /// Higher multiple number of Source. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Signed or unsigned integer scalar types. + /// @tparam Q Value from qualifier enum + /// + /// @param v Source values to which is applied the function + /// @param Multiple Must be a null or positive value + /// + /// @see ext_vector_integer + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> nextMultiple(vec<L, T, Q> const& v, T Multiple); + + /// Higher multiple number of Source. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Signed or unsigned integer scalar types. + /// @tparam Q Value from qualifier enum + /// + /// @param v Source values to which is applied the function + /// @param Multiple Must be a null or positive value + /// + /// @see ext_vector_integer + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> nextMultiple(vec<L, T, Q> const& v, vec<L, T, Q> const& Multiple); + + /// Lower multiple number of Source. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Signed or unsigned integer scalar types. + /// @tparam Q Value from qualifier enum + /// + /// @param v Source values to which is applied the function + /// @param Multiple Must be a null or positive value + /// + /// @see ext_vector_integer + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> prevMultiple(vec<L, T, Q> const& v, T Multiple); + + /// Lower multiple number of Source. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Signed or unsigned integer scalar types. + /// @tparam Q Value from qualifier enum + /// + /// @param v Source values to which is applied the function + /// @param Multiple Must be a null or positive value + /// + /// @see ext_vector_integer + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> prevMultiple(vec<L, T, Q> const& v, vec<L, T, Q> const& Multiple); + + /// Returns the bit number of the Nth significant bit set to + /// 1 in the binary representation of value. + /// If value bitcount is less than the Nth significant bit, -1 will be returned. + /// + /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. + /// @tparam T Signed or unsigned integer scalar types. + /// + /// @see ext_vector_integer + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, int, Q> findNSB(vec<L, T, Q> const& Source, vec<L, int, Q> SignificantBitCount); + + /// @} +} //namespace glm + +#include "vector_integer.inl"
diff --git a/MacroLibX/third_party/glm/ext/vector_integer.inl b/MacroLibX/third_party/glm/ext/vector_integer.inl new file mode 100644 index 0000000..939ff5e --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_integer.inl @@ -0,0 +1,85 @@ +#include "scalar_integer.hpp" + +namespace glm +{ + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, bool, Q> isPowerOfTwo(vec<L, T, Q> const& Value) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_integer, "'isPowerOfTwo' only accept integer inputs"); + + vec<L, T, Q> const Result(abs(Value)); + return equal(Result & (Result - vec<L, T, Q>(1)), vec<L, T, Q>(0)); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> nextPowerOfTwo(vec<L, T, Q> const& v) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_integer, "'nextPowerOfTwo' only accept integer inputs"); + + return detail::compute_ceilPowerOfTwo<L, T, Q, std::numeric_limits<T>::is_signed>::call(v); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> prevPowerOfTwo(vec<L, T, Q> const& v) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_integer, "'prevPowerOfTwo' only accept integer inputs"); + + return detail::functor1<vec, L, T, T, Q>::call(prevPowerOfTwo, v); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, bool, Q> isMultiple(vec<L, T, Q> const& Value, T Multiple) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_integer, "'isMultiple' only accept integer inputs"); + + return (Value % Multiple) == vec<L, T, Q>(0); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, bool, Q> isMultiple(vec<L, T, Q> const& Value, vec<L, T, Q> const& Multiple) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_integer, "'isMultiple' only accept integer inputs"); + + return (Value % Multiple) == vec<L, T, Q>(0); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> nextMultiple(vec<L, T, Q> const& Source, T Multiple) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_integer, "'nextMultiple' only accept integer inputs"); + + return detail::functor2<vec, L, T, Q>::call(nextMultiple, Source, vec<L, T, Q>(Multiple)); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> nextMultiple(vec<L, T, Q> const& Source, vec<L, T, Q> const& Multiple) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_integer, "'nextMultiple' only accept integer inputs"); + + return detail::functor2<vec, L, T, Q>::call(nextMultiple, Source, Multiple); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> prevMultiple(vec<L, T, Q> const& Source, T Multiple) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_integer, "'prevMultiple' only accept integer inputs"); + + return detail::functor2<vec, L, T, Q>::call(prevMultiple, Source, vec<L, T, Q>(Multiple)); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> prevMultiple(vec<L, T, Q> const& Source, vec<L, T, Q> const& Multiple) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_integer, "'prevMultiple' only accept integer inputs"); + + return detail::functor2<vec, L, T, Q>::call(prevMultiple, Source, Multiple); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, int, Q> findNSB(vec<L, T, Q> const& Source, vec<L, int, Q> SignificantBitCount) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_integer, "'findNSB' only accept integer inputs"); + + return detail::functor2_vec_int<L, T, Q>::call(findNSB, Source, SignificantBitCount); + } +}//namespace glm
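// A minimal usage sketch for GLM_EXT_vector_integer (hypothetical demo
// function and values; not taken from the diff):
#include <glm/glm.hpp>
#include <glm/ext/vector_integer.hpp>

static glm::uvec3 vector_integer_demo()
{
	glm::uvec3 const v(3u, 64u, 100u);
	glm::uvec3 const up = glm::nextPowerOfTwo(v);   // (4, 64, 128)
	glm::uvec3 const down = glm::prevPowerOfTwo(v); // (2, 64, 64)
	// Round each component up to the next multiple of 16: (16, 64, 112).
	glm::uvec3 const aligned = glm::nextMultiple(v, 16u);
	// Component-wise power-of-two test: (false, true, false).
	glm::bvec3 const pot = glm::isPowerOfTwo(v);
	return pot.y ? up : glm::min(down, aligned);
}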
diff --git a/MacroLibX/third_party/glm/ext/vector_relational.hpp b/MacroLibX/third_party/glm/ext/vector_relational.hpp new file mode 100644 index 0000000..1c2367d --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_relational.hpp @@ -0,0 +1,107 @@ +/// @ref ext_vector_relational +/// @file glm/ext/vector_relational.hpp +/// +/// @see core (dependence) +/// @see ext_scalar_integer (dependence) +/// +/// @defgroup ext_vector_relational GLM_EXT_vector_relational +/// @ingroup ext +/// +/// Exposes comparison functions for vector types that take a user defined epsilon values. +/// +/// Include to use the features of this extension. +/// +/// @see core_vector_relational +/// @see ext_scalar_relational +/// @see ext_matrix_relational + +#pragma once + +// Dependencies +#include "../detail/qualifier.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_vector_relational extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_vector_relational + /// @{ + + /// Returns the component-wise comparison of |x - y| < epsilon. + /// True if this expression is satisfied. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point or integer scalar types + /// @tparam Q Value from qualifier enum + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> equal(vec<L, T, Q> const& x, vec<L, T, Q> const& y, T epsilon); + + /// Returns the component-wise comparison of |x - y| < epsilon. + /// True if this expression is satisfied. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point or integer scalar types + /// @tparam Q Value from qualifier enum + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> equal(vec<L, T, Q> const& x, vec<L, T, Q> const& y, vec<L, T, Q> const& epsilon); + + /// Returns the component-wise comparison of |x - y| >= epsilon. + /// True if this expression is not satisfied. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point or integer scalar types + /// @tparam Q Value from qualifier enum + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> notEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y, T epsilon); + + /// Returns the component-wise comparison of |x - y| >= epsilon. + /// True if this expression is not satisfied. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point or integer scalar types + /// @tparam Q Value from qualifier enum + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> notEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y, vec<L, T, Q> const& epsilon); + + /// Returns the component-wise comparison between two vectors in term of ULPs. + /// True if this expression is satisfied. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point + /// @tparam Q Value from qualifier enum + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> equal(vec<L, T, Q> const& x, vec<L, T, Q> const& y, int ULPs); + + /// Returns the component-wise comparison between two vectors in term of ULPs. + /// True if this expression is satisfied. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point + /// @tparam Q Value from qualifier enum + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> equal(vec<L, T, Q> const& x, vec<L, T, Q> const& y, vec<L, int, Q> const& ULPs); + + /// Returns the component-wise comparison between two vectors in term of ULPs. + /// True if this expression is not satisfied. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point + /// @tparam Q Value from qualifier enum + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> notEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y, int ULPs); + + /// Returns the component-wise comparison between two vectors in term of ULPs. + /// True if this expression is not satisfied. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point + /// @tparam Q Value from qualifier enum + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> notEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y, vec<L, int, Q> const& ULPs); + + /// @} +}//namespace glm + +#include "vector_relational.inl"
diff --git a/MacroLibX/third_party/glm/ext/vector_relational.inl b/MacroLibX/third_party/glm/ext/vector_relational.inl new file mode 100644 index 0000000..7a39ab5 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_relational.inl @@ -0,0 +1,75 @@ +#include "../vector_relational.hpp" +#include "../common.hpp" +#include "../detail/qualifier.hpp" +#include "../detail/type_float.hpp" + +namespace glm +{ + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<L, bool, Q> equal(vec<L, T, Q> const& x, vec<L, T, Q> const& y, T Epsilon) + { + return equal(x, y, vec<L, T, Q>(Epsilon)); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<L, bool, Q> equal(vec<L, T, Q> const& x, vec<L, T, Q> const& y, vec<L, T, Q> const& Epsilon) + { + return lessThanEqual(abs(x - y), Epsilon); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<L, bool, Q> notEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y, T Epsilon) + { + return notEqual(x, y, vec<L, T, Q>(Epsilon)); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<L, bool, Q> notEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y, vec<L, T, Q> const& Epsilon) + { + return greaterThan(abs(x - y), Epsilon); + } + + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<L, bool, Q> equal(vec<L, T, Q> const& x, vec<L, T, Q> const& y, int MaxULPs) + { + return equal(x, y, vec<L, int, Q>(MaxULPs)); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<L, bool, Q> equal(vec<L, T, Q> const& x, vec<L, T, Q> const& y, vec<L, int, Q> const& MaxULPs) + { + vec<L, bool, Q> Result(false); + for(length_t i = 0; i < L; ++i) + { + detail::float_t<T> const a(x[i]); + detail::float_t<T> const b(y[i]); + + // Different signs means they do not match. + if(a.negative() != b.negative()) + { + // Check for equality to make sure +0==-0 + Result[i] = a.mantissa() == b.mantissa() && a.exponent() == b.exponent(); + } + else + { + // Find the difference in ULPs. + typename detail::float_t<T>::int_type const DiffULPs = abs(a.i - b.i); + Result[i] = DiffULPs <= MaxULPs[i]; + } + } + return Result; + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<L, bool, Q> notEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y, int MaxULPs) + { + return notEqual(x, y, vec<L, int, Q>(MaxULPs)); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<L, bool, Q> notEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y, vec<L, int, Q> const& MaxULPs) + { + return not_(equal(x, y, MaxULPs)); + } +}//namespace glm
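// A minimal usage sketch for GLM_EXT_vector_relational (hypothetical demo
// function and tolerances; not taken from the diff):
#include <glm/glm.hpp>
#include <glm/ext/vector_relational.hpp>

static bool vector_relational_demo()
{
	glm::vec3 const x(0.1f + 0.2f, 1.0f, 2.0f);
	glm::vec3 const y(0.3f, 1.0f, 2.0f);
	// Component-wise epsilon comparison, folded into one bool with glm::all().
	bool const close = glm::all(glm::equal(x, y, 0.0001f));
	// ULP-based comparison: components may differ by at most 4 representable values.
	bool const tight = glm::all(glm::equal(x, y, 4));
	return close && tight;
}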
diff --git a/MacroLibX/third_party/glm/ext/vector_uint1.hpp b/MacroLibX/third_party/glm/ext/vector_uint1.hpp new file mode 100644 index 0000000..eb8a704 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_uint1.hpp @@ -0,0 +1,32 @@ +/// @ref ext_vector_uint1 +/// @file glm/ext/vector_uint1.hpp +/// +/// @defgroup ext_vector_uint1 GLM_EXT_vector_uint1 +/// @ingroup ext +/// +/// Exposes uvec1 vector type. +/// +/// Include to use the features of this extension. +/// +/// @see ext_vector_int1 extension. +/// @see ext_vector_uint1_precision extension. + +#pragma once + +#include "../detail/type_vec1.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_vector_uint1 extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_vector_uint1 + /// @{ + + /// 1 component vector of unsigned integer numbers.
+ typedef vec<1, unsigned int, defaultp> uvec1; + + /// @} +}//namespace glm + diff --git a/MacroLibX/third_party/glm/ext/vector_uint1_precision.hpp b/MacroLibX/third_party/glm/ext/vector_uint1_precision.hpp new file mode 100644 index 0000000..30daa5b --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_uint1_precision.hpp @@ -0,0 +1,40 @@ +/// @ref ext_vector_uint1_precision +/// @file glm/ext/vector_uint1_precision.hpp +/// +/// @defgroup ext_vector_uint1_precision GLM_EXT_vector_uint1_precision +/// @ingroup ext +/// +/// Exposes highp_uvec1, mediump_uvec1 and lowp_uvec1 types. +/// +/// Include to use the features of this extension. + +#pragma once + +#include "../detail/type_vec1.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_vector_uint1_precision extension included") +#endif + +namespace glm +{ + /// @addtogroup ext_vector_uint1_precision + /// @{ + + /// 1 component vector of unsigned integer values. + /// + /// @see ext_vector_uint1_precision + typedef vec<1, unsigned int, highp> highp_uvec1; + + /// 1 component vector of unsigned integer values. + /// + /// @see ext_vector_uint1_precision + typedef vec<1, unsigned int, mediump> mediump_uvec1; + + /// 1 component vector of unsigned integer values. + /// + /// @see ext_vector_uint1_precision + typedef vec<1, unsigned int, lowp> lowp_uvec1; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_uint2.hpp b/MacroLibX/third_party/glm/ext/vector_uint2.hpp new file mode 100644 index 0000000..03c00f5 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_uint2.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/vector_uint2.hpp + +#pragma once +#include "../detail/type_vec2.hpp" + +namespace glm +{ + /// @addtogroup core_vector + /// @{ + + /// 2 components vector of unsigned integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + typedef vec<2, unsigned int, defaultp> uvec2; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_uint2_precision.hpp b/MacroLibX/third_party/glm/ext/vector_uint2_precision.hpp new file mode 100644 index 0000000..2ba7b0d --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_uint2_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/vector_uint2_precision.hpp + +#pragma once +#include "../detail/type_vec2.hpp" + +namespace glm +{ + /// @addtogroup core_vector_precision + /// @{ + + /// 2 components vector of high qualifier unsigned integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<2, unsigned int, highp> highp_uvec2; + + /// 2 components vector of medium qualifier unsigned integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<2, unsigned int, mediump> mediump_uvec2; + + /// 2 components vector of low qualifier unsigned integer numbers. 
+ /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<2, unsigned int, lowp> lowp_uvec2; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_uint3.hpp b/MacroLibX/third_party/glm/ext/vector_uint3.hpp new file mode 100644 index 0000000..f5b41c4 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_uint3.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/vector_uint3.hpp + +#pragma once +#include "../detail/type_vec3.hpp" + +namespace glm +{ + /// @addtogroup core_vector + /// @{ + + /// 3 components vector of unsigned integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + typedef vec<3, unsigned int, defaultp> uvec3; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_uint3_precision.hpp b/MacroLibX/third_party/glm/ext/vector_uint3_precision.hpp new file mode 100644 index 0000000..125191c --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_uint3_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/vector_uint3_precision.hpp + +#pragma once +#include "../detail/type_vec3.hpp" + +namespace glm +{ + /// @addtogroup core_vector_precision + /// @{ + + /// 3 components vector of high qualifier unsigned integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<3, unsigned int, highp> highp_uvec3; + + /// 3 components vector of medium qualifier unsigned integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<3, unsigned int, mediump> mediump_uvec3; + + /// 3 components vector of low qualifier unsigned integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<3, unsigned int, lowp> lowp_uvec3; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_uint4.hpp b/MacroLibX/third_party/glm/ext/vector_uint4.hpp new file mode 100644 index 0000000..32ced58 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_uint4.hpp @@ -0,0 +1,18 @@ +/// @ref core +/// @file glm/ext/vector_uint4.hpp + +#pragma once +#include "../detail/type_vec4.hpp" + +namespace glm +{ + /// @addtogroup core_vector + /// @{ + + /// 4 components vector of unsigned integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + typedef vec<4, unsigned int, defaultp> uvec4; + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/ext/vector_uint4_precision.hpp b/MacroLibX/third_party/glm/ext/vector_uint4_precision.hpp new file mode 100644 index 0000000..cf4097c --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_uint4_precision.hpp @@ -0,0 +1,31 @@ +/// @ref core +/// @file glm/ext/vector_uint4_precision.hpp + +#pragma once +#include "../detail/type_vec4.hpp" + +namespace glm +{ + /// @addtogroup core_vector_precision + /// @{ + + /// 4 components vector of high qualifier unsigned integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<4, unsigned int, highp> highp_uvec4; + + /// 4 components vector of medium qualifier unsigned integer numbers. 
+ /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<4, unsigned int, mediump> mediump_uvec4; + + /// 4 components vector of low qualifier unsigned integer numbers. + /// + /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors + /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier + typedef vec<4, unsigned int, lowp> lowp_uvec4; + + /// @} +}//namespace glm
diff --git a/MacroLibX/third_party/glm/ext/vector_ulp.hpp b/MacroLibX/third_party/glm/ext/vector_ulp.hpp new file mode 100644 index 0000000..6210396 --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_ulp.hpp @@ -0,0 +1,109 @@ +/// @ref ext_vector_ulp +/// @file glm/ext/vector_ulp.hpp +/// +/// @defgroup ext_vector_ulp GLM_EXT_vector_ulp +/// @ingroup ext +/// +/// Allow the measurement of the accuracy of a function against a reference +/// implementation. This extension works on floating-point data and provide results +/// in ULP. +/// +/// Include to use the features of this extension. +/// +/// @see ext_scalar_ulp +/// @see ext_scalar_relational +/// @see ext_vector_relational + +#pragma once + +// Dependencies +#include "../ext/scalar_ulp.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_EXT_vector_ulp extension included") +#endif + +namespace glm +{ + /// Return the next ULP value(s) after the input value(s). + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point + /// @tparam Q Value from qualifier enum + /// + /// @see ext_scalar_ulp + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> nextFloat(vec<L, T, Q> const& x); + + /// Return the value(s) ULP distance after the input value(s). + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point + /// @tparam Q Value from qualifier enum + /// + /// @see ext_scalar_ulp + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> nextFloat(vec<L, T, Q> const& x, int ULPs); + + /// Return the value(s) ULP distance after the input value(s). + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point + /// @tparam Q Value from qualifier enum + /// + /// @see ext_scalar_ulp + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> nextFloat(vec<L, T, Q> const& x, vec<L, int, Q> const& ULPs); + + /// Return the previous ULP value(s) before the input value(s). + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point + /// @tparam Q Value from qualifier enum + /// + /// @see ext_scalar_ulp + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> prevFloat(vec<L, T, Q> const& x); + + /// Return the value(s) ULP distance before the input value(s). + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point + /// @tparam Q Value from qualifier enum + /// + /// @see ext_scalar_ulp + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> prevFloat(vec<L, T, Q> const& x, int ULPs); + + /// Return the value(s) ULP distance before the input value(s). + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point + /// @tparam Q Value from qualifier enum + /// + /// @see ext_scalar_ulp + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> prevFloat(vec<L, T, Q> const& x, vec<L, int, Q> const& ULPs); + + /// Return the distance in the number of ULP between 2 single-precision floating-point scalars. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam Q Value from qualifier enum + /// + /// @see ext_scalar_ulp + template<length_t L, qualifier Q> + GLM_FUNC_DECL vec<L, int, Q> floatDistance(vec<L, float, Q> const& x, vec<L, float, Q> const& y); + + /// Return the distance in the number of ULP between 2 double-precision floating-point scalars. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam Q Value from qualifier enum + /// + /// @see ext_scalar_ulp + template<length_t L, qualifier Q> + GLM_FUNC_DECL vec<L, int64, Q> floatDistance(vec<L, double, Q> const& x, vec<L, double, Q> const& y); + + /// @} +}//namespace glm + +#include "vector_ulp.inl"
diff --git a/MacroLibX/third_party/glm/ext/vector_ulp.inl b/MacroLibX/third_party/glm/ext/vector_ulp.inl new file mode 100644 index 0000000..91565ce --- /dev/null +++ b/MacroLibX/third_party/glm/ext/vector_ulp.inl @@ -0,0 +1,74 @@ +namespace glm +{ + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> nextFloat(vec<L, T, Q> const& x) + { + vec<L, T, Q> Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = nextFloat(x[i]); + return Result; + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> nextFloat(vec<L, T, Q> const& x, int ULPs) + { + vec<L, T, Q> Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = nextFloat(x[i], ULPs); + return Result; + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> nextFloat(vec<L, T, Q> const& x, vec<L, int, Q> const& ULPs) + { + vec<L, T, Q> Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = nextFloat(x[i], ULPs[i]); + return Result; + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> prevFloat(vec<L, T, Q> const& x) + { + vec<L, T, Q> Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = prevFloat(x[i]); + return Result; + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> prevFloat(vec<L, T, Q> const& x, int ULPs) + { + vec<L, T, Q> Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = prevFloat(x[i], ULPs); + return Result; + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> prevFloat(vec<L, T, Q> const& x, vec<L, int, Q> const& ULPs) + { + vec<L, T, Q> Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = prevFloat(x[i], ULPs[i]); + return Result; + } + + template<length_t L, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, int, Q> floatDistance(vec<L, float, Q> const& x, vec<L, float, Q> const& y) + { + vec<L, int, Q> Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = floatDistance(x[i], y[i]); + return Result; + } + + template<length_t L, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, int64, Q> floatDistance(vec<L, double, Q> const& x, vec<L, double, Q> const& y) + { + vec<L, int64, Q> Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = floatDistance(x[i], y[i]); + return Result; + } +}//namespace glm
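// A minimal usage sketch for GLM_EXT_vector_ulp (hypothetical demo function;
// not taken from the diff):
#include <glm/glm.hpp>
#include <glm/ext/vector_ulp.hpp>

static glm::ivec2 vector_ulp_demo()
{
	glm::vec2 const x(1.0f, 2.0f);
	// Step each component to the next representable float.
	glm::vec2 const up = glm::nextFloat(x);
	// Adjacent floats are exactly one ULP apart, so this returns (1, 1).
	return glm::floatDistance(x, up);
}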
diff --git a/MacroLibX/third_party/glm/ext/vector_ulp.inl b/MacroLibX/third_party/glm/ext/vector_ulp.inl
new file mode 100644
index 0000000..91565ce
--- /dev/null
+++ b/MacroLibX/third_party/glm/ext/vector_ulp.inl
@@ -0,0 +1,74 @@
+namespace glm
+{
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> nextFloat(vec<L, T, Q> const& x)
+	{
+		vec<L, T, Q> Result;
+		for(length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = nextFloat(x[i]);
+		return Result;
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> nextFloat(vec<L, T, Q> const& x, int ULPs)
+	{
+		vec<L, T, Q> Result;
+		for(length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = nextFloat(x[i], ULPs);
+		return Result;
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> nextFloat(vec<L, T, Q> const& x, vec<L, int, Q> const& ULPs)
+	{
+		vec<L, T, Q> Result;
+		for(length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = nextFloat(x[i], ULPs[i]);
+		return Result;
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> prevFloat(vec<L, T, Q> const& x)
+	{
+		vec<L, T, Q> Result;
+		for(length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = prevFloat(x[i]);
+		return Result;
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> prevFloat(vec<L, T, Q> const& x, int ULPs)
+	{
+		vec<L, T, Q> Result;
+		for(length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = prevFloat(x[i], ULPs);
+		return Result;
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> prevFloat(vec<L, T, Q> const& x, vec<L, int, Q> const& ULPs)
+	{
+		vec<L, T, Q> Result;
+		for(length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = prevFloat(x[i], ULPs[i]);
+		return Result;
+	}
+
+	template<length_t L, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, int, Q> floatDistance(vec<L, float, Q> const& x, vec<L, float, Q> const& y)
+	{
+		vec<L, int, Q> Result;
+		for(length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = floatDistance(x[i], y[i]);
+		return Result;
+	}
+
+	template<length_t L, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, int64, Q> floatDistance(vec<L, double, Q> const& x, vec<L, double, Q> const& y)
+	{
+		vec<L, int64, Q> Result;
+		for(length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = floatDistance(x[i], y[i]);
+		return Result;
+	}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/fwd.hpp b/MacroLibX/third_party/glm/fwd.hpp
new file mode 100644
index 0000000..474d44f
--- /dev/null
+++ b/MacroLibX/third_party/glm/fwd.hpp
@@ -0,0 +1,818 @@
+#pragma once
+
+#include "detail/qualifier.hpp"
+
+namespace glm
+{
+#if GLM_HAS_EXTENDED_INTEGER_TYPE
+	typedef std::int8_t int8;
+	typedef std::int16_t int16;
+	typedef std::int32_t int32;
+	typedef std::int64_t int64;
+
+	typedef std::uint8_t uint8;
+	typedef std::uint16_t uint16;
+	typedef std::uint32_t uint32;
+	typedef std::uint64_t uint64;
+#else
+	typedef signed char int8;
+	typedef signed short int16;
+	typedef signed int int32;
+	typedef detail::int64 int64;
+
+	typedef unsigned char uint8;
+	typedef unsigned short uint16;
+	typedef unsigned int uint32;
+	typedef detail::uint64 uint64;
+#endif
+
+	// Scalar int
+
+	typedef int8 lowp_i8;
+	typedef int8 mediump_i8;
+	typedef int8 highp_i8;
+	typedef int8 i8;
+
+	typedef int8 lowp_int8;
+	typedef int8 mediump_int8;
+	typedef int8 
highp_int8; + + typedef int8 lowp_int8_t; + typedef int8 mediump_int8_t; + typedef int8 highp_int8_t; + typedef int8 int8_t; + + typedef int16 lowp_i16; + typedef int16 mediump_i16; + typedef int16 highp_i16; + typedef int16 i16; + + typedef int16 lowp_int16; + typedef int16 mediump_int16; + typedef int16 highp_int16; + + typedef int16 lowp_int16_t; + typedef int16 mediump_int16_t; + typedef int16 highp_int16_t; + typedef int16 int16_t; + + typedef int32 lowp_i32; + typedef int32 mediump_i32; + typedef int32 highp_i32; + typedef int32 i32; + + typedef int32 lowp_int32; + typedef int32 mediump_int32; + typedef int32 highp_int32; + + typedef int32 lowp_int32_t; + typedef int32 mediump_int32_t; + typedef int32 highp_int32_t; + typedef int32 int32_t; + + typedef int64 lowp_i64; + typedef int64 mediump_i64; + typedef int64 highp_i64; + typedef int64 i64; + + typedef int64 lowp_int64; + typedef int64 mediump_int64; + typedef int64 highp_int64; + + typedef int64 lowp_int64_t; + typedef int64 mediump_int64_t; + typedef int64 highp_int64_t; + typedef int64 int64_t; + + // Scalar uint + + typedef uint8 lowp_u8; + typedef uint8 mediump_u8; + typedef uint8 highp_u8; + typedef uint8 u8; + + typedef uint8 lowp_uint8; + typedef uint8 mediump_uint8; + typedef uint8 highp_uint8; + + typedef uint8 lowp_uint8_t; + typedef uint8 mediump_uint8_t; + typedef uint8 highp_uint8_t; + typedef uint8 uint8_t; + + typedef uint16 lowp_u16; + typedef uint16 mediump_u16; + typedef uint16 highp_u16; + typedef uint16 u16; + + typedef uint16 lowp_uint16; + typedef uint16 mediump_uint16; + typedef uint16 highp_uint16; + + typedef uint16 lowp_uint16_t; + typedef uint16 mediump_uint16_t; + typedef uint16 highp_uint16_t; + typedef uint16 uint16_t; + + typedef uint32 lowp_u32; + typedef uint32 mediump_u32; + typedef uint32 highp_u32; + typedef uint32 u32; + + typedef uint32 lowp_uint32; + typedef uint32 mediump_uint32; + typedef uint32 highp_uint32; + + typedef uint32 lowp_uint32_t; + typedef uint32 mediump_uint32_t; + typedef uint32 highp_uint32_t; + typedef uint32 uint32_t; + + typedef uint64 lowp_u64; + typedef uint64 mediump_u64; + typedef uint64 highp_u64; + typedef uint64 u64; + + typedef uint64 lowp_uint64; + typedef uint64 mediump_uint64; + typedef uint64 highp_uint64; + + typedef uint64 lowp_uint64_t; + typedef uint64 mediump_uint64_t; + typedef uint64 highp_uint64_t; + typedef uint64 uint64_t; + + // Scalar float + + typedef float lowp_f32; + typedef float mediump_f32; + typedef float highp_f32; + typedef float f32; + + typedef float lowp_float32; + typedef float mediump_float32; + typedef float highp_float32; + typedef float float32; + + typedef float lowp_float32_t; + typedef float mediump_float32_t; + typedef float highp_float32_t; + typedef float float32_t; + + + typedef double lowp_f64; + typedef double mediump_f64; + typedef double highp_f64; + typedef double f64; + + typedef double lowp_float64; + typedef double mediump_float64; + typedef double highp_float64; + typedef double float64; + + typedef double lowp_float64_t; + typedef double mediump_float64_t; + typedef double highp_float64_t; + typedef double float64_t; + + // Vector bool + + typedef vec<1, bool, lowp> lowp_bvec1; + typedef vec<2, bool, lowp> lowp_bvec2; + typedef vec<3, bool, lowp> lowp_bvec3; + typedef vec<4, bool, lowp> lowp_bvec4; + + typedef vec<1, bool, mediump> mediump_bvec1; + typedef vec<2, bool, mediump> mediump_bvec2; + typedef vec<3, bool, mediump> mediump_bvec3; + typedef vec<4, bool, mediump> mediump_bvec4; + + typedef vec<1, bool, 
highp> highp_bvec1; + typedef vec<2, bool, highp> highp_bvec2; + typedef vec<3, bool, highp> highp_bvec3; + typedef vec<4, bool, highp> highp_bvec4; + + typedef vec<1, bool, defaultp> bvec1; + typedef vec<2, bool, defaultp> bvec2; + typedef vec<3, bool, defaultp> bvec3; + typedef vec<4, bool, defaultp> bvec4; + + // Vector int + + typedef vec<1, i32, lowp> lowp_ivec1; + typedef vec<2, i32, lowp> lowp_ivec2; + typedef vec<3, i32, lowp> lowp_ivec3; + typedef vec<4, i32, lowp> lowp_ivec4; + + typedef vec<1, i32, mediump> mediump_ivec1; + typedef vec<2, i32, mediump> mediump_ivec2; + typedef vec<3, i32, mediump> mediump_ivec3; + typedef vec<4, i32, mediump> mediump_ivec4; + + typedef vec<1, i32, highp> highp_ivec1; + typedef vec<2, i32, highp> highp_ivec2; + typedef vec<3, i32, highp> highp_ivec3; + typedef vec<4, i32, highp> highp_ivec4; + + typedef vec<1, i32, defaultp> ivec1; + typedef vec<2, i32, defaultp> ivec2; + typedef vec<3, i32, defaultp> ivec3; + typedef vec<4, i32, defaultp> ivec4; + + typedef vec<1, i8, lowp> lowp_i8vec1; + typedef vec<2, i8, lowp> lowp_i8vec2; + typedef vec<3, i8, lowp> lowp_i8vec3; + typedef vec<4, i8, lowp> lowp_i8vec4; + + typedef vec<1, i8, mediump> mediump_i8vec1; + typedef vec<2, i8, mediump> mediump_i8vec2; + typedef vec<3, i8, mediump> mediump_i8vec3; + typedef vec<4, i8, mediump> mediump_i8vec4; + + typedef vec<1, i8, highp> highp_i8vec1; + typedef vec<2, i8, highp> highp_i8vec2; + typedef vec<3, i8, highp> highp_i8vec3; + typedef vec<4, i8, highp> highp_i8vec4; + + typedef vec<1, i8, defaultp> i8vec1; + typedef vec<2, i8, defaultp> i8vec2; + typedef vec<3, i8, defaultp> i8vec3; + typedef vec<4, i8, defaultp> i8vec4; + + typedef vec<1, i16, lowp> lowp_i16vec1; + typedef vec<2, i16, lowp> lowp_i16vec2; + typedef vec<3, i16, lowp> lowp_i16vec3; + typedef vec<4, i16, lowp> lowp_i16vec4; + + typedef vec<1, i16, mediump> mediump_i16vec1; + typedef vec<2, i16, mediump> mediump_i16vec2; + typedef vec<3, i16, mediump> mediump_i16vec3; + typedef vec<4, i16, mediump> mediump_i16vec4; + + typedef vec<1, i16, highp> highp_i16vec1; + typedef vec<2, i16, highp> highp_i16vec2; + typedef vec<3, i16, highp> highp_i16vec3; + typedef vec<4, i16, highp> highp_i16vec4; + + typedef vec<1, i16, defaultp> i16vec1; + typedef vec<2, i16, defaultp> i16vec2; + typedef vec<3, i16, defaultp> i16vec3; + typedef vec<4, i16, defaultp> i16vec4; + + typedef vec<1, i32, lowp> lowp_i32vec1; + typedef vec<2, i32, lowp> lowp_i32vec2; + typedef vec<3, i32, lowp> lowp_i32vec3; + typedef vec<4, i32, lowp> lowp_i32vec4; + + typedef vec<1, i32, mediump> mediump_i32vec1; + typedef vec<2, i32, mediump> mediump_i32vec2; + typedef vec<3, i32, mediump> mediump_i32vec3; + typedef vec<4, i32, mediump> mediump_i32vec4; + + typedef vec<1, i32, highp> highp_i32vec1; + typedef vec<2, i32, highp> highp_i32vec2; + typedef vec<3, i32, highp> highp_i32vec3; + typedef vec<4, i32, highp> highp_i32vec4; + + typedef vec<1, i32, defaultp> i32vec1; + typedef vec<2, i32, defaultp> i32vec2; + typedef vec<3, i32, defaultp> i32vec3; + typedef vec<4, i32, defaultp> i32vec4; + + typedef vec<1, i64, lowp> lowp_i64vec1; + typedef vec<2, i64, lowp> lowp_i64vec2; + typedef vec<3, i64, lowp> lowp_i64vec3; + typedef vec<4, i64, lowp> lowp_i64vec4; + + typedef vec<1, i64, mediump> mediump_i64vec1; + typedef vec<2, i64, mediump> mediump_i64vec2; + typedef vec<3, i64, mediump> mediump_i64vec3; + typedef vec<4, i64, mediump> mediump_i64vec4; + + typedef vec<1, i64, highp> highp_i64vec1; + typedef vec<2, i64, highp> highp_i64vec2; + 
typedef vec<3, i64, highp> highp_i64vec3; + typedef vec<4, i64, highp> highp_i64vec4; + + typedef vec<1, i64, defaultp> i64vec1; + typedef vec<2, i64, defaultp> i64vec2; + typedef vec<3, i64, defaultp> i64vec3; + typedef vec<4, i64, defaultp> i64vec4; + + // Vector uint + + typedef vec<1, u32, lowp> lowp_uvec1; + typedef vec<2, u32, lowp> lowp_uvec2; + typedef vec<3, u32, lowp> lowp_uvec3; + typedef vec<4, u32, lowp> lowp_uvec4; + + typedef vec<1, u32, mediump> mediump_uvec1; + typedef vec<2, u32, mediump> mediump_uvec2; + typedef vec<3, u32, mediump> mediump_uvec3; + typedef vec<4, u32, mediump> mediump_uvec4; + + typedef vec<1, u32, highp> highp_uvec1; + typedef vec<2, u32, highp> highp_uvec2; + typedef vec<3, u32, highp> highp_uvec3; + typedef vec<4, u32, highp> highp_uvec4; + + typedef vec<1, u32, defaultp> uvec1; + typedef vec<2, u32, defaultp> uvec2; + typedef vec<3, u32, defaultp> uvec3; + typedef vec<4, u32, defaultp> uvec4; + + typedef vec<1, u8, lowp> lowp_u8vec1; + typedef vec<2, u8, lowp> lowp_u8vec2; + typedef vec<3, u8, lowp> lowp_u8vec3; + typedef vec<4, u8, lowp> lowp_u8vec4; + + typedef vec<1, u8, mediump> mediump_u8vec1; + typedef vec<2, u8, mediump> mediump_u8vec2; + typedef vec<3, u8, mediump> mediump_u8vec3; + typedef vec<4, u8, mediump> mediump_u8vec4; + + typedef vec<1, u8, highp> highp_u8vec1; + typedef vec<2, u8, highp> highp_u8vec2; + typedef vec<3, u8, highp> highp_u8vec3; + typedef vec<4, u8, highp> highp_u8vec4; + + typedef vec<1, u8, defaultp> u8vec1; + typedef vec<2, u8, defaultp> u8vec2; + typedef vec<3, u8, defaultp> u8vec3; + typedef vec<4, u8, defaultp> u8vec4; + + typedef vec<1, u16, lowp> lowp_u16vec1; + typedef vec<2, u16, lowp> lowp_u16vec2; + typedef vec<3, u16, lowp> lowp_u16vec3; + typedef vec<4, u16, lowp> lowp_u16vec4; + + typedef vec<1, u16, mediump> mediump_u16vec1; + typedef vec<2, u16, mediump> mediump_u16vec2; + typedef vec<3, u16, mediump> mediump_u16vec3; + typedef vec<4, u16, mediump> mediump_u16vec4; + + typedef vec<1, u16, highp> highp_u16vec1; + typedef vec<2, u16, highp> highp_u16vec2; + typedef vec<3, u16, highp> highp_u16vec3; + typedef vec<4, u16, highp> highp_u16vec4; + + typedef vec<1, u16, defaultp> u16vec1; + typedef vec<2, u16, defaultp> u16vec2; + typedef vec<3, u16, defaultp> u16vec3; + typedef vec<4, u16, defaultp> u16vec4; + + typedef vec<1, u32, lowp> lowp_u32vec1; + typedef vec<2, u32, lowp> lowp_u32vec2; + typedef vec<3, u32, lowp> lowp_u32vec3; + typedef vec<4, u32, lowp> lowp_u32vec4; + + typedef vec<1, u32, mediump> mediump_u32vec1; + typedef vec<2, u32, mediump> mediump_u32vec2; + typedef vec<3, u32, mediump> mediump_u32vec3; + typedef vec<4, u32, mediump> mediump_u32vec4; + + typedef vec<1, u32, highp> highp_u32vec1; + typedef vec<2, u32, highp> highp_u32vec2; + typedef vec<3, u32, highp> highp_u32vec3; + typedef vec<4, u32, highp> highp_u32vec4; + + typedef vec<1, u32, defaultp> u32vec1; + typedef vec<2, u32, defaultp> u32vec2; + typedef vec<3, u32, defaultp> u32vec3; + typedef vec<4, u32, defaultp> u32vec4; + + typedef vec<1, u64, lowp> lowp_u64vec1; + typedef vec<2, u64, lowp> lowp_u64vec2; + typedef vec<3, u64, lowp> lowp_u64vec3; + typedef vec<4, u64, lowp> lowp_u64vec4; + + typedef vec<1, u64, mediump> mediump_u64vec1; + typedef vec<2, u64, mediump> mediump_u64vec2; + typedef vec<3, u64, mediump> mediump_u64vec3; + typedef vec<4, u64, mediump> mediump_u64vec4; + + typedef vec<1, u64, highp> highp_u64vec1; + typedef vec<2, u64, highp> highp_u64vec2; + typedef vec<3, u64, highp> highp_u64vec3; + typedef vec<4, 
u64, highp> highp_u64vec4; + + typedef vec<1, u64, defaultp> u64vec1; + typedef vec<2, u64, defaultp> u64vec2; + typedef vec<3, u64, defaultp> u64vec3; + typedef vec<4, u64, defaultp> u64vec4; + + // Vector float + + typedef vec<1, float, lowp> lowp_vec1; + typedef vec<2, float, lowp> lowp_vec2; + typedef vec<3, float, lowp> lowp_vec3; + typedef vec<4, float, lowp> lowp_vec4; + + typedef vec<1, float, mediump> mediump_vec1; + typedef vec<2, float, mediump> mediump_vec2; + typedef vec<3, float, mediump> mediump_vec3; + typedef vec<4, float, mediump> mediump_vec4; + + typedef vec<1, float, highp> highp_vec1; + typedef vec<2, float, highp> highp_vec2; + typedef vec<3, float, highp> highp_vec3; + typedef vec<4, float, highp> highp_vec4; + + typedef vec<1, float, defaultp> vec1; + typedef vec<2, float, defaultp> vec2; + typedef vec<3, float, defaultp> vec3; + typedef vec<4, float, defaultp> vec4; + + typedef vec<1, float, lowp> lowp_fvec1; + typedef vec<2, float, lowp> lowp_fvec2; + typedef vec<3, float, lowp> lowp_fvec3; + typedef vec<4, float, lowp> lowp_fvec4; + + typedef vec<1, float, mediump> mediump_fvec1; + typedef vec<2, float, mediump> mediump_fvec2; + typedef vec<3, float, mediump> mediump_fvec3; + typedef vec<4, float, mediump> mediump_fvec4; + + typedef vec<1, float, highp> highp_fvec1; + typedef vec<2, float, highp> highp_fvec2; + typedef vec<3, float, highp> highp_fvec3; + typedef vec<4, float, highp> highp_fvec4; + + typedef vec<1, f32, defaultp> fvec1; + typedef vec<2, f32, defaultp> fvec2; + typedef vec<3, f32, defaultp> fvec3; + typedef vec<4, f32, defaultp> fvec4; + + typedef vec<1, f32, lowp> lowp_f32vec1; + typedef vec<2, f32, lowp> lowp_f32vec2; + typedef vec<3, f32, lowp> lowp_f32vec3; + typedef vec<4, f32, lowp> lowp_f32vec4; + + typedef vec<1, f32, mediump> mediump_f32vec1; + typedef vec<2, f32, mediump> mediump_f32vec2; + typedef vec<3, f32, mediump> mediump_f32vec3; + typedef vec<4, f32, mediump> mediump_f32vec4; + + typedef vec<1, f32, highp> highp_f32vec1; + typedef vec<2, f32, highp> highp_f32vec2; + typedef vec<3, f32, highp> highp_f32vec3; + typedef vec<4, f32, highp> highp_f32vec4; + + typedef vec<1, f32, defaultp> f32vec1; + typedef vec<2, f32, defaultp> f32vec2; + typedef vec<3, f32, defaultp> f32vec3; + typedef vec<4, f32, defaultp> f32vec4; + + typedef vec<1, f64, lowp> lowp_dvec1; + typedef vec<2, f64, lowp> lowp_dvec2; + typedef vec<3, f64, lowp> lowp_dvec3; + typedef vec<4, f64, lowp> lowp_dvec4; + + typedef vec<1, f64, mediump> mediump_dvec1; + typedef vec<2, f64, mediump> mediump_dvec2; + typedef vec<3, f64, mediump> mediump_dvec3; + typedef vec<4, f64, mediump> mediump_dvec4; + + typedef vec<1, f64, highp> highp_dvec1; + typedef vec<2, f64, highp> highp_dvec2; + typedef vec<3, f64, highp> highp_dvec3; + typedef vec<4, f64, highp> highp_dvec4; + + typedef vec<1, f64, defaultp> dvec1; + typedef vec<2, f64, defaultp> dvec2; + typedef vec<3, f64, defaultp> dvec3; + typedef vec<4, f64, defaultp> dvec4; + + typedef vec<1, f64, lowp> lowp_f64vec1; + typedef vec<2, f64, lowp> lowp_f64vec2; + typedef vec<3, f64, lowp> lowp_f64vec3; + typedef vec<4, f64, lowp> lowp_f64vec4; + + typedef vec<1, f64, mediump> mediump_f64vec1; + typedef vec<2, f64, mediump> mediump_f64vec2; + typedef vec<3, f64, mediump> mediump_f64vec3; + typedef vec<4, f64, mediump> mediump_f64vec4; + + typedef vec<1, f64, highp> highp_f64vec1; + typedef vec<2, f64, highp> highp_f64vec2; + typedef vec<3, f64, highp> highp_f64vec3; + typedef vec<4, f64, highp> highp_f64vec4; + + typedef vec<1, f64, 
defaultp> f64vec1; + typedef vec<2, f64, defaultp> f64vec2; + typedef vec<3, f64, defaultp> f64vec3; + typedef vec<4, f64, defaultp> f64vec4; + + // Matrix NxN + + typedef mat<2, 2, f32, lowp> lowp_mat2; + typedef mat<3, 3, f32, lowp> lowp_mat3; + typedef mat<4, 4, f32, lowp> lowp_mat4; + + typedef mat<2, 2, f32, mediump> mediump_mat2; + typedef mat<3, 3, f32, mediump> mediump_mat3; + typedef mat<4, 4, f32, mediump> mediump_mat4; + + typedef mat<2, 2, f32, highp> highp_mat2; + typedef mat<3, 3, f32, highp> highp_mat3; + typedef mat<4, 4, f32, highp> highp_mat4; + + typedef mat<2, 2, f32, defaultp> mat2; + typedef mat<3, 3, f32, defaultp> mat3; + typedef mat<4, 4, f32, defaultp> mat4; + + typedef mat<2, 2, f32, lowp> lowp_fmat2; + typedef mat<3, 3, f32, lowp> lowp_fmat3; + typedef mat<4, 4, f32, lowp> lowp_fmat4; + + typedef mat<2, 2, f32, mediump> mediump_fmat2; + typedef mat<3, 3, f32, mediump> mediump_fmat3; + typedef mat<4, 4, f32, mediump> mediump_fmat4; + + typedef mat<2, 2, f32, highp> highp_fmat2; + typedef mat<3, 3, f32, highp> highp_fmat3; + typedef mat<4, 4, f32, highp> highp_fmat4; + + typedef mat<2, 2, f32, defaultp> fmat2; + typedef mat<3, 3, f32, defaultp> fmat3; + typedef mat<4, 4, f32, defaultp> fmat4; + + typedef mat<2, 2, f32, lowp> lowp_f32mat2; + typedef mat<3, 3, f32, lowp> lowp_f32mat3; + typedef mat<4, 4, f32, lowp> lowp_f32mat4; + + typedef mat<2, 2, f32, mediump> mediump_f32mat2; + typedef mat<3, 3, f32, mediump> mediump_f32mat3; + typedef mat<4, 4, f32, mediump> mediump_f32mat4; + + typedef mat<2, 2, f32, highp> highp_f32mat2; + typedef mat<3, 3, f32, highp> highp_f32mat3; + typedef mat<4, 4, f32, highp> highp_f32mat4; + + typedef mat<2, 2, f32, defaultp> f32mat2; + typedef mat<3, 3, f32, defaultp> f32mat3; + typedef mat<4, 4, f32, defaultp> f32mat4; + + typedef mat<2, 2, f64, lowp> lowp_dmat2; + typedef mat<3, 3, f64, lowp> lowp_dmat3; + typedef mat<4, 4, f64, lowp> lowp_dmat4; + + typedef mat<2, 2, f64, mediump> mediump_dmat2; + typedef mat<3, 3, f64, mediump> mediump_dmat3; + typedef mat<4, 4, f64, mediump> mediump_dmat4; + + typedef mat<2, 2, f64, highp> highp_dmat2; + typedef mat<3, 3, f64, highp> highp_dmat3; + typedef mat<4, 4, f64, highp> highp_dmat4; + + typedef mat<2, 2, f64, defaultp> dmat2; + typedef mat<3, 3, f64, defaultp> dmat3; + typedef mat<4, 4, f64, defaultp> dmat4; + + typedef mat<2, 2, f64, lowp> lowp_f64mat2; + typedef mat<3, 3, f64, lowp> lowp_f64mat3; + typedef mat<4, 4, f64, lowp> lowp_f64mat4; + + typedef mat<2, 2, f64, mediump> mediump_f64mat2; + typedef mat<3, 3, f64, mediump> mediump_f64mat3; + typedef mat<4, 4, f64, mediump> mediump_f64mat4; + + typedef mat<2, 2, f64, highp> highp_f64mat2; + typedef mat<3, 3, f64, highp> highp_f64mat3; + typedef mat<4, 4, f64, highp> highp_f64mat4; + + typedef mat<2, 2, f64, defaultp> f64mat2; + typedef mat<3, 3, f64, defaultp> f64mat3; + typedef mat<4, 4, f64, defaultp> f64mat4; + + // Matrix MxN + + typedef mat<2, 2, f32, lowp> lowp_mat2x2; + typedef mat<2, 3, f32, lowp> lowp_mat2x3; + typedef mat<2, 4, f32, lowp> lowp_mat2x4; + typedef mat<3, 2, f32, lowp> lowp_mat3x2; + typedef mat<3, 3, f32, lowp> lowp_mat3x3; + typedef mat<3, 4, f32, lowp> lowp_mat3x4; + typedef mat<4, 2, f32, lowp> lowp_mat4x2; + typedef mat<4, 3, f32, lowp> lowp_mat4x3; + typedef mat<4, 4, f32, lowp> lowp_mat4x4; + + typedef mat<2, 2, f32, mediump> mediump_mat2x2; + typedef mat<2, 3, f32, mediump> mediump_mat2x3; + typedef mat<2, 4, f32, mediump> mediump_mat2x4; + typedef mat<3, 2, f32, mediump> mediump_mat3x2; + typedef 
mat<3, 3, f32, mediump> mediump_mat3x3; + typedef mat<3, 4, f32, mediump> mediump_mat3x4; + typedef mat<4, 2, f32, mediump> mediump_mat4x2; + typedef mat<4, 3, f32, mediump> mediump_mat4x3; + typedef mat<4, 4, f32, mediump> mediump_mat4x4; + + typedef mat<2, 2, f32, highp> highp_mat2x2; + typedef mat<2, 3, f32, highp> highp_mat2x3; + typedef mat<2, 4, f32, highp> highp_mat2x4; + typedef mat<3, 2, f32, highp> highp_mat3x2; + typedef mat<3, 3, f32, highp> highp_mat3x3; + typedef mat<3, 4, f32, highp> highp_mat3x4; + typedef mat<4, 2, f32, highp> highp_mat4x2; + typedef mat<4, 3, f32, highp> highp_mat4x3; + typedef mat<4, 4, f32, highp> highp_mat4x4; + + typedef mat<2, 2, f32, defaultp> mat2x2; + typedef mat<3, 2, f32, defaultp> mat3x2; + typedef mat<4, 2, f32, defaultp> mat4x2; + typedef mat<2, 3, f32, defaultp> mat2x3; + typedef mat<3, 3, f32, defaultp> mat3x3; + typedef mat<4, 3, f32, defaultp> mat4x3; + typedef mat<2, 4, f32, defaultp> mat2x4; + typedef mat<3, 4, f32, defaultp> mat3x4; + typedef mat<4, 4, f32, defaultp> mat4x4; + + typedef mat<2, 2, f32, lowp> lowp_fmat2x2; + typedef mat<2, 3, f32, lowp> lowp_fmat2x3; + typedef mat<2, 4, f32, lowp> lowp_fmat2x4; + typedef mat<3, 2, f32, lowp> lowp_fmat3x2; + typedef mat<3, 3, f32, lowp> lowp_fmat3x3; + typedef mat<3, 4, f32, lowp> lowp_fmat3x4; + typedef mat<4, 2, f32, lowp> lowp_fmat4x2; + typedef mat<4, 3, f32, lowp> lowp_fmat4x3; + typedef mat<4, 4, f32, lowp> lowp_fmat4x4; + + typedef mat<2, 2, f32, mediump> mediump_fmat2x2; + typedef mat<2, 3, f32, mediump> mediump_fmat2x3; + typedef mat<2, 4, f32, mediump> mediump_fmat2x4; + typedef mat<3, 2, f32, mediump> mediump_fmat3x2; + typedef mat<3, 3, f32, mediump> mediump_fmat3x3; + typedef mat<3, 4, f32, mediump> mediump_fmat3x4; + typedef mat<4, 2, f32, mediump> mediump_fmat4x2; + typedef mat<4, 3, f32, mediump> mediump_fmat4x3; + typedef mat<4, 4, f32, mediump> mediump_fmat4x4; + + typedef mat<2, 2, f32, highp> highp_fmat2x2; + typedef mat<2, 3, f32, highp> highp_fmat2x3; + typedef mat<2, 4, f32, highp> highp_fmat2x4; + typedef mat<3, 2, f32, highp> highp_fmat3x2; + typedef mat<3, 3, f32, highp> highp_fmat3x3; + typedef mat<3, 4, f32, highp> highp_fmat3x4; + typedef mat<4, 2, f32, highp> highp_fmat4x2; + typedef mat<4, 3, f32, highp> highp_fmat4x3; + typedef mat<4, 4, f32, highp> highp_fmat4x4; + + typedef mat<2, 2, f32, defaultp> fmat2x2; + typedef mat<3, 2, f32, defaultp> fmat3x2; + typedef mat<4, 2, f32, defaultp> fmat4x2; + typedef mat<2, 3, f32, defaultp> fmat2x3; + typedef mat<3, 3, f32, defaultp> fmat3x3; + typedef mat<4, 3, f32, defaultp> fmat4x3; + typedef mat<2, 4, f32, defaultp> fmat2x4; + typedef mat<3, 4, f32, defaultp> fmat3x4; + typedef mat<4, 4, f32, defaultp> fmat4x4; + + typedef mat<2, 2, f32, lowp> lowp_f32mat2x2; + typedef mat<2, 3, f32, lowp> lowp_f32mat2x3; + typedef mat<2, 4, f32, lowp> lowp_f32mat2x4; + typedef mat<3, 2, f32, lowp> lowp_f32mat3x2; + typedef mat<3, 3, f32, lowp> lowp_f32mat3x3; + typedef mat<3, 4, f32, lowp> lowp_f32mat3x4; + typedef mat<4, 2, f32, lowp> lowp_f32mat4x2; + typedef mat<4, 3, f32, lowp> lowp_f32mat4x3; + typedef mat<4, 4, f32, lowp> lowp_f32mat4x4; + + typedef mat<2, 2, f32, mediump> mediump_f32mat2x2; + typedef mat<2, 3, f32, mediump> mediump_f32mat2x3; + typedef mat<2, 4, f32, mediump> mediump_f32mat2x4; + typedef mat<3, 2, f32, mediump> mediump_f32mat3x2; + typedef mat<3, 3, f32, mediump> mediump_f32mat3x3; + typedef mat<3, 4, f32, mediump> mediump_f32mat3x4; + typedef mat<4, 2, f32, mediump> mediump_f32mat4x2; + typedef mat<4, 3, 
f32, mediump> mediump_f32mat4x3; + typedef mat<4, 4, f32, mediump> mediump_f32mat4x4; + + typedef mat<2, 2, f32, highp> highp_f32mat2x2; + typedef mat<2, 3, f32, highp> highp_f32mat2x3; + typedef mat<2, 4, f32, highp> highp_f32mat2x4; + typedef mat<3, 2, f32, highp> highp_f32mat3x2; + typedef mat<3, 3, f32, highp> highp_f32mat3x3; + typedef mat<3, 4, f32, highp> highp_f32mat3x4; + typedef mat<4, 2, f32, highp> highp_f32mat4x2; + typedef mat<4, 3, f32, highp> highp_f32mat4x3; + typedef mat<4, 4, f32, highp> highp_f32mat4x4; + + typedef mat<2, 2, f32, defaultp> f32mat2x2; + typedef mat<3, 2, f32, defaultp> f32mat3x2; + typedef mat<4, 2, f32, defaultp> f32mat4x2; + typedef mat<2, 3, f32, defaultp> f32mat2x3; + typedef mat<3, 3, f32, defaultp> f32mat3x3; + typedef mat<4, 3, f32, defaultp> f32mat4x3; + typedef mat<2, 4, f32, defaultp> f32mat2x4; + typedef mat<3, 4, f32, defaultp> f32mat3x4; + typedef mat<4, 4, f32, defaultp> f32mat4x4; + + typedef mat<2, 2, double, lowp> lowp_dmat2x2; + typedef mat<2, 3, double, lowp> lowp_dmat2x3; + typedef mat<2, 4, double, lowp> lowp_dmat2x4; + typedef mat<3, 2, double, lowp> lowp_dmat3x2; + typedef mat<3, 3, double, lowp> lowp_dmat3x3; + typedef mat<3, 4, double, lowp> lowp_dmat3x4; + typedef mat<4, 2, double, lowp> lowp_dmat4x2; + typedef mat<4, 3, double, lowp> lowp_dmat4x3; + typedef mat<4, 4, double, lowp> lowp_dmat4x4; + + typedef mat<2, 2, double, mediump> mediump_dmat2x2; + typedef mat<2, 3, double, mediump> mediump_dmat2x3; + typedef mat<2, 4, double, mediump> mediump_dmat2x4; + typedef mat<3, 2, double, mediump> mediump_dmat3x2; + typedef mat<3, 3, double, mediump> mediump_dmat3x3; + typedef mat<3, 4, double, mediump> mediump_dmat3x4; + typedef mat<4, 2, double, mediump> mediump_dmat4x2; + typedef mat<4, 3, double, mediump> mediump_dmat4x3; + typedef mat<4, 4, double, mediump> mediump_dmat4x4; + + typedef mat<2, 2, double, highp> highp_dmat2x2; + typedef mat<2, 3, double, highp> highp_dmat2x3; + typedef mat<2, 4, double, highp> highp_dmat2x4; + typedef mat<3, 2, double, highp> highp_dmat3x2; + typedef mat<3, 3, double, highp> highp_dmat3x3; + typedef mat<3, 4, double, highp> highp_dmat3x4; + typedef mat<4, 2, double, highp> highp_dmat4x2; + typedef mat<4, 3, double, highp> highp_dmat4x3; + typedef mat<4, 4, double, highp> highp_dmat4x4; + + typedef mat<2, 2, double, defaultp> dmat2x2; + typedef mat<3, 2, double, defaultp> dmat3x2; + typedef mat<4, 2, double, defaultp> dmat4x2; + typedef mat<2, 3, double, defaultp> dmat2x3; + typedef mat<3, 3, double, defaultp> dmat3x3; + typedef mat<4, 3, double, defaultp> dmat4x3; + typedef mat<2, 4, double, defaultp> dmat2x4; + typedef mat<3, 4, double, defaultp> dmat3x4; + typedef mat<4, 4, double, defaultp> dmat4x4; + + typedef mat<2, 2, f64, lowp> lowp_f64mat2x2; + typedef mat<2, 3, f64, lowp> lowp_f64mat2x3; + typedef mat<2, 4, f64, lowp> lowp_f64mat2x4; + typedef mat<3, 2, f64, lowp> lowp_f64mat3x2; + typedef mat<3, 3, f64, lowp> lowp_f64mat3x3; + typedef mat<3, 4, f64, lowp> lowp_f64mat3x4; + typedef mat<4, 2, f64, lowp> lowp_f64mat4x2; + typedef mat<4, 3, f64, lowp> lowp_f64mat4x3; + typedef mat<4, 4, f64, lowp> lowp_f64mat4x4; + + typedef mat<2, 2, f64, mediump> mediump_f64mat2x2; + typedef mat<2, 3, f64, mediump> mediump_f64mat2x3; + typedef mat<2, 4, f64, mediump> mediump_f64mat2x4; + typedef mat<3, 2, f64, mediump> mediump_f64mat3x2; + typedef mat<3, 3, f64, mediump> mediump_f64mat3x3; + typedef mat<3, 4, f64, mediump> mediump_f64mat3x4; + typedef mat<4, 2, f64, mediump> mediump_f64mat4x2; + typedef 
mat<4, 3, f64, mediump> mediump_f64mat4x3;
+	typedef mat<4, 4, f64, mediump> mediump_f64mat4x4;
+
+	typedef mat<2, 2, f64, highp> highp_f64mat2x2;
+	typedef mat<2, 3, f64, highp> highp_f64mat2x3;
+	typedef mat<2, 4, f64, highp> highp_f64mat2x4;
+	typedef mat<3, 2, f64, highp> highp_f64mat3x2;
+	typedef mat<3, 3, f64, highp> highp_f64mat3x3;
+	typedef mat<3, 4, f64, highp> highp_f64mat3x4;
+	typedef mat<4, 2, f64, highp> highp_f64mat4x2;
+	typedef mat<4, 3, f64, highp> highp_f64mat4x3;
+	typedef mat<4, 4, f64, highp> highp_f64mat4x4;
+
+	typedef mat<2, 2, f64, defaultp> f64mat2x2;
+	typedef mat<3, 2, f64, defaultp> f64mat3x2;
+	typedef mat<4, 2, f64, defaultp> f64mat4x2;
+	typedef mat<2, 3, f64, defaultp> f64mat2x3;
+	typedef mat<3, 3, f64, defaultp> f64mat3x3;
+	typedef mat<4, 3, f64, defaultp> f64mat4x3;
+	typedef mat<2, 4, f64, defaultp> f64mat2x4;
+	typedef mat<3, 4, f64, defaultp> f64mat3x4;
+	typedef mat<4, 4, f64, defaultp> f64mat4x4;
+
+	// Quaternion
+
+	typedef qua<float, lowp> lowp_quat;
+	typedef qua<float, mediump> mediump_quat;
+	typedef qua<float, highp> highp_quat;
+	typedef qua<float, defaultp> quat;
+
+	typedef qua<float, lowp> lowp_fquat;
+	typedef qua<float, mediump> mediump_fquat;
+	typedef qua<float, highp> highp_fquat;
+	typedef qua<float, defaultp> fquat;
+
+	typedef qua<f32, lowp> lowp_f32quat;
+	typedef qua<f32, mediump> mediump_f32quat;
+	typedef qua<f32, highp> highp_f32quat;
+	typedef qua<f32, defaultp> f32quat;
+
+	typedef qua<double, lowp> lowp_dquat;
+	typedef qua<double, mediump> mediump_dquat;
+	typedef qua<double, highp> highp_dquat;
+	typedef qua<double, defaultp> dquat;
+
+	typedef qua<f64, lowp> lowp_f64quat;
+	typedef qua<f64, mediump> mediump_f64quat;
+	typedef qua<f64, highp> highp_f64quat;
+	typedef qua<f64, defaultp> f64quat;
+}//namespace glm
+
+
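fwd.hpp above only forward-declares the templates behind these typedefs, so a header that merely names GLM types can include it instead of the full library; a sketch of a hypothetical geometry.h under that assumption (editorial example, not part of the diff):

// geometry.h — sketch: fwd.hpp suffices for declarations, since function
// declarations may use incomplete types; the .cpp includes <glm/glm.hpp>.
#pragma once
#include <glm/fwd.hpp>

glm::vec3 surface_normal(glm::vec3 const& a, glm::vec3 const& b, glm::vec3 const& c);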
diff --git a/MacroLibX/third_party/glm/geometric.hpp b/MacroLibX/third_party/glm/geometric.hpp
new file mode 100644
index 0000000..c068a3c
--- /dev/null
+++ b/MacroLibX/third_party/glm/geometric.hpp
@@ -0,0 +1,116 @@
+/// @ref core
+/// @file glm/geometric.hpp
+///
+/// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions
+///
+/// @defgroup core_func_geometric Geometric functions
+/// @ingroup core
+///
+/// These operate on vectors as vectors, not component-wise.
+///
+/// Include <glm/glm.hpp> to use these core features.
+
+#pragma once
+
+#include "detail/type_vec3.hpp"
+
+namespace glm
+{
+	/// @addtogroup core_func_geometric
+	/// @{
+
+	/// Returns the length of x, i.e., sqrt(x * x).
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	/// @tparam T Floating-point scalar types.
+	///
+	/// @see GLSL length man page
+	/// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL T length(vec<L, T, Q> const& x);
+
+	/// Returns the distance between p0 and p1, i.e., length(p0 - p1).
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	/// @tparam T Floating-point scalar types.
+	///
+	/// @see GLSL distance man page
+	/// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL T distance(vec<L, T, Q> const& p0, vec<L, T, Q> const& p1);
+
+	/// Returns the dot product of x and y, i.e., result = x * y.
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	/// @tparam T Floating-point scalar types.
+	///
+	/// @see GLSL dot man page
+	/// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL T dot(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
+
+	/// Returns the cross product of x and y.
+	///
+	/// @tparam T Floating-point scalar types.
+	///
+	/// @see GLSL cross man page
+	/// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL vec<3, T, Q> cross(vec<3, T, Q> const& x, vec<3, T, Q> const& y);
+
+	/// Returns a vector in the same direction as x but with length of 1.
+	/// According to issue 10 of the GLSL 1.10 specification, if length(x) == 0 then the result is undefined and generates an error.
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	/// @tparam T Floating-point scalar types.
+	///
+	/// @see GLSL normalize man page
+	/// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> normalize(vec<L, T, Q> const& x);
+
+	/// If dot(Nref, I) < 0.0, return N, otherwise, return -N.
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	/// @tparam T Floating-point scalar types.
+	///
+	/// @see GLSL faceforward man page
+	/// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> faceforward(
+		vec<L, T, Q> const& N,
+		vec<L, T, Q> const& I,
+		vec<L, T, Q> const& Nref);
+
+	/// For the incident vector I and surface orientation N,
+	/// returns the reflection direction : result = I - 2.0 * dot(N, I) * N.
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	/// @tparam T Floating-point scalar types.
+	///
+	/// @see GLSL reflect man page
+	/// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> reflect(
+		vec<L, T, Q> const& I,
+		vec<L, T, Q> const& N);
+
+	/// For the incident vector I and surface normal N,
+	/// and the ratio of indices of refraction eta,
+	/// return the refraction vector.
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	/// @tparam T Floating-point scalar types.
+	///
+	/// @see GLSL refract man page
+	/// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> refract(
+		vec<L, T, Q> const& I,
+		vec<L, T, Q> const& N,
+		T eta);
+
+	/// @}
+}//namespace glm
+
+#include "detail/func_geometric.inl"
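A short sketch of the geometric functions declared above (editorial example, not part of the diff; assumes GLM on the include path):

#include <glm/glm.hpp>

// Mirror an incident direction about a surface normal: I - 2*dot(N,I)*N.
glm::vec3 bounce(glm::vec3 const& incident, glm::vec3 const& normal)
{
	return glm::reflect(incident, glm::normalize(normal));
}

// Area of a triangle, from the length of the cross product of two edges.
float triangle_area(glm::vec3 const& a, glm::vec3 const& b, glm::vec3 const& c)
{
	return 0.5f * glm::length(glm::cross(b - a, c - a));
}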
+/// +/// None of these types make any guarantees about the actual qualifier used. +/// +/// @ingroup core +/// +/// +/// @defgroup core_matrix Matrix types +/// +/// Matrix types of with C columns and R rows where C and R are values between 2 to 4 included. +/// These types have exhaustive sets of operators. +/// +/// @ingroup core +/// +/// +/// @defgroup core_matrix_precision Matrix types with precision qualifiers +/// +/// @brief Matrix types with precision qualifiers which may result in various precision in term of ULPs +/// +/// GLSL allows defining qualifiers for particular variables. +/// With OpenGL's GLSL, these qualifiers have no effect; they are there for compatibility, +/// with OpenGL ES's GLSL, these qualifiers do have an effect. +/// +/// C++ has no language equivalent to qualifier qualifiers. So GLM provides the next-best thing: +/// a number of typedefs that use a particular qualifier. +/// +/// None of these types make any guarantees about the actual qualifier used. +/// +/// @ingroup core +/// +/// +/// @defgroup ext Stable extensions +/// +/// @brief Additional features not specified by GLSL specification. +/// +/// EXT extensions are fully tested and documented. +/// +/// Even if it's highly unrecommended, it's possible to include all the extensions at once by +/// including . Otherwise, each extension needs to be included a specific file. +/// +/// +/// @defgroup gtc Recommended extensions +/// +/// @brief Additional features not specified by GLSL specification. +/// +/// GTC extensions aim to be stable with tests and documentation. +/// +/// Even if it's highly unrecommended, it's possible to include all the extensions at once by +/// including . Otherwise, each extension needs to be included a specific file. +/// +/// +/// @defgroup gtx Experimental extensions +/// +/// @brief Experimental features not specified by GLSL specification. +/// +/// Experimental extensions are useful functions and types, but the development of +/// their API and functionality is not necessarily stable. They can change +/// substantially between versions. Backwards compatibility is not much of an issue +/// for them. +/// +/// Even if it's highly unrecommended, it's possible to include all the extensions +/// at once by including . Otherwise, each extension needs to be +/// included a specific file. 
+/// +/// @mainpage OpenGL Mathematics (GLM) +/// - Website: glm.g-truc.net +/// - GLM API documentation +/// - GLM Manual + +#include "detail/_fixes.hpp" + +#include "detail/setup.hpp" + +#pragma once + +#include +#include +#include +#include +#include +#include "fwd.hpp" + +#include "vec2.hpp" +#include "vec3.hpp" +#include "vec4.hpp" +#include "mat2x2.hpp" +#include "mat2x3.hpp" +#include "mat2x4.hpp" +#include "mat3x2.hpp" +#include "mat3x3.hpp" +#include "mat3x4.hpp" +#include "mat4x2.hpp" +#include "mat4x3.hpp" +#include "mat4x4.hpp" + +#include "trigonometric.hpp" +#include "exponential.hpp" +#include "common.hpp" +#include "packing.hpp" +#include "geometric.hpp" +#include "matrix.hpp" +#include "vector_relational.hpp" +#include "integer.hpp" diff --git a/MacroLibX/third_party/glm/gtc/bitfield.hpp b/MacroLibX/third_party/glm/gtc/bitfield.hpp new file mode 100644 index 0000000..084fbe7 --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/bitfield.hpp @@ -0,0 +1,266 @@ +/// @ref gtc_bitfield +/// @file glm/gtc/bitfield.hpp +/// +/// @see core (dependence) +/// @see gtc_bitfield (dependence) +/// +/// @defgroup gtc_bitfield GLM_GTC_bitfield +/// @ingroup gtc +/// +/// Include to use the features of this extension. +/// +/// Allow to perform bit operations on integer values + +#include "../detail/setup.hpp" + +#pragma once + +// Dependencies +#include "../ext/scalar_int_sized.hpp" +#include "../ext/scalar_uint_sized.hpp" +#include "../detail/qualifier.hpp" +#include "../detail/_vectorize.hpp" +#include "type_precision.hpp" +#include + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_GTC_bitfield extension included") +#endif + +namespace glm +{ + /// @addtogroup gtc_bitfield + /// @{ + + /// Build a mask of 'count' bits + /// + /// @see gtc_bitfield + template + GLM_FUNC_DECL genIUType mask(genIUType Bits); + + /// Build a mask of 'count' bits + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Signed and unsigned integer scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see gtc_bitfield + template + GLM_FUNC_DECL vec mask(vec const& v); + + /// Rotate all bits to the right. All the bits dropped in the right side are inserted back on the left side. + /// + /// @see gtc_bitfield + template + GLM_FUNC_DECL genIUType bitfieldRotateRight(genIUType In, int Shift); + + /// Rotate all bits to the right. All the bits dropped in the right side are inserted back on the left side. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Signed and unsigned integer scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see gtc_bitfield + template + GLM_FUNC_DECL vec bitfieldRotateRight(vec const& In, int Shift); + + /// Rotate all bits to the left. All the bits dropped in the left side are inserted back on the right side. + /// + /// @see gtc_bitfield + template + GLM_FUNC_DECL genIUType bitfieldRotateLeft(genIUType In, int Shift); + + /// Rotate all bits to the left. All the bits dropped in the left side are inserted back on the right side. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Signed and unsigned integer scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see gtc_bitfield + template + GLM_FUNC_DECL vec bitfieldRotateLeft(vec const& In, int Shift); + + /// Set to 1 a range of bits. 
+	/// Set to 1 a range of bits.
+	///
+	/// @see gtc_bitfield
+	template<typename genIUType>
+	GLM_FUNC_DECL genIUType bitfieldFillOne(genIUType Value, int FirstBit, int BitCount);
+
+	/// Set to 1 a range of bits.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Signed and unsigned integer scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see gtc_bitfield
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> bitfieldFillOne(vec<L, T, Q> const& Value, int FirstBit, int BitCount);
+
+	/// Set to 0 a range of bits.
+	///
+	/// @see gtc_bitfield
+	template<typename genIUType>
+	GLM_FUNC_DECL genIUType bitfieldFillZero(genIUType Value, int FirstBit, int BitCount);
+
+	/// Set to 0 a range of bits.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Signed and unsigned integer scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see gtc_bitfield
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> bitfieldFillZero(vec<L, T, Q> const& Value, int FirstBit, int BitCount);
+
+	/// Interleaves the bits of x and y.
+	/// The first bit is the first bit of x followed by the first bit of y.
+	/// The other bits are interleaved following the previous sequence.
+	///
+	/// @see gtc_bitfield
+	GLM_FUNC_DECL int16 bitfieldInterleave(int8 x, int8 y);
+
+	/// Interleaves the bits of x and y.
+	/// The first bit is the first bit of x followed by the first bit of y.
+	/// The other bits are interleaved following the previous sequence.
+	///
+	/// @see gtc_bitfield
+	GLM_FUNC_DECL uint16 bitfieldInterleave(uint8 x, uint8 y);
+
+	/// Interleaves the bits of x and y.
+	/// The first bit is the first bit of v.x followed by the first bit of v.y.
+	/// The other bits are interleaved following the previous sequence.
+	///
+	/// @see gtc_bitfield
+	GLM_FUNC_DECL uint16 bitfieldInterleave(u8vec2 const& v);
+
+	/// Deinterleaves the bits of x.
+	///
+	/// @see gtc_bitfield
+	GLM_FUNC_DECL glm::u8vec2 bitfieldDeinterleave(glm::uint16 x);
+
+	/// Interleaves the bits of x and y.
+	/// The first bit is the first bit of x followed by the first bit of y.
+	/// The other bits are interleaved following the previous sequence.
+	///
+	/// @see gtc_bitfield
+	GLM_FUNC_DECL int32 bitfieldInterleave(int16 x, int16 y);
+
+	/// Interleaves the bits of x and y.
+	/// The first bit is the first bit of x followed by the first bit of y.
+	/// The other bits are interleaved following the previous sequence.
+	///
+	/// @see gtc_bitfield
+	GLM_FUNC_DECL uint32 bitfieldInterleave(uint16 x, uint16 y);
+
+	/// Interleaves the bits of x and y.
+	/// The first bit is the first bit of v.x followed by the first bit of v.y.
+	/// The other bits are interleaved following the previous sequence.
+	///
+	/// @see gtc_bitfield
+	GLM_FUNC_DECL uint32 bitfieldInterleave(u16vec2 const& v);
+
+	/// Deinterleaves the bits of x.
+	///
+	/// @see gtc_bitfield
+	GLM_FUNC_DECL glm::u16vec2 bitfieldDeinterleave(glm::uint32 x);
+
+	/// Interleaves the bits of x and y.
+	/// The first bit is the first bit of x followed by the first bit of y.
+	/// The other bits are interleaved following the previous sequence.
+	///
+	/// @see gtc_bitfield
+	GLM_FUNC_DECL int64 bitfieldInterleave(int32 x, int32 y);
+
+	/// Interleaves the bits of x and y.
+	/// The first bit is the first bit of x followed by the first bit of y.
+	/// The other bits are interleaved following the previous sequence.
+	///
+	/// @see gtc_bitfield
+	GLM_FUNC_DECL uint64 bitfieldInterleave(uint32 x, uint32 y);
+
+	/// Interleaves the bits of x and y.
+ /// The first bit is the first bit of v.x followed by the first bit of v.y. + /// The other bits are interleaved following the previous sequence. + /// + /// @see gtc_bitfield + GLM_FUNC_DECL uint64 bitfieldInterleave(u32vec2 const& v); + + /// Deinterleaves the bits of x. + /// + /// @see gtc_bitfield + GLM_FUNC_DECL glm::u32vec2 bitfieldDeinterleave(glm::uint64 x); + + /// Interleaves the bits of x, y and z. + /// The first bit is the first bit of x followed by the first bit of y and the first bit of z. + /// The other bits are interleaved following the previous sequence. + /// + /// @see gtc_bitfield + GLM_FUNC_DECL int32 bitfieldInterleave(int8 x, int8 y, int8 z); + + /// Interleaves the bits of x, y and z. + /// The first bit is the first bit of x followed by the first bit of y and the first bit of z. + /// The other bits are interleaved following the previous sequence. + /// + /// @see gtc_bitfield + GLM_FUNC_DECL uint32 bitfieldInterleave(uint8 x, uint8 y, uint8 z); + + /// Interleaves the bits of x, y and z. + /// The first bit is the first bit of x followed by the first bit of y and the first bit of z. + /// The other bits are interleaved following the previous sequence. + /// + /// @see gtc_bitfield + GLM_FUNC_DECL int64 bitfieldInterleave(int16 x, int16 y, int16 z); + + /// Interleaves the bits of x, y and z. + /// The first bit is the first bit of x followed by the first bit of y and the first bit of z. + /// The other bits are interleaved following the previous sequence. + /// + /// @see gtc_bitfield + GLM_FUNC_DECL uint64 bitfieldInterleave(uint16 x, uint16 y, uint16 z); + + /// Interleaves the bits of x, y and z. + /// The first bit is the first bit of x followed by the first bit of y and the first bit of z. + /// The other bits are interleaved following the previous sequence. + /// + /// @see gtc_bitfield + GLM_FUNC_DECL int64 bitfieldInterleave(int32 x, int32 y, int32 z); + + /// Interleaves the bits of x, y and z. + /// The first bit is the first bit of x followed by the first bit of y and the first bit of z. + /// The other bits are interleaved following the previous sequence. + /// + /// @see gtc_bitfield + GLM_FUNC_DECL uint64 bitfieldInterleave(uint32 x, uint32 y, uint32 z); + + /// Interleaves the bits of x, y, z and w. + /// The first bit is the first bit of x followed by the first bit of y, the first bit of z and finally the first bit of w. + /// The other bits are interleaved following the previous sequence. + /// + /// @see gtc_bitfield + GLM_FUNC_DECL int32 bitfieldInterleave(int8 x, int8 y, int8 z, int8 w); + + /// Interleaves the bits of x, y, z and w. + /// The first bit is the first bit of x followed by the first bit of y, the first bit of z and finally the first bit of w. + /// The other bits are interleaved following the previous sequence. + /// + /// @see gtc_bitfield + GLM_FUNC_DECL uint32 bitfieldInterleave(uint8 x, uint8 y, uint8 z, uint8 w); + + /// Interleaves the bits of x, y, z and w. + /// The first bit is the first bit of x followed by the first bit of y, the first bit of z and finally the first bit of w. + /// The other bits are interleaved following the previous sequence. + /// + /// @see gtc_bitfield + GLM_FUNC_DECL int64 bitfieldInterleave(int16 x, int16 y, int16 z, int16 w); + + /// Interleaves the bits of x, y, z and w. + /// The first bit is the first bit of x followed by the first bit of y, the first bit of z and finally the first bit of w. + /// The other bits are interleaved following the previous sequence. 
+	///
+	/// @see gtc_bitfield
+	GLM_FUNC_DECL uint64 bitfieldInterleave(uint16 x, uint16 y, uint16 z, uint16 w);
+
+	/// @}
+} //namespace glm
+
+#include "bitfield.inl"
diff --git a/MacroLibX/third_party/glm/gtc/bitfield.inl b/MacroLibX/third_party/glm/gtc/bitfield.inl
new file mode 100644
index 0000000..06cf188
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtc/bitfield.inl
@@ -0,0 +1,626 @@
+/// @ref gtc_bitfield
+
+#include "../simd/integer.h"
+
+namespace glm{
+namespace detail
+{
+	template<typename PARAM, typename RET>
+	GLM_FUNC_DECL RET bitfieldInterleave(PARAM x, PARAM y);
+
+	template<typename PARAM, typename RET>
+	GLM_FUNC_DECL RET bitfieldInterleave(PARAM x, PARAM y, PARAM z);
+
+	template<typename PARAM, typename RET>
+	GLM_FUNC_DECL RET bitfieldInterleave(PARAM x, PARAM y, PARAM z, PARAM w);
+
+	template<>
+	GLM_FUNC_QUALIFIER glm::uint16 bitfieldInterleave(glm::uint8 x, glm::uint8 y)
+	{
+		glm::uint16 REG1(x);
+		glm::uint16 REG2(y);
+
+		REG1 = ((REG1 << 4) | REG1) & static_cast<glm::uint16>(0x0F0F);
+		REG2 = ((REG2 << 4) | REG2) & static_cast<glm::uint16>(0x0F0F);
+
+		REG1 = ((REG1 << 2) | REG1) & static_cast<glm::uint16>(0x3333);
+		REG2 = ((REG2 << 2) | REG2) & static_cast<glm::uint16>(0x3333);
+
+		REG1 = ((REG1 << 1) | REG1) & static_cast<glm::uint16>(0x5555);
+		REG2 = ((REG2 << 1) | REG2) & static_cast<glm::uint16>(0x5555);
+
+		return REG1 | static_cast<glm::uint16>(REG2 << 1);
+	}
+
+	template<>
+	GLM_FUNC_QUALIFIER glm::uint32 bitfieldInterleave(glm::uint16 x, glm::uint16 y)
+	{
+		glm::uint32 REG1(x);
+		glm::uint32 REG2(y);
+
+		REG1 = ((REG1 << 8) | REG1) & static_cast<glm::uint32>(0x00FF00FF);
+		REG2 = ((REG2 << 8) | REG2) & static_cast<glm::uint32>(0x00FF00FF);
+
+		REG1 = ((REG1 << 4) | REG1) & static_cast<glm::uint32>(0x0F0F0F0F);
+		REG2 = ((REG2 << 4) | REG2) & static_cast<glm::uint32>(0x0F0F0F0F);
+
+		REG1 = ((REG1 << 2) | REG1) & static_cast<glm::uint32>(0x33333333);
+		REG2 = ((REG2 << 2) | REG2) & static_cast<glm::uint32>(0x33333333);
+
+		REG1 = ((REG1 << 1) | REG1) & static_cast<glm::uint32>(0x55555555);
+		REG2 = ((REG2 << 1) | REG2) & static_cast<glm::uint32>(0x55555555);
+
+		return REG1 | (REG2 << 1);
+	}
+
+	template<>
+	GLM_FUNC_QUALIFIER glm::uint64 bitfieldInterleave(glm::uint32 x, glm::uint32 y)
+	{
+		glm::uint64 REG1(x);
+		glm::uint64 REG2(y);
+
+		REG1 = ((REG1 << 16) | REG1) & static_cast<glm::uint64>(0x0000FFFF0000FFFFull);
+		REG2 = ((REG2 << 16) | REG2) & static_cast<glm::uint64>(0x0000FFFF0000FFFFull);
+
+		REG1 = ((REG1 << 8) | REG1) & static_cast<glm::uint64>(0x00FF00FF00FF00FFull);
+		REG2 = ((REG2 << 8) | REG2) & static_cast<glm::uint64>(0x00FF00FF00FF00FFull);
+
+		REG1 = ((REG1 << 4) | REG1) & static_cast<glm::uint64>(0x0F0F0F0F0F0F0F0Full);
+		REG2 = ((REG2 << 4) | REG2) & static_cast<glm::uint64>(0x0F0F0F0F0F0F0F0Full);
+
+		REG1 = ((REG1 << 2) | REG1) & static_cast<glm::uint64>(0x3333333333333333ull);
+		REG2 = ((REG2 << 2) | REG2) & static_cast<glm::uint64>(0x3333333333333333ull);
+
+		REG1 = ((REG1 << 1) | REG1) & static_cast<glm::uint64>(0x5555555555555555ull);
+		REG2 = ((REG2 << 1) | REG2) & static_cast<glm::uint64>(0x5555555555555555ull);
+
+		return REG1 | (REG2 << 1);
+	}
+	template<>
+	GLM_FUNC_QUALIFIER glm::uint32 bitfieldInterleave(glm::uint8 x, glm::uint8 y, glm::uint8 z)
+	{
+		glm::uint32 REG1(x);
+		glm::uint32 REG2(y);
+		glm::uint32 REG3(z);
+
+		REG1 = ((REG1 << 16) | REG1) & static_cast<glm::uint32>(0xFF0000FFu);
+		REG2 = ((REG2 << 16) | REG2) & static_cast<glm::uint32>(0xFF0000FFu);
+		REG3 = ((REG3 << 16) | REG3) & static_cast<glm::uint32>(0xFF0000FFu);
+
+		REG1 = ((REG1 << 8) | REG1) & static_cast<glm::uint32>(0x0F00F00Fu);
+		REG2 = ((REG2 << 8) | REG2) & static_cast<glm::uint32>(0x0F00F00Fu);
+		REG3 = ((REG3 << 8) | REG3) & static_cast<glm::uint32>(0x0F00F00Fu);
+
+		REG1 = ((REG1 << 4) | REG1) & static_cast<glm::uint32>(0xC30C30C3u);
+		REG2 = ((REG2 << 4) | REG2) & static_cast<glm::uint32>(0xC30C30C3u);
+		REG3 = ((REG3 << 4) | REG3) & static_cast<glm::uint32>(0xC30C30C3u);
+
+		REG1 = ((REG1 << 2) | REG1) & static_cast<glm::uint32>(0x49249249u);
+		REG2 = ((REG2 << 2) | REG2) & static_cast<glm::uint32>(0x49249249u);
+		REG3 = ((REG3 << 2) | REG3) & static_cast<glm::uint32>(0x49249249u);
+
+		return REG1 | (REG2 << 1) | (REG3 << 2);
+	}
+
+	template<>
+	GLM_FUNC_QUALIFIER glm::uint64 bitfieldInterleave(glm::uint16 x, glm::uint16 y, glm::uint16 z)
+	{
+		glm::uint64 REG1(x);
+		glm::uint64 REG2(y);
+		glm::uint64 REG3(z);
+
+		REG1 = ((REG1 << 32) | REG1) & static_cast<glm::uint64>(0xFFFF00000000FFFFull);
+		REG2 = ((REG2 << 32) | REG2) & static_cast<glm::uint64>(0xFFFF00000000FFFFull);
+		REG3 = ((REG3 << 32) | REG3) & static_cast<glm::uint64>(0xFFFF00000000FFFFull);
+
+		REG1 = ((REG1 << 16) | REG1) & static_cast<glm::uint64>(0x00FF0000FF0000FFull);
+		REG2 = ((REG2 << 16) | REG2) & static_cast<glm::uint64>(0x00FF0000FF0000FFull);
+		REG3 = ((REG3 << 16) | REG3) & static_cast<glm::uint64>(0x00FF0000FF0000FFull);
+
+		REG1 = ((REG1 << 8) | REG1) & static_cast<glm::uint64>(0xF00F00F00F00F00Full);
+		REG2 = ((REG2 << 8) | REG2) & static_cast<glm::uint64>(0xF00F00F00F00F00Full);
+		REG3 = ((REG3 << 8) | REG3) & static_cast<glm::uint64>(0xF00F00F00F00F00Full);
+
+		REG1 = ((REG1 << 4) | REG1) & static_cast<glm::uint64>(0x30C30C30C30C30C3ull);
+		REG2 = ((REG2 << 4) | REG2) & static_cast<glm::uint64>(0x30C30C30C30C30C3ull);
+		REG3 = ((REG3 << 4) | REG3) & static_cast<glm::uint64>(0x30C30C30C30C30C3ull);
+
+		REG1 = ((REG1 << 2) | REG1) & static_cast<glm::uint64>(0x9249249249249249ull);
+		REG2 = ((REG2 << 2) | REG2) & static_cast<glm::uint64>(0x9249249249249249ull);
+		REG3 = ((REG3 << 2) | REG3) & static_cast<glm::uint64>(0x9249249249249249ull);
+
+		return REG1 | (REG2 << 1) | (REG3 << 2);
+	}
+
+	template<>
+	GLM_FUNC_QUALIFIER glm::uint64 bitfieldInterleave(glm::uint32 x, glm::uint32 y, glm::uint32 z)
+	{
+		glm::uint64 REG1(x);
+		glm::uint64 REG2(y);
+		glm::uint64 REG3(z);
+
+		REG1 = ((REG1 << 32) | REG1) & static_cast<glm::uint64>(0xFFFF00000000FFFFull);
+		REG2 = ((REG2 << 32) | REG2) & static_cast<glm::uint64>(0xFFFF00000000FFFFull);
+		REG3 = ((REG3 << 32) | REG3) & static_cast<glm::uint64>(0xFFFF00000000FFFFull);
+
+		REG1 = ((REG1 << 16) | REG1) & static_cast<glm::uint64>(0x00FF0000FF0000FFull);
+		REG2 = ((REG2 << 16) | REG2) & static_cast<glm::uint64>(0x00FF0000FF0000FFull);
+		REG3 = ((REG3 << 16) | REG3) & static_cast<glm::uint64>(0x00FF0000FF0000FFull);
+
+		REG1 = ((REG1 << 8) | REG1) & static_cast<glm::uint64>(0xF00F00F00F00F00Full);
+		REG2 = ((REG2 << 8) | REG2) & static_cast<glm::uint64>(0xF00F00F00F00F00Full);
+		REG3 = ((REG3 << 8) | REG3) & static_cast<glm::uint64>(0xF00F00F00F00F00Full);
+
+		REG1 = ((REG1 << 4) | REG1) & static_cast<glm::uint64>(0x30C30C30C30C30C3ull);
+		REG2 = ((REG2 << 4) | REG2) & static_cast<glm::uint64>(0x30C30C30C30C30C3ull);
+		REG3 = ((REG3 << 4) | REG3) & static_cast<glm::uint64>(0x30C30C30C30C30C3ull);
+
+		REG1 = ((REG1 << 2) | REG1) & static_cast<glm::uint64>(0x9249249249249249ull);
+		REG2 = ((REG2 << 2) | REG2) & static_cast<glm::uint64>(0x9249249249249249ull);
+		REG3 = ((REG3 << 2) | REG3) & static_cast<glm::uint64>(0x9249249249249249ull);
+
+		return REG1 | (REG2 << 1) | (REG3 << 2);
+	}
+	template<>
+	GLM_FUNC_QUALIFIER glm::uint32 bitfieldInterleave(glm::uint8 x, glm::uint8 y, glm::uint8 z, glm::uint8 w)
+	{
+		glm::uint32 REG1(x);
+		glm::uint32 REG2(y);
+		glm::uint32 REG3(z);
+		glm::uint32 REG4(w);
+
+		REG1 = ((REG1 << 12) | REG1) & static_cast<glm::uint32>(0x000F000Fu);
+		REG2 = ((REG2 << 12) | REG2) & static_cast<glm::uint32>(0x000F000Fu);
+		REG3 = ((REG3 << 12) | REG3) & static_cast<glm::uint32>(0x000F000Fu);
+		REG4 = ((REG4 << 12) | REG4) & static_cast<glm::uint32>(0x000F000Fu);
+
+		REG1 = ((REG1 << 6) | REG1) & static_cast<glm::uint32>(0x03030303u);
+		REG2 = ((REG2 << 6) | REG2) & static_cast<glm::uint32>(0x03030303u);
+		REG3 = ((REG3 << 6) | REG3) & static_cast<glm::uint32>(0x03030303u);
+		REG4 = ((REG4 << 6) | REG4) & static_cast<glm::uint32>(0x03030303u);
+
+		REG1 = ((REG1 << 3) | REG1) & static_cast<glm::uint32>(0x11111111u);
+		REG2 = ((REG2 << 3) | REG2) & static_cast<glm::uint32>(0x11111111u);
+		REG3 = ((REG3 << 3) | REG3) & static_cast<glm::uint32>(0x11111111u);
+		REG4 = ((REG4 << 3) | REG4) & static_cast<glm::uint32>(0x11111111u);
+
+		return REG1 | (REG2 << 1) | (REG3 << 2) | (REG4 << 3);
+	}
+
+	template<>
+	GLM_FUNC_QUALIFIER glm::uint64 bitfieldInterleave(glm::uint16 x, glm::uint16 y, glm::uint16 z, glm::uint16 w)
+	{
+		glm::uint64 REG1(x);
+		glm::uint64 REG2(y);
+		glm::uint64 REG3(z);
+		glm::uint64 REG4(w);
+
+		REG1 = ((REG1 << 24) | REG1) & static_cast<glm::uint64>(0x000000FF000000FFull);
+		REG2 = ((REG2 << 24) | REG2) & static_cast<glm::uint64>(0x000000FF000000FFull);
+		REG3 = ((REG3 << 24) | REG3) & static_cast<glm::uint64>(0x000000FF000000FFull);
+		REG4 = ((REG4 << 24) | REG4) & static_cast<glm::uint64>(0x000000FF000000FFull);
+
+		REG1 = ((REG1 << 12) | REG1) & static_cast<glm::uint64>(0x000F000F000F000Full);
+		REG2 = ((REG2 << 12) | REG2) & static_cast<glm::uint64>(0x000F000F000F000Full);
+		REG3 = ((REG3 << 12) | REG3) & static_cast<glm::uint64>(0x000F000F000F000Full);
+		REG4 = ((REG4 << 12) | REG4) & static_cast<glm::uint64>(0x000F000F000F000Full);
+
+		REG1 = ((REG1 << 6) | REG1) & static_cast<glm::uint64>(0x0303030303030303ull);
+		REG2 = ((REG2 << 6) | REG2) & static_cast<glm::uint64>(0x0303030303030303ull);
+		REG3 = ((REG3 << 6) | REG3) & static_cast<glm::uint64>(0x0303030303030303ull);
+		REG4 = ((REG4 << 6) | REG4) & static_cast<glm::uint64>(0x0303030303030303ull);
+
+		REG1 = ((REG1 << 3) | REG1) & static_cast<glm::uint64>(0x1111111111111111ull);
+		REG2 = ((REG2 << 3) | REG2) & static_cast<glm::uint64>(0x1111111111111111ull);
+		REG3 = ((REG3 << 3) | REG3) & static_cast<glm::uint64>(0x1111111111111111ull);
+		REG4 = ((REG4 << 3) | REG4) & static_cast<glm::uint64>(0x1111111111111111ull);
+
+		return REG1 | (REG2 << 1) | (REG3 << 2) | (REG4 << 3);
+	}
+}//namespace detail
+
+	template<typename genIUType>
+	GLM_FUNC_QUALIFIER genIUType mask(genIUType Bits)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<genIUType>::is_integer, "'mask' accepts only integer values");
+
+		return Bits >= sizeof(genIUType) * 8 ? ~static_cast<genIUType>(0) : (static_cast<genIUType>(1) << Bits) - static_cast<genIUType>(1);
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> mask(vec<L, T, Q> const& v)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<T>::is_integer, "'mask' accepts only integer values");
+
+		return detail::functor1<vec, L, T, T, Q>::call(mask, v);
+	}
+
+	template<typename genIType>
+	GLM_FUNC_QUALIFIER genIType bitfieldRotateRight(genIType In, int Shift)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<genIType>::is_integer, "'bitfieldRotateRight' accepts only integer values");
+
+		int const BitSize = static_cast<int>(sizeof(genIType) * 8);
+		return (In << static_cast<genIType>(Shift)) | (In >> static_cast<genIType>(BitSize - Shift));
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> bitfieldRotateRight(vec<L, T, Q> const& In, int Shift)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<T>::is_integer, "'bitfieldRotateRight' accepts only integer values");
+
+		int const BitSize = static_cast<int>(sizeof(T) * 8);
+		return (In << static_cast<T>(Shift)) | (In >> static_cast<T>(BitSize - Shift));
+	}
+
+	template<typename genIType>
+	GLM_FUNC_QUALIFIER genIType bitfieldRotateLeft(genIType In, int Shift)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<genIType>::is_integer, "'bitfieldRotateLeft' accepts only integer values");
+
+		int const BitSize = static_cast<int>(sizeof(genIType) * 8);
+		return (In >> static_cast<genIType>(Shift)) | (In << static_cast<genIType>(BitSize - Shift));
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> bitfieldRotateLeft(vec<L, T, Q> const& In, int Shift)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<T>::is_integer, "'bitfieldRotateLeft' accepts only integer values");
+
+		int const BitSize = static_cast<int>(sizeof(T) * 8);
+		return (In >> static_cast<T>(Shift)) | (In << static_cast<T>(BitSize - Shift));
+	}
+
+	template<typename genIUType>
+	GLM_FUNC_QUALIFIER genIUType bitfieldFillOne(genIUType Value, int FirstBit, int BitCount)
+	{
+		return Value | static_cast<genIUType>(mask(BitCount) << FirstBit);
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> bitfieldFillOne(vec<L, T, Q> const& Value, int FirstBit, int BitCount)
+	{
+		return Value | static_cast<T>(mask(BitCount) << FirstBit);
+	}
+
+	template<typename genIUType>
+	GLM_FUNC_QUALIFIER genIUType bitfieldFillZero(genIUType Value, int FirstBit, int BitCount)
+	{
+		return Value & static_cast<genIUType>(~(mask(BitCount) << FirstBit));
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> bitfieldFillZero(vec<L, T, Q> const& Value, int FirstBit, int BitCount)
+	{
+		return Value & static_cast<T>(~(mask(BitCount) << FirstBit));
+	}
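	// [Editor's illustration, not part of upstream GLM] The interleave routines
	// below produce Morton codes: x occupies the even bits and y the odd bits, so
	//   glm::bitfieldInterleave(glm::uint8(3), glm::uint8(0)) -> glm::uint16(5) // 0b101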
const& Value, int FirstBit, int BitCount) + { + return Value | static_cast(mask(BitCount) << FirstBit); + } + + template + GLM_FUNC_QUALIFIER genIUType bitfieldFillZero(genIUType Value, int FirstBit, int BitCount) + { + return Value & static_cast(~(mask(BitCount) << FirstBit)); + } + + template + GLM_FUNC_QUALIFIER vec bitfieldFillZero(vec const& Value, int FirstBit, int BitCount) + { + return Value & static_cast(~(mask(BitCount) << FirstBit)); + } + + GLM_FUNC_QUALIFIER int16 bitfieldInterleave(int8 x, int8 y) + { + union sign8 + { + int8 i; + uint8 u; + } sign_x, sign_y; + + union sign16 + { + int16 i; + uint16 u; + } result; + + sign_x.i = x; + sign_y.i = y; + result.u = bitfieldInterleave(sign_x.u, sign_y.u); + + return result.i; + } + + GLM_FUNC_QUALIFIER uint16 bitfieldInterleave(uint8 x, uint8 y) + { + return detail::bitfieldInterleave(x, y); + } + + GLM_FUNC_QUALIFIER uint16 bitfieldInterleave(u8vec2 const& v) + { + return detail::bitfieldInterleave(v.x, v.y); + } + + GLM_FUNC_QUALIFIER u8vec2 bitfieldDeinterleave(glm::uint16 x) + { + uint16 REG1(x); + uint16 REG2(x >>= 1); + + REG1 = REG1 & static_cast(0x5555); + REG2 = REG2 & static_cast(0x5555); + + REG1 = ((REG1 >> 1) | REG1) & static_cast(0x3333); + REG2 = ((REG2 >> 1) | REG2) & static_cast(0x3333); + + REG1 = ((REG1 >> 2) | REG1) & static_cast(0x0F0F); + REG2 = ((REG2 >> 2) | REG2) & static_cast(0x0F0F); + + REG1 = ((REG1 >> 4) | REG1) & static_cast(0x00FF); + REG2 = ((REG2 >> 4) | REG2) & static_cast(0x00FF); + + REG1 = ((REG1 >> 8) | REG1) & static_cast(0xFFFF); + REG2 = ((REG2 >> 8) | REG2) & static_cast(0xFFFF); + + return glm::u8vec2(REG1, REG2); + } + + GLM_FUNC_QUALIFIER int32 bitfieldInterleave(int16 x, int16 y) + { + union sign16 + { + int16 i; + uint16 u; + } sign_x, sign_y; + + union sign32 + { + int32 i; + uint32 u; + } result; + + sign_x.i = x; + sign_y.i = y; + result.u = bitfieldInterleave(sign_x.u, sign_y.u); + + return result.i; + } + + GLM_FUNC_QUALIFIER uint32 bitfieldInterleave(uint16 x, uint16 y) + { + return detail::bitfieldInterleave(x, y); + } + + GLM_FUNC_QUALIFIER glm::uint32 bitfieldInterleave(u16vec2 const& v) + { + return detail::bitfieldInterleave(v.x, v.y); + } + + GLM_FUNC_QUALIFIER glm::u16vec2 bitfieldDeinterleave(glm::uint32 x) + { + glm::uint32 REG1(x); + glm::uint32 REG2(x >>= 1); + + REG1 = REG1 & static_cast(0x55555555); + REG2 = REG2 & static_cast(0x55555555); + + REG1 = ((REG1 >> 1) | REG1) & static_cast(0x33333333); + REG2 = ((REG2 >> 1) | REG2) & static_cast(0x33333333); + + REG1 = ((REG1 >> 2) | REG1) & static_cast(0x0F0F0F0F); + REG2 = ((REG2 >> 2) | REG2) & static_cast(0x0F0F0F0F); + + REG1 = ((REG1 >> 4) | REG1) & static_cast(0x00FF00FF); + REG2 = ((REG2 >> 4) | REG2) & static_cast(0x00FF00FF); + + REG1 = ((REG1 >> 8) | REG1) & static_cast(0x0000FFFF); + REG2 = ((REG2 >> 8) | REG2) & static_cast(0x0000FFFF); + + return glm::u16vec2(REG1, REG2); + } + + GLM_FUNC_QUALIFIER int64 bitfieldInterleave(int32 x, int32 y) + { + union sign32 + { + int32 i; + uint32 u; + } sign_x, sign_y; + + union sign64 + { + int64 i; + uint64 u; + } result; + + sign_x.i = x; + sign_y.i = y; + result.u = bitfieldInterleave(sign_x.u, sign_y.u); + + return result.i; + } + + GLM_FUNC_QUALIFIER uint64 bitfieldInterleave(uint32 x, uint32 y) + { + return detail::bitfieldInterleave(x, y); + } + + GLM_FUNC_QUALIFIER glm::uint64 bitfieldInterleave(u32vec2 const& v) + { + return detail::bitfieldInterleave(v.x, v.y); + } + + GLM_FUNC_QUALIFIER glm::u32vec2 bitfieldDeinterleave(glm::uint64 x) + { + glm::uint64 
REG1(x); + glm::uint64 REG2(x >>= 1); + + REG1 = REG1 & static_cast(0x5555555555555555ull); + REG2 = REG2 & static_cast(0x5555555555555555ull); + + REG1 = ((REG1 >> 1) | REG1) & static_cast(0x3333333333333333ull); + REG2 = ((REG2 >> 1) | REG2) & static_cast(0x3333333333333333ull); + + REG1 = ((REG1 >> 2) | REG1) & static_cast(0x0F0F0F0F0F0F0F0Full); + REG2 = ((REG2 >> 2) | REG2) & static_cast(0x0F0F0F0F0F0F0F0Full); + + REG1 = ((REG1 >> 4) | REG1) & static_cast(0x00FF00FF00FF00FFull); + REG2 = ((REG2 >> 4) | REG2) & static_cast(0x00FF00FF00FF00FFull); + + REG1 = ((REG1 >> 8) | REG1) & static_cast(0x0000FFFF0000FFFFull); + REG2 = ((REG2 >> 8) | REG2) & static_cast(0x0000FFFF0000FFFFull); + + REG1 = ((REG1 >> 16) | REG1) & static_cast(0x00000000FFFFFFFFull); + REG2 = ((REG2 >> 16) | REG2) & static_cast(0x00000000FFFFFFFFull); + + return glm::u32vec2(REG1, REG2); + } + + GLM_FUNC_QUALIFIER int32 bitfieldInterleave(int8 x, int8 y, int8 z) + { + union sign8 + { + int8 i; + uint8 u; + } sign_x, sign_y, sign_z; + + union sign32 + { + int32 i; + uint32 u; + } result; + + sign_x.i = x; + sign_y.i = y; + sign_z.i = z; + result.u = bitfieldInterleave(sign_x.u, sign_y.u, sign_z.u); + + return result.i; + } + + GLM_FUNC_QUALIFIER uint32 bitfieldInterleave(uint8 x, uint8 y, uint8 z) + { + return detail::bitfieldInterleave(x, y, z); + } + + GLM_FUNC_QUALIFIER uint32 bitfieldInterleave(u8vec3 const& v) + { + return detail::bitfieldInterleave(v.x, v.y, v.z); + } + + GLM_FUNC_QUALIFIER int64 bitfieldInterleave(int16 x, int16 y, int16 z) + { + union sign16 + { + int16 i; + uint16 u; + } sign_x, sign_y, sign_z; + + union sign64 + { + int64 i; + uint64 u; + } result; + + sign_x.i = x; + sign_y.i = y; + sign_z.i = z; + result.u = bitfieldInterleave(sign_x.u, sign_y.u, sign_z.u); + + return result.i; + } + + GLM_FUNC_QUALIFIER uint64 bitfieldInterleave(uint16 x, uint16 y, uint16 z) + { + return detail::bitfieldInterleave(x, y, z); + } + + GLM_FUNC_QUALIFIER uint64 bitfieldInterleave(u16vec3 const& v) + { + return detail::bitfieldInterleave(v.x, v.y, v.z); + } + + GLM_FUNC_QUALIFIER int64 bitfieldInterleave(int32 x, int32 y, int32 z) + { + union sign16 + { + int32 i; + uint32 u; + } sign_x, sign_y, sign_z; + + union sign64 + { + int64 i; + uint64 u; + } result; + + sign_x.i = x; + sign_y.i = y; + sign_z.i = z; + result.u = bitfieldInterleave(sign_x.u, sign_y.u, sign_z.u); + + return result.i; + } + + GLM_FUNC_QUALIFIER uint64 bitfieldInterleave(uint32 x, uint32 y, uint32 z) + { + return detail::bitfieldInterleave(x, y, z); + } + + GLM_FUNC_QUALIFIER uint64 bitfieldInterleave(u32vec3 const& v) + { + return detail::bitfieldInterleave(v.x, v.y, v.z); + } + + GLM_FUNC_QUALIFIER int32 bitfieldInterleave(int8 x, int8 y, int8 z, int8 w) + { + union sign8 + { + int8 i; + uint8 u; + } sign_x, sign_y, sign_z, sign_w; + + union sign32 + { + int32 i; + uint32 u; + } result; + + sign_x.i = x; + sign_y.i = y; + sign_z.i = z; + sign_w.i = w; + result.u = bitfieldInterleave(sign_x.u, sign_y.u, sign_z.u, sign_w.u); + + return result.i; + } + + GLM_FUNC_QUALIFIER uint32 bitfieldInterleave(uint8 x, uint8 y, uint8 z, uint8 w) + { + return detail::bitfieldInterleave(x, y, z, w); + } + + GLM_FUNC_QUALIFIER uint32 bitfieldInterleave(u8vec4 const& v) + { + return detail::bitfieldInterleave(v.x, v.y, v.z, v.w); + } + + GLM_FUNC_QUALIFIER int64 bitfieldInterleave(int16 x, int16 y, int16 z, int16 w) + { + union sign16 + { + int16 i; + uint16 u; + } sign_x, sign_y, sign_z, sign_w; + + union sign64 + { + int64 i; + uint64 u; + } result; + + 
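+		// Reinterpret the signed inputs as unsigned bit patterns through the
+		// unions, so the unsigned bitfieldInterleave overload can do the
+		// actual bit interleaving.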
sign_x.i = x; + sign_y.i = y; + sign_z.i = z; + sign_w.i = w; + result.u = bitfieldInterleave(sign_x.u, sign_y.u, sign_z.u, sign_w.u); + + return result.i; + } + + GLM_FUNC_QUALIFIER uint64 bitfieldInterleave(uint16 x, uint16 y, uint16 z, uint16 w) + { + return detail::bitfieldInterleave(x, y, z, w); + } + + GLM_FUNC_QUALIFIER uint64 bitfieldInterleave(u16vec4 const& v) + { + return detail::bitfieldInterleave(v.x, v.y, v.z, v.w); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtc/color_space.hpp b/MacroLibX/third_party/glm/gtc/color_space.hpp new file mode 100644 index 0000000..cffd9f0 --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/color_space.hpp @@ -0,0 +1,56 @@ +/// @ref gtc_color_space +/// @file glm/gtc/color_space.hpp +/// +/// @see core (dependence) +/// @see gtc_color_space (dependence) +/// +/// @defgroup gtc_color_space GLM_GTC_color_space +/// @ingroup gtc +/// +/// Include to use the features of this extension. +/// +/// Allow to perform bit operations on integer values + +#pragma once + +// Dependencies +#include "../detail/setup.hpp" +#include "../detail/qualifier.hpp" +#include "../exponential.hpp" +#include "../vec3.hpp" +#include "../vec4.hpp" +#include + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_GTC_color_space extension included") +#endif + +namespace glm +{ + /// @addtogroup gtc_color_space + /// @{ + + /// Convert a linear color to sRGB color using a standard gamma correction. + /// IEC 61966-2-1:1999 / Rec. 709 specification https://www.w3.org/Graphics/Color/srgb + template + GLM_FUNC_DECL vec convertLinearToSRGB(vec const& ColorLinear); + + /// Convert a linear color to sRGB color using a custom gamma correction. + /// IEC 61966-2-1:1999 / Rec. 709 specification https://www.w3.org/Graphics/Color/srgb + template + GLM_FUNC_DECL vec convertLinearToSRGB(vec const& ColorLinear, T Gamma); + + /// Convert a sRGB color to linear color using a standard gamma correction. + /// IEC 61966-2-1:1999 / Rec. 709 specification https://www.w3.org/Graphics/Color/srgb + template + GLM_FUNC_DECL vec convertSRGBToLinear(vec const& ColorSRGB); + + /// Convert a sRGB color to linear color using a custom gamma correction. + // IEC 61966-2-1:1999 / Rec. 
709 specification https://www.w3.org/Graphics/Color/srgb + template + GLM_FUNC_DECL vec convertSRGBToLinear(vec const& ColorSRGB, T Gamma); + + /// @} +} //namespace glm + +#include "color_space.inl" diff --git a/MacroLibX/third_party/glm/gtc/color_space.inl b/MacroLibX/third_party/glm/gtc/color_space.inl new file mode 100644 index 0000000..2a90004 --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/color_space.inl @@ -0,0 +1,84 @@ +/// @ref gtc_color_space + +namespace glm{ +namespace detail +{ + template + struct compute_rgbToSrgb + { + GLM_FUNC_QUALIFIER static vec call(vec const& ColorRGB, T GammaCorrection) + { + vec const ClampedColor(clamp(ColorRGB, static_cast(0), static_cast(1))); + + return mix( + pow(ClampedColor, vec(GammaCorrection)) * static_cast(1.055) - static_cast(0.055), + ClampedColor * static_cast(12.92), + lessThan(ClampedColor, vec(static_cast(0.0031308)))); + } + }; + + template + struct compute_rgbToSrgb<4, T, Q> + { + GLM_FUNC_QUALIFIER static vec<4, T, Q> call(vec<4, T, Q> const& ColorRGB, T GammaCorrection) + { + return vec<4, T, Q>(compute_rgbToSrgb<3, T, Q>::call(vec<3, T, Q>(ColorRGB), GammaCorrection), ColorRGB.w); + } + }; + + template + struct compute_srgbToRgb + { + GLM_FUNC_QUALIFIER static vec call(vec const& ColorSRGB, T Gamma) + { + return mix( + pow((ColorSRGB + static_cast(0.055)) * static_cast(0.94786729857819905213270142180095), vec(Gamma)), + ColorSRGB * static_cast(0.07739938080495356037151702786378), + lessThanEqual(ColorSRGB, vec(static_cast(0.04045)))); + } + }; + + template + struct compute_srgbToRgb<4, T, Q> + { + GLM_FUNC_QUALIFIER static vec<4, T, Q> call(vec<4, T, Q> const& ColorSRGB, T Gamma) + { + return vec<4, T, Q>(compute_srgbToRgb<3, T, Q>::call(vec<3, T, Q>(ColorSRGB), Gamma), ColorSRGB.w); + } + }; +}//namespace detail + + template + GLM_FUNC_QUALIFIER vec convertLinearToSRGB(vec const& ColorLinear) + { + return detail::compute_rgbToSrgb::call(ColorLinear, static_cast(0.41666)); + } + + // Based on Ian Taylor http://chilliant.blogspot.fr/2012/08/srgb-approximations-for-hlsl.html + template<> + GLM_FUNC_QUALIFIER vec<3, float, lowp> convertLinearToSRGB(vec<3, float, lowp> const& ColorLinear) + { + vec<3, float, lowp> S1 = sqrt(ColorLinear); + vec<3, float, lowp> S2 = sqrt(S1); + vec<3, float, lowp> S3 = sqrt(S2); + return 0.662002687f * S1 + 0.684122060f * S2 - 0.323583601f * S3 - 0.0225411470f * ColorLinear; + } + + template + GLM_FUNC_QUALIFIER vec convertLinearToSRGB(vec const& ColorLinear, T Gamma) + { + return detail::compute_rgbToSrgb::call(ColorLinear, static_cast(1) / Gamma); + } + + template + GLM_FUNC_QUALIFIER vec convertSRGBToLinear(vec const& ColorSRGB) + { + return detail::compute_srgbToRgb::call(ColorSRGB, static_cast(2.4)); + } + + template + GLM_FUNC_QUALIFIER vec convertSRGBToLinear(vec const& ColorSRGB, T Gamma) + { + return detail::compute_srgbToRgb::call(ColorSRGB, Gamma); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtc/constants.hpp b/MacroLibX/third_party/glm/gtc/constants.hpp new file mode 100644 index 0000000..99f2128 --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/constants.hpp @@ -0,0 +1,165 @@ +/// @ref gtc_constants +/// @file glm/gtc/constants.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtc_constants GLM_GTC_constants +/// @ingroup gtc +/// +/// Include to use the features of this extension. +/// +/// Provide a list of constants and precomputed useful values. 
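+///
+/// A minimal usage sketch (illustrative; 'r' stands for any float radius):
+/// @code
+/// float circumference = glm::two_pi<float>() * r;
+/// @endcode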
+ +#pragma once + +// Dependencies +#include "../ext/scalar_constants.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_GTC_constants extension included") +#endif + +namespace glm +{ + /// @addtogroup gtc_constants + /// @{ + + /// Return 0. + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType zero(); + + /// Return 1. + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType one(); + + /// Return pi * 2. + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType two_pi(); + + /// Return square root of pi. + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType root_pi(); + + /// Return pi / 2. + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType half_pi(); + + /// Return pi / 2 * 3. + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType three_over_two_pi(); + + /// Return pi / 4. + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType quarter_pi(); + + /// Return 1 / pi. + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType one_over_pi(); + + /// Return 1 / (pi * 2). + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType one_over_two_pi(); + + /// Return 2 / pi. + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType two_over_pi(); + + /// Return 4 / pi. + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType four_over_pi(); + + /// Return 2 / sqrt(pi). + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType two_over_root_pi(); + + /// Return 1 / sqrt(2). + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType one_over_root_two(); + + /// Return sqrt(pi / 2). + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType root_half_pi(); + + /// Return sqrt(2 * pi). + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType root_two_pi(); + + /// Return sqrt(ln(4)). + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType root_ln_four(); + + /// Return e constant. + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType e(); + + /// Return Euler's constant. + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType euler(); + + /// Return sqrt(2). + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType root_two(); + + /// Return sqrt(3). + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType root_three(); + + /// Return sqrt(5). + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType root_five(); + + /// Return ln(2). + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType ln_two(); + + /// Return ln(10). + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType ln_ten(); + + /// Return ln(ln(2)). + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType ln_ln_two(); + + /// Return 1 / 3. + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType third(); + + /// Return 2 / 3. + /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType two_thirds(); + + /// Return the golden ratio constant. 
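+	/// For reference, this evaluates to approximately 1.6180339887.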
+ /// @see gtc_constants + template + GLM_FUNC_DECL GLM_CONSTEXPR genType golden_ratio(); + + /// @} +} //namespace glm + +#include "constants.inl" diff --git a/MacroLibX/third_party/glm/gtc/constants.inl b/MacroLibX/third_party/glm/gtc/constants.inl new file mode 100644 index 0000000..bb98c6b --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/constants.inl @@ -0,0 +1,167 @@ +/// @ref gtc_constants + +namespace glm +{ + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType zero() + { + return genType(0); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType one() + { + return genType(1); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType two_pi() + { + return genType(6.28318530717958647692528676655900576); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType root_pi() + { + return genType(1.772453850905516027); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType half_pi() + { + return genType(1.57079632679489661923132169163975144); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType three_over_two_pi() + { + return genType(4.71238898038468985769396507491925432); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType quarter_pi() + { + return genType(0.785398163397448309615660845819875721); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType one_over_pi() + { + return genType(0.318309886183790671537767526745028724); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType one_over_two_pi() + { + return genType(0.159154943091895335768883763372514362); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType two_over_pi() + { + return genType(0.636619772367581343075535053490057448); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType four_over_pi() + { + return genType(1.273239544735162686151070106980114898); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType two_over_root_pi() + { + return genType(1.12837916709551257389615890312154517); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType one_over_root_two() + { + return genType(0.707106781186547524400844362104849039); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType root_half_pi() + { + return genType(1.253314137315500251); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType root_two_pi() + { + return genType(2.506628274631000502); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType root_ln_four() + { + return genType(1.17741002251547469); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType e() + { + return genType(2.71828182845904523536); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType euler() + { + return genType(0.577215664901532860606); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType root_two() + { + return genType(1.41421356237309504880168872420969808); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType root_three() + { + return genType(1.73205080756887729352744634150587236); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType root_five() + { + return genType(2.23606797749978969640917366873127623); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType ln_two() + { + return genType(0.693147180559945309417232121458176568); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType ln_ten() + { + return genType(2.30258509299404568401799145468436421); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType ln_ln_two() + { + return genType(-0.3665129205816643); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType third() + { + return 
genType(0.3333333333333333333333333333333333333333);
+	}
+
+	template<typename genType>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType two_thirds()
+	{
+		return genType(0.666666666666666666666666666666666666667);
+	}
+
+	template<typename genType>
+	GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType golden_ratio()
+	{
+		return genType(1.61803398874989484820458683436563811);
+	}
+
+} //namespace glm
diff --git a/MacroLibX/third_party/glm/gtc/epsilon.hpp b/MacroLibX/third_party/glm/gtc/epsilon.hpp
new file mode 100644
index 0000000..640439b
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtc/epsilon.hpp
@@ -0,0 +1,60 @@
+/// @ref gtc_epsilon
+/// @file glm/gtc/epsilon.hpp
+///
+/// @see core (dependence)
+/// @see gtc_quaternion (dependence)
+///
+/// @defgroup gtc_epsilon GLM_GTC_epsilon
+/// @ingroup gtc
+///
+/// Include to use the features of this extension.
+///
+/// Comparison functions for user-defined epsilon values.
+
+#pragma once
+
+// Dependencies
+#include "../detail/setup.hpp"
+#include "../detail/qualifier.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_GTC_epsilon extension included")
+#endif
+
+namespace glm
+{
+	/// @addtogroup gtc_epsilon
+	/// @{
+
+	/// Returns the component-wise comparison of |x - y| < epsilon.
+	/// True if this expression is satisfied.
+	///
+	/// @see gtc_epsilon
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, bool, Q> epsilonEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y, T const& epsilon);
+
+	/// Returns the comparison of |x - y| < epsilon.
+	/// True if this expression is satisfied.
+	///
+	/// @see gtc_epsilon
+	template<typename genType>
+	GLM_FUNC_DECL bool epsilonEqual(genType const& x, genType const& y, genType const& epsilon);
+
+	/// Returns the component-wise comparison of |x - y| >= epsilon.
+	/// True if this expression is satisfied.
+	///
+	/// @see gtc_epsilon
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, bool, Q> epsilonNotEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y, T const& epsilon);
+
+	/// Returns the comparison of |x - y| >= epsilon.
+	/// True if this expression is satisfied.
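+	/// For example, epsilonNotEqual(1.0f, 1.5f, 0.1f) returns true and
+	/// epsilonNotEqual(1.0f, 1.0001f, 0.1f) returns false.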
+ /// + /// @see gtc_epsilon + template + GLM_FUNC_DECL bool epsilonNotEqual(genType const& x, genType const& y, genType const& epsilon); + + /// @} +}//namespace glm + +#include "epsilon.inl" diff --git a/MacroLibX/third_party/glm/gtc/epsilon.inl b/MacroLibX/third_party/glm/gtc/epsilon.inl new file mode 100644 index 0000000..508b9f8 --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/epsilon.inl @@ -0,0 +1,80 @@ +/// @ref gtc_epsilon + +// Dependency: +#include "../vector_relational.hpp" +#include "../common.hpp" + +namespace glm +{ + template<> + GLM_FUNC_QUALIFIER bool epsilonEqual + ( + float const& x, + float const& y, + float const& epsilon + ) + { + return abs(x - y) < epsilon; + } + + template<> + GLM_FUNC_QUALIFIER bool epsilonEqual + ( + double const& x, + double const& y, + double const& epsilon + ) + { + return abs(x - y) < epsilon; + } + + template + GLM_FUNC_QUALIFIER vec epsilonEqual(vec const& x, vec const& y, T const& epsilon) + { + return lessThan(abs(x - y), vec(epsilon)); + } + + template + GLM_FUNC_QUALIFIER vec epsilonEqual(vec const& x, vec const& y, vec const& epsilon) + { + return lessThan(abs(x - y), vec(epsilon)); + } + + template<> + GLM_FUNC_QUALIFIER bool epsilonNotEqual(float const& x, float const& y, float const& epsilon) + { + return abs(x - y) >= epsilon; + } + + template<> + GLM_FUNC_QUALIFIER bool epsilonNotEqual(double const& x, double const& y, double const& epsilon) + { + return abs(x - y) >= epsilon; + } + + template + GLM_FUNC_QUALIFIER vec epsilonNotEqual(vec const& x, vec const& y, T const& epsilon) + { + return greaterThanEqual(abs(x - y), vec(epsilon)); + } + + template + GLM_FUNC_QUALIFIER vec epsilonNotEqual(vec const& x, vec const& y, vec const& epsilon) + { + return greaterThanEqual(abs(x - y), vec(epsilon)); + } + + template + GLM_FUNC_QUALIFIER vec<4, bool, Q> epsilonEqual(qua const& x, qua const& y, T const& epsilon) + { + vec<4, T, Q> v(x.x - y.x, x.y - y.y, x.z - y.z, x.w - y.w); + return lessThan(abs(v), vec<4, T, Q>(epsilon)); + } + + template + GLM_FUNC_QUALIFIER vec<4, bool, Q> epsilonNotEqual(qua const& x, qua const& y, T const& epsilon) + { + vec<4, T, Q> v(x.x - y.x, x.y - y.y, x.z - y.z, x.w - y.w); + return greaterThanEqual(abs(v), vec<4, T, Q>(epsilon)); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtc/integer.hpp b/MacroLibX/third_party/glm/gtc/integer.hpp new file mode 100644 index 0000000..64ce10b --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/integer.hpp @@ -0,0 +1,65 @@ +/// @ref gtc_integer +/// @file glm/gtc/integer.hpp +/// +/// @see core (dependence) +/// @see gtc_integer (dependence) +/// +/// @defgroup gtc_integer GLM_GTC_integer +/// @ingroup gtc +/// +/// Include to use the features of this extension. +/// +/// @brief Allow to perform bit operations on integer values + +#pragma once + +// Dependencies +#include "../detail/setup.hpp" +#include "../detail/qualifier.hpp" +#include "../common.hpp" +#include "../integer.hpp" +#include "../exponential.hpp" +#include + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_GTC_integer extension included") +#endif + +namespace glm +{ + /// @addtogroup gtc_integer + /// @{ + + /// Returns the log2 of x for integer values. Usefull to compute mipmap count from the texture size. + /// @see gtc_integer + template + GLM_FUNC_DECL genIUType log2(genIUType x); + + /// Returns a value equal to the nearest integer to x. 
+ /// The fraction 0.5 will round in a direction chosen by the + /// implementation, presumably the direction that is fastest. + /// + /// @param x The values of the argument must be greater or equal to zero. + /// @tparam T floating point scalar types. + /// + /// @see GLSL round man page + /// @see gtc_integer + template + GLM_FUNC_DECL vec iround(vec const& x); + + /// Returns a value equal to the nearest integer to x. + /// The fraction 0.5 will round in a direction chosen by the + /// implementation, presumably the direction that is fastest. + /// + /// @param x The values of the argument must be greater or equal to zero. + /// @tparam T floating point scalar types. + /// + /// @see GLSL round man page + /// @see gtc_integer + template + GLM_FUNC_DECL vec uround(vec const& x); + + /// @} +} //namespace glm + +#include "integer.inl" diff --git a/MacroLibX/third_party/glm/gtc/integer.inl b/MacroLibX/third_party/glm/gtc/integer.inl new file mode 100644 index 0000000..f0a8b4f --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/integer.inl @@ -0,0 +1,68 @@ +/// @ref gtc_integer + +namespace glm{ +namespace detail +{ + template + struct compute_log2 + { + GLM_FUNC_QUALIFIER static vec call(vec const& v) + { + //Equivalent to return findMSB(vec); but save one function call in ASM with VC + //return findMSB(vec); + return vec(detail::compute_findMSB_vec::call(v)); + } + }; + +# if GLM_HAS_BITSCAN_WINDOWS + template + struct compute_log2<4, int, Q, false, Aligned> + { + GLM_FUNC_QUALIFIER static vec<4, int, Q> call(vec<4, int, Q> const& v) + { + vec<4, int, Q> Result; + _BitScanReverse(reinterpret_cast(&Result.x), v.x); + _BitScanReverse(reinterpret_cast(&Result.y), v.y); + _BitScanReverse(reinterpret_cast(&Result.z), v.z); + _BitScanReverse(reinterpret_cast(&Result.w), v.w); + return Result; + } + }; +# endif//GLM_HAS_BITSCAN_WINDOWS +}//namespace detail + template + GLM_FUNC_QUALIFIER int iround(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'iround' only accept floating-point inputs"); + assert(static_cast(0.0) <= x); + + return static_cast(x + static_cast(0.5)); + } + + template + GLM_FUNC_QUALIFIER vec iround(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'iround' only accept floating-point inputs"); + assert(all(lessThanEqual(vec(0), x))); + + return vec(x + static_cast(0.5)); + } + + template + GLM_FUNC_QUALIFIER uint uround(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'uround' only accept floating-point inputs"); + assert(static_cast(0.0) <= x); + + return static_cast(x + static_cast(0.5)); + } + + template + GLM_FUNC_QUALIFIER vec uround(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'uround' only accept floating-point inputs"); + assert(all(lessThanEqual(vec(0), x))); + + return vec(x + static_cast(0.5)); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtc/matrix_access.hpp b/MacroLibX/third_party/glm/gtc/matrix_access.hpp new file mode 100644 index 0000000..4935ba7 --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/matrix_access.hpp @@ -0,0 +1,60 @@ +/// @ref gtc_matrix_access +/// @file glm/gtc/matrix_access.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtc_matrix_access GLM_GTC_matrix_access +/// @ingroup gtc +/// +/// Include to use the features of this extension. +/// +/// Defines functions to access rows or columns of a matrix easily. 
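+///
+/// A short usage sketch (illustrative):
+/// @code
+/// glm::mat4 m(1.0f);
+/// glm::vec4 r = glm::row(m, 1);                 // read row 1
+/// m = glm::column(m, 2, glm::vec4(0, 0, 1, 0)); // replace column 2
+/// @endcode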
+ +#pragma once + +// Dependency: +#include "../detail/setup.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_GTC_matrix_access extension included") +#endif + +namespace glm +{ + /// @addtogroup gtc_matrix_access + /// @{ + + /// Get a specific row of a matrix. + /// @see gtc_matrix_access + template + GLM_FUNC_DECL typename genType::row_type row( + genType const& m, + length_t index); + + /// Set a specific row to a matrix. + /// @see gtc_matrix_access + template + GLM_FUNC_DECL genType row( + genType const& m, + length_t index, + typename genType::row_type const& x); + + /// Get a specific column of a matrix. + /// @see gtc_matrix_access + template + GLM_FUNC_DECL typename genType::col_type column( + genType const& m, + length_t index); + + /// Set a specific column to a matrix. + /// @see gtc_matrix_access + template + GLM_FUNC_DECL genType column( + genType const& m, + length_t index, + typename genType::col_type const& x); + + /// @} +}//namespace glm + +#include "matrix_access.inl" diff --git a/MacroLibX/third_party/glm/gtc/matrix_access.inl b/MacroLibX/third_party/glm/gtc/matrix_access.inl new file mode 100644 index 0000000..09fcc10 --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/matrix_access.inl @@ -0,0 +1,62 @@ +/// @ref gtc_matrix_access + +namespace glm +{ + template + GLM_FUNC_QUALIFIER genType row + ( + genType const& m, + length_t index, + typename genType::row_type const& x + ) + { + assert(index >= 0 && index < m[0].length()); + + genType Result = m; + for(length_t i = 0; i < m.length(); ++i) + Result[i][index] = x[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER typename genType::row_type row + ( + genType const& m, + length_t index + ) + { + assert(index >= 0 && index < m[0].length()); + + typename genType::row_type Result(0); + for(length_t i = 0; i < m.length(); ++i) + Result[i] = m[i][index]; + return Result; + } + + template + GLM_FUNC_QUALIFIER genType column + ( + genType const& m, + length_t index, + typename genType::col_type const& x + ) + { + assert(index >= 0 && index < m.length()); + + genType Result = m; + Result[index] = x; + return Result; + } + + template + GLM_FUNC_QUALIFIER typename genType::col_type column + ( + genType const& m, + length_t index + ) + { + assert(index >= 0 && index < m.length()); + + return m[index]; + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtc/matrix_integer.hpp b/MacroLibX/third_party/glm/gtc/matrix_integer.hpp new file mode 100644 index 0000000..557a977 --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/matrix_integer.hpp @@ -0,0 +1,487 @@ +/// @ref gtc_matrix_integer +/// @file glm/gtc/matrix_integer.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtc_matrix_integer GLM_GTC_matrix_integer +/// @ingroup gtc +/// +/// Include to use the features of this extension. +/// +/// Defines a number of matrices with integer types. + +#pragma once + +// Dependency: +#include "../mat2x2.hpp" +#include "../mat2x3.hpp" +#include "../mat2x4.hpp" +#include "../mat3x2.hpp" +#include "../mat3x3.hpp" +#include "../mat3x4.hpp" +#include "../mat4x2.hpp" +#include "../mat4x3.hpp" +#include "../mat4x4.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_GTC_matrix_integer extension included") +#endif + +namespace glm +{ + /// @addtogroup gtc_matrix_integer + /// @{ + + /// High-qualifier signed integer 2x2 matrix. 
+ /// @see gtc_matrix_integer + typedef mat<2, 2, int, highp> highp_imat2; + + /// High-qualifier signed integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 3, int, highp> highp_imat3; + + /// High-qualifier signed integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 4, int, highp> highp_imat4; + + /// High-qualifier signed integer 2x2 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 2, int, highp> highp_imat2x2; + + /// High-qualifier signed integer 2x3 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 3, int, highp> highp_imat2x3; + + /// High-qualifier signed integer 2x4 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 4, int, highp> highp_imat2x4; + + /// High-qualifier signed integer 3x2 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 2, int, highp> highp_imat3x2; + + /// High-qualifier signed integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 3, int, highp> highp_imat3x3; + + /// High-qualifier signed integer 3x4 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 4, int, highp> highp_imat3x4; + + /// High-qualifier signed integer 4x2 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 2, int, highp> highp_imat4x2; + + /// High-qualifier signed integer 4x3 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 3, int, highp> highp_imat4x3; + + /// High-qualifier signed integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 4, int, highp> highp_imat4x4; + + + /// Medium-qualifier signed integer 2x2 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 2, int, mediump> mediump_imat2; + + /// Medium-qualifier signed integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 3, int, mediump> mediump_imat3; + + /// Medium-qualifier signed integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 4, int, mediump> mediump_imat4; + + + /// Medium-qualifier signed integer 2x2 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 2, int, mediump> mediump_imat2x2; + + /// Medium-qualifier signed integer 2x3 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 3, int, mediump> mediump_imat2x3; + + /// Medium-qualifier signed integer 2x4 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 4, int, mediump> mediump_imat2x4; + + /// Medium-qualifier signed integer 3x2 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 2, int, mediump> mediump_imat3x2; + + /// Medium-qualifier signed integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 3, int, mediump> mediump_imat3x3; + + /// Medium-qualifier signed integer 3x4 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 4, int, mediump> mediump_imat3x4; + + /// Medium-qualifier signed integer 4x2 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 2, int, mediump> mediump_imat4x2; + + /// Medium-qualifier signed integer 4x3 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 3, int, mediump> mediump_imat4x3; + + /// Medium-qualifier signed integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 4, int, mediump> mediump_imat4x4; + + + /// Low-qualifier signed integer 2x2 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 2, int, lowp> lowp_imat2; + + /// Low-qualifier signed integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 3, int, lowp> lowp_imat3; + + /// Low-qualifier signed integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 4, int, lowp> lowp_imat4; + + + /// Low-qualifier signed integer 2x2 matrix. 
+ /// @see gtc_matrix_integer + typedef mat<2, 2, int, lowp> lowp_imat2x2; + + /// Low-qualifier signed integer 2x3 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 3, int, lowp> lowp_imat2x3; + + /// Low-qualifier signed integer 2x4 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 4, int, lowp> lowp_imat2x4; + + /// Low-qualifier signed integer 3x2 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 2, int, lowp> lowp_imat3x2; + + /// Low-qualifier signed integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 3, int, lowp> lowp_imat3x3; + + /// Low-qualifier signed integer 3x4 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 4, int, lowp> lowp_imat3x4; + + /// Low-qualifier signed integer 4x2 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 2, int, lowp> lowp_imat4x2; + + /// Low-qualifier signed integer 4x3 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 3, int, lowp> lowp_imat4x3; + + /// Low-qualifier signed integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 4, int, lowp> lowp_imat4x4; + + + /// High-qualifier unsigned integer 2x2 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 2, uint, highp> highp_umat2; + + /// High-qualifier unsigned integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 3, uint, highp> highp_umat3; + + /// High-qualifier unsigned integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 4, uint, highp> highp_umat4; + + /// High-qualifier unsigned integer 2x2 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 2, uint, highp> highp_umat2x2; + + /// High-qualifier unsigned integer 2x3 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 3, uint, highp> highp_umat2x3; + + /// High-qualifier unsigned integer 2x4 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 4, uint, highp> highp_umat2x4; + + /// High-qualifier unsigned integer 3x2 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 2, uint, highp> highp_umat3x2; + + /// High-qualifier unsigned integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 3, uint, highp> highp_umat3x3; + + /// High-qualifier unsigned integer 3x4 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 4, uint, highp> highp_umat3x4; + + /// High-qualifier unsigned integer 4x2 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 2, uint, highp> highp_umat4x2; + + /// High-qualifier unsigned integer 4x3 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 3, uint, highp> highp_umat4x3; + + /// High-qualifier unsigned integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 4, uint, highp> highp_umat4x4; + + + /// Medium-qualifier unsigned integer 2x2 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 2, uint, mediump> mediump_umat2; + + /// Medium-qualifier unsigned integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 3, uint, mediump> mediump_umat3; + + /// Medium-qualifier unsigned integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 4, uint, mediump> mediump_umat4; + + + /// Medium-qualifier unsigned integer 2x2 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 2, uint, mediump> mediump_umat2x2; + + /// Medium-qualifier unsigned integer 2x3 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 3, uint, mediump> mediump_umat2x3; + + /// Medium-qualifier unsigned integer 2x4 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 4, uint, mediump> mediump_umat2x4; + + /// Medium-qualifier unsigned integer 3x2 matrix. 
+ /// @see gtc_matrix_integer + typedef mat<3, 2, uint, mediump> mediump_umat3x2; + + /// Medium-qualifier unsigned integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 3, uint, mediump> mediump_umat3x3; + + /// Medium-qualifier unsigned integer 3x4 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 4, uint, mediump> mediump_umat3x4; + + /// Medium-qualifier unsigned integer 4x2 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 2, uint, mediump> mediump_umat4x2; + + /// Medium-qualifier unsigned integer 4x3 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 3, uint, mediump> mediump_umat4x3; + + /// Medium-qualifier unsigned integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 4, uint, mediump> mediump_umat4x4; + + + /// Low-qualifier unsigned integer 2x2 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 2, uint, lowp> lowp_umat2; + + /// Low-qualifier unsigned integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 3, uint, lowp> lowp_umat3; + + /// Low-qualifier unsigned integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 4, uint, lowp> lowp_umat4; + + + /// Low-qualifier unsigned integer 2x2 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 2, uint, lowp> lowp_umat2x2; + + /// Low-qualifier unsigned integer 2x3 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 3, uint, lowp> lowp_umat2x3; + + /// Low-qualifier unsigned integer 2x4 matrix. + /// @see gtc_matrix_integer + typedef mat<2, 4, uint, lowp> lowp_umat2x4; + + /// Low-qualifier unsigned integer 3x2 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 2, uint, lowp> lowp_umat3x2; + + /// Low-qualifier unsigned integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 3, uint, lowp> lowp_umat3x3; + + /// Low-qualifier unsigned integer 3x4 matrix. + /// @see gtc_matrix_integer + typedef mat<3, 4, uint, lowp> lowp_umat3x4; + + /// Low-qualifier unsigned integer 4x2 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 2, uint, lowp> lowp_umat4x2; + + /// Low-qualifier unsigned integer 4x3 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 3, uint, lowp> lowp_umat4x3; + + /// Low-qualifier unsigned integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mat<4, 4, uint, lowp> lowp_umat4x4; + +#if(defined(GLM_PRECISION_HIGHP_INT)) + typedef highp_imat2 imat2; + typedef highp_imat3 imat3; + typedef highp_imat4 imat4; + typedef highp_imat2x2 imat2x2; + typedef highp_imat2x3 imat2x3; + typedef highp_imat2x4 imat2x4; + typedef highp_imat3x2 imat3x2; + typedef highp_imat3x3 imat3x3; + typedef highp_imat3x4 imat3x4; + typedef highp_imat4x2 imat4x2; + typedef highp_imat4x3 imat4x3; + typedef highp_imat4x4 imat4x4; +#elif(defined(GLM_PRECISION_LOWP_INT)) + typedef lowp_imat2 imat2; + typedef lowp_imat3 imat3; + typedef lowp_imat4 imat4; + typedef lowp_imat2x2 imat2x2; + typedef lowp_imat2x3 imat2x3; + typedef lowp_imat2x4 imat2x4; + typedef lowp_imat3x2 imat3x2; + typedef lowp_imat3x3 imat3x3; + typedef lowp_imat3x4 imat3x4; + typedef lowp_imat4x2 imat4x2; + typedef lowp_imat4x3 imat4x3; + typedef lowp_imat4x4 imat4x4; +#else //if(defined(GLM_PRECISION_MEDIUMP_INT)) + + /// Signed integer 2x2 matrix. + /// @see gtc_matrix_integer + typedef mediump_imat2 imat2; + + /// Signed integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mediump_imat3 imat3; + + /// Signed integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mediump_imat4 imat4; + + /// Signed integer 2x2 matrix. 
+ /// @see gtc_matrix_integer + typedef mediump_imat2x2 imat2x2; + + /// Signed integer 2x3 matrix. + /// @see gtc_matrix_integer + typedef mediump_imat2x3 imat2x3; + + /// Signed integer 2x4 matrix. + /// @see gtc_matrix_integer + typedef mediump_imat2x4 imat2x4; + + /// Signed integer 3x2 matrix. + /// @see gtc_matrix_integer + typedef mediump_imat3x2 imat3x2; + + /// Signed integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mediump_imat3x3 imat3x3; + + /// Signed integer 3x4 matrix. + /// @see gtc_matrix_integer + typedef mediump_imat3x4 imat3x4; + + /// Signed integer 4x2 matrix. + /// @see gtc_matrix_integer + typedef mediump_imat4x2 imat4x2; + + /// Signed integer 4x3 matrix. + /// @see gtc_matrix_integer + typedef mediump_imat4x3 imat4x3; + + /// Signed integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mediump_imat4x4 imat4x4; +#endif//GLM_PRECISION + +#if(defined(GLM_PRECISION_HIGHP_UINT)) + typedef highp_umat2 umat2; + typedef highp_umat3 umat3; + typedef highp_umat4 umat4; + typedef highp_umat2x2 umat2x2; + typedef highp_umat2x3 umat2x3; + typedef highp_umat2x4 umat2x4; + typedef highp_umat3x2 umat3x2; + typedef highp_umat3x3 umat3x3; + typedef highp_umat3x4 umat3x4; + typedef highp_umat4x2 umat4x2; + typedef highp_umat4x3 umat4x3; + typedef highp_umat4x4 umat4x4; +#elif(defined(GLM_PRECISION_LOWP_UINT)) + typedef lowp_umat2 umat2; + typedef lowp_umat3 umat3; + typedef lowp_umat4 umat4; + typedef lowp_umat2x2 umat2x2; + typedef lowp_umat2x3 umat2x3; + typedef lowp_umat2x4 umat2x4; + typedef lowp_umat3x2 umat3x2; + typedef lowp_umat3x3 umat3x3; + typedef lowp_umat3x4 umat3x4; + typedef lowp_umat4x2 umat4x2; + typedef lowp_umat4x3 umat4x3; + typedef lowp_umat4x4 umat4x4; +#else //if(defined(GLM_PRECISION_MEDIUMP_UINT)) + + /// Unsigned integer 2x2 matrix. + /// @see gtc_matrix_integer + typedef mediump_umat2 umat2; + + /// Unsigned integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mediump_umat3 umat3; + + /// Unsigned integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mediump_umat4 umat4; + + /// Unsigned integer 2x2 matrix. + /// @see gtc_matrix_integer + typedef mediump_umat2x2 umat2x2; + + /// Unsigned integer 2x3 matrix. + /// @see gtc_matrix_integer + typedef mediump_umat2x3 umat2x3; + + /// Unsigned integer 2x4 matrix. + /// @see gtc_matrix_integer + typedef mediump_umat2x4 umat2x4; + + /// Unsigned integer 3x2 matrix. + /// @see gtc_matrix_integer + typedef mediump_umat3x2 umat3x2; + + /// Unsigned integer 3x3 matrix. + /// @see gtc_matrix_integer + typedef mediump_umat3x3 umat3x3; + + /// Unsigned integer 3x4 matrix. + /// @see gtc_matrix_integer + typedef mediump_umat3x4 umat3x4; + + /// Unsigned integer 4x2 matrix. + /// @see gtc_matrix_integer + typedef mediump_umat4x2 umat4x2; + + /// Unsigned integer 4x3 matrix. + /// @see gtc_matrix_integer + typedef mediump_umat4x3 umat4x3; + + /// Unsigned integer 4x4 matrix. + /// @see gtc_matrix_integer + typedef mediump_umat4x4 umat4x4; +#endif//GLM_PRECISION + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtc/matrix_inverse.hpp b/MacroLibX/third_party/glm/gtc/matrix_inverse.hpp new file mode 100644 index 0000000..a1900ad --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/matrix_inverse.hpp @@ -0,0 +1,50 @@ +/// @ref gtc_matrix_inverse +/// @file glm/gtc/matrix_inverse.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtc_matrix_inverse GLM_GTC_matrix_inverse +/// @ingroup gtc +/// +/// Include to use the features of this extension. 
+///
+/// Defines additional matrix inverting functions.
+
+#pragma once
+
+// Dependencies
+#include "../detail/setup.hpp"
+#include "../matrix.hpp"
+#include "../mat2x2.hpp"
+#include "../mat3x3.hpp"
+#include "../mat4x4.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_GTC_matrix_inverse extension included")
+#endif
+
+namespace glm
+{
+	/// @addtogroup gtc_matrix_inverse
+	/// @{
+
+	/// Fast matrix inverse for affine matrix.
+	///
+	/// @param m Input matrix to invert.
+	/// @tparam genType Squared floating-point matrix: half, float or double. The inverse of a matrix based on half-qualifier floating-point values is highly inaccurate.
+	/// @see gtc_matrix_inverse
+	template<typename genType>
+	GLM_FUNC_DECL genType affineInverse(genType const& m);
+
+	/// Compute the inverse transpose of a matrix.
+	///
+	/// @param m Input matrix to invert and transpose.
+	/// @tparam genType Squared floating-point matrix: half, float or double. The inverse of a matrix based on half-qualifier floating-point values is highly inaccurate.
+	/// @see gtc_matrix_inverse
+	template<typename genType>
+	GLM_FUNC_DECL genType inverseTranspose(genType const& m);
+
+	/// @}
+}//namespace glm
+
+#include "matrix_inverse.inl"
diff --git a/MacroLibX/third_party/glm/gtc/matrix_inverse.inl b/MacroLibX/third_party/glm/gtc/matrix_inverse.inl
new file mode 100644
index 0000000..c004b9e
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtc/matrix_inverse.inl
@@ -0,0 +1,118 @@
+/// @ref gtc_matrix_inverse
+
+namespace glm
+{
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER mat<3, 3, T, Q> affineInverse(mat<3, 3, T, Q> const& m)
+	{
+		mat<2, 2, T, Q> const Inv(inverse(mat<2, 2, T, Q>(m)));
+
+		return mat<3, 3, T, Q>(
+			vec<3, T, Q>(Inv[0], static_cast<T>(0)),
+			vec<3, T, Q>(Inv[1], static_cast<T>(0)),
+			vec<3, T, Q>(-Inv * vec<2, T, Q>(m[2]), static_cast<T>(1)));
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER mat<4, 4, T, Q> affineInverse(mat<4, 4, T, Q> const& m)
+	{
+		mat<3, 3, T, Q> const Inv(inverse(mat<3, 3, T, Q>(m)));
+
+		return mat<4, 4, T, Q>(
+			vec<4, T, Q>(Inv[0], static_cast<T>(0)),
+			vec<4, T, Q>(Inv[1], static_cast<T>(0)),
+			vec<4, T, Q>(Inv[2], static_cast<T>(0)),
+			vec<4, T, Q>(-Inv * vec<3, T, Q>(m[3]), static_cast<T>(1)));
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER mat<2, 2, T, Q> inverseTranspose(mat<2, 2, T, Q> const& m)
+	{
+		T Determinant = m[0][0] * m[1][1] - m[1][0] * m[0][1];
+
+		mat<2, 2, T, Q> Inverse(
+			+ m[1][1] / Determinant,
+			- m[0][1] / Determinant,
+			- m[1][0] / Determinant,
+			+ m[0][0] / Determinant);
+
+		return Inverse;
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER mat<3, 3, T, Q> inverseTranspose(mat<3, 3, T, Q> const& m)
+	{
+		T Determinant =
+			+ m[0][0] * (m[1][1] * m[2][2] - m[1][2] * m[2][1])
+			- m[0][1] * (m[1][0] * m[2][2] - m[1][2] * m[2][0])
+			+ m[0][2] * (m[1][0] * m[2][1] - m[1][1] * m[2][0]);
+
+		mat<3, 3, T, Q> Inverse;
+		Inverse[0][0] = + (m[1][1] * m[2][2] - m[2][1] * m[1][2]);
+		Inverse[0][1] = - (m[1][0] * m[2][2] - m[2][0] * m[1][2]);
+		Inverse[0][2] = + (m[1][0] * m[2][1] - m[2][0] * m[1][1]);
+		Inverse[1][0] = - (m[0][1] * m[2][2] - m[2][1] * m[0][2]);
+		Inverse[1][1] = + (m[0][0] * m[2][2] - m[2][0] * m[0][2]);
+		Inverse[1][2] = - (m[0][0] * m[2][1] - m[2][0] * m[0][1]);
+		Inverse[2][0] = + (m[0][1] * m[1][2] - m[1][1] * m[0][2]);
+		Inverse[2][1] = - (m[0][0] * m[1][2] - m[1][0] * m[0][2]);
+		Inverse[2][2] = + (m[0][0] * m[1][1] - m[1][0] * m[0][1]);
+		Inverse /= Determinant;
+
+		return Inverse;
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER mat<4, 4, T, Q> inverseTranspose(mat<4, 4, T, Q> const& m)
+	{
+		T SubFactor00 = m[2][2] * m[3][3] -
m[3][2] * m[2][3]; + T SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; + T SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; + T SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; + T SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; + T SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; + T SubFactor06 = m[1][2] * m[3][3] - m[3][2] * m[1][3]; + T SubFactor07 = m[1][1] * m[3][3] - m[3][1] * m[1][3]; + T SubFactor08 = m[1][1] * m[3][2] - m[3][1] * m[1][2]; + T SubFactor09 = m[1][0] * m[3][3] - m[3][0] * m[1][3]; + T SubFactor10 = m[1][0] * m[3][2] - m[3][0] * m[1][2]; + T SubFactor11 = m[1][0] * m[3][1] - m[3][0] * m[1][1]; + T SubFactor12 = m[1][2] * m[2][3] - m[2][2] * m[1][3]; + T SubFactor13 = m[1][1] * m[2][3] - m[2][1] * m[1][3]; + T SubFactor14 = m[1][1] * m[2][2] - m[2][1] * m[1][2]; + T SubFactor15 = m[1][0] * m[2][3] - m[2][0] * m[1][3]; + T SubFactor16 = m[1][0] * m[2][2] - m[2][0] * m[1][2]; + T SubFactor17 = m[1][0] * m[2][1] - m[2][0] * m[1][1]; + + mat<4, 4, T, Q> Inverse; + Inverse[0][0] = + (m[1][1] * SubFactor00 - m[1][2] * SubFactor01 + m[1][3] * SubFactor02); + Inverse[0][1] = - (m[1][0] * SubFactor00 - m[1][2] * SubFactor03 + m[1][3] * SubFactor04); + Inverse[0][2] = + (m[1][0] * SubFactor01 - m[1][1] * SubFactor03 + m[1][3] * SubFactor05); + Inverse[0][3] = - (m[1][0] * SubFactor02 - m[1][1] * SubFactor04 + m[1][2] * SubFactor05); + + Inverse[1][0] = - (m[0][1] * SubFactor00 - m[0][2] * SubFactor01 + m[0][3] * SubFactor02); + Inverse[1][1] = + (m[0][0] * SubFactor00 - m[0][2] * SubFactor03 + m[0][3] * SubFactor04); + Inverse[1][2] = - (m[0][0] * SubFactor01 - m[0][1] * SubFactor03 + m[0][3] * SubFactor05); + Inverse[1][3] = + (m[0][0] * SubFactor02 - m[0][1] * SubFactor04 + m[0][2] * SubFactor05); + + Inverse[2][0] = + (m[0][1] * SubFactor06 - m[0][2] * SubFactor07 + m[0][3] * SubFactor08); + Inverse[2][1] = - (m[0][0] * SubFactor06 - m[0][2] * SubFactor09 + m[0][3] * SubFactor10); + Inverse[2][2] = + (m[0][0] * SubFactor07 - m[0][1] * SubFactor09 + m[0][3] * SubFactor11); + Inverse[2][3] = - (m[0][0] * SubFactor08 - m[0][1] * SubFactor10 + m[0][2] * SubFactor11); + + Inverse[3][0] = - (m[0][1] * SubFactor12 - m[0][2] * SubFactor13 + m[0][3] * SubFactor14); + Inverse[3][1] = + (m[0][0] * SubFactor12 - m[0][2] * SubFactor15 + m[0][3] * SubFactor16); + Inverse[3][2] = - (m[0][0] * SubFactor13 - m[0][1] * SubFactor15 + m[0][3] * SubFactor17); + Inverse[3][3] = + (m[0][0] * SubFactor14 - m[0][1] * SubFactor16 + m[0][2] * SubFactor17); + + T Determinant = + + m[0][0] * Inverse[0][0] + + m[0][1] * Inverse[0][1] + + m[0][2] * Inverse[0][2] + + m[0][3] * Inverse[0][3]; + + Inverse /= Determinant; + + return Inverse; + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtc/matrix_transform.hpp b/MacroLibX/third_party/glm/gtc/matrix_transform.hpp new file mode 100644 index 0000000..612418f --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/matrix_transform.hpp @@ -0,0 +1,36 @@ +/// @ref gtc_matrix_transform +/// @file glm/gtc/matrix_transform.hpp +/// +/// @see core (dependence) +/// @see gtx_transform +/// @see gtx_transform2 +/// +/// @defgroup gtc_matrix_transform GLM_GTC_matrix_transform +/// @ingroup gtc +/// +/// Include to use the features of this extension. +/// +/// Defines functions that generate common transformation matrices. +/// +/// The matrices generated by this extension use standard OpenGL fixed-function +/// conventions. 
For example, the lookAt function generates a transform from world
+/// space into the specific eye space that the projective matrix functions
+/// (perspective, ortho, etc.) are designed to expect. The OpenGL compatibility
+/// specification defines the particular layout of this eye space.
+
+#pragma once
+
+// Dependencies
+#include "../mat4x4.hpp"
+#include "../vec2.hpp"
+#include "../vec3.hpp"
+#include "../vec4.hpp"
+#include "../ext/matrix_projection.hpp"
+#include "../ext/matrix_clip_space.hpp"
+#include "../ext/matrix_transform.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_GTC_matrix_transform extension included")
+#endif
+
+#include "matrix_transform.inl"
diff --git a/MacroLibX/third_party/glm/gtc/matrix_transform.inl b/MacroLibX/third_party/glm/gtc/matrix_transform.inl
new file mode 100644
index 0000000..15b46bc
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtc/matrix_transform.inl
@@ -0,0 +1,3 @@
+#include "../geometric.hpp"
+#include "../trigonometric.hpp"
+#include "../matrix.hpp"
diff --git a/MacroLibX/third_party/glm/gtc/noise.hpp b/MacroLibX/third_party/glm/gtc/noise.hpp
new file mode 100644
index 0000000..ab1772e
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtc/noise.hpp
@@ -0,0 +1,61 @@
+/// @ref gtc_noise
+/// @file glm/gtc/noise.hpp
+///
+/// @see core (dependence)
+///
+/// @defgroup gtc_noise GLM_GTC_noise
+/// @ingroup gtc
+///
+/// Include to use the features of this extension.
+///
+/// Defines 2D, 3D and 4D procedural noise functions.
+/// Based on the work of Stefan Gustavson and Ashima Arts on "webgl-noise":
+/// https://github.com/ashima/webgl-noise
+/// Following Stefan Gustavson's paper "Simplex noise demystified":
+/// http://www.itn.liu.se/~stegu/simplexnoise/simplexnoise.pdf
+
+#pragma once
+
+// Dependencies
+#include "../detail/setup.hpp"
+#include "../detail/qualifier.hpp"
+#include "../detail/_noise.hpp"
+#include "../geometric.hpp"
+#include "../common.hpp"
+#include "../vector_relational.hpp"
+#include "../vec2.hpp"
+#include "../vec3.hpp"
+#include "../vec4.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_GTC_noise extension included")
+#endif
+
+namespace glm
+{
+	/// @addtogroup gtc_noise
+	/// @{
+
+	/// Classic Perlin noise.
+	/// @see gtc_noise
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL T perlin(
+		vec<L, T, Q> const& p);
+
+	/// Periodic Perlin noise.
+	/// @see gtc_noise
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL T perlin(
+		vec<L, T, Q> const& p,
+		vec<L, T, Q> const& rep);
+
+	/// Simplex noise.
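+	/// For example, glm::simplex(glm::vec2(0.5f, 0.5f)) returns a pseudo-random
+	/// value in roughly the [-1, 1] range.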
+ /// @see gtc_noise + template + GLM_FUNC_DECL T simplex( + vec const& p); + + /// @} +}//namespace glm + +#include "noise.inl" diff --git a/MacroLibX/third_party/glm/gtc/noise.inl b/MacroLibX/third_party/glm/gtc/noise.inl new file mode 100644 index 0000000..30d0b27 --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/noise.inl @@ -0,0 +1,807 @@ +/// @ref gtc_noise +/// +// Based on the work of Stefan Gustavson and Ashima Arts on "webgl-noise": +// https://github.com/ashima/webgl-noise +// Following Stefan Gustavson's paper "Simplex noise demystified": +// http://www.itn.liu.se/~stegu/simplexnoise/simplexnoise.pdf + +namespace glm{ +namespace gtc +{ + template + GLM_FUNC_QUALIFIER vec<4, T, Q> grad4(T const& j, vec<4, T, Q> const& ip) + { + vec<3, T, Q> pXYZ = floor(fract(vec<3, T, Q>(j) * vec<3, T, Q>(ip)) * T(7)) * ip[2] - T(1); + T pW = static_cast(1.5) - dot(abs(pXYZ), vec<3, T, Q>(1)); + vec<4, T, Q> s = vec<4, T, Q>(lessThan(vec<4, T, Q>(pXYZ, pW), vec<4, T, Q>(0.0))); + pXYZ = pXYZ + (vec<3, T, Q>(s) * T(2) - T(1)) * s.w; + return vec<4, T, Q>(pXYZ, pW); + } +}//namespace gtc + + // Classic Perlin noise + template + GLM_FUNC_QUALIFIER T perlin(vec<2, T, Q> const& Position) + { + vec<4, T, Q> Pi = glm::floor(vec<4, T, Q>(Position.x, Position.y, Position.x, Position.y)) + vec<4, T, Q>(0.0, 0.0, 1.0, 1.0); + vec<4, T, Q> Pf = glm::fract(vec<4, T, Q>(Position.x, Position.y, Position.x, Position.y)) - vec<4, T, Q>(0.0, 0.0, 1.0, 1.0); + Pi = mod(Pi, vec<4, T, Q>(289)); // To avoid truncation effects in permutation + vec<4, T, Q> ix(Pi.x, Pi.z, Pi.x, Pi.z); + vec<4, T, Q> iy(Pi.y, Pi.y, Pi.w, Pi.w); + vec<4, T, Q> fx(Pf.x, Pf.z, Pf.x, Pf.z); + vec<4, T, Q> fy(Pf.y, Pf.y, Pf.w, Pf.w); + + vec<4, T, Q> i = detail::permute(detail::permute(ix) + iy); + + vec<4, T, Q> gx = static_cast(2) * glm::fract(i / T(41)) - T(1); + vec<4, T, Q> gy = glm::abs(gx) - T(0.5); + vec<4, T, Q> tx = glm::floor(gx + T(0.5)); + gx = gx - tx; + + vec<2, T, Q> g00(gx.x, gy.x); + vec<2, T, Q> g10(gx.y, gy.y); + vec<2, T, Q> g01(gx.z, gy.z); + vec<2, T, Q> g11(gx.w, gy.w); + + vec<4, T, Q> norm = detail::taylorInvSqrt(vec<4, T, Q>(dot(g00, g00), dot(g01, g01), dot(g10, g10), dot(g11, g11))); + g00 *= norm.x; + g01 *= norm.y; + g10 *= norm.z; + g11 *= norm.w; + + T n00 = dot(g00, vec<2, T, Q>(fx.x, fy.x)); + T n10 = dot(g10, vec<2, T, Q>(fx.y, fy.y)); + T n01 = dot(g01, vec<2, T, Q>(fx.z, fy.z)); + T n11 = dot(g11, vec<2, T, Q>(fx.w, fy.w)); + + vec<2, T, Q> fade_xy = detail::fade(vec<2, T, Q>(Pf.x, Pf.y)); + vec<2, T, Q> n_x = mix(vec<2, T, Q>(n00, n01), vec<2, T, Q>(n10, n11), fade_xy.x); + T n_xy = mix(n_x.x, n_x.y, fade_xy.y); + return T(2.3) * n_xy; + } + + // Classic Perlin noise + template + GLM_FUNC_QUALIFIER T perlin(vec<3, T, Q> const& Position) + { + vec<3, T, Q> Pi0 = floor(Position); // Integer part for indexing + vec<3, T, Q> Pi1 = Pi0 + T(1); // Integer part + 1 + Pi0 = detail::mod289(Pi0); + Pi1 = detail::mod289(Pi1); + vec<3, T, Q> Pf0 = fract(Position); // Fractional part for interpolation + vec<3, T, Q> Pf1 = Pf0 - T(1); // Fractional part - 1.0 + vec<4, T, Q> ix(Pi0.x, Pi1.x, Pi0.x, Pi1.x); + vec<4, T, Q> iy = vec<4, T, Q>(vec<2, T, Q>(Pi0.y), vec<2, T, Q>(Pi1.y)); + vec<4, T, Q> iz0(Pi0.z); + vec<4, T, Q> iz1(Pi1.z); + + vec<4, T, Q> ixy = detail::permute(detail::permute(ix) + iy); + vec<4, T, Q> ixy0 = detail::permute(ixy + iz0); + vec<4, T, Q> ixy1 = detail::permute(ixy + iz1); + + vec<4, T, Q> gx0 = ixy0 * T(1.0 / 7.0); + vec<4, T, Q> gy0 = fract(floor(gx0) * T(1.0 / 7.0)) - T(0.5); + gx0 = 
fract(gx0); + vec<4, T, Q> gz0 = vec<4, T, Q>(0.5) - abs(gx0) - abs(gy0); + vec<4, T, Q> sz0 = step(gz0, vec<4, T, Q>(0.0)); + gx0 -= sz0 * (step(T(0), gx0) - T(0.5)); + gy0 -= sz0 * (step(T(0), gy0) - T(0.5)); + + vec<4, T, Q> gx1 = ixy1 * T(1.0 / 7.0); + vec<4, T, Q> gy1 = fract(floor(gx1) * T(1.0 / 7.0)) - T(0.5); + gx1 = fract(gx1); + vec<4, T, Q> gz1 = vec<4, T, Q>(0.5) - abs(gx1) - abs(gy1); + vec<4, T, Q> sz1 = step(gz1, vec<4, T, Q>(0.0)); + gx1 -= sz1 * (step(T(0), gx1) - T(0.5)); + gy1 -= sz1 * (step(T(0), gy1) - T(0.5)); + + vec<3, T, Q> g000(gx0.x, gy0.x, gz0.x); + vec<3, T, Q> g100(gx0.y, gy0.y, gz0.y); + vec<3, T, Q> g010(gx0.z, gy0.z, gz0.z); + vec<3, T, Q> g110(gx0.w, gy0.w, gz0.w); + vec<3, T, Q> g001(gx1.x, gy1.x, gz1.x); + vec<3, T, Q> g101(gx1.y, gy1.y, gz1.y); + vec<3, T, Q> g011(gx1.z, gy1.z, gz1.z); + vec<3, T, Q> g111(gx1.w, gy1.w, gz1.w); + + vec<4, T, Q> norm0 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g000, g000), dot(g010, g010), dot(g100, g100), dot(g110, g110))); + g000 *= norm0.x; + g010 *= norm0.y; + g100 *= norm0.z; + g110 *= norm0.w; + vec<4, T, Q> norm1 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g001, g001), dot(g011, g011), dot(g101, g101), dot(g111, g111))); + g001 *= norm1.x; + g011 *= norm1.y; + g101 *= norm1.z; + g111 *= norm1.w; + + T n000 = dot(g000, Pf0); + T n100 = dot(g100, vec<3, T, Q>(Pf1.x, Pf0.y, Pf0.z)); + T n010 = dot(g010, vec<3, T, Q>(Pf0.x, Pf1.y, Pf0.z)); + T n110 = dot(g110, vec<3, T, Q>(Pf1.x, Pf1.y, Pf0.z)); + T n001 = dot(g001, vec<3, T, Q>(Pf0.x, Pf0.y, Pf1.z)); + T n101 = dot(g101, vec<3, T, Q>(Pf1.x, Pf0.y, Pf1.z)); + T n011 = dot(g011, vec<3, T, Q>(Pf0.x, Pf1.y, Pf1.z)); + T n111 = dot(g111, Pf1); + + vec<3, T, Q> fade_xyz = detail::fade(Pf0); + vec<4, T, Q> n_z = mix(vec<4, T, Q>(n000, n100, n010, n110), vec<4, T, Q>(n001, n101, n011, n111), fade_xyz.z); + vec<2, T, Q> n_yz = mix(vec<2, T, Q>(n_z.x, n_z.y), vec<2, T, Q>(n_z.z, n_z.w), fade_xyz.y); + T n_xyz = mix(n_yz.x, n_yz.y, fade_xyz.x); + return T(2.2) * n_xyz; + } + /* + // Classic Perlin noise + template + GLM_FUNC_QUALIFIER T perlin(vec<3, T, Q> const& P) + { + vec<3, T, Q> Pi0 = floor(P); // Integer part for indexing + vec<3, T, Q> Pi1 = Pi0 + T(1); // Integer part + 1 + Pi0 = mod(Pi0, T(289)); + Pi1 = mod(Pi1, T(289)); + vec<3, T, Q> Pf0 = fract(P); // Fractional part for interpolation + vec<3, T, Q> Pf1 = Pf0 - T(1); // Fractional part - 1.0 + vec<4, T, Q> ix(Pi0.x, Pi1.x, Pi0.x, Pi1.x); + vec<4, T, Q> iy(Pi0.y, Pi0.y, Pi1.y, Pi1.y); + vec<4, T, Q> iz0(Pi0.z); + vec<4, T, Q> iz1(Pi1.z); + + vec<4, T, Q> ixy = permute(permute(ix) + iy); + vec<4, T, Q> ixy0 = permute(ixy + iz0); + vec<4, T, Q> ixy1 = permute(ixy + iz1); + + vec<4, T, Q> gx0 = ixy0 / T(7); + vec<4, T, Q> gy0 = fract(floor(gx0) / T(7)) - T(0.5); + gx0 = fract(gx0); + vec<4, T, Q> gz0 = vec<4, T, Q>(0.5) - abs(gx0) - abs(gy0); + vec<4, T, Q> sz0 = step(gz0, vec<4, T, Q>(0.0)); + gx0 -= sz0 * (step(0.0, gx0) - T(0.5)); + gy0 -= sz0 * (step(0.0, gy0) - T(0.5)); + + vec<4, T, Q> gx1 = ixy1 / T(7); + vec<4, T, Q> gy1 = fract(floor(gx1) / T(7)) - T(0.5); + gx1 = fract(gx1); + vec<4, T, Q> gz1 = vec<4, T, Q>(0.5) - abs(gx1) - abs(gy1); + vec<4, T, Q> sz1 = step(gz1, vec<4, T, Q>(0.0)); + gx1 -= sz1 * (step(T(0), gx1) - T(0.5)); + gy1 -= sz1 * (step(T(0), gy1) - T(0.5)); + + vec<3, T, Q> g000(gx0.x, gy0.x, gz0.x); + vec<3, T, Q> g100(gx0.y, gy0.y, gz0.y); + vec<3, T, Q> g010(gx0.z, gy0.z, gz0.z); + vec<3, T, Q> g110(gx0.w, gy0.w, gz0.w); + vec<3, T, Q> g001(gx1.x, gy1.x, gz1.x); + vec<3, T, Q> g101(gx1.y, gy1.y, 
gz1.y); + vec<3, T, Q> g011(gx1.z, gy1.z, gz1.z); + vec<3, T, Q> g111(gx1.w, gy1.w, gz1.w); + + vec<4, T, Q> norm0 = taylorInvSqrt(vec<4, T, Q>(dot(g000, g000), dot(g010, g010), dot(g100, g100), dot(g110, g110))); + g000 *= norm0.x; + g010 *= norm0.y; + g100 *= norm0.z; + g110 *= norm0.w; + vec<4, T, Q> norm1 = taylorInvSqrt(vec<4, T, Q>(dot(g001, g001), dot(g011, g011), dot(g101, g101), dot(g111, g111))); + g001 *= norm1.x; + g011 *= norm1.y; + g101 *= norm1.z; + g111 *= norm1.w; + + T n000 = dot(g000, Pf0); + T n100 = dot(g100, vec<3, T, Q>(Pf1.x, Pf0.y, Pf0.z)); + T n010 = dot(g010, vec<3, T, Q>(Pf0.x, Pf1.y, Pf0.z)); + T n110 = dot(g110, vec<3, T, Q>(Pf1.x, Pf1.y, Pf0.z)); + T n001 = dot(g001, vec<3, T, Q>(Pf0.x, Pf0.y, Pf1.z)); + T n101 = dot(g101, vec<3, T, Q>(Pf1.x, Pf0.y, Pf1.z)); + T n011 = dot(g011, vec<3, T, Q>(Pf0.x, Pf1.y, Pf1.z)); + T n111 = dot(g111, Pf1); + + vec<3, T, Q> fade_xyz = fade(Pf0); + vec<4, T, Q> n_z = mix(vec<4, T, Q>(n000, n100, n010, n110), vec<4, T, Q>(n001, n101, n011, n111), fade_xyz.z); + vec<2, T, Q> n_yz = mix( + vec<2, T, Q>(n_z.x, n_z.y), + vec<2, T, Q>(n_z.z, n_z.w), fade_xyz.y); + T n_xyz = mix(n_yz.x, n_yz.y, fade_xyz.x); + return T(2.2) * n_xyz; + } + */ + // Classic Perlin noise + template + GLM_FUNC_QUALIFIER T perlin(vec<4, T, Q> const& Position) + { + vec<4, T, Q> Pi0 = floor(Position); // Integer part for indexing + vec<4, T, Q> Pi1 = Pi0 + T(1); // Integer part + 1 + Pi0 = mod(Pi0, vec<4, T, Q>(289)); + Pi1 = mod(Pi1, vec<4, T, Q>(289)); + vec<4, T, Q> Pf0 = fract(Position); // Fractional part for interpolation + vec<4, T, Q> Pf1 = Pf0 - T(1); // Fractional part - 1.0 + vec<4, T, Q> ix(Pi0.x, Pi1.x, Pi0.x, Pi1.x); + vec<4, T, Q> iy(Pi0.y, Pi0.y, Pi1.y, Pi1.y); + vec<4, T, Q> iz0(Pi0.z); + vec<4, T, Q> iz1(Pi1.z); + vec<4, T, Q> iw0(Pi0.w); + vec<4, T, Q> iw1(Pi1.w); + + vec<4, T, Q> ixy = detail::permute(detail::permute(ix) + iy); + vec<4, T, Q> ixy0 = detail::permute(ixy + iz0); + vec<4, T, Q> ixy1 = detail::permute(ixy + iz1); + vec<4, T, Q> ixy00 = detail::permute(ixy0 + iw0); + vec<4, T, Q> ixy01 = detail::permute(ixy0 + iw1); + vec<4, T, Q> ixy10 = detail::permute(ixy1 + iw0); + vec<4, T, Q> ixy11 = detail::permute(ixy1 + iw1); + + vec<4, T, Q> gx00 = ixy00 / T(7); + vec<4, T, Q> gy00 = floor(gx00) / T(7); + vec<4, T, Q> gz00 = floor(gy00) / T(6); + gx00 = fract(gx00) - T(0.5); + gy00 = fract(gy00) - T(0.5); + gz00 = fract(gz00) - T(0.5); + vec<4, T, Q> gw00 = vec<4, T, Q>(0.75) - abs(gx00) - abs(gy00) - abs(gz00); + vec<4, T, Q> sw00 = step(gw00, vec<4, T, Q>(0.0)); + gx00 -= sw00 * (step(T(0), gx00) - T(0.5)); + gy00 -= sw00 * (step(T(0), gy00) - T(0.5)); + + vec<4, T, Q> gx01 = ixy01 / T(7); + vec<4, T, Q> gy01 = floor(gx01) / T(7); + vec<4, T, Q> gz01 = floor(gy01) / T(6); + gx01 = fract(gx01) - T(0.5); + gy01 = fract(gy01) - T(0.5); + gz01 = fract(gz01) - T(0.5); + vec<4, T, Q> gw01 = vec<4, T, Q>(0.75) - abs(gx01) - abs(gy01) - abs(gz01); + vec<4, T, Q> sw01 = step(gw01, vec<4, T, Q>(0.0)); + gx01 -= sw01 * (step(T(0), gx01) - T(0.5)); + gy01 -= sw01 * (step(T(0), gy01) - T(0.5)); + + vec<4, T, Q> gx10 = ixy10 / T(7); + vec<4, T, Q> gy10 = floor(gx10) / T(7); + vec<4, T, Q> gz10 = floor(gy10) / T(6); + gx10 = fract(gx10) - T(0.5); + gy10 = fract(gy10) - T(0.5); + gz10 = fract(gz10) - T(0.5); + vec<4, T, Q> gw10 = vec<4, T, Q>(0.75) - abs(gx10) - abs(gy10) - abs(gz10); + vec<4, T, Q> sw10 = step(gw10, vec<4, T, Q>(0)); + gx10 -= sw10 * (step(T(0), gx10) - T(0.5)); + gy10 -= sw10 * (step(T(0), gy10) - T(0.5)); + + vec<4, T, Q> gx11 
= ixy11 / T(7); + vec<4, T, Q> gy11 = floor(gx11) / T(7); + vec<4, T, Q> gz11 = floor(gy11) / T(6); + gx11 = fract(gx11) - T(0.5); + gy11 = fract(gy11) - T(0.5); + gz11 = fract(gz11) - T(0.5); + vec<4, T, Q> gw11 = vec<4, T, Q>(0.75) - abs(gx11) - abs(gy11) - abs(gz11); + vec<4, T, Q> sw11 = step(gw11, vec<4, T, Q>(0.0)); + gx11 -= sw11 * (step(T(0), gx11) - T(0.5)); + gy11 -= sw11 * (step(T(0), gy11) - T(0.5)); + + vec<4, T, Q> g0000(gx00.x, gy00.x, gz00.x, gw00.x); + vec<4, T, Q> g1000(gx00.y, gy00.y, gz00.y, gw00.y); + vec<4, T, Q> g0100(gx00.z, gy00.z, gz00.z, gw00.z); + vec<4, T, Q> g1100(gx00.w, gy00.w, gz00.w, gw00.w); + vec<4, T, Q> g0010(gx10.x, gy10.x, gz10.x, gw10.x); + vec<4, T, Q> g1010(gx10.y, gy10.y, gz10.y, gw10.y); + vec<4, T, Q> g0110(gx10.z, gy10.z, gz10.z, gw10.z); + vec<4, T, Q> g1110(gx10.w, gy10.w, gz10.w, gw10.w); + vec<4, T, Q> g0001(gx01.x, gy01.x, gz01.x, gw01.x); + vec<4, T, Q> g1001(gx01.y, gy01.y, gz01.y, gw01.y); + vec<4, T, Q> g0101(gx01.z, gy01.z, gz01.z, gw01.z); + vec<4, T, Q> g1101(gx01.w, gy01.w, gz01.w, gw01.w); + vec<4, T, Q> g0011(gx11.x, gy11.x, gz11.x, gw11.x); + vec<4, T, Q> g1011(gx11.y, gy11.y, gz11.y, gw11.y); + vec<4, T, Q> g0111(gx11.z, gy11.z, gz11.z, gw11.z); + vec<4, T, Q> g1111(gx11.w, gy11.w, gz11.w, gw11.w); + + vec<4, T, Q> norm00 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0000, g0000), dot(g0100, g0100), dot(g1000, g1000), dot(g1100, g1100))); + g0000 *= norm00.x; + g0100 *= norm00.y; + g1000 *= norm00.z; + g1100 *= norm00.w; + + vec<4, T, Q> norm01 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0001, g0001), dot(g0101, g0101), dot(g1001, g1001), dot(g1101, g1101))); + g0001 *= norm01.x; + g0101 *= norm01.y; + g1001 *= norm01.z; + g1101 *= norm01.w; + + vec<4, T, Q> norm10 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0010, g0010), dot(g0110, g0110), dot(g1010, g1010), dot(g1110, g1110))); + g0010 *= norm10.x; + g0110 *= norm10.y; + g1010 *= norm10.z; + g1110 *= norm10.w; + + vec<4, T, Q> norm11 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0011, g0011), dot(g0111, g0111), dot(g1011, g1011), dot(g1111, g1111))); + g0011 *= norm11.x; + g0111 *= norm11.y; + g1011 *= norm11.z; + g1111 *= norm11.w; + + T n0000 = dot(g0000, Pf0); + T n1000 = dot(g1000, vec<4, T, Q>(Pf1.x, Pf0.y, Pf0.z, Pf0.w)); + T n0100 = dot(g0100, vec<4, T, Q>(Pf0.x, Pf1.y, Pf0.z, Pf0.w)); + T n1100 = dot(g1100, vec<4, T, Q>(Pf1.x, Pf1.y, Pf0.z, Pf0.w)); + T n0010 = dot(g0010, vec<4, T, Q>(Pf0.x, Pf0.y, Pf1.z, Pf0.w)); + T n1010 = dot(g1010, vec<4, T, Q>(Pf1.x, Pf0.y, Pf1.z, Pf0.w)); + T n0110 = dot(g0110, vec<4, T, Q>(Pf0.x, Pf1.y, Pf1.z, Pf0.w)); + T n1110 = dot(g1110, vec<4, T, Q>(Pf1.x, Pf1.y, Pf1.z, Pf0.w)); + T n0001 = dot(g0001, vec<4, T, Q>(Pf0.x, Pf0.y, Pf0.z, Pf1.w)); + T n1001 = dot(g1001, vec<4, T, Q>(Pf1.x, Pf0.y, Pf0.z, Pf1.w)); + T n0101 = dot(g0101, vec<4, T, Q>(Pf0.x, Pf1.y, Pf0.z, Pf1.w)); + T n1101 = dot(g1101, vec<4, T, Q>(Pf1.x, Pf1.y, Pf0.z, Pf1.w)); + T n0011 = dot(g0011, vec<4, T, Q>(Pf0.x, Pf0.y, Pf1.z, Pf1.w)); + T n1011 = dot(g1011, vec<4, T, Q>(Pf1.x, Pf0.y, Pf1.z, Pf1.w)); + T n0111 = dot(g0111, vec<4, T, Q>(Pf0.x, Pf1.y, Pf1.z, Pf1.w)); + T n1111 = dot(g1111, Pf1); + + vec<4, T, Q> fade_xyzw = detail::fade(Pf0); + vec<4, T, Q> n_0w = mix(vec<4, T, Q>(n0000, n1000, n0100, n1100), vec<4, T, Q>(n0001, n1001, n0101, n1101), fade_xyzw.w); + vec<4, T, Q> n_1w = mix(vec<4, T, Q>(n0010, n1010, n0110, n1110), vec<4, T, Q>(n0011, n1011, n0111, n1111), fade_xyzw.w); + vec<4, T, Q> n_zw = mix(n_0w, n_1w, fade_xyzw.z); + vec<2, T, Q> n_yzw = mix(vec<2, T, Q>(n_zw.x, 
n_zw.y), vec<2, T, Q>(n_zw.z, n_zw.w), fade_xyzw.y); + T n_xyzw = mix(n_yzw.x, n_yzw.y, fade_xyzw.x); + return T(2.2) * n_xyzw; + } + + // Classic Perlin noise, periodic variant + template + GLM_FUNC_QUALIFIER T perlin(vec<2, T, Q> const& Position, vec<2, T, Q> const& rep) + { + vec<4, T, Q> Pi = floor(vec<4, T, Q>(Position.x, Position.y, Position.x, Position.y)) + vec<4, T, Q>(0.0, 0.0, 1.0, 1.0); + vec<4, T, Q> Pf = fract(vec<4, T, Q>(Position.x, Position.y, Position.x, Position.y)) - vec<4, T, Q>(0.0, 0.0, 1.0, 1.0); + Pi = mod(Pi, vec<4, T, Q>(rep.x, rep.y, rep.x, rep.y)); // To create noise with explicit period + Pi = mod(Pi, vec<4, T, Q>(289)); // To avoid truncation effects in permutation + vec<4, T, Q> ix(Pi.x, Pi.z, Pi.x, Pi.z); + vec<4, T, Q> iy(Pi.y, Pi.y, Pi.w, Pi.w); + vec<4, T, Q> fx(Pf.x, Pf.z, Pf.x, Pf.z); + vec<4, T, Q> fy(Pf.y, Pf.y, Pf.w, Pf.w); + + vec<4, T, Q> i = detail::permute(detail::permute(ix) + iy); + + vec<4, T, Q> gx = static_cast(2) * fract(i / T(41)) - T(1); + vec<4, T, Q> gy = abs(gx) - T(0.5); + vec<4, T, Q> tx = floor(gx + T(0.5)); + gx = gx - tx; + + vec<2, T, Q> g00(gx.x, gy.x); + vec<2, T, Q> g10(gx.y, gy.y); + vec<2, T, Q> g01(gx.z, gy.z); + vec<2, T, Q> g11(gx.w, gy.w); + + vec<4, T, Q> norm = detail::taylorInvSqrt(vec<4, T, Q>(dot(g00, g00), dot(g01, g01), dot(g10, g10), dot(g11, g11))); + g00 *= norm.x; + g01 *= norm.y; + g10 *= norm.z; + g11 *= norm.w; + + T n00 = dot(g00, vec<2, T, Q>(fx.x, fy.x)); + T n10 = dot(g10, vec<2, T, Q>(fx.y, fy.y)); + T n01 = dot(g01, vec<2, T, Q>(fx.z, fy.z)); + T n11 = dot(g11, vec<2, T, Q>(fx.w, fy.w)); + + vec<2, T, Q> fade_xy = detail::fade(vec<2, T, Q>(Pf.x, Pf.y)); + vec<2, T, Q> n_x = mix(vec<2, T, Q>(n00, n01), vec<2, T, Q>(n10, n11), fade_xy.x); + T n_xy = mix(n_x.x, n_x.y, fade_xy.y); + return T(2.3) * n_xy; + } + + // Classic Perlin noise, periodic variant + template + GLM_FUNC_QUALIFIER T perlin(vec<3, T, Q> const& Position, vec<3, T, Q> const& rep) + { + vec<3, T, Q> Pi0 = mod(floor(Position), rep); // Integer part, modulo period + vec<3, T, Q> Pi1 = mod(Pi0 + vec<3, T, Q>(T(1)), rep); // Integer part + 1, mod period + Pi0 = mod(Pi0, vec<3, T, Q>(289)); + Pi1 = mod(Pi1, vec<3, T, Q>(289)); + vec<3, T, Q> Pf0 = fract(Position); // Fractional part for interpolation + vec<3, T, Q> Pf1 = Pf0 - vec<3, T, Q>(T(1)); // Fractional part - 1.0 + vec<4, T, Q> ix = vec<4, T, Q>(Pi0.x, Pi1.x, Pi0.x, Pi1.x); + vec<4, T, Q> iy = vec<4, T, Q>(Pi0.y, Pi0.y, Pi1.y, Pi1.y); + vec<4, T, Q> iz0(Pi0.z); + vec<4, T, Q> iz1(Pi1.z); + + vec<4, T, Q> ixy = detail::permute(detail::permute(ix) + iy); + vec<4, T, Q> ixy0 = detail::permute(ixy + iz0); + vec<4, T, Q> ixy1 = detail::permute(ixy + iz1); + + vec<4, T, Q> gx0 = ixy0 / T(7); + vec<4, T, Q> gy0 = fract(floor(gx0) / T(7)) - T(0.5); + gx0 = fract(gx0); + vec<4, T, Q> gz0 = vec<4, T, Q>(0.5) - abs(gx0) - abs(gy0); + vec<4, T, Q> sz0 = step(gz0, vec<4, T, Q>(0)); + gx0 -= sz0 * (step(T(0), gx0) - T(0.5)); + gy0 -= sz0 * (step(T(0), gy0) - T(0.5)); + + vec<4, T, Q> gx1 = ixy1 / T(7); + vec<4, T, Q> gy1 = fract(floor(gx1) / T(7)) - T(0.5); + gx1 = fract(gx1); + vec<4, T, Q> gz1 = vec<4, T, Q>(0.5) - abs(gx1) - abs(gy1); + vec<4, T, Q> sz1 = step(gz1, vec<4, T, Q>(T(0))); + gx1 -= sz1 * (step(T(0), gx1) - T(0.5)); + gy1 -= sz1 * (step(T(0), gy1) - T(0.5)); + + vec<3, T, Q> g000 = vec<3, T, Q>(gx0.x, gy0.x, gz0.x); + vec<3, T, Q> g100 = vec<3, T, Q>(gx0.y, gy0.y, gz0.y); + vec<3, T, Q> g010 = vec<3, T, Q>(gx0.z, gy0.z, gz0.z); + vec<3, T, Q> g110 = vec<3, T, Q>(gx0.w, gy0.w, gz0.w); 
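+ // The eight g000..g111 vectors gathered here are the pseudo-random gradients at the corners of the current lattice cell. As an illustrative use of this periodic variant (an assumption, not part of the diff): glm::perlin(glm::vec3(x, y, z), glm::vec3(8.0f)) yields noise that tiles with period 8 along each axis.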
+ vec<3, T, Q> g001 = vec<3, T, Q>(gx1.x, gy1.x, gz1.x); + vec<3, T, Q> g101 = vec<3, T, Q>(gx1.y, gy1.y, gz1.y); + vec<3, T, Q> g011 = vec<3, T, Q>(gx1.z, gy1.z, gz1.z); + vec<3, T, Q> g111 = vec<3, T, Q>(gx1.w, gy1.w, gz1.w); + + vec<4, T, Q> norm0 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g000, g000), dot(g010, g010), dot(g100, g100), dot(g110, g110))); + g000 *= norm0.x; + g010 *= norm0.y; + g100 *= norm0.z; + g110 *= norm0.w; + vec<4, T, Q> norm1 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g001, g001), dot(g011, g011), dot(g101, g101), dot(g111, g111))); + g001 *= norm1.x; + g011 *= norm1.y; + g101 *= norm1.z; + g111 *= norm1.w; + + T n000 = dot(g000, Pf0); + T n100 = dot(g100, vec<3, T, Q>(Pf1.x, Pf0.y, Pf0.z)); + T n010 = dot(g010, vec<3, T, Q>(Pf0.x, Pf1.y, Pf0.z)); + T n110 = dot(g110, vec<3, T, Q>(Pf1.x, Pf1.y, Pf0.z)); + T n001 = dot(g001, vec<3, T, Q>(Pf0.x, Pf0.y, Pf1.z)); + T n101 = dot(g101, vec<3, T, Q>(Pf1.x, Pf0.y, Pf1.z)); + T n011 = dot(g011, vec<3, T, Q>(Pf0.x, Pf1.y, Pf1.z)); + T n111 = dot(g111, Pf1); + + vec<3, T, Q> fade_xyz = detail::fade(Pf0); + vec<4, T, Q> n_z = mix(vec<4, T, Q>(n000, n100, n010, n110), vec<4, T, Q>(n001, n101, n011, n111), fade_xyz.z); + vec<2, T, Q> n_yz = mix(vec<2, T, Q>(n_z.x, n_z.y), vec<2, T, Q>(n_z.z, n_z.w), fade_xyz.y); + T n_xyz = mix(n_yz.x, n_yz.y, fade_xyz.x); + return T(2.2) * n_xyz; + } + + // Classic Perlin noise, periodic version + template + GLM_FUNC_QUALIFIER T perlin(vec<4, T, Q> const& Position, vec<4, T, Q> const& rep) + { + vec<4, T, Q> Pi0 = mod(floor(Position), rep); // Integer part modulo rep + vec<4, T, Q> Pi1 = mod(Pi0 + T(1), rep); // Integer part + 1 mod rep + vec<4, T, Q> Pf0 = fract(Position); // Fractional part for interpolation + vec<4, T, Q> Pf1 = Pf0 - T(1); // Fractional part - 1.0 + vec<4, T, Q> ix = vec<4, T, Q>(Pi0.x, Pi1.x, Pi0.x, Pi1.x); + vec<4, T, Q> iy = vec<4, T, Q>(Pi0.y, Pi0.y, Pi1.y, Pi1.y); + vec<4, T, Q> iz0(Pi0.z); + vec<4, T, Q> iz1(Pi1.z); + vec<4, T, Q> iw0(Pi0.w); + vec<4, T, Q> iw1(Pi1.w); + + vec<4, T, Q> ixy = detail::permute(detail::permute(ix) + iy); + vec<4, T, Q> ixy0 = detail::permute(ixy + iz0); + vec<4, T, Q> ixy1 = detail::permute(ixy + iz1); + vec<4, T, Q> ixy00 = detail::permute(ixy0 + iw0); + vec<4, T, Q> ixy01 = detail::permute(ixy0 + iw1); + vec<4, T, Q> ixy10 = detail::permute(ixy1 + iw0); + vec<4, T, Q> ixy11 = detail::permute(ixy1 + iw1); + + vec<4, T, Q> gx00 = ixy00 / T(7); + vec<4, T, Q> gy00 = floor(gx00) / T(7); + vec<4, T, Q> gz00 = floor(gy00) / T(6); + gx00 = fract(gx00) - T(0.5); + gy00 = fract(gy00) - T(0.5); + gz00 = fract(gz00) - T(0.5); + vec<4, T, Q> gw00 = vec<4, T, Q>(0.75) - abs(gx00) - abs(gy00) - abs(gz00); + vec<4, T, Q> sw00 = step(gw00, vec<4, T, Q>(0)); + gx00 -= sw00 * (step(T(0), gx00) - T(0.5)); + gy00 -= sw00 * (step(T(0), gy00) - T(0.5)); + + vec<4, T, Q> gx01 = ixy01 / T(7); + vec<4, T, Q> gy01 = floor(gx01) / T(7); + vec<4, T, Q> gz01 = floor(gy01) / T(6); + gx01 = fract(gx01) - T(0.5); + gy01 = fract(gy01) - T(0.5); + gz01 = fract(gz01) - T(0.5); + vec<4, T, Q> gw01 = vec<4, T, Q>(0.75) - abs(gx01) - abs(gy01) - abs(gz01); + vec<4, T, Q> sw01 = step(gw01, vec<4, T, Q>(0.0)); + gx01 -= sw01 * (step(T(0), gx01) - T(0.5)); + gy01 -= sw01 * (step(T(0), gy01) - T(0.5)); + + vec<4, T, Q> gx10 = ixy10 / T(7); + vec<4, T, Q> gy10 = floor(gx10) / T(7); + vec<4, T, Q> gz10 = floor(gy10) / T(6); + gx10 = fract(gx10) - T(0.5); + gy10 = fract(gy10) - T(0.5); + gz10 = fract(gz10) - T(0.5); + vec<4, T, Q> gw10 = vec<4, T, Q>(0.75) - abs(gx10) - abs(gy10) - 
abs(gz10); + vec<4, T, Q> sw10 = step(gw10, vec<4, T, Q>(0.0)); + gx10 -= sw10 * (step(T(0), gx10) - T(0.5)); + gy10 -= sw10 * (step(T(0), gy10) - T(0.5)); + + vec<4, T, Q> gx11 = ixy11 / T(7); + vec<4, T, Q> gy11 = floor(gx11) / T(7); + vec<4, T, Q> gz11 = floor(gy11) / T(6); + gx11 = fract(gx11) - T(0.5); + gy11 = fract(gy11) - T(0.5); + gz11 = fract(gz11) - T(0.5); + vec<4, T, Q> gw11 = vec<4, T, Q>(0.75) - abs(gx11) - abs(gy11) - abs(gz11); + vec<4, T, Q> sw11 = step(gw11, vec<4, T, Q>(T(0))); + gx11 -= sw11 * (step(T(0), gx11) - T(0.5)); + gy11 -= sw11 * (step(T(0), gy11) - T(0.5)); + + vec<4, T, Q> g0000(gx00.x, gy00.x, gz00.x, gw00.x); + vec<4, T, Q> g1000(gx00.y, gy00.y, gz00.y, gw00.y); + vec<4, T, Q> g0100(gx00.z, gy00.z, gz00.z, gw00.z); + vec<4, T, Q> g1100(gx00.w, gy00.w, gz00.w, gw00.w); + vec<4, T, Q> g0010(gx10.x, gy10.x, gz10.x, gw10.x); + vec<4, T, Q> g1010(gx10.y, gy10.y, gz10.y, gw10.y); + vec<4, T, Q> g0110(gx10.z, gy10.z, gz10.z, gw10.z); + vec<4, T, Q> g1110(gx10.w, gy10.w, gz10.w, gw10.w); + vec<4, T, Q> g0001(gx01.x, gy01.x, gz01.x, gw01.x); + vec<4, T, Q> g1001(gx01.y, gy01.y, gz01.y, gw01.y); + vec<4, T, Q> g0101(gx01.z, gy01.z, gz01.z, gw01.z); + vec<4, T, Q> g1101(gx01.w, gy01.w, gz01.w, gw01.w); + vec<4, T, Q> g0011(gx11.x, gy11.x, gz11.x, gw11.x); + vec<4, T, Q> g1011(gx11.y, gy11.y, gz11.y, gw11.y); + vec<4, T, Q> g0111(gx11.z, gy11.z, gz11.z, gw11.z); + vec<4, T, Q> g1111(gx11.w, gy11.w, gz11.w, gw11.w); + + vec<4, T, Q> norm00 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0000, g0000), dot(g0100, g0100), dot(g1000, g1000), dot(g1100, g1100))); + g0000 *= norm00.x; + g0100 *= norm00.y; + g1000 *= norm00.z; + g1100 *= norm00.w; + + vec<4, T, Q> norm01 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0001, g0001), dot(g0101, g0101), dot(g1001, g1001), dot(g1101, g1101))); + g0001 *= norm01.x; + g0101 *= norm01.y; + g1001 *= norm01.z; + g1101 *= norm01.w; + + vec<4, T, Q> norm10 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0010, g0010), dot(g0110, g0110), dot(g1010, g1010), dot(g1110, g1110))); + g0010 *= norm10.x; + g0110 *= norm10.y; + g1010 *= norm10.z; + g1110 *= norm10.w; + + vec<4, T, Q> norm11 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0011, g0011), dot(g0111, g0111), dot(g1011, g1011), dot(g1111, g1111))); + g0011 *= norm11.x; + g0111 *= norm11.y; + g1011 *= norm11.z; + g1111 *= norm11.w; + + T n0000 = dot(g0000, Pf0); + T n1000 = dot(g1000, vec<4, T, Q>(Pf1.x, Pf0.y, Pf0.z, Pf0.w)); + T n0100 = dot(g0100, vec<4, T, Q>(Pf0.x, Pf1.y, Pf0.z, Pf0.w)); + T n1100 = dot(g1100, vec<4, T, Q>(Pf1.x, Pf1.y, Pf0.z, Pf0.w)); + T n0010 = dot(g0010, vec<4, T, Q>(Pf0.x, Pf0.y, Pf1.z, Pf0.w)); + T n1010 = dot(g1010, vec<4, T, Q>(Pf1.x, Pf0.y, Pf1.z, Pf0.w)); + T n0110 = dot(g0110, vec<4, T, Q>(Pf0.x, Pf1.y, Pf1.z, Pf0.w)); + T n1110 = dot(g1110, vec<4, T, Q>(Pf1.x, Pf1.y, Pf1.z, Pf0.w)); + T n0001 = dot(g0001, vec<4, T, Q>(Pf0.x, Pf0.y, Pf0.z, Pf1.w)); + T n1001 = dot(g1001, vec<4, T, Q>(Pf1.x, Pf0.y, Pf0.z, Pf1.w)); + T n0101 = dot(g0101, vec<4, T, Q>(Pf0.x, Pf1.y, Pf0.z, Pf1.w)); + T n1101 = dot(g1101, vec<4, T, Q>(Pf1.x, Pf1.y, Pf0.z, Pf1.w)); + T n0011 = dot(g0011, vec<4, T, Q>(Pf0.x, Pf0.y, Pf1.z, Pf1.w)); + T n1011 = dot(g1011, vec<4, T, Q>(Pf1.x, Pf0.y, Pf1.z, Pf1.w)); + T n0111 = dot(g0111, vec<4, T, Q>(Pf0.x, Pf1.y, Pf1.z, Pf1.w)); + T n1111 = dot(g1111, Pf1); + + vec<4, T, Q> fade_xyzw = detail::fade(Pf0); + vec<4, T, Q> n_0w = mix(vec<4, T, Q>(n0000, n1000, n0100, n1100), vec<4, T, Q>(n0001, n1001, n0101, n1101), fade_xyzw.w); + vec<4, T, Q> n_1w = mix(vec<4, T, Q>(n0010, 
n1010, n0110, n1110), vec<4, T, Q>(n0011, n1011, n0111, n1111), fade_xyzw.w); + vec<4, T, Q> n_zw = mix(n_0w, n_1w, fade_xyzw.z); + vec<2, T, Q> n_yzw = mix(vec<2, T, Q>(n_zw.x, n_zw.y), vec<2, T, Q>(n_zw.z, n_zw.w), fade_xyzw.y); + T n_xyzw = mix(n_yzw.x, n_yzw.y, fade_xyzw.x); + return T(2.2) * n_xyzw; + } + + template + GLM_FUNC_QUALIFIER T simplex(glm::vec<2, T, Q> const& v) + { + vec<4, T, Q> const C = vec<4, T, Q>( + T( 0.211324865405187), // (3.0 - sqrt(3.0)) / 6.0 + T( 0.366025403784439), // 0.5 * (sqrt(3.0) - 1.0) + T(-0.577350269189626), // -1.0 + 2.0 * C.x + T( 0.024390243902439)); // 1.0 / 41.0 + + // First corner + vec<2, T, Q> i = floor(v + dot(v, vec<2, T, Q>(C[1]))); + vec<2, T, Q> x0 = v - i + dot(i, vec<2, T, Q>(C[0])); + + // Other corners + //i1.x = step( x0.y, x0.x ); // x0.x > x0.y ? 1.0 : 0.0 + //i1.y = 1.0 - i1.x; + vec<2, T, Q> i1 = (x0.x > x0.y) ? vec<2, T, Q>(1, 0) : vec<2, T, Q>(0, 1); + // x0 = x0 - 0.0 + 0.0 * C.xx ; + // x1 = x0 - i1 + 1.0 * C.xx ; + // x2 = x0 - 1.0 + 2.0 * C.xx ; + vec<4, T, Q> x12 = vec<4, T, Q>(x0.x, x0.y, x0.x, x0.y) + vec<4, T, Q>(C.x, C.x, C.z, C.z); + x12 = vec<4, T, Q>(vec<2, T, Q>(x12) - i1, x12.z, x12.w); + + // Permutations + i = mod(i, vec<2, T, Q>(289)); // Avoid truncation effects in permutation + vec<3, T, Q> p = detail::permute( + detail::permute(i.y + vec<3, T, Q>(T(0), i1.y, T(1))) + + i.x + vec<3, T, Q>(T(0), i1.x, T(1))); + + vec<3, T, Q> m = max(vec<3, T, Q>(0.5) - vec<3, T, Q>( + dot(x0, x0), + dot(vec<2, T, Q>(x12.x, x12.y), vec<2, T, Q>(x12.x, x12.y)), + dot(vec<2, T, Q>(x12.z, x12.w), vec<2, T, Q>(x12.z, x12.w))), vec<3, T, Q>(0)); + m = m * m ; + m = m * m ; + + // Gradients: 41 points uniformly over a line, mapped onto a diamond. + // The ring size 17*17 = 289 is close to a multiple of 41 (41*7 = 287) + + vec<3, T, Q> x = static_cast(2) * fract(p * C.w) - T(1); + vec<3, T, Q> h = abs(x) - T(0.5); + vec<3, T, Q> ox = floor(x + T(0.5)); + vec<3, T, Q> a0 = x - ox; + + // Normalise gradients implicitly by scaling m + // Inlined for speed: m *= taylorInvSqrt( a0*a0 + h*h ); + m *= static_cast(1.79284291400159) - T(0.85373472095314) * (a0 * a0 + h * h); + + // Compute final noise value at P + vec<3, T, Q> g; + g.x = a0.x * x0.x + h.x * x0.y; + //g.yz = a0.yz * x12.xz + h.yz * x12.yw; + g.y = a0.y * x12.x + h.y * x12.y; + g.z = a0.z * x12.z + h.z * x12.w; + return T(130) * dot(m, g); + } + + template + GLM_FUNC_QUALIFIER T simplex(vec<3, T, Q> const& v) + { + vec<2, T, Q> const C(1.0 / 6.0, 1.0 / 3.0); + vec<4, T, Q> const D(0.0, 0.5, 1.0, 2.0); + + // First corner + vec<3, T, Q> i(floor(v + dot(v, vec<3, T, Q>(C.y)))); + vec<3, T, Q> x0(v - i + dot(i, vec<3, T, Q>(C.x))); + + // Other corners + vec<3, T, Q> g(step(vec<3, T, Q>(x0.y, x0.z, x0.x), x0)); + vec<3, T, Q> l(T(1) - g); + vec<3, T, Q> i1(min(g, vec<3, T, Q>(l.z, l.x, l.y))); + vec<3, T, Q> i2(max(g, vec<3, T, Q>(l.z, l.x, l.y))); + + // x0 = x0 - 0.0 + 0.0 * C.xxx; + // x1 = x0 - i1 + 1.0 * C.xxx; + // x2 = x0 - i2 + 2.0 * C.xxx; + // x3 = x0 - 1.0 + 3.0 * C.xxx; + vec<3, T, Q> x1(x0 - i1 + C.x); + vec<3, T, Q> x2(x0 - i2 + C.y); // 2.0*C.x = 1/3 = C.y + vec<3, T, Q> x3(x0 - D.y); // -1.0+3.0*C.x = -0.5 = -D.y + + // Permutations + i = detail::mod289(i); + vec<4, T, Q> p(detail::permute(detail::permute(detail::permute( + i.z + vec<4, T, Q>(T(0), i1.z, i2.z, T(1))) + + i.y + vec<4, T, Q>(T(0), i1.y, i2.y, T(1))) + + i.x + vec<4, T, Q>(T(0), i1.x, i2.x, T(1)))); + + // Gradients: 7x7 points over a square, mapped onto an octahedron. 
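+ // (Each hash value in p is decoded into a gradient direction arithmetically below, rather than read from a precomputed gradient table.)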
+ // The ring size 17*17 = 289 is close to a multiple of 49 (49*6 = 294) + T n_ = static_cast(0.142857142857); // 1.0/7.0 + vec<3, T, Q> ns(n_ * vec<3, T, Q>(D.w, D.y, D.z) - vec<3, T, Q>(D.x, D.z, D.x)); + + vec<4, T, Q> j(p - T(49) * floor(p * ns.z * ns.z)); // mod(p,7*7) + + vec<4, T, Q> x_(floor(j * ns.z)); + vec<4, T, Q> y_(floor(j - T(7) * x_)); // mod(j,N) + + vec<4, T, Q> x(x_ * ns.x + ns.y); + vec<4, T, Q> y(y_ * ns.x + ns.y); + vec<4, T, Q> h(T(1) - abs(x) - abs(y)); + + vec<4, T, Q> b0(x.x, x.y, y.x, y.y); + vec<4, T, Q> b1(x.z, x.w, y.z, y.w); + + // vec4 s0 = vec4(lessThan(b0,0.0))*2.0 - 1.0; + // vec4 s1 = vec4(lessThan(b1,0.0))*2.0 - 1.0; + vec<4, T, Q> s0(floor(b0) * T(2) + T(1)); + vec<4, T, Q> s1(floor(b1) * T(2) + T(1)); + vec<4, T, Q> sh(-step(h, vec<4, T, Q>(0.0))); + + vec<4, T, Q> a0 = vec<4, T, Q>(b0.x, b0.z, b0.y, b0.w) + vec<4, T, Q>(s0.x, s0.z, s0.y, s0.w) * vec<4, T, Q>(sh.x, sh.x, sh.y, sh.y); + vec<4, T, Q> a1 = vec<4, T, Q>(b1.x, b1.z, b1.y, b1.w) + vec<4, T, Q>(s1.x, s1.z, s1.y, s1.w) * vec<4, T, Q>(sh.z, sh.z, sh.w, sh.w); + + vec<3, T, Q> p0(a0.x, a0.y, h.x); + vec<3, T, Q> p1(a0.z, a0.w, h.y); + vec<3, T, Q> p2(a1.x, a1.y, h.z); + vec<3, T, Q> p3(a1.z, a1.w, h.w); + + // Normalise gradients + vec<4, T, Q> norm = detail::taylorInvSqrt(vec<4, T, Q>(dot(p0, p0), dot(p1, p1), dot(p2, p2), dot(p3, p3))); + p0 *= norm.x; + p1 *= norm.y; + p2 *= norm.z; + p3 *= norm.w; + + // Mix final noise value + vec<4, T, Q> m = max(T(0.6) - vec<4, T, Q>(dot(x0, x0), dot(x1, x1), dot(x2, x2), dot(x3, x3)), vec<4, T, Q>(0)); + m = m * m; + return T(42) * dot(m * m, vec<4, T, Q>(dot(p0, x0), dot(p1, x1), dot(p2, x2), dot(p3, x3))); + } + + template + GLM_FUNC_QUALIFIER T simplex(vec<4, T, Q> const& v) + { + vec<4, T, Q> const C( + 0.138196601125011, // (5 - sqrt(5))/20 G4 + 0.276393202250021, // 2 * G4 + 0.414589803375032, // 3 * G4 + -0.447213595499958); // -1 + 4 * G4 + + // (sqrt(5) - 1)/4 = F4, used once below + T const F4 = static_cast(0.309016994374947451); + + // First corner + vec<4, T, Q> i = floor(v + dot(v, vec<4, T, Q>(F4))); + vec<4, T, Q> x0 = v - i + dot(i, vec<4, T, Q>(C.x)); + + // Other corners + + // Rank sorting originally contributed by Bill Licea-Kane, AMD (formerly ATI) + vec<4, T, Q> i0; + vec<3, T, Q> isX = step(vec<3, T, Q>(x0.y, x0.z, x0.w), vec<3, T, Q>(x0.x)); + vec<3, T, Q> isYZ = step(vec<3, T, Q>(x0.z, x0.w, x0.w), vec<3, T, Q>(x0.y, x0.y, x0.z)); + // i0.x = dot(isX, vec3(1.0)); + //i0.x = isX.x + isX.y + isX.z; + //i0.yzw = static_cast(1) - isX; + i0 = vec<4, T, Q>(isX.x + isX.y + isX.z, T(1) - isX); + // i0.y += dot(isYZ.xy, vec2(1.0)); + i0.y += isYZ.x + isYZ.y; + //i0.zw += 1.0 - vec<2, T, Q>(isYZ.x, isYZ.y); + i0.z += static_cast(1) - isYZ.x; + i0.w += static_cast(1) - isYZ.y; + i0.z += isYZ.z; + i0.w += static_cast(1) - isYZ.z; + + // i0 now contains the unique values 0,1,2,3 in each channel + vec<4, T, Q> i3 = clamp(i0, T(0), T(1)); + vec<4, T, Q> i2 = clamp(i0 - T(1), T(0), T(1)); + vec<4, T, Q> i1 = clamp(i0 - T(2), T(0), T(1)); + + // x0 = x0 - 0.0 + 0.0 * C.xxxx + // x1 = x0 - i1 + 0.0 * C.xxxx + // x2 = x0 - i2 + 0.0 * C.xxxx + // x3 = x0 - i3 + 0.0 * C.xxxx + // x4 = x0 - 1.0 + 4.0 * C.xxxx + vec<4, T, Q> x1 = x0 - i1 + C.x; + vec<4, T, Q> x2 = x0 - i2 + C.y; + vec<4, T, Q> x3 = x0 - i3 + C.z; + vec<4, T, Q> x4 = x0 + C.w; + + // Permutations + i = mod(i, vec<4, T, Q>(289)); + T j0 = detail::permute(detail::permute(detail::permute(detail::permute(i.w) + i.z) + i.y) + i.x); + vec<4, T, Q> j1 = 
detail::permute(detail::permute(detail::permute(detail::permute( + i.w + vec<4, T, Q>(i1.w, i2.w, i3.w, T(1))) + + i.z + vec<4, T, Q>(i1.z, i2.z, i3.z, T(1))) + + i.y + vec<4, T, Q>(i1.y, i2.y, i3.y, T(1))) + + i.x + vec<4, T, Q>(i1.x, i2.x, i3.x, T(1))); + + // Gradients: 7x7x6 points over a cube, mapped onto a 4-cross polytope + // 7*7*6 = 294, which is close to the ring size 17*17 = 289. + vec<4, T, Q> ip = vec<4, T, Q>(T(1) / T(294), T(1) / T(49), T(1) / T(7), T(0)); + + vec<4, T, Q> p0 = gtc::grad4(j0, ip); + vec<4, T, Q> p1 = gtc::grad4(j1.x, ip); + vec<4, T, Q> p2 = gtc::grad4(j1.y, ip); + vec<4, T, Q> p3 = gtc::grad4(j1.z, ip); + vec<4, T, Q> p4 = gtc::grad4(j1.w, ip); + + // Normalise gradients + vec<4, T, Q> norm = detail::taylorInvSqrt(vec<4, T, Q>(dot(p0, p0), dot(p1, p1), dot(p2, p2), dot(p3, p3))); + p0 *= norm.x; + p1 *= norm.y; + p2 *= norm.z; + p3 *= norm.w; + p4 *= detail::taylorInvSqrt(dot(p4, p4)); + + // Mix contributions from the five corners + vec<3, T, Q> m0 = max(T(0.6) - vec<3, T, Q>(dot(x0, x0), dot(x1, x1), dot(x2, x2)), vec<3, T, Q>(0)); + vec<2, T, Q> m1 = max(T(0.6) - vec<2, T, Q>(dot(x3, x3), dot(x4, x4) ), vec<2, T, Q>(0)); + m0 = m0 * m0; + m1 = m1 * m1; + return T(49) * + (dot(m0 * m0, vec<3, T, Q>(dot(p0, x0), dot(p1, x1), dot(p2, x2))) + + dot(m1 * m1, vec<2, T, Q>(dot(p3, x3), dot(p4, x4)))); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtc/packing.hpp b/MacroLibX/third_party/glm/gtc/packing.hpp new file mode 100644 index 0000000..7c64aba --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/packing.hpp @@ -0,0 +1,728 @@ +/// @ref gtc_packing +/// @file glm/gtc/packing.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtc_packing GLM_GTC_packing +/// @ingroup gtc +/// +/// Include to use the features of this extension. +/// +/// This extension provides a set of function to convert vertors to packed +/// formats. + +#pragma once + +// Dependency: +#include "type_precision.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_GTC_packing extension included") +#endif + +namespace glm +{ + /// @addtogroup gtc_packing + /// @{ + + /// First, converts the normalized floating-point value v into a 8-bit integer value. + /// Then, the results are packed into the returned 8-bit unsigned integer. + /// + /// The conversion for component c of v to fixed point is done as follows: + /// packUnorm1x8: round(clamp(c, 0, +1) * 255.0) + /// + /// @see gtc_packing + /// @see uint16 packUnorm2x8(vec2 const& v) + /// @see uint32 packUnorm4x8(vec4 const& v) + /// @see GLSL packUnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uint8 packUnorm1x8(float v); + + /// Convert a single 8-bit integer to a normalized floating-point value. + /// + /// The conversion for unpacked fixed-point value f to floating point is done as follows: + /// unpackUnorm4x8: f / 255.0 + /// + /// @see gtc_packing + /// @see vec2 unpackUnorm2x8(uint16 p) + /// @see vec4 unpackUnorm4x8(uint32 p) + /// @see GLSL unpackUnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL float unpackUnorm1x8(uint8 p); + + /// First, converts each component of the normalized floating-point value v into 8-bit integer values. + /// Then, the results are packed into the returned 16-bit unsigned integer. 
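+ /// e.g. packUnorm2x8(vec2(1.0f, 0.0f)) yields 0x00ff under this layout: v.x fills the low byte, v.y the high byte.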
+ /// + /// The conversion for component c of v to fixed point is done as follows: + /// packUnorm2x8: round(clamp(c, 0, +1) * 255.0) + /// + /// The first component of the vector will be written to the least significant bits of the output; + /// the last component will be written to the most significant bits. + /// + /// @see gtc_packing + /// @see uint8 packUnorm1x8(float const& v) + /// @see uint32 packUnorm4x8(vec4 const& v) + /// @see GLSL packUnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uint16 packUnorm2x8(vec2 const& v); + + /// First, unpacks a single 16-bit unsigned integer p into a pair of 8-bit unsigned integers. + /// Then, each component is converted to a normalized floating-point value to generate the returned two-component vector. + /// + /// The conversion for unpacked fixed-point value f to floating point is done as follows: + /// unpackUnorm4x8: f / 255.0 + /// + /// The first component of the returned vector will be extracted from the least significant bits of the input; + /// the last component will be extracted from the most significant bits. + /// + /// @see gtc_packing + /// @see float unpackUnorm1x8(uint8 v) + /// @see vec4 unpackUnorm4x8(uint32 p) + /// @see GLSL unpackUnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL vec2 unpackUnorm2x8(uint16 p); + + /// First, converts the normalized floating-point value v into 8-bit integer value. + /// Then, the results are packed into the returned 8-bit unsigned integer. + /// + /// The conversion to fixed point is done as follows: + /// packSnorm1x8: round(clamp(s, -1, +1) * 127.0) + /// + /// @see gtc_packing + /// @see uint16 packSnorm2x8(vec2 const& v) + /// @see uint32 packSnorm4x8(vec4 const& v) + /// @see GLSL packSnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uint8 packSnorm1x8(float s); + + /// First, unpacks a single 8-bit unsigned integer p into a single 8-bit signed integers. + /// Then, the value is converted to a normalized floating-point value to generate the returned scalar. + /// + /// The conversion for unpacked fixed-point value f to floating point is done as follows: + /// unpackSnorm1x8: clamp(f / 127.0, -1, +1) + /// + /// @see gtc_packing + /// @see vec2 unpackSnorm2x8(uint16 p) + /// @see vec4 unpackSnorm4x8(uint32 p) + /// @see GLSL unpackSnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL float unpackSnorm1x8(uint8 p); + + /// First, converts each component of the normalized floating-point value v into 8-bit integer values. + /// Then, the results are packed into the returned 16-bit unsigned integer. + /// + /// The conversion for component c of v to fixed point is done as follows: + /// packSnorm2x8: round(clamp(c, -1, +1) * 127.0) + /// + /// The first component of the vector will be written to the least significant bits of the output; + /// the last component will be written to the most significant bits. + /// + /// @see gtc_packing + /// @see uint8 packSnorm1x8(float const& v) + /// @see uint32 packSnorm4x8(vec4 const& v) + /// @see GLSL packSnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uint16 packSnorm2x8(vec2 const& v); + + /// First, unpacks a single 16-bit unsigned integer p into a pair of 8-bit signed integers. 
+ /// Then, each component is converted to a normalized floating-point value to generate the returned two-component vector. + /// + /// The conversion for unpacked fixed-point value f to floating point is done as follows: + /// unpackSnorm2x8: clamp(f / 127.0, -1, +1) + /// + /// The first component of the returned vector will be extracted from the least significant bits of the input; + /// the last component will be extracted from the most significant bits. + /// + /// @see gtc_packing + /// @see float unpackSnorm1x8(uint8 p) + /// @see vec4 unpackSnorm4x8(uint32 p) + /// @see GLSL unpackSnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL vec2 unpackSnorm2x8(uint16 p); + + /// First, converts the normalized floating-point value v into a 16-bit integer value. + /// Then, the results are packed into the returned 16-bit unsigned integer. + /// + /// The conversion for component c of v to fixed point is done as follows: + /// packUnorm1x16: round(clamp(c, 0, +1) * 65535.0) + /// + /// @see gtc_packing + /// @see uint16 packSnorm1x16(float const& v) + /// @see uint64 packSnorm4x16(vec4 const& v) + /// @see GLSL packUnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uint16 packUnorm1x16(float v); + + /// First, unpacks a single 16-bit unsigned integer p into a of 16-bit unsigned integers. + /// Then, the value is converted to a normalized floating-point value to generate the returned scalar. + /// + /// The conversion for unpacked fixed-point value f to floating point is done as follows: + /// unpackUnorm1x16: f / 65535.0 + /// + /// @see gtc_packing + /// @see vec2 unpackUnorm2x16(uint32 p) + /// @see vec4 unpackUnorm4x16(uint64 p) + /// @see GLSL unpackUnorm2x16 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL float unpackUnorm1x16(uint16 p); + + /// First, converts each component of the normalized floating-point value v into 16-bit integer values. + /// Then, the results are packed into the returned 64-bit unsigned integer. + /// + /// The conversion for component c of v to fixed point is done as follows: + /// packUnorm4x16: round(clamp(c, 0, +1) * 65535.0) + /// + /// The first component of the vector will be written to the least significant bits of the output; + /// the last component will be written to the most significant bits. + /// + /// @see gtc_packing + /// @see uint16 packUnorm1x16(float const& v) + /// @see uint32 packUnorm2x16(vec2 const& v) + /// @see GLSL packUnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uint64 packUnorm4x16(vec4 const& v); + + /// First, unpacks a single 64-bit unsigned integer p into four 16-bit unsigned integers. + /// Then, each component is converted to a normalized floating-point value to generate the returned four-component vector. + /// + /// The conversion for unpacked fixed-point value f to floating point is done as follows: + /// unpackUnormx4x16: f / 65535.0 + /// + /// The first component of the returned vector will be extracted from the least significant bits of the input; + /// the last component will be extracted from the most significant bits. 
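+ /// e.g. under this layout, unpackUnorm4x16(0xffff000000000000ull) yields vec4(0, 0, 0, 1), since only the 16 most-significant bits (the w component) are set.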
+ /// + /// @see gtc_packing + /// @see float unpackUnorm1x16(uint16 p) + /// @see vec2 unpackUnorm2x16(uint32 p) + /// @see GLSL unpackUnorm2x16 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL vec4 unpackUnorm4x16(uint64 p); + + /// First, converts the normalized floating-point value v into a 16-bit integer value. + /// Then, the results are packed into the returned 16-bit unsigned integer. + /// + /// The conversion to fixed point is done as follows: + /// packSnorm1x16: round(clamp(s, -1, +1) * 32767.0) + /// + /// @see gtc_packing + /// @see uint32 packSnorm2x16(vec2 const& v) + /// @see uint64 packSnorm4x16(vec4 const& v) + /// @see GLSL packSnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uint16 packSnorm1x16(float v); + + /// First, unpacks a single 16-bit unsigned integer p into a single 16-bit signed integer. + /// Then, the value is converted to a normalized floating-point value to generate the returned scalar. + /// + /// The conversion for unpacked fixed-point value f to floating point is done as follows: + /// unpackSnorm1x16: clamp(f / 32767.0, -1, +1) + /// + /// @see gtc_packing + /// @see vec2 unpackSnorm2x16(uint32 p) + /// @see vec4 unpackSnorm4x16(uint64 p) + /// @see GLSL unpackSnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL float unpackSnorm1x16(uint16 p); + + /// First, converts each component of the normalized floating-point value v into 16-bit integer values. + /// Then, the results are packed into the returned 64-bit unsigned integer. + /// + /// The conversion for component c of v to fixed point is done as follows: + /// packSnorm4x16: round(clamp(c, -1, +1) * 32767.0) + /// + /// The first component of the vector will be written to the least significant bits of the output; + /// the last component will be written to the most significant bits. + /// + /// @see gtc_packing + /// @see uint16 packSnorm1x16(float const& v) + /// @see uint32 packSnorm2x16(vec2 const& v) + /// @see GLSL packSnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uint64 packSnorm4x16(vec4 const& v); + + /// First, unpacks a single 64-bit unsigned integer p into four 16-bit signed integers. + /// Then, each component is converted to a normalized floating-point value to generate the returned four-component vector. + /// + /// The conversion for unpacked fixed-point value f to floating point is done as follows: + /// unpackSnorm4x16: clamp(f / 32767.0, -1, +1) + /// + /// The first component of the returned vector will be extracted from the least significant bits of the input; + /// the last component will be extracted from the most significant bits. + /// + /// @see gtc_packing + /// @see float unpackSnorm1x16(uint16 p) + /// @see vec2 unpackSnorm2x16(uint32 p) + /// @see GLSL unpackSnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL vec4 unpackSnorm4x16(uint64 p); + + /// Returns an unsigned integer obtained by converting the components of a floating-point scalar + /// to the 16-bit floating-point representation found in the OpenGL Specification, + /// and then packing this 16-bit value into a 16-bit unsigned integer.
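+ /// e.g. packHalf1x16(1.0f) returns 0x3c00, the IEEE 754 binary16 bit pattern for 1.0.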
+ /// + /// @see gtc_packing + /// @see uint32 packHalf2x16(vec2 const& v) + /// @see uint64 packHalf4x16(vec4 const& v) + /// @see GLSL packHalf2x16 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uint16 packHalf1x16(float v); + + /// Returns a floating-point scalar with components obtained by unpacking a 16-bit unsigned integer into a 16-bit value, + /// interpreted as a 16-bit floating-point number according to the OpenGL Specification, + /// and converting it to 32-bit floating-point values. + /// + /// @see gtc_packing + /// @see vec2 unpackHalf2x16(uint32 const& v) + /// @see vec4 unpackHalf4x16(uint64 const& v) + /// @see GLSL unpackHalf2x16 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL float unpackHalf1x16(uint16 v); + + /// Returns an unsigned integer obtained by converting the components of a four-component floating-point vector + /// to the 16-bit floating-point representation found in the OpenGL Specification, + /// and then packing these four 16-bit values into a 64-bit unsigned integer. + /// The first vector component specifies the 16 least-significant bits of the result; + /// the forth component specifies the 16 most-significant bits. + /// + /// @see gtc_packing + /// @see uint16 packHalf1x16(float const& v) + /// @see uint32 packHalf2x16(vec2 const& v) + /// @see GLSL packHalf2x16 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uint64 packHalf4x16(vec4 const& v); + + /// Returns a four-component floating-point vector with components obtained by unpacking a 64-bit unsigned integer into four 16-bit values, + /// interpreting those values as 16-bit floating-point numbers according to the OpenGL Specification, + /// and converting them to 32-bit floating-point values. + /// The first component of the vector is obtained from the 16 least-significant bits of v; + /// the forth component is obtained from the 16 most-significant bits of v. + /// + /// @see gtc_packing + /// @see float unpackHalf1x16(uint16 const& v) + /// @see vec2 unpackHalf2x16(uint32 const& v) + /// @see GLSL unpackHalf2x16 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL vec4 unpackHalf4x16(uint64 p); + + /// Returns an unsigned integer obtained by converting the components of a four-component signed integer vector + /// to the 10-10-10-2-bit signed integer representation found in the OpenGL Specification, + /// and then packing these four values into a 32-bit unsigned integer. + /// The first vector component specifies the 10 least-significant bits of the result; + /// the forth component specifies the 2 most-significant bits. + /// + /// @see gtc_packing + /// @see uint32 packI3x10_1x2(uvec4 const& v) + /// @see uint32 packSnorm3x10_1x2(vec4 const& v) + /// @see uint32 packUnorm3x10_1x2(vec4 const& v) + /// @see ivec4 unpackI3x10_1x2(uint32 const& p) + GLM_FUNC_DECL uint32 packI3x10_1x2(ivec4 const& v); + + /// Unpacks a single 32-bit unsigned integer p into three 10-bit and one 2-bit signed integers. + /// + /// The first component of the returned vector will be extracted from the least significant bits of the input; + /// the last component will be extracted from the most significant bits. 
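+ /// e.g. the returned x is read from bits 0..9 of p, y from bits 10..19, z from bits 20..29 and w from bits 30..31.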
+ /// + /// @see gtc_packing + /// @see uint32 packU3x10_1x2(uvec4 const& v) + /// @see vec4 unpackSnorm3x10_1x2(uint32 const& p); + /// @see uvec4 unpackI3x10_1x2(uint32 const& p); + GLM_FUNC_DECL ivec4 unpackI3x10_1x2(uint32 p); + + /// Returns an unsigned integer obtained by converting the components of a four-component unsigned integer vector + /// to the 10-10-10-2-bit unsigned integer representation found in the OpenGL Specification, + /// and then packing these four values into a 32-bit unsigned integer. + /// The first vector component specifies the 10 least-significant bits of the result; + /// the forth component specifies the 2 most-significant bits. + /// + /// @see gtc_packing + /// @see uint32 packI3x10_1x2(ivec4 const& v) + /// @see uint32 packSnorm3x10_1x2(vec4 const& v) + /// @see uint32 packUnorm3x10_1x2(vec4 const& v) + /// @see ivec4 unpackU3x10_1x2(uint32 const& p) + GLM_FUNC_DECL uint32 packU3x10_1x2(uvec4 const& v); + + /// Unpacks a single 32-bit unsigned integer p into three 10-bit and one 2-bit unsigned integers. + /// + /// The first component of the returned vector will be extracted from the least significant bits of the input; + /// the last component will be extracted from the most significant bits. + /// + /// @see gtc_packing + /// @see uint32 packU3x10_1x2(uvec4 const& v) + /// @see vec4 unpackSnorm3x10_1x2(uint32 const& p); + /// @see uvec4 unpackI3x10_1x2(uint32 const& p); + GLM_FUNC_DECL uvec4 unpackU3x10_1x2(uint32 p); + + /// First, converts the first three components of the normalized floating-point value v into 10-bit signed integer values. + /// Then, converts the forth component of the normalized floating-point value v into 2-bit signed integer values. + /// Then, the results are packed into the returned 32-bit unsigned integer. + /// + /// The conversion for component c of v to fixed point is done as follows: + /// packSnorm3x10_1x2(xyz): round(clamp(c, -1, +1) * 511.0) + /// packSnorm3x10_1x2(w): round(clamp(c, -1, +1) * 1.0) + /// + /// The first vector component specifies the 10 least-significant bits of the result; + /// the forth component specifies the 2 most-significant bits. + /// + /// @see gtc_packing + /// @see vec4 unpackSnorm3x10_1x2(uint32 const& p) + /// @see uint32 packUnorm3x10_1x2(vec4 const& v) + /// @see uint32 packU3x10_1x2(uvec4 const& v) + /// @see uint32 packI3x10_1x2(ivec4 const& v) + GLM_FUNC_DECL uint32 packSnorm3x10_1x2(vec4 const& v); + + /// First, unpacks a single 32-bit unsigned integer p into four 16-bit signed integers. + /// Then, each component is converted to a normalized floating-point value to generate the returned four-component vector. + /// + /// The conversion for unpacked fixed-point value f to floating point is done as follows: + /// unpackSnorm3x10_1x2(xyz): clamp(f / 511.0, -1, +1) + /// unpackSnorm3x10_1x2(w): clamp(f / 511.0, -1, +1) + /// + /// The first component of the returned vector will be extracted from the least significant bits of the input; + /// the last component will be extracted from the most significant bits. + /// + /// @see gtc_packing + /// @see uint32 packSnorm3x10_1x2(vec4 const& v) + /// @see vec4 unpackUnorm3x10_1x2(uint32 const& p)) + /// @see uvec4 unpackI3x10_1x2(uint32 const& p) + /// @see uvec4 unpackU3x10_1x2(uint32 const& p) + GLM_FUNC_DECL vec4 unpackSnorm3x10_1x2(uint32 p); + + /// First, converts the first three components of the normalized floating-point value v into 10-bit unsigned integer values. 
+ /// Then, converts the fourth component of the normalized floating-point value v into a 2-bit unsigned integer value. + /// Then, the results are packed into the returned 32-bit unsigned integer. + /// + /// The conversion for component c of v to fixed point is done as follows: + /// packUnorm3x10_1x2(xyz): round(clamp(c, 0, +1) * 1023.0) + /// packUnorm3x10_1x2(w): round(clamp(c, 0, +1) * 3.0) + /// + /// The first vector component specifies the 10 least-significant bits of the result; + /// the fourth component specifies the 2 most-significant bits. + /// + /// @see gtc_packing + /// @see vec4 unpackUnorm3x10_1x2(uint32 const& p) + /// @see uint32 packSnorm3x10_1x2(vec4 const& v) + /// @see uint32 packU3x10_1x2(uvec4 const& v) + /// @see uint32 packI3x10_1x2(ivec4 const& v) + GLM_FUNC_DECL uint32 packUnorm3x10_1x2(vec4 const& v); + + /// First, unpacks a single 32-bit unsigned integer p into three 10-bit and one 2-bit unsigned integers. + /// Then, each component is converted to a normalized floating-point value to generate the returned four-component vector. + /// + /// The conversion for unpacked fixed-point value f to floating point is done as follows: + /// unpackUnorm3x10_1x2(xyz): clamp(f / 1023.0, 0, +1) + /// unpackUnorm3x10_1x2(w): clamp(f / 3.0, 0, +1) + /// + /// The first component of the returned vector will be extracted from the least significant bits of the input; + /// the last component will be extracted from the most significant bits. + /// + /// @see gtc_packing + /// @see uint32 packSnorm3x10_1x2(vec4 const& v) + /// @see vec4 unpackSnorm3x10_1x2(uint32 const& p) + /// @see uvec4 unpackI3x10_1x2(uint32 const& p) + /// @see uvec4 unpackU3x10_1x2(uint32 const& p) + GLM_FUNC_DECL vec4 unpackUnorm3x10_1x2(uint32 p); + + /// First, converts the first two components of the normalized floating-point value v into 11-bit signless floating-point values. + /// Then, converts the third component of the normalized floating-point value v into a 10-bit signless floating-point value. + /// Then, the results are packed into the returned 32-bit unsigned integer. + /// + /// The first vector component specifies the 11 least-significant bits of the result; + /// the last component specifies the 10 most-significant bits. + /// + /// @see gtc_packing + /// @see vec3 unpackF2x11_1x10(uint32 const& p) + GLM_FUNC_DECL uint32 packF2x11_1x10(vec3 const& v); + + /// First, unpacks a single 32-bit unsigned integer p into two 11-bit signless floating-point values and one 10-bit signless floating-point value. + /// Then, each component is converted to a normalized floating-point value to generate the returned three-component vector. + /// + /// The first component of the returned vector will be extracted from the least significant bits of the input; + /// the last component will be extracted from the most significant bits. + /// + /// @see gtc_packing + /// @see uint32 packF2x11_1x10(vec3 const& v) + GLM_FUNC_DECL vec3 unpackF2x11_1x10(uint32 p); + + + /// First, converts the three components of the normalized floating-point value v into 9-bit mantissas that share a single 5-bit exponent. + /// Then, the mantissas and the shared exponent are packed into the returned 32-bit unsigned integer. + /// + /// The first vector component occupies the least-significant bits of the result; + /// the shared exponent occupies the 5 most-significant bits.
+ /// + /// packF3x9_E1x5 allows encoding into RGBE / RGB9E5 format + /// + /// @see gtc_packing + /// @see vec3 unpackF3x9_E1x5(uint32 const& p) + GLM_FUNC_DECL uint32 packF3x9_E1x5(vec3 const& v); + + /// First, unpacks a single 32-bit unsigned integer p into two 11-bit signless floating-point values and one 10-bit signless floating-point value . + /// Then, each component is converted to a normalized floating-point value to generate the returned three-component vector. + /// + /// The first component of the returned vector will be extracted from the least significant bits of the input; + /// the last component will be extracted from the most significant bits. + /// + /// unpackF3x9_E1x5 allows decoding RGBE / RGB9E5 data + /// + /// @see gtc_packing + /// @see uint32 packF3x9_E1x5(vec3 const& v) + GLM_FUNC_DECL vec3 unpackF3x9_E1x5(uint32 p); + + /// Returns an unsigned integer vector obtained by converting the components of a floating-point vector + /// to the 16-bit floating-point representation found in the OpenGL Specification. + /// The first vector component specifies the 16 least-significant bits of the result; + /// the forth component specifies the 16 most-significant bits. + /// + /// @see gtc_packing + /// @see vec<3, T, Q> unpackRGBM(vec<4, T, Q> const& p) + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + template + GLM_FUNC_DECL vec<4, T, Q> packRGBM(vec<3, T, Q> const& rgb); + + /// Returns a floating-point vector with components obtained by reinterpreting an integer vector as 16-bit floating-point numbers and converting them to 32-bit floating-point values. + /// The first component of the vector is obtained from the 16 least-significant bits of v; + /// the forth component is obtained from the 16 most-significant bits of v. + /// + /// @see gtc_packing + /// @see vec<4, T, Q> packRGBM(vec<3, float, Q> const& v) + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + template + GLM_FUNC_DECL vec<3, T, Q> unpackRGBM(vec<4, T, Q> const& rgbm); + + /// Returns an unsigned integer vector obtained by converting the components of a floating-point vector + /// to the 16-bit floating-point representation found in the OpenGL Specification. + /// The first vector component specifies the 16 least-significant bits of the result; + /// the forth component specifies the 16 most-significant bits. + /// + /// @see gtc_packing + /// @see vec unpackHalf(vec const& p) + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + template + GLM_FUNC_DECL vec packHalf(vec const& v); + + /// Returns a floating-point vector with components obtained by reinterpreting an integer vector as 16-bit floating-point numbers and converting them to 32-bit floating-point values. + /// The first component of the vector is obtained from the 16 least-significant bits of v; + /// the forth component is obtained from the 16 most-significant bits of v. + /// + /// @see gtc_packing + /// @see vec packHalf(vec const& v) + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + template + GLM_FUNC_DECL vec unpackHalf(vec const& p); + + /// Convert each component of the normalized floating-point vector into unsigned integer values. + /// + /// @see gtc_packing + /// @see vec unpackUnorm(vec const& p); + template + GLM_FUNC_DECL vec packUnorm(vec const& v); + + /// Convert a packed integer to a normalized floating-point vector. 
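+ /// e.g. (illustrative) glm::packUnorm<glm::uint8>(glm::vec2(1.0f, 0.5f)) yields u8vec2(255, 128), and glm::unpackUnorm<float>(u8vec2(255, 128)) maps back to approximately vec2(1.0, 0.502).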
+ /// + /// @see gtc_packing + /// @see vec packUnorm(vec const& v) + template + GLM_FUNC_DECL vec unpackUnorm(vec const& v); + + /// Convert each component of the normalized floating-point vector into signed integer values. + /// + /// @see gtc_packing + /// @see vec unpackSnorm(vec const& p); + template + GLM_FUNC_DECL vec packSnorm(vec const& v); + + /// Convert a packed integer to a normalized floating-point vector. + /// + /// @see gtc_packing + /// @see vec packSnorm(vec const& v) + template + GLM_FUNC_DECL vec unpackSnorm(vec const& v); + + /// Convert each component of the normalized floating-point vector into unsigned integer values. + /// + /// @see gtc_packing + /// @see vec2 unpackUnorm2x4(uint8 p) + GLM_FUNC_DECL uint8 packUnorm2x4(vec2 const& v); + + /// Convert a packed integer to a normalized floating-point vector. + /// + /// @see gtc_packing + /// @see uint8 packUnorm2x4(vec2 const& v) + GLM_FUNC_DECL vec2 unpackUnorm2x4(uint8 p); + + /// Convert each component of the normalized floating-point vector into unsigned integer values. + /// + /// @see gtc_packing + /// @see vec4 unpackUnorm4x4(uint16 p) + GLM_FUNC_DECL uint16 packUnorm4x4(vec4 const& v); + + /// Convert a packed integer to a normalized floating-point vector. + /// + /// @see gtc_packing + /// @see uint16 packUnorm4x4(vec4 const& v) + GLM_FUNC_DECL vec4 unpackUnorm4x4(uint16 p); + + /// Convert each component of the normalized floating-point vector into unsigned integer values. + /// + /// @see gtc_packing + /// @see vec3 unpackUnorm1x5_1x6_1x5(uint16 p) + GLM_FUNC_DECL uint16 packUnorm1x5_1x6_1x5(vec3 const& v); + + /// Convert a packed integer to a normalized floating-point vector. + /// + /// @see gtc_packing + /// @see uint16 packUnorm1x5_1x6_1x5(vec3 const& v) + GLM_FUNC_DECL vec3 unpackUnorm1x5_1x6_1x5(uint16 p); + + /// Convert each component of the normalized floating-point vector into unsigned integer values. + /// + /// @see gtc_packing + /// @see vec4 unpackUnorm3x5_1x1(uint16 p) + GLM_FUNC_DECL uint16 packUnorm3x5_1x1(vec4 const& v); + + /// Convert a packed integer to a normalized floating-point vector. + /// + /// @see gtc_packing + /// @see uint16 packUnorm3x5_1x1(vec4 const& v) + GLM_FUNC_DECL vec4 unpackUnorm3x5_1x1(uint16 p); + + /// Convert each component of the normalized floating-point vector into unsigned integer values. + /// + /// @see gtc_packing + /// @see vec3 unpackUnorm2x3_1x2(uint8 p) + GLM_FUNC_DECL uint8 packUnorm2x3_1x2(vec3 const& v); + + /// Convert a packed integer to a normalized floating-point vector. + /// + /// @see gtc_packing + /// @see uint8 packUnorm2x3_1x2(vec3 const& v) + GLM_FUNC_DECL vec3 unpackUnorm2x3_1x2(uint8 p); + + + + /// Convert each component from an integer vector into a packed integer. + /// + /// @see gtc_packing + /// @see i8vec2 unpackInt2x8(int16 p) + GLM_FUNC_DECL int16 packInt2x8(i8vec2 const& v); + + /// Convert a packed integer into an integer vector. + /// + /// @see gtc_packing + /// @see int16 packInt2x8(i8vec2 const& v) + GLM_FUNC_DECL i8vec2 unpackInt2x8(int16 p); + + /// Convert each component from an integer vector into a packed unsigned integer. + /// + /// @see gtc_packing + /// @see u8vec2 unpackInt2x8(uint16 p) + GLM_FUNC_DECL uint16 packUint2x8(u8vec2 const& v); + + /// Convert a packed integer into an integer vector. + /// + /// @see gtc_packing + /// @see uint16 packInt2x8(u8vec2 const& v) + GLM_FUNC_DECL u8vec2 unpackUint2x8(uint16 p); + + /// Convert each component from an integer vector into a packed integer. 
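+ /// e.g. on a little-endian target, packInt4x8(i8vec4(1, 2, 3, 4)) produces 0x04030201; the packing is a raw byte-level reinterpretation (an assumption about byte order, which is not mandated above).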
+ /// + /// @see gtc_packing + /// @see i8vec4 unpackInt4x8(int32 p) + GLM_FUNC_DECL int32 packInt4x8(i8vec4 const& v); + + /// Convert a packed integer into an integer vector. + /// + /// @see gtc_packing + /// @see int32 packInt2x8(i8vec4 const& v) + GLM_FUNC_DECL i8vec4 unpackInt4x8(int32 p); + + /// Convert each component from an integer vector into a packed unsigned integer. + /// + /// @see gtc_packing + /// @see u8vec4 unpackUint4x8(uint32 p) + GLM_FUNC_DECL uint32 packUint4x8(u8vec4 const& v); + + /// Convert a packed integer into an integer vector. + /// + /// @see gtc_packing + /// @see uint32 packUint4x8(u8vec2 const& v) + GLM_FUNC_DECL u8vec4 unpackUint4x8(uint32 p); + + /// Convert each component from an integer vector into a packed integer. + /// + /// @see gtc_packing + /// @see i16vec2 unpackInt2x16(int p) + GLM_FUNC_DECL int packInt2x16(i16vec2 const& v); + + /// Convert a packed integer into an integer vector. + /// + /// @see gtc_packing + /// @see int packInt2x16(i16vec2 const& v) + GLM_FUNC_DECL i16vec2 unpackInt2x16(int p); + + /// Convert each component from an integer vector into a packed integer. + /// + /// @see gtc_packing + /// @see i16vec4 unpackInt4x16(int64 p) + GLM_FUNC_DECL int64 packInt4x16(i16vec4 const& v); + + /// Convert a packed integer into an integer vector. + /// + /// @see gtc_packing + /// @see int64 packInt4x16(i16vec4 const& v) + GLM_FUNC_DECL i16vec4 unpackInt4x16(int64 p); + + /// Convert each component from an integer vector into a packed unsigned integer. + /// + /// @see gtc_packing + /// @see u16vec2 unpackUint2x16(uint p) + GLM_FUNC_DECL uint packUint2x16(u16vec2 const& v); + + /// Convert a packed integer into an integer vector. + /// + /// @see gtc_packing + /// @see uint packUint2x16(u16vec2 const& v) + GLM_FUNC_DECL u16vec2 unpackUint2x16(uint p); + + /// Convert each component from an integer vector into a packed unsigned integer. + /// + /// @see gtc_packing + /// @see u16vec4 unpackUint4x16(uint64 p) + GLM_FUNC_DECL uint64 packUint4x16(u16vec4 const& v); + + /// Convert a packed integer into an integer vector. + /// + /// @see gtc_packing + /// @see uint64 packUint4x16(u16vec4 const& v) + GLM_FUNC_DECL u16vec4 unpackUint4x16(uint64 p); + + /// Convert each component from an integer vector into a packed integer. + /// + /// @see gtc_packing + /// @see i32vec2 unpackInt2x32(int p) + GLM_FUNC_DECL int64 packInt2x32(i32vec2 const& v); + + /// Convert a packed integer into an integer vector. + /// + /// @see gtc_packing + /// @see int packInt2x16(i32vec2 const& v) + GLM_FUNC_DECL i32vec2 unpackInt2x32(int64 p); + + /// Convert each component from an integer vector into a packed unsigned integer. + /// + /// @see gtc_packing + /// @see u32vec2 unpackUint2x32(int p) + GLM_FUNC_DECL uint64 packUint2x32(u32vec2 const& v); + + /// Convert a packed integer into an integer vector. 
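A quick usage sketch of the fixed-layout packers declared above (a sketch assuming <glm/gtc/packing.hpp> is on the include path; the exact low bits depend on round-to-nearest quantization):

#include <glm/glm.hpp>
#include <glm/gtc/packing.hpp>

// Round-trip an RGB color through the 5.6.5 layout (the classic R5G6B5 framebuffer format).
glm::vec3 const Color(1.0f, 0.5f, 0.0f);
glm::uint16 const Packed = glm::packUnorm1x5_1x6_1x5(Color);
glm::vec3 const Restored = glm::unpackUnorm1x5_1x6_1x5(Packed);
// Restored matches Color to within 1/31 on R and B, and 1/63 on G.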
+
+
+	/// @}
+}// namespace glm
+
+#include "packing.inl"
diff --git a/MacroLibX/third_party/glm/gtc/packing.inl b/MacroLibX/third_party/glm/gtc/packing.inl
new file mode 100644
index 0000000..8c906e1
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtc/packing.inl
@@ -0,0 +1,938 @@
+/// @ref gtc_packing
+
+#include "../ext/scalar_relational.hpp"
+#include "../ext/vector_relational.hpp"
+#include "../common.hpp"
+#include "../vec2.hpp"
+#include "../vec3.hpp"
+#include "../vec4.hpp"
+#include "../detail/type_half.hpp"
+#include <cstring>
+#include <limits>
+
+namespace glm{
+namespace detail
+{
+	GLM_FUNC_QUALIFIER glm::uint16 float2half(glm::uint32 f)
+	{
+		// 10 bits    =>                         EE EEEFFFFF
+		// 11 bits    =>                        EEE EEFFFFFF
+		// Half bits  =>                   SEEEEEFF FFFFFFFF
+		// Float bits => SEEEEEEE EFFFFFFF FFFFFFFF FFFFFFFF
+
+		// 0x00007c00 => 00000000 00000000 01111100 00000000
+		// 0x000003ff => 00000000 00000000 00000011 11111111
+		// 0x38000000 => 00111000 00000000 00000000 00000000
+		// 0x7f800000 => 01111111 10000000 00000000 00000000
+		// 0x00008000 => 00000000 00000000 10000000 00000000
+		return
+			((f >> 16) & 0x8000) | // sign
+			((((f & 0x7f800000) - 0x38000000) >> 13) & 0x7c00) | // exponential
+			((f >> 13) & 0x03ff); // Mantissa
+	}
+
+	GLM_FUNC_QUALIFIER glm::uint32 float2packed11(glm::uint32 f)
+	{
+		// 10 bits    =>                         EE EEEFFFFF
+		// 11 bits    =>                        EEE EEFFFFFF
+		// Half bits  =>                   SEEEEEFF FFFFFFFF
+		// Float bits => SEEEEEEE EFFFFFFF FFFFFFFF FFFFFFFF
+
+		// 0x000007c0 => 00000000 00000000 00000111 11000000
+		// 0x00007c00 => 00000000 00000000 01111100 00000000
+		// 0x000003ff => 00000000 00000000 00000011 11111111
+		// 0x38000000 => 00111000 00000000 00000000 00000000
+		// 0x7f800000 => 01111111 10000000 00000000 00000000
+		// 0x00008000 => 00000000 00000000 10000000 00000000
+		return
+			((((f & 0x7f800000) - 0x38000000) >> 17) & 0x07c0) | // exponential
+			((f >> 17) & 0x003f); // Mantissa
+	}
+
+	GLM_FUNC_QUALIFIER glm::uint32 packed11ToFloat(glm::uint32 p)
+	{
+		// 10 bits    =>                         EE EEEFFFFF
+		// 11 bits    =>                        EEE EEFFFFFF
+		// Half bits  =>                   SEEEEEFF FFFFFFFF
+		// Float bits => SEEEEEEE EFFFFFFF FFFFFFFF FFFFFFFF
+
+		// 0x000007c0 => 00000000 00000000 00000111 11000000
+		// 0x00007c00 => 00000000 00000000 01111100 00000000
+		// 0x000003ff => 00000000 00000000 00000011 11111111
+		// 0x38000000 => 00111000 00000000 00000000 00000000
+		// 0x7f800000 => 01111111 10000000 00000000 00000000
+		// 0x00008000 => 00000000 00000000 10000000 00000000
+		return
+			((((p & 0x07c0) << 17) + 0x38000000) & 0x7f800000) | // exponential
+			((p & 0x003f) << 17); // Mantissa
+	}
+
+	GLM_FUNC_QUALIFIER glm::uint32 float2packed10(glm::uint32 f)
+	{
+		// 10 bits    =>                         EE EEEFFFFF
+		// 11 bits    =>                        EEE EEFFFFFF
+		// Half bits  =>                   SEEEEEFF FFFFFFFF
+		// Float bits => SEEEEEEE EFFFFFFF FFFFFFFF FFFFFFFF
+
+		// 0x0000001F => 00000000 00000000 00000000 00011111
+		// 0x0000003F => 00000000 00000000 00000000 00111111
+		// 0x000003E0 => 00000000 00000000 00000011 11100000
+		// 0x000007C0 => 00000000 00000000 00000111 11000000
+		// 0x00007C00 => 00000000 00000000 01111100 00000000
+		// 0x000003FF => 00000000 00000000 00000011 11111111
+		// 0x38000000 => 00111000 00000000 00000000 00000000
+		// 0x7f800000 => 01111111 10000000 00000000 00000000
+		// 0x00008000 => 00000000 00000000 10000000 00000000
+		return
+			((((f & 0x7f800000) - 0x38000000) >> 18) & 0x03E0) | // exponential
+			((f >> 18) & 0x001f); // Mantissa
+	}
+
+	GLM_FUNC_QUALIFIER glm::uint32
packed10ToFloat(glm::uint32 p) + { + // 10 bits => EE EEEFFFFF + // 11 bits => EEE EEFFFFFF + // Half bits => SEEEEEFF FFFFFFFF + // Float bits => SEEEEEEE EFFFFFFF FFFFFFFF FFFFFFFF + + // 0x0000001F => 00000000 00000000 00000000 00011111 + // 0x0000003F => 00000000 00000000 00000000 00111111 + // 0x000003E0 => 00000000 00000000 00000011 11100000 + // 0x000007C0 => 00000000 00000000 00000111 11000000 + // 0x00007C00 => 00000000 00000000 01111100 00000000 + // 0x000003FF => 00000000 00000000 00000011 11111111 + // 0x38000000 => 00111000 00000000 00000000 00000000 + // 0x7f800000 => 01111111 10000000 00000000 00000000 + // 0x00008000 => 00000000 00000000 10000000 00000000 + return + ((((p & 0x03E0) << 18) + 0x38000000) & 0x7f800000) | // exponential + ((p & 0x001f) << 18); // Mantissa + } + + GLM_FUNC_QUALIFIER glm::uint half2float(glm::uint h) + { + return ((h & 0x8000) << 16) | ((( h & 0x7c00) + 0x1C000) << 13) | ((h & 0x03FF) << 13); + } + + GLM_FUNC_QUALIFIER glm::uint floatTo11bit(float x) + { + if(x == 0.0f) + return 0u; + else if(glm::isnan(x)) + return ~0u; + else if(glm::isinf(x)) + return 0x1Fu << 6u; + + uint Pack = 0u; + memcpy(&Pack, &x, sizeof(Pack)); + return float2packed11(Pack); + } + + GLM_FUNC_QUALIFIER float packed11bitToFloat(glm::uint x) + { + if(x == 0) + return 0.0f; + else if(x == ((1 << 11) - 1)) + return ~0;//NaN + else if(x == (0x1f << 6)) + return ~0;//Inf + + uint Result = packed11ToFloat(x); + + float Temp = 0; + memcpy(&Temp, &Result, sizeof(Temp)); + return Temp; + } + + GLM_FUNC_QUALIFIER glm::uint floatTo10bit(float x) + { + if(x == 0.0f) + return 0u; + else if(glm::isnan(x)) + return ~0u; + else if(glm::isinf(x)) + return 0x1Fu << 5u; + + uint Pack = 0; + memcpy(&Pack, &x, sizeof(Pack)); + return float2packed10(Pack); + } + + GLM_FUNC_QUALIFIER float packed10bitToFloat(glm::uint x) + { + if(x == 0) + return 0.0f; + else if(x == ((1 << 10) - 1)) + return ~0;//NaN + else if(x == (0x1f << 5)) + return ~0;//Inf + + uint Result = packed10ToFloat(x); + + float Temp = 0; + memcpy(&Temp, &Result, sizeof(Temp)); + return Temp; + } + +// GLM_FUNC_QUALIFIER glm::uint f11_f11_f10(float x, float y, float z) +// { +// return ((floatTo11bit(x) & ((1 << 11) - 1)) << 0) | ((floatTo11bit(y) & ((1 << 11) - 1)) << 11) | ((floatTo10bit(z) & ((1 << 10) - 1)) << 22); +// } + + union u3u3u2 + { + struct + { + uint x : 3; + uint y : 3; + uint z : 2; + } data; + uint8 pack; + }; + + union u4u4 + { + struct + { + uint x : 4; + uint y : 4; + } data; + uint8 pack; + }; + + union u4u4u4u4 + { + struct + { + uint x : 4; + uint y : 4; + uint z : 4; + uint w : 4; + } data; + uint16 pack; + }; + + union u5u6u5 + { + struct + { + uint x : 5; + uint y : 6; + uint z : 5; + } data; + uint16 pack; + }; + + union u5u5u5u1 + { + struct + { + uint x : 5; + uint y : 5; + uint z : 5; + uint w : 1; + } data; + uint16 pack; + }; + + union u10u10u10u2 + { + struct + { + uint x : 10; + uint y : 10; + uint z : 10; + uint w : 2; + } data; + uint32 pack; + }; + + union i10i10i10i2 + { + struct + { + int x : 10; + int y : 10; + int z : 10; + int w : 2; + } data; + uint32 pack; + }; + + union u9u9u9e5 + { + struct + { + uint x : 9; + uint y : 9; + uint z : 9; + uint w : 5; + } data; + uint32 pack; + }; + + template + struct compute_half + {}; + + template + struct compute_half<1, Q> + { + GLM_FUNC_QUALIFIER static vec<1, uint16, Q> pack(vec<1, float, Q> const& v) + { + int16 const Unpack(detail::toFloat16(v.x)); + u16vec1 Packed; + memcpy(&Packed, &Unpack, sizeof(Packed)); + return Packed; + } + + 
GLM_FUNC_QUALIFIER static vec<1, float, Q> unpack(vec<1, uint16, Q> const& v) + { + i16vec1 Unpack; + memcpy(&Unpack, &v, sizeof(Unpack)); + return vec<1, float, Q>(detail::toFloat32(v.x)); + } + }; + + template + struct compute_half<2, Q> + { + GLM_FUNC_QUALIFIER static vec<2, uint16, Q> pack(vec<2, float, Q> const& v) + { + vec<2, int16, Q> const Unpack(detail::toFloat16(v.x), detail::toFloat16(v.y)); + u16vec2 Packed; + memcpy(&Packed, &Unpack, sizeof(Packed)); + return Packed; + } + + GLM_FUNC_QUALIFIER static vec<2, float, Q> unpack(vec<2, uint16, Q> const& v) + { + i16vec2 Unpack; + memcpy(&Unpack, &v, sizeof(Unpack)); + return vec<2, float, Q>(detail::toFloat32(v.x), detail::toFloat32(v.y)); + } + }; + + template + struct compute_half<3, Q> + { + GLM_FUNC_QUALIFIER static vec<3, uint16, Q> pack(vec<3, float, Q> const& v) + { + vec<3, int16, Q> const Unpack(detail::toFloat16(v.x), detail::toFloat16(v.y), detail::toFloat16(v.z)); + u16vec3 Packed; + memcpy(&Packed, &Unpack, sizeof(Packed)); + return Packed; + } + + GLM_FUNC_QUALIFIER static vec<3, float, Q> unpack(vec<3, uint16, Q> const& v) + { + i16vec3 Unpack; + memcpy(&Unpack, &v, sizeof(Unpack)); + return vec<3, float, Q>(detail::toFloat32(v.x), detail::toFloat32(v.y), detail::toFloat32(v.z)); + } + }; + + template + struct compute_half<4, Q> + { + GLM_FUNC_QUALIFIER static vec<4, uint16, Q> pack(vec<4, float, Q> const& v) + { + vec<4, int16, Q> const Unpack(detail::toFloat16(v.x), detail::toFloat16(v.y), detail::toFloat16(v.z), detail::toFloat16(v.w)); + u16vec4 Packed; + memcpy(&Packed, &Unpack, sizeof(Packed)); + return Packed; + } + + GLM_FUNC_QUALIFIER static vec<4, float, Q> unpack(vec<4, uint16, Q> const& v) + { + i16vec4 Unpack; + memcpy(&Unpack, &v, sizeof(Unpack)); + return vec<4, float, Q>(detail::toFloat32(v.x), detail::toFloat32(v.y), detail::toFloat32(v.z), detail::toFloat32(v.w)); + } + }; +}//namespace detail + + GLM_FUNC_QUALIFIER uint8 packUnorm1x8(float v) + { + return static_cast(round(clamp(v, 0.0f, 1.0f) * 255.0f)); + } + + GLM_FUNC_QUALIFIER float unpackUnorm1x8(uint8 p) + { + float const Unpack(p); + return Unpack * static_cast(0.0039215686274509803921568627451); // 1 / 255 + } + + GLM_FUNC_QUALIFIER uint16 packUnorm2x8(vec2 const& v) + { + u8vec2 const Topack(round(clamp(v, 0.0f, 1.0f) * 255.0f)); + + uint16 Unpack = 0; + memcpy(&Unpack, &Topack, sizeof(Unpack)); + return Unpack; + } + + GLM_FUNC_QUALIFIER vec2 unpackUnorm2x8(uint16 p) + { + u8vec2 Unpack; + memcpy(&Unpack, &p, sizeof(Unpack)); + return vec2(Unpack) * float(0.0039215686274509803921568627451); // 1 / 255 + } + + GLM_FUNC_QUALIFIER uint8 packSnorm1x8(float v) + { + int8 const Topack(static_cast(round(clamp(v ,-1.0f, 1.0f) * 127.0f))); + uint8 Packed = 0; + memcpy(&Packed, &Topack, sizeof(Packed)); + return Packed; + } + + GLM_FUNC_QUALIFIER float unpackSnorm1x8(uint8 p) + { + int8 Unpack = 0; + memcpy(&Unpack, &p, sizeof(Unpack)); + return clamp( + static_cast(Unpack) * 0.00787401574803149606299212598425f, // 1.0f / 127.0f + -1.0f, 1.0f); + } + + GLM_FUNC_QUALIFIER uint16 packSnorm2x8(vec2 const& v) + { + i8vec2 const Topack(round(clamp(v, -1.0f, 1.0f) * 127.0f)); + uint16 Packed = 0; + memcpy(&Packed, &Topack, sizeof(Packed)); + return Packed; + } + + GLM_FUNC_QUALIFIER vec2 unpackSnorm2x8(uint16 p) + { + i8vec2 Unpack; + memcpy(&Unpack, &p, sizeof(Unpack)); + return clamp( + vec2(Unpack) * 0.00787401574803149606299212598425f, // 1.0f / 127.0f + -1.0f, 1.0f); + } + + GLM_FUNC_QUALIFIER uint16 packUnorm1x16(float s) + { + return 
static_cast(round(clamp(s, 0.0f, 1.0f) * 65535.0f)); + } + + GLM_FUNC_QUALIFIER float unpackUnorm1x16(uint16 p) + { + float const Unpack(p); + return Unpack * 1.5259021896696421759365224689097e-5f; // 1.0 / 65535.0 + } + + GLM_FUNC_QUALIFIER uint64 packUnorm4x16(vec4 const& v) + { + u16vec4 const Topack(round(clamp(v , 0.0f, 1.0f) * 65535.0f)); + uint64 Packed = 0; + memcpy(&Packed, &Topack, sizeof(Packed)); + return Packed; + } + + GLM_FUNC_QUALIFIER vec4 unpackUnorm4x16(uint64 p) + { + u16vec4 Unpack; + memcpy(&Unpack, &p, sizeof(Unpack)); + return vec4(Unpack) * 1.5259021896696421759365224689097e-5f; // 1.0 / 65535.0 + } + + GLM_FUNC_QUALIFIER uint16 packSnorm1x16(float v) + { + int16 const Topack = static_cast(round(clamp(v ,-1.0f, 1.0f) * 32767.0f)); + uint16 Packed = 0; + memcpy(&Packed, &Topack, sizeof(Packed)); + return Packed; + } + + GLM_FUNC_QUALIFIER float unpackSnorm1x16(uint16 p) + { + int16 Unpack = 0; + memcpy(&Unpack, &p, sizeof(Unpack)); + return clamp( + static_cast(Unpack) * 3.0518509475997192297128208258309e-5f, //1.0f / 32767.0f, + -1.0f, 1.0f); + } + + GLM_FUNC_QUALIFIER uint64 packSnorm4x16(vec4 const& v) + { + i16vec4 const Topack(round(clamp(v ,-1.0f, 1.0f) * 32767.0f)); + uint64 Packed = 0; + memcpy(&Packed, &Topack, sizeof(Packed)); + return Packed; + } + + GLM_FUNC_QUALIFIER vec4 unpackSnorm4x16(uint64 p) + { + i16vec4 Unpack; + memcpy(&Unpack, &p, sizeof(Unpack)); + return clamp( + vec4(Unpack) * 3.0518509475997192297128208258309e-5f, //1.0f / 32767.0f, + -1.0f, 1.0f); + } + + GLM_FUNC_QUALIFIER uint16 packHalf1x16(float v) + { + int16 const Topack(detail::toFloat16(v)); + uint16 Packed = 0; + memcpy(&Packed, &Topack, sizeof(Packed)); + return Packed; + } + + GLM_FUNC_QUALIFIER float unpackHalf1x16(uint16 v) + { + int16 Unpack = 0; + memcpy(&Unpack, &v, sizeof(Unpack)); + return detail::toFloat32(Unpack); + } + + GLM_FUNC_QUALIFIER uint64 packHalf4x16(glm::vec4 const& v) + { + i16vec4 const Unpack( + detail::toFloat16(v.x), + detail::toFloat16(v.y), + detail::toFloat16(v.z), + detail::toFloat16(v.w)); + uint64 Packed = 0; + memcpy(&Packed, &Unpack, sizeof(Packed)); + return Packed; + } + + GLM_FUNC_QUALIFIER glm::vec4 unpackHalf4x16(uint64 v) + { + i16vec4 Unpack; + memcpy(&Unpack, &v, sizeof(Unpack)); + return vec4( + detail::toFloat32(Unpack.x), + detail::toFloat32(Unpack.y), + detail::toFloat32(Unpack.z), + detail::toFloat32(Unpack.w)); + } + + GLM_FUNC_QUALIFIER uint32 packI3x10_1x2(ivec4 const& v) + { + detail::i10i10i10i2 Result; + Result.data.x = v.x; + Result.data.y = v.y; + Result.data.z = v.z; + Result.data.w = v.w; + return Result.pack; + } + + GLM_FUNC_QUALIFIER ivec4 unpackI3x10_1x2(uint32 v) + { + detail::i10i10i10i2 Unpack; + Unpack.pack = v; + return ivec4( + Unpack.data.x, + Unpack.data.y, + Unpack.data.z, + Unpack.data.w); + } + + GLM_FUNC_QUALIFIER uint32 packU3x10_1x2(uvec4 const& v) + { + detail::u10u10u10u2 Result; + Result.data.x = v.x; + Result.data.y = v.y; + Result.data.z = v.z; + Result.data.w = v.w; + return Result.pack; + } + + GLM_FUNC_QUALIFIER uvec4 unpackU3x10_1x2(uint32 v) + { + detail::u10u10u10u2 Unpack; + Unpack.pack = v; + return uvec4( + Unpack.data.x, + Unpack.data.y, + Unpack.data.z, + Unpack.data.w); + } + + GLM_FUNC_QUALIFIER uint32 packSnorm3x10_1x2(vec4 const& v) + { + ivec4 const Pack(round(clamp(v,-1.0f, 1.0f) * vec4(511.f, 511.f, 511.f, 1.f))); + + detail::i10i10i10i2 Result; + Result.data.x = Pack.x; + Result.data.y = Pack.y; + Result.data.z = Pack.z; + Result.data.w = Pack.w; + return Result.pack; + } + + 
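The 10.10.10.2 signed-normalized packers (packSnorm3x10_1x2 above, its inverse next) are the usual fit for compressed vertex normals; a minimal sketch, where packNormal and unpackNormal are hypothetical helper names of ours:

#include <glm/glm.hpp>
#include <glm/gtc/packing.hpp>

// Pack a unit normal plus a handedness sign into 32 bits.
glm::uint32 packNormal(glm::vec3 const& n, float handedness)
{
	return glm::packSnorm3x10_1x2(glm::vec4(n, handedness)); // xyz quantized in 1/511 steps
}

glm::vec4 unpackNormal(glm::uint32 p)
{
	return glm::unpackSnorm3x10_1x2(p); // components clamped back into [-1, 1]
}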
+	GLM_FUNC_QUALIFIER vec4 unpackSnorm3x10_1x2(uint32 v)
+	{
+		detail::i10i10i10i2 Unpack;
+		Unpack.pack = v;
+
+		vec4 const Result(Unpack.data.x, Unpack.data.y, Unpack.data.z, Unpack.data.w);
+
+		return clamp(Result * vec4(1.f / 511.f, 1.f / 511.f, 1.f / 511.f, 1.f), -1.0f, 1.0f);
+	}
+
+	GLM_FUNC_QUALIFIER uint32 packUnorm3x10_1x2(vec4 const& v)
+	{
+		uvec4 const Unpack(round(clamp(v, 0.0f, 1.0f) * vec4(1023.f, 1023.f, 1023.f, 3.f)));
+
+		detail::u10u10u10u2 Result;
+		Result.data.x = Unpack.x;
+		Result.data.y = Unpack.y;
+		Result.data.z = Unpack.z;
+		Result.data.w = Unpack.w;
+		return Result.pack;
+	}
+
+	GLM_FUNC_QUALIFIER vec4 unpackUnorm3x10_1x2(uint32 v)
+	{
+		vec4 const ScaleFactors(1.0f / 1023.f, 1.0f / 1023.f, 1.0f / 1023.f, 1.0f / 3.f);
+
+		detail::u10u10u10u2 Unpack;
+		Unpack.pack = v;
+		return vec4(Unpack.data.x, Unpack.data.y, Unpack.data.z, Unpack.data.w) * ScaleFactors;
+	}
+
+	GLM_FUNC_QUALIFIER uint32 packF2x11_1x10(vec3 const& v)
+	{
+		return
+			((detail::floatTo11bit(v.x) & ((1 << 11) - 1)) << 0) |
+			((detail::floatTo11bit(v.y) & ((1 << 11) - 1)) << 11) |
+			((detail::floatTo10bit(v.z) & ((1 << 10) - 1)) << 22);
+	}
+
+	GLM_FUNC_QUALIFIER vec3 unpackF2x11_1x10(uint32 v)
+	{
+		return vec3(
+			detail::packed11bitToFloat(v >> 0),
+			detail::packed11bitToFloat(v >> 11),
+			detail::packed10bitToFloat(v >> 22));
+	}
+
+	GLM_FUNC_QUALIFIER uint32 packF3x9_E1x5(vec3 const& v)
+	{
+		float const SharedExpMax = (pow(2.0f, 9.0f - 1.0f) / pow(2.0f, 9.0f)) * pow(2.0f, 31.f - 15.f);
+		vec3 const Color = clamp(v, 0.0f, SharedExpMax);
+		float const MaxColor = max(Color.x, max(Color.y, Color.z));
+
+		float const ExpSharedP = max(-15.f - 1.f, floor(log2(MaxColor))) + 1.0f + 15.f;
+		float const MaxShared = floor(MaxColor / pow(2.0f, (ExpSharedP - 15.f - 9.f)) + 0.5f);
+		float const ExpShared = equal(MaxShared, pow(2.0f, 9.0f), epsilon<float>()) ? ExpSharedP + 1.0f : ExpSharedP;
+
+		uvec3 const ColorComp(floor(Color / pow(2.f, (ExpShared - 15.f - 9.f)) + 0.5f));
+
+		detail::u9u9u9e5 Unpack;
+		Unpack.data.x = ColorComp.x;
+		Unpack.data.y = ColorComp.y;
+		Unpack.data.z = ColorComp.z;
+		Unpack.data.w = uint(ExpShared);
+		return Unpack.pack;
+	}
+
+	GLM_FUNC_QUALIFIER vec3 unpackF3x9_E1x5(uint32 v)
+	{
+		detail::u9u9u9e5 Unpack;
+		Unpack.pack = v;
+
+		return vec3(Unpack.data.x, Unpack.data.y, Unpack.data.z) * pow(2.0f, Unpack.data.w - 15.f - 9.f);
+	}
+
+	// Based on Brian Karis http://graphicrants.blogspot.fr/2009/04/rgbm-color-encoding.html
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<4, T, Q> packRGBM(vec<3, T, Q> const& rgb)
+	{
+		vec<3, T, Q> const Color(rgb * static_cast<T>(1.0 / 6.0));
+		T Alpha = clamp(max(max(Color.x, Color.y), max(Color.z, static_cast<T>(1e-6))), static_cast<T>(0), static_cast<T>(1));
+		Alpha = ceil(Alpha * static_cast<T>(255.0)) / static_cast<T>(255.0);
+		return vec<4, T, Q>(Color / Alpha, Alpha);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<3, T, Q> unpackRGBM(vec<4, T, Q> const& rgbm)
+	{
+		return vec<3, T, Q>(rgbm.x, rgbm.y, rgbm.z) * rgbm.w * static_cast<T>(6);
+	}
+
+	template<length_t L, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, uint16, Q> packHalf(vec<L, float, Q> const& v)
+	{
+		return detail::compute_half<L, Q>::pack(v);
+	}
+
+	template<length_t L, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, float, Q> unpackHalf(vec<L, uint16, Q> const& v)
+	{
+		return detail::compute_half<L, Q>::unpack(v);
+	}
+
+	template<typename uintType, length_t L, typename floatType, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, uintType, Q> packUnorm(vec<L, floatType, Q> const& v)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<uintType>::is_integer, "uintType must be an integer type");
+		GLM_STATIC_ASSERT(std::numeric_limits<floatType>::is_iec559, "floatType must be a floating point type");
+
+		return vec<L, uintType, Q>(round(clamp(v, static_cast<floatType>(0), static_cast<floatType>(1)) * static_cast<floatType>(std::numeric_limits<uintType>::max())));
+	}
+
+	template<typename floatType, length_t L, typename uintType, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, floatType, Q> unpackUnorm(vec<L, uintType, Q> const& v)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<uintType>::is_integer, "uintType must be an integer type");
+		GLM_STATIC_ASSERT(std::numeric_limits<floatType>::is_iec559, "floatType must be a floating point type");
+
+		return vec<L, floatType, Q>(v) * (static_cast<floatType>(1) / static_cast<floatType>(std::numeric_limits<uintType>::max()));
+	}
+
+	template<typename intType, length_t L, typename floatType, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, intType, Q> packSnorm(vec<L, floatType, Q> const& v)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<intType>::is_integer, "intType must be an integer type");
+		GLM_STATIC_ASSERT(std::numeric_limits<floatType>::is_iec559, "floatType must be a floating point type");
+
+		return vec<L, intType, Q>(round(clamp(v, static_cast<floatType>(-1), static_cast<floatType>(1)) * static_cast<floatType>(std::numeric_limits<intType>::max())));
+	}
+
+	template<typename floatType, length_t L, typename intType, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, floatType, Q> unpackSnorm(vec<L, intType, Q> const& v)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<intType>::is_integer, "intType must be an integer type");
+		GLM_STATIC_ASSERT(std::numeric_limits<floatType>::is_iec559, "floatType must be a floating point type");
+
+		return clamp(vec<L, floatType, Q>(v) * (static_cast<floatType>(1) / static_cast<floatType>(std::numeric_limits<intType>::max())), static_cast<floatType>(-1), static_cast<floatType>(1));
+	}
+
+	GLM_FUNC_QUALIFIER uint8 packUnorm2x4(vec2 const& v)
+	{
+		u32vec2 const Unpack(round(clamp(v, 0.0f, 1.0f) * 15.0f));
+		detail::u4u4 Result;
+		Result.data.x = Unpack.x;
+		Result.data.y = Unpack.y;
+		return Result.pack;
+	}
+
+	GLM_FUNC_QUALIFIER vec2 unpackUnorm2x4(uint8 v)
+	{
+		float const ScaleFactor(1.f / 15.f);
+		detail::u4u4 Unpack;
+		Unpack.pack = v;
+		return vec2(Unpack.data.x, Unpack.data.y) * ScaleFactor;
+	}
+
+	GLM_FUNC_QUALIFIER uint16 packUnorm4x4(vec4 const& v)
+	{
+		u32vec4 const Unpack(round(clamp(v, 0.0f, 1.0f) * 15.0f));
+		detail::u4u4u4u4 Result;
+		Result.data.x = Unpack.x;
+		Result.data.y = Unpack.y;
+		Result.data.z = Unpack.z;
+		Result.data.w = Unpack.w;
+		return Result.pack;
+	}
+
+	GLM_FUNC_QUALIFIER vec4
unpackUnorm4x4(uint16 v) + { + float const ScaleFactor(1.f / 15.f); + detail::u4u4u4u4 Unpack; + Unpack.pack = v; + return vec4(Unpack.data.x, Unpack.data.y, Unpack.data.z, Unpack.data.w) * ScaleFactor; + } + + GLM_FUNC_QUALIFIER uint16 packUnorm1x5_1x6_1x5(vec3 const& v) + { + u32vec3 const Unpack(round(clamp(v, 0.0f, 1.0f) * vec3(31.f, 63.f, 31.f))); + detail::u5u6u5 Result; + Result.data.x = Unpack.x; + Result.data.y = Unpack.y; + Result.data.z = Unpack.z; + return Result.pack; + } + + GLM_FUNC_QUALIFIER vec3 unpackUnorm1x5_1x6_1x5(uint16 v) + { + vec3 const ScaleFactor(1.f / 31.f, 1.f / 63.f, 1.f / 31.f); + detail::u5u6u5 Unpack; + Unpack.pack = v; + return vec3(Unpack.data.x, Unpack.data.y, Unpack.data.z) * ScaleFactor; + } + + GLM_FUNC_QUALIFIER uint16 packUnorm3x5_1x1(vec4 const& v) + { + u32vec4 const Unpack(round(clamp(v, 0.0f, 1.0f) * vec4(31.f, 31.f, 31.f, 1.f))); + detail::u5u5u5u1 Result; + Result.data.x = Unpack.x; + Result.data.y = Unpack.y; + Result.data.z = Unpack.z; + Result.data.w = Unpack.w; + return Result.pack; + } + + GLM_FUNC_QUALIFIER vec4 unpackUnorm3x5_1x1(uint16 v) + { + vec4 const ScaleFactor(1.f / 31.f, 1.f / 31.f, 1.f / 31.f, 1.f); + detail::u5u5u5u1 Unpack; + Unpack.pack = v; + return vec4(Unpack.data.x, Unpack.data.y, Unpack.data.z, Unpack.data.w) * ScaleFactor; + } + + GLM_FUNC_QUALIFIER uint8 packUnorm2x3_1x2(vec3 const& v) + { + u32vec3 const Unpack(round(clamp(v, 0.0f, 1.0f) * vec3(7.f, 7.f, 3.f))); + detail::u3u3u2 Result; + Result.data.x = Unpack.x; + Result.data.y = Unpack.y; + Result.data.z = Unpack.z; + return Result.pack; + } + + GLM_FUNC_QUALIFIER vec3 unpackUnorm2x3_1x2(uint8 v) + { + vec3 const ScaleFactor(1.f / 7.f, 1.f / 7.f, 1.f / 3.f); + detail::u3u3u2 Unpack; + Unpack.pack = v; + return vec3(Unpack.data.x, Unpack.data.y, Unpack.data.z) * ScaleFactor; + } + + GLM_FUNC_QUALIFIER int16 packInt2x8(i8vec2 const& v) + { + int16 Pack = 0; + memcpy(&Pack, &v, sizeof(Pack)); + return Pack; + } + + GLM_FUNC_QUALIFIER i8vec2 unpackInt2x8(int16 p) + { + i8vec2 Unpack; + memcpy(&Unpack, &p, sizeof(Unpack)); + return Unpack; + } + + GLM_FUNC_QUALIFIER uint16 packUint2x8(u8vec2 const& v) + { + uint16 Pack = 0; + memcpy(&Pack, &v, sizeof(Pack)); + return Pack; + } + + GLM_FUNC_QUALIFIER u8vec2 unpackUint2x8(uint16 p) + { + u8vec2 Unpack; + memcpy(&Unpack, &p, sizeof(Unpack)); + return Unpack; + } + + GLM_FUNC_QUALIFIER int32 packInt4x8(i8vec4 const& v) + { + int32 Pack = 0; + memcpy(&Pack, &v, sizeof(Pack)); + return Pack; + } + + GLM_FUNC_QUALIFIER i8vec4 unpackInt4x8(int32 p) + { + i8vec4 Unpack; + memcpy(&Unpack, &p, sizeof(Unpack)); + return Unpack; + } + + GLM_FUNC_QUALIFIER uint32 packUint4x8(u8vec4 const& v) + { + uint32 Pack = 0; + memcpy(&Pack, &v, sizeof(Pack)); + return Pack; + } + + GLM_FUNC_QUALIFIER u8vec4 unpackUint4x8(uint32 p) + { + u8vec4 Unpack; + memcpy(&Unpack, &p, sizeof(Unpack)); + return Unpack; + } + + GLM_FUNC_QUALIFIER int packInt2x16(i16vec2 const& v) + { + int Pack = 0; + memcpy(&Pack, &v, sizeof(Pack)); + return Pack; + } + + GLM_FUNC_QUALIFIER i16vec2 unpackInt2x16(int p) + { + i16vec2 Unpack; + memcpy(&Unpack, &p, sizeof(Unpack)); + return Unpack; + } + + GLM_FUNC_QUALIFIER int64 packInt4x16(i16vec4 const& v) + { + int64 Pack = 0; + memcpy(&Pack, &v, sizeof(Pack)); + return Pack; + } + + GLM_FUNC_QUALIFIER i16vec4 unpackInt4x16(int64 p) + { + i16vec4 Unpack; + memcpy(&Unpack, &p, sizeof(Unpack)); + return Unpack; + } + + GLM_FUNC_QUALIFIER uint packUint2x16(u16vec2 const& v) + { + uint Pack = 0; + memcpy(&Pack, &v, 
sizeof(Pack)); + return Pack; + } + + GLM_FUNC_QUALIFIER u16vec2 unpackUint2x16(uint p) + { + u16vec2 Unpack; + memcpy(&Unpack, &p, sizeof(Unpack)); + return Unpack; + } + + GLM_FUNC_QUALIFIER uint64 packUint4x16(u16vec4 const& v) + { + uint64 Pack = 0; + memcpy(&Pack, &v, sizeof(Pack)); + return Pack; + } + + GLM_FUNC_QUALIFIER u16vec4 unpackUint4x16(uint64 p) + { + u16vec4 Unpack; + memcpy(&Unpack, &p, sizeof(Unpack)); + return Unpack; + } + + GLM_FUNC_QUALIFIER int64 packInt2x32(i32vec2 const& v) + { + int64 Pack = 0; + memcpy(&Pack, &v, sizeof(Pack)); + return Pack; + } + + GLM_FUNC_QUALIFIER i32vec2 unpackInt2x32(int64 p) + { + i32vec2 Unpack; + memcpy(&Unpack, &p, sizeof(Unpack)); + return Unpack; + } + + GLM_FUNC_QUALIFIER uint64 packUint2x32(u32vec2 const& v) + { + uint64 Pack = 0; + memcpy(&Pack, &v, sizeof(Pack)); + return Pack; + } + + GLM_FUNC_QUALIFIER u32vec2 unpackUint2x32(uint64 p) + { + u32vec2 Unpack; + memcpy(&Unpack, &p, sizeof(Unpack)); + return Unpack; + } +}//namespace glm + diff --git a/MacroLibX/third_party/glm/gtc/quaternion.hpp b/MacroLibX/third_party/glm/gtc/quaternion.hpp new file mode 100644 index 0000000..359e072 --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/quaternion.hpp @@ -0,0 +1,173 @@ +/// @ref gtc_quaternion +/// @file glm/gtc/quaternion.hpp +/// +/// @see core (dependence) +/// @see gtc_constants (dependence) +/// +/// @defgroup gtc_quaternion GLM_GTC_quaternion +/// @ingroup gtc +/// +/// Include to use the features of this extension. +/// +/// Defines a templated quaternion type and several quaternion operations. + +#pragma once + +// Dependency: +#include "../gtc/constants.hpp" +#include "../gtc/matrix_transform.hpp" +#include "../ext/vector_relational.hpp" +#include "../ext/quaternion_common.hpp" +#include "../ext/quaternion_float.hpp" +#include "../ext/quaternion_float_precision.hpp" +#include "../ext/quaternion_double.hpp" +#include "../ext/quaternion_double_precision.hpp" +#include "../ext/quaternion_relational.hpp" +#include "../ext/quaternion_geometric.hpp" +#include "../ext/quaternion_trigonometric.hpp" +#include "../ext/quaternion_transform.hpp" +#include "../detail/type_mat3x3.hpp" +#include "../detail/type_mat4x4.hpp" +#include "../detail/type_vec3.hpp" +#include "../detail/type_vec4.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_GTC_quaternion extension included") +#endif + +namespace glm +{ + /// @addtogroup gtc_quaternion + /// @{ + + /// Returns euler angles, pitch as x, yaw as y, roll as z. + /// The result is expressed in radians. + /// + /// @tparam T Floating-point scalar types. + /// + /// @see gtc_quaternion + template + GLM_FUNC_DECL vec<3, T, Q> eulerAngles(qua const& x); + + /// Returns roll value of euler angles expressed in radians. + /// + /// @tparam T Floating-point scalar types. + /// + /// @see gtc_quaternion + template + GLM_FUNC_DECL T roll(qua const& x); + + /// Returns pitch value of euler angles expressed in radians. + /// + /// @tparam T Floating-point scalar types. + /// + /// @see gtc_quaternion + template + GLM_FUNC_DECL T pitch(qua const& x); + + /// Returns yaw value of euler angles expressed in radians. + /// + /// @tparam T Floating-point scalar types. + /// + /// @see gtc_quaternion + template + GLM_FUNC_DECL T yaw(qua const& x); + + /// Converts a quaternion to a 3 * 3 matrix. + /// + /// @tparam T Floating-point scalar types. 
+ /// + /// @see gtc_quaternion + template + GLM_FUNC_DECL mat<3, 3, T, Q> mat3_cast(qua const& x); + + /// Converts a quaternion to a 4 * 4 matrix. + /// + /// @tparam T Floating-point scalar types. + /// + /// @see gtc_quaternion + template + GLM_FUNC_DECL mat<4, 4, T, Q> mat4_cast(qua const& x); + + /// Converts a pure rotation 3 * 3 matrix to a quaternion. + /// + /// @tparam T Floating-point scalar types. + /// + /// @see gtc_quaternion + template + GLM_FUNC_DECL qua quat_cast(mat<3, 3, T, Q> const& x); + + /// Converts a pure rotation 4 * 4 matrix to a quaternion. + /// + /// @tparam T Floating-point scalar types. + /// + /// @see gtc_quaternion + template + GLM_FUNC_DECL qua quat_cast(mat<4, 4, T, Q> const& x); + + /// Returns the component-wise comparison result of x < y. + /// + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see ext_quaternion_relational + template + GLM_FUNC_DECL vec<4, bool, Q> lessThan(qua const& x, qua const& y); + + /// Returns the component-wise comparison of result x <= y. + /// + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see ext_quaternion_relational + template + GLM_FUNC_DECL vec<4, bool, Q> lessThanEqual(qua const& x, qua const& y); + + /// Returns the component-wise comparison of result x > y. + /// + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see ext_quaternion_relational + template + GLM_FUNC_DECL vec<4, bool, Q> greaterThan(qua const& x, qua const& y); + + /// Returns the component-wise comparison of result x >= y. + /// + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see ext_quaternion_relational + template + GLM_FUNC_DECL vec<4, bool, Q> greaterThanEqual(qua const& x, qua const& y); + + /// Build a look at quaternion based on the default handedness. + /// + /// @param direction Desired forward direction. Needs to be normalized. + /// @param up Up vector, how the camera is oriented. Typically (0, 1, 0). + template + GLM_FUNC_DECL qua quatLookAt( + vec<3, T, Q> const& direction, + vec<3, T, Q> const& up); + + /// Build a right-handed look at quaternion. + /// + /// @param direction Desired forward direction onto which the -z-axis gets mapped. Needs to be normalized. + /// @param up Up vector, how the camera is oriented. Typically (0, 1, 0). + template + GLM_FUNC_DECL qua quatLookAtRH( + vec<3, T, Q> const& direction, + vec<3, T, Q> const& up); + + /// Build a left-handed look at quaternion. + /// + /// @param direction Desired forward direction onto which the +z-axis gets mapped. Needs to be normalized. + /// @param up Up vector, how the camera is oriented. Typically (0, 1, 0). 
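Rounding out this trio is the left-handed overload declared next; a usage sketch of the family (hypothetical camera values; direction must be normalized, as the docs above require):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

glm::quat cameraOrientation(glm::vec3 const& eye, glm::vec3 const& target)
{
	glm::vec3 const direction = glm::normalize(target - eye); // normalize before calling
	glm::vec3 const up(0.0f, 1.0f, 0.0f);
	// quatLookAt forwards to the RH or LH variant according to GLM_CONFIG_CLIP_CONTROL.
	return glm::quatLookAt(direction, up);
}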
+ template + GLM_FUNC_DECL qua quatLookAtLH( + vec<3, T, Q> const& direction, + vec<3, T, Q> const& up); + /// @} +} //namespace glm + +#include "quaternion.inl" diff --git a/MacroLibX/third_party/glm/gtc/quaternion.inl b/MacroLibX/third_party/glm/gtc/quaternion.inl new file mode 100644 index 0000000..9dd037e --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/quaternion.inl @@ -0,0 +1,200 @@ +#include "../trigonometric.hpp" +#include "../geometric.hpp" +#include "../exponential.hpp" +#include "epsilon.hpp" +#include + +namespace glm +{ + template + GLM_FUNC_QUALIFIER vec<3, T, Q> eulerAngles(qua const& x) + { + return vec<3, T, Q>(pitch(x), yaw(x), roll(x)); + } + + template + GLM_FUNC_QUALIFIER T roll(qua const& q) + { + return static_cast(atan(static_cast(2) * (q.x * q.y + q.w * q.z), q.w * q.w + q.x * q.x - q.y * q.y - q.z * q.z)); + } + + template + GLM_FUNC_QUALIFIER T pitch(qua const& q) + { + //return T(atan(T(2) * (q.y * q.z + q.w * q.x), q.w * q.w - q.x * q.x - q.y * q.y + q.z * q.z)); + T const y = static_cast(2) * (q.y * q.z + q.w * q.x); + T const x = q.w * q.w - q.x * q.x - q.y * q.y + q.z * q.z; + + if(all(equal(vec<2, T, Q>(x, y), vec<2, T, Q>(0), epsilon()))) //avoid atan2(0,0) - handle singularity - Matiis + return static_cast(static_cast(2) * atan(q.x, q.w)); + + return static_cast(atan(y, x)); + } + + template + GLM_FUNC_QUALIFIER T yaw(qua const& q) + { + return asin(clamp(static_cast(-2) * (q.x * q.z - q.w * q.y), static_cast(-1), static_cast(1))); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> mat3_cast(qua const& q) + { + mat<3, 3, T, Q> Result(T(1)); + T qxx(q.x * q.x); + T qyy(q.y * q.y); + T qzz(q.z * q.z); + T qxz(q.x * q.z); + T qxy(q.x * q.y); + T qyz(q.y * q.z); + T qwx(q.w * q.x); + T qwy(q.w * q.y); + T qwz(q.w * q.z); + + Result[0][0] = T(1) - T(2) * (qyy + qzz); + Result[0][1] = T(2) * (qxy + qwz); + Result[0][2] = T(2) * (qxz - qwy); + + Result[1][0] = T(2) * (qxy - qwz); + Result[1][1] = T(1) - T(2) * (qxx + qzz); + Result[1][2] = T(2) * (qyz + qwx); + + Result[2][0] = T(2) * (qxz + qwy); + Result[2][1] = T(2) * (qyz - qwx); + Result[2][2] = T(1) - T(2) * (qxx + qyy); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> mat4_cast(qua const& q) + { + return mat<4, 4, T, Q>(mat3_cast(q)); + } + + template + GLM_FUNC_QUALIFIER qua quat_cast(mat<3, 3, T, Q> const& m) + { + T fourXSquaredMinus1 = m[0][0] - m[1][1] - m[2][2]; + T fourYSquaredMinus1 = m[1][1] - m[0][0] - m[2][2]; + T fourZSquaredMinus1 = m[2][2] - m[0][0] - m[1][1]; + T fourWSquaredMinus1 = m[0][0] + m[1][1] + m[2][2]; + + int biggestIndex = 0; + T fourBiggestSquaredMinus1 = fourWSquaredMinus1; + if(fourXSquaredMinus1 > fourBiggestSquaredMinus1) + { + fourBiggestSquaredMinus1 = fourXSquaredMinus1; + biggestIndex = 1; + } + if(fourYSquaredMinus1 > fourBiggestSquaredMinus1) + { + fourBiggestSquaredMinus1 = fourYSquaredMinus1; + biggestIndex = 2; + } + if(fourZSquaredMinus1 > fourBiggestSquaredMinus1) + { + fourBiggestSquaredMinus1 = fourZSquaredMinus1; + biggestIndex = 3; + } + + T biggestVal = sqrt(fourBiggestSquaredMinus1 + static_cast(1)) * static_cast(0.5); + T mult = static_cast(0.25) / biggestVal; + + switch(biggestIndex) + { + case 0: + return qua(biggestVal, (m[1][2] - m[2][1]) * mult, (m[2][0] - m[0][2]) * mult, (m[0][1] - m[1][0]) * mult); + case 1: + return qua((m[1][2] - m[2][1]) * mult, biggestVal, (m[0][1] + m[1][0]) * mult, (m[2][0] + m[0][2]) * mult); + case 2: + return qua((m[2][0] - m[0][2]) * mult, (m[0][1] + m[1][0]) * mult, biggestVal, (m[1][2] + 
m[2][1]) * mult); + case 3: + return qua((m[0][1] - m[1][0]) * mult, (m[2][0] + m[0][2]) * mult, (m[1][2] + m[2][1]) * mult, biggestVal); + default: // Silence a -Wswitch-default warning in GCC. Should never actually get here. Assert is just for sanity. + assert(false); + return qua(1, 0, 0, 0); + } + } + + template + GLM_FUNC_QUALIFIER qua quat_cast(mat<4, 4, T, Q> const& m4) + { + return quat_cast(mat<3, 3, T, Q>(m4)); + } + + template + GLM_FUNC_QUALIFIER vec<4, bool, Q> lessThan(qua const& x, qua const& y) + { + vec<4, bool, Q> Result; + for(length_t i = 0; i < x.length(); ++i) + Result[i] = x[i] < y[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<4, bool, Q> lessThanEqual(qua const& x, qua const& y) + { + vec<4, bool, Q> Result; + for(length_t i = 0; i < x.length(); ++i) + Result[i] = x[i] <= y[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<4, bool, Q> greaterThan(qua const& x, qua const& y) + { + vec<4, bool, Q> Result; + for(length_t i = 0; i < x.length(); ++i) + Result[i] = x[i] > y[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<4, bool, Q> greaterThanEqual(qua const& x, qua const& y) + { + vec<4, bool, Q> Result; + for(length_t i = 0; i < x.length(); ++i) + Result[i] = x[i] >= y[i]; + return Result; + } + + + template + GLM_FUNC_QUALIFIER qua quatLookAt(vec<3, T, Q> const& direction, vec<3, T, Q> const& up) + { +# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT + return quatLookAtLH(direction, up); +# else + return quatLookAtRH(direction, up); +# endif + } + + template + GLM_FUNC_QUALIFIER qua quatLookAtRH(vec<3, T, Q> const& direction, vec<3, T, Q> const& up) + { + mat<3, 3, T, Q> Result; + + Result[2] = -direction; + Result[0] = normalize(cross(up, Result[2])); + Result[1] = cross(Result[2], Result[0]); + + return quat_cast(Result); + } + + template + GLM_FUNC_QUALIFIER qua quatLookAtLH(vec<3, T, Q> const& direction, vec<3, T, Q> const& up) + { + mat<3, 3, T, Q> Result; + + Result[2] = direction; + Result[0] = normalize(cross(up, Result[2])); + Result[1] = cross(Result[2], Result[0]); + + return quat_cast(Result); + } +}//namespace glm + +#if GLM_CONFIG_SIMD == GLM_ENABLE +# include "quaternion_simd.inl" +#endif + diff --git a/MacroLibX/third_party/glm/gtc/quaternion_simd.inl b/MacroLibX/third_party/glm/gtc/quaternion_simd.inl new file mode 100644 index 0000000..e69de29 diff --git a/MacroLibX/third_party/glm/gtc/random.hpp b/MacroLibX/third_party/glm/gtc/random.hpp new file mode 100644 index 0000000..9a85958 --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/random.hpp @@ -0,0 +1,82 @@ +/// @ref gtc_random +/// @file glm/gtc/random.hpp +/// +/// @see core (dependence) +/// @see gtx_random (extended) +/// +/// @defgroup gtc_random GLM_GTC_random +/// @ingroup gtc +/// +/// Include to use the features of this extension. +/// +/// Generate random number from various distribution methods. + +#pragma once + +// Dependency: +#include "../ext/scalar_int_sized.hpp" +#include "../ext/scalar_uint_sized.hpp" +#include "../detail/qualifier.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_GTC_random extension included") +#endif + +namespace glm +{ + /// @addtogroup gtc_random + /// @{ + + /// Generate random numbers in the interval [Min, Max], according a linear distribution + /// + /// @param Min Minimum value included in the sampling + /// @param Max Maximum value included in the sampling + /// @tparam genType Value type. Currently supported: float or double scalars. 
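The scalar overload is declared just below; a short sketch of these distributions in use. The generators are built on std::rand() (see the compute_rand specializations in random.inl further down), so std::srand seeds them; note also that gaussRand's implementation multiplies by Deviation twice, so double-check the spread it produces:

#include <cstdlib>
#include <ctime>
#include <glm/glm.hpp>
#include <glm/gtc/random.hpp>

int main()
{
	std::srand(static_cast<unsigned>(std::time(nullptr))); // seeds the std::rand()-based generators
	float s = glm::linearRand(0.0f, 1.0f);   // uniform scalar in [0, 1]
	glm::vec2 p = glm::diskRand(2.0f);       // uniform point inside a radius-2 disk
	float g = glm::gaussRand(0.0f, 1.0f);    // polar (Marsaglia) rejection method
	(void)s; (void)p; (void)g;
	return 0;
}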
+ /// @see gtc_random + template + GLM_FUNC_DECL genType linearRand(genType Min, genType Max); + + /// Generate random numbers in the interval [Min, Max], according a linear distribution + /// + /// @param Min Minimum value included in the sampling + /// @param Max Maximum value included in the sampling + /// @tparam T Value type. Currently supported: float or double. + /// + /// @see gtc_random + template + GLM_FUNC_DECL vec linearRand(vec const& Min, vec const& Max); + + /// Generate random numbers in the interval [Min, Max], according a gaussian distribution + /// + /// @see gtc_random + template + GLM_FUNC_DECL genType gaussRand(genType Mean, genType Deviation); + + /// Generate a random 2D vector which coordinates are regulary distributed on a circle of a given radius + /// + /// @see gtc_random + template + GLM_FUNC_DECL vec<2, T, defaultp> circularRand(T Radius); + + /// Generate a random 3D vector which coordinates are regulary distributed on a sphere of a given radius + /// + /// @see gtc_random + template + GLM_FUNC_DECL vec<3, T, defaultp> sphericalRand(T Radius); + + /// Generate a random 2D vector which coordinates are regulary distributed within the area of a disk of a given radius + /// + /// @see gtc_random + template + GLM_FUNC_DECL vec<2, T, defaultp> diskRand(T Radius); + + /// Generate a random 3D vector which coordinates are regulary distributed within the volume of a ball of a given radius + /// + /// @see gtc_random + template + GLM_FUNC_DECL vec<3, T, defaultp> ballRand(T Radius); + + /// @} +}//namespace glm + +#include "random.inl" diff --git a/MacroLibX/third_party/glm/gtc/random.inl b/MacroLibX/third_party/glm/gtc/random.inl new file mode 100644 index 0000000..7048509 --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/random.inl @@ -0,0 +1,303 @@ +#include "../geometric.hpp" +#include "../exponential.hpp" +#include "../trigonometric.hpp" +#include "../detail/type_vec1.hpp" +#include +#include +#include +#include + +namespace glm{ +namespace detail +{ + template + struct compute_rand + { + GLM_FUNC_QUALIFIER static vec call(); + }; + + template + struct compute_rand<1, uint8, P> + { + GLM_FUNC_QUALIFIER static vec<1, uint8, P> call() + { + return vec<1, uint8, P>( + std::rand() % std::numeric_limits::max()); + } + }; + + template + struct compute_rand<2, uint8, P> + { + GLM_FUNC_QUALIFIER static vec<2, uint8, P> call() + { + return vec<2, uint8, P>( + std::rand() % std::numeric_limits::max(), + std::rand() % std::numeric_limits::max()); + } + }; + + template + struct compute_rand<3, uint8, P> + { + GLM_FUNC_QUALIFIER static vec<3, uint8, P> call() + { + return vec<3, uint8, P>( + std::rand() % std::numeric_limits::max(), + std::rand() % std::numeric_limits::max(), + std::rand() % std::numeric_limits::max()); + } + }; + + template + struct compute_rand<4, uint8, P> + { + GLM_FUNC_QUALIFIER static vec<4, uint8, P> call() + { + return vec<4, uint8, P>( + std::rand() % std::numeric_limits::max(), + std::rand() % std::numeric_limits::max(), + std::rand() % std::numeric_limits::max(), + std::rand() % std::numeric_limits::max()); + } + }; + + template + struct compute_rand + { + GLM_FUNC_QUALIFIER static vec call() + { + return + (vec(compute_rand::call()) << static_cast(8)) | + (vec(compute_rand::call()) << static_cast(0)); + } + }; + + template + struct compute_rand + { + GLM_FUNC_QUALIFIER static vec call() + { + return + (vec(compute_rand::call()) << static_cast(16)) | + (vec(compute_rand::call()) << static_cast(0)); + } + }; + + template + struct compute_rand + { + 
GLM_FUNC_QUALIFIER static vec call() + { + return + (vec(compute_rand::call()) << static_cast(32)) | + (vec(compute_rand::call()) << static_cast(0)); + } + }; + + template + struct compute_linearRand + { + GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max); + }; + + template + struct compute_linearRand + { + GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) + { + return (vec(compute_rand::call() % vec(Max + static_cast(1) - Min))) + Min; + } + }; + + template + struct compute_linearRand + { + GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) + { + return (compute_rand::call() % (Max + static_cast(1) - Min)) + Min; + } + }; + + template + struct compute_linearRand + { + GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) + { + return (vec(compute_rand::call() % vec(Max + static_cast(1) - Min))) + Min; + } + }; + + template + struct compute_linearRand + { + GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) + { + return (compute_rand::call() % (Max + static_cast(1) - Min)) + Min; + } + }; + + template + struct compute_linearRand + { + GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) + { + return (vec(compute_rand::call() % vec(Max + static_cast(1) - Min))) + Min; + } + }; + + template + struct compute_linearRand + { + GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) + { + return (compute_rand::call() % (Max + static_cast(1) - Min)) + Min; + } + }; + + template + struct compute_linearRand + { + GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) + { + return (vec(compute_rand::call() % vec(Max + static_cast(1) - Min))) + Min; + } + }; + + template + struct compute_linearRand + { + GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) + { + return (compute_rand::call() % (Max + static_cast(1) - Min)) + Min; + } + }; + + template + struct compute_linearRand + { + GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) + { + return vec(compute_rand::call()) / static_cast(std::numeric_limits::max()) * (Max - Min) + Min; + } + }; + + template + struct compute_linearRand + { + GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) + { + return vec(compute_rand::call()) / static_cast(std::numeric_limits::max()) * (Max - Min) + Min; + } + }; + + template + struct compute_linearRand + { + GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) + { + return vec(compute_rand::call()) / static_cast(std::numeric_limits::max()) * (Max - Min) + Min; + } + }; +}//namespace detail + + template + GLM_FUNC_QUALIFIER genType linearRand(genType Min, genType Max) + { + return detail::compute_linearRand<1, genType, highp>::call( + vec<1, genType, highp>(Min), + vec<1, genType, highp>(Max)).x; + } + + template + GLM_FUNC_QUALIFIER vec linearRand(vec const& Min, vec const& Max) + { + return detail::compute_linearRand::call(Min, Max); + } + + template + GLM_FUNC_QUALIFIER genType gaussRand(genType Mean, genType Deviation) + { + genType w, x1, x2; + + do + { + x1 = linearRand(genType(-1), genType(1)); + x2 = linearRand(genType(-1), genType(1)); + + w = x1 * x1 + x2 * x2; + } while(w > genType(1)); + + return static_cast(x2 * Deviation * Deviation * sqrt((genType(-2) * log(w)) / w) + Mean); + } + + template + GLM_FUNC_QUALIFIER vec gaussRand(vec const& Mean, vec const& Deviation) + { + return detail::functor2::call(gaussRand, Mean, Deviation); + } + + template + GLM_FUNC_QUALIFIER vec<2, T, defaultp> diskRand(T Radius) + { + assert(Radius > 
static_cast(0)); + + vec<2, T, defaultp> Result(T(0)); + T LenRadius(T(0)); + + do + { + Result = linearRand( + vec<2, T, defaultp>(-Radius), + vec<2, T, defaultp>(Radius)); + LenRadius = length(Result); + } + while(LenRadius > Radius); + + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, defaultp> ballRand(T Radius) + { + assert(Radius > static_cast(0)); + + vec<3, T, defaultp> Result(T(0)); + T LenRadius(T(0)); + + do + { + Result = linearRand( + vec<3, T, defaultp>(-Radius), + vec<3, T, defaultp>(Radius)); + LenRadius = length(Result); + } + while(LenRadius > Radius); + + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<2, T, defaultp> circularRand(T Radius) + { + assert(Radius > static_cast(0)); + + T a = linearRand(T(0), static_cast(6.283185307179586476925286766559)); + return vec<2, T, defaultp>(glm::cos(a), glm::sin(a)) * Radius; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, defaultp> sphericalRand(T Radius) + { + assert(Radius > static_cast(0)); + + T theta = linearRand(T(0), T(6.283185307179586476925286766559f)); + T phi = std::acos(linearRand(T(-1.0f), T(1.0f))); + + T x = std::sin(phi) * std::cos(theta); + T y = std::sin(phi) * std::sin(theta); + T z = std::cos(phi); + + return vec<3, T, defaultp>(x, y, z) * Radius; + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtc/reciprocal.hpp b/MacroLibX/third_party/glm/gtc/reciprocal.hpp new file mode 100644 index 0000000..c7d1330 --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/reciprocal.hpp @@ -0,0 +1,135 @@ +/// @ref gtc_reciprocal +/// @file glm/gtc/reciprocal.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtc_reciprocal GLM_GTC_reciprocal +/// @ingroup gtc +/// +/// Include to use the features of this extension. +/// +/// Define secant, cosecant and cotangent functions. + +#pragma once + +// Dependencies +#include "../detail/setup.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_GTC_reciprocal extension included") +#endif + +namespace glm +{ + /// @addtogroup gtc_reciprocal + /// @{ + + /// Secant function. + /// hypotenuse / adjacent or 1 / cos(x) + /// + /// @tparam genType Floating-point scalar or vector types. + /// + /// @see gtc_reciprocal + template + GLM_FUNC_DECL genType sec(genType angle); + + /// Cosecant function. + /// hypotenuse / opposite or 1 / sin(x) + /// + /// @tparam genType Floating-point scalar or vector types. + /// + /// @see gtc_reciprocal + template + GLM_FUNC_DECL genType csc(genType angle); + + /// Cotangent function. + /// adjacent / opposite or 1 / tan(x) + /// + /// @tparam genType Floating-point scalar or vector types. + /// + /// @see gtc_reciprocal + template + GLM_FUNC_DECL genType cot(genType angle); + + /// Inverse secant function. + /// + /// @return Return an angle expressed in radians. + /// @tparam genType Floating-point scalar or vector types. + /// + /// @see gtc_reciprocal + template + GLM_FUNC_DECL genType asec(genType x); + + /// Inverse cosecant function. + /// + /// @return Return an angle expressed in radians. + /// @tparam genType Floating-point scalar or vector types. + /// + /// @see gtc_reciprocal + template + GLM_FUNC_DECL genType acsc(genType x); + + /// Inverse cotangent function. + /// + /// @return Return an angle expressed in radians. + /// @tparam genType Floating-point scalar or vector types. + /// + /// @see gtc_reciprocal + template + GLM_FUNC_DECL genType acot(genType x); + + /// Secant hyperbolic function. + /// + /// @tparam genType Floating-point scalar or vector types. 
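These wrappers are thin identities over the core trigonometric functions (sec, declared immediately below, is 1 / cos, and so on); a small sanity-check sketch:

#include <cassert>
#include <cmath>
#include <glm/gtc/reciprocal.hpp>

int main()
{
	float const x = 0.7f;
	assert(std::abs(glm::sec(x) - 1.0f / std::cos(x)) < 1e-6f); // sec = 1 / cos
	assert(std::abs(glm::csc(x) - 1.0f / std::sin(x)) < 1e-6f); // csc = 1 / sin
	assert(std::abs(glm::cot(x) - 1.0f / std::tan(x)) < 1e-5f); // cot computed as tan(pi/2 - x)
	return 0;
}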
+ /// + /// @see gtc_reciprocal + template + GLM_FUNC_DECL genType sech(genType angle); + + /// Cosecant hyperbolic function. + /// + /// @tparam genType Floating-point scalar or vector types. + /// + /// @see gtc_reciprocal + template + GLM_FUNC_DECL genType csch(genType angle); + + /// Cotangent hyperbolic function. + /// + /// @tparam genType Floating-point scalar or vector types. + /// + /// @see gtc_reciprocal + template + GLM_FUNC_DECL genType coth(genType angle); + + /// Inverse secant hyperbolic function. + /// + /// @return Return an angle expressed in radians. + /// @tparam genType Floating-point scalar or vector types. + /// + /// @see gtc_reciprocal + template + GLM_FUNC_DECL genType asech(genType x); + + /// Inverse cosecant hyperbolic function. + /// + /// @return Return an angle expressed in radians. + /// @tparam genType Floating-point scalar or vector types. + /// + /// @see gtc_reciprocal + template + GLM_FUNC_DECL genType acsch(genType x); + + /// Inverse cotangent hyperbolic function. + /// + /// @return Return an angle expressed in radians. + /// @tparam genType Floating-point scalar or vector types. + /// + /// @see gtc_reciprocal + template + GLM_FUNC_DECL genType acoth(genType x); + + /// @} +}//namespace glm + +#include "reciprocal.inl" diff --git a/MacroLibX/third_party/glm/gtc/reciprocal.inl b/MacroLibX/third_party/glm/gtc/reciprocal.inl new file mode 100644 index 0000000..d88729e --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/reciprocal.inl @@ -0,0 +1,191 @@ +/// @ref gtc_reciprocal + +#include "../trigonometric.hpp" +#include + +namespace glm +{ + // sec + template + GLM_FUNC_QUALIFIER genType sec(genType angle) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'sec' only accept floating-point values"); + return genType(1) / glm::cos(angle); + } + + template + GLM_FUNC_QUALIFIER vec sec(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'sec' only accept floating-point inputs"); + return detail::functor1::call(sec, x); + } + + // csc + template + GLM_FUNC_QUALIFIER genType csc(genType angle) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'csc' only accept floating-point values"); + return genType(1) / glm::sin(angle); + } + + template + GLM_FUNC_QUALIFIER vec csc(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'csc' only accept floating-point inputs"); + return detail::functor1::call(csc, x); + } + + // cot + template + GLM_FUNC_QUALIFIER genType cot(genType angle) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'cot' only accept floating-point values"); + + genType const pi_over_2 = genType(3.1415926535897932384626433832795 / 2.0); + return glm::tan(pi_over_2 - angle); + } + + template + GLM_FUNC_QUALIFIER vec cot(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'cot' only accept floating-point inputs"); + return detail::functor1::call(cot, x); + } + + // asec + template + GLM_FUNC_QUALIFIER genType asec(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'asec' only accept floating-point values"); + return acos(genType(1) / x); + } + + template + GLM_FUNC_QUALIFIER vec asec(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'asec' only accept floating-point inputs"); + return detail::functor1::call(asec, x); + } + + // acsc + template + GLM_FUNC_QUALIFIER genType acsc(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acsc' only accept floating-point values"); + return asin(genType(1) / x); + } + + template + 
GLM_FUNC_QUALIFIER vec acsc(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acsc' only accept floating-point inputs"); + return detail::functor1::call(acsc, x); + } + + // acot + template + GLM_FUNC_QUALIFIER genType acot(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acot' only accept floating-point values"); + + genType const pi_over_2 = genType(3.1415926535897932384626433832795 / 2.0); + return pi_over_2 - atan(x); + } + + template + GLM_FUNC_QUALIFIER vec acot(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acot' only accept floating-point inputs"); + return detail::functor1::call(acot, x); + } + + // sech + template + GLM_FUNC_QUALIFIER genType sech(genType angle) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'sech' only accept floating-point values"); + return genType(1) / glm::cosh(angle); + } + + template + GLM_FUNC_QUALIFIER vec sech(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'sech' only accept floating-point inputs"); + return detail::functor1::call(sech, x); + } + + // csch + template + GLM_FUNC_QUALIFIER genType csch(genType angle) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'csch' only accept floating-point values"); + return genType(1) / glm::sinh(angle); + } + + template + GLM_FUNC_QUALIFIER vec csch(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'csch' only accept floating-point inputs"); + return detail::functor1::call(csch, x); + } + + // coth + template + GLM_FUNC_QUALIFIER genType coth(genType angle) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'coth' only accept floating-point values"); + return glm::cosh(angle) / glm::sinh(angle); + } + + template + GLM_FUNC_QUALIFIER vec coth(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'coth' only accept floating-point inputs"); + return detail::functor1::call(coth, x); + } + + // asech + template + GLM_FUNC_QUALIFIER genType asech(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'asech' only accept floating-point values"); + return acosh(genType(1) / x); + } + + template + GLM_FUNC_QUALIFIER vec asech(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'asech' only accept floating-point inputs"); + return detail::functor1::call(asech, x); + } + + // acsch + template + GLM_FUNC_QUALIFIER genType acsch(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acsch' only accept floating-point values"); + return asinh(genType(1) / x); + } + + template + GLM_FUNC_QUALIFIER vec acsch(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acsch' only accept floating-point inputs"); + return detail::functor1::call(acsch, x); + } + + // acoth + template + GLM_FUNC_QUALIFIER genType acoth(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acoth' only accept floating-point values"); + return atanh(genType(1) / x); + } + + template + GLM_FUNC_QUALIFIER vec acoth(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acoth' only accept floating-point inputs"); + return detail::functor1::call(acoth, x); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtc/round.hpp b/MacroLibX/third_party/glm/gtc/round.hpp new file mode 100644 index 0000000..56edbbc --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/round.hpp @@ -0,0 +1,160 @@ +/// @ref gtc_round +/// @file glm/gtc/round.hpp +/// +/// @see core (dependence) +/// @see gtc_round (dependence) +/// +/// 
diff --git a/MacroLibX/third_party/glm/gtc/round.hpp b/MacroLibX/third_party/glm/gtc/round.hpp
new file mode 100644
index 0000000..56edbbc
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtc/round.hpp
@@ -0,0 +1,160 @@
+/// @ref gtc_round
+/// @file glm/gtc/round.hpp
+///
+/// @see core (dependence)
+/// @see gtc_round (dependence)
+///
+/// @defgroup gtc_round GLM_GTC_round
+/// @ingroup gtc
+///
+/// Include <glm/gtc/round.hpp> to use the features of this extension.
+///
+/// Rounding value to specific boundings
+
+#pragma once
+
+// Dependencies
+#include "../detail/setup.hpp"
+#include "../detail/qualifier.hpp"
+#include "../detail/_vectorize.hpp"
+#include "../vector_relational.hpp"
+#include "../common.hpp"
+#include <limits>
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_GTC_round extension included")
+#endif
+
+namespace glm
+{
+	/// @addtogroup gtc_round
+	/// @{
+
+	/// Return the power of two number which value is just higher the input value,
+	/// round up to a power of two.
+	///
+	/// @see gtc_round
+	template<typename genIUType>
+	GLM_FUNC_DECL genIUType ceilPowerOfTwo(genIUType v);
+
+	/// Return the power of two number which value is just higher the input value,
+	/// round up to a power of two.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point or integer scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see gtc_round
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> ceilPowerOfTwo(vec<L, T, Q> const& v);
+
+	/// Return the power of two number which value is just lower the input value,
+	/// round down to a power of two.
+	///
+	/// @see gtc_round
+	template<typename genIUType>
+	GLM_FUNC_DECL genIUType floorPowerOfTwo(genIUType v);
+
+	/// Return the power of two number which value is just lower the input value,
+	/// round down to a power of two.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point or integer scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see gtc_round
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> floorPowerOfTwo(vec<L, T, Q> const& v);
+
+	/// Return the power of two number which value is the closest to the input value.
+	///
+	/// @see gtc_round
+	template<typename genIUType>
+	GLM_FUNC_DECL genIUType roundPowerOfTwo(genIUType v);
+
+	/// Return the power of two number which value is the closest to the input value.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point or integer scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see gtc_round
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> roundPowerOfTwo(vec<L, T, Q> const& v);
+
+	/// Higher multiple number of Source.
+	///
+	/// @tparam genType Floating-point or integer scalar or vector types.
+	///
+	/// @param v Source value to which is applied the function
+	/// @param Multiple Must be a null or positive value
+	///
+	/// @see gtc_round
+	template<typename genType>
+	GLM_FUNC_DECL genType ceilMultiple(genType v, genType Multiple);
+
+	/// Higher multiple number of Source.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point or integer scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @param v Source values to which is applied the function
+	/// @param Multiple Must be a null or positive value
+	///
+	/// @see gtc_round
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> ceilMultiple(vec<L, T, Q> const& v, vec<L, T, Q> const& Multiple);
+
+	/// Lower multiple number of Source.
+	///
+	/// @tparam genType Floating-point or integer scalar or vector types.
+	///
+	/// @param v Source value to which is applied the function
+	/// @param Multiple Must be a null or positive value
+	///
+	/// @see gtc_round
+	template<typename genType>
+	GLM_FUNC_DECL genType floorMultiple(genType v, genType Multiple);
+
+	/// Lower multiple number of Source.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point or integer scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @param v Source values to which is applied the function
+	/// @param Multiple Must be a null or positive value
+	///
+	/// @see gtc_round
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> floorMultiple(vec<L, T, Q> const& v, vec<L, T, Q> const& Multiple);
+
+	/// Lower multiple number of Source.
+	///
+	/// @tparam genType Floating-point or integer scalar or vector types.
+	///
+	/// @param v Source value to which is applied the function
+	/// @param Multiple Must be a null or positive value
+	///
+	/// @see gtc_round
+	template<typename genType>
+	GLM_FUNC_DECL genType roundMultiple(genType v, genType Multiple);
+
+	/// Lower multiple number of Source.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point or integer scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @param v Source values to which is applied the function
+	/// @param Multiple Must be a null or positive value
+	///
+	/// @see gtc_round
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> roundMultiple(vec<L, T, Q> const& v, vec<L, T, Q> const& Multiple);
+
+	/// @}
+} //namespace glm
+
+#include "round.inl"
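A short sanity check of the GTC_round interface above (an editor's sketch, not part of the vendored diff; the expected values follow from the implementations in round.inl below):

    // Editor's sketch: GTC_round semantics on small inputs.
    #include <glm/glm.hpp>
    #include <glm/gtc/round.hpp>
    #include <cstdio>

    int main()
    {
        std::printf("%d\n", glm::ceilPowerOfTwo(17));   // 32: next power of two above 17
        std::printf("%d\n", glm::floorPowerOfTwo(17));  // 16: previous power of two
        std::printf("%d\n", glm::roundPowerOfTwo(17));  // 16: nearest power of two
        std::printf("%f\n", glm::ceilMultiple(7.0f, 4.0f));  // 8.0: next multiple of 4
        std::printf("%f\n", glm::floorMultiple(7.0f, 4.0f)); // 4.0: previous multiple of 4
        return 0;
    }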
diff --git a/MacroLibX/third_party/glm/gtc/round.inl b/MacroLibX/third_party/glm/gtc/round.inl
new file mode 100644
index 0000000..48411e4
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtc/round.inl
@@ -0,0 +1,155 @@
+/// @ref gtc_round
+
+#include "../integer.hpp"
+#include "../ext/vector_integer.hpp"
+
+namespace glm{
+namespace detail
+{
+	template<bool is_float, bool is_signed>
+	struct compute_roundMultiple {};
+
+	template<>
+	struct compute_roundMultiple<true, true>
+	{
+		template<typename genType>
+		GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple)
+		{
+			if (Source >= genType(0))
+				return Source - std::fmod(Source, Multiple);
+			else
+			{
+				genType Tmp = Source + genType(1);
+				return Tmp - std::fmod(Tmp, Multiple) - Multiple;
+			}
+		}
+	};
+
+	template<>
+	struct compute_roundMultiple<false, false>
+	{
+		template<typename genType>
+		GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple)
+		{
+			if (Source >= genType(0))
+				return Source - Source % Multiple;
+			else
+			{
+				genType Tmp = Source + genType(1);
+				return Tmp - Tmp % Multiple - Multiple;
+			}
+		}
+	};
+
+	template<>
+	struct compute_roundMultiple<false, true>
+	{
+		template<typename genType>
+		GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple)
+		{
+			if (Source >= genType(0))
+				return Source - Source % Multiple;
+			else
+			{
+				genType Tmp = Source + genType(1);
+				return Tmp - Tmp % Multiple - Multiple;
+			}
+		}
+	};
+}//namespace detail
+
+	//////////////////
+	// ceilPowerOfTwo
+
+	template<typename genType>
+	GLM_FUNC_QUALIFIER genType ceilPowerOfTwo(genType value)
+	{
+		return detail::compute_ceilPowerOfTwo<1, genType, defaultp, std::numeric_limits<genType>::is_signed>::call(vec<1, genType, defaultp>(value)).x;
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> ceilPowerOfTwo(vec<L, T, Q> const& v)
+	{
+		return detail::compute_ceilPowerOfTwo<L, T, Q, std::numeric_limits<T>::is_signed>::call(v);
+	}
+
+	///////////////////
+	// floorPowerOfTwo
+
+	template<typename genType>
+	GLM_FUNC_QUALIFIER genType floorPowerOfTwo(genType value)
+	{
+		return isPowerOfTwo(value) ? value : static_cast<genType>(1) << findMSB(value);
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> floorPowerOfTwo(vec<L, T, Q> const& v)
+	{
+		return detail::functor1<vec, L, T, T, Q>::call(floorPowerOfTwo, v);
+	}
+
+	///////////////////
+	// roundPowerOfTwo
+
+	template<typename genIUType>
+	GLM_FUNC_QUALIFIER genIUType roundPowerOfTwo(genIUType value)
+	{
+		if(isPowerOfTwo(value))
+			return value;
+
+		genIUType const prev = static_cast<genIUType>(1) << findMSB(value);
+		genIUType const next = prev << static_cast<genIUType>(1);
+		return (next - value) < (value - prev) ? next : prev;
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> roundPowerOfTwo(vec<L, T, Q> const& v)
+	{
+		return detail::functor1<vec, L, T, T, Q>::call(roundPowerOfTwo, v);
+	}
+
+	//////////////////////
+	// ceilMultiple
+
+	template<typename genType>
+	GLM_FUNC_QUALIFIER genType ceilMultiple(genType Source, genType Multiple)
+	{
+		return detail::compute_ceilMultiple<std::numeric_limits<genType>::is_iec559, std::numeric_limits<genType>::is_signed>::call(Source, Multiple);
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> ceilMultiple(vec<L, T, Q> const& Source, vec<L, T, Q> const& Multiple)
+	{
+		return detail::functor2<vec, L, T, Q>::call(ceilMultiple, Source, Multiple);
+	}
+
+	//////////////////////
+	// floorMultiple
+
+	template<typename genType>
+	GLM_FUNC_QUALIFIER genType floorMultiple(genType Source, genType Multiple)
+	{
+		return detail::compute_floorMultiple<std::numeric_limits<genType>::is_iec559, std::numeric_limits<genType>::is_signed>::call(Source, Multiple);
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> floorMultiple(vec<L, T, Q> const& Source, vec<L, T, Q> const& Multiple)
+	{
+		return detail::functor2<vec, L, T, Q>::call(floorMultiple, Source, Multiple);
+	}
+
+	//////////////////////
+	// roundMultiple
+
+	template<typename genType>
+	GLM_FUNC_QUALIFIER genType roundMultiple(genType Source, genType Multiple)
+	{
+		return detail::compute_roundMultiple<std::numeric_limits<genType>::is_iec559, std::numeric_limits<genType>::is_signed>::call(Source, Multiple);
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> roundMultiple(vec<L, T, Q> const& Source, vec<L, T, Q> const& Multiple)
+	{
+		return detail::functor2<vec, L, T, Q>::call(roundMultiple, Source, Multiple);
+	}
+}//namespace glm
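Note that roundMultiple, as implemented by compute_roundMultiple above, snaps to the lower multiple rather than the nearest one, which is why its Doxygen text mirrors floorMultiple's. A stand-alone re-implementation of the float path makes this easy to verify (an editor's sketch; round_multiple_ref is an illustrative name, not a GLM symbol):

    // Editor's sketch: the float/signed branch of compute_roundMultiple.
    #include <cmath>
    #include <cstdio>

    static float round_multiple_ref(float source, float multiple)
    {
        if(source >= 0.0f)
            return source - std::fmod(source, multiple); // floor toward zero onto the grid
        float const tmp = source + 1.0f;
        return tmp - std::fmod(tmp, multiple) - multiple; // floor away from zero
    }

    int main()
    {
        std::printf("%f\n", round_multiple_ref(7.0f, 4.0f));  // 4.0, not 8.0
        std::printf("%f\n", round_multiple_ref(-7.0f, 4.0f)); // -8.0
        return 0;
    }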
diff --git a/MacroLibX/third_party/glm/gtc/type_aligned.hpp b/MacroLibX/third_party/glm/gtc/type_aligned.hpp
new file mode 100644
index 0000000..5403abf
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtc/type_aligned.hpp
@@ -0,0 +1,1315 @@
+/// @ref gtc_type_aligned
+/// @file glm/gtc/type_aligned.hpp
+///
+/// @see core (dependence)
+///
+/// @defgroup gtc_type_aligned GLM_GTC_type_aligned
+/// @ingroup gtc
+///
+/// Include <glm/gtc/type_aligned.hpp> to use the features of this extension.
+///
+/// Aligned types allowing SIMD optimizations of vectors and matrices types
+
+#pragma once
+
+#if (GLM_CONFIG_ALIGNED_GENTYPES == GLM_DISABLE)
+#	error "GLM: Aligned gentypes require to enable C++ language extensions. Define GLM_FORCE_ALIGNED_GENTYPES before including GLM headers to use aligned types."
+#endif
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_GTC_type_aligned extension included")
+#endif
+
+#include "../mat4x4.hpp"
+#include "../mat4x3.hpp"
+#include "../mat4x2.hpp"
+#include "../mat3x4.hpp"
+#include "../mat3x3.hpp"
+#include "../mat3x2.hpp"
+#include "../mat2x4.hpp"
+#include "../mat2x3.hpp"
+#include "../mat2x2.hpp"
+#include "../gtc/vec1.hpp"
+#include "../vec2.hpp"
+#include "../vec3.hpp"
+#include "../vec4.hpp"
+
+namespace glm
+{
+	/// @addtogroup gtc_type_aligned
+	/// @{
+
+	// -- *vec1 --
+
+	/// 1 component vector aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs.
+	typedef vec<1, float, aligned_highp>		aligned_highp_vec1;
+
+	/// 1 component vector aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs.
+	typedef vec<1, float, aligned_mediump>		aligned_mediump_vec1;
+
+	/// 1 component vector aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs.
+	typedef vec<1, float, aligned_lowp>			aligned_lowp_vec1;
+
+	/// 1 component vector aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs.
+	typedef vec<1, double, aligned_highp>		aligned_highp_dvec1;
+
+	/// 1 component vector aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs.
+	typedef vec<1, double, aligned_mediump>		aligned_mediump_dvec1;
+
+	/// 1 component vector aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs.
+	typedef vec<1, double, aligned_lowp>		aligned_lowp_dvec1;
+
+	/// 1 component vector aligned in memory of signed integer numbers.
+	typedef vec<1, int, aligned_highp>			aligned_highp_ivec1;
+
+	/// 1 component vector aligned in memory of signed integer numbers.
+	typedef vec<1, int, aligned_mediump>		aligned_mediump_ivec1;
+
+	/// 1 component vector aligned in memory of signed integer numbers.
+	typedef vec<1, int, aligned_lowp>			aligned_lowp_ivec1;
+
+	/// 1 component vector aligned in memory of unsigned integer numbers.
+	typedef vec<1, uint, aligned_highp>			aligned_highp_uvec1;
+
+	/// 1 component vector aligned in memory of unsigned integer numbers.
+	typedef vec<1, uint, aligned_mediump>		aligned_mediump_uvec1;
+
+	/// 1 component vector aligned in memory of unsigned integer numbers.
+	typedef vec<1, uint, aligned_lowp>			aligned_lowp_uvec1;
+
+	/// 1 component vector aligned in memory of bool values.
+	typedef vec<1, bool, aligned_highp>			aligned_highp_bvec1;
+
+	/// 1 component vector aligned in memory of bool values.
+	typedef vec<1, bool, aligned_mediump>		aligned_mediump_bvec1;
+
+	/// 1 component vector aligned in memory of bool values.
+	typedef vec<1, bool, aligned_lowp>			aligned_lowp_bvec1;
+
+	/// 1 component vector tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs.
+	typedef vec<1, float, packed_highp>			packed_highp_vec1;
+
+	/// 1 component vector tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs.
+	typedef vec<1, float, packed_mediump>		packed_mediump_vec1;
+
+	/// 1 component vector tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs.
+	typedef vec<1, float, packed_lowp>			packed_lowp_vec1;
+
+	/// 1 component vector tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs.
+	typedef vec<1, double, packed_highp>		packed_highp_dvec1;
+
+	/// 1 component vector tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs.
+	typedef vec<1, double, packed_mediump>		packed_mediump_dvec1;
+
+	/// 1 component vector tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs.
+	typedef vec<1, double, packed_lowp>			packed_lowp_dvec1;
+
+	/// 1 component vector tightly packed in memory of signed integer numbers.
+ typedef vec<1, int, packed_highp> packed_highp_ivec1; + + /// 1 component vector tightly packed in memory of signed integer numbers. + typedef vec<1, int, packed_mediump> packed_mediump_ivec1; + + /// 1 component vector tightly packed in memory of signed integer numbers. + typedef vec<1, int, packed_lowp> packed_lowp_ivec1; + + /// 1 component vector tightly packed in memory of unsigned integer numbers. + typedef vec<1, uint, packed_highp> packed_highp_uvec1; + + /// 1 component vector tightly packed in memory of unsigned integer numbers. + typedef vec<1, uint, packed_mediump> packed_mediump_uvec1; + + /// 1 component vector tightly packed in memory of unsigned integer numbers. + typedef vec<1, uint, packed_lowp> packed_lowp_uvec1; + + /// 1 component vector tightly packed in memory of bool values. + typedef vec<1, bool, packed_highp> packed_highp_bvec1; + + /// 1 component vector tightly packed in memory of bool values. + typedef vec<1, bool, packed_mediump> packed_mediump_bvec1; + + /// 1 component vector tightly packed in memory of bool values. + typedef vec<1, bool, packed_lowp> packed_lowp_bvec1; + + // -- *vec2 -- + + /// 2 components vector aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef vec<2, float, aligned_highp> aligned_highp_vec2; + + /// 2 components vector aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef vec<2, float, aligned_mediump> aligned_mediump_vec2; + + /// 2 components vector aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef vec<2, float, aligned_lowp> aligned_lowp_vec2; + + /// 2 components vector aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef vec<2, double, aligned_highp> aligned_highp_dvec2; + + /// 2 components vector aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef vec<2, double, aligned_mediump> aligned_mediump_dvec2; + + /// 2 components vector aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef vec<2, double, aligned_lowp> aligned_lowp_dvec2; + + /// 2 components vector aligned in memory of signed integer numbers. + typedef vec<2, int, aligned_highp> aligned_highp_ivec2; + + /// 2 components vector aligned in memory of signed integer numbers. + typedef vec<2, int, aligned_mediump> aligned_mediump_ivec2; + + /// 2 components vector aligned in memory of signed integer numbers. + typedef vec<2, int, aligned_lowp> aligned_lowp_ivec2; + + /// 2 components vector aligned in memory of unsigned integer numbers. + typedef vec<2, uint, aligned_highp> aligned_highp_uvec2; + + /// 2 components vector aligned in memory of unsigned integer numbers. + typedef vec<2, uint, aligned_mediump> aligned_mediump_uvec2; + + /// 2 components vector aligned in memory of unsigned integer numbers. + typedef vec<2, uint, aligned_lowp> aligned_lowp_uvec2; + + /// 2 components vector aligned in memory of bool values. + typedef vec<2, bool, aligned_highp> aligned_highp_bvec2; + + /// 2 components vector aligned in memory of bool values. + typedef vec<2, bool, aligned_mediump> aligned_mediump_bvec2; + + /// 2 components vector aligned in memory of bool values. 
+ typedef vec<2, bool, aligned_lowp> aligned_lowp_bvec2; + + /// 2 components vector tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef vec<2, float, packed_highp> packed_highp_vec2; + + /// 2 components vector tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef vec<2, float, packed_mediump> packed_mediump_vec2; + + /// 2 components vector tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef vec<2, float, packed_lowp> packed_lowp_vec2; + + /// 2 components vector tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef vec<2, double, packed_highp> packed_highp_dvec2; + + /// 2 components vector tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef vec<2, double, packed_mediump> packed_mediump_dvec2; + + /// 2 components vector tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef vec<2, double, packed_lowp> packed_lowp_dvec2; + + /// 2 components vector tightly packed in memory of signed integer numbers. + typedef vec<2, int, packed_highp> packed_highp_ivec2; + + /// 2 components vector tightly packed in memory of signed integer numbers. + typedef vec<2, int, packed_mediump> packed_mediump_ivec2; + + /// 2 components vector tightly packed in memory of signed integer numbers. + typedef vec<2, int, packed_lowp> packed_lowp_ivec2; + + /// 2 components vector tightly packed in memory of unsigned integer numbers. + typedef vec<2, uint, packed_highp> packed_highp_uvec2; + + /// 2 components vector tightly packed in memory of unsigned integer numbers. + typedef vec<2, uint, packed_mediump> packed_mediump_uvec2; + + /// 2 components vector tightly packed in memory of unsigned integer numbers. + typedef vec<2, uint, packed_lowp> packed_lowp_uvec2; + + /// 2 components vector tightly packed in memory of bool values. + typedef vec<2, bool, packed_highp> packed_highp_bvec2; + + /// 2 components vector tightly packed in memory of bool values. + typedef vec<2, bool, packed_mediump> packed_mediump_bvec2; + + /// 2 components vector tightly packed in memory of bool values. + typedef vec<2, bool, packed_lowp> packed_lowp_bvec2; + + // -- *vec3 -- + + /// 3 components vector aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef vec<3, float, aligned_highp> aligned_highp_vec3; + + /// 3 components vector aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef vec<3, float, aligned_mediump> aligned_mediump_vec3; + + /// 3 components vector aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef vec<3, float, aligned_lowp> aligned_lowp_vec3; + + /// 3 components vector aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef vec<3, double, aligned_highp> aligned_highp_dvec3; + + /// 3 components vector aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. 
+ typedef vec<3, double, aligned_mediump> aligned_mediump_dvec3; + + /// 3 components vector aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef vec<3, double, aligned_lowp> aligned_lowp_dvec3; + + /// 3 components vector aligned in memory of signed integer numbers. + typedef vec<3, int, aligned_highp> aligned_highp_ivec3; + + /// 3 components vector aligned in memory of signed integer numbers. + typedef vec<3, int, aligned_mediump> aligned_mediump_ivec3; + + /// 3 components vector aligned in memory of signed integer numbers. + typedef vec<3, int, aligned_lowp> aligned_lowp_ivec3; + + /// 3 components vector aligned in memory of unsigned integer numbers. + typedef vec<3, uint, aligned_highp> aligned_highp_uvec3; + + /// 3 components vector aligned in memory of unsigned integer numbers. + typedef vec<3, uint, aligned_mediump> aligned_mediump_uvec3; + + /// 3 components vector aligned in memory of unsigned integer numbers. + typedef vec<3, uint, aligned_lowp> aligned_lowp_uvec3; + + /// 3 components vector aligned in memory of bool values. + typedef vec<3, bool, aligned_highp> aligned_highp_bvec3; + + /// 3 components vector aligned in memory of bool values. + typedef vec<3, bool, aligned_mediump> aligned_mediump_bvec3; + + /// 3 components vector aligned in memory of bool values. + typedef vec<3, bool, aligned_lowp> aligned_lowp_bvec3; + + /// 3 components vector tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef vec<3, float, packed_highp> packed_highp_vec3; + + /// 3 components vector tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef vec<3, float, packed_mediump> packed_mediump_vec3; + + /// 3 components vector tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef vec<3, float, packed_lowp> packed_lowp_vec3; + + /// 3 components vector tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef vec<3, double, packed_highp> packed_highp_dvec3; + + /// 3 components vector tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef vec<3, double, packed_mediump> packed_mediump_dvec3; + + /// 3 components vector tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef vec<3, double, packed_lowp> packed_lowp_dvec3; + + /// 3 components vector tightly packed in memory of signed integer numbers. + typedef vec<3, int, packed_highp> packed_highp_ivec3; + + /// 3 components vector tightly packed in memory of signed integer numbers. + typedef vec<3, int, packed_mediump> packed_mediump_ivec3; + + /// 3 components vector tightly packed in memory of signed integer numbers. + typedef vec<3, int, packed_lowp> packed_lowp_ivec3; + + /// 3 components vector tightly packed in memory of unsigned integer numbers. + typedef vec<3, uint, packed_highp> packed_highp_uvec3; + + /// 3 components vector tightly packed in memory of unsigned integer numbers. + typedef vec<3, uint, packed_mediump> packed_mediump_uvec3; + + /// 3 components vector tightly packed in memory of unsigned integer numbers. + typedef vec<3, uint, packed_lowp> packed_lowp_uvec3; + + /// 3 components vector tightly packed in memory of bool values. 
+ typedef vec<3, bool, packed_highp> packed_highp_bvec3; + + /// 3 components vector tightly packed in memory of bool values. + typedef vec<3, bool, packed_mediump> packed_mediump_bvec3; + + /// 3 components vector tightly packed in memory of bool values. + typedef vec<3, bool, packed_lowp> packed_lowp_bvec3; + + // -- *vec4 -- + + /// 4 components vector aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef vec<4, float, aligned_highp> aligned_highp_vec4; + + /// 4 components vector aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef vec<4, float, aligned_mediump> aligned_mediump_vec4; + + /// 4 components vector aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef vec<4, float, aligned_lowp> aligned_lowp_vec4; + + /// 4 components vector aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef vec<4, double, aligned_highp> aligned_highp_dvec4; + + /// 4 components vector aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef vec<4, double, aligned_mediump> aligned_mediump_dvec4; + + /// 4 components vector aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef vec<4, double, aligned_lowp> aligned_lowp_dvec4; + + /// 4 components vector aligned in memory of signed integer numbers. + typedef vec<4, int, aligned_highp> aligned_highp_ivec4; + + /// 4 components vector aligned in memory of signed integer numbers. + typedef vec<4, int, aligned_mediump> aligned_mediump_ivec4; + + /// 4 components vector aligned in memory of signed integer numbers. + typedef vec<4, int, aligned_lowp> aligned_lowp_ivec4; + + /// 4 components vector aligned in memory of unsigned integer numbers. + typedef vec<4, uint, aligned_highp> aligned_highp_uvec4; + + /// 4 components vector aligned in memory of unsigned integer numbers. + typedef vec<4, uint, aligned_mediump> aligned_mediump_uvec4; + + /// 4 components vector aligned in memory of unsigned integer numbers. + typedef vec<4, uint, aligned_lowp> aligned_lowp_uvec4; + + /// 4 components vector aligned in memory of bool values. + typedef vec<4, bool, aligned_highp> aligned_highp_bvec4; + + /// 4 components vector aligned in memory of bool values. + typedef vec<4, bool, aligned_mediump> aligned_mediump_bvec4; + + /// 4 components vector aligned in memory of bool values. + typedef vec<4, bool, aligned_lowp> aligned_lowp_bvec4; + + /// 4 components vector tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef vec<4, float, packed_highp> packed_highp_vec4; + + /// 4 components vector tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef vec<4, float, packed_mediump> packed_mediump_vec4; + + /// 4 components vector tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef vec<4, float, packed_lowp> packed_lowp_vec4; + + /// 4 components vector tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. 
+ typedef vec<4, double, packed_highp> packed_highp_dvec4; + + /// 4 components vector tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef vec<4, double, packed_mediump> packed_mediump_dvec4; + + /// 4 components vector tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef vec<4, double, packed_lowp> packed_lowp_dvec4; + + /// 4 components vector tightly packed in memory of signed integer numbers. + typedef vec<4, int, packed_highp> packed_highp_ivec4; + + /// 4 components vector tightly packed in memory of signed integer numbers. + typedef vec<4, int, packed_mediump> packed_mediump_ivec4; + + /// 4 components vector tightly packed in memory of signed integer numbers. + typedef vec<4, int, packed_lowp> packed_lowp_ivec4; + + /// 4 components vector tightly packed in memory of unsigned integer numbers. + typedef vec<4, uint, packed_highp> packed_highp_uvec4; + + /// 4 components vector tightly packed in memory of unsigned integer numbers. + typedef vec<4, uint, packed_mediump> packed_mediump_uvec4; + + /// 4 components vector tightly packed in memory of unsigned integer numbers. + typedef vec<4, uint, packed_lowp> packed_lowp_uvec4; + + /// 4 components vector tightly packed in memory of bool values. + typedef vec<4, bool, packed_highp> packed_highp_bvec4; + + /// 4 components vector tightly packed in memory of bool values. + typedef vec<4, bool, packed_mediump> packed_mediump_bvec4; + + /// 4 components vector tightly packed in memory of bool values. + typedef vec<4, bool, packed_lowp> packed_lowp_bvec4; + + // -- *mat2 -- + + /// 2 by 2 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<2, 2, float, aligned_highp> aligned_highp_mat2; + + /// 2 by 2 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 2, float, aligned_mediump> aligned_mediump_mat2; + + /// 2 by 2 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<2, 2, float, aligned_lowp> aligned_lowp_mat2; + + /// 2 by 2 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<2, 2, double, aligned_highp> aligned_highp_dmat2; + + /// 2 by 2 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 2, double, aligned_mediump> aligned_mediump_dmat2; + + /// 2 by 2 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<2, 2, double, aligned_lowp> aligned_lowp_dmat2; + + /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<2, 2, float, packed_highp> packed_highp_mat2; + + /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 2, float, packed_mediump> packed_mediump_mat2; + + /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. 
+ typedef mat<2, 2, float, packed_lowp> packed_lowp_mat2; + + /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<2, 2, double, packed_highp> packed_highp_dmat2; + + /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 2, double, packed_mediump> packed_mediump_dmat2; + + /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<2, 2, double, packed_lowp> packed_lowp_dmat2; + + // -- *mat3 -- + + /// 3 by 3 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<3, 3, float, aligned_highp> aligned_highp_mat3; + + /// 3 by 3 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<3, 3, float, aligned_mediump> aligned_mediump_mat3; + + /// 3 by 3 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 3, float, aligned_lowp> aligned_lowp_mat3; + + /// 3 by 3 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<3, 3, double, aligned_highp> aligned_highp_dmat3; + + /// 3 by 3 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<3, 3, double, aligned_mediump> aligned_mediump_dmat3; + + /// 3 by 3 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 3, double, aligned_lowp> aligned_lowp_dmat3; + + /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<3, 3, float, packed_highp> packed_highp_mat3; + + /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<3, 3, float, packed_mediump> packed_mediump_mat3; + + /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 3, float, packed_lowp> packed_lowp_mat3; + + /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<3, 3, double, packed_highp> packed_highp_dmat3; + + /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<3, 3, double, packed_mediump> packed_mediump_dmat3; + + /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 3, double, packed_lowp> packed_lowp_dmat3; + + // -- *mat4 -- + + /// 4 by 4 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 4, float, aligned_highp> aligned_highp_mat4; + + /// 4 by 4 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. 
+ typedef mat<4, 4, float, aligned_mediump> aligned_mediump_mat4; + + /// 4 by 4 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<4, 4, float, aligned_lowp> aligned_lowp_mat4; + + /// 4 by 4 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 4, double, aligned_highp> aligned_highp_dmat4; + + /// 4 by 4 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<4, 4, double, aligned_mediump> aligned_mediump_dmat4; + + /// 4 by 4 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<4, 4, double, aligned_lowp> aligned_lowp_dmat4; + + /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 4, float, packed_highp> packed_highp_mat4; + + /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<4, 4, float, packed_mediump> packed_mediump_mat4; + + /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<4, 4, float, packed_lowp> packed_lowp_mat4; + + /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 4, double, packed_highp> packed_highp_dmat4; + + /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<4, 4, double, packed_mediump> packed_mediump_dmat4; + + /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<4, 4, double, packed_lowp> packed_lowp_dmat4; + + // -- *mat2x2 -- + + /// 2 by 2 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<2, 2, float, aligned_highp> aligned_highp_mat2x2; + + /// 2 by 2 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 2, float, aligned_mediump> aligned_mediump_mat2x2; + + /// 2 by 2 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<2, 2, float, aligned_lowp> aligned_lowp_mat2x2; + + /// 2 by 2 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<2, 2, double, aligned_highp> aligned_highp_dmat2x2; + + /// 2 by 2 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 2, double, aligned_mediump> aligned_mediump_dmat2x2; + + /// 2 by 2 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<2, 2, double, aligned_lowp> aligned_lowp_dmat2x2; + + /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. 
+ typedef mat<2, 2, float, packed_highp> packed_highp_mat2x2; + + /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 2, float, packed_mediump> packed_mediump_mat2x2; + + /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<2, 2, float, packed_lowp> packed_lowp_mat2x2; + + /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<2, 2, double, packed_highp> packed_highp_dmat2x2; + + /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 2, double, packed_mediump> packed_mediump_dmat2x2; + + /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<2, 2, double, packed_lowp> packed_lowp_dmat2x2; + + // -- *mat2x3 -- + + /// 2 by 3 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<2, 3, float, aligned_highp> aligned_highp_mat2x3; + + /// 2 by 3 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 3, float, aligned_mediump> aligned_mediump_mat2x3; + + /// 2 by 3 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<2, 3, float, aligned_lowp> aligned_lowp_mat2x3; + + /// 2 by 3 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<2, 3, double, aligned_highp> aligned_highp_dmat2x3; + + /// 2 by 3 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 3, double, aligned_mediump> aligned_mediump_dmat2x3; + + /// 2 by 3 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<2, 3, double, aligned_lowp> aligned_lowp_dmat2x3; + + /// 2 by 3 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<2, 3, float, packed_highp> packed_highp_mat2x3; + + /// 2 by 3 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 3, float, packed_mediump> packed_mediump_mat2x3; + + /// 2 by 3 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<2, 3, float, packed_lowp> packed_lowp_mat2x3; + + /// 2 by 3 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<2, 3, double, packed_highp> packed_highp_dmat2x3; + + /// 2 by 3 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 3, double, packed_mediump> packed_mediump_dmat2x3; + + /// 2 by 3 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. 
+ typedef mat<2, 3, double, packed_lowp> packed_lowp_dmat2x3; + + // -- *mat2x4 -- + + /// 2 by 4 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<2, 4, float, aligned_highp> aligned_highp_mat2x4; + + /// 2 by 4 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 4, float, aligned_mediump> aligned_mediump_mat2x4; + + /// 2 by 4 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<2, 4, float, aligned_lowp> aligned_lowp_mat2x4; + + /// 2 by 4 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<2, 4, double, aligned_highp> aligned_highp_dmat2x4; + + /// 2 by 4 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 4, double, aligned_mediump> aligned_mediump_dmat2x4; + + /// 2 by 4 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<2, 4, double, aligned_lowp> aligned_lowp_dmat2x4; + + /// 2 by 4 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<2, 4, float, packed_highp> packed_highp_mat2x4; + + /// 2 by 4 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 4, float, packed_mediump> packed_mediump_mat2x4; + + /// 2 by 4 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<2, 4, float, packed_lowp> packed_lowp_mat2x4; + + /// 2 by 4 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<2, 4, double, packed_highp> packed_highp_dmat2x4; + + /// 2 by 4 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<2, 4, double, packed_mediump> packed_mediump_dmat2x4; + + /// 2 by 4 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<2, 4, double, packed_lowp> packed_lowp_dmat2x4; + + // -- *mat3x2 -- + + /// 3 by 2 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<3, 2, float, aligned_highp> aligned_highp_mat3x2; + + /// 3 by 2 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<3, 2, float, aligned_mediump> aligned_mediump_mat3x2; + + /// 3 by 2 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 2, float, aligned_lowp> aligned_lowp_mat3x2; + + /// 3 by 2 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<3, 2, double, aligned_highp> aligned_highp_dmat3x2; + + /// 3 by 2 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. 
+ typedef mat<3, 2, double, aligned_mediump> aligned_mediump_dmat3x2; + + /// 3 by 2 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 2, double, aligned_lowp> aligned_lowp_dmat3x2; + + /// 3 by 2 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<3, 2, float, packed_highp> packed_highp_mat3x2; + + /// 3 by 2 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<3, 2, float, packed_mediump> packed_mediump_mat3x2; + + /// 3 by 2 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 2, float, packed_lowp> packed_lowp_mat3x2; + + /// 3 by 2 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<3, 2, double, packed_highp> packed_highp_dmat3x2; + + /// 3 by 2 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<3, 2, double, packed_mediump> packed_mediump_dmat3x2; + + /// 3 by 2 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 2, double, packed_lowp> packed_lowp_dmat3x2; + + // -- *mat3x3 -- + + /// 3 by 3 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<3, 3, float, aligned_highp> aligned_highp_mat3x3; + + /// 3 by 3 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<3, 3, float, aligned_mediump> aligned_mediump_mat3x3; + + /// 3 by 3 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 3, float, aligned_lowp> aligned_lowp_mat3x3; + + /// 3 by 3 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<3, 3, double, aligned_highp> aligned_highp_dmat3x3; + + /// 3 by 3 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<3, 3, double, aligned_mediump> aligned_mediump_dmat3x3; + + /// 3 by 3 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 3, double, aligned_lowp> aligned_lowp_dmat3x3; + + /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<3, 3, float, packed_highp> packed_highp_mat3x3; + + /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<3, 3, float, packed_mediump> packed_mediump_mat3x3; + + /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 3, float, packed_lowp> packed_lowp_mat3x3; + + /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. 
+ typedef mat<3, 3, double, packed_highp> packed_highp_dmat3x3; + + /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<3, 3, double, packed_mediump> packed_mediump_dmat3x3; + + /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 3, double, packed_lowp> packed_lowp_dmat3x3; + + // -- *mat3x4 -- + + /// 3 by 4 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<3, 4, float, aligned_highp> aligned_highp_mat3x4; + + /// 3 by 4 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<3, 4, float, aligned_mediump> aligned_mediump_mat3x4; + + /// 3 by 4 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 4, float, aligned_lowp> aligned_lowp_mat3x4; + + /// 3 by 4 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<3, 4, double, aligned_highp> aligned_highp_dmat3x4; + + /// 3 by 4 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<3, 4, double, aligned_mediump> aligned_mediump_dmat3x4; + + /// 3 by 4 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 4, double, aligned_lowp> aligned_lowp_dmat3x4; + + /// 3 by 4 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<3, 4, float, packed_highp> packed_highp_mat3x4; + + /// 3 by 4 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<3, 4, float, packed_mediump> packed_mediump_mat3x4; + + /// 3 by 4 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 4, float, packed_lowp> packed_lowp_mat3x4; + + /// 3 by 4 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<3, 4, double, packed_highp> packed_highp_dmat3x4; + + /// 3 by 4 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<3, 4, double, packed_mediump> packed_mediump_dmat3x4; + + /// 3 by 4 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<3, 4, double, packed_lowp> packed_lowp_dmat3x4; + + // -- *mat4x2 -- + + /// 4 by 2 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 2, float, aligned_highp> aligned_highp_mat4x2; + + /// 4 by 2 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<4, 2, float, aligned_mediump> aligned_mediump_mat4x2; + + /// 4 by 2 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. 
+ typedef mat<4, 2, float, aligned_lowp> aligned_lowp_mat4x2; + + /// 4 by 2 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 2, double, aligned_highp> aligned_highp_dmat4x2; + + /// 4 by 2 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<4, 2, double, aligned_mediump> aligned_mediump_dmat4x2; + + /// 4 by 2 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<4, 2, double, aligned_lowp> aligned_lowp_dmat4x2; + + /// 4 by 2 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 2, float, packed_highp> packed_highp_mat4x2; + + /// 4 by 2 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<4, 2, float, packed_mediump> packed_mediump_mat4x2; + + /// 4 by 2 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<4, 2, float, packed_lowp> packed_lowp_mat4x2; + + /// 4 by 2 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 2, double, packed_highp> packed_highp_dmat4x2; + + /// 4 by 2 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<4, 2, double, packed_mediump> packed_mediump_dmat4x2; + + /// 4 by 2 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<4, 2, double, packed_lowp> packed_lowp_dmat4x2; + + // -- *mat4x3 -- + + /// 4 by 3 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 3, float, aligned_highp> aligned_highp_mat4x3; + + /// 4 by 3 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<4, 3, float, aligned_mediump> aligned_mediump_mat4x3; + + /// 4 by 3 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<4, 3, float, aligned_lowp> aligned_lowp_mat4x3; + + /// 4 by 3 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 3, double, aligned_highp> aligned_highp_dmat4x3; + + /// 4 by 3 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<4, 3, double, aligned_mediump> aligned_mediump_dmat4x3; + + /// 4 by 3 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<4, 3, double, aligned_lowp> aligned_lowp_dmat4x3; + + /// 4 by 3 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 3, float, packed_highp> packed_highp_mat4x3; + + /// 4 by 3 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. 
+ typedef mat<4, 3, float, packed_mediump> packed_mediump_mat4x3; + + /// 4 by 3 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<4, 3, float, packed_lowp> packed_lowp_mat4x3; + + /// 4 by 3 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 3, double, packed_highp> packed_highp_dmat4x3; + + /// 4 by 3 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<4, 3, double, packed_mediump> packed_mediump_dmat4x3; + + /// 4 by 3 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<4, 3, double, packed_lowp> packed_lowp_dmat4x3; + + // -- *mat4x4 -- + + /// 4 by 4 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 4, float, aligned_highp> aligned_highp_mat4x4; + + /// 4 by 4 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<4, 4, float, aligned_mediump> aligned_mediump_mat4x4; + + /// 4 by 4 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<4, 4, float, aligned_lowp> aligned_lowp_mat4x4; + + /// 4 by 4 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 4, double, aligned_highp> aligned_highp_dmat4x4; + + /// 4 by 4 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<4, 4, double, aligned_mediump> aligned_mediump_dmat4x4; + + /// 4 by 4 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<4, 4, double, aligned_lowp> aligned_lowp_dmat4x4; + + /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 4, float, packed_highp> packed_highp_mat4x4; + + /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<4, 4, float, packed_mediump> packed_mediump_mat4x4; + + /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. + typedef mat<4, 4, float, packed_lowp> packed_lowp_mat4x4; + + /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. + typedef mat<4, 4, double, packed_highp> packed_highp_dmat4x4; + + /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. + typedef mat<4, 4, double, packed_mediump> packed_mediump_dmat4x4; + + /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. 
+ typedef mat<4, 4, double, packed_lowp> packed_lowp_dmat4x4; + + // -- default -- + +#if(defined(GLM_PRECISION_LOWP_FLOAT)) + typedef aligned_lowp_vec1 aligned_vec1; + typedef aligned_lowp_vec2 aligned_vec2; + typedef aligned_lowp_vec3 aligned_vec3; + typedef aligned_lowp_vec4 aligned_vec4; + typedef packed_lowp_vec1 packed_vec1; + typedef packed_lowp_vec2 packed_vec2; + typedef packed_lowp_vec3 packed_vec3; + typedef packed_lowp_vec4 packed_vec4; + + typedef aligned_lowp_mat2 aligned_mat2; + typedef aligned_lowp_mat3 aligned_mat3; + typedef aligned_lowp_mat4 aligned_mat4; + typedef packed_lowp_mat2 packed_mat2; + typedef packed_lowp_mat3 packed_mat3; + typedef packed_lowp_mat4 packed_mat4; + + typedef aligned_lowp_mat2x2 aligned_mat2x2; + typedef aligned_lowp_mat2x3 aligned_mat2x3; + typedef aligned_lowp_mat2x4 aligned_mat2x4; + typedef aligned_lowp_mat3x2 aligned_mat3x2; + typedef aligned_lowp_mat3x3 aligned_mat3x3; + typedef aligned_lowp_mat3x4 aligned_mat3x4; + typedef aligned_lowp_mat4x2 aligned_mat4x2; + typedef aligned_lowp_mat4x3 aligned_mat4x3; + typedef aligned_lowp_mat4x4 aligned_mat4x4; + typedef packed_lowp_mat2x2 packed_mat2x2; + typedef packed_lowp_mat2x3 packed_mat2x3; + typedef packed_lowp_mat2x4 packed_mat2x4; + typedef packed_lowp_mat3x2 packed_mat3x2; + typedef packed_lowp_mat3x3 packed_mat3x3; + typedef packed_lowp_mat3x4 packed_mat3x4; + typedef packed_lowp_mat4x2 packed_mat4x2; + typedef packed_lowp_mat4x3 packed_mat4x3; + typedef packed_lowp_mat4x4 packed_mat4x4; +#elif(defined(GLM_PRECISION_MEDIUMP_FLOAT)) + typedef aligned_mediump_vec1 aligned_vec1; + typedef aligned_mediump_vec2 aligned_vec2; + typedef aligned_mediump_vec3 aligned_vec3; + typedef aligned_mediump_vec4 aligned_vec4; + typedef packed_mediump_vec1 packed_vec1; + typedef packed_mediump_vec2 packed_vec2; + typedef packed_mediump_vec3 packed_vec3; + typedef packed_mediump_vec4 packed_vec4; + + typedef aligned_mediump_mat2 aligned_mat2; + typedef aligned_mediump_mat3 aligned_mat3; + typedef aligned_mediump_mat4 aligned_mat4; + typedef packed_mediump_mat2 packed_mat2; + typedef packed_mediump_mat3 packed_mat3; + typedef packed_mediump_mat4 packed_mat4; + + typedef aligned_mediump_mat2x2 aligned_mat2x2; + typedef aligned_mediump_mat2x3 aligned_mat2x3; + typedef aligned_mediump_mat2x4 aligned_mat2x4; + typedef aligned_mediump_mat3x2 aligned_mat3x2; + typedef aligned_mediump_mat3x3 aligned_mat3x3; + typedef aligned_mediump_mat3x4 aligned_mat3x4; + typedef aligned_mediump_mat4x2 aligned_mat4x2; + typedef aligned_mediump_mat4x3 aligned_mat4x3; + typedef aligned_mediump_mat4x4 aligned_mat4x4; + typedef packed_mediump_mat2x2 packed_mat2x2; + typedef packed_mediump_mat2x3 packed_mat2x3; + typedef packed_mediump_mat2x4 packed_mat2x4; + typedef packed_mediump_mat3x2 packed_mat3x2; + typedef packed_mediump_mat3x3 packed_mat3x3; + typedef packed_mediump_mat3x4 packed_mat3x4; + typedef packed_mediump_mat4x2 packed_mat4x2; + typedef packed_mediump_mat4x3 packed_mat4x3; + typedef packed_mediump_mat4x4 packed_mat4x4; +#else //defined(GLM_PRECISION_HIGHP_FLOAT) + /// 1 component vector aligned in memory of single-precision floating-point numbers. + typedef aligned_highp_vec1 aligned_vec1; + + /// 2 components vector aligned in memory of single-precision floating-point numbers. + typedef aligned_highp_vec2 aligned_vec2; + + /// 3 components vector aligned in memory of single-precision floating-point numbers. 
+ typedef aligned_highp_vec3 aligned_vec3; + + /// 4 components vector aligned in memory of single-precision floating-point numbers. + typedef aligned_highp_vec4 aligned_vec4; + + /// 1 component vector tightly packed in memory of single-precision floating-point numbers. + typedef packed_highp_vec1 packed_vec1; + + /// 2 components vector tightly packed in memory of single-precision floating-point numbers. + typedef packed_highp_vec2 packed_vec2; + + /// 3 components vector tightly packed in memory of single-precision floating-point numbers. + typedef packed_highp_vec3 packed_vec3; + + /// 4 components vector tightly packed in memory of single-precision floating-point numbers. + typedef packed_highp_vec4 packed_vec4; + + /// 2 by 2 matrix tightly aligned in memory of single-precision floating-point numbers. + typedef aligned_highp_mat2 aligned_mat2; + + /// 3 by 3 matrix tightly aligned in memory of single-precision floating-point numbers. + typedef aligned_highp_mat3 aligned_mat3; + + /// 4 by 4 matrix tightly aligned in memory of single-precision floating-point numbers. + typedef aligned_highp_mat4 aligned_mat4; + + /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers. + typedef packed_highp_mat2 packed_mat2; + + /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers. + typedef packed_highp_mat3 packed_mat3; + + /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers. + typedef packed_highp_mat4 packed_mat4; + + /// 2 by 2 matrix tightly aligned in memory of single-precision floating-point numbers. + typedef aligned_highp_mat2x2 aligned_mat2x2; + + /// 2 by 3 matrix tightly aligned in memory of single-precision floating-point numbers. + typedef aligned_highp_mat2x3 aligned_mat2x3; + + /// 2 by 4 matrix tightly aligned in memory of single-precision floating-point numbers. + typedef aligned_highp_mat2x4 aligned_mat2x4; + + /// 3 by 2 matrix tightly aligned in memory of single-precision floating-point numbers. + typedef aligned_highp_mat3x2 aligned_mat3x2; + + /// 3 by 3 matrix tightly aligned in memory of single-precision floating-point numbers. + typedef aligned_highp_mat3x3 aligned_mat3x3; + + /// 3 by 4 matrix tightly aligned in memory of single-precision floating-point numbers. + typedef aligned_highp_mat3x4 aligned_mat3x4; + + /// 4 by 2 matrix tightly aligned in memory of single-precision floating-point numbers. + typedef aligned_highp_mat4x2 aligned_mat4x2; + + /// 4 by 3 matrix tightly aligned in memory of single-precision floating-point numbers. + typedef aligned_highp_mat4x3 aligned_mat4x3; + + /// 4 by 4 matrix tightly aligned in memory of single-precision floating-point numbers. + typedef aligned_highp_mat4x4 aligned_mat4x4; + + /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers. + typedef packed_highp_mat2x2 packed_mat2x2; + + /// 2 by 3 matrix tightly packed in memory of single-precision floating-point numbers. + typedef packed_highp_mat2x3 packed_mat2x3; + + /// 2 by 4 matrix tightly packed in memory of single-precision floating-point numbers. + typedef packed_highp_mat2x4 packed_mat2x4; + + /// 3 by 2 matrix tightly packed in memory of single-precision floating-point numbers. + typedef packed_highp_mat3x2 packed_mat3x2; + + /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers. + typedef packed_highp_mat3x3 packed_mat3x3; + + /// 3 by 4 matrix tightly packed in memory of single-precision floating-point numbers. 
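// ------------------------------------------------------------------------
// [Editor's sketch, not part of the vendored header] The preprocessor
// selection above only decides which precision the unqualified aliases
// refer to. When none of the GLM_PRECISION_*_FLOAT macros is defined
// before the first GLM include, the highp branch is taken:
#include <type_traits>
#include <glm/gtc/type_aligned.hpp>

static_assert(std::is_same<glm::aligned_vec4, glm::aligned_highp_vec4>::value,
	"default float precision is highp");
static_assert(std::is_same<glm::packed_vec4, glm::packed_highp_vec4>::value,
	"default float precision is highp");
// ------------------------------------------------------------------------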
+ typedef packed_highp_mat3x4 packed_mat3x4; + + /// 4 by 2 matrix tightly packed in memory of single-precision floating-point numbers. + typedef packed_highp_mat4x2 packed_mat4x2; + + /// 4 by 3 matrix tightly packed in memory of single-precision floating-point numbers. + typedef packed_highp_mat4x3 packed_mat4x3; + + /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers. + typedef packed_highp_mat4x4 packed_mat4x4; +#endif//GLM_PRECISION + +#if(defined(GLM_PRECISION_LOWP_DOUBLE)) + typedef aligned_lowp_dvec1 aligned_dvec1; + typedef aligned_lowp_dvec2 aligned_dvec2; + typedef aligned_lowp_dvec3 aligned_dvec3; + typedef aligned_lowp_dvec4 aligned_dvec4; + typedef packed_lowp_dvec1 packed_dvec1; + typedef packed_lowp_dvec2 packed_dvec2; + typedef packed_lowp_dvec3 packed_dvec3; + typedef packed_lowp_dvec4 packed_dvec4; + + typedef aligned_lowp_dmat2 aligned_dmat2; + typedef aligned_lowp_dmat3 aligned_dmat3; + typedef aligned_lowp_dmat4 aligned_dmat4; + typedef packed_lowp_dmat2 packed_dmat2; + typedef packed_lowp_dmat3 packed_dmat3; + typedef packed_lowp_dmat4 packed_dmat4; + + typedef aligned_lowp_dmat2x2 aligned_dmat2x2; + typedef aligned_lowp_dmat2x3 aligned_dmat2x3; + typedef aligned_lowp_dmat2x4 aligned_dmat2x4; + typedef aligned_lowp_dmat3x2 aligned_dmat3x2; + typedef aligned_lowp_dmat3x3 aligned_dmat3x3; + typedef aligned_lowp_dmat3x4 aligned_dmat3x4; + typedef aligned_lowp_dmat4x2 aligned_dmat4x2; + typedef aligned_lowp_dmat4x3 aligned_dmat4x3; + typedef aligned_lowp_dmat4x4 aligned_dmat4x4; + typedef packed_lowp_dmat2x2 packed_dmat2x2; + typedef packed_lowp_dmat2x3 packed_dmat2x3; + typedef packed_lowp_dmat2x4 packed_dmat2x4; + typedef packed_lowp_dmat3x2 packed_dmat3x2; + typedef packed_lowp_dmat3x3 packed_dmat3x3; + typedef packed_lowp_dmat3x4 packed_dmat3x4; + typedef packed_lowp_dmat4x2 packed_dmat4x2; + typedef packed_lowp_dmat4x3 packed_dmat4x3; + typedef packed_lowp_dmat4x4 packed_dmat4x4; +#elif(defined(GLM_PRECISION_MEDIUMP_DOUBLE)) + typedef aligned_mediump_dvec1 aligned_dvec1; + typedef aligned_mediump_dvec2 aligned_dvec2; + typedef aligned_mediump_dvec3 aligned_dvec3; + typedef aligned_mediump_dvec4 aligned_dvec4; + typedef packed_mediump_dvec1 packed_dvec1; + typedef packed_mediump_dvec2 packed_dvec2; + typedef packed_mediump_dvec3 packed_dvec3; + typedef packed_mediump_dvec4 packed_dvec4; + + typedef aligned_mediump_dmat2 aligned_dmat2; + typedef aligned_mediump_dmat3 aligned_dmat3; + typedef aligned_mediump_dmat4 aligned_dmat4; + typedef packed_mediump_dmat2 packed_dmat2; + typedef packed_mediump_dmat3 packed_dmat3; + typedef packed_mediump_dmat4 packed_dmat4; + + typedef aligned_mediump_dmat2x2 aligned_dmat2x2; + typedef aligned_mediump_dmat2x3 aligned_dmat2x3; + typedef aligned_mediump_dmat2x4 aligned_dmat2x4; + typedef aligned_mediump_dmat3x2 aligned_dmat3x2; + typedef aligned_mediump_dmat3x3 aligned_dmat3x3; + typedef aligned_mediump_dmat3x4 aligned_dmat3x4; + typedef aligned_mediump_dmat4x2 aligned_dmat4x2; + typedef aligned_mediump_dmat4x3 aligned_dmat4x3; + typedef aligned_mediump_dmat4x4 aligned_dmat4x4; + typedef packed_mediump_dmat2x2 packed_dmat2x2; + typedef packed_mediump_dmat2x3 packed_dmat2x3; + typedef packed_mediump_dmat2x4 packed_dmat2x4; + typedef packed_mediump_dmat3x2 packed_dmat3x2; + typedef packed_mediump_dmat3x3 packed_dmat3x3; + typedef packed_mediump_dmat3x4 packed_dmat3x4; + typedef packed_mediump_dmat4x2 packed_dmat4x2; + typedef packed_mediump_dmat4x3 packed_dmat4x3; + typedef packed_mediump_dmat4x4 
packed_dmat4x4; +#else //defined(GLM_PRECISION_HIGHP_DOUBLE) + /// 1 component vector aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dvec1 aligned_dvec1; + + /// 2 components vector aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dvec2 aligned_dvec2; + + /// 3 components vector aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dvec3 aligned_dvec3; + + /// 4 components vector aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dvec4 aligned_dvec4; + + /// 1 component vector tightly packed in memory of double-precision floating-point numbers. + typedef packed_highp_dvec1 packed_dvec1; + + /// 2 components vector tightly packed in memory of double-precision floating-point numbers. + typedef packed_highp_dvec2 packed_dvec2; + + /// 3 components vector tightly packed in memory of double-precision floating-point numbers. + typedef packed_highp_dvec3 packed_dvec3; + + /// 4 components vector tightly packed in memory of double-precision floating-point numbers. + typedef packed_highp_dvec4 packed_dvec4; + + /// 2 by 2 matrix tightly aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dmat2 aligned_dmat2; + + /// 3 by 3 matrix tightly aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dmat3 aligned_dmat3; + + /// 4 by 4 matrix tightly aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dmat4 aligned_dmat4; + + /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers. + typedef packed_highp_dmat2 packed_dmat2; + + /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers. + typedef packed_highp_dmat3 packed_dmat3; + + /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers. + typedef packed_highp_dmat4 packed_dmat4; + + /// 2 by 2 matrix tightly aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dmat2x2 aligned_dmat2x2; + + /// 2 by 3 matrix tightly aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dmat2x3 aligned_dmat2x3; + + /// 2 by 4 matrix tightly aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dmat2x4 aligned_dmat2x4; + + /// 3 by 2 matrix tightly aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dmat3x2 aligned_dmat3x2; + + /// 3 by 3 matrix tightly aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dmat3x3 aligned_dmat3x3; + + /// 3 by 4 matrix tightly aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dmat3x4 aligned_dmat3x4; + + /// 4 by 2 matrix tightly aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dmat4x2 aligned_dmat4x2; + + /// 4 by 3 matrix tightly aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dmat4x3 aligned_dmat4x3; + + /// 4 by 4 matrix tightly aligned in memory of double-precision floating-point numbers. + typedef aligned_highp_dmat4x4 aligned_dmat4x4; + + /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers. + typedef packed_highp_dmat2x2 packed_dmat2x2; + + /// 2 by 3 matrix tightly packed in memory of double-precision floating-point numbers. 
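// ------------------------------------------------------------------------
// [Editor's sketch, not part of the vendored header] The double-precision
// aliases are selected independently through GLM_PRECISION_*_DOUBLE;
// defining, say, GLM_PRECISION_MEDIUMP_DOUBLE before the first include
// would remap aligned_dmat4 to aligned_mediump_dmat4. Under the default:
#include <type_traits>
#include <glm/gtc/type_aligned.hpp>

static_assert(std::is_same<glm::aligned_dmat4, glm::aligned_highp_dmat4>::value,
	"default double precision is highp");
// ------------------------------------------------------------------------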
+ typedef packed_highp_dmat2x3 packed_dmat2x3; + + /// 2 by 4 matrix tightly packed in memory of double-precision floating-point numbers. + typedef packed_highp_dmat2x4 packed_dmat2x4; + + /// 3 by 2 matrix tightly packed in memory of double-precision floating-point numbers. + typedef packed_highp_dmat3x2 packed_dmat3x2; + + /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers. + typedef packed_highp_dmat3x3 packed_dmat3x3; + + /// 3 by 4 matrix tightly packed in memory of double-precision floating-point numbers. + typedef packed_highp_dmat3x4 packed_dmat3x4; + + /// 4 by 2 matrix tightly packed in memory of double-precision floating-point numbers. + typedef packed_highp_dmat4x2 packed_dmat4x2; + + /// 4 by 3 matrix tightly packed in memory of double-precision floating-point numbers. + typedef packed_highp_dmat4x3 packed_dmat4x3; + + /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers. + typedef packed_highp_dmat4x4 packed_dmat4x4; +#endif//GLM_PRECISION + +#if(defined(GLM_PRECISION_LOWP_INT)) + typedef aligned_lowp_ivec1 aligned_ivec1; + typedef aligned_lowp_ivec2 aligned_ivec2; + typedef aligned_lowp_ivec3 aligned_ivec3; + typedef aligned_lowp_ivec4 aligned_ivec4; +#elif(defined(GLM_PRECISION_MEDIUMP_INT)) + typedef aligned_mediump_ivec1 aligned_ivec1; + typedef aligned_mediump_ivec2 aligned_ivec2; + typedef aligned_mediump_ivec3 aligned_ivec3; + typedef aligned_mediump_ivec4 aligned_ivec4; +#else //defined(GLM_PRECISION_HIGHP_INT) + /// 1 component vector aligned in memory of signed integer numbers. + typedef aligned_highp_ivec1 aligned_ivec1; + + /// 2 components vector aligned in memory of signed integer numbers. + typedef aligned_highp_ivec2 aligned_ivec2; + + /// 3 components vector aligned in memory of signed integer numbers. + typedef aligned_highp_ivec3 aligned_ivec3; + + /// 4 components vector aligned in memory of signed integer numbers. + typedef aligned_highp_ivec4 aligned_ivec4; + + /// 1 component vector tightly packed in memory of signed integer numbers. + typedef packed_highp_ivec1 packed_ivec1; + + /// 2 components vector tightly packed in memory of signed integer numbers. + typedef packed_highp_ivec2 packed_ivec2; + + /// 3 components vector tightly packed in memory of signed integer numbers. + typedef packed_highp_ivec3 packed_ivec3; + + /// 4 components vector tightly packed in memory of signed integer numbers. + typedef packed_highp_ivec4 packed_ivec4; +#endif//GLM_PRECISION + + // -- Unsigned integer definition -- + +#if(defined(GLM_PRECISION_LOWP_UINT)) + typedef aligned_lowp_uvec1 aligned_uvec1; + typedef aligned_lowp_uvec2 aligned_uvec2; + typedef aligned_lowp_uvec3 aligned_uvec3; + typedef aligned_lowp_uvec4 aligned_uvec4; +#elif(defined(GLM_PRECISION_MEDIUMP_UINT)) + typedef aligned_mediump_uvec1 aligned_uvec1; + typedef aligned_mediump_uvec2 aligned_uvec2; + typedef aligned_mediump_uvec3 aligned_uvec3; + typedef aligned_mediump_uvec4 aligned_uvec4; +#else //defined(GLM_PRECISION_HIGHP_UINT) + /// 1 component vector aligned in memory of unsigned integer numbers. + typedef aligned_highp_uvec1 aligned_uvec1; + + /// 2 components vector aligned in memory of unsigned integer numbers. + typedef aligned_highp_uvec2 aligned_uvec2; + + /// 3 components vector aligned in memory of unsigned integer numbers. + typedef aligned_highp_uvec3 aligned_uvec3; + + /// 4 components vector aligned in memory of unsigned integer numbers. 
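// ------------------------------------------------------------------------
// [Editor's sketch, not part of the vendored header] The integer aliases
// follow the same pattern as the float ones. A common use for the aligned_*
// variants is mirroring a GPU-side uniform block, where every member is
// expected on a 16-byte boundary; whether the layout actually matches
// std140 still has to be verified per member, so treat this as a sketch:
#include <glm/gtc/type_aligned.hpp>

struct Params // hypothetical CPU-side mirror of a uniform block
{
	glm::aligned_ivec4 counts; // ivec4, aligned (typically to 16 bytes)
	glm::aligned_uvec4 flags;  // uvec4, aligned likewise
};
// ------------------------------------------------------------------------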
+ typedef aligned_highp_uvec4 aligned_uvec4; + + /// 1 component vector tightly packed in memory of unsigned integer numbers. + typedef packed_highp_uvec1 packed_uvec1; + + /// 2 components vector tightly packed in memory of unsigned integer numbers. + typedef packed_highp_uvec2 packed_uvec2; + + /// 3 components vector tightly packed in memory of unsigned integer numbers. + typedef packed_highp_uvec3 packed_uvec3; + + /// 4 components vector tightly packed in memory of unsigned integer numbers. + typedef packed_highp_uvec4 packed_uvec4; +#endif//GLM_PRECISION + +#if(defined(GLM_PRECISION_LOWP_BOOL)) + typedef aligned_lowp_bvec1 aligned_bvec1; + typedef aligned_lowp_bvec2 aligned_bvec2; + typedef aligned_lowp_bvec3 aligned_bvec3; + typedef aligned_lowp_bvec4 aligned_bvec4; +#elif(defined(GLM_PRECISION_MEDIUMP_BOOL)) + typedef aligned_mediump_bvec1 aligned_bvec1; + typedef aligned_mediump_bvec2 aligned_bvec2; + typedef aligned_mediump_bvec3 aligned_bvec3; + typedef aligned_mediump_bvec4 aligned_bvec4; +#else //defined(GLM_PRECISION_HIGHP_BOOL) + /// 1 component vector aligned in memory of bool values. + typedef aligned_highp_bvec1 aligned_bvec1; + + /// 2 components vector aligned in memory of bool values. + typedef aligned_highp_bvec2 aligned_bvec2; + + /// 3 components vector aligned in memory of bool values. + typedef aligned_highp_bvec3 aligned_bvec3; + + /// 4 components vector aligned in memory of bool values. + typedef aligned_highp_bvec4 aligned_bvec4; + + /// 1 components vector tightly packed in memory of bool values. + typedef packed_highp_bvec1 packed_bvec1; + + /// 2 components vector tightly packed in memory of bool values. + typedef packed_highp_bvec2 packed_bvec2; + + /// 3 components vector tightly packed in memory of bool values. + typedef packed_highp_bvec3 packed_bvec3; + + /// 4 components vector tightly packed in memory of bool values. + typedef packed_highp_bvec4 packed_bvec4; +#endif//GLM_PRECISION + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtc/type_precision.hpp b/MacroLibX/third_party/glm/gtc/type_precision.hpp new file mode 100644 index 0000000..250bc4f --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/type_precision.hpp @@ -0,0 +1,2138 @@ +/// @ref gtc_type_precision +/// @file glm/gtc/type_precision.hpp +/// +/// @see core (dependence) +/// @see gtc_quaternion (dependence) +/// +/// @defgroup gtc_type_precision GLM_GTC_type_precision +/// @ingroup gtc +/// +/// Include to use the features of this extension. +/// +/// Defines specific C++-based qualifier types. + +#pragma once + +// Dependency: +#include "../gtc/quaternion.hpp" +#include "../gtc/vec1.hpp" +#include "../ext/scalar_int_sized.hpp" +#include "../ext/scalar_uint_sized.hpp" +#include "../detail/type_vec2.hpp" +#include "../detail/type_vec3.hpp" +#include "../detail/type_vec4.hpp" +#include "../detail/type_mat2x2.hpp" +#include "../detail/type_mat2x3.hpp" +#include "../detail/type_mat2x4.hpp" +#include "../detail/type_mat3x2.hpp" +#include "../detail/type_mat3x3.hpp" +#include "../detail/type_mat3x4.hpp" +#include "../detail/type_mat4x2.hpp" +#include "../detail/type_mat4x3.hpp" +#include "../detail/type_mat4x4.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_GTC_type_precision extension included") +#endif + +namespace glm +{ + /////////////////////////// + // Signed int vector types + + /// @addtogroup gtc_type_precision + /// @{ + + /// Low qualifier 8 bit signed integer type. 
+ /// @see gtc_type_precision + typedef detail::int8 lowp_int8; + + /// Low qualifier 16 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int16 lowp_int16; + + /// Low qualifier 32 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int32 lowp_int32; + + /// Low qualifier 64 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int64 lowp_int64; + + /// Low qualifier 8 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int8 lowp_int8_t; + + /// Low qualifier 16 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int16 lowp_int16_t; + + /// Low qualifier 32 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int32 lowp_int32_t; + + /// Low qualifier 64 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int64 lowp_int64_t; + + /// Low qualifier 8 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int8 lowp_i8; + + /// Low qualifier 16 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int16 lowp_i16; + + /// Low qualifier 32 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int32 lowp_i32; + + /// Low qualifier 64 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int64 lowp_i64; + + /// Medium qualifier 8 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int8 mediump_int8; + + /// Medium qualifier 16 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int16 mediump_int16; + + /// Medium qualifier 32 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int32 mediump_int32; + + /// Medium qualifier 64 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int64 mediump_int64; + + /// Medium qualifier 8 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int8 mediump_int8_t; + + /// Medium qualifier 16 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int16 mediump_int16_t; + + /// Medium qualifier 32 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int32 mediump_int32_t; + + /// Medium qualifier 64 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int64 mediump_int64_t; + + /// Medium qualifier 8 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int8 mediump_i8; + + /// Medium qualifier 16 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int16 mediump_i16; + + /// Medium qualifier 32 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int32 mediump_i32; + + /// Medium qualifier 64 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int64 mediump_i64; + + /// High qualifier 8 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int8 highp_int8; + + /// High qualifier 16 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int16 highp_int16; + + /// High qualifier 32 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int32 highp_int32; + + /// High qualifier 64 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int64 highp_int64; + + /// High qualifier 8 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int8 highp_int8_t; + + /// High qualifier 16 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int16 highp_int16_t; + + /// 32 bit signed integer type. 
+ /// @see gtc_type_precision + typedef detail::int32 highp_int32_t; + + /// High qualifier 64 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int64 highp_int64_t; + + /// High qualifier 8 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int8 highp_i8; + + /// High qualifier 16 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int16 highp_i16; + + /// High qualifier 32 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int32 highp_i32; + + /// High qualifier 64 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int64 highp_i64; + + +#if GLM_HAS_EXTENDED_INTEGER_TYPE + using std::int8_t; + using std::int16_t; + using std::int32_t; + using std::int64_t; +#else + /// 8 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int8 int8_t; + + /// 16 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int16 int16_t; + + /// 32 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int32 int32_t; + + /// 64 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int64 int64_t; +#endif + + /// 8 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int8 i8; + + /// 16 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int16 i16; + + /// 32 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int32 i32; + + /// 64 bit signed integer type. + /// @see gtc_type_precision + typedef detail::int64 i64; + + + + /// Low qualifier 8 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i8, lowp> lowp_i8vec1; + + /// Low qualifier 8 bit signed integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, i8, lowp> lowp_i8vec2; + + /// Low qualifier 8 bit signed integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, i8, lowp> lowp_i8vec3; + + /// Low qualifier 8 bit signed integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, i8, lowp> lowp_i8vec4; + + + /// Medium qualifier 8 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i8, mediump> mediump_i8vec1; + + /// Medium qualifier 8 bit signed integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, i8, mediump> mediump_i8vec2; + + /// Medium qualifier 8 bit signed integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, i8, mediump> mediump_i8vec3; + + /// Medium qualifier 8 bit signed integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, i8, mediump> mediump_i8vec4; + + + /// High qualifier 8 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i8, highp> highp_i8vec1; + + /// High qualifier 8 bit signed integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, i8, highp> highp_i8vec2; + + /// High qualifier 8 bit signed integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, i8, highp> highp_i8vec3; + + /// High qualifier 8 bit signed integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, i8, highp> highp_i8vec4; + + + + /// 8 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i8, defaultp> i8vec1; + + /// 8 bit signed integer vector of 2 components type. 
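// ------------------------------------------------------------------------
// [Editor's sketch, not part of the vendored header] Whatever the lowp_/
// mediump_/highp_ prefix, the signed integer typedefs above all resolve to
// the same fixed-width storage, so the width guarantees can be checked
// directly:
#include <glm/gtc/type_precision.hpp>

static_assert(sizeof(glm::i8)  == 1, "i8 is one byte");
static_assert(sizeof(glm::i16) == 2, "i16 is two bytes");
static_assert(sizeof(glm::i32) == 4, "i32 is four bytes");
static_assert(sizeof(glm::i64) == 8, "i64 is eight bytes");
// ------------------------------------------------------------------------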
+ /// @see gtc_type_precision + typedef vec<2, i8, defaultp> i8vec2; + + /// 8 bit signed integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, i8, defaultp> i8vec3; + + /// 8 bit signed integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, i8, defaultp> i8vec4; + + + + + + /// Low qualifier 16 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i16, lowp> lowp_i16vec1; + + /// Low qualifier 16 bit signed integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, i16, lowp> lowp_i16vec2; + + /// Low qualifier 16 bit signed integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, i16, lowp> lowp_i16vec3; + + /// Low qualifier 16 bit signed integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, i16, lowp> lowp_i16vec4; + + + /// Medium qualifier 16 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i16, mediump> mediump_i16vec1; + + /// Medium qualifier 16 bit signed integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, i16, mediump> mediump_i16vec2; + + /// Medium qualifier 16 bit signed integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, i16, mediump> mediump_i16vec3; + + /// Medium qualifier 16 bit signed integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, i16, mediump> mediump_i16vec4; + + + /// High qualifier 16 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i16, highp> highp_i16vec1; + + /// High qualifier 16 bit signed integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, i16, highp> highp_i16vec2; + + /// High qualifier 16 bit signed integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, i16, highp> highp_i16vec3; + + /// High qualifier 16 bit signed integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, i16, highp> highp_i16vec4; + + + + + /// 16 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i16, defaultp> i16vec1; + + /// 16 bit signed integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, i16, defaultp> i16vec2; + + /// 16 bit signed integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, i16, defaultp> i16vec3; + + /// 16 bit signed integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, i16, defaultp> i16vec4; + + + + /// Low qualifier 32 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i32, lowp> lowp_i32vec1; + + /// Low qualifier 32 bit signed integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, i32, lowp> lowp_i32vec2; + + /// Low qualifier 32 bit signed integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, i32, lowp> lowp_i32vec3; + + /// Low qualifier 32 bit signed integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, i32, lowp> lowp_i32vec4; + + + /// Medium qualifier 32 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i32, mediump> mediump_i32vec1; + + /// Medium qualifier 32 bit signed integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, i32, mediump> mediump_i32vec2; + + /// Medium qualifier 32 bit signed integer vector of 3 components type. 
+ /// @see gtc_type_precision + typedef vec<3, i32, mediump> mediump_i32vec3; + + /// Medium qualifier 32 bit signed integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, i32, mediump> mediump_i32vec4; + + + /// High qualifier 32 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i32, highp> highp_i32vec1; + + /// High qualifier 32 bit signed integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, i32, highp> highp_i32vec2; + + /// High qualifier 32 bit signed integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, i32, highp> highp_i32vec3; + + /// High qualifier 32 bit signed integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, i32, highp> highp_i32vec4; + + + /// 32 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i32, defaultp> i32vec1; + + /// 32 bit signed integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, i32, defaultp> i32vec2; + + /// 32 bit signed integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, i32, defaultp> i32vec3; + + /// 32 bit signed integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, i32, defaultp> i32vec4; + + + + + /// Low qualifier 64 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i64, lowp> lowp_i64vec1; + + /// Low qualifier 64 bit signed integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, i64, lowp> lowp_i64vec2; + + /// Low qualifier 64 bit signed integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, i64, lowp> lowp_i64vec3; + + /// Low qualifier 64 bit signed integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, i64, lowp> lowp_i64vec4; + + + /// Medium qualifier 64 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i64, mediump> mediump_i64vec1; + + /// Medium qualifier 64 bit signed integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, i64, mediump> mediump_i64vec2; + + /// Medium qualifier 64 bit signed integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, i64, mediump> mediump_i64vec3; + + /// Medium qualifier 64 bit signed integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, i64, mediump> mediump_i64vec4; + + + /// High qualifier 64 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i64, highp> highp_i64vec1; + + /// High qualifier 64 bit signed integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, i64, highp> highp_i64vec2; + + /// High qualifier 64 bit signed integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, i64, highp> highp_i64vec3; + + /// High qualifier 64 bit signed integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, i64, highp> highp_i64vec4; + + + /// 64 bit signed integer scalar type. + /// @see gtc_type_precision + typedef vec<1, i64, defaultp> i64vec1; + + /// 64 bit signed integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, i64, defaultp> i64vec2; + + /// 64 bit signed integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, i64, defaultp> i64vec3; + + /// 64 bit signed integer vector of 4 components type. 
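// ------------------------------------------------------------------------
// [Editor's sketch, not part of the vendored header] The sized vector
// typedefs are convenient for compact vertex formats. With the default
// (non-aligned) qualifier the components are stored back to back, so a
// hypothetical 16-byte vertex could look like this (the static_assert
// holds on mainstream ABIs as long as GLM_FORCE_DEFAULT_ALIGNED_GENTYPES
// is not defined):
#include <glm/gtc/type_precision.hpp>

struct Vertex
{
	glm::i16vec3 position; // 3 x int16 = 6 bytes
	glm::i16vec3 normal;   // 3 x int16 = 6 bytes
	glm::i8vec4  color;    // 4 x int8  = 4 bytes
};
static_assert(sizeof(Vertex) == 16, "expected a tightly packed 16-byte vertex");
// ------------------------------------------------------------------------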
+ /// @see gtc_type_precision + typedef vec<4, i64, defaultp> i64vec4; + + + ///////////////////////////// + // Unsigned int vector types + + /// Low qualifier 8 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint8 lowp_uint8; + + /// Low qualifier 16 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint16 lowp_uint16; + + /// Low qualifier 32 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint32 lowp_uint32; + + /// Low qualifier 64 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint64 lowp_uint64; + + /// Low qualifier 8 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint8 lowp_uint8_t; + + /// Low qualifier 16 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint16 lowp_uint16_t; + + /// Low qualifier 32 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint32 lowp_uint32_t; + + /// Low qualifier 64 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint64 lowp_uint64_t; + + /// Low qualifier 8 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint8 lowp_u8; + + /// Low qualifier 16 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint16 lowp_u16; + + /// Low qualifier 32 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint32 lowp_u32; + + /// Low qualifier 64 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint64 lowp_u64; + + /// Medium qualifier 8 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint8 mediump_uint8; + + /// Medium qualifier 16 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint16 mediump_uint16; + + /// Medium qualifier 32 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint32 mediump_uint32; + + /// Medium qualifier 64 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint64 mediump_uint64; + + /// Medium qualifier 8 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint8 mediump_uint8_t; + + /// Medium qualifier 16 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint16 mediump_uint16_t; + + /// Medium qualifier 32 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint32 mediump_uint32_t; + + /// Medium qualifier 64 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint64 mediump_uint64_t; + + /// Medium qualifier 8 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint8 mediump_u8; + + /// Medium qualifier 16 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint16 mediump_u16; + + /// Medium qualifier 32 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint32 mediump_u32; + + /// Medium qualifier 64 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint64 mediump_u64; + + /// High qualifier 8 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint8 highp_uint8; + + /// High qualifier 16 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint16 highp_uint16; + + /// High qualifier 32 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint32 highp_uint32; + + /// High qualifier 64 bit unsigned integer type. 
+ /// @see gtc_type_precision + typedef detail::uint64 highp_uint64; + + /// High qualifier 8 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint8 highp_uint8_t; + + /// High qualifier 16 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint16 highp_uint16_t; + + /// High qualifier 32 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint32 highp_uint32_t; + + /// High qualifier 64 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint64 highp_uint64_t; + + /// High qualifier 8 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint8 highp_u8; + + /// High qualifier 16 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint16 highp_u16; + + /// High qualifier 32 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint32 highp_u32; + + /// High qualifier 64 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint64 highp_u64; + +#if GLM_HAS_EXTENDED_INTEGER_TYPE + using std::uint8_t; + using std::uint16_t; + using std::uint32_t; + using std::uint64_t; +#else + /// Default qualifier 8 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint8 uint8_t; + + /// Default qualifier 16 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint16 uint16_t; + + /// Default qualifier 32 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint32 uint32_t; + + /// Default qualifier 64 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint64 uint64_t; +#endif + + /// Default qualifier 8 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint8 u8; + + /// Default qualifier 16 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint16 u16; + + /// Default qualifier 32 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint32 u32; + + /// Default qualifier 64 bit unsigned integer type. + /// @see gtc_type_precision + typedef detail::uint64 u64; + + + + + + ////////////////////// + // Float vector types + + /// Single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float float32; + + /// Double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef double float64; + + /// Low 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 lowp_float32; + + /// Low 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 lowp_float64; + + /// Low 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 lowp_float32_t; + + /// Low 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 lowp_float64_t; + + /// Low 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 lowp_f32; + + /// Low 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 lowp_f64; + + /// Low 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 lowp_float32; + + /// Low 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 lowp_float64; + + /// Low 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 lowp_float32_t; + + /// Low 64 bit double-qualifier floating-point scalar. 
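// ------------------------------------------------------------------------
// [Editor's sketch, not part of the vendored header] The unsigned aliases
// above mirror the signed ones; a typical use is bit-packing, e.g. folding
// an RGBA8 color into a single u32:
#include <glm/gtc/type_precision.hpp>

inline glm::u32 pack_rgba8(glm::u8 r, glm::u8 g, glm::u8 b, glm::u8 a)
{
	return (glm::u32(a) << 24) | (glm::u32(b) << 16)
	     | (glm::u32(g) <<  8) |  glm::u32(r);
}
// ------------------------------------------------------------------------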
+ /// @see gtc_type_precision + typedef float64 lowp_float64_t; + + /// Low 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 lowp_f32; + + /// Low 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 lowp_f64; + + + /// Low 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 lowp_float32; + + /// Low 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 lowp_float64; + + /// Low 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 lowp_float32_t; + + /// Low 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 lowp_float64_t; + + /// Low 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 lowp_f32; + + /// Low 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 lowp_f64; + + + /// Medium 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 mediump_float32; + + /// Medium 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 mediump_float64; + + /// Medium 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 mediump_float32_t; + + /// Medium 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 mediump_float64_t; + + /// Medium 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 mediump_f32; + + /// Medium 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 mediump_f64; + + + /// High 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 highp_float32; + + /// High 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 highp_float64; + + /// High 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 highp_float32_t; + + /// High 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 highp_float64_t; + + /// High 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 highp_f32; + + /// High 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 highp_f64; + + +#if(defined(GLM_PRECISION_LOWP_FLOAT)) + /// Default 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef lowp_float32_t float32_t; + + /// Default 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef lowp_float64_t float64_t; + + /// Default 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef lowp_f32 f32; + + /// Default 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef lowp_f64 f64; + +#elif(defined(GLM_PRECISION_MEDIUMP_FLOAT)) + /// Default 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef mediump_float32 float32_t; + + /// Default 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef mediump_float64 float64_t; + + /// Default 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef mediump_float32 f32; + + /// Default 64 bit double-qualifier floating-point scalar. 
+ /// @see gtc_type_precision + typedef mediump_float64 f64; + +#else//(defined(GLM_PRECISION_HIGHP_FLOAT)) + + /// Default 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef highp_float32_t float32_t; + + /// Default 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef highp_float64_t float64_t; + + /// Default 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef highp_float32_t f32; + + /// Default 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef highp_float64_t f64; +#endif + + + /// Low single-qualifier floating-point vector of 1 component. + /// @see gtc_type_precision + typedef vec<1, float, lowp> lowp_fvec1; + + /// Low single-qualifier floating-point vector of 2 components. + /// @see gtc_type_precision + typedef vec<2, float, lowp> lowp_fvec2; + + /// Low single-qualifier floating-point vector of 3 components. + /// @see gtc_type_precision + typedef vec<3, float, lowp> lowp_fvec3; + + /// Low single-qualifier floating-point vector of 4 components. + /// @see gtc_type_precision + typedef vec<4, float, lowp> lowp_fvec4; + + + /// Medium single-qualifier floating-point vector of 1 component. + /// @see gtc_type_precision + typedef vec<1, float, mediump> mediump_fvec1; + + /// Medium Single-qualifier floating-point vector of 2 components. + /// @see gtc_type_precision + typedef vec<2, float, mediump> mediump_fvec2; + + /// Medium Single-qualifier floating-point vector of 3 components. + /// @see gtc_type_precision + typedef vec<3, float, mediump> mediump_fvec3; + + /// Medium Single-qualifier floating-point vector of 4 components. + /// @see gtc_type_precision + typedef vec<4, float, mediump> mediump_fvec4; + + + /// High single-qualifier floating-point vector of 1 component. + /// @see gtc_type_precision + typedef vec<1, float, highp> highp_fvec1; + + /// High Single-qualifier floating-point vector of 2 components. + /// @see core_precision + typedef vec<2, float, highp> highp_fvec2; + + /// High Single-qualifier floating-point vector of 3 components. + /// @see core_precision + typedef vec<3, float, highp> highp_fvec3; + + /// High Single-qualifier floating-point vector of 4 components. + /// @see core_precision + typedef vec<4, float, highp> highp_fvec4; + + + /// Low single-qualifier floating-point vector of 1 component. + /// @see gtc_type_precision + typedef vec<1, f32, lowp> lowp_f32vec1; + + /// Low single-qualifier floating-point vector of 2 components. + /// @see core_precision + typedef vec<2, f32, lowp> lowp_f32vec2; + + /// Low single-qualifier floating-point vector of 3 components. + /// @see core_precision + typedef vec<3, f32, lowp> lowp_f32vec3; + + /// Low single-qualifier floating-point vector of 4 components. + /// @see core_precision + typedef vec<4, f32, lowp> lowp_f32vec4; + + /// Medium single-qualifier floating-point vector of 1 component. + /// @see gtc_type_precision + typedef vec<1, f32, mediump> mediump_f32vec1; + + /// Medium single-qualifier floating-point vector of 2 components. + /// @see core_precision + typedef vec<2, f32, mediump> mediump_f32vec2; + + /// Medium single-qualifier floating-point vector of 3 components. + /// @see core_precision + typedef vec<3, f32, mediump> mediump_f32vec3; + + /// Medium single-qualifier floating-point vector of 4 components. + /// @see core_precision + typedef vec<4, f32, mediump> mediump_f32vec4; + + /// High single-qualifier floating-point vector of 1 component. 
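// ------------------------------------------------------------------------
// [Editor's sketch, not part of the vendored header] Unless one of the
// GLM_PRECISION_*_FLOAT macros was defined before the first include, the
// scalar aliases selected above are plain float and double:
#include <type_traits>
#include <glm/gtc/type_precision.hpp>

static_assert(std::is_same<glm::f32, float>::value,  "f32 defaults to float");
static_assert(std::is_same<glm::f64, double>::value, "f64 defaults to double");
// ------------------------------------------------------------------------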
+ /// @see gtc_type_precision + typedef vec<1, f32, highp> highp_f32vec1; + + /// High single-qualifier floating-point vector of 2 components. + /// @see gtc_type_precision + typedef vec<2, f32, highp> highp_f32vec2; + + /// High single-qualifier floating-point vector of 3 components. + /// @see gtc_type_precision + typedef vec<3, f32, highp> highp_f32vec3; + + /// High single-qualifier floating-point vector of 4 components. + /// @see gtc_type_precision + typedef vec<4, f32, highp> highp_f32vec4; + + + /// Low double-qualifier floating-point vector of 1 component. + /// @see gtc_type_precision + typedef vec<1, f64, lowp> lowp_f64vec1; + + /// Low double-qualifier floating-point vector of 2 components. + /// @see gtc_type_precision + typedef vec<2, f64, lowp> lowp_f64vec2; + + /// Low double-qualifier floating-point vector of 3 components. + /// @see gtc_type_precision + typedef vec<3, f64, lowp> lowp_f64vec3; + + /// Low double-qualifier floating-point vector of 4 components. + /// @see gtc_type_precision + typedef vec<4, f64, lowp> lowp_f64vec4; + + /// Medium double-qualifier floating-point vector of 1 component. + /// @see gtc_type_precision + typedef vec<1, f64, mediump> mediump_f64vec1; + + /// Medium double-qualifier floating-point vector of 2 components. + /// @see gtc_type_precision + typedef vec<2, f64, mediump> mediump_f64vec2; + + /// Medium double-qualifier floating-point vector of 3 components. + /// @see gtc_type_precision + typedef vec<3, f64, mediump> mediump_f64vec3; + + /// Medium double-qualifier floating-point vector of 4 components. + /// @see gtc_type_precision + typedef vec<4, f64, mediump> mediump_f64vec4; + + /// High double-qualifier floating-point vector of 1 component. + /// @see gtc_type_precision + typedef vec<1, f64, highp> highp_f64vec1; + + /// High double-qualifier floating-point vector of 2 components. + /// @see gtc_type_precision + typedef vec<2, f64, highp> highp_f64vec2; + + /// High double-qualifier floating-point vector of 3 components. + /// @see gtc_type_precision + typedef vec<3, f64, highp> highp_f64vec3; + + /// High double-qualifier floating-point vector of 4 components. + /// @see gtc_type_precision + typedef vec<4, f64, highp> highp_f64vec4; + + + + ////////////////////// + // Float matrix types + + /// Low single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef lowp_f32 lowp_fmat1x1; + + /// Low single-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mat<2, 2, f32, lowp> lowp_fmat2x2; + + /// Low single-qualifier floating-point 2x3 matrix. + /// @see gtc_type_precision + typedef mat<2, 3, f32, lowp> lowp_fmat2x3; + + /// Low single-qualifier floating-point 2x4 matrix. + /// @see gtc_type_precision + typedef mat<2, 4, f32, lowp> lowp_fmat2x4; + + /// Low single-qualifier floating-point 3x2 matrix. + /// @see gtc_type_precision + typedef mat<3, 2, f32, lowp> lowp_fmat3x2; + + /// Low single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mat<3, 3, f32, lowp> lowp_fmat3x3; + + /// Low single-qualifier floating-point 3x4 matrix. + /// @see gtc_type_precision + typedef mat<3, 4, f32, lowp> lowp_fmat3x4; + + /// Low single-qualifier floating-point 4x2 matrix. + /// @see gtc_type_precision + typedef mat<4, 2, f32, lowp> lowp_fmat4x2; + + /// Low single-qualifier floating-point 4x3 matrix. + /// @see gtc_type_precision + typedef mat<4, 3, f32, lowp> lowp_fmat4x3; + + /// Low single-qualifier floating-point 4x4 matrix. 
+ /// @see gtc_type_precision + typedef mat<4, 4, f32, lowp> lowp_fmat4x4; + + /// Low single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef lowp_fmat1x1 lowp_fmat1; + + /// Low single-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef lowp_fmat2x2 lowp_fmat2; + + /// Low single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef lowp_fmat3x3 lowp_fmat3; + + /// Low single-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef lowp_fmat4x4 lowp_fmat4; + + + /// Medium single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef mediump_f32 mediump_fmat1x1; + + /// Medium single-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mat<2, 2, f32, mediump> mediump_fmat2x2; + + /// Medium single-qualifier floating-point 2x3 matrix. + /// @see gtc_type_precision + typedef mat<2, 3, f32, mediump> mediump_fmat2x3; + + /// Medium single-qualifier floating-point 2x4 matrix. + /// @see gtc_type_precision + typedef mat<2, 4, f32, mediump> mediump_fmat2x4; + + /// Medium single-qualifier floating-point 3x2 matrix. + /// @see gtc_type_precision + typedef mat<3, 2, f32, mediump> mediump_fmat3x2; + + /// Medium single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mat<3, 3, f32, mediump> mediump_fmat3x3; + + /// Medium single-qualifier floating-point 3x4 matrix. + /// @see gtc_type_precision + typedef mat<3, 4, f32, mediump> mediump_fmat3x4; + + /// Medium single-qualifier floating-point 4x2 matrix. + /// @see gtc_type_precision + typedef mat<4, 2, f32, mediump> mediump_fmat4x2; + + /// Medium single-qualifier floating-point 4x3 matrix. + /// @see gtc_type_precision + typedef mat<4, 3, f32, mediump> mediump_fmat4x3; + + /// Medium single-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mat<4, 4, f32, mediump> mediump_fmat4x4; + + /// Medium single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef mediump_fmat1x1 mediump_fmat1; + + /// Medium single-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mediump_fmat2x2 mediump_fmat2; + + /// Medium single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mediump_fmat3x3 mediump_fmat3; + + /// Medium single-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mediump_fmat4x4 mediump_fmat4; + + + /// High single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef highp_f32 highp_fmat1x1; + + /// High single-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mat<2, 2, f32, highp> highp_fmat2x2; + + /// High single-qualifier floating-point 2x3 matrix. + /// @see gtc_type_precision + typedef mat<2, 3, f32, highp> highp_fmat2x3; + + /// High single-qualifier floating-point 2x4 matrix. + /// @see gtc_type_precision + typedef mat<2, 4, f32, highp> highp_fmat2x4; + + /// High single-qualifier floating-point 3x2 matrix. + /// @see gtc_type_precision + typedef mat<3, 2, f32, highp> highp_fmat3x2; + + /// High single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mat<3, 3, f32, highp> highp_fmat3x3; + + /// High single-qualifier floating-point 3x4 matrix. + /// @see gtc_type_precision + typedef mat<3, 4, f32, highp> highp_fmat3x4; + + /// High single-qualifier floating-point 4x2 matrix. 
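// ------------------------------------------------------------------------
// [Editor's sketch, not part of the vendored header] Every lowp_/mediump_/
// highp_ fmat variant above uses the same f32 scalar; the qualifier exists
// for API symmetry with GLSL and does not change the scalar type on the
// C++ side. Types sharing one qualifier combine as usual:
#include <glm/gtc/type_precision.hpp>

glm::mediump_fvec4 transform(glm::mediump_fmat4 const& m,
                             glm::mediump_fvec4 const& v)
{
	return m * v; // ordinary matrix * vector product
}
// ------------------------------------------------------------------------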
+ /// @see gtc_type_precision + typedef mat<4, 2, f32, highp> highp_fmat4x2; + + /// High single-qualifier floating-point 4x3 matrix. + /// @see gtc_type_precision + typedef mat<4, 3, f32, highp> highp_fmat4x3; + + /// High single-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mat<4, 4, f32, highp> highp_fmat4x4; + + /// High single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef highp_fmat1x1 highp_fmat1; + + /// High single-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef highp_fmat2x2 highp_fmat2; + + /// High single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef highp_fmat3x3 highp_fmat3; + + /// High single-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef highp_fmat4x4 highp_fmat4; + + + /// Low single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef f32 lowp_f32mat1x1; + + /// Low single-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mat<2, 2, f32, lowp> lowp_f32mat2x2; + + /// Low single-qualifier floating-point 2x3 matrix. + /// @see gtc_type_precision + typedef mat<2, 3, f32, lowp> lowp_f32mat2x3; + + /// Low single-qualifier floating-point 2x4 matrix. + /// @see gtc_type_precision + typedef mat<2, 4, f32, lowp> lowp_f32mat2x4; + + /// Low single-qualifier floating-point 3x2 matrix. + /// @see gtc_type_precision + typedef mat<3, 2, f32, lowp> lowp_f32mat3x2; + + /// Low single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mat<3, 3, f32, lowp> lowp_f32mat3x3; + + /// Low single-qualifier floating-point 3x4 matrix. + /// @see gtc_type_precision + typedef mat<3, 4, f32, lowp> lowp_f32mat3x4; + + /// Low single-qualifier floating-point 4x2 matrix. + /// @see gtc_type_precision + typedef mat<4, 2, f32, lowp> lowp_f32mat4x2; + + /// Low single-qualifier floating-point 4x3 matrix. + /// @see gtc_type_precision + typedef mat<4, 3, f32, lowp> lowp_f32mat4x3; + + /// Low single-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mat<4, 4, f32, lowp> lowp_f32mat4x4; + + /// Low single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef detail::tmat1x1 lowp_f32mat1; + + /// Low single-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef lowp_f32mat2x2 lowp_f32mat2; + + /// Low single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef lowp_f32mat3x3 lowp_f32mat3; + + /// Low single-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef lowp_f32mat4x4 lowp_f32mat4; + + + /// High single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef f32 mediump_f32mat1x1; + + /// Low single-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mat<2, 2, f32, mediump> mediump_f32mat2x2; + + /// Medium single-qualifier floating-point 2x3 matrix. + /// @see gtc_type_precision + typedef mat<2, 3, f32, mediump> mediump_f32mat2x3; + + /// Medium single-qualifier floating-point 2x4 matrix. + /// @see gtc_type_precision + typedef mat<2, 4, f32, mediump> mediump_f32mat2x4; + + /// Medium single-qualifier floating-point 3x2 matrix. + /// @see gtc_type_precision + typedef mat<3, 2, f32, mediump> mediump_f32mat3x2; + + /// Medium single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mat<3, 3, f32, mediump> mediump_f32mat3x3; + + /// Medium single-qualifier floating-point 3x4 matrix. 
+ /// @see gtc_type_precision + typedef mat<3, 4, f32, mediump> mediump_f32mat3x4; + + /// Medium single-qualifier floating-point 4x2 matrix. + /// @see gtc_type_precision + typedef mat<4, 2, f32, mediump> mediump_f32mat4x2; + + /// Medium single-qualifier floating-point 4x3 matrix. + /// @see gtc_type_precision + typedef mat<4, 3, f32, mediump> mediump_f32mat4x3; + + /// Medium single-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mat<4, 4, f32, mediump> mediump_f32mat4x4; + + /// Medium single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef detail::tmat1x1 f32mat1; + + /// Medium single-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mediump_f32mat2x2 mediump_f32mat2; + + /// Medium single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mediump_f32mat3x3 mediump_f32mat3; + + /// Medium single-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mediump_f32mat4x4 mediump_f32mat4; + + + /// High single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef f32 highp_f32mat1x1; + + /// High single-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mat<2, 2, f32, highp> highp_f32mat2x2; + + /// High single-qualifier floating-point 2x3 matrix. + /// @see gtc_type_precision + typedef mat<2, 3, f32, highp> highp_f32mat2x3; + + /// High single-qualifier floating-point 2x4 matrix. + /// @see gtc_type_precision + typedef mat<2, 4, f32, highp> highp_f32mat2x4; + + /// High single-qualifier floating-point 3x2 matrix. + /// @see gtc_type_precision + typedef mat<3, 2, f32, highp> highp_f32mat3x2; + + /// High single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mat<3, 3, f32, highp> highp_f32mat3x3; + + /// High single-qualifier floating-point 3x4 matrix. + /// @see gtc_type_precision + typedef mat<3, 4, f32, highp> highp_f32mat3x4; + + /// High single-qualifier floating-point 4x2 matrix. + /// @see gtc_type_precision + typedef mat<4, 2, f32, highp> highp_f32mat4x2; + + /// High single-qualifier floating-point 4x3 matrix. + /// @see gtc_type_precision + typedef mat<4, 3, f32, highp> highp_f32mat4x3; + + /// High single-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mat<4, 4, f32, highp> highp_f32mat4x4; + + /// High single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef detail::tmat1x1 f32mat1; + + /// High single-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef highp_f32mat2x2 highp_f32mat2; + + /// High single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef highp_f32mat3x3 highp_f32mat3; + + /// High single-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef highp_f32mat4x4 highp_f32mat4; + + + /// Low double-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef f64 lowp_f64mat1x1; + + /// Low double-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mat<2, 2, f64, lowp> lowp_f64mat2x2; + + /// Low double-qualifier floating-point 2x3 matrix. + /// @see gtc_type_precision + typedef mat<2, 3, f64, lowp> lowp_f64mat2x3; + + /// Low double-qualifier floating-point 2x4 matrix. + /// @see gtc_type_precision + typedef mat<2, 4, f64, lowp> lowp_f64mat2x4; + + /// Low double-qualifier floating-point 3x2 matrix. 
+ /// @see gtc_type_precision + typedef mat<3, 2, f64, lowp> lowp_f64mat3x2; + + /// Low double-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mat<3, 3, f64, lowp> lowp_f64mat3x3; + + /// Low double-qualifier floating-point 3x4 matrix. + /// @see gtc_type_precision + typedef mat<3, 4, f64, lowp> lowp_f64mat3x4; + + /// Low double-qualifier floating-point 4x2 matrix. + /// @see gtc_type_precision + typedef mat<4, 2, f64, lowp> lowp_f64mat4x2; + + /// Low double-qualifier floating-point 4x3 matrix. + /// @see gtc_type_precision + typedef mat<4, 3, f64, lowp> lowp_f64mat4x3; + + /// Low double-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mat<4, 4, f64, lowp> lowp_f64mat4x4; + + /// Low double-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef lowp_f64mat1x1 lowp_f64mat1; + + /// Low double-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef lowp_f64mat2x2 lowp_f64mat2; + + /// Low double-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef lowp_f64mat3x3 lowp_f64mat3; + + /// Low double-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef lowp_f64mat4x4 lowp_f64mat4; + + + /// Medium double-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef f64 Highp_f64mat1x1; + + /// Medium double-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mat<2, 2, f64, mediump> mediump_f64mat2x2; + + /// Medium double-qualifier floating-point 2x3 matrix. + /// @see gtc_type_precision + typedef mat<2, 3, f64, mediump> mediump_f64mat2x3; + + /// Medium double-qualifier floating-point 2x4 matrix. + /// @see gtc_type_precision + typedef mat<2, 4, f64, mediump> mediump_f64mat2x4; + + /// Medium double-qualifier floating-point 3x2 matrix. + /// @see gtc_type_precision + typedef mat<3, 2, f64, mediump> mediump_f64mat3x2; + + /// Medium double-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mat<3, 3, f64, mediump> mediump_f64mat3x3; + + /// Medium double-qualifier floating-point 3x4 matrix. + /// @see gtc_type_precision + typedef mat<3, 4, f64, mediump> mediump_f64mat3x4; + + /// Medium double-qualifier floating-point 4x2 matrix. + /// @see gtc_type_precision + typedef mat<4, 2, f64, mediump> mediump_f64mat4x2; + + /// Medium double-qualifier floating-point 4x3 matrix. + /// @see gtc_type_precision + typedef mat<4, 3, f64, mediump> mediump_f64mat4x3; + + /// Medium double-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mat<4, 4, f64, mediump> mediump_f64mat4x4; + + /// Medium double-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef mediump_f64mat1x1 mediump_f64mat1; + + /// Medium double-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mediump_f64mat2x2 mediump_f64mat2; + + /// Medium double-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mediump_f64mat3x3 mediump_f64mat3; + + /// Medium double-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mediump_f64mat4x4 mediump_f64mat4; + + /// High double-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef f64 highp_f64mat1x1; + + /// High double-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mat<2, 2, f64, highp> highp_f64mat2x2; + + /// High double-qualifier floating-point 2x3 matrix. 
+ /// @see gtc_type_precision + typedef mat<2, 3, f64, highp> highp_f64mat2x3; + + /// High double-qualifier floating-point 2x4 matrix. + /// @see gtc_type_precision + typedef mat<2, 4, f64, highp> highp_f64mat2x4; + + /// High double-qualifier floating-point 3x2 matrix. + /// @see gtc_type_precision + typedef mat<3, 2, f64, highp> highp_f64mat3x2; + + /// High double-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mat<3, 3, f64, highp> highp_f64mat3x3; + + /// High double-qualifier floating-point 3x4 matrix. + /// @see gtc_type_precision + typedef mat<3, 4, f64, highp> highp_f64mat3x4; + + /// High double-qualifier floating-point 4x2 matrix. + /// @see gtc_type_precision + typedef mat<4, 2, f64, highp> highp_f64mat4x2; + + /// High double-qualifier floating-point 4x3 matrix. + /// @see gtc_type_precision + typedef mat<4, 3, f64, highp> highp_f64mat4x3; + + /// High double-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mat<4, 4, f64, highp> highp_f64mat4x4; + + /// High double-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef highp_f64mat1x1 highp_f64mat1; + + /// High double-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef highp_f64mat2x2 highp_f64mat2; + + /// High double-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef highp_f64mat3x3 highp_f64mat3; + + /// High double-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef highp_f64mat4x4 highp_f64mat4; + + + + + /// Low qualifier 8 bit unsigned integer scalar type. + /// @see gtc_type_precision + typedef vec<1, u8, lowp> lowp_u8vec1; + + /// Low qualifier 8 bit unsigned integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, u8, lowp> lowp_u8vec2; + + /// Low qualifier 8 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u8, lowp> lowp_u8vec3; + + /// Low qualifier 8 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u8, lowp> lowp_u8vec4; + + + /// Medium qualifier 8 bit unsigned integer scalar type. + /// @see gtc_type_precision + typedef vec<1, u8, mediump> mediump_u8vec1; + + /// Medium qualifier 8 bit unsigned integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, u8, mediump> mediump_u8vec2; + + /// Medium qualifier 8 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u8, mediump> mediump_u8vec3; + + /// Medium qualifier 8 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u8, mediump> mediump_u8vec4; + + + /// High qualifier 8 bit unsigned integer scalar type. + /// @see gtc_type_precision + typedef vec<1, u8, highp> highp_u8vec1; + + /// High qualifier 8 bit unsigned integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, u8, highp> highp_u8vec2; + + /// High qualifier 8 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u8, highp> highp_u8vec3; + + /// High qualifier 8 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u8, highp> highp_u8vec4; + + + + /// Default qualifier 8 bit unsigned integer scalar type. + /// @see gtc_type_precision + typedef vec<1, u8, defaultp> u8vec1; + + /// Default qualifier 8 bit unsigned integer vector of 2 components type. 
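As a quick illustration of these sized unsigned integer vectors, here is a minimal sketch of packing a normalized RGBA color into 8-bit components with `glm::u8vec4`; `packColor` is a hypothetical helper, not part of GLM or MacroLibX:

```cpp
#include <glm/gtc/type_precision.hpp>

// Scale normalized floats into the 0..255 range of u8 and store them in a
// compact 4-byte vector (useful for vertex color attributes).
glm::u8vec4 packColor(float r, float g, float b, float a)
{
	return glm::u8vec4(r * 255.0f, g * 255.0f, b * 255.0f, a * 255.0f);
}
```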
+ /// @see gtc_type_precision + typedef vec<2, u8, defaultp> u8vec2; + + /// Default qualifier 8 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u8, defaultp> u8vec3; + + /// Default qualifier 8 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u8, defaultp> u8vec4; + + + + + /// Low qualifier 16 bit unsigned integer scalar type. + /// @see gtc_type_precision + typedef vec<1, u16, lowp> lowp_u16vec1; + + /// Low qualifier 16 bit unsigned integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, u16, lowp> lowp_u16vec2; + + /// Low qualifier 16 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u16, lowp> lowp_u16vec3; + + /// Low qualifier 16 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u16, lowp> lowp_u16vec4; + + + /// Medium qualifier 16 bit unsigned integer scalar type. + /// @see gtc_type_precision + typedef vec<1, u16, mediump> mediump_u16vec1; + + /// Medium qualifier 16 bit unsigned integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, u16, mediump> mediump_u16vec2; + + /// Medium qualifier 16 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u16, mediump> mediump_u16vec3; + + /// Medium qualifier 16 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u16, mediump> mediump_u16vec4; + + + /// High qualifier 16 bit unsigned integer scalar type. + /// @see gtc_type_precision + typedef vec<1, u16, highp> highp_u16vec1; + + /// High qualifier 16 bit unsigned integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, u16, highp> highp_u16vec2; + + /// High qualifier 16 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u16, highp> highp_u16vec3; + + /// High qualifier 16 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u16, highp> highp_u16vec4; + + + + + /// Default qualifier 16 bit unsigned integer scalar type. + /// @see gtc_type_precision + typedef vec<1, u16, defaultp> u16vec1; + + /// Default qualifier 16 bit unsigned integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, u16, defaultp> u16vec2; + + /// Default qualifier 16 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u16, defaultp> u16vec3; + + /// Default qualifier 16 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u16, defaultp> u16vec4; + + + + /// Low qualifier 32 bit unsigned integer scalar type. + /// @see gtc_type_precision + typedef vec<1, u32, lowp> lowp_u32vec1; + + /// Low qualifier 32 bit unsigned integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, u32, lowp> lowp_u32vec2; + + /// Low qualifier 32 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u32, lowp> lowp_u32vec3; + + /// Low qualifier 32 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u32, lowp> lowp_u32vec4; + + + /// Medium qualifier 32 bit unsigned integer scalar type. + /// @see gtc_type_precision + typedef vec<1, u32, mediump> mediump_u32vec1; + + /// Medium qualifier 32 bit unsigned integer vector of 2 components type. 
+ /// @see gtc_type_precision + typedef vec<2, u32, mediump> mediump_u32vec2; + + /// Medium qualifier 32 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u32, mediump> mediump_u32vec3; + + /// Medium qualifier 32 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u32, mediump> mediump_u32vec4; + + + /// High qualifier 32 bit unsigned integer scalar type. + /// @see gtc_type_precision + typedef vec<1, u32, highp> highp_u32vec1; + + /// High qualifier 32 bit unsigned integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, u32, highp> highp_u32vec2; + + /// High qualifier 32 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u32, highp> highp_u32vec3; + + /// High qualifier 32 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u32, highp> highp_u32vec4; + + + + /// Default qualifier 32 bit unsigned integer scalar type. + /// @see gtc_type_precision + typedef vec<1, u32, defaultp> u32vec1; + + /// Default qualifier 32 bit unsigned integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, u32, defaultp> u32vec2; + + /// Default qualifier 32 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u32, defaultp> u32vec3; + + /// Default qualifier 32 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u32, defaultp> u32vec4; + + + + + /// Low qualifier 64 bit unsigned integer scalar type. + /// @see gtc_type_precision + typedef vec<1, u64, lowp> lowp_u64vec1; + + /// Low qualifier 64 bit unsigned integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, u64, lowp> lowp_u64vec2; + + /// Low qualifier 64 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u64, lowp> lowp_u64vec3; + + /// Low qualifier 64 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u64, lowp> lowp_u64vec4; + + + /// Medium qualifier 64 bit unsigned integer scalar type. + /// @see gtc_type_precision + typedef vec<1, u64, mediump> mediump_u64vec1; + + /// Medium qualifier 64 bit unsigned integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, u64, mediump> mediump_u64vec2; + + /// Medium qualifier 64 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u64, mediump> mediump_u64vec3; + + /// Medium qualifier 64 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u64, mediump> mediump_u64vec4; + + + /// High qualifier 64 bit unsigned integer scalar type. + /// @see gtc_type_precision + typedef vec<1, u64, highp> highp_u64vec1; + + /// High qualifier 64 bit unsigned integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, u64, highp> highp_u64vec2; + + /// High qualifier 64 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u64, highp> highp_u64vec3; + + /// High qualifier 64 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u64, highp> highp_u64vec4; + + + + + /// Default qualifier 64 bit unsigned integer scalar type. 
+ /// @see gtc_type_precision + typedef vec<1, u64, defaultp> u64vec1; + + /// Default qualifier 64 bit unsigned integer vector of 2 components type. + /// @see gtc_type_precision + typedef vec<2, u64, defaultp> u64vec2; + + /// Default qualifier 64 bit unsigned integer vector of 3 components type. + /// @see gtc_type_precision + typedef vec<3, u64, defaultp> u64vec3; + + /// Default qualifier 64 bit unsigned integer vector of 4 components type. + /// @see gtc_type_precision + typedef vec<4, u64, defaultp> u64vec4; + + + ////////////////////// + // Float vector types + + /// 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 float32_t; + + /// 32 bit single-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float32 f32; + +# ifndef GLM_FORCE_SINGLE_ONLY + + /// 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 float64_t; + + /// 64 bit double-qualifier floating-point scalar. + /// @see gtc_type_precision + typedef float64 f64; +# endif//GLM_FORCE_SINGLE_ONLY + + /// Single-qualifier floating-point vector of 1 component. + /// @see gtc_type_precision + typedef vec<1, float, defaultp> fvec1; + + /// Single-qualifier floating-point vector of 2 components. + /// @see gtc_type_precision + typedef vec<2, float, defaultp> fvec2; + + /// Single-qualifier floating-point vector of 3 components. + /// @see gtc_type_precision + typedef vec<3, float, defaultp> fvec3; + + /// Single-qualifier floating-point vector of 4 components. + /// @see gtc_type_precision + typedef vec<4, float, defaultp> fvec4; + + + /// Single-qualifier floating-point vector of 1 component. + /// @see gtc_type_precision + typedef vec<1, f32, defaultp> f32vec1; + + /// Single-qualifier floating-point vector of 2 components. + /// @see gtc_type_precision + typedef vec<2, f32, defaultp> f32vec2; + + /// Single-qualifier floating-point vector of 3 components. + /// @see gtc_type_precision + typedef vec<3, f32, defaultp> f32vec3; + + /// Single-qualifier floating-point vector of 4 components. + /// @see gtc_type_precision + typedef vec<4, f32, defaultp> f32vec4; + +# ifndef GLM_FORCE_SINGLE_ONLY + /// Double-qualifier floating-point vector of 1 component. + /// @see gtc_type_precision + typedef vec<1, f64, defaultp> f64vec1; + + /// Double-qualifier floating-point vector of 2 components. + /// @see gtc_type_precision + typedef vec<2, f64, defaultp> f64vec2; + + /// Double-qualifier floating-point vector of 3 components. + /// @see gtc_type_precision + typedef vec<3, f64, defaultp> f64vec3; + + /// Double-qualifier floating-point vector of 4 components. + /// @see gtc_type_precision + typedef vec<4, f64, defaultp> f64vec4; +# endif//GLM_FORCE_SINGLE_ONLY + + + ////////////////////// + // Float matrix types + + /// Single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef detail::tmat1x1 fmat1; + + /// Single-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mat<2, 2, f32, defaultp> fmat2; + + /// Single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mat<3, 3, f32, defaultp> fmat3; + + /// Single-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mat<4, 4, f32, defaultp> fmat4; + + + /// Single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef f32 fmat1x1; + + /// Single-qualifier floating-point 2x2 matrix. 
+ /// @see gtc_type_precision + typedef mat<2, 2, f32, defaultp> fmat2x2; + + /// Single-qualifier floating-point 2x3 matrix. + /// @see gtc_type_precision + typedef mat<2, 3, f32, defaultp> fmat2x3; + + /// Single-qualifier floating-point 2x4 matrix. + /// @see gtc_type_precision + typedef mat<2, 4, f32, defaultp> fmat2x4; + + /// Single-qualifier floating-point 3x2 matrix. + /// @see gtc_type_precision + typedef mat<3, 2, f32, defaultp> fmat3x2; + + /// Single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mat<3, 3, f32, defaultp> fmat3x3; + + /// Single-qualifier floating-point 3x4 matrix. + /// @see gtc_type_precision + typedef mat<3, 4, f32, defaultp> fmat3x4; + + /// Single-qualifier floating-point 4x2 matrix. + /// @see gtc_type_precision + typedef mat<4, 2, f32, defaultp> fmat4x2; + + /// Single-qualifier floating-point 4x3 matrix. + /// @see gtc_type_precision + typedef mat<4, 3, f32, defaultp> fmat4x3; + + /// Single-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mat<4, 4, f32, defaultp> fmat4x4; + + + /// Single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef detail::tmat1x1 f32mat1; + + /// Single-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mat<2, 2, f32, defaultp> f32mat2; + + /// Single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mat<3, 3, f32, defaultp> f32mat3; + + /// Single-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mat<4, 4, f32, defaultp> f32mat4; + + + /// Single-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef f32 f32mat1x1; + + /// Single-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mat<2, 2, f32, defaultp> f32mat2x2; + + /// Single-qualifier floating-point 2x3 matrix. + /// @see gtc_type_precision + typedef mat<2, 3, f32, defaultp> f32mat2x3; + + /// Single-qualifier floating-point 2x4 matrix. + /// @see gtc_type_precision + typedef mat<2, 4, f32, defaultp> f32mat2x4; + + /// Single-qualifier floating-point 3x2 matrix. + /// @see gtc_type_precision + typedef mat<3, 2, f32, defaultp> f32mat3x2; + + /// Single-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mat<3, 3, f32, defaultp> f32mat3x3; + + /// Single-qualifier floating-point 3x4 matrix. + /// @see gtc_type_precision + typedef mat<3, 4, f32, defaultp> f32mat3x4; + + /// Single-qualifier floating-point 4x2 matrix. + /// @see gtc_type_precision + typedef mat<4, 2, f32, defaultp> f32mat4x2; + + /// Single-qualifier floating-point 4x3 matrix. + /// @see gtc_type_precision + typedef mat<4, 3, f32, defaultp> f32mat4x3; + + /// Single-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mat<4, 4, f32, defaultp> f32mat4x4; + + +# ifndef GLM_FORCE_SINGLE_ONLY + + /// Double-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef detail::tmat1x1 f64mat1; + + /// Double-qualifier floating-point 2x2 matrix. + /// @see gtc_type_precision + typedef mat<2, 2, f64, defaultp> f64mat2; + + /// Double-qualifier floating-point 3x3 matrix. + /// @see gtc_type_precision + typedef mat<3, 3, f64, defaultp> f64mat3; + + /// Double-qualifier floating-point 4x4 matrix. + /// @see gtc_type_precision + typedef mat<4, 4, f64, defaultp> f64mat4; + + + /// Double-qualifier floating-point 1x1 matrix. + /// @see gtc_type_precision + //typedef f64 f64mat1x1; + + /// Double-qualifier floating-point 2x2 matrix. 
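These default-qualifier typedefs are plain aliases of the core `mat` template, so they interoperate freely with the unqualified types. A small compile-time sanity check, assuming the usual densely packed 32-bit float layout:

```cpp
#include <glm/gtc/type_precision.hpp>
#include <type_traits>

// f32mat4 and fmat4 both alias mat<4, 4, f32, defaultp>, so they name the
// same type; a 4x4 single-precision matrix is 16 contiguous floats.
static_assert(std::is_same<glm::f32mat4, glm::fmat4>::value, "same alias");
static_assert(sizeof(glm::f32mat4) == 16 * sizeof(glm::f32), "densely packed");
```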
+	/// @see gtc_type_precision
+	typedef mat<2, 2, f64, defaultp> f64mat2x2;
+
+	/// Double-qualifier floating-point 2x3 matrix.
+	/// @see gtc_type_precision
+	typedef mat<2, 3, f64, defaultp> f64mat2x3;
+
+	/// Double-qualifier floating-point 2x4 matrix.
+	/// @see gtc_type_precision
+	typedef mat<2, 4, f64, defaultp> f64mat2x4;
+
+	/// Double-qualifier floating-point 3x2 matrix.
+	/// @see gtc_type_precision
+	typedef mat<3, 2, f64, defaultp> f64mat3x2;
+
+	/// Double-qualifier floating-point 3x3 matrix.
+	/// @see gtc_type_precision
+	typedef mat<3, 3, f64, defaultp> f64mat3x3;
+
+	/// Double-qualifier floating-point 3x4 matrix.
+	/// @see gtc_type_precision
+	typedef mat<3, 4, f64, defaultp> f64mat3x4;
+
+	/// Double-qualifier floating-point 4x2 matrix.
+	/// @see gtc_type_precision
+	typedef mat<4, 2, f64, defaultp> f64mat4x2;
+
+	/// Double-qualifier floating-point 4x3 matrix.
+	/// @see gtc_type_precision
+	typedef mat<4, 3, f64, defaultp> f64mat4x3;
+
+	/// Double-qualifier floating-point 4x4 matrix.
+	/// @see gtc_type_precision
+	typedef mat<4, 4, f64, defaultp> f64mat4x4;
+
+# endif//GLM_FORCE_SINGLE_ONLY
+
+	//////////////////////////
+	// Quaternion types
+
+	/// Single-qualifier floating-point quaternion.
+	/// @see gtc_type_precision
+	typedef qua<f32, defaultp> f32quat;
+
+	/// Low single-qualifier floating-point quaternion.
+	/// @see gtc_type_precision
+	typedef qua<f32, lowp> lowp_f32quat;
+
+	/// Low double-qualifier floating-point quaternion.
+	/// @see gtc_type_precision
+	typedef qua<f64, lowp> lowp_f64quat;
+
+	/// Medium single-qualifier floating-point quaternion.
+	/// @see gtc_type_precision
+	typedef qua<f32, mediump> mediump_f32quat;
+
+# ifndef GLM_FORCE_SINGLE_ONLY
+
+	/// Medium double-qualifier floating-point quaternion.
+	/// @see gtc_type_precision
+	typedef qua<f64, mediump> mediump_f64quat;
+
+	/// High single-qualifier floating-point quaternion.
+	/// @see gtc_type_precision
+	typedef qua<f32, highp> highp_f32quat;
+
+	/// High double-qualifier floating-point quaternion.
+	/// @see gtc_type_precision
+	typedef qua<f64, highp> highp_f64quat;
+
+	/// Double-qualifier floating-point quaternion.
+	/// @see gtc_type_precision
+	typedef qua<f64, defaultp> f64quat;
+
+# endif//GLM_FORCE_SINGLE_ONLY
+
+	/// @}
+}//namespace glm
+
+#include "type_precision.inl"
diff --git a/MacroLibX/third_party/glm/gtc/type_precision.inl b/MacroLibX/third_party/glm/gtc/type_precision.inl
new file mode 100644
index 0000000..ae80912
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtc/type_precision.inl
@@ -0,0 +1,6 @@
+/// @ref gtc_precision
+
+namespace glm
+{
+
+}
diff --git a/MacroLibX/third_party/glm/gtc/type_ptr.hpp b/MacroLibX/third_party/glm/gtc/type_ptr.hpp
new file mode 100644
index 0000000..d7e625a
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtc/type_ptr.hpp
@@ -0,0 +1,230 @@
+/// @ref gtc_type_ptr
+/// @file glm/gtc/type_ptr.hpp
+///
+/// @see core (dependence)
+/// @see gtc_quaternion (dependence)
+///
+/// @defgroup gtc_type_ptr GLM_GTC_type_ptr
+/// @ingroup gtc
+///
+/// Include <glm/gtc/type_ptr.hpp> to use the features of this extension.
+///
+/// Handles the interaction between pointers and vector, matrix types.
+///
+/// This extension defines an overloaded function, glm::value_ptr. It returns
+/// a pointer to the memory layout of the object. Matrix types store their values
+/// in column-major order.
+///
+/// This is useful for uploading data to matrices or copying data to buffer objects.
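The Example in the comment block that follows sketches exactly this upload path. As a compilable variant, assuming an OpenGL loader header (e.g. glad) is already included and both uniform locations were previously queried with `glGetUniformLocation`:

```cpp
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>

void uploadUniforms(GLint vectorLoc, GLint matrixLoc)
{
	glm::vec3 aVector(3.0f);
	glm::mat4 someMatrix(1.0f);

	// value_ptr yields a pointer to the first component; the matrix data is
	// column-major, so GL_FALSE is passed for the transpose parameter.
	glUniform3fv(vectorLoc, 1, glm::value_ptr(aVector));
	glUniformMatrix4fv(matrixLoc, 1, GL_FALSE, glm::value_ptr(someMatrix));
}
```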
+/// +/// Example: +/// @code +/// #include +/// #include +/// +/// glm::vec3 aVector(3); +/// glm::mat4 someMatrix(1.0); +/// +/// glUniform3fv(uniformLoc, 1, glm::value_ptr(aVector)); +/// glUniformMatrix4fv(uniformMatrixLoc, 1, GL_FALSE, glm::value_ptr(someMatrix)); +/// @endcode +/// +/// need to be included to use the features of this extension. + +#pragma once + +// Dependency: +#include "../gtc/quaternion.hpp" +#include "../gtc/vec1.hpp" +#include "../vec2.hpp" +#include "../vec3.hpp" +#include "../vec4.hpp" +#include "../mat2x2.hpp" +#include "../mat2x3.hpp" +#include "../mat2x4.hpp" +#include "../mat3x2.hpp" +#include "../mat3x3.hpp" +#include "../mat3x4.hpp" +#include "../mat4x2.hpp" +#include "../mat4x3.hpp" +#include "../mat4x4.hpp" +#include + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_GTC_type_ptr extension included") +#endif + +namespace glm +{ + /// @addtogroup gtc_type_ptr + /// @{ + + /// Return the constant address to the data of the input parameter. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL typename genType::value_type const * value_ptr(genType const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<1, T, Q> make_vec1(vec<1, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<1, T, Q> make_vec1(vec<2, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<1, T, Q> make_vec1(vec<3, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<1, T, Q> make_vec1(vec<4, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<2, T, Q> make_vec2(vec<1, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<2, T, Q> make_vec2(vec<2, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<2, T, Q> make_vec2(vec<3, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<2, T, Q> make_vec2(vec<4, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<3, T, Q> make_vec3(vec<1, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<3, T, Q> make_vec3(vec<2, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<3, T, Q> make_vec3(vec<3, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<3, T, Q> make_vec3(vec<4, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<4, T, Q> make_vec4(vec<1, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<4, T, Q> make_vec4(vec<2, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<4, T, Q> make_vec4(vec<3, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<4, T, Q> make_vec4(vec<4, T, Q> const& v); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<2, T, defaultp> make_vec2(T const * const ptr); + + /// Build a vector from a pointer. 
+ /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<3, T, defaultp> make_vec3(T const * const ptr); + + /// Build a vector from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL vec<4, T, defaultp> make_vec4(T const * const ptr); + + /// Build a matrix from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL mat<2, 2, T, defaultp> make_mat2x2(T const * const ptr); + + /// Build a matrix from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL mat<2, 3, T, defaultp> make_mat2x3(T const * const ptr); + + /// Build a matrix from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL mat<2, 4, T, defaultp> make_mat2x4(T const * const ptr); + + /// Build a matrix from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL mat<3, 2, T, defaultp> make_mat3x2(T const * const ptr); + + /// Build a matrix from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL mat<3, 3, T, defaultp> make_mat3x3(T const * const ptr); + + /// Build a matrix from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL mat<3, 4, T, defaultp> make_mat3x4(T const * const ptr); + + /// Build a matrix from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL mat<4, 2, T, defaultp> make_mat4x2(T const * const ptr); + + /// Build a matrix from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL mat<4, 3, T, defaultp> make_mat4x3(T const * const ptr); + + /// Build a matrix from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> make_mat4x4(T const * const ptr); + + /// Build a matrix from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL mat<2, 2, T, defaultp> make_mat2(T const * const ptr); + + /// Build a matrix from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL mat<3, 3, T, defaultp> make_mat3(T const * const ptr); + + /// Build a matrix from a pointer. + /// @see gtc_type_ptr + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> make_mat4(T const * const ptr); + + /// Build a quaternion from a pointer. 
+ /// @see gtc_type_ptr + template + GLM_FUNC_DECL qua make_quat(T const * const ptr); + + /// @} +}//namespace glm + +#include "type_ptr.inl" diff --git a/MacroLibX/third_party/glm/gtc/type_ptr.inl b/MacroLibX/third_party/glm/gtc/type_ptr.inl new file mode 100644 index 0000000..71df4d3 --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/type_ptr.inl @@ -0,0 +1,386 @@ +/// @ref gtc_type_ptr + +#include + +namespace glm +{ + /// @addtogroup gtc_type_ptr + /// @{ + + template + GLM_FUNC_QUALIFIER T const* value_ptr(vec<2, T, Q> const& v) + { + return &(v.x); + } + + template + GLM_FUNC_QUALIFIER T* value_ptr(vec<2, T, Q>& v) + { + return &(v.x); + } + + template + GLM_FUNC_QUALIFIER T const * value_ptr(vec<3, T, Q> const& v) + { + return &(v.x); + } + + template + GLM_FUNC_QUALIFIER T* value_ptr(vec<3, T, Q>& v) + { + return &(v.x); + } + + template + GLM_FUNC_QUALIFIER T const* value_ptr(vec<4, T, Q> const& v) + { + return &(v.x); + } + + template + GLM_FUNC_QUALIFIER T* value_ptr(vec<4, T, Q>& v) + { + return &(v.x); + } + + template + GLM_FUNC_QUALIFIER T const* value_ptr(mat<2, 2, T, Q> const& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T* value_ptr(mat<2, 2, T, Q>& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T const* value_ptr(mat<3, 3, T, Q> const& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T* value_ptr(mat<3, 3, T, Q>& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T const* value_ptr(mat<4, 4, T, Q> const& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T* value_ptr(mat<4, 4, T, Q>& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T const* value_ptr(mat<2, 3, T, Q> const& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T* value_ptr(mat<2, 3, T, Q>& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T const* value_ptr(mat<3, 2, T, Q> const& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T* value_ptr(mat<3, 2, T, Q>& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T const* value_ptr(mat<2, 4, T, Q> const& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T* value_ptr(mat<2, 4, T, Q>& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T const* value_ptr(mat<4, 2, T, Q> const& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T* value_ptr(mat<4, 2, T, Q>& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T const* value_ptr(mat<3, 4, T, Q> const& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T* value_ptr(mat<3, 4, T, Q>& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T const* value_ptr(mat<4, 3, T, Q> const& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T * value_ptr(mat<4, 3, T, Q>& m) + { + return &(m[0].x); + } + + template + GLM_FUNC_QUALIFIER T const * value_ptr(qua const& q) + { + return &(q[0]); + } + + template + GLM_FUNC_QUALIFIER T* value_ptr(qua& q) + { + return &(q[0]); + } + + template + inline vec<1, T, Q> make_vec1(vec<1, T, Q> const& v) + { + return v; + } + + template + inline vec<1, T, Q> make_vec1(vec<2, T, Q> const& v) + { + return vec<1, T, Q>(v); + } + + template + inline vec<1, T, Q> make_vec1(vec<3, T, Q> const& v) + { + return vec<1, T, Q>(v); + } + + template + inline vec<1, T, Q> make_vec1(vec<4, T, Q> const& v) + { + return vec<1, T, Q>(v); + } + + template + inline vec<2, T, Q> make_vec2(vec<1, T, Q> const& v) + { + return vec<2, T, Q>(v.x, static_cast(0)); + } + + 
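`value_ptr` and the `make_*` builders are inverses over a contiguous column-major buffer, which is what makes serializing GLM types through raw float arrays safe. A minimal round-trip sketch:

```cpp
#include <cstring>
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>

// Copy a matrix out to a raw float buffer and rebuild it. Both directions
// use the column-major layout that value_ptr exposes.
glm::mat4 roundTrip(glm::mat4 const& original)
{
	float buffer[16];
	std::memcpy(buffer, glm::value_ptr(original), sizeof(buffer));
	return glm::make_mat4(buffer); // bitwise-identical to `original`
}
```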
template + inline vec<2, T, Q> make_vec2(vec<2, T, Q> const& v) + { + return v; + } + + template + inline vec<2, T, Q> make_vec2(vec<3, T, Q> const& v) + { + return vec<2, T, Q>(v); + } + + template + inline vec<2, T, Q> make_vec2(vec<4, T, Q> const& v) + { + return vec<2, T, Q>(v); + } + + template + inline vec<3, T, Q> make_vec3(vec<1, T, Q> const& v) + { + return vec<3, T, Q>(v.x, static_cast(0), static_cast(0)); + } + + template + inline vec<3, T, Q> make_vec3(vec<2, T, Q> const& v) + { + return vec<3, T, Q>(v.x, v.y, static_cast(0)); + } + + template + inline vec<3, T, Q> make_vec3(vec<3, T, Q> const& v) + { + return v; + } + + template + inline vec<3, T, Q> make_vec3(vec<4, T, Q> const& v) + { + return vec<3, T, Q>(v); + } + + template + inline vec<4, T, Q> make_vec4(vec<1, T, Q> const& v) + { + return vec<4, T, Q>(v.x, static_cast(0), static_cast(0), static_cast(1)); + } + + template + inline vec<4, T, Q> make_vec4(vec<2, T, Q> const& v) + { + return vec<4, T, Q>(v.x, v.y, static_cast(0), static_cast(1)); + } + + template + inline vec<4, T, Q> make_vec4(vec<3, T, Q> const& v) + { + return vec<4, T, Q>(v.x, v.y, v.z, static_cast(1)); + } + + template + inline vec<4, T, Q> make_vec4(vec<4, T, Q> const& v) + { + return v; + } + + template + GLM_FUNC_QUALIFIER vec<2, T, defaultp> make_vec2(T const *const ptr) + { + vec<2, T, defaultp> Result; + memcpy(value_ptr(Result), ptr, sizeof(vec<2, T, defaultp>)); + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, defaultp> make_vec3(T const *const ptr) + { + vec<3, T, defaultp> Result; + memcpy(value_ptr(Result), ptr, sizeof(vec<3, T, defaultp>)); + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<4, T, defaultp> make_vec4(T const *const ptr) + { + vec<4, T, defaultp> Result; + memcpy(value_ptr(Result), ptr, sizeof(vec<4, T, defaultp>)); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, defaultp> make_mat2x2(T const *const ptr) + { + mat<2, 2, T, defaultp> Result; + memcpy(value_ptr(Result), ptr, sizeof(mat<2, 2, T, defaultp>)); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, defaultp> make_mat2x3(T const *const ptr) + { + mat<2, 3, T, defaultp> Result; + memcpy(value_ptr(Result), ptr, sizeof(mat<2, 3, T, defaultp>)); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, defaultp> make_mat2x4(T const *const ptr) + { + mat<2, 4, T, defaultp> Result; + memcpy(value_ptr(Result), ptr, sizeof(mat<2, 4, T, defaultp>)); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, defaultp> make_mat3x2(T const *const ptr) + { + mat<3, 2, T, defaultp> Result; + memcpy(value_ptr(Result), ptr, sizeof(mat<3, 2, T, defaultp>)); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, defaultp> make_mat3x3(T const *const ptr) + { + mat<3, 3, T, defaultp> Result; + memcpy(value_ptr(Result), ptr, sizeof(mat<3, 3, T, defaultp>)); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, defaultp> make_mat3x4(T const *const ptr) + { + mat<3, 4, T, defaultp> Result; + memcpy(value_ptr(Result), ptr, sizeof(mat<3, 4, T, defaultp>)); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, defaultp> make_mat4x2(T const *const ptr) + { + mat<4, 2, T, defaultp> Result; + memcpy(value_ptr(Result), ptr, sizeof(mat<4, 2, T, defaultp>)); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, defaultp> make_mat4x3(T const *const ptr) + { + mat<4, 3, T, defaultp> Result; + memcpy(value_ptr(Result), ptr, sizeof(mat<4, 3, T, defaultp>)); + return 
Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> make_mat4x4(T const *const ptr) + { + mat<4, 4, T, defaultp> Result; + memcpy(value_ptr(Result), ptr, sizeof(mat<4, 4, T, defaultp>)); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, defaultp> make_mat2(T const *const ptr) + { + return make_mat2x2(ptr); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, defaultp> make_mat3(T const *const ptr) + { + return make_mat3x3(ptr); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> make_mat4(T const *const ptr) + { + return make_mat4x4(ptr); + } + + template + GLM_FUNC_QUALIFIER qua make_quat(T const *const ptr) + { + qua Result; + memcpy(value_ptr(Result), ptr, sizeof(qua)); + return Result; + } + + /// @} +}//namespace glm + diff --git a/MacroLibX/third_party/glm/gtc/ulp.hpp b/MacroLibX/third_party/glm/gtc/ulp.hpp new file mode 100644 index 0000000..0d80a75 --- /dev/null +++ b/MacroLibX/third_party/glm/gtc/ulp.hpp @@ -0,0 +1,152 @@ +/// @ref gtc_ulp +/// @file glm/gtc/ulp.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtc_ulp GLM_GTC_ulp +/// @ingroup gtc +/// +/// Include to use the features of this extension. +/// +/// Allow the measurement of the accuracy of a function against a reference +/// implementation. This extension works on floating-point data and provide results +/// in ULP. + +#pragma once + +// Dependencies +#include "../detail/setup.hpp" +#include "../detail/qualifier.hpp" +#include "../detail/_vectorize.hpp" +#include "../ext/scalar_int_sized.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# pragma message("GLM: GLM_GTC_ulp extension included") +#endif + +namespace glm +{ + /// Return the next ULP value(s) after the input value(s). + /// + /// @tparam genType A floating-point scalar type. + /// + /// @see gtc_ulp + template + GLM_FUNC_DECL genType next_float(genType x); + + /// Return the previous ULP value(s) before the input value(s). + /// + /// @tparam genType A floating-point scalar type. + /// + /// @see gtc_ulp + template + GLM_FUNC_DECL genType prev_float(genType x); + + /// Return the value(s) ULP distance after the input value(s). + /// + /// @tparam genType A floating-point scalar type. + /// + /// @see gtc_ulp + template + GLM_FUNC_DECL genType next_float(genType x, int ULPs); + + /// Return the value(s) ULP distance before the input value(s). + /// + /// @tparam genType A floating-point scalar type. + /// + /// @see gtc_ulp + template + GLM_FUNC_DECL genType prev_float(genType x, int ULPs); + + /// Return the distance in the number of ULP between 2 single-precision floating-point scalars. + /// + /// @see gtc_ulp + GLM_FUNC_DECL int float_distance(float x, float y); + + /// Return the distance in the number of ULP between 2 double-precision floating-point scalars. + /// + /// @see gtc_ulp + GLM_FUNC_DECL int64 float_distance(double x, double y); + + /// Return the next ULP value(s) after the input value(s). + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point + /// @tparam Q Value from qualifier enum + /// + /// @see gtc_ulp + template + GLM_FUNC_DECL vec next_float(vec const& x); + + /// Return the value(s) ULP distance after the input value(s). 
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see gtc_ulp
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> next_float(vec<L, T, Q> const& x, int ULPs);
+
+	/// Return the value(s) ULP distance after the input value(s).
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see gtc_ulp
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> next_float(vec<L, T, Q> const& x, vec<L, int, Q> const& ULPs);
+
+	/// Return the previous ULP value(s) before the input value(s).
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see gtc_ulp
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> prev_float(vec<L, T, Q> const& x);
+
+	/// Return the value(s) ULP distance before the input value(s).
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see gtc_ulp
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> prev_float(vec<L, T, Q> const& x, int ULPs);
+
+	/// Return the value(s) ULP distance before the input value(s).
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see gtc_ulp
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> prev_float(vec<L, T, Q> const& x, vec<L, int, Q> const& ULPs);
+
+	/// Return the distance in the number of ULP between 2 single-precision floating-point scalars.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see gtc_ulp
+	template<length_t L, qualifier Q>
+	GLM_FUNC_DECL vec<L, int, Q> float_distance(vec<L, float, Q> const& x, vec<L, float, Q> const& y);
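A short sketch of what the scalar forms of these functions do (the vector overloads apply the same operations component-wise):

```cpp
#include <glm/gtc/ulp.hpp>

void ulpDemo()
{
	float x = 1.0f;
	float up = glm::next_float(x);   // smallest float strictly greater than x
	float down = glm::prev_float(x); // largest float strictly smaller than x

	// down and up straddle x: one ULP below, one ULP above.
	int steps = glm::float_distance(down, up); // == 2
}
```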
+
+	/// Return the distance in the number of ULP between 2 double-precision floating-point scalars.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see gtc_ulp
+	template<length_t L, qualifier Q>
+	GLM_FUNC_DECL vec<L, int64, Q> float_distance(vec<L, double, Q> const& x, vec<L, double, Q> const& y);
+
+	/// @}
+}//namespace glm
+
+#include "ulp.inl"
diff --git a/MacroLibX/third_party/glm/gtc/ulp.inl b/MacroLibX/third_party/glm/gtc/ulp.inl
new file mode 100644
index 0000000..4ecbd3f
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtc/ulp.inl
@@ -0,0 +1,173 @@
+/// @ref gtc_ulp
+
+#include "../ext/scalar_ulp.hpp"
+
+namespace glm
+{
+	template<>
+	GLM_FUNC_QUALIFIER float next_float(float x)
+	{
+#		if GLM_HAS_CXX11_STL
+			return std::nextafter(x, std::numeric_limits<float>::max());
+#		elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS)))
+			return detail::nextafterf(x, FLT_MAX);
+#		elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID)
+			return __builtin_nextafterf(x, FLT_MAX);
+#		else
+			return nextafterf(x, FLT_MAX);
+#		endif
+	}
+
+	template<>
+	GLM_FUNC_QUALIFIER double next_float(double x)
+	{
+#		if GLM_HAS_CXX11_STL
+			return std::nextafter(x, std::numeric_limits<double>::max());
+#		elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS)))
+			return detail::nextafter(x, std::numeric_limits<double>::max());
+#		elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID)
+			return __builtin_nextafter(x, DBL_MAX);
+#		else
+			return nextafter(x, DBL_MAX);
+#		endif
+	}
+
+	template<typename T>
+	GLM_FUNC_QUALIFIER T next_float(T x, int ULPs)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'next_float' only accept floating-point input");
+		assert(ULPs >= 0);
+
+		T temp = x;
+		for (int i = 0; i < ULPs; ++i)
+			temp = next_float(temp);
+		return temp;
+	}
+
+	template<>
+	GLM_FUNC_QUALIFIER float prev_float(float x)
+	{
+#		if GLM_HAS_CXX11_STL
+			return std::nextafter(x, std::numeric_limits<float>::min());
+#		elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS)))
+			return detail::nextafterf(x, FLT_MIN);
+#		elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID)
+			return __builtin_nextafterf(x, FLT_MIN);
+#		else
+			return nextafterf(x, FLT_MIN);
+#		endif
+	}
+
+	template<>
+	GLM_FUNC_QUALIFIER double prev_float(double x)
+	{
+#		if GLM_HAS_CXX11_STL
+			return std::nextafter(x, std::numeric_limits<double>::min());
+#		elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS)))
+			return _nextafter(x, DBL_MIN);
+#		elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID)
+			return __builtin_nextafter(x, DBL_MIN);
+#		else
+			return nextafter(x, DBL_MIN);
+#		endif
+	}
+
+	template<typename T>
+	GLM_FUNC_QUALIFIER T prev_float(T x, int ULPs)
+	{
+		GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'prev_float' only accept floating-point input");
+		assert(ULPs >= 0);
+
+		T temp = x;
+		for (int i = 0; i < ULPs; ++i)
+			temp = prev_float(temp);
+		return temp;
+	}
+
+	GLM_FUNC_QUALIFIER int float_distance(float x, float y)
+	{
+		detail::float_t<float> const a(x);
+		detail::float_t<float> const b(y);
+
+		return abs(a.i - b.i);
+	}
+
+	GLM_FUNC_QUALIFIER int64 float_distance(double x, double y)
+	{
+		detail::float_t<double> const a(x);
+		detail::float_t<double> const b(y);
+
+		return abs(a.i - b.i);
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> next_float(vec<L, T, Q> const& x)
+	{
+		vec<L, T, Q> Result;
+		for (length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = next_float(x[i]);
+		return Result;
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> next_float(vec<L, T, Q> const& x, int ULPs)
+	{
+		vec<L, T, Q> Result;
+		for (length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = next_float(x[i], ULPs);
+		return Result;
+	}
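Each vector overload simply maps its scalar counterpart across components, so ULPs advance independently of each component's magnitude. A small usage sketch:

```cpp
#include <glm/gtc/ulp.hpp>

void vecUlpDemo()
{
	glm::vec3 v(1.0f, 2.0f, 4.0f);

	// Advance every component by 3 ULPs.
	glm::vec3 nudged = glm::next_float(v, 3);

	// Per-component representational distance: (3, 3, 3).
	glm::ivec3 dist = glm::float_distance(v, nudged);
}
```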
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> next_float(vec<L, T, Q> const& x, vec<L, int, Q> const& ULPs)
+	{
+		vec<L, T, Q> Result;
+		for (length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = next_float(x[i], ULPs[i]);
+		return Result;
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> prev_float(vec<L, T, Q> const& x)
+	{
+		vec<L, T, Q> Result;
+		for (length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = prev_float(x[i]);
+		return Result;
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> prev_float(vec<L, T, Q> const& x, int ULPs)
+	{
+		vec<L, T, Q> Result;
+		for (length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = prev_float(x[i], ULPs);
+		return Result;
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> prev_float(vec<L, T, Q> const& x, vec<L, int, Q> const& ULPs)
+	{
+		vec<L, T, Q> Result;
+		for (length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = prev_float(x[i], ULPs[i]);
+		return Result;
+	}
+
+	template<length_t L, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, int, Q> float_distance(vec<L, float, Q> const& x, vec<L, float, Q> const& y)
+	{
+		vec<L, int, Q> Result;
+		for (length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = float_distance(x[i], y[i]);
+		return Result;
+	}
+
+	template<length_t L, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, int64, Q> float_distance(vec<L, double, Q> const& x, vec<L, double, Q> const& y)
+	{
+		vec<L, int64, Q> Result;
+		for (length_t i = 0, n = Result.length(); i < n; ++i)
+			Result[i] = float_distance(x[i], y[i]);
+		return Result;
+	}
+}//namespace glm
+
diff --git a/MacroLibX/third_party/glm/gtc/vec1.hpp b/MacroLibX/third_party/glm/gtc/vec1.hpp
new file mode 100644
index 0000000..c20be87
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtc/vec1.hpp
@@ -0,0 +1,30 @@
+/// @ref gtc_vec1
+/// @file glm/gtc/vec1.hpp
+///
+/// @see core (dependence)
+///
+/// @defgroup gtc_vec1 GLM_GTC_vec1
+/// @ingroup gtc
+///
+/// Include <glm/gtc/vec1.hpp> to use the features of this extension.
+///
+/// Add vec1, ivec1, uvec1 and bvec1 types.
+
+#pragma once
+
+// Dependency:
+#include "../ext/vector_bool1.hpp"
+#include "../ext/vector_bool1_precision.hpp"
+#include "../ext/vector_float1.hpp"
+#include "../ext/vector_float1_precision.hpp"
+#include "../ext/vector_double1.hpp"
+#include "../ext/vector_double1_precision.hpp"
+#include "../ext/vector_int1.hpp"
+#include "../ext/vector_int1_precision.hpp"
+#include "../ext/vector_uint1.hpp"
+#include "../ext/vector_uint1_precision.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	pragma message("GLM: GLM_GTC_vec1 extension included")
+#endif
+
diff --git a/MacroLibX/third_party/glm/gtx/associated_min_max.hpp b/MacroLibX/third_party/glm/gtx/associated_min_max.hpp
new file mode 100644
index 0000000..d1a41c0
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/associated_min_max.hpp
@@ -0,0 +1,207 @@
+/// @ref gtx_associated_min_max
+/// @file glm/gtx/associated_min_max.hpp
+///
+/// @see core (dependence)
+/// @see gtx_extented_min_max (dependence)
+///
+/// @defgroup gtx_associated_min_max GLM_GTX_associated_min_max
+/// @ingroup gtx
+///
+/// Include <glm/gtx/associated_min_max.hpp> to use the features of this extension.
+///
+/// @brief Min and max functions that return associated values, not the compared ones.
+
+#pragma once
+
+// Dependency:
+#include "../glm.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	ifndef GLM_ENABLE_EXPERIMENTAL
+#		pragma message("GLM: GLM_GTX_associated_min_max is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.")
+#	else
+#		pragma message("GLM: GLM_GTX_associated_min_max extension included")
+#	endif
+#endif
+
+namespace glm
+{
+	/// @addtogroup gtx_associated_min_max
+	/// @{
+
+	/// Minimum comparison between 2 variables and returns 2 associated variable values
+	/// @see gtx_associated_min_max
+	template<typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL U associatedMin(T x, U a, T y, U b);
+
+	/// Minimum comparison between 2 variables and returns 2 associated variable values
+	/// @see gtx_associated_min_max
+	template<length_t L, typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<2, U, Q> associatedMin(
+		vec<L, T, Q> const& x, vec<L, U, Q> const& a,
+		vec<L, T, Q> const& y, vec<L, U, Q> const& b);
+
+	/// Minimum comparison between 2 variables and returns 2 associated variable values
+	/// @see gtx_associated_min_max
+	template<length_t L, typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<L, U, Q> associatedMin(
+		T x, const vec<L, U, Q>& a,
+		T y, const vec<L, U, Q>& b);
+
+	/// Minimum comparison between 2 variables and returns 2 associated variable values
+	/// @see gtx_associated_min_max
+	template<length_t L, typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<L, U, Q> associatedMin(
+		vec<L, T, Q> const& x, U a,
+		vec<L, T, Q> const& y, U b);
+
+	/// Minimum comparison between 3 variables and returns 3 associated variable values
+	/// @see gtx_associated_min_max
+	template<typename T, typename U>
+	GLM_FUNC_DECL U associatedMin(
+		T x, U a,
+		T y, U b,
+		T z, U c);
+
+	/// Minimum comparison between 3 variables and returns 3 associated variable values
+	/// @see gtx_associated_min_max
+	template<length_t L, typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<L, U, Q> associatedMin(
+		vec<L, T, Q> const& x, vec<L, U, Q> const& a,
+		vec<L, T, Q> const& y, vec<L, U, Q> const& b,
+		vec<L, T, Q> const& z, vec<L, U, Q> const& c);
+
+	/// Minimum comparison between 4 variables and returns 4 associated variable values
+	/// @see gtx_associated_min_max
+	template<typename T, typename U>
+	GLM_FUNC_DECL U associatedMin(
+		T x, U a,
+		T y, U b,
+		T z, U c,
+		T w, U d);
+
+	/// Minimum comparison between 4 variables and returns 4 associated variable values
+	/// @see gtx_associated_min_max
+	template<length_t L, typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<L, U, Q> associatedMin(
+		vec<L, T, Q> const& x, vec<L, U, Q> const& a,
+		vec<L, T, Q> const& y, vec<L, U, Q> const& b,
+		vec<L, T, Q> const& z, vec<L, U, Q> const& c,
+		vec<L, T, Q> const& w, vec<L, U, Q> const& d);
+
+	/// Minimum comparison between 4 variables and returns 4 associated variable values
+	/// @see gtx_associated_min_max
+	template<length_t L, typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<L, U, Q> associatedMin(
+		T x, vec<L, U, Q> const& a,
+		T y, vec<L, U, Q> const& b,
+		T z, vec<L, U, Q> const& c,
+		T w, vec<L, U, Q> const& d);
+
+	/// Minimum comparison between 4 variables and returns 4 associated variable values
+	/// @see gtx_associated_min_max
+	template<length_t L, typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<L, U, Q> associatedMin(
+		vec<L, T, Q> const& x, U a,
+		vec<L, T, Q> const& y, U b,
+		vec<L, T, Q> const& z, U c,
+		vec<L, T, Q> const& w, U d);
+
+	/// Maximum comparison between 2 variables and returns 2 associated variable values
+	/// @see gtx_associated_min_max
+	template<typename T, typename U>
+	GLM_FUNC_DECL U associatedMax(T x, U a, T y, U b);
+
+	/// Maximum comparison between 2 variables and returns 2 associated variable values
+	/// @see gtx_associated_min_max
+	template<length_t L, typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<2, U, Q> associatedMax(
+		vec<L, T, Q> const& x, vec<L, U, Q> const& a,
+		vec<L, T, Q> const& y, vec<L, U, Q> const& b);
+
+	/// Maximum comparison between 2 variables and returns 2 associated variable values
+	/// @see gtx_associated_min_max
+	template<length_t L, typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<L, U, Q> associatedMax(
+		T x, vec<L, U, Q> const& a,
+		T y, vec<L, U, Q> const& b);
+
+	/// Maximum comparison between 2 variables and returns 2 associated variable values
+	/// @see gtx_associated_min_max
+	template<length_t L, typename T, typename U, qualifier Q>
+	GLM_FUNC_DECL vec<L, U, Q> associatedMax(
+		vec<L, T, Q> const& x, U a,
+		vec<L, T, Q> const& y, U b);
+
+	/// Maximum comparison between 3 variables and returns 3 associated variable values
+	/// @see gtx_associated_min_max
+	template<typename T, typename U>
GLM_FUNC_DECL U associatedMax( + T x, U a, + T y, U b, + T z, U c); + + /// Maximum comparison between 3 variables and returns 3 associated variable values + /// @see gtx_associated_min_max + template + GLM_FUNC_DECL vec associatedMax( + vec const& x, vec const& a, + vec const& y, vec const& b, + vec const& z, vec const& c); + + /// Maximum comparison between 3 variables and returns 3 associated variable values + /// @see gtx_associated_min_max + template + GLM_FUNC_DECL vec associatedMax( + T x, vec const& a, + T y, vec const& b, + T z, vec const& c); + + /// Maximum comparison between 3 variables and returns 3 associated variable values + /// @see gtx_associated_min_max + template + GLM_FUNC_DECL vec associatedMax( + vec const& x, U a, + vec const& y, U b, + vec const& z, U c); + + /// Maximum comparison between 4 variables and returns 4 associated variable values + /// @see gtx_associated_min_max + template + GLM_FUNC_DECL U associatedMax( + T x, U a, + T y, U b, + T z, U c, + T w, U d); + + /// Maximum comparison between 4 variables and returns 4 associated variable values + /// @see gtx_associated_min_max + template + GLM_FUNC_DECL vec associatedMax( + vec const& x, vec const& a, + vec const& y, vec const& b, + vec const& z, vec const& c, + vec const& w, vec const& d); + + /// Maximum comparison between 4 variables and returns 4 associated variable values + /// @see gtx_associated_min_max + template + GLM_FUNC_DECL vec associatedMax( + T x, vec const& a, + T y, vec const& b, + T z, vec const& c, + T w, vec const& d); + + /// Maximum comparison between 4 variables and returns 4 associated variable values + /// @see gtx_associated_min_max + template + GLM_FUNC_DECL vec associatedMax( + vec const& x, U a, + vec const& y, U b, + vec const& z, U c, + vec const& w, U d); + + /// @} +} //namespace glm + +#include "associated_min_max.inl" diff --git a/MacroLibX/third_party/glm/gtx/associated_min_max.inl b/MacroLibX/third_party/glm/gtx/associated_min_max.inl new file mode 100644 index 0000000..5186c47 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/associated_min_max.inl @@ -0,0 +1,354 @@ +/// @ref gtx_associated_min_max + +namespace glm{ + +// Min comparison between 2 variables +template +GLM_FUNC_QUALIFIER U associatedMin(T x, U a, T y, U b) +{ + return x < y ? a : b; +} + +template +GLM_FUNC_QUALIFIER vec<2, U, Q> associatedMin +( + vec const& x, vec const& a, + vec const& y, vec const& b +) +{ + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = x[i] < y[i] ? a[i] : b[i]; + return Result; +} + +template +GLM_FUNC_QUALIFIER vec associatedMin +( + T x, const vec& a, + T y, const vec& b +) +{ + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = x < y ? a[i] : b[i]; + return Result; +} + +template +GLM_FUNC_QUALIFIER vec associatedMin +( + vec const& x, U a, + vec const& y, U b +) +{ + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = x[i] < y[i] ? a : b; + return Result; +} + +// Min comparison between 3 variables +template +GLM_FUNC_QUALIFIER U associatedMin +( + T x, U a, + T y, U b, + T z, U c +) +{ + U Result = x < y ? (x < z ? a : c) : (y < z ? b : c); + return Result; +} + +template +GLM_FUNC_QUALIFIER vec associatedMin +( + vec const& x, vec const& a, + vec const& y, vec const& b, + vec const& z, vec const& c +) +{ + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = x[i] < y[i] ? (x[i] < z[i] ? a[i] : c[i]) : (y[i] < z[i] ? 
b[i] : c[i]); + return Result; +} + +// Min comparison between 4 variables +template +GLM_FUNC_QUALIFIER U associatedMin +( + T x, U a, + T y, U b, + T z, U c, + T w, U d +) +{ + T Test1 = min(x, y); + T Test2 = min(z, w); + U Result1 = x < y ? a : b; + U Result2 = z < w ? c : d; + U Result = Test1 < Test2 ? Result1 : Result2; + return Result; +} + +// Min comparison between 4 variables +template +GLM_FUNC_QUALIFIER vec associatedMin +( + vec const& x, vec const& a, + vec const& y, vec const& b, + vec const& z, vec const& c, + vec const& w, vec const& d +) +{ + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + { + T Test1 = min(x[i], y[i]); + T Test2 = min(z[i], w[i]); + U Result1 = x[i] < y[i] ? a[i] : b[i]; + U Result2 = z[i] < w[i] ? c[i] : d[i]; + Result[i] = Test1 < Test2 ? Result1 : Result2; + } + return Result; +} + +// Min comparison between 4 variables +template +GLM_FUNC_QUALIFIER vec associatedMin +( + T x, vec const& a, + T y, vec const& b, + T z, vec const& c, + T w, vec const& d +) +{ + T Test1 = min(x, y); + T Test2 = min(z, w); + + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + { + U Result1 = x < y ? a[i] : b[i]; + U Result2 = z < w ? c[i] : d[i]; + Result[i] = Test1 < Test2 ? Result1 : Result2; + } + return Result; +} + +// Min comparison between 4 variables +template +GLM_FUNC_QUALIFIER vec associatedMin +( + vec const& x, U a, + vec const& y, U b, + vec const& z, U c, + vec const& w, U d +) +{ + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + { + T Test1 = min(x[i], y[i]); + T Test2 = min(z[i], w[i]); + U Result1 = x[i] < y[i] ? a : b; + U Result2 = z[i] < w[i] ? c : d; + Result[i] = Test1 < Test2 ? Result1 : Result2; + } + return Result; +} + +// Max comparison between 2 variables +template +GLM_FUNC_QUALIFIER U associatedMax(T x, U a, T y, U b) +{ + return x > y ? a : b; +} + +// Max comparison between 2 variables +template +GLM_FUNC_QUALIFIER vec<2, U, Q> associatedMax +( + vec const& x, vec const& a, + vec const& y, vec const& b +) +{ + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = x[i] > y[i] ? a[i] : b[i]; + return Result; +} + +// Max comparison between 2 variables +template +GLM_FUNC_QUALIFIER vec associatedMax +( + T x, vec const& a, + T y, vec const& b +) +{ + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = x > y ? a[i] : b[i]; + return Result; +} + +// Max comparison between 2 variables +template +GLM_FUNC_QUALIFIER vec associatedMax +( + vec const& x, U a, + vec const& y, U b +) +{ + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = x[i] > y[i] ? a : b; + return Result; +} + +// Max comparison between 3 variables +template +GLM_FUNC_QUALIFIER U associatedMax +( + T x, U a, + T y, U b, + T z, U c +) +{ + U Result = x > y ? (x > z ? a : c) : (y > z ? b : c); + return Result; +} + +// Max comparison between 3 variables +template +GLM_FUNC_QUALIFIER vec associatedMax +( + vec const& x, vec const& a, + vec const& y, vec const& b, + vec const& z, vec const& c +) +{ + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = x[i] > y[i] ? (x[i] > z[i] ? a[i] : c[i]) : (y[i] > z[i] ? b[i] : c[i]); + return Result; +} + +// Max comparison between 3 variables +template +GLM_FUNC_QUALIFIER vec associatedMax +( + T x, vec const& a, + T y, vec const& b, + T z, vec const& c +) +{ + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = x > y ? (x > z ? 
a[i] : c[i]) : (y > z ? b[i] : c[i]); + return Result; +} + +// Max comparison between 3 variables +template +GLM_FUNC_QUALIFIER vec associatedMax +( + vec const& x, U a, + vec const& y, U b, + vec const& z, U c +) +{ + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + Result[i] = x[i] > y[i] ? (x[i] > z[i] ? a : c) : (y[i] > z[i] ? b : c); + return Result; +} + +// Max comparison between 4 variables +template +GLM_FUNC_QUALIFIER U associatedMax +( + T x, U a, + T y, U b, + T z, U c, + T w, U d +) +{ + T Test1 = max(x, y); + T Test2 = max(z, w); + U Result1 = x > y ? a : b; + U Result2 = z > w ? c : d; + U Result = Test1 > Test2 ? Result1 : Result2; + return Result; +} + +// Max comparison between 4 variables +template +GLM_FUNC_QUALIFIER vec associatedMax +( + vec const& x, vec const& a, + vec const& y, vec const& b, + vec const& z, vec const& c, + vec const& w, vec const& d +) +{ + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + { + T Test1 = max(x[i], y[i]); + T Test2 = max(z[i], w[i]); + U Result1 = x[i] > y[i] ? a[i] : b[i]; + U Result2 = z[i] > w[i] ? c[i] : d[i]; + Result[i] = Test1 > Test2 ? Result1 : Result2; + } + return Result; +} + +// Max comparison between 4 variables +template +GLM_FUNC_QUALIFIER vec associatedMax +( + T x, vec const& a, + T y, vec const& b, + T z, vec const& c, + T w, vec const& d +) +{ + T Test1 = max(x, y); + T Test2 = max(z, w); + + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + { + U Result1 = x > y ? a[i] : b[i]; + U Result2 = z > w ? c[i] : d[i]; + Result[i] = Test1 > Test2 ? Result1 : Result2; + } + return Result; +} + +// Max comparison between 4 variables +template +GLM_FUNC_QUALIFIER vec associatedMax +( + vec const& x, U a, + vec const& y, U b, + vec const& z, U c, + vec const& w, U d +) +{ + vec Result; + for(length_t i = 0, n = Result.length(); i < n; ++i) + { + T Test1 = max(x[i], y[i]); + T Test2 = max(z[i], w[i]); + U Result1 = x[i] > y[i] ? a : b; + U Result2 = z[i] > w[i] ? c : d; + Result[i] = Test1 > Test2 ? Result1 : Result2; + } + return Result; +} +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/bit.hpp b/MacroLibX/third_party/glm/gtx/bit.hpp new file mode 100644 index 0000000..60a7aef --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/bit.hpp @@ -0,0 +1,98 @@ +/// @ref gtx_bit +/// @file glm/gtx/bit.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_bit GLM_GTX_bit +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Allow to perform bit operations on integer values + +#pragma once + +// Dependencies +#include "../gtc/bitfield.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_bit is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_bit extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_bit + /// @{ + + /// @see gtx_bit + template + GLM_FUNC_DECL genIUType highestBitValue(genIUType Value); + + /// @see gtx_bit + template + GLM_FUNC_DECL genIUType lowestBitValue(genIUType Value); + + /// Find the highest bit set to 1 in a integer variable and return its value. + /// + /// @see gtx_bit + template + GLM_FUNC_DECL vec highestBitValue(vec const& value); + + /// Return the power of two number which value is just higher the input value. 
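[Editor's note: a minimal usage sketch, not part of this patch. It exercises the GTX_associated_min_max functions defined above and the GTX_bit helpers declared just before this point; it assumes only that the vendored GLM headers are on the include path.]

// Illustrative sketch: associatedMin() pairs each comparison key with a
// carried value; highestBitValue()/lowestBitValue() isolate set bits.
#define GLM_ENABLE_EXPERIMENTAL // both extensions are experimental (see the pragma above)
#include <glm/gtx/associated_min_max.hpp>
#include <glm/gtx/bit.hpp>
#include <cstdio>

int main()
{
    // Compare keys 3.0f and 7.0f, return the value paired with the smaller key.
    int const picked = glm::associatedMin(3.0f, 10, 7.0f, 20); // 10

    // 44 = 0b101100: the highest set bit is 32, the lowest is 4.
    std::printf("%d %d %d\n", picked,
                glm::highestBitValue(44),
                glm::lowestBitValue(44)); // prints "10 32 4"
    return 0;
}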
+ /// Deprecated, use ceilPowerOfTwo from GTC_round instead + /// + /// @see gtc_round + /// @see gtx_bit + template + GLM_DEPRECATED GLM_FUNC_DECL genIUType powerOfTwoAbove(genIUType Value); + + /// Return the power of two number which value is just higher the input value. + /// Deprecated, use ceilPowerOfTwo from GTC_round instead + /// + /// @see gtc_round + /// @see gtx_bit + template + GLM_DEPRECATED GLM_FUNC_DECL vec powerOfTwoAbove(vec const& value); + + /// Return the power of two number which value is just lower the input value. + /// Deprecated, use floorPowerOfTwo from GTC_round instead + /// + /// @see gtc_round + /// @see gtx_bit + template + GLM_DEPRECATED GLM_FUNC_DECL genIUType powerOfTwoBelow(genIUType Value); + + /// Return the power of two number which value is just lower the input value. + /// Deprecated, use floorPowerOfTwo from GTC_round instead + /// + /// @see gtc_round + /// @see gtx_bit + template + GLM_DEPRECATED GLM_FUNC_DECL vec powerOfTwoBelow(vec const& value); + + /// Return the power of two number which value is the closet to the input value. + /// Deprecated, use roundPowerOfTwo from GTC_round instead + /// + /// @see gtc_round + /// @see gtx_bit + template + GLM_DEPRECATED GLM_FUNC_DECL genIUType powerOfTwoNearest(genIUType Value); + + /// Return the power of two number which value is the closet to the input value. + /// Deprecated, use roundPowerOfTwo from GTC_round instead + /// + /// @see gtc_round + /// @see gtx_bit + template + GLM_DEPRECATED GLM_FUNC_DECL vec powerOfTwoNearest(vec const& value); + + /// @} +} //namespace glm + + +#include "bit.inl" + diff --git a/MacroLibX/third_party/glm/gtx/bit.inl b/MacroLibX/third_party/glm/gtx/bit.inl new file mode 100644 index 0000000..621b626 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/bit.inl @@ -0,0 +1,92 @@ +/// @ref gtx_bit + +namespace glm +{ + /////////////////// + // highestBitValue + + template + GLM_FUNC_QUALIFIER genIUType highestBitValue(genIUType Value) + { + genIUType tmp = Value; + genIUType result = genIUType(0); + while(tmp) + { + result = (tmp & (~tmp + 1)); // grab lowest bit + tmp &= ~result; // clear lowest bit + } + return result; + } + + template + GLM_FUNC_QUALIFIER vec highestBitValue(vec const& v) + { + return detail::functor1::call(highestBitValue, v); + } + + /////////////////// + // lowestBitValue + + template + GLM_FUNC_QUALIFIER genIUType lowestBitValue(genIUType Value) + { + return (Value & (~Value + 1)); + } + + template + GLM_FUNC_QUALIFIER vec lowestBitValue(vec const& v) + { + return detail::functor1::call(lowestBitValue, v); + } + + /////////////////// + // powerOfTwoAbove + + template + GLM_FUNC_QUALIFIER genType powerOfTwoAbove(genType value) + { + return isPowerOfTwo(value) ? value : highestBitValue(value) << 1; + } + + template + GLM_FUNC_QUALIFIER vec powerOfTwoAbove(vec const& v) + { + return detail::functor1::call(powerOfTwoAbove, v); + } + + /////////////////// + // powerOfTwoBelow + + template + GLM_FUNC_QUALIFIER genType powerOfTwoBelow(genType value) + { + return isPowerOfTwo(value) ? value : highestBitValue(value); + } + + template + GLM_FUNC_QUALIFIER vec powerOfTwoBelow(vec const& v) + { + return detail::functor1::call(powerOfTwoBelow, v); + } + + ///////////////////// + // powerOfTwoNearest + + template + GLM_FUNC_QUALIFIER genType powerOfTwoNearest(genType value) + { + if(isPowerOfTwo(value)) + return value; + + genType const prev = highestBitValue(value); + genType const next = prev << 1; + return (next - value) < (value - prev) ? 
next : prev;
+	}
+
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<L, T, Q> powerOfTwoNearest(vec<L, T, Q> const& v)
+	{
+		return detail::functor1<vec, L, T, T, Q>::call(powerOfTwoNearest, v);
+	}
+
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/gtx/closest_point.hpp b/MacroLibX/third_party/glm/gtx/closest_point.hpp
new file mode 100644
index 0000000..de6dbbf
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/closest_point.hpp
@@ -0,0 +1,49 @@
+/// @ref gtx_closest_point
+/// @file glm/gtx/closest_point.hpp
+///
+/// @see core (dependence)
+///
+/// @defgroup gtx_closest_point GLM_GTX_closest_point
+/// @ingroup gtx
+///
+/// Include to use the features of this extension.
+///
+/// Find the point on a straight line which is the closest to a point.
+
+#pragma once
+
+// Dependency:
+#include "../glm.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	ifndef GLM_ENABLE_EXPERIMENTAL
+#		pragma message("GLM: GLM_GTX_closest_point is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.")
+#	else
+#		pragma message("GLM: GLM_GTX_closest_point extension included")
+#	endif
+#endif
+
+namespace glm
+{
+	/// @addtogroup gtx_closest_point
+	/// @{
+
+	/// Find the point on a straight line which is the closest to a point.
+	/// @see gtx_closest_point
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL vec<3, T, Q> closestPointOnLine(
+		vec<3, T, Q> const& point,
+		vec<3, T, Q> const& a,
+		vec<3, T, Q> const& b);
+
+	/// 2d lines work as well
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL vec<2, T, Q> closestPointOnLine(
+		vec<2, T, Q> const& point,
+		vec<2, T, Q> const& a,
+		vec<2, T, Q> const& b);
+
+	/// @}
+}// namespace glm
+
+#include "closest_point.inl"
diff --git a/MacroLibX/third_party/glm/gtx/closest_point.inl b/MacroLibX/third_party/glm/gtx/closest_point.inl
new file mode 100644
index 0000000..0a39b04
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/closest_point.inl
@@ -0,0 +1,45 @@
+/// @ref gtx_closest_point
+
+namespace glm
+{
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<3, T, Q> closestPointOnLine
+	(
+		vec<3, T, Q> const& point,
+		vec<3, T, Q> const& a,
+		vec<3, T, Q> const& b
+	)
+	{
+		T LineLength = distance(a, b);
+		vec<3, T, Q> Vector = point - a;
+		vec<3, T, Q> LineDirection = (b - a) / LineLength;
+
+		// Project Vector onto LineDirection to get the distance of point from a
+		T Distance = dot(Vector, LineDirection);
+
+		if(Distance <= T(0)) return a;
+		if(Distance >= LineLength) return b;
+		return a + LineDirection * Distance;
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<2, T, Q> closestPointOnLine
+	(
+		vec<2, T, Q> const& point,
+		vec<2, T, Q> const& a,
+		vec<2, T, Q> const& b
+	)
+	{
+		T LineLength = distance(a, b);
+		vec<2, T, Q> Vector = point - a;
+		vec<2, T, Q> LineDirection = (b - a) / LineLength;
+
+		// Project Vector onto LineDirection to get the distance of point from a
+		T Distance = dot(Vector, LineDirection);
+
+		if(Distance <= T(0)) return a;
+		if(Distance >= LineLength) return b;
+		return a + LineDirection * Distance;
+	}
+
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/gtx/color_encoding.hpp b/MacroLibX/third_party/glm/gtx/color_encoding.hpp
new file mode 100644
index 0000000..96ded2a
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/color_encoding.hpp
@@ -0,0 +1,54 @@
+/// @ref gtx_color_encoding
+/// @file glm/gtx/color_encoding.hpp
+///
+/// @see core (dependence)
+/// @see gtx_color_encoding (dependence)
+///
+/// @defgroup gtx_color_encoding GLM_GTX_color_encoding
+/// @ingroup gtx
+///
+/// Include to use the features of this extension.
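[Editor's note: a minimal sketch of the closestPointOnLine() overloads just defined, not part of this patch; it assumes the vendored GLM headers are on the include path.]

// Illustrative sketch: project a point onto a segment with GTX_closest_point.
#define GLM_ENABLE_EXPERIMENTAL // experimental extension, see the pragma above
#include <glm/glm.hpp>
#include <glm/gtx/closest_point.hpp>

int main()
{
    glm::vec3 const a(0.0f, 0.0f, 0.0f);
    glm::vec3 const b(10.0f, 0.0f, 0.0f);
    glm::vec3 const p(3.0f, 4.0f, 0.0f);

    // The projection of p falls inside [a, b], so the result is (3, 0, 0);
    // points projecting before a or past b are clamped to the endpoints.
    glm::vec3 const c = glm::closestPointOnLine(p, a, b);
    return c.x == 3.0f ? 0 : 1;
}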
+/// +/// @brief Allow to perform bit operations on integer values + +#pragma once + +// Dependencies +#include "../detail/setup.hpp" +#include "../detail/qualifier.hpp" +#include "../vec3.hpp" +#include + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTC_color_encoding is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTC_color_encoding extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_color_encoding + /// @{ + + /// Convert a linear sRGB color to D65 YUV. + template + GLM_FUNC_DECL vec<3, T, Q> convertLinearSRGBToD65XYZ(vec<3, T, Q> const& ColorLinearSRGB); + + /// Convert a linear sRGB color to D50 YUV. + template + GLM_FUNC_DECL vec<3, T, Q> convertLinearSRGBToD50XYZ(vec<3, T, Q> const& ColorLinearSRGB); + + /// Convert a D65 YUV color to linear sRGB. + template + GLM_FUNC_DECL vec<3, T, Q> convertD65XYZToLinearSRGB(vec<3, T, Q> const& ColorD65XYZ); + + /// Convert a D65 YUV color to D50 YUV. + template + GLM_FUNC_DECL vec<3, T, Q> convertD65XYZToD50XYZ(vec<3, T, Q> const& ColorD65XYZ); + + /// @} +} //namespace glm + +#include "color_encoding.inl" diff --git a/MacroLibX/third_party/glm/gtx/color_encoding.inl b/MacroLibX/third_party/glm/gtx/color_encoding.inl new file mode 100644 index 0000000..e50fa3e --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/color_encoding.inl @@ -0,0 +1,45 @@ +/// @ref gtx_color_encoding + +namespace glm +{ + template + GLM_FUNC_QUALIFIER vec<3, T, Q> convertLinearSRGBToD65XYZ(vec<3, T, Q> const& ColorLinearSRGB) + { + vec<3, T, Q> const M(0.490f, 0.17697f, 0.2f); + vec<3, T, Q> const N(0.31f, 0.8124f, 0.01063f); + vec<3, T, Q> const O(0.490f, 0.01f, 0.99f); + + return (M * ColorLinearSRGB + N * ColorLinearSRGB + O * ColorLinearSRGB) * static_cast(5.650675255693055f); + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> convertLinearSRGBToD50XYZ(vec<3, T, Q> const& ColorLinearSRGB) + { + vec<3, T, Q> const M(0.436030342570117f, 0.222438466210245f, 0.013897440074263f); + vec<3, T, Q> const N(0.385101860087134f, 0.716942745571917f, 0.097076381494207f); + vec<3, T, Q> const O(0.143067806654203f, 0.060618777416563f, 0.713926257896652f); + + return M * ColorLinearSRGB + N * ColorLinearSRGB + O * ColorLinearSRGB; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> convertD65XYZToLinearSRGB(vec<3, T, Q> const& ColorD65XYZ) + { + vec<3, T, Q> const M(0.41847f, -0.091169f, 0.0009209f); + vec<3, T, Q> const N(-0.15866f, 0.25243f, 0.015708f); + vec<3, T, Q> const O(0.0009209f, -0.0025498f, 0.1786f); + + return M * ColorD65XYZ + N * ColorD65XYZ + O * ColorD65XYZ; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> convertD65XYZToD50XYZ(vec<3, T, Q> const& ColorD65XYZ) + { + vec<3, T, Q> const M(+1.047844353856414f, +0.029549007606644f, -0.009250984365223f); + vec<3, T, Q> const N(+0.022898981050086f, +0.990508028941971f, +0.015072338237051f); + vec<3, T, Q> const O(-0.050206647741605f, -0.017074711360960f, +0.751717835079977f); + + return M * ColorD65XYZ + N * ColorD65XYZ + O * ColorD65XYZ; + } + +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/color_space.hpp b/MacroLibX/third_party/glm/gtx/color_space.hpp new file mode 100644 index 0000000..a634392 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/color_space.hpp @@ -0,0 +1,72 @@ +/// @ref gtx_color_space +/// @file glm/gtx/color_space.hpp +/// +/// @see core (dependence) 
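[Editor's note: a minimal sketch of the GTX_color_encoding conversions defined above, not part of this patch; the input values are arbitrary and the chaining merely shows how the helpers compose.]

// Illustrative sketch: linear sRGB -> D65 XYZ -> D50 XYZ.
#define GLM_ENABLE_EXPERIMENTAL // experimental extension, see the pragma above
#include <glm/glm.hpp>
#include <glm/gtx/color_encoding.hpp>

int main()
{
    glm::vec3 const linearSRGB(0.5f, 0.25f, 0.75f);
    glm::vec3 const d65 = glm::convertLinearSRGBToD65XYZ(linearSRGB);
    glm::vec3 const d50 = glm::convertD65XYZToD50XYZ(d65);
    return d50.x > 0.0f ? 0 : 1; // positive input stays positive through these weights
}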
+/// +/// @defgroup gtx_color_space GLM_GTX_color_space +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Related to RGB to HSV conversions and operations. + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_color_space is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_color_space extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_color_space + /// @{ + + /// Converts a color from HSV color space to its color in RGB color space. + /// @see gtx_color_space + template + GLM_FUNC_DECL vec<3, T, Q> rgbColor( + vec<3, T, Q> const& hsvValue); + + /// Converts a color from RGB color space to its color in HSV color space. + /// @see gtx_color_space + template + GLM_FUNC_DECL vec<3, T, Q> hsvColor( + vec<3, T, Q> const& rgbValue); + + /// Build a saturation matrix. + /// @see gtx_color_space + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> saturation( + T const s); + + /// Modify the saturation of a color. + /// @see gtx_color_space + template + GLM_FUNC_DECL vec<3, T, Q> saturation( + T const s, + vec<3, T, Q> const& color); + + /// Modify the saturation of a color. + /// @see gtx_color_space + template + GLM_FUNC_DECL vec<4, T, Q> saturation( + T const s, + vec<4, T, Q> const& color); + + /// Compute color luminosity associating ratios (0.33, 0.59, 0.11) to RGB canals. + /// @see gtx_color_space + template + GLM_FUNC_DECL T luminosity( + vec<3, T, Q> const& color); + + /// @} +}//namespace glm + +#include "color_space.inl" diff --git a/MacroLibX/third_party/glm/gtx/color_space.inl b/MacroLibX/third_party/glm/gtx/color_space.inl new file mode 100644 index 0000000..f698afe --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/color_space.inl @@ -0,0 +1,141 @@ +/// @ref gtx_color_space + +namespace glm +{ + template + GLM_FUNC_QUALIFIER vec<3, T, Q> rgbColor(const vec<3, T, Q>& hsvColor) + { + vec<3, T, Q> hsv = hsvColor; + vec<3, T, Q> rgbColor; + + if(hsv.y == static_cast(0)) + // achromatic (grey) + rgbColor = vec<3, T, Q>(hsv.z); + else + { + T sector = floor(hsv.x * (T(1) / T(60))); + T frac = (hsv.x * (T(1) / T(60))) - sector; + // factorial part of h + T o = hsv.z * (T(1) - hsv.y); + T p = hsv.z * (T(1) - hsv.y * frac); + T q = hsv.z * (T(1) - hsv.y * (T(1) - frac)); + + switch(int(sector)) + { + default: + case 0: + rgbColor.r = hsv.z; + rgbColor.g = q; + rgbColor.b = o; + break; + case 1: + rgbColor.r = p; + rgbColor.g = hsv.z; + rgbColor.b = o; + break; + case 2: + rgbColor.r = o; + rgbColor.g = hsv.z; + rgbColor.b = q; + break; + case 3: + rgbColor.r = o; + rgbColor.g = p; + rgbColor.b = hsv.z; + break; + case 4: + rgbColor.r = q; + rgbColor.g = o; + rgbColor.b = hsv.z; + break; + case 5: + rgbColor.r = hsv.z; + rgbColor.g = o; + rgbColor.b = p; + break; + } + } + + return rgbColor; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> hsvColor(const vec<3, T, Q>& rgbColor) + { + vec<3, T, Q> hsv = rgbColor; + float Min = min(min(rgbColor.r, rgbColor.g), rgbColor.b); + float Max = max(max(rgbColor.r, rgbColor.g), rgbColor.b); + float Delta = Max - Min; + + hsv.z = Max; + + if(Max != static_cast(0)) + { + hsv.y = Delta / hsv.z; + T h = static_cast(0); + + if(rgbColor.r == Max) + // between yellow & magenta + h = static_cast(0) + T(60) * (rgbColor.g - rgbColor.b) / 
Delta; + else if(rgbColor.g == Max) + // between cyan & yellow + h = static_cast(120) + T(60) * (rgbColor.b - rgbColor.r) / Delta; + else + // between magenta & cyan + h = static_cast(240) + T(60) * (rgbColor.r - rgbColor.g) / Delta; + + if(h < T(0)) + hsv.x = h + T(360); + else + hsv.x = h; + } + else + { + // If r = g = b = 0 then s = 0, h is undefined + hsv.y = static_cast(0); + hsv.x = static_cast(0); + } + + return hsv; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> saturation(T const s) + { + vec<3, T, defaultp> rgbw = vec<3, T, defaultp>(T(0.2126), T(0.7152), T(0.0722)); + + vec<3, T, defaultp> const col((T(1) - s) * rgbw); + + mat<4, 4, T, defaultp> result(T(1)); + result[0][0] = col.x + s; + result[0][1] = col.x; + result[0][2] = col.x; + result[1][0] = col.y; + result[1][1] = col.y + s; + result[1][2] = col.y; + result[2][0] = col.z; + result[2][1] = col.z; + result[2][2] = col.z + s; + + return result; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> saturation(const T s, const vec<3, T, Q>& color) + { + return vec<3, T, Q>(saturation(s) * vec<4, T, Q>(color, T(0))); + } + + template + GLM_FUNC_QUALIFIER vec<4, T, Q> saturation(const T s, const vec<4, T, Q>& color) + { + return saturation(s) * color; + } + + template + GLM_FUNC_QUALIFIER T luminosity(const vec<3, T, Q>& color) + { + const vec<3, T, Q> tmp = vec<3, T, Q>(0.33, 0.59, 0.11); + return dot(color, tmp); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/color_space_YCoCg.hpp b/MacroLibX/third_party/glm/gtx/color_space_YCoCg.hpp new file mode 100644 index 0000000..dd2b771 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/color_space_YCoCg.hpp @@ -0,0 +1,60 @@ +/// @ref gtx_color_space_YCoCg +/// @file glm/gtx/color_space_YCoCg.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_color_space_YCoCg GLM_GTX_color_space_YCoCg +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// RGB to YCoCg conversions and operations + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_color_space_YCoCg is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_color_space_YCoCg extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_color_space_YCoCg + /// @{ + + /// Convert a color from RGB color space to YCoCg color space. + /// @see gtx_color_space_YCoCg + template + GLM_FUNC_DECL vec<3, T, Q> rgb2YCoCg( + vec<3, T, Q> const& rgbColor); + + /// Convert a color from YCoCg color space to RGB color space. + /// @see gtx_color_space_YCoCg + template + GLM_FUNC_DECL vec<3, T, Q> YCoCg2rgb( + vec<3, T, Q> const& YCoCgColor); + + /// Convert a color from RGB color space to YCoCgR color space. + /// @see "YCoCg-R: A Color Space with RGB Reversibility and Low Dynamic Range" + /// @see gtx_color_space_YCoCg + template + GLM_FUNC_DECL vec<3, T, Q> rgb2YCoCgR( + vec<3, T, Q> const& rgbColor); + + /// Convert a color from YCoCgR color space to RGB color space. 
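[Editor's note: a minimal sketch of the GTX_color_space functions defined just above, not part of this patch.]

// Illustrative sketch: RGB <-> HSV round trip plus the saturation() helper.
#define GLM_ENABLE_EXPERIMENTAL // experimental extension, see the pragma above
#include <glm/glm.hpp>
#include <glm/gtx/color_space.hpp>

int main()
{
    glm::vec3 const rgb(1.0f, 0.5f, 0.0f);           // orange
    glm::vec3 const hsv = glm::hsvColor(rgb);        // roughly (30, 1, 1)
    glm::vec3 const back = glm::rgbColor(hsv);       // back to ~(1, 0.5, 0)
    glm::vec3 const grey = glm::saturation(0.0f, back); // s = 0 fully desaturates
    return glm::luminosity(grey) > 0.0f ? 0 : 1;
}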
+ /// @see "YCoCg-R: A Color Space with RGB Reversibility and Low Dynamic Range" + /// @see gtx_color_space_YCoCg + template + GLM_FUNC_DECL vec<3, T, Q> YCoCgR2rgb( + vec<3, T, Q> const& YCoCgColor); + + /// @} +}//namespace glm + +#include "color_space_YCoCg.inl" diff --git a/MacroLibX/third_party/glm/gtx/color_space_YCoCg.inl b/MacroLibX/third_party/glm/gtx/color_space_YCoCg.inl new file mode 100644 index 0000000..83ba857 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/color_space_YCoCg.inl @@ -0,0 +1,107 @@ +/// @ref gtx_color_space_YCoCg + +namespace glm +{ + template + GLM_FUNC_QUALIFIER vec<3, T, Q> rgb2YCoCg + ( + vec<3, T, Q> const& rgbColor + ) + { + vec<3, T, Q> result; + result.x/*Y */ = rgbColor.r / T(4) + rgbColor.g / T(2) + rgbColor.b / T(4); + result.y/*Co*/ = rgbColor.r / T(2) + rgbColor.g * T(0) - rgbColor.b / T(2); + result.z/*Cg*/ = - rgbColor.r / T(4) + rgbColor.g / T(2) - rgbColor.b / T(4); + return result; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> YCoCg2rgb + ( + vec<3, T, Q> const& YCoCgColor + ) + { + vec<3, T, Q> result; + result.r = YCoCgColor.x + YCoCgColor.y - YCoCgColor.z; + result.g = YCoCgColor.x + YCoCgColor.z; + result.b = YCoCgColor.x - YCoCgColor.y - YCoCgColor.z; + return result; + } + + template + class compute_YCoCgR { + public: + static GLM_FUNC_QUALIFIER vec<3, T, Q> rgb2YCoCgR + ( + vec<3, T, Q> const& rgbColor + ) + { + vec<3, T, Q> result; + result.x/*Y */ = rgbColor.g * static_cast(0.5) + (rgbColor.r + rgbColor.b) * static_cast(0.25); + result.y/*Co*/ = rgbColor.r - rgbColor.b; + result.z/*Cg*/ = rgbColor.g - (rgbColor.r + rgbColor.b) * static_cast(0.5); + return result; + } + + static GLM_FUNC_QUALIFIER vec<3, T, Q> YCoCgR2rgb + ( + vec<3, T, Q> const& YCoCgRColor + ) + { + vec<3, T, Q> result; + T tmp = YCoCgRColor.x - (YCoCgRColor.z * static_cast(0.5)); + result.g = YCoCgRColor.z + tmp; + result.b = tmp - (YCoCgRColor.y * static_cast(0.5)); + result.r = result.b + YCoCgRColor.y; + return result; + } + }; + + template + class compute_YCoCgR { + public: + static GLM_FUNC_QUALIFIER vec<3, T, Q> rgb2YCoCgR + ( + vec<3, T, Q> const& rgbColor + ) + { + vec<3, T, Q> result; + result.y/*Co*/ = rgbColor.r - rgbColor.b; + T tmp = rgbColor.b + (result.y >> 1); + result.z/*Cg*/ = rgbColor.g - tmp; + result.x/*Y */ = tmp + (result.z >> 1); + return result; + } + + static GLM_FUNC_QUALIFIER vec<3, T, Q> YCoCgR2rgb + ( + vec<3, T, Q> const& YCoCgRColor + ) + { + vec<3, T, Q> result; + T tmp = YCoCgRColor.x - (YCoCgRColor.z >> 1); + result.g = YCoCgRColor.z + tmp; + result.b = tmp - (YCoCgRColor.y >> 1); + result.r = result.b + YCoCgRColor.y; + return result; + } + }; + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> rgb2YCoCgR + ( + vec<3, T, Q> const& rgbColor + ) + { + return compute_YCoCgR::is_integer>::rgb2YCoCgR(rgbColor); + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> YCoCgR2rgb + ( + vec<3, T, Q> const& YCoCgRColor + ) + { + return compute_YCoCgR::is_integer>::YCoCgR2rgb(YCoCgRColor); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/common.hpp b/MacroLibX/third_party/glm/gtx/common.hpp new file mode 100644 index 0000000..254ada2 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/common.hpp @@ -0,0 +1,76 @@ +/// @ref gtx_common +/// @file glm/gtx/common.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_common GLM_GTX_common +/// @ingroup gtx +/// +/// Include to use the features of this extension. 
+///
+/// @brief Provide functions to increase the compatibility with Cg and HLSL languages
+
+#pragma once
+
+// Dependencies:
+#include "../vec2.hpp"
+#include "../vec3.hpp"
+#include "../vec4.hpp"
+#include "../gtc/vec1.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	ifndef GLM_ENABLE_EXPERIMENTAL
+#		pragma message("GLM: GLM_GTX_common is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.")
+#	else
+#		pragma message("GLM: GLM_GTX_common extension included")
+#	endif
+#endif
+
+namespace glm
+{
+	/// @addtogroup gtx_common
+	/// @{
+
+	/// Returns true if x is a denormalized number
+	/// Numbers whose absolute value is too small to be represented in the normal format are represented in an alternate, denormalized format.
+	/// This format is less precise but can represent values closer to zero.
+	///
+	/// @tparam genType Floating-point scalar or vector types.
+	///
+	/// @see GLSL isnan man page
+	/// @see GLSL 4.20.8 specification, section 8.3 Common Functions
+	template<typename genType>
+	GLM_FUNC_DECL typename genType::bool_type isdenormal(genType const& x);
+
+	/// Similar to 'mod' but with a different rounding and integer support.
+	/// Returns 'x - y * trunc(x/y)' instead of 'x - y * floor(x/y)'
+	///
+	/// @see GLSL mod vs HLSL fmod
+	/// @see GLSL mod man page
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> fmod(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
+
+	/// Returns whether vector components values are within an interval. An open interval excludes its endpoints and is denoted with parentheses.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point or integer scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see ext_vector_relational
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, bool, Q> openBounded(vec<L, T, Q> const& Value, vec<L, T, Q> const& Min, vec<L, T, Q> const& Max);
+
+	/// Returns whether vector components values are within an interval. A closed interval includes its endpoints and is denoted with square brackets.
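[Editor's note: a minimal sketch of the GTX_common helpers declared here, not part of this patch. It contrasts the truncating fmod with GLSL's flooring mod and uses openBounded for a strict range check.]

// Illustrative sketch: fmod truncates toward zero, so fmod(-2.5, 2) is -0.5
// (GLSL mod() would give 1.5); openBounded() excludes the interval endpoints.
#define GLM_ENABLE_EXPERIMENTAL // experimental extension, see the pragma above
#include <glm/glm.hpp>
#include <glm/gtx/common.hpp>

int main()
{
    glm::vec2 const x(-2.5f, 2.5f);
    glm::vec2 const m = glm::fmod(x, glm::vec2(2.0f)); // (-0.5, 0.5)
    glm::bvec2 const inside = glm::openBounded(m, glm::vec2(-1.0f), glm::vec2(1.0f));
    return glm::all(inside) ? 0 : 1;
}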
+ /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point or integer scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see ext_vector_relational + template + GLM_FUNC_DECL vec closeBounded(vec const& Value, vec const& Min, vec const& Max); + + /// @} +}//namespace glm + +#include "common.inl" diff --git a/MacroLibX/third_party/glm/gtx/common.inl b/MacroLibX/third_party/glm/gtx/common.inl new file mode 100644 index 0000000..4ad2126 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/common.inl @@ -0,0 +1,125 @@ +/// @ref gtx_common + +#include +#include "../gtc/epsilon.hpp" +#include "../gtc/constants.hpp" + +namespace glm{ +namespace detail +{ + template + struct compute_fmod + { + GLM_FUNC_QUALIFIER static vec call(vec const& a, vec const& b) + { + return detail::functor2::call(std::fmod, a, b); + } + }; + + template + struct compute_fmod + { + GLM_FUNC_QUALIFIER static vec call(vec const& a, vec const& b) + { + return a % b; + } + }; +}//namespace detail + + template + GLM_FUNC_QUALIFIER bool isdenormal(T const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isdenormal' only accept floating-point inputs"); + +# if GLM_HAS_CXX11_STL + return std::fpclassify(x) == FP_SUBNORMAL; +# else + return epsilonNotEqual(x, static_cast(0), epsilon()) && std::fabs(x) < std::numeric_limits::min(); +# endif + } + + template + GLM_FUNC_QUALIFIER typename vec<1, T, Q>::bool_type isdenormal + ( + vec<1, T, Q> const& x + ) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isdenormal' only accept floating-point inputs"); + + return typename vec<1, T, Q>::bool_type( + isdenormal(x.x)); + } + + template + GLM_FUNC_QUALIFIER typename vec<2, T, Q>::bool_type isdenormal + ( + vec<2, T, Q> const& x + ) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isdenormal' only accept floating-point inputs"); + + return typename vec<2, T, Q>::bool_type( + isdenormal(x.x), + isdenormal(x.y)); + } + + template + GLM_FUNC_QUALIFIER typename vec<3, T, Q>::bool_type isdenormal + ( + vec<3, T, Q> const& x + ) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isdenormal' only accept floating-point inputs"); + + return typename vec<3, T, Q>::bool_type( + isdenormal(x.x), + isdenormal(x.y), + isdenormal(x.z)); + } + + template + GLM_FUNC_QUALIFIER typename vec<4, T, Q>::bool_type isdenormal + ( + vec<4, T, Q> const& x + ) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isdenormal' only accept floating-point inputs"); + + return typename vec<4, T, Q>::bool_type( + isdenormal(x.x), + isdenormal(x.y), + isdenormal(x.z), + isdenormal(x.w)); + } + + // fmod + template + GLM_FUNC_QUALIFIER genType fmod(genType x, genType y) + { + return fmod(vec<1, genType>(x), y).x; + } + + template + GLM_FUNC_QUALIFIER vec fmod(vec const& x, T y) + { + return detail::compute_fmod::is_iec559>::call(x, vec(y)); + } + + template + GLM_FUNC_QUALIFIER vec fmod(vec const& x, vec const& y) + { + return detail::compute_fmod::is_iec559>::call(x, y); + } + + template + GLM_FUNC_QUALIFIER vec openBounded(vec const& Value, vec const& Min, vec const& Max) + { + return greaterThan(Value, Min) && lessThan(Value, Max); + } + + template + GLM_FUNC_QUALIFIER vec closeBounded(vec const& Value, vec const& Min, vec const& Max) + { + return greaterThanEqual(Value, Min) && lessThanEqual(Value, Max); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/compatibility.hpp b/MacroLibX/third_party/glm/gtx/compatibility.hpp new file mode 
100644 index 0000000..f1b00a6 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/compatibility.hpp @@ -0,0 +1,133 @@ +/// @ref gtx_compatibility +/// @file glm/gtx/compatibility.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_compatibility GLM_GTX_compatibility +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Provide functions to increase the compatibility with Cg and HLSL languages + +#pragma once + +// Dependency: +#include "../glm.hpp" +#include "../gtc/quaternion.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_compatibility is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_compatibility extension included") +# endif +#endif + +#if GLM_COMPILER & GLM_COMPILER_VC +# include +#elif GLM_COMPILER & GLM_COMPILER_GCC +# include +# if(GLM_PLATFORM & GLM_PLATFORM_ANDROID) +# undef isfinite +# endif +#endif//GLM_COMPILER + +namespace glm +{ + /// @addtogroup gtx_compatibility + /// @{ + + template GLM_FUNC_QUALIFIER T lerp(T x, T y, T a){return mix(x, y, a);} //!< \brief Returns x * (1.0 - a) + y * a, i.e., the linear blend of x and y using the floating-point value a. The value for a is not restricted to the range [0, 1]. (From GLM_GTX_compatibility) + template GLM_FUNC_QUALIFIER vec<2, T, Q> lerp(const vec<2, T, Q>& x, const vec<2, T, Q>& y, T a){return mix(x, y, a);} //!< \brief Returns x * (1.0 - a) + y * a, i.e., the linear blend of x and y using the floating-point value a. The value for a is not restricted to the range [0, 1]. (From GLM_GTX_compatibility) + + template GLM_FUNC_QUALIFIER vec<3, T, Q> lerp(const vec<3, T, Q>& x, const vec<3, T, Q>& y, T a){return mix(x, y, a);} //!< \brief Returns x * (1.0 - a) + y * a, i.e., the linear blend of x and y using the floating-point value a. The value for a is not restricted to the range [0, 1]. (From GLM_GTX_compatibility) + template GLM_FUNC_QUALIFIER vec<4, T, Q> lerp(const vec<4, T, Q>& x, const vec<4, T, Q>& y, T a){return mix(x, y, a);} //!< \brief Returns x * (1.0 - a) + y * a, i.e., the linear blend of x and y using the floating-point value a. The value for a is not restricted to the range [0, 1]. (From GLM_GTX_compatibility) + template GLM_FUNC_QUALIFIER vec<2, T, Q> lerp(const vec<2, T, Q>& x, const vec<2, T, Q>& y, const vec<2, T, Q>& a){return mix(x, y, a);} //!< \brief Returns the component-wise result of x * (1.0 - a) + y * a, i.e., the linear blend of x and y using vector a. The value for a is not restricted to the range [0, 1]. (From GLM_GTX_compatibility) + template GLM_FUNC_QUALIFIER vec<3, T, Q> lerp(const vec<3, T, Q>& x, const vec<3, T, Q>& y, const vec<3, T, Q>& a){return mix(x, y, a);} //!< \brief Returns the component-wise result of x * (1.0 - a) + y * a, i.e., the linear blend of x and y using vector a. The value for a is not restricted to the range [0, 1]. (From GLM_GTX_compatibility) + template GLM_FUNC_QUALIFIER vec<4, T, Q> lerp(const vec<4, T, Q>& x, const vec<4, T, Q>& y, const vec<4, T, Q>& a){return mix(x, y, a);} //!< \brief Returns the component-wise result of x * (1.0 - a) + y * a, i.e., the linear blend of x and y using vector a. The value for a is not restricted to the range [0, 1]. 
(From GLM_GTX_compatibility) + + template GLM_FUNC_QUALIFIER T saturate(T x){return clamp(x, T(0), T(1));} //!< \brief Returns clamp(x, 0, 1) for each component in x. (From GLM_GTX_compatibility) + template GLM_FUNC_QUALIFIER vec<2, T, Q> saturate(const vec<2, T, Q>& x){return clamp(x, T(0), T(1));} //!< \brief Returns clamp(x, 0, 1) for each component in x. (From GLM_GTX_compatibility) + template GLM_FUNC_QUALIFIER vec<3, T, Q> saturate(const vec<3, T, Q>& x){return clamp(x, T(0), T(1));} //!< \brief Returns clamp(x, 0, 1) for each component in x. (From GLM_GTX_compatibility) + template GLM_FUNC_QUALIFIER vec<4, T, Q> saturate(const vec<4, T, Q>& x){return clamp(x, T(0), T(1));} //!< \brief Returns clamp(x, 0, 1) for each component in x. (From GLM_GTX_compatibility) + + template GLM_FUNC_QUALIFIER T atan2(T x, T y){return atan(x, y);} //!< \brief Arc tangent. Returns an angle whose tangent is y/x. The signs of x and y are used to determine what quadrant the angle is in. The range of values returned by this function is [-PI, PI]. Results are undefined if x and y are both 0. (From GLM_GTX_compatibility) + template GLM_FUNC_QUALIFIER vec<2, T, Q> atan2(const vec<2, T, Q>& x, const vec<2, T, Q>& y){return atan(x, y);} //!< \brief Arc tangent. Returns an angle whose tangent is y/x. The signs of x and y are used to determine what quadrant the angle is in. The range of values returned by this function is [-PI, PI]. Results are undefined if x and y are both 0. (From GLM_GTX_compatibility) + template GLM_FUNC_QUALIFIER vec<3, T, Q> atan2(const vec<3, T, Q>& x, const vec<3, T, Q>& y){return atan(x, y);} //!< \brief Arc tangent. Returns an angle whose tangent is y/x. The signs of x and y are used to determine what quadrant the angle is in. The range of values returned by this function is [-PI, PI]. Results are undefined if x and y are both 0. (From GLM_GTX_compatibility) + template GLM_FUNC_QUALIFIER vec<4, T, Q> atan2(const vec<4, T, Q>& x, const vec<4, T, Q>& y){return atan(x, y);} //!< \brief Arc tangent. Returns an angle whose tangent is y/x. The signs of x and y are used to determine what quadrant the angle is in. The range of values returned by this function is [-PI, PI]. Results are undefined if x and y are both 0. (From GLM_GTX_compatibility) + + template GLM_FUNC_DECL bool isfinite(genType const& x); //!< \brief Test whether or not a scalar or each vector component is a finite value. (From GLM_GTX_compatibility) + template GLM_FUNC_DECL vec<1, bool, Q> isfinite(const vec<1, T, Q>& x); //!< \brief Test whether or not a scalar or each vector component is a finite value. (From GLM_GTX_compatibility) + template GLM_FUNC_DECL vec<2, bool, Q> isfinite(const vec<2, T, Q>& x); //!< \brief Test whether or not a scalar or each vector component is a finite value. (From GLM_GTX_compatibility) + template GLM_FUNC_DECL vec<3, bool, Q> isfinite(const vec<3, T, Q>& x); //!< \brief Test whether or not a scalar or each vector component is a finite value. (From GLM_GTX_compatibility) + template GLM_FUNC_DECL vec<4, bool, Q> isfinite(const vec<4, T, Q>& x); //!< \brief Test whether or not a scalar or each vector component is a finite value. (From GLM_GTX_compatibility) + + typedef bool bool1; //!< \brief boolean type with 1 component. (From GLM_GTX_compatibility extension) + typedef vec<2, bool, highp> bool2; //!< \brief boolean type with 2 components. (From GLM_GTX_compatibility extension) + typedef vec<3, bool, highp> bool3; //!< \brief boolean type with 3 components. 
(From GLM_GTX_compatibility extension) + typedef vec<4, bool, highp> bool4; //!< \brief boolean type with 4 components. (From GLM_GTX_compatibility extension) + + typedef bool bool1x1; //!< \brief boolean matrix with 1 x 1 component. (From GLM_GTX_compatibility extension) + typedef mat<2, 2, bool, highp> bool2x2; //!< \brief boolean matrix with 2 x 2 components. (From GLM_GTX_compatibility extension) + typedef mat<2, 3, bool, highp> bool2x3; //!< \brief boolean matrix with 2 x 3 components. (From GLM_GTX_compatibility extension) + typedef mat<2, 4, bool, highp> bool2x4; //!< \brief boolean matrix with 2 x 4 components. (From GLM_GTX_compatibility extension) + typedef mat<3, 2, bool, highp> bool3x2; //!< \brief boolean matrix with 3 x 2 components. (From GLM_GTX_compatibility extension) + typedef mat<3, 3, bool, highp> bool3x3; //!< \brief boolean matrix with 3 x 3 components. (From GLM_GTX_compatibility extension) + typedef mat<3, 4, bool, highp> bool3x4; //!< \brief boolean matrix with 3 x 4 components. (From GLM_GTX_compatibility extension) + typedef mat<4, 2, bool, highp> bool4x2; //!< \brief boolean matrix with 4 x 2 components. (From GLM_GTX_compatibility extension) + typedef mat<4, 3, bool, highp> bool4x3; //!< \brief boolean matrix with 4 x 3 components. (From GLM_GTX_compatibility extension) + typedef mat<4, 4, bool, highp> bool4x4; //!< \brief boolean matrix with 4 x 4 components. (From GLM_GTX_compatibility extension) + + typedef int int1; //!< \brief integer vector with 1 component. (From GLM_GTX_compatibility extension) + typedef vec<2, int, highp> int2; //!< \brief integer vector with 2 components. (From GLM_GTX_compatibility extension) + typedef vec<3, int, highp> int3; //!< \brief integer vector with 3 components. (From GLM_GTX_compatibility extension) + typedef vec<4, int, highp> int4; //!< \brief integer vector with 4 components. (From GLM_GTX_compatibility extension) + + typedef int int1x1; //!< \brief integer matrix with 1 component. (From GLM_GTX_compatibility extension) + typedef mat<2, 2, int, highp> int2x2; //!< \brief integer matrix with 2 x 2 components. (From GLM_GTX_compatibility extension) + typedef mat<2, 3, int, highp> int2x3; //!< \brief integer matrix with 2 x 3 components. (From GLM_GTX_compatibility extension) + typedef mat<2, 4, int, highp> int2x4; //!< \brief integer matrix with 2 x 4 components. (From GLM_GTX_compatibility extension) + typedef mat<3, 2, int, highp> int3x2; //!< \brief integer matrix with 3 x 2 components. (From GLM_GTX_compatibility extension) + typedef mat<3, 3, int, highp> int3x3; //!< \brief integer matrix with 3 x 3 components. (From GLM_GTX_compatibility extension) + typedef mat<3, 4, int, highp> int3x4; //!< \brief integer matrix with 3 x 4 components. (From GLM_GTX_compatibility extension) + typedef mat<4, 2, int, highp> int4x2; //!< \brief integer matrix with 4 x 2 components. (From GLM_GTX_compatibility extension) + typedef mat<4, 3, int, highp> int4x3; //!< \brief integer matrix with 4 x 3 components. (From GLM_GTX_compatibility extension) + typedef mat<4, 4, int, highp> int4x4; //!< \brief integer matrix with 4 x 4 components. (From GLM_GTX_compatibility extension) + + typedef float float1; //!< \brief single-qualifier floating-point vector with 1 component. (From GLM_GTX_compatibility extension) + typedef vec<2, float, highp> float2; //!< \brief single-qualifier floating-point vector with 2 components. 
(From GLM_GTX_compatibility extension) + typedef vec<3, float, highp> float3; //!< \brief single-qualifier floating-point vector with 3 components. (From GLM_GTX_compatibility extension) + typedef vec<4, float, highp> float4; //!< \brief single-qualifier floating-point vector with 4 components. (From GLM_GTX_compatibility extension) + + typedef float float1x1; //!< \brief single-qualifier floating-point matrix with 1 component. (From GLM_GTX_compatibility extension) + typedef mat<2, 2, float, highp> float2x2; //!< \brief single-qualifier floating-point matrix with 2 x 2 components. (From GLM_GTX_compatibility extension) + typedef mat<2, 3, float, highp> float2x3; //!< \brief single-qualifier floating-point matrix with 2 x 3 components. (From GLM_GTX_compatibility extension) + typedef mat<2, 4, float, highp> float2x4; //!< \brief single-qualifier floating-point matrix with 2 x 4 components. (From GLM_GTX_compatibility extension) + typedef mat<3, 2, float, highp> float3x2; //!< \brief single-qualifier floating-point matrix with 3 x 2 components. (From GLM_GTX_compatibility extension) + typedef mat<3, 3, float, highp> float3x3; //!< \brief single-qualifier floating-point matrix with 3 x 3 components. (From GLM_GTX_compatibility extension) + typedef mat<3, 4, float, highp> float3x4; //!< \brief single-qualifier floating-point matrix with 3 x 4 components. (From GLM_GTX_compatibility extension) + typedef mat<4, 2, float, highp> float4x2; //!< \brief single-qualifier floating-point matrix with 4 x 2 components. (From GLM_GTX_compatibility extension) + typedef mat<4, 3, float, highp> float4x3; //!< \brief single-qualifier floating-point matrix with 4 x 3 components. (From GLM_GTX_compatibility extension) + typedef mat<4, 4, float, highp> float4x4; //!< \brief single-qualifier floating-point matrix with 4 x 4 components. (From GLM_GTX_compatibility extension) + + typedef double double1; //!< \brief double-qualifier floating-point vector with 1 component. (From GLM_GTX_compatibility extension) + typedef vec<2, double, highp> double2; //!< \brief double-qualifier floating-point vector with 2 components. (From GLM_GTX_compatibility extension) + typedef vec<3, double, highp> double3; //!< \brief double-qualifier floating-point vector with 3 components. (From GLM_GTX_compatibility extension) + typedef vec<4, double, highp> double4; //!< \brief double-qualifier floating-point vector with 4 components. (From GLM_GTX_compatibility extension) + + typedef double double1x1; //!< \brief double-qualifier floating-point matrix with 1 component. (From GLM_GTX_compatibility extension) + typedef mat<2, 2, double, highp> double2x2; //!< \brief double-qualifier floating-point matrix with 2 x 2 components. (From GLM_GTX_compatibility extension) + typedef mat<2, 3, double, highp> double2x3; //!< \brief double-qualifier floating-point matrix with 2 x 3 components. (From GLM_GTX_compatibility extension) + typedef mat<2, 4, double, highp> double2x4; //!< \brief double-qualifier floating-point matrix with 2 x 4 components. (From GLM_GTX_compatibility extension) + typedef mat<3, 2, double, highp> double3x2; //!< \brief double-qualifier floating-point matrix with 3 x 2 components. (From GLM_GTX_compatibility extension) + typedef mat<3, 3, double, highp> double3x3; //!< \brief double-qualifier floating-point matrix with 3 x 3 components. (From GLM_GTX_compatibility extension) + typedef mat<3, 4, double, highp> double3x4; //!< \brief double-qualifier floating-point matrix with 3 x 4 components. 
(From GLM_GTX_compatibility extension) + typedef mat<4, 2, double, highp> double4x2; //!< \brief double-qualifier floating-point matrix with 4 x 2 components. (From GLM_GTX_compatibility extension) + typedef mat<4, 3, double, highp> double4x3; //!< \brief double-qualifier floating-point matrix with 4 x 3 components. (From GLM_GTX_compatibility extension) + typedef mat<4, 4, double, highp> double4x4; //!< \brief double-qualifier floating-point matrix with 4 x 4 components. (From GLM_GTX_compatibility extension) + + /// @} +}//namespace glm + +#include "compatibility.inl" diff --git a/MacroLibX/third_party/glm/gtx/compatibility.inl b/MacroLibX/third_party/glm/gtx/compatibility.inl new file mode 100644 index 0000000..1d49496 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/compatibility.inl @@ -0,0 +1,62 @@ +#include + +namespace glm +{ + // isfinite + template + GLM_FUNC_QUALIFIER bool isfinite( + genType const& x) + { +# if GLM_HAS_CXX11_STL + return std::isfinite(x) != 0; +# elif GLM_COMPILER & GLM_COMPILER_VC + return _finite(x) != 0; +# elif GLM_COMPILER & GLM_COMPILER_GCC && GLM_PLATFORM & GLM_PLATFORM_ANDROID + return _isfinite(x) != 0; +# else + if (std::numeric_limits::is_integer || std::denorm_absent == std::numeric_limits::has_denorm) + return std::numeric_limits::min() <= x && std::numeric_limits::max() >= x; + else + return -std::numeric_limits::max() <= x && std::numeric_limits::max() >= x; +# endif + } + + template + GLM_FUNC_QUALIFIER vec<1, bool, Q> isfinite( + vec<1, T, Q> const& x) + { + return vec<1, bool, Q>( + isfinite(x.x)); + } + + template + GLM_FUNC_QUALIFIER vec<2, bool, Q> isfinite( + vec<2, T, Q> const& x) + { + return vec<2, bool, Q>( + isfinite(x.x), + isfinite(x.y)); + } + + template + GLM_FUNC_QUALIFIER vec<3, bool, Q> isfinite( + vec<3, T, Q> const& x) + { + return vec<3, bool, Q>( + isfinite(x.x), + isfinite(x.y), + isfinite(x.z)); + } + + template + GLM_FUNC_QUALIFIER vec<4, bool, Q> isfinite( + vec<4, T, Q> const& x) + { + return vec<4, bool, Q>( + isfinite(x.x), + isfinite(x.y), + isfinite(x.z), + isfinite(x.w)); + } + +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/component_wise.hpp b/MacroLibX/third_party/glm/gtx/component_wise.hpp new file mode 100644 index 0000000..34a2b0a --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/component_wise.hpp @@ -0,0 +1,69 @@ +/// @ref gtx_component_wise +/// @file glm/gtx/component_wise.hpp +/// @date 2007-05-21 / 2011-06-07 +/// @author Christophe Riccio +/// +/// @see core (dependence) +/// +/// @defgroup gtx_component_wise GLM_GTX_component_wise +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Operations between components of a type + +#pragma once + +// Dependencies +#include "../detail/setup.hpp" +#include "../detail/qualifier.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_component_wise is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_component_wise extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_component_wise + /// @{ + + /// Convert an integer vector to a normalized float vector. + /// If the parameter value type is already a floating qualifier type, the value is passed through. 
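[Editor's note: a minimal sketch of the GTX_compatibility helpers and HLSL-style typedefs that the preceding file defines, not part of this patch.]

// Illustrative sketch: Cg/HLSL-flavoured names layered over GLM.
#define GLM_ENABLE_EXPERIMENTAL // experimental extension, see the pragma above
#include <glm/glm.hpp>
#include <glm/gtx/compatibility.hpp>

int main()
{
    glm::float3 const a(0.0f, 0.0f, 0.0f);  // HLSL-style alias for vec<3, float, highp>
    glm::float3 const b(2.0f, 4.0f, 6.0f);

    glm::float3 const mid = glm::lerp(a, b, 0.5f); // (1, 2, 3)
    glm::float3 const sat = glm::saturate(b);      // clamped to (1, 1, 1)
    bool const ok = glm::isfinite(glm::atan2(1.0f, 1.0f)); // pi/4 is finite

    return (ok && mid.y == 2.0f && sat.z == 1.0f) ? 0 : 1;
}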
+ /// @see gtx_component_wise + template + GLM_FUNC_DECL vec compNormalize(vec const& v); + + /// Convert a normalized float vector to an integer vector. + /// If the parameter value type is already a floating qualifier type, the value is passed through. + /// @see gtx_component_wise + template + GLM_FUNC_DECL vec compScale(vec const& v); + + /// Add all vector components together. + /// @see gtx_component_wise + template + GLM_FUNC_DECL typename genType::value_type compAdd(genType const& v); + + /// Multiply all vector components together. + /// @see gtx_component_wise + template + GLM_FUNC_DECL typename genType::value_type compMul(genType const& v); + + /// Find the minimum value between single vector components. + /// @see gtx_component_wise + template + GLM_FUNC_DECL typename genType::value_type compMin(genType const& v); + + /// Find the maximum value between single vector components. + /// @see gtx_component_wise + template + GLM_FUNC_DECL typename genType::value_type compMax(genType const& v); + + /// @} +}//namespace glm + +#include "component_wise.inl" diff --git a/MacroLibX/third_party/glm/gtx/component_wise.inl b/MacroLibX/third_party/glm/gtx/component_wise.inl new file mode 100644 index 0000000..cbbc7d4 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/component_wise.inl @@ -0,0 +1,127 @@ +/// @ref gtx_component_wise + +#include + +namespace glm{ +namespace detail +{ + template + struct compute_compNormalize + {}; + + template + struct compute_compNormalize + { + GLM_FUNC_QUALIFIER static vec call(vec const& v) + { + floatType const Min = static_cast(std::numeric_limits::min()); + floatType const Max = static_cast(std::numeric_limits::max()); + return (vec(v) - Min) / (Max - Min) * static_cast(2) - static_cast(1); + } + }; + + template + struct compute_compNormalize + { + GLM_FUNC_QUALIFIER static vec call(vec const& v) + { + return vec(v) / static_cast(std::numeric_limits::max()); + } + }; + + template + struct compute_compNormalize + { + GLM_FUNC_QUALIFIER static vec call(vec const& v) + { + return v; + } + }; + + template + struct compute_compScale + {}; + + template + struct compute_compScale + { + GLM_FUNC_QUALIFIER static vec call(vec const& v) + { + floatType const Max = static_cast(std::numeric_limits::max()) + static_cast(0.5); + vec const Scaled(v * Max); + vec const Result(Scaled - static_cast(0.5)); + return Result; + } + }; + + template + struct compute_compScale + { + GLM_FUNC_QUALIFIER static vec call(vec const& v) + { + return vec(vec(v) * static_cast(std::numeric_limits::max())); + } + }; + + template + struct compute_compScale + { + GLM_FUNC_QUALIFIER static vec call(vec const& v) + { + return v; + } + }; +}//namespace detail + + template + GLM_FUNC_QUALIFIER vec compNormalize(vec const& v) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'compNormalize' accepts only floating-point types for 'floatType' template parameter"); + + return detail::compute_compNormalize::is_integer, std::numeric_limits::is_signed>::call(v); + } + + template + GLM_FUNC_QUALIFIER vec compScale(vec const& v) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'compScale' accepts only floating-point types for 'floatType' template parameter"); + + return detail::compute_compScale::is_integer, std::numeric_limits::is_signed>::call(v); + } + + template + GLM_FUNC_QUALIFIER T compAdd(vec const& v) + { + T Result(0); + for(length_t i = 0, n = v.length(); i < n; ++i) + Result += v[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER T compMul(vec const& v) + { + T 
Result(1); + for(length_t i = 0, n = v.length(); i < n; ++i) + Result *= v[i]; + return Result; + } + + template + GLM_FUNC_QUALIFIER T compMin(vec const& v) + { + T Result(v[0]); + for(length_t i = 1, n = v.length(); i < n; ++i) + Result = min(Result, v[i]); + return Result; + } + + template + GLM_FUNC_QUALIFIER T compMax(vec const& v) + { + T Result(v[0]); + for(length_t i = 1, n = v.length(); i < n; ++i) + Result = max(Result, v[i]); + return Result; + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/dual_quaternion.hpp b/MacroLibX/third_party/glm/gtx/dual_quaternion.hpp new file mode 100644 index 0000000..6a51ab7 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/dual_quaternion.hpp @@ -0,0 +1,274 @@ +/// @ref gtx_dual_quaternion +/// @file glm/gtx/dual_quaternion.hpp +/// @author Maksim Vorobiev (msomeone@gmail.com) +/// +/// @see core (dependence) +/// @see gtc_constants (dependence) +/// @see gtc_quaternion (dependence) +/// +/// @defgroup gtx_dual_quaternion GLM_GTX_dual_quaternion +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Defines a templated dual-quaternion type and several dual-quaternion operations. + +#pragma once + +// Dependency: +#include "../glm.hpp" +#include "../gtc/constants.hpp" +#include "../gtc/quaternion.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_dual_quaternion is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_dual_quaternion extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_dual_quaternion + /// @{ + + template + struct tdualquat + { + // -- Implementation detail -- + + typedef T value_type; + typedef qua part_type; + + // -- Data -- + + qua real, dual; + + // -- Component accesses -- + + typedef length_t length_type; + /// Return the count of components of a dual quaternion + GLM_FUNC_DECL static GLM_CONSTEXPR length_type length(){return 2;} + + GLM_FUNC_DECL part_type & operator[](length_type i); + GLM_FUNC_DECL part_type const& operator[](length_type i) const; + + // -- Implicit basic constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR tdualquat() GLM_DEFAULT; + GLM_FUNC_DECL GLM_CONSTEXPR tdualquat(tdualquat const& d) GLM_DEFAULT; + template + GLM_FUNC_DECL GLM_CONSTEXPR tdualquat(tdualquat const& d); + + // -- Explicit basic constructors -- + + GLM_FUNC_DECL GLM_CONSTEXPR tdualquat(qua const& real); + GLM_FUNC_DECL GLM_CONSTEXPR tdualquat(qua const& orientation, vec<3, T, Q> const& translation); + GLM_FUNC_DECL GLM_CONSTEXPR tdualquat(qua const& real, qua const& dual); + + // -- Conversion constructors -- + + template + GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT tdualquat(tdualquat const& q); + + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR tdualquat(mat<2, 4, T, Q> const& holder_mat); + GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR tdualquat(mat<3, 4, T, Q> const& aug_mat); + + // -- Unary arithmetic operators -- + + GLM_FUNC_DECL tdualquat & operator=(tdualquat const& m) GLM_DEFAULT; + + template + GLM_FUNC_DECL tdualquat & operator=(tdualquat const& m); + template + GLM_FUNC_DECL tdualquat & operator*=(U s); + template + GLM_FUNC_DECL tdualquat & operator/=(U s); + }; + + // -- Unary bit operators -- + + template + GLM_FUNC_DECL tdualquat operator+(tdualquat const& q); + + template + GLM_FUNC_DECL tdualquat operator-(tdualquat const& q); + + // -- Binary 
operators -- + + template + GLM_FUNC_DECL tdualquat operator+(tdualquat const& q, tdualquat const& p); + + template + GLM_FUNC_DECL tdualquat operator*(tdualquat const& q, tdualquat const& p); + + template + GLM_FUNC_DECL vec<3, T, Q> operator*(tdualquat const& q, vec<3, T, Q> const& v); + + template + GLM_FUNC_DECL vec<3, T, Q> operator*(vec<3, T, Q> const& v, tdualquat const& q); + + template + GLM_FUNC_DECL vec<4, T, Q> operator*(tdualquat const& q, vec<4, T, Q> const& v); + + template + GLM_FUNC_DECL vec<4, T, Q> operator*(vec<4, T, Q> const& v, tdualquat const& q); + + template + GLM_FUNC_DECL tdualquat operator*(tdualquat const& q, T const& s); + + template + GLM_FUNC_DECL tdualquat operator*(T const& s, tdualquat const& q); + + template + GLM_FUNC_DECL tdualquat operator/(tdualquat const& q, T const& s); + + // -- Boolean operators -- + + template + GLM_FUNC_DECL bool operator==(tdualquat const& q1, tdualquat const& q2); + + template + GLM_FUNC_DECL bool operator!=(tdualquat const& q1, tdualquat const& q2); + + /// Creates an identity dual quaternion. + /// + /// @see gtx_dual_quaternion + template + GLM_FUNC_DECL tdualquat dual_quat_identity(); + + /// Returns the normalized quaternion. + /// + /// @see gtx_dual_quaternion + template + GLM_FUNC_DECL tdualquat normalize(tdualquat const& q); + + /// Returns the linear interpolation of two dual quaternion. + /// + /// @see gtc_dual_quaternion + template + GLM_FUNC_DECL tdualquat lerp(tdualquat const& x, tdualquat const& y, T const& a); + + /// Returns the q inverse. + /// + /// @see gtx_dual_quaternion + template + GLM_FUNC_DECL tdualquat inverse(tdualquat const& q); + + /// Converts a quaternion to a 2 * 4 matrix. + /// + /// @see gtx_dual_quaternion + template + GLM_FUNC_DECL mat<2, 4, T, Q> mat2x4_cast(tdualquat const& x); + + /// Converts a quaternion to a 3 * 4 matrix. + /// + /// @see gtx_dual_quaternion + template + GLM_FUNC_DECL mat<3, 4, T, Q> mat3x4_cast(tdualquat const& x); + + /// Converts a 2 * 4 matrix (matrix which holds real and dual parts) to a quaternion. + /// + /// @see gtx_dual_quaternion + template + GLM_FUNC_DECL tdualquat dualquat_cast(mat<2, 4, T, Q> const& x); + + /// Converts a 3 * 4 matrix (augmented matrix rotation + translation) to a quaternion. + /// + /// @see gtx_dual_quaternion + template + GLM_FUNC_DECL tdualquat dualquat_cast(mat<3, 4, T, Q> const& x); + + + /// Dual-quaternion of low single-qualifier floating-point numbers. + /// + /// @see gtx_dual_quaternion + typedef tdualquat lowp_dualquat; + + /// Dual-quaternion of medium single-qualifier floating-point numbers. + /// + /// @see gtx_dual_quaternion + typedef tdualquat mediump_dualquat; + + /// Dual-quaternion of high single-qualifier floating-point numbers. + /// + /// @see gtx_dual_quaternion + typedef tdualquat highp_dualquat; + + + /// Dual-quaternion of low single-qualifier floating-point numbers. + /// + /// @see gtx_dual_quaternion + typedef tdualquat lowp_fdualquat; + + /// Dual-quaternion of medium single-qualifier floating-point numbers. + /// + /// @see gtx_dual_quaternion + typedef tdualquat mediump_fdualquat; + + /// Dual-quaternion of high single-qualifier floating-point numbers. + /// + /// @see gtx_dual_quaternion + typedef tdualquat highp_fdualquat; + + + /// Dual-quaternion of low double-qualifier floating-point numbers. + /// + /// @see gtx_dual_quaternion + typedef tdualquat lowp_ddualquat; + + /// Dual-quaternion of medium double-qualifier floating-point numbers. 
+ /// + /// @see gtx_dual_quaternion + typedef tdualquat mediump_ddualquat; + + /// Dual-quaternion of high double-qualifier floating-point numbers. + /// + /// @see gtx_dual_quaternion + typedef tdualquat highp_ddualquat; + + +#if(!defined(GLM_PRECISION_HIGHP_FLOAT) && !defined(GLM_PRECISION_MEDIUMP_FLOAT) && !defined(GLM_PRECISION_LOWP_FLOAT)) + /// Dual-quaternion of floating-point numbers. + /// + /// @see gtx_dual_quaternion + typedef highp_fdualquat dualquat; + + /// Dual-quaternion of single-qualifier floating-point numbers. + /// + /// @see gtx_dual_quaternion + typedef highp_fdualquat fdualquat; +#elif(defined(GLM_PRECISION_HIGHP_FLOAT) && !defined(GLM_PRECISION_MEDIUMP_FLOAT) && !defined(GLM_PRECISION_LOWP_FLOAT)) + typedef highp_fdualquat dualquat; + typedef highp_fdualquat fdualquat; +#elif(!defined(GLM_PRECISION_HIGHP_FLOAT) && defined(GLM_PRECISION_MEDIUMP_FLOAT) && !defined(GLM_PRECISION_LOWP_FLOAT)) + typedef mediump_fdualquat dualquat; + typedef mediump_fdualquat fdualquat; +#elif(!defined(GLM_PRECISION_HIGHP_FLOAT) && !defined(GLM_PRECISION_MEDIUMP_FLOAT) && defined(GLM_PRECISION_LOWP_FLOAT)) + typedef lowp_fdualquat dualquat; + typedef lowp_fdualquat fdualquat; +#else +# error "GLM error: multiple default precision requested for single-precision floating-point types" +#endif + + +#if(!defined(GLM_PRECISION_HIGHP_DOUBLE) && !defined(GLM_PRECISION_MEDIUMP_DOUBLE) && !defined(GLM_PRECISION_LOWP_DOUBLE)) + /// Dual-quaternion of default double-qualifier floating-point numbers. + /// + /// @see gtx_dual_quaternion + typedef highp_ddualquat ddualquat; +#elif(defined(GLM_PRECISION_HIGHP_DOUBLE) && !defined(GLM_PRECISION_MEDIUMP_DOUBLE) && !defined(GLM_PRECISION_LOWP_DOUBLE)) + typedef highp_ddualquat ddualquat; +#elif(!defined(GLM_PRECISION_HIGHP_DOUBLE) && defined(GLM_PRECISION_MEDIUMP_DOUBLE) && !defined(GLM_PRECISION_LOWP_DOUBLE)) + typedef mediump_ddualquat ddualquat; +#elif(!defined(GLM_PRECISION_HIGHP_DOUBLE) && !defined(GLM_PRECISION_MEDIUMP_DOUBLE) && defined(GLM_PRECISION_LOWP_DOUBLE)) + typedef lowp_ddualquat ddualquat; +#else +# error "GLM error: Multiple default precision requested for double-precision floating-point types" +#endif + + /// @} +} //namespace glm + +#include "dual_quaternion.inl" diff --git a/MacroLibX/third_party/glm/gtx/dual_quaternion.inl b/MacroLibX/third_party/glm/gtx/dual_quaternion.inl new file mode 100644 index 0000000..fad07ea --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/dual_quaternion.inl @@ -0,0 +1,352 @@ +/// @ref gtx_dual_quaternion + +#include "../geometric.hpp" +#include + +namespace glm +{ + // -- Component accesses -- + + template + GLM_FUNC_QUALIFIER typename tdualquat::part_type & tdualquat::operator[](typename tdualquat::length_type i) + { + assert(i >= 0 && i < this->length()); + return (&real)[i]; + } + + template + GLM_FUNC_QUALIFIER typename tdualquat::part_type const& tdualquat::operator[](typename tdualquat::length_type i) const + { + assert(i >= 0 && i < this->length()); + return (&real)[i]; + } + + // -- Implicit basic constructors -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat() +# if GLM_CONFIG_DEFAULTED_FUNCTIONS != GLM_DISABLE + : real(qua()) + , dual(qua(0, 0, 0, 0)) +# endif + {} + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(tdualquat const& d) + : real(d.real) + , dual(d.dual) + {} +# endif + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(tdualquat const& d) + : real(d.real) + , 
dual(d.dual) + {} + + // -- Explicit basic constructors -- + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(qua const& r) + : real(r), dual(qua(0, 0, 0, 0)) + {} + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(qua const& q, vec<3, T, Q> const& p) + : real(q), dual( + T(-0.5) * ( p.x*q.x + p.y*q.y + p.z*q.z), + T(+0.5) * ( p.x*q.w + p.y*q.z - p.z*q.y), + T(+0.5) * (-p.x*q.z + p.y*q.w + p.z*q.x), + T(+0.5) * ( p.x*q.y - p.y*q.x + p.z*q.w)) + {} + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(qua const& r, qua const& d) + : real(r), dual(d) + {} + + // -- Conversion constructors -- + + template + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(tdualquat const& q) + : real(q.real) + , dual(q.dual) + {} + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(mat<2, 4, T, Q> const& m) + { + *this = dualquat_cast(m); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(mat<3, 4, T, Q> const& m) + { + *this = dualquat_cast(m); + } + + // -- Unary arithmetic operators -- + +# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE + template + GLM_FUNC_QUALIFIER tdualquat & tdualquat::operator=(tdualquat const& q) + { + this->real = q.real; + this->dual = q.dual; + return *this; + } +# endif + + template + template + GLM_FUNC_QUALIFIER tdualquat & tdualquat::operator=(tdualquat const& q) + { + this->real = q.real; + this->dual = q.dual; + return *this; + } + + template + template + GLM_FUNC_QUALIFIER tdualquat & tdualquat::operator*=(U s) + { + this->real *= static_cast(s); + this->dual *= static_cast(s); + return *this; + } + + template + template + GLM_FUNC_QUALIFIER tdualquat & tdualquat::operator/=(U s) + { + this->real /= static_cast(s); + this->dual /= static_cast(s); + return *this; + } + + // -- Unary bit operators -- + + template + GLM_FUNC_QUALIFIER tdualquat operator+(tdualquat const& q) + { + return q; + } + + template + GLM_FUNC_QUALIFIER tdualquat operator-(tdualquat const& q) + { + return tdualquat(-q.real, -q.dual); + } + + // -- Binary operators -- + + template + GLM_FUNC_QUALIFIER tdualquat operator+(tdualquat const& q, tdualquat const& p) + { + return tdualquat(q.real + p.real,q.dual + p.dual); + } + + template + GLM_FUNC_QUALIFIER tdualquat operator*(tdualquat const& p, tdualquat const& o) + { + return tdualquat(p.real * o.real,p.real * o.dual + p.dual * o.real); + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> operator*(tdualquat const& q, vec<3, T, Q> const& v) + { + vec<3, T, Q> const real_v3(q.real.x,q.real.y,q.real.z); + vec<3, T, Q> const dual_v3(q.dual.x,q.dual.y,q.dual.z); + return (cross(real_v3, cross(real_v3,v) + v * q.real.w + dual_v3) + dual_v3 * q.real.w - real_v3 * q.dual.w) * T(2) + v; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> operator*(vec<3, T, Q> const& v, tdualquat const& q) + { + return glm::inverse(q) * v; + } + + template + GLM_FUNC_QUALIFIER vec<4, T, Q> operator*(tdualquat const& q, vec<4, T, Q> const& v) + { + return vec<4, T, Q>(q * vec<3, T, Q>(v), v.w); + } + + template + GLM_FUNC_QUALIFIER vec<4, T, Q> operator*(vec<4, T, Q> const& v, tdualquat const& q) + { + return glm::inverse(q) * v; + } + + template + GLM_FUNC_QUALIFIER tdualquat operator*(tdualquat const& q, T const& s) + { + return tdualquat(q.real * s, q.dual * s); + } + + template + GLM_FUNC_QUALIFIER tdualquat operator*(T const& s, tdualquat const& q) + { + return q * s; + } + + template + GLM_FUNC_QUALIFIER tdualquat operator/(tdualquat const& q, T const& s) + { + return 
tdualquat(q.real / s, q.dual / s); + } + + // -- Boolean operators -- + + template + GLM_FUNC_QUALIFIER bool operator==(tdualquat const& q1, tdualquat const& q2) + { + return (q1.real == q2.real) && (q1.dual == q2.dual); + } + + template + GLM_FUNC_QUALIFIER bool operator!=(tdualquat const& q1, tdualquat const& q2) + { + return (q1.real != q2.real) || (q1.dual != q2.dual); + } + + // -- Operations -- + + template + GLM_FUNC_QUALIFIER tdualquat dual_quat_identity() + { + return tdualquat( + qua(static_cast(1), static_cast(0), static_cast(0), static_cast(0)), + qua(static_cast(0), static_cast(0), static_cast(0), static_cast(0))); + } + + template + GLM_FUNC_QUALIFIER tdualquat normalize(tdualquat const& q) + { + return q / length(q.real); + } + + template + GLM_FUNC_QUALIFIER tdualquat lerp(tdualquat const& x, tdualquat const& y, T const& a) + { + // Dual Quaternion Linear blend aka DLB: + // Lerp is only defined in [0, 1] + assert(a >= static_cast(0)); + assert(a <= static_cast(1)); + T const k = dot(x.real,y.real) < static_cast(0) ? -a : a; + T const one(1); + return tdualquat(x * (one - a) + y * k); + } + + template + GLM_FUNC_QUALIFIER tdualquat inverse(tdualquat const& q) + { + const glm::qua real = conjugate(q.real); + const glm::qua dual = conjugate(q.dual); + return tdualquat(real, dual + (real * (-2.0f * dot(real,dual)))); + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> mat2x4_cast(tdualquat const& x) + { + return mat<2, 4, T, Q>( x[0].x, x[0].y, x[0].z, x[0].w, x[1].x, x[1].y, x[1].z, x[1].w ); + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> mat3x4_cast(tdualquat const& x) + { + qua r = x.real / length2(x.real); + + qua const rr(r.w * x.real.w, r.x * x.real.x, r.y * x.real.y, r.z * x.real.z); + r *= static_cast(2); + + T const xy = r.x * x.real.y; + T const xz = r.x * x.real.z; + T const yz = r.y * x.real.z; + T const wx = r.w * x.real.x; + T const wy = r.w * x.real.y; + T const wz = r.w * x.real.z; + + vec<4, T, Q> const a( + rr.w + rr.x - rr.y - rr.z, + xy - wz, + xz + wy, + -(x.dual.w * r.x - x.dual.x * r.w + x.dual.y * r.z - x.dual.z * r.y)); + + vec<4, T, Q> const b( + xy + wz, + rr.w + rr.y - rr.x - rr.z, + yz - wx, + -(x.dual.w * r.y - x.dual.x * r.z - x.dual.y * r.w + x.dual.z * r.x)); + + vec<4, T, Q> const c( + xz - wy, + yz + wx, + rr.w + rr.z - rr.x - rr.y, + -(x.dual.w * r.z + x.dual.x * r.y - x.dual.y * r.x - x.dual.z * r.w)); + + return mat<3, 4, T, Q>(a, b, c); + } + + template + GLM_FUNC_QUALIFIER tdualquat dualquat_cast(mat<2, 4, T, Q> const& x) + { + return tdualquat( + qua( x[0].w, x[0].x, x[0].y, x[0].z ), + qua( x[1].w, x[1].x, x[1].y, x[1].z )); + } + + template + GLM_FUNC_QUALIFIER tdualquat dualquat_cast(mat<3, 4, T, Q> const& x) + { + qua real; + + T const trace = x[0].x + x[1].y + x[2].z; + if(trace > static_cast(0)) + { + T const r = sqrt(T(1) + trace); + T const invr = static_cast(0.5) / r; + real.w = static_cast(0.5) * r; + real.x = (x[2].y - x[1].z) * invr; + real.y = (x[0].z - x[2].x) * invr; + real.z = (x[1].x - x[0].y) * invr; + } + else if(x[0].x > x[1].y && x[0].x > x[2].z) + { + T const r = sqrt(T(1) + x[0].x - x[1].y - x[2].z); + T const invr = static_cast(0.5) / r; + real.x = static_cast(0.5)*r; + real.y = (x[1].x + x[0].y) * invr; + real.z = (x[0].z + x[2].x) * invr; + real.w = (x[2].y - x[1].z) * invr; + } + else if(x[1].y > x[2].z) + { + T const r = sqrt(T(1) + x[1].y - x[0].x - x[2].z); + T const invr = static_cast(0.5) / r; + real.x = (x[1].x + x[0].y) * invr; + real.y = static_cast(0.5) * r; + real.z = (x[2].y + x[1].z) * 
invr; + real.w = (x[0].z - x[2].x) * invr; + } + else + { + T const r = sqrt(T(1) + x[2].z - x[0].x - x[1].y); + T const invr = static_cast(0.5) / r; + real.x = (x[0].z + x[2].x) * invr; + real.y = (x[2].y + x[1].z) * invr; + real.z = static_cast(0.5) * r; + real.w = (x[1].x - x[0].y) * invr; + } + + qua dual; + dual.x = static_cast(0.5) * ( x[0].w * real.w + x[1].w * real.z - x[2].w * real.y); + dual.y = static_cast(0.5) * (-x[0].w * real.z + x[1].w * real.w + x[2].w * real.x); + dual.z = static_cast(0.5) * ( x[0].w * real.y - x[1].w * real.x + x[2].w * real.w); + dual.w = -static_cast(0.5) * ( x[0].w * real.x + x[1].w * real.y + x[2].w * real.z); + return tdualquat(real, dual); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/easing.hpp b/MacroLibX/third_party/glm/gtx/easing.hpp new file mode 100644 index 0000000..57f3d61 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/easing.hpp @@ -0,0 +1,219 @@ +/// @ref gtx_easing +/// @file glm/gtx/easing.hpp +/// @author Robert Chisholm +/// +/// @see core (dependence) +/// +/// @defgroup gtx_easing GLM_GTX_easing +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Easing functions for animations and transitons +/// All functions take a parameter x in the range [0.0,1.0] +/// +/// Based on the AHEasing project of Warren Moore (https://github.com/warrenm/AHEasing) + +#pragma once + +// Dependency: +#include "../glm.hpp" +#include "../gtc/constants.hpp" +#include "../detail/qualifier.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_easing is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_easing extension included") +# endif +#endif + +namespace glm{ + /// @addtogroup gtx_easing + /// @{ + + /// Modelled after the line y = x + /// @see gtx_easing + template + GLM_FUNC_DECL genType linearInterpolation(genType const & a); + + /// Modelled after the parabola y = x^2 + /// @see gtx_easing + template + GLM_FUNC_DECL genType quadraticEaseIn(genType const & a); + + /// Modelled after the parabola y = -x^2 + 2x + /// @see gtx_easing + template + GLM_FUNC_DECL genType quadraticEaseOut(genType const & a); + + /// Modelled after the piecewise quadratic + /// y = (1/2)((2x)^2) ; [0, 0.5) + /// y = -(1/2)((2x-1)*(2x-3) - 1) ; [0.5, 1] + /// @see gtx_easing + template + GLM_FUNC_DECL genType quadraticEaseInOut(genType const & a); + + /// Modelled after the cubic y = x^3 + template + GLM_FUNC_DECL genType cubicEaseIn(genType const & a); + + /// Modelled after the cubic y = (x - 1)^3 + 1 + /// @see gtx_easing + template + GLM_FUNC_DECL genType cubicEaseOut(genType const & a); + + /// Modelled after the piecewise cubic + /// y = (1/2)((2x)^3) ; [0, 0.5) + /// y = (1/2)((2x-2)^3 + 2) ; [0.5, 1] + /// @see gtx_easing + template + GLM_FUNC_DECL genType cubicEaseInOut(genType const & a); + + /// Modelled after the quartic x^4 + /// @see gtx_easing + template + GLM_FUNC_DECL genType quarticEaseIn(genType const & a); + + /// Modelled after the quartic y = 1 - (x - 1)^4 + /// @see gtx_easing + template + GLM_FUNC_DECL genType quarticEaseOut(genType const & a); + + /// Modelled after the piecewise quartic + /// y = (1/2)((2x)^4) ; [0, 0.5) + /// y = -(1/2)((2x-2)^4 - 2) ; [0.5, 1] + /// @see gtx_easing + template + GLM_FUNC_DECL genType quarticEaseInOut(genType const & a); + + /// Modelled after the 
quintic y = x^5 + /// @see gtx_easing + template + GLM_FUNC_DECL genType quinticEaseIn(genType const & a); + + /// Modelled after the quintic y = (x - 1)^5 + 1 + /// @see gtx_easing + template + GLM_FUNC_DECL genType quinticEaseOut(genType const & a); + + /// Modelled after the piecewise quintic + /// y = (1/2)((2x)^5) ; [0, 0.5) + /// y = (1/2)((2x-2)^5 + 2) ; [0.5, 1] + /// @see gtx_easing + template + GLM_FUNC_DECL genType quinticEaseInOut(genType const & a); + + /// Modelled after quarter-cycle of sine wave + /// @see gtx_easing + template + GLM_FUNC_DECL genType sineEaseIn(genType const & a); + + /// Modelled after quarter-cycle of sine wave (different phase) + /// @see gtx_easing + template + GLM_FUNC_DECL genType sineEaseOut(genType const & a); + + /// Modelled after half sine wave + /// @see gtx_easing + template + GLM_FUNC_DECL genType sineEaseInOut(genType const & a); + + /// Modelled after shifted quadrant IV of unit circle + /// @see gtx_easing + template + GLM_FUNC_DECL genType circularEaseIn(genType const & a); + + /// Modelled after shifted quadrant II of unit circle + /// @see gtx_easing + template + GLM_FUNC_DECL genType circularEaseOut(genType const & a); + + /// Modelled after the piecewise circular function + /// y = (1/2)(1 - sqrt(1 - 4x^2)) ; [0, 0.5) + /// y = (1/2)(sqrt(-(2x - 3)*(2x - 1)) + 1) ; [0.5, 1] + /// @see gtx_easing + template + GLM_FUNC_DECL genType circularEaseInOut(genType const & a); + + /// Modelled after the exponential function y = 2^(10(x - 1)) + /// @see gtx_easing + template + GLM_FUNC_DECL genType exponentialEaseIn(genType const & a); + + /// Modelled after the exponential function y = -2^(-10x) + 1 + /// @see gtx_easing + template + GLM_FUNC_DECL genType exponentialEaseOut(genType const & a); + + /// Modelled after the piecewise exponential + /// y = (1/2)2^(10(2x - 1)) ; [0,0.5) + /// y = -(1/2)*2^(-10(2x - 1))) + 1 ; [0.5,1] + /// @see gtx_easing + template + GLM_FUNC_DECL genType exponentialEaseInOut(genType const & a); + + /// Modelled after the damped sine wave y = sin(13pi/2*x)*pow(2, 10 * (x - 1)) + /// @see gtx_easing + template + GLM_FUNC_DECL genType elasticEaseIn(genType const & a); + + /// Modelled after the damped sine wave y = sin(-13pi/2*(x + 1))*pow(2, -10x) + 1 + /// @see gtx_easing + template + GLM_FUNC_DECL genType elasticEaseOut(genType const & a); + + /// Modelled after the piecewise exponentially-damped sine wave: + /// y = (1/2)*sin(13pi/2*(2*x))*pow(2, 10 * ((2*x) - 1)) ; [0,0.5) + /// y = (1/2)*(sin(-13pi/2*((2x-1)+1))*pow(2,-10(2*x-1)) + 2) ; [0.5, 1] + /// @see gtx_easing + template + GLM_FUNC_DECL genType elasticEaseInOut(genType const & a); + + /// @see gtx_easing + template + GLM_FUNC_DECL genType backEaseIn(genType const& a); + + /// @see gtx_easing + template + GLM_FUNC_DECL genType backEaseOut(genType const& a); + + /// @see gtx_easing + template + GLM_FUNC_DECL genType backEaseInOut(genType const& a); + + /// @param a parameter + /// @param o Optional overshoot modifier + /// @see gtx_easing + template + GLM_FUNC_DECL genType backEaseIn(genType const& a, genType const& o); + + /// @param a parameter + /// @param o Optional overshoot modifier + /// @see gtx_easing + template + GLM_FUNC_DECL genType backEaseOut(genType const& a, genType const& o); + + /// @param a parameter + /// @param o Optional overshoot modifier + /// @see gtx_easing + template + GLM_FUNC_DECL genType backEaseInOut(genType const& a, genType const& o); + + /// @see gtx_easing + template + GLM_FUNC_DECL genType bounceEaseIn(genType const& a); 
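[Editor's note] A usage sketch, not part of the vendored GLM sources: each easing function in this header maps a normalized time a in [0, 1] to an eased progress value, which can then drive glm::mix. The tween() helper below is a hypothetical name introduced only for this illustration.

#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/easing.hpp>

// Interpolate between two positions with a cubic ease-in/out profile.
glm::vec3 tween(glm::vec3 const& from, glm::vec3 const& to, float t)
{
	// t must stay inside [0, 1]; the .inl asserts on that range.
	float const k = glm::cubicEaseInOut(t);
	return glm::mix(from, to, k);
}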
+ + /// @see gtx_easing + template + GLM_FUNC_DECL genType bounceEaseOut(genType const& a); + + /// @see gtx_easing + template + GLM_FUNC_DECL genType bounceEaseInOut(genType const& a); + + /// @} +}//namespace glm + +#include "easing.inl" diff --git a/MacroLibX/third_party/glm/gtx/easing.inl b/MacroLibX/third_party/glm/gtx/easing.inl new file mode 100644 index 0000000..4b7d05b --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/easing.inl @@ -0,0 +1,436 @@ +/// @ref gtx_easing + +#include + +namespace glm{ + + template + GLM_FUNC_QUALIFIER genType linearInterpolation(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + return a; + } + + template + GLM_FUNC_QUALIFIER genType quadraticEaseIn(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + return a * a; + } + + template + GLM_FUNC_QUALIFIER genType quadraticEaseOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + return -(a * (a - static_cast(2))); + } + + template + GLM_FUNC_QUALIFIER genType quadraticEaseInOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + if(a < static_cast(0.5)) + { + return static_cast(2) * a * a; + } + else + { + return (-static_cast(2) * a * a) + (4 * a) - one(); + } + } + + template + GLM_FUNC_QUALIFIER genType cubicEaseIn(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + return a * a * a; + } + + template + GLM_FUNC_QUALIFIER genType cubicEaseOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + genType const f = a - one(); + return f * f * f + one(); + } + + template + GLM_FUNC_QUALIFIER genType cubicEaseInOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + if (a < static_cast(0.5)) + { + return static_cast(4) * a * a * a; + } + else + { + genType const f = ((static_cast(2) * a) - static_cast(2)); + return static_cast(0.5) * f * f * f + one(); + } + } + + template + GLM_FUNC_QUALIFIER genType quarticEaseIn(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + return a * a * a * a; + } + + template + GLM_FUNC_QUALIFIER genType quarticEaseOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + genType const f = (a - one()); + return f * f * f * (one() - a) + one(); + } + + template + GLM_FUNC_QUALIFIER genType quarticEaseInOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + if(a < static_cast(0.5)) + { + return static_cast(8) * a * a * a * a; + } + else + { + genType const f = (a - one()); + return -static_cast(8) * f * f * f * f + one(); + } + } + + template + GLM_FUNC_QUALIFIER genType quinticEaseIn(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + return a * a * a * a * a; + } + + template + GLM_FUNC_QUALIFIER genType quinticEaseOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + genType const f = (a - one()); + return f * f * f * f * f + one(); + } + + template + GLM_FUNC_QUALIFIER genType quinticEaseInOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + if(a < static_cast(0.5)) + { + return static_cast(16) * a * a * a * a * a; + } + else + { + genType const f = ((static_cast(2) * a) - 
static_cast(2)); + return static_cast(0.5) * f * f * f * f * f + one(); + } + } + + template + GLM_FUNC_QUALIFIER genType sineEaseIn(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + return sin((a - one()) * half_pi()) + one(); + } + + template + GLM_FUNC_QUALIFIER genType sineEaseOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + return sin(a * half_pi()); + } + + template + GLM_FUNC_QUALIFIER genType sineEaseInOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + return static_cast(0.5) * (one() - cos(a * pi())); + } + + template + GLM_FUNC_QUALIFIER genType circularEaseIn(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + return one() - sqrt(one() - (a * a)); + } + + template + GLM_FUNC_QUALIFIER genType circularEaseOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + return sqrt((static_cast(2) - a) * a); + } + + template + GLM_FUNC_QUALIFIER genType circularEaseInOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + if(a < static_cast(0.5)) + { + return static_cast(0.5) * (one() - std::sqrt(one() - static_cast(4) * (a * a))); + } + else + { + return static_cast(0.5) * (std::sqrt(-((static_cast(2) * a) - static_cast(3)) * ((static_cast(2) * a) - one())) + one()); + } + } + + template + GLM_FUNC_QUALIFIER genType exponentialEaseIn(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + if(a <= zero()) + return a; + else + { + genType const Complementary = a - one(); + genType const Two = static_cast(2); + + return glm::pow(Two, Complementary * static_cast(10)); + } + } + + template + GLM_FUNC_QUALIFIER genType exponentialEaseOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + if(a >= one()) + return a; + else + { + return one() - glm::pow(static_cast(2), -static_cast(10) * a); + } + } + + template + GLM_FUNC_QUALIFIER genType exponentialEaseInOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + if(a < static_cast(0.5)) + return static_cast(0.5) * glm::pow(static_cast(2), (static_cast(20) * a) - static_cast(10)); + else + return -static_cast(0.5) * glm::pow(static_cast(2), (-static_cast(20) * a) + static_cast(10)) + one(); + } + + template + GLM_FUNC_QUALIFIER genType elasticEaseIn(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + return std::sin(static_cast(13) * half_pi() * a) * glm::pow(static_cast(2), static_cast(10) * (a - one())); + } + + template + GLM_FUNC_QUALIFIER genType elasticEaseOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + return std::sin(-static_cast(13) * half_pi() * (a + one())) * glm::pow(static_cast(2), -static_cast(10) * a) + one(); + } + + template + GLM_FUNC_QUALIFIER genType elasticEaseInOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + if(a < static_cast(0.5)) + return static_cast(0.5) * std::sin(static_cast(13) * half_pi() * (static_cast(2) * a)) * glm::pow(static_cast(2), static_cast(10) * ((static_cast(2) * a) - one())); + else + return static_cast(0.5) * (std::sin(-static_cast(13) * half_pi() * ((static_cast(2) * a - one()) + one())) * glm::pow(static_cast(2), 
-static_cast(10) * (static_cast(2) * a - one())) + static_cast(2)); + } + + template + GLM_FUNC_QUALIFIER genType backEaseIn(genType const& a, genType const& o) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + genType z = ((o + one()) * a) - o; + return (a * a * z); + } + + template + GLM_FUNC_QUALIFIER genType backEaseOut(genType const& a, genType const& o) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + genType n = a - one(); + genType z = ((o + one()) * n) + o; + return (n * n * z) + one(); + } + + template + GLM_FUNC_QUALIFIER genType backEaseInOut(genType const& a, genType const& o) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + genType s = o * static_cast(1.525); + genType x = static_cast(0.5); + genType n = a / static_cast(0.5); + + if (n < static_cast(1)) + { + genType z = ((s + static_cast(1)) * n) - s; + genType m = n * n * z; + return x * m; + } + else + { + n -= static_cast(2); + genType z = ((s + static_cast(1)) * n) + s; + genType m = (n*n*z) + static_cast(2); + return x * m; + } + } + + template + GLM_FUNC_QUALIFIER genType backEaseIn(genType const& a) + { + return backEaseIn(a, static_cast(1.70158)); + } + + template + GLM_FUNC_QUALIFIER genType backEaseOut(genType const& a) + { + return backEaseOut(a, static_cast(1.70158)); + } + + template + GLM_FUNC_QUALIFIER genType backEaseInOut(genType const& a) + { + return backEaseInOut(a, static_cast(1.70158)); + } + + template + GLM_FUNC_QUALIFIER genType bounceEaseOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + if(a < static_cast(4.0 / 11.0)) + { + return (static_cast(121) * a * a) / static_cast(16); + } + else if(a < static_cast(8.0 / 11.0)) + { + return (static_cast(363.0 / 40.0) * a * a) - (static_cast(99.0 / 10.0) * a) + static_cast(17.0 / 5.0); + } + else if(a < static_cast(9.0 / 10.0)) + { + return (static_cast(4356.0 / 361.0) * a * a) - (static_cast(35442.0 / 1805.0) * a) + static_cast(16061.0 / 1805.0); + } + else + { + return (static_cast(54.0 / 5.0) * a * a) - (static_cast(513.0 / 25.0) * a) + static_cast(268.0 / 25.0); + } + } + + template + GLM_FUNC_QUALIFIER genType bounceEaseIn(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + return one() - bounceEaseOut(one() - a); + } + + template + GLM_FUNC_QUALIFIER genType bounceEaseInOut(genType const& a) + { + // Only defined in [0, 1] + assert(a >= zero()); + assert(a <= one()); + + if(a < static_cast(0.5)) + { + return static_cast(0.5) * (one() - bounceEaseOut(a * static_cast(2))); + } + else + { + return static_cast(0.5) * bounceEaseOut(a * static_cast(2) - one()) + static_cast(0.5); + } + } + +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/euler_angles.hpp b/MacroLibX/third_party/glm/gtx/euler_angles.hpp new file mode 100644 index 0000000..2723697 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/euler_angles.hpp @@ -0,0 +1,335 @@ +/// @ref gtx_euler_angles +/// @file glm/gtx/euler_angles.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_euler_angles GLM_GTX_euler_angles +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Build matrices from Euler angles. +/// +/// Extraction of Euler angles from rotation matrix. +/// Based on the original paper 2014 Mike Day - Extracting Euler Angles from a Rotation Matrix. 
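[Editor's note] A minimal round-trip sketch of this extension's two halves, using only functions declared below: build a rotation matrix from yaw/pitch/roll, then extract the angles back out of it. The function name euler_round_trip and the angle values are invented for illustration.

#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/euler_angles.hpp>

void euler_round_trip()
{
	float const yaw = 0.4f, pitch = 0.2f, roll = 0.1f;
	// Build the rotation as Y * X * Z ...
	glm::mat4 const R = glm::yawPitchRoll(yaw, pitch, roll);
	// ... then recover the same angles from the matrix.
	float t1 = 0.0f, t2 = 0.0f, t3 = 0.0f;
	glm::extractEulerAngleYXZ(R, t1, t2, t3);
	// t1/t2/t3 match yaw/pitch/roll up to wrap-around and the usual
	// gimbal-lock ambiguity of Euler angles.
}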
+ +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_euler_angles is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_euler_angles extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_euler_angles + /// @{ + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from an euler angle X. + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleX( + T const& angleX); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from an euler angle Y. + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleY( + T const& angleY); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from an euler angle Z. + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleZ( + T const& angleZ); + + /// Creates a 3D 4 * 4 homogeneous derived matrix from the rotation matrix about X-axis. + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> derivedEulerAngleX( + T const & angleX, T const & angularVelocityX); + + /// Creates a 3D 4 * 4 homogeneous derived matrix from the rotation matrix about Y-axis. + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> derivedEulerAngleY( + T const & angleY, T const & angularVelocityY); + + /// Creates a 3D 4 * 4 homogeneous derived matrix from the rotation matrix about Z-axis. + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> derivedEulerAngleZ( + T const & angleZ, T const & angularVelocityZ); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (X * Y). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleXY( + T const& angleX, + T const& angleY); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * X). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleYX( + T const& angleY, + T const& angleX); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (X * Z). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleXZ( + T const& angleX, + T const& angleZ); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Z * X). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleZX( + T const& angle, + T const& angleX); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * Z). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleYZ( + T const& angleY, + T const& angleZ); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Z * Y). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleZY( + T const& angleZ, + T const& angleY); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (X * Y * Z). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleXYZ( + T const& t1, + T const& t2, + T const& t3); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * X * Z). 
+ /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleYXZ( + T const& yaw, + T const& pitch, + T const& roll); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (X * Z * X). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleXZX( + T const & t1, + T const & t2, + T const & t3); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (X * Y * X). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleXYX( + T const & t1, + T const & t2, + T const & t3); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * X * Y). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleYXY( + T const & t1, + T const & t2, + T const & t3); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * Z * Y). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleYZY( + T const & t1, + T const & t2, + T const & t3); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Z * Y * Z). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleZYZ( + T const & t1, + T const & t2, + T const & t3); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Z * X * Z). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleZXZ( + T const & t1, + T const & t2, + T const & t3); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (X * Z * Y). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleXZY( + T const & t1, + T const & t2, + T const & t3); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * Z * X). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleYZX( + T const & t1, + T const & t2, + T const & t3); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Z * Y * X). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleZYX( + T const & t1, + T const & t2, + T const & t3); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Z * X * Y). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleZXY( + T const & t1, + T const & t2, + T const & t3); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * X * Z). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, defaultp> yawPitchRoll( + T const& yaw, + T const& pitch, + T const& roll); + + /// Creates a 2D 2 * 2 rotation matrix from an euler angle. + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<2, 2, T, defaultp> orientate2(T const& angle); + + /// Creates a 2D 4 * 4 homogeneous rotation matrix from an euler angle. + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<3, 3, T, defaultp> orientate3(T const& angle); + + /// Creates a 3D 3 * 3 rotation matrix from euler angles (Y * X * Z). + /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<3, 3, T, Q> orientate3(vec<3, T, Q> const& angles); + + /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * X * Z). 
+ /// @see gtx_euler_angles + template + GLM_FUNC_DECL mat<4, 4, T, Q> orientate4(vec<3, T, Q> const& angles); + + /// Extracts the (X * Y * Z) Euler angles from the rotation matrix M + /// @see gtx_euler_angles + template + GLM_FUNC_DECL void extractEulerAngleXYZ(mat<4, 4, T, defaultp> const& M, + T & t1, + T & t2, + T & t3); + + /// Extracts the (Y * X * Z) Euler angles from the rotation matrix M + /// @see gtx_euler_angles + template + GLM_FUNC_DECL void extractEulerAngleYXZ(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3); + + /// Extracts the (X * Z * X) Euler angles from the rotation matrix M + /// @see gtx_euler_angles + template + GLM_FUNC_DECL void extractEulerAngleXZX(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3); + + /// Extracts the (X * Y * X) Euler angles from the rotation matrix M + /// @see gtx_euler_angles + template + GLM_FUNC_DECL void extractEulerAngleXYX(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3); + + /// Extracts the (Y * X * Y) Euler angles from the rotation matrix M + /// @see gtx_euler_angles + template + GLM_FUNC_DECL void extractEulerAngleYXY(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3); + + /// Extracts the (Y * Z * Y) Euler angles from the rotation matrix M + /// @see gtx_euler_angles + template + GLM_FUNC_DECL void extractEulerAngleYZY(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3); + + /// Extracts the (Z * Y * Z) Euler angles from the rotation matrix M + /// @see gtx_euler_angles + template + GLM_FUNC_DECL void extractEulerAngleZYZ(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3); + + /// Extracts the (Z * X * Z) Euler angles from the rotation matrix M + /// @see gtx_euler_angles + template + GLM_FUNC_DECL void extractEulerAngleZXZ(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3); + + /// Extracts the (X * Z * Y) Euler angles from the rotation matrix M + /// @see gtx_euler_angles + template + GLM_FUNC_DECL void extractEulerAngleXZY(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3); + + /// Extracts the (Y * Z * X) Euler angles from the rotation matrix M + /// @see gtx_euler_angles + template + GLM_FUNC_DECL void extractEulerAngleYZX(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3); + + /// Extracts the (Z * Y * X) Euler angles from the rotation matrix M + /// @see gtx_euler_angles + template + GLM_FUNC_DECL void extractEulerAngleZYX(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3); + + /// Extracts the (Z * X * Y) Euler angles from the rotation matrix M + /// @see gtx_euler_angles + template + GLM_FUNC_DECL void extractEulerAngleZXY(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3); + + /// @} +}//namespace glm + +#include "euler_angles.inl" diff --git a/MacroLibX/third_party/glm/gtx/euler_angles.inl b/MacroLibX/third_party/glm/gtx/euler_angles.inl new file mode 100644 index 0000000..68c5012 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/euler_angles.inl @@ -0,0 +1,899 @@ +/// @ref gtx_euler_angles + +#include "compatibility.hpp" // glm::atan2 + +namespace glm +{ + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleX + ( + T const& angleX + ) + { + T cosX = glm::cos(angleX); + T sinX = glm::sin(angleX); + + return mat<4, 4, T, defaultp>( + T(1), T(0), T(0), T(0), + T(0), cosX, sinX, T(0), + T(0),-sinX, cosX, T(0), + T(0), T(0), T(0), T(1)); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleY + ( + T const& angleY + ) + { + T cosY = 
glm::cos(angleY); + T sinY = glm::sin(angleY); + + return mat<4, 4, T, defaultp>( + cosY, T(0), -sinY, T(0), + T(0), T(1), T(0), T(0), + sinY, T(0), cosY, T(0), + T(0), T(0), T(0), T(1)); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleZ + ( + T const& angleZ + ) + { + T cosZ = glm::cos(angleZ); + T sinZ = glm::sin(angleZ); + + return mat<4, 4, T, defaultp>( + cosZ, sinZ, T(0), T(0), + -sinZ, cosZ, T(0), T(0), + T(0), T(0), T(1), T(0), + T(0), T(0), T(0), T(1)); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> derivedEulerAngleX + ( + T const & angleX, + T const & angularVelocityX + ) + { + T cosX = glm::cos(angleX) * angularVelocityX; + T sinX = glm::sin(angleX) * angularVelocityX; + + return mat<4, 4, T, defaultp>( + T(0), T(0), T(0), T(0), + T(0),-sinX, cosX, T(0), + T(0),-cosX,-sinX, T(0), + T(0), T(0), T(0), T(0)); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> derivedEulerAngleY + ( + T const & angleY, + T const & angularVelocityY + ) + { + T cosY = glm::cos(angleY) * angularVelocityY; + T sinY = glm::sin(angleY) * angularVelocityY; + + return mat<4, 4, T, defaultp>( + -sinY, T(0), -cosY, T(0), + T(0), T(0), T(0), T(0), + cosY, T(0), -sinY, T(0), + T(0), T(0), T(0), T(0)); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> derivedEulerAngleZ + ( + T const & angleZ, + T const & angularVelocityZ + ) + { + T cosZ = glm::cos(angleZ) * angularVelocityZ; + T sinZ = glm::sin(angleZ) * angularVelocityZ; + + return mat<4, 4, T, defaultp>( + -sinZ, cosZ, T(0), T(0), + -cosZ, -sinZ, T(0), T(0), + T(0), T(0), T(0), T(0), + T(0), T(0), T(0), T(0)); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleXY + ( + T const& angleX, + T const& angleY + ) + { + T cosX = glm::cos(angleX); + T sinX = glm::sin(angleX); + T cosY = glm::cos(angleY); + T sinY = glm::sin(angleY); + + return mat<4, 4, T, defaultp>( + cosY, -sinX * -sinY, cosX * -sinY, T(0), + T(0), cosX, sinX, T(0), + sinY, -sinX * cosY, cosX * cosY, T(0), + T(0), T(0), T(0), T(1)); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleYX + ( + T const& angleY, + T const& angleX + ) + { + T cosX = glm::cos(angleX); + T sinX = glm::sin(angleX); + T cosY = glm::cos(angleY); + T sinY = glm::sin(angleY); + + return mat<4, 4, T, defaultp>( + cosY, 0, -sinY, T(0), + sinY * sinX, cosX, cosY * sinX, T(0), + sinY * cosX, -sinX, cosY * cosX, T(0), + T(0), T(0), T(0), T(1)); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleXZ + ( + T const& angleX, + T const& angleZ + ) + { + return eulerAngleX(angleX) * eulerAngleZ(angleZ); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleZX + ( + T const& angleZ, + T const& angleX + ) + { + return eulerAngleZ(angleZ) * eulerAngleX(angleX); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleYZ + ( + T const& angleY, + T const& angleZ + ) + { + return eulerAngleY(angleY) * eulerAngleZ(angleZ); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleZY + ( + T const& angleZ, + T const& angleY + ) + { + return eulerAngleZ(angleZ) * eulerAngleY(angleY); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleXYZ + ( + T const& t1, + T const& t2, + T const& t3 + ) + { + T c1 = glm::cos(-t1); + T c2 = glm::cos(-t2); + T c3 = glm::cos(-t3); + T s1 = glm::sin(-t1); + T s2 = glm::sin(-t2); + T s3 = glm::sin(-t3); + + mat<4, 4, T, defaultp> Result; + Result[0][0] = c2 * c3; + Result[0][1] =-c1 * s3 + s1 * s2 * c3; + Result[0][2] = s1 * s3 + c1 * 
s2 * c3; + Result[0][3] = static_cast(0); + Result[1][0] = c2 * s3; + Result[1][1] = c1 * c3 + s1 * s2 * s3; + Result[1][2] =-s1 * c3 + c1 * s2 * s3; + Result[1][3] = static_cast(0); + Result[2][0] =-s2; + Result[2][1] = s1 * c2; + Result[2][2] = c1 * c2; + Result[2][3] = static_cast(0); + Result[3][0] = static_cast(0); + Result[3][1] = static_cast(0); + Result[3][2] = static_cast(0); + Result[3][3] = static_cast(1); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleYXZ + ( + T const& yaw, + T const& pitch, + T const& roll + ) + { + T tmp_ch = glm::cos(yaw); + T tmp_sh = glm::sin(yaw); + T tmp_cp = glm::cos(pitch); + T tmp_sp = glm::sin(pitch); + T tmp_cb = glm::cos(roll); + T tmp_sb = glm::sin(roll); + + mat<4, 4, T, defaultp> Result; + Result[0][0] = tmp_ch * tmp_cb + tmp_sh * tmp_sp * tmp_sb; + Result[0][1] = tmp_sb * tmp_cp; + Result[0][2] = -tmp_sh * tmp_cb + tmp_ch * tmp_sp * tmp_sb; + Result[0][3] = static_cast(0); + Result[1][0] = -tmp_ch * tmp_sb + tmp_sh * tmp_sp * tmp_cb; + Result[1][1] = tmp_cb * tmp_cp; + Result[1][2] = tmp_sb * tmp_sh + tmp_ch * tmp_sp * tmp_cb; + Result[1][3] = static_cast(0); + Result[2][0] = tmp_sh * tmp_cp; + Result[2][1] = -tmp_sp; + Result[2][2] = tmp_ch * tmp_cp; + Result[2][3] = static_cast(0); + Result[3][0] = static_cast(0); + Result[3][1] = static_cast(0); + Result[3][2] = static_cast(0); + Result[3][3] = static_cast(1); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleXZX + ( + T const & t1, + T const & t2, + T const & t3 + ) + { + T c1 = glm::cos(t1); + T s1 = glm::sin(t1); + T c2 = glm::cos(t2); + T s2 = glm::sin(t2); + T c3 = glm::cos(t3); + T s3 = glm::sin(t3); + + mat<4, 4, T, defaultp> Result; + Result[0][0] = c2; + Result[0][1] = c1 * s2; + Result[0][2] = s1 * s2; + Result[0][3] = static_cast(0); + Result[1][0] =-c3 * s2; + Result[1][1] = c1 * c2 * c3 - s1 * s3; + Result[1][2] = c1 * s3 + c2 * c3 * s1; + Result[1][3] = static_cast(0); + Result[2][0] = s2 * s3; + Result[2][1] =-c3 * s1 - c1 * c2 * s3; + Result[2][2] = c1 * c3 - c2 * s1 * s3; + Result[2][3] = static_cast(0); + Result[3][0] = static_cast(0); + Result[3][1] = static_cast(0); + Result[3][2] = static_cast(0); + Result[3][3] = static_cast(1); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleXYX + ( + T const & t1, + T const & t2, + T const & t3 + ) + { + T c1 = glm::cos(t1); + T s1 = glm::sin(t1); + T c2 = glm::cos(t2); + T s2 = glm::sin(t2); + T c3 = glm::cos(t3); + T s3 = glm::sin(t3); + + mat<4, 4, T, defaultp> Result; + Result[0][0] = c2; + Result[0][1] = s1 * s2; + Result[0][2] =-c1 * s2; + Result[0][3] = static_cast(0); + Result[1][0] = s2 * s3; + Result[1][1] = c1 * c3 - c2 * s1 * s3; + Result[1][2] = c3 * s1 + c1 * c2 * s3; + Result[1][3] = static_cast(0); + Result[2][0] = c3 * s2; + Result[2][1] =-c1 * s3 - c2 * c3 * s1; + Result[2][2] = c1 * c2 * c3 - s1 * s3; + Result[2][3] = static_cast(0); + Result[3][0] = static_cast(0); + Result[3][1] = static_cast(0); + Result[3][2] = static_cast(0); + Result[3][3] = static_cast(1); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleYXY + ( + T const & t1, + T const & t2, + T const & t3 + ) + { + T c1 = glm::cos(t1); + T s1 = glm::sin(t1); + T c2 = glm::cos(t2); + T s2 = glm::sin(t2); + T c3 = glm::cos(t3); + T s3 = glm::sin(t3); + + mat<4, 4, T, defaultp> Result; + Result[0][0] = c1 * c3 - c2 * s1 * s3; + Result[0][1] = s2* s3; + Result[0][2] =-c3 * s1 - c1 * c2 * s3; + 
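+		// Editor's note (not in the upstream file): GLM matrices are
+		// column-major, so Result[c][r] addresses column c, row r; each
+		// group of four assignments fills one column of the 4 x 4
+		// homogeneous rotation matrix.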
Result[0][3] = static_cast(0); + Result[1][0] = s1 * s2; + Result[1][1] = c2; + Result[1][2] = c1 * s2; + Result[1][3] = static_cast(0); + Result[2][0] = c1 * s3 + c2 * c3 * s1; + Result[2][1] =-c3 * s2; + Result[2][2] = c1 * c2 * c3 - s1 * s3; + Result[2][3] = static_cast(0); + Result[3][0] = static_cast(0); + Result[3][1] = static_cast(0); + Result[3][2] = static_cast(0); + Result[3][3] = static_cast(1); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleYZY + ( + T const & t1, + T const & t2, + T const & t3 + ) + { + T c1 = glm::cos(t1); + T s1 = glm::sin(t1); + T c2 = glm::cos(t2); + T s2 = glm::sin(t2); + T c3 = glm::cos(t3); + T s3 = glm::sin(t3); + + mat<4, 4, T, defaultp> Result; + Result[0][0] = c1 * c2 * c3 - s1 * s3; + Result[0][1] = c3 * s2; + Result[0][2] =-c1 * s3 - c2 * c3 * s1; + Result[0][3] = static_cast(0); + Result[1][0] =-c1 * s2; + Result[1][1] = c2; + Result[1][2] = s1 * s2; + Result[1][3] = static_cast(0); + Result[2][0] = c3 * s1 + c1 * c2 * s3; + Result[2][1] = s2 * s3; + Result[2][2] = c1 * c3 - c2 * s1 * s3; + Result[2][3] = static_cast(0); + Result[3][0] = static_cast(0); + Result[3][1] = static_cast(0); + Result[3][2] = static_cast(0); + Result[3][3] = static_cast(1); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleZYZ + ( + T const & t1, + T const & t2, + T const & t3 + ) + { + T c1 = glm::cos(t1); + T s1 = glm::sin(t1); + T c2 = glm::cos(t2); + T s2 = glm::sin(t2); + T c3 = glm::cos(t3); + T s3 = glm::sin(t3); + + mat<4, 4, T, defaultp> Result; + Result[0][0] = c1 * c2 * c3 - s1 * s3; + Result[0][1] = c1 * s3 + c2 * c3 * s1; + Result[0][2] =-c3 * s2; + Result[0][3] = static_cast(0); + Result[1][0] =-c3 * s1 - c1 * c2 * s3; + Result[1][1] = c1 * c3 - c2 * s1 * s3; + Result[1][2] = s2 * s3; + Result[1][3] = static_cast(0); + Result[2][0] = c1 * s2; + Result[2][1] = s1 * s2; + Result[2][2] = c2; + Result[2][3] = static_cast(0); + Result[3][0] = static_cast(0); + Result[3][1] = static_cast(0); + Result[3][2] = static_cast(0); + Result[3][3] = static_cast(1); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleZXZ + ( + T const & t1, + T const & t2, + T const & t3 + ) + { + T c1 = glm::cos(t1); + T s1 = glm::sin(t1); + T c2 = glm::cos(t2); + T s2 = glm::sin(t2); + T c3 = glm::cos(t3); + T s3 = glm::sin(t3); + + mat<4, 4, T, defaultp> Result; + Result[0][0] = c1 * c3 - c2 * s1 * s3; + Result[0][1] = c3 * s1 + c1 * c2 * s3; + Result[0][2] = s2 *s3; + Result[0][3] = static_cast(0); + Result[1][0] =-c1 * s3 - c2 * c3 * s1; + Result[1][1] = c1 * c2 * c3 - s1 * s3; + Result[1][2] = c3 * s2; + Result[1][3] = static_cast(0); + Result[2][0] = s1 * s2; + Result[2][1] =-c1 * s2; + Result[2][2] = c2; + Result[2][3] = static_cast(0); + Result[3][0] = static_cast(0); + Result[3][1] = static_cast(0); + Result[3][2] = static_cast(0); + Result[3][3] = static_cast(1); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleXZY + ( + T const & t1, + T const & t2, + T const & t3 + ) + { + T c1 = glm::cos(t1); + T s1 = glm::sin(t1); + T c2 = glm::cos(t2); + T s2 = glm::sin(t2); + T c3 = glm::cos(t3); + T s3 = glm::sin(t3); + + mat<4, 4, T, defaultp> Result; + Result[0][0] = c2 * c3; + Result[0][1] = s1 * s3 + c1 * c3 * s2; + Result[0][2] = c3 * s1 * s2 - c1 * s3; + Result[0][3] = static_cast(0); + Result[1][0] =-s2; + Result[1][1] = c1 * c2; + Result[1][2] = c2 * s1; + Result[1][3] = static_cast(0); + Result[2][0] = c2 * s3; + Result[2][1] = 
c1 * s2 * s3 - c3 * s1; + Result[2][2] = c1 * c3 + s1 * s2 *s3; + Result[2][3] = static_cast(0); + Result[3][0] = static_cast(0); + Result[3][1] = static_cast(0); + Result[3][2] = static_cast(0); + Result[3][3] = static_cast(1); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleYZX + ( + T const & t1, + T const & t2, + T const & t3 + ) + { + T c1 = glm::cos(t1); + T s1 = glm::sin(t1); + T c2 = glm::cos(t2); + T s2 = glm::sin(t2); + T c3 = glm::cos(t3); + T s3 = glm::sin(t3); + + mat<4, 4, T, defaultp> Result; + Result[0][0] = c1 * c2; + Result[0][1] = s2; + Result[0][2] =-c2 * s1; + Result[0][3] = static_cast(0); + Result[1][0] = s1 * s3 - c1 * c3 * s2; + Result[1][1] = c2 * c3; + Result[1][2] = c1 * s3 + c3 * s1 * s2; + Result[1][3] = static_cast(0); + Result[2][0] = c3 * s1 + c1 * s2 * s3; + Result[2][1] =-c2 * s3; + Result[2][2] = c1 * c3 - s1 * s2 * s3; + Result[2][3] = static_cast(0); + Result[3][0] = static_cast(0); + Result[3][1] = static_cast(0); + Result[3][2] = static_cast(0); + Result[3][3] = static_cast(1); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleZYX + ( + T const & t1, + T const & t2, + T const & t3 + ) + { + T c1 = glm::cos(t1); + T s1 = glm::sin(t1); + T c2 = glm::cos(t2); + T s2 = glm::sin(t2); + T c3 = glm::cos(t3); + T s3 = glm::sin(t3); + + mat<4, 4, T, defaultp> Result; + Result[0][0] = c1 * c2; + Result[0][1] = c2 * s1; + Result[0][2] =-s2; + Result[0][3] = static_cast(0); + Result[1][0] = c1 * s2 * s3 - c3 * s1; + Result[1][1] = c1 * c3 + s1 * s2 * s3; + Result[1][2] = c2 * s3; + Result[1][3] = static_cast(0); + Result[2][0] = s1 * s3 + c1 * c3 * s2; + Result[2][1] = c3 * s1 * s2 - c1 * s3; + Result[2][2] = c2 * c3; + Result[2][3] = static_cast(0); + Result[3][0] = static_cast(0); + Result[3][1] = static_cast(0); + Result[3][2] = static_cast(0); + Result[3][3] = static_cast(1); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleZXY + ( + T const & t1, + T const & t2, + T const & t3 + ) + { + T c1 = glm::cos(t1); + T s1 = glm::sin(t1); + T c2 = glm::cos(t2); + T s2 = glm::sin(t2); + T c3 = glm::cos(t3); + T s3 = glm::sin(t3); + + mat<4, 4, T, defaultp> Result; + Result[0][0] = c1 * c3 - s1 * s2 * s3; + Result[0][1] = c3 * s1 + c1 * s2 * s3; + Result[0][2] =-c2 * s3; + Result[0][3] = static_cast(0); + Result[1][0] =-c2 * s1; + Result[1][1] = c1 * c2; + Result[1][2] = s2; + Result[1][3] = static_cast(0); + Result[2][0] = c1 * s3 + c3 * s1 * s2; + Result[2][1] = s1 * s3 - c1 * c3 * s2; + Result[2][2] = c2 * c3; + Result[2][3] = static_cast(0); + Result[3][0] = static_cast(0); + Result[3][1] = static_cast(0); + Result[3][2] = static_cast(0); + Result[3][3] = static_cast(1); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> yawPitchRoll + ( + T const& yaw, + T const& pitch, + T const& roll + ) + { + T tmp_ch = glm::cos(yaw); + T tmp_sh = glm::sin(yaw); + T tmp_cp = glm::cos(pitch); + T tmp_sp = glm::sin(pitch); + T tmp_cb = glm::cos(roll); + T tmp_sb = glm::sin(roll); + + mat<4, 4, T, defaultp> Result; + Result[0][0] = tmp_ch * tmp_cb + tmp_sh * tmp_sp * tmp_sb; + Result[0][1] = tmp_sb * tmp_cp; + Result[0][2] = -tmp_sh * tmp_cb + tmp_ch * tmp_sp * tmp_sb; + Result[0][3] = static_cast(0); + Result[1][0] = -tmp_ch * tmp_sb + tmp_sh * tmp_sp * tmp_cb; + Result[1][1] = tmp_cb * tmp_cp; + Result[1][2] = tmp_sb * tmp_sh + tmp_ch * tmp_sp * tmp_cb; + Result[1][3] = static_cast(0); + Result[2][0] = tmp_sh * tmp_cp; + Result[2][1] = 
-tmp_sp; + Result[2][2] = tmp_ch * tmp_cp; + Result[2][3] = static_cast(0); + Result[3][0] = static_cast(0); + Result[3][1] = static_cast(0); + Result[3][2] = static_cast(0); + Result[3][3] = static_cast(1); + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, defaultp> orientate2 + ( + T const& angle + ) + { + T c = glm::cos(angle); + T s = glm::sin(angle); + + mat<2, 2, T, defaultp> Result; + Result[0][0] = c; + Result[0][1] = s; + Result[1][0] = -s; + Result[1][1] = c; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, defaultp> orientate3 + ( + T const& angle + ) + { + T c = glm::cos(angle); + T s = glm::sin(angle); + + mat<3, 3, T, defaultp> Result; + Result[0][0] = c; + Result[0][1] = s; + Result[0][2] = 0.0f; + Result[1][0] = -s; + Result[1][1] = c; + Result[1][2] = 0.0f; + Result[2][0] = 0.0f; + Result[2][1] = 0.0f; + Result[2][2] = 1.0f; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> orientate3 + ( + vec<3, T, Q> const& angles + ) + { + return mat<3, 3, T, Q>(yawPitchRoll(angles.z, angles.x, angles.y)); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> orientate4 + ( + vec<3, T, Q> const& angles + ) + { + return yawPitchRoll(angles.z, angles.x, angles.y); + } + + template + GLM_FUNC_DECL void extractEulerAngleXYZ(mat<4, 4, T, defaultp> const& M, + T & t1, + T & t2, + T & t3) + { + T T1 = glm::atan2(M[2][1], M[2][2]); + T C2 = glm::sqrt(M[0][0]*M[0][0] + M[1][0]*M[1][0]); + T T2 = glm::atan2(-M[2][0], C2); + T S1 = glm::sin(T1); + T C1 = glm::cos(T1); + T T3 = glm::atan2(S1*M[0][2] - C1*M[0][1], C1*M[1][1] - S1*M[1][2 ]); + t1 = -T1; + t2 = -T2; + t3 = -T3; + } + + template + GLM_FUNC_QUALIFIER void extractEulerAngleYXZ(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3) + { + T T1 = glm::atan2(M[2][0], M[2][2]); + T C2 = glm::sqrt(M[0][1]*M[0][1] + M[1][1]*M[1][1]); + T T2 = glm::atan2(-M[2][1], C2); + T S1 = glm::sin(T1); + T C1 = glm::cos(T1); + T T3 = glm::atan2(S1*M[1][2] - C1*M[1][0], C1*M[0][0] - S1*M[0][2]); + t1 = T1; + t2 = T2; + t3 = T3; + } + + template + GLM_FUNC_QUALIFIER void extractEulerAngleXZX(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3) + { + T T1 = glm::atan2(M[0][2], M[0][1]); + T S2 = glm::sqrt(M[1][0]*M[1][0] + M[2][0]*M[2][0]); + T T2 = glm::atan2(S2, M[0][0]); + T S1 = glm::sin(T1); + T C1 = glm::cos(T1); + T T3 = glm::atan2(C1*M[1][2] - S1*M[1][1], C1*M[2][2] - S1*M[2][1]); + t1 = T1; + t2 = T2; + t3 = T3; + } + + template + GLM_FUNC_QUALIFIER void extractEulerAngleXYX(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3) + { + T T1 = glm::atan2(M[0][1], -M[0][2]); + T S2 = glm::sqrt(M[1][0]*M[1][0] + M[2][0]*M[2][0]); + T T2 = glm::atan2(S2, M[0][0]); + T S1 = glm::sin(T1); + T C1 = glm::cos(T1); + T T3 = glm::atan2(-C1*M[2][1] - S1*M[2][2], C1*M[1][1] + S1*M[1][2]); + t1 = T1; + t2 = T2; + t3 = T3; + } + + template + GLM_FUNC_QUALIFIER void extractEulerAngleYXY(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3) + { + T T1 = glm::atan2(M[1][0], M[1][2]); + T S2 = glm::sqrt(M[0][1]*M[0][1] + M[2][1]*M[2][1]); + T T2 = glm::atan2(S2, M[1][1]); + T S1 = glm::sin(T1); + T C1 = glm::cos(T1); + T T3 = glm::atan2(C1*M[2][0] - S1*M[2][2], C1*M[0][0] - S1*M[0][2]); + t1 = T1; + t2 = T2; + t3 = T3; + } + + template + GLM_FUNC_QUALIFIER void extractEulerAngleYZY(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3) + { + T T1 = glm::atan2(M[1][2], -M[1][0]); + T S2 = glm::sqrt(M[0][1]*M[0][1] + M[2][1]*M[2][1]); + T T2 = glm::atan2(S2, 
M[1][1]); + T S1 = glm::sin(T1); + T C1 = glm::cos(T1); + T T3 = glm::atan2(-S1*M[0][0] - C1*M[0][2], S1*M[2][0] + C1*M[2][2]); + t1 = T1; + t2 = T2; + t3 = T3; + } + + template + GLM_FUNC_QUALIFIER void extractEulerAngleZYZ(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3) + { + T T1 = glm::atan2(M[2][1], M[2][0]); + T S2 = glm::sqrt(M[0][2]*M[0][2] + M[1][2]*M[1][2]); + T T2 = glm::atan2(S2, M[2][2]); + T S1 = glm::sin(T1); + T C1 = glm::cos(T1); + T T3 = glm::atan2(C1*M[0][1] - S1*M[0][0], C1*M[1][1] - S1*M[1][0]); + t1 = T1; + t2 = T2; + t3 = T3; + } + + template + GLM_FUNC_QUALIFIER void extractEulerAngleZXZ(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3) + { + T T1 = glm::atan2(M[2][0], -M[2][1]); + T S2 = glm::sqrt(M[0][2]*M[0][2] + M[1][2]*M[1][2]); + T T2 = glm::atan2(S2, M[2][2]); + T S1 = glm::sin(T1); + T C1 = glm::cos(T1); + T T3 = glm::atan2(-C1*M[1][0] - S1*M[1][1], C1*M[0][0] + S1*M[0][1]); + t1 = T1; + t2 = T2; + t3 = T3; + } + + template + GLM_FUNC_QUALIFIER void extractEulerAngleXZY(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3) + { + T T1 = glm::atan2(M[1][2], M[1][1]); + T C2 = glm::sqrt(M[0][0]*M[0][0] + M[2][0]*M[2][0]); + T T2 = glm::atan2(-M[1][0], C2); + T S1 = glm::sin(T1); + T C1 = glm::cos(T1); + T T3 = glm::atan2(S1*M[0][1] - C1*M[0][2], C1*M[2][2] - S1*M[2][1]); + t1 = T1; + t2 = T2; + t3 = T3; + } + + template + GLM_FUNC_QUALIFIER void extractEulerAngleYZX(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3) + { + T T1 = glm::atan2(-M[0][2], M[0][0]); + T C2 = glm::sqrt(M[1][1]*M[1][1] + M[2][1]*M[2][1]); + T T2 = glm::atan2(M[0][1], C2); + T S1 = glm::sin(T1); + T C1 = glm::cos(T1); + T T3 = glm::atan2(S1*M[1][0] + C1*M[1][2], S1*M[2][0] + C1*M[2][2]); + t1 = T1; + t2 = T2; + t3 = T3; + } + + template + GLM_FUNC_QUALIFIER void extractEulerAngleZYX(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3) + { + T T1 = glm::atan2(M[0][1], M[0][0]); + T C2 = glm::sqrt(M[1][2]*M[1][2] + M[2][2]*M[2][2]); + T T2 = glm::atan2(-M[0][2], C2); + T S1 = glm::sin(T1); + T C1 = glm::cos(T1); + T T3 = glm::atan2(S1*M[2][0] - C1*M[2][1], C1*M[1][1] - S1*M[1][0]); + t1 = T1; + t2 = T2; + t3 = T3; + } + + template + GLM_FUNC_QUALIFIER void extractEulerAngleZXY(mat<4, 4, T, defaultp> const & M, + T & t1, + T & t2, + T & t3) + { + T T1 = glm::atan2(-M[1][0], M[1][1]); + T C2 = glm::sqrt(M[0][2]*M[0][2] + M[2][2]*M[2][2]); + T T2 = glm::atan2(M[1][2], C2); + T S1 = glm::sin(T1); + T C1 = glm::cos(T1); + T T3 = glm::atan2(C1*M[2][0] + S1*M[2][1], C1*M[0][0] + S1*M[0][1]); + t1 = T1; + t2 = T2; + t3 = T3; + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/extend.hpp b/MacroLibX/third_party/glm/gtx/extend.hpp new file mode 100644 index 0000000..28b7c5c --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/extend.hpp @@ -0,0 +1,42 @@ +/// @ref gtx_extend +/// @file glm/gtx/extend.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_extend GLM_GTX_extend +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Extend a position from a source to a position at a defined length. + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_extend is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_extend extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_extend + /// @{ + + /// Extends of Length the Origin position using the (Source - Origin) direction. + /// @see gtx_extend + template + GLM_FUNC_DECL genType extend( + genType const& Origin, + genType const& Source, + typename genType::value_type const Length); + + /// @} +}//namespace glm + +#include "extend.inl" diff --git a/MacroLibX/third_party/glm/gtx/extend.inl b/MacroLibX/third_party/glm/gtx/extend.inl new file mode 100644 index 0000000..32128eb --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/extend.inl @@ -0,0 +1,48 @@ +/// @ref gtx_extend + +namespace glm +{ + template + GLM_FUNC_QUALIFIER genType extend + ( + genType const& Origin, + genType const& Source, + genType const& Distance + ) + { + return Origin + (Source - Origin) * Distance; + } + + template + GLM_FUNC_QUALIFIER vec<2, T, Q> extend + ( + vec<2, T, Q> const& Origin, + vec<2, T, Q> const& Source, + T const& Distance + ) + { + return Origin + (Source - Origin) * Distance; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> extend + ( + vec<3, T, Q> const& Origin, + vec<3, T, Q> const& Source, + T const& Distance + ) + { + return Origin + (Source - Origin) * Distance; + } + + template + GLM_FUNC_QUALIFIER vec<4, T, Q> extend + ( + vec<4, T, Q> const& Origin, + vec<4, T, Q> const& Source, + T const& Distance + ) + { + return Origin + (Source - Origin) * Distance; + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/extended_min_max.hpp b/MacroLibX/third_party/glm/gtx/extended_min_max.hpp new file mode 100644 index 0000000..ad23a91 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/extended_min_max.hpp @@ -0,0 +1,182 @@ +/// @ref gtx_extended_min_max +/// @file glm/gtx/extended_min_max.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_extended_min_max GLM_GTX_extented_min_max +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Min and max functions for 3 to 4 parameters. + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_extented_min_max is an experimental extension and may change in the future. 
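Stepping back to the euler_angles.inl hunk above: composing a rotation with eulerAngleYXZ and recovering the angles with extractEulerAngleYXZ should round-trip for angles in the principal range. A minimal sketch, assuming GLM is on the include path (GLM_GTX_euler_angles is experimental, hence the define):

// Round-trip sanity check for the euler-angle code above (not part of the diff).
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/euler_angles.hpp>
#include <cstdio>

int main()
{
    float yaw = 0.4f, pitch = -0.3f, roll = 1.1f; // radians
    // eulerAngleYXZ composes the Y, X and Z rotations in that order.
    glm::mat4 M = glm::eulerAngleYXZ(yaw, pitch, roll);

    float t1 = 0.f, t2 = 0.f, t3 = 0.f;
    glm::extractEulerAngleYXZ(M, t1, t2, t3); // should recover yaw, pitch, roll
    std::printf("in:  %f %f %f\nout: %f %f %f\n", yaw, pitch, roll, t1, t2, t3);
}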
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_extented_min_max extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_extended_min_max + /// @{ + + /// Return the minimum component-wise values of 3 inputs + /// @see gtx_extented_min_max + template + GLM_FUNC_DECL T min( + T const& x, + T const& y, + T const& z); + + /// Return the minimum component-wise values of 3 inputs + /// @see gtx_extented_min_max + template class C> + GLM_FUNC_DECL C min( + C const& x, + typename C::T const& y, + typename C::T const& z); + + /// Return the minimum component-wise values of 3 inputs + /// @see gtx_extented_min_max + template class C> + GLM_FUNC_DECL C min( + C const& x, + C const& y, + C const& z); + + /// Return the minimum component-wise values of 4 inputs + /// @see gtx_extented_min_max + template + GLM_FUNC_DECL T min( + T const& x, + T const& y, + T const& z, + T const& w); + + /// Return the minimum component-wise values of 4 inputs + /// @see gtx_extented_min_max + template class C> + GLM_FUNC_DECL C min( + C const& x, + typename C::T const& y, + typename C::T const& z, + typename C::T const& w); + + /// Return the minimum component-wise values of 4 inputs + /// @see gtx_extented_min_max + template class C> + GLM_FUNC_DECL C min( + C const& x, + C const& y, + C const& z, + C const& w); + + /// Return the maximum component-wise values of 3 inputs + /// @see gtx_extented_min_max + template + GLM_FUNC_DECL T max( + T const& x, + T const& y, + T const& z); + + /// Return the maximum component-wise values of 3 inputs + /// @see gtx_extented_min_max + template class C> + GLM_FUNC_DECL C max( + C const& x, + typename C::T const& y, + typename C::T const& z); + + /// Return the maximum component-wise values of 3 inputs + /// @see gtx_extented_min_max + template class C> + GLM_FUNC_DECL C max( + C const& x, + C const& y, + C const& z); + + /// Return the maximum component-wise values of 4 inputs + /// @see gtx_extented_min_max + template + GLM_FUNC_DECL T max( + T const& x, + T const& y, + T const& z, + T const& w); + + /// Return the maximum component-wise values of 4 inputs + /// @see gtx_extented_min_max + template class C> + GLM_FUNC_DECL C max( + C const& x, + typename C::T const& y, + typename C::T const& z, + typename C::T const& w); + + /// Return the maximum component-wise values of 4 inputs + /// @see gtx_extented_min_max + template class C> + GLM_FUNC_DECL C max( + C const& x, + C const& y, + C const& z, + C const& w); + + /// Returns y if y < x; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam genType Floating-point or integer; scalar or vector types. + /// + /// @see gtx_extented_min_max + template + GLM_FUNC_DECL genType fmin(genType x, genType y); + + /// Returns y if x < y; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam genType Floating-point; scalar or vector types. + /// + /// @see gtx_extented_min_max + /// @see std::fmax documentation + template + GLM_FUNC_DECL genType fmax(genType x, genType y); + + /// Returns min(max(x, minVal), maxVal) for each component in x. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam genType Floating-point scalar or vector types. 
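For reference, the 3- and 4-argument min/max declarations above boil down to nested two-argument calls, and the f-prefixed variants follow the C++11 fmin/fmax contract of ignoring a quiet NaN rather than propagating it. A standalone sketch of both behaviours, using only the standard library:

#include <algorithm>
#include <cmath>
#include <cstdio>

// Nested two-argument calls, as in the .inl that follows.
template<typename T>
T min3(T x, T y, T z) { return std::min(std::min(x, y), z); }

int main()
{
    std::printf("min3: %d\n", min3(3, 1, 2)); // 1

    // The f-variants ignore NaN instead of propagating it, so
    // fclamp(x, lo, hi) = fmin(fmax(x, lo), hi) maps a NaN x to lo.
    float nan = std::nanf("");
    std::printf("std::min : %f\n", std::min(nan, 1.f));                  // nan
    std::printf("std::fmin: %f\n", std::fmin(nan, 1.f));                 // 1.0
    std::printf("fclamp   : %f\n", std::fmin(std::fmax(nan, 0.f), 1.f)); // 0.0
}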
+ /// + /// @see gtx_extented_min_max + template + GLM_FUNC_DECL genType fclamp(genType x, genType minVal, genType maxVal); + + /// Returns min(max(x, minVal), maxVal) for each component in x. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see gtx_extented_min_max + template + GLM_FUNC_DECL vec fclamp(vec const& x, T minVal, T maxVal); + + /// Returns min(max(x, minVal), maxVal) for each component in x. If one of the two arguments is NaN, the value of the other argument is returned. + /// + /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see gtx_extented_min_max + template + GLM_FUNC_DECL vec fclamp(vec const& x, vec const& minVal, vec const& maxVal); + + + /// @} +}//namespace glm + +#include "extended_min_max.inl" diff --git a/MacroLibX/third_party/glm/gtx/extended_min_max.inl b/MacroLibX/third_party/glm/gtx/extended_min_max.inl new file mode 100644 index 0000000..e72d1cc --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/extended_min_max.inl @@ -0,0 +1,218 @@ +/// @ref gtx_extended_min_max + +namespace glm +{ + template + GLM_FUNC_QUALIFIER T min( + T const& x, + T const& y, + T const& z) + { + return glm::min(glm::min(x, y), z); + } + + template class C> + GLM_FUNC_QUALIFIER C min + ( + C const& x, + typename C::T const& y, + typename C::T const& z + ) + { + return glm::min(glm::min(x, y), z); + } + + template class C> + GLM_FUNC_QUALIFIER C min + ( + C const& x, + C const& y, + C const& z + ) + { + return glm::min(glm::min(x, y), z); + } + + template + GLM_FUNC_QUALIFIER T min + ( + T const& x, + T const& y, + T const& z, + T const& w + ) + { + return glm::min(glm::min(x, y), glm::min(z, w)); + } + + template class C> + GLM_FUNC_QUALIFIER C min + ( + C const& x, + typename C::T const& y, + typename C::T const& z, + typename C::T const& w + ) + { + return glm::min(glm::min(x, y), glm::min(z, w)); + } + + template class C> + GLM_FUNC_QUALIFIER C min + ( + C const& x, + C const& y, + C const& z, + C const& w + ) + { + return glm::min(glm::min(x, y), glm::min(z, w)); + } + + template + GLM_FUNC_QUALIFIER T max( + T const& x, + T const& y, + T const& z) + { + return glm::max(glm::max(x, y), z); + } + + template class C> + GLM_FUNC_QUALIFIER C max + ( + C const& x, + typename C::T const& y, + typename C::T const& z + ) + { + return glm::max(glm::max(x, y), z); + } + + template class C> + GLM_FUNC_QUALIFIER C max + ( + C const& x, + C const& y, + C const& z + ) + { + return glm::max(glm::max(x, y), z); + } + + template + GLM_FUNC_QUALIFIER T max + ( + T const& x, + T const& y, + T const& z, + T const& w + ) + { + return glm::max(glm::max(x, y), glm::max(z, w)); + } + + template class C> + GLM_FUNC_QUALIFIER C max + ( + C const& x, + typename C::T const& y, + typename C::T const& z, + typename C::T const& w + ) + { + return glm::max(glm::max(x, y), glm::max(z, w)); + } + + template class C> + GLM_FUNC_QUALIFIER C max + ( + C const& x, + C const& y, + C const& z, + C const& w + ) + { + return glm::max(glm::max(x, y), glm::max(z, w)); + } + + // fmin +# if GLM_HAS_CXX11_STL + using std::fmin; +# else + template + GLM_FUNC_QUALIFIER genType fmin(genType x, genType y) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmin' only 
accept floating-point input"); + + if (isnan(x)) + return y; + if (isnan(y)) + return x; + + return min(x, y); + } +# endif + + template + GLM_FUNC_QUALIFIER vec fmin(vec const& a, T b) + { + return detail::functor2::call(fmin, a, vec(b)); + } + + template + GLM_FUNC_QUALIFIER vec fmin(vec const& a, vec const& b) + { + return detail::functor2::call(fmin, a, b); + } + + // fmax +# if GLM_HAS_CXX11_STL + using std::fmax; +# else + template + GLM_FUNC_QUALIFIER genType fmax(genType x, genType y) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmax' only accept floating-point input"); + + if (isnan(x)) + return y; + if (isnan(y)) + return x; + + return max(x, y); + } +# endif + + template + GLM_FUNC_QUALIFIER vec fmax(vec const& a, T b) + { + return detail::functor2::call(fmax, a, vec(b)); + } + + template + GLM_FUNC_QUALIFIER vec fmax(vec const& a, vec const& b) + { + return detail::functor2::call(fmax, a, b); + } + + // fclamp + template + GLM_FUNC_QUALIFIER genType fclamp(genType x, genType minVal, genType maxVal) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fclamp' only accept floating-point or integer inputs"); + return fmin(fmax(x, minVal), maxVal); + } + + template + GLM_FUNC_QUALIFIER vec fclamp(vec const& x, T minVal, T maxVal) + { + return fmin(fmax(x, vec(minVal)), vec(maxVal)); + } + + template + GLM_FUNC_QUALIFIER vec fclamp(vec const& x, vec const& minVal, vec const& maxVal) + { + return fmin(fmax(x, minVal), maxVal); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/exterior_product.hpp b/MacroLibX/third_party/glm/gtx/exterior_product.hpp new file mode 100644 index 0000000..5522df7 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/exterior_product.hpp @@ -0,0 +1,45 @@ +/// @ref gtx_exterior_product +/// @file glm/gtx/exterior_product.hpp +/// +/// @see core (dependence) +/// @see gtx_exterior_product (dependence) +/// +/// @defgroup gtx_exterior_product GLM_GTX_exterior_product +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// @brief Allow to perform bit operations on integer values + +#pragma once + +// Dependencies +#include "../detail/setup.hpp" +#include "../detail/qualifier.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_exterior_product is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_exterior_product extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_exterior_product + /// @{ + + /// Returns the cross product of x and y. 
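The 2D cross declared here is the exterior product v.x * u.y - u.x * v.y, i.e. the signed area of the parallelogram spanned by the two vectors, so its sign gives a turn direction. A standalone orientation-test sketch built on the same expression:

#include <cstdio>

struct Vec2 { float x, y; };

// Same expression as compute_cross_vec2::call in the .inl below.
float cross2(Vec2 v, Vec2 u) { return v.x * u.y - u.x * v.y; }

// > 0: c lies left of the directed line a->b; < 0: right; == 0: collinear.
float orient(Vec2 a, Vec2 b, Vec2 c)
{
    return cross2({b.x - a.x, b.y - a.y}, {c.x - a.x, c.y - a.y});
}

int main()
{
    std::printf("%f\n", orient({0, 0}, {1, 0}, {0, 1})); // 1.0: left turn
}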
+ /// + /// @tparam T Floating-point scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see Exterior product + template + GLM_FUNC_DECL T cross(vec<2, T, Q> const& v, vec<2, T, Q> const& u); + + /// @} +} //namespace glm + +#include "exterior_product.inl" diff --git a/MacroLibX/third_party/glm/gtx/exterior_product.inl b/MacroLibX/third_party/glm/gtx/exterior_product.inl new file mode 100644 index 0000000..93661fd --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/exterior_product.inl @@ -0,0 +1,26 @@ +/// @ref gtx_exterior_product + +#include + +namespace glm { +namespace detail +{ + template + struct compute_cross_vec2 + { + GLM_FUNC_QUALIFIER static T call(vec<2, T, Q> const& v, vec<2, T, Q> const& u) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'cross' accepts only floating-point inputs"); + + return v.x * u.y - u.x * v.y; + } + }; +}//namespace detail + + template + GLM_FUNC_QUALIFIER T cross(vec<2, T, Q> const& x, vec<2, T, Q> const& y) + { + return detail::compute_cross_vec2::value>::call(x, y); + } +}//namespace glm + diff --git a/MacroLibX/third_party/glm/gtx/fast_exponential.hpp b/MacroLibX/third_party/glm/gtx/fast_exponential.hpp new file mode 100644 index 0000000..6fb7286 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/fast_exponential.hpp @@ -0,0 +1,95 @@ +/// @ref gtx_fast_exponential +/// @file glm/gtx/fast_exponential.hpp +/// +/// @see core (dependence) +/// @see gtx_half_float (dependence) +/// +/// @defgroup gtx_fast_exponential GLM_GTX_fast_exponential +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Fast but less accurate implementations of exponential based functions. + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_fast_exponential is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_fast_exponential extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_fast_exponential + /// @{ + + /// Faster than the common pow function but less accurate. + /// @see gtx_fast_exponential + template + GLM_FUNC_DECL genType fastPow(genType x, genType y); + + /// Faster than the common pow function but less accurate. + /// @see gtx_fast_exponential + template + GLM_FUNC_DECL vec fastPow(vec const& x, vec const& y); + + /// Faster than the common pow function but less accurate. + /// @see gtx_fast_exponential + template + GLM_FUNC_DECL genTypeT fastPow(genTypeT x, genTypeU y); + + /// Faster than the common pow function but less accurate. + /// @see gtx_fast_exponential + template + GLM_FUNC_DECL vec fastPow(vec const& x); + + /// Faster than the common exp function but less accurate. + /// @see gtx_fast_exponential + template + GLM_FUNC_DECL T fastExp(T x); + + /// Faster than the common exp function but less accurate. + /// @see gtx_fast_exponential + template + GLM_FUNC_DECL vec fastExp(vec const& x); + + /// Faster than the common log function but less accurate. + /// @see gtx_fast_exponential + template + GLM_FUNC_DECL T fastLog(T x); + + /// Faster than the common exp2 function but less accurate. + /// @see gtx_fast_exponential + template + GLM_FUNC_DECL vec fastLog(vec const& x); + + /// Faster than the common exp2 function but less accurate. 
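For context on the fastPow family declared here: the floating-exponent overload (implemented in the .inl below) relies on the identity x^y = exp(y * log(x)), which restricts it to x > 0, while the integer-exponent overload sidesteps that with repeated multiplication. A one-line standalone equivalent:

#include <cmath>
#include <cstdio>

// Same identity as the floating-exponent fastPow: x^y = exp(y * log(x)), x > 0.
float fast_pow(float x, float y) { return std::exp(y * std::log(x)); }

int main()
{
    std::printf("%f vs %f\n", fast_pow(2.f, 10.f), std::pow(2.f, 10.f)); // ~1024
}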
+ /// @see gtx_fast_exponential + template + GLM_FUNC_DECL T fastExp2(T x); + + /// Faster than the common exp2 function but less accurate. + /// @see gtx_fast_exponential + template + GLM_FUNC_DECL vec fastExp2(vec const& x); + + /// Faster than the common log2 function but less accurate. + /// @see gtx_fast_exponential + template + GLM_FUNC_DECL T fastLog2(T x); + + /// Faster than the common log2 function but less accurate. + /// @see gtx_fast_exponential + template + GLM_FUNC_DECL vec fastLog2(vec const& x); + + /// @} +}//namespace glm + +#include "fast_exponential.inl" diff --git a/MacroLibX/third_party/glm/gtx/fast_exponential.inl b/MacroLibX/third_party/glm/gtx/fast_exponential.inl new file mode 100644 index 0000000..f139e50 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/fast_exponential.inl @@ -0,0 +1,136 @@ +/// @ref gtx_fast_exponential + +namespace glm +{ + // fastPow: + template + GLM_FUNC_QUALIFIER genType fastPow(genType x, genType y) + { + return exp(y * log(x)); + } + + template + GLM_FUNC_QUALIFIER vec fastPow(vec const& x, vec const& y) + { + return exp(y * log(x)); + } + + template + GLM_FUNC_QUALIFIER T fastPow(T x, int y) + { + T f = static_cast(1); + for(int i = 0; i < y; ++i) + f *= x; + return f; + } + + template + GLM_FUNC_QUALIFIER vec fastPow(vec const& x, vec const& y) + { + vec Result; + for(length_t i = 0, n = x.length(); i < n; ++i) + Result[i] = fastPow(x[i], y[i]); + return Result; + } + + // fastExp + // Note: This function provides accurate results only for value between -1 and 1, else avoid it. + template + GLM_FUNC_QUALIFIER T fastExp(T x) + { + // This has a better looking and same performance in release mode than the following code. However, in debug mode it's slower. + // return 1.0f + x * (1.0f + x * 0.5f * (1.0f + x * 0.3333333333f * (1.0f + x * 0.25 * (1.0f + x * 0.2f)))); + T x2 = x * x; + T x3 = x2 * x; + T x4 = x3 * x; + T x5 = x4 * x; + return T(1) + x + (x2 * T(0.5)) + (x3 * T(0.1666666667)) + (x4 * T(0.041666667)) + (x5 * T(0.008333333333)); + } + /* // Try to handle all values of float... but often shower than std::exp, glm::floor and the loop kill the performance + GLM_FUNC_QUALIFIER float fastExp(float x) + { + const float e = 2.718281828f; + const float IntegerPart = floor(x); + const float FloatPart = x - IntegerPart; + float z = 1.f; + + for(int i = 0; i < int(IntegerPart); ++i) + z *= e; + + const float x2 = FloatPart * FloatPart; + const float x3 = x2 * FloatPart; + const float x4 = x3 * FloatPart; + const float x5 = x4 * FloatPart; + return z * (1.0f + FloatPart + (x2 * 0.5f) + (x3 * 0.1666666667f) + (x4 * 0.041666667f) + (x5 * 0.008333333333f)); + } + + // Increase accuracy on number bigger that 1 and smaller than -1 but it's not enough for high and negative numbers + GLM_FUNC_QUALIFIER float fastExp(float x) + { + // This has a better looking and same performance in release mode than the following code. However, in debug mode it's slower. 
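The degree-5 Taylor polynomial in fastExp above is, per its own comment, only dependable on roughly [-1, 1]. A standalone sketch with the same coefficients that makes the error growth visible:

#include <cmath>
#include <cstdio>

// Same series as fastExp above: 1 + x + x^2/2 + x^3/6 + x^4/24 + x^5/120.
float fast_exp(float x)
{
    float x2 = x * x, x3 = x2 * x, x4 = x3 * x, x5 = x4 * x;
    return 1.f + x + 0.5f * x2 + 0.1666666667f * x3
               + 0.041666667f * x4 + 0.008333333333f * x5;
}

int main()
{
    for (float x : {0.5f, 1.0f, 2.0f, 4.0f})
        std::printf("x=%4.1f  fast=%10.4f  std=%10.4f\n", x, fast_exp(x), std::exp(x));
    // The error is negligible at 0.5, visible at 1, and large by 4.
}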
+ // return 1.0f + x * (1.0f + x * 0.5f * (1.0f + x * 0.3333333333f * (1.0f + x * 0.25 * (1.0f + x * 0.2f)))); + float x2 = x * x; + float x3 = x2 * x; + float x4 = x3 * x; + float x5 = x4 * x; + float x6 = x5 * x; + float x7 = x6 * x; + float x8 = x7 * x; + return 1.0f + x + (x2 * 0.5f) + (x3 * 0.1666666667f) + (x4 * 0.041666667f) + (x5 * 0.008333333333f)+ (x6 * 0.00138888888888f) + (x7 * 0.000198412698f) + (x8 * 0.0000248015873f);; + } + */ + + template + GLM_FUNC_QUALIFIER vec fastExp(vec const& x) + { + return detail::functor1::call(fastExp, x); + } + + // fastLog + template + GLM_FUNC_QUALIFIER genType fastLog(genType x) + { + return std::log(x); + } + + /* Slower than the VC7.1 function... + GLM_FUNC_QUALIFIER float fastLog(float x) + { + float y1 = (x - 1.0f) / (x + 1.0f); + float y2 = y1 * y1; + return 2.0f * y1 * (1.0f + y2 * (0.3333333333f + y2 * (0.2f + y2 * 0.1428571429f))); + } + */ + + template + GLM_FUNC_QUALIFIER vec fastLog(vec const& x) + { + return detail::functor1::call(fastLog, x); + } + + //fastExp2, ln2 = 0.69314718055994530941723212145818f + template + GLM_FUNC_QUALIFIER genType fastExp2(genType x) + { + return fastExp(0.69314718055994530941723212145818f * x); + } + + template + GLM_FUNC_QUALIFIER vec fastExp2(vec const& x) + { + return detail::functor1::call(fastExp2, x); + } + + // fastLog2, ln2 = 0.69314718055994530941723212145818f + template + GLM_FUNC_QUALIFIER genType fastLog2(genType x) + { + return fastLog(x) / 0.69314718055994530941723212145818f; + } + + template + GLM_FUNC_QUALIFIER vec fastLog2(vec const& x) + { + return detail::functor1::call(fastLog2, x); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/fast_square_root.hpp b/MacroLibX/third_party/glm/gtx/fast_square_root.hpp new file mode 100644 index 0000000..9fb3f2f --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/fast_square_root.hpp @@ -0,0 +1,92 @@ +/// @ref gtx_fast_square_root +/// @file glm/gtx/fast_square_root.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_fast_square_root GLM_GTX_fast_square_root +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Fast but less accurate implementations of square root based functions. +/// - Sqrt optimisation based on Newton's method, +/// www.gamedev.net/community/forums/topic.asp?topic id=139956 + +#pragma once + +// Dependency: +#include "../common.hpp" +#include "../exponential.hpp" +#include "../geometric.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_fast_square_root is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_fast_square_root extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_fast_square_root + /// @{ + + /// Faster than the common sqrt function but less accurate. + /// + /// @see gtx_fast_square_root extension. + template + GLM_FUNC_DECL genType fastSqrt(genType x); + + /// Faster than the common sqrt function but less accurate. + /// + /// @see gtx_fast_square_root extension. + template + GLM_FUNC_DECL vec fastSqrt(vec const& x); + + /// Faster than the common inversesqrt function but less accurate. + /// + /// @see gtx_fast_square_root extension. + template + GLM_FUNC_DECL genType fastInverseSqrt(genType x); + + /// Faster than the common inversesqrt function but less accurate. + /// + /// @see gtx_fast_square_root extension. 
+ template + GLM_FUNC_DECL vec fastInverseSqrt(vec const& x); + + /// Faster than the common length function but less accurate. + /// + /// @see gtx_fast_square_root extension. + template + GLM_FUNC_DECL genType fastLength(genType x); + + /// Faster than the common length function but less accurate. + /// + /// @see gtx_fast_square_root extension. + template + GLM_FUNC_DECL T fastLength(vec const& x); + + /// Faster than the common distance function but less accurate. + /// + /// @see gtx_fast_square_root extension. + template + GLM_FUNC_DECL genType fastDistance(genType x, genType y); + + /// Faster than the common distance function but less accurate. + /// + /// @see gtx_fast_square_root extension. + template + GLM_FUNC_DECL T fastDistance(vec const& x, vec const& y); + + /// Faster than the common normalize function but less accurate. + /// + /// @see gtx_fast_square_root extension. + template + GLM_FUNC_DECL genType fastNormalize(genType const& x); + + /// @} +}// namespace glm + +#include "fast_square_root.inl" diff --git a/MacroLibX/third_party/glm/gtx/fast_square_root.inl b/MacroLibX/third_party/glm/gtx/fast_square_root.inl new file mode 100644 index 0000000..4e6c6de --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/fast_square_root.inl @@ -0,0 +1,75 @@ +/// @ref gtx_fast_square_root + +namespace glm +{ + // fastSqrt + template + GLM_FUNC_QUALIFIER genType fastSqrt(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fastSqrt' only accept floating-point input"); + + return genType(1) / fastInverseSqrt(x); + } + + template + GLM_FUNC_QUALIFIER vec fastSqrt(vec const& x) + { + return detail::functor1::call(fastSqrt, x); + } + + // fastInversesqrt + template + GLM_FUNC_QUALIFIER genType fastInverseSqrt(genType x) + { + return detail::compute_inversesqrt<1, genType, lowp, detail::is_aligned::value>::call(vec<1, genType, lowp>(x)).x; + } + + template + GLM_FUNC_QUALIFIER vec fastInverseSqrt(vec const& x) + { + return detail::compute_inversesqrt::value>::call(x); + } + + // fastLength + template + GLM_FUNC_QUALIFIER genType fastLength(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fastLength' only accept floating-point inputs"); + + return abs(x); + } + + template + GLM_FUNC_QUALIFIER T fastLength(vec const& x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fastLength' only accept floating-point inputs"); + + return fastSqrt(dot(x, x)); + } + + // fastDistance + template + GLM_FUNC_QUALIFIER genType fastDistance(genType x, genType y) + { + return fastLength(y - x); + } + + template + GLM_FUNC_QUALIFIER T fastDistance(vec const& x, vec const& y) + { + return fastLength(y - x); + } + + // fastNormalize + template + GLM_FUNC_QUALIFIER genType fastNormalize(genType x) + { + return x > genType(0) ? genType(1) : -genType(1); + } + + template + GLM_FUNC_QUALIFIER vec fastNormalize(vec const& x) + { + return x * fastInverseSqrt(dot(x, x)); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/fast_trigonometry.hpp b/MacroLibX/third_party/glm/gtx/fast_trigonometry.hpp new file mode 100644 index 0000000..2650d6e --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/fast_trigonometry.hpp @@ -0,0 +1,79 @@ +/// @ref gtx_fast_trigonometry +/// @file glm/gtx/fast_trigonometry.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_fast_trigonometry GLM_GTX_fast_trigonometry +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Fast but less accurate implementations of trigonometric functions. 
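fastInverseSqrt above forwards to detail::compute_inversesqrt, whose body is not part of this patch. The classic technique in this family is a bit-level initial guess refined by one Newton step; a standalone sketch of that idea, with the caveat that GLM's actual detail code may differ (e.g. by using SIMD paths):

#include <cstdint>
#include <cstring>
#include <cstdio>

// Approximate 1/sqrt(x): bit-trick seed plus one Newton-Raphson step.
// Illustrative only; detail::compute_inversesqrt is not shown in this diff.
float fast_inverse_sqrt(float x)
{
    float half = 0.5f * x;
    std::uint32_t i;
    std::memcpy(&i, &x, sizeof i);  // type-pun without UB
    i = 0x5f3759dfu - (i >> 1);     // initial guess
    float y;
    std::memcpy(&y, &i, sizeof y);
    y = y * (1.5f - half * y * y);  // one Newton step
    return y;
}

int main()
{
    std::printf("%f (exact %f)\n", fast_inverse_sqrt(4.f), 0.5f);
}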
+ +#pragma once + +// Dependency: +#include "../gtc/constants.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_fast_trigonometry is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_fast_trigonometry extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_fast_trigonometry + /// @{ + + /// Wrap an angle to [0 2pi[ + /// From GLM_GTX_fast_trigonometry extension. + template + GLM_FUNC_DECL T wrapAngle(T angle); + + /// Faster than the common sin function but less accurate. + /// From GLM_GTX_fast_trigonometry extension. + template + GLM_FUNC_DECL T fastSin(T angle); + + /// Faster than the common cos function but less accurate. + /// From GLM_GTX_fast_trigonometry extension. + template + GLM_FUNC_DECL T fastCos(T angle); + + /// Faster than the common tan function but less accurate. + /// Defined between -2pi and 2pi. + /// From GLM_GTX_fast_trigonometry extension. + template + GLM_FUNC_DECL T fastTan(T angle); + + /// Faster than the common asin function but less accurate. + /// Defined between -2pi and 2pi. + /// From GLM_GTX_fast_trigonometry extension. + template + GLM_FUNC_DECL T fastAsin(T angle); + + /// Faster than the common acos function but less accurate. + /// Defined between -2pi and 2pi. + /// From GLM_GTX_fast_trigonometry extension. + template + GLM_FUNC_DECL T fastAcos(T angle); + + /// Faster than the common atan function but less accurate. + /// Defined between -2pi and 2pi. + /// From GLM_GTX_fast_trigonometry extension. + template + GLM_FUNC_DECL T fastAtan(T y, T x); + + /// Faster than the common atan function but less accurate. + /// Defined between -2pi and 2pi. + /// From GLM_GTX_fast_trigonometry extension. 
+ template + GLM_FUNC_DECL T fastAtan(T angle); + + /// @} +}//namespace glm + +#include "fast_trigonometry.inl" diff --git a/MacroLibX/third_party/glm/gtx/fast_trigonometry.inl b/MacroLibX/third_party/glm/gtx/fast_trigonometry.inl new file mode 100644 index 0000000..1a710cb --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/fast_trigonometry.inl @@ -0,0 +1,142 @@ +/// @ref gtx_fast_trigonometry + +namespace glm{ +namespace detail +{ + template + GLM_FUNC_QUALIFIER vec taylorCos(vec const& x) + { + return static_cast(1) + - (x * x) * (1.f / 2.f) + + ((x * x) * (x * x)) * (1.f / 24.f) + - (((x * x) * (x * x)) * (x * x)) * (1.f / 720.f) + + (((x * x) * (x * x)) * ((x * x) * (x * x))) * (1.f / 40320.f); + } + + template + GLM_FUNC_QUALIFIER T cos_52s(T x) + { + T const xx(x * x); + return (T(0.9999932946) + xx * (T(-0.4999124376) + xx * (T(0.0414877472) + xx * T(-0.0012712095)))); + } + + template + GLM_FUNC_QUALIFIER vec cos_52s(vec const& x) + { + return detail::functor1::call(cos_52s, x); + } +}//namespace detail + + // wrapAngle + template + GLM_FUNC_QUALIFIER T wrapAngle(T angle) + { + return abs(mod(angle, two_pi())); + } + + template + GLM_FUNC_QUALIFIER vec wrapAngle(vec const& x) + { + return detail::functor1::call(wrapAngle, x); + } + + // cos + template + GLM_FUNC_QUALIFIER T fastCos(T x) + { + T const angle(wrapAngle(x)); + + if(angle < half_pi()) + return detail::cos_52s(angle); + if(angle < pi()) + return -detail::cos_52s(pi() - angle); + if(angle < (T(3) * half_pi())) + return -detail::cos_52s(angle - pi()); + + return detail::cos_52s(two_pi() - angle); + } + + template + GLM_FUNC_QUALIFIER vec fastCos(vec const& x) + { + return detail::functor1::call(fastCos, x); + } + + // sin + template + GLM_FUNC_QUALIFIER T fastSin(T x) + { + return fastCos(half_pi() - x); + } + + template + GLM_FUNC_QUALIFIER vec fastSin(vec const& x) + { + return detail::functor1::call(fastSin, x); + } + + // tan + template + GLM_FUNC_QUALIFIER T fastTan(T x) + { + return x + (x * x * x * T(0.3333333333)) + (x * x * x * x * x * T(0.1333333333333)) + (x * x * x * x * x * x * x * T(0.0539682539)); + } + + template + GLM_FUNC_QUALIFIER vec fastTan(vec const& x) + { + return detail::functor1::call(fastTan, x); + } + + // asin + template + GLM_FUNC_QUALIFIER T fastAsin(T x) + { + return x + (x * x * x * T(0.166666667)) + (x * x * x * x * x * T(0.075)) + (x * x * x * x * x * x * x * T(0.0446428571)) + (x * x * x * x * x * x * x * x * x * T(0.0303819444));// + (x * x * x * x * x * x * x * x * x * x * x * T(0.022372159)); + } + + template + GLM_FUNC_QUALIFIER vec fastAsin(vec const& x) + { + return detail::functor1::call(fastAsin, x); + } + + // acos + template + GLM_FUNC_QUALIFIER T fastAcos(T x) + { + return T(1.5707963267948966192313216916398) - fastAsin(x); //(PI / 2) + } + + template + GLM_FUNC_QUALIFIER vec fastAcos(vec const& x) + { + return detail::functor1::call(fastAcos, x); + } + + // atan + template + GLM_FUNC_QUALIFIER T fastAtan(T y, T x) + { + T sgn = sign(y) * sign(x); + return abs(fastAtan(y / x)) * sgn; + } + + template + GLM_FUNC_QUALIFIER vec fastAtan(vec const& y, vec const& x) + { + return detail::functor2::call(fastAtan, y, x); + } + + template + GLM_FUNC_QUALIFIER T fastAtan(T x) + { + return x - (x * x * x * T(0.333333333333)) + (x * x * x * x * x * T(0.2)) - (x * x * x * x * x * x * x * T(0.1428571429)) + (x * x * x * x * x * x * x * x * x * T(0.111111111111)) - (x * x * x * x * x * x * x * x * x * x * x * T(0.0909090909)); + } + + template + GLM_FUNC_QUALIFIER vec fastAtan(vec const& x) 
+ { + return detail::functor1::call(fastAtan, x); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/float_notmalize.inl b/MacroLibX/third_party/glm/gtx/float_notmalize.inl new file mode 100644 index 0000000..8cdbc5a --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/float_notmalize.inl @@ -0,0 +1,13 @@ +/// @ref gtx_float_normalize + +#include + +namespace glm +{ + template + GLM_FUNC_QUALIFIER vec floatNormalize(vec const& v) + { + return vec(v) / static_cast(std::numeric_limits::max()); + } + +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/functions.hpp b/MacroLibX/third_party/glm/gtx/functions.hpp new file mode 100644 index 0000000..9f4166c --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/functions.hpp @@ -0,0 +1,56 @@ +/// @ref gtx_functions +/// @file glm/gtx/functions.hpp +/// +/// @see core (dependence) +/// @see gtc_quaternion (dependence) +/// +/// @defgroup gtx_functions GLM_GTX_functions +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// List of useful common functions. + +#pragma once + +// Dependencies +#include "../detail/setup.hpp" +#include "../detail/qualifier.hpp" +#include "../detail/type_vec2.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_functions is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_functions extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_functions + /// @{ + + /// 1D gauss function + /// + /// @see gtc_epsilon + template + GLM_FUNC_DECL T gauss( + T x, + T ExpectedValue, + T StandardDeviation); + + /// 2D gauss function + /// + /// @see gtc_epsilon + template + GLM_FUNC_DECL T gauss( + vec<2, T, Q> const& Coord, + vec<2, T, Q> const& ExpectedValue, + vec<2, T, Q> const& StandardDeviation); + + /// @} +}//namespace glm + +#include "functions.inl" + diff --git a/MacroLibX/third_party/glm/gtx/functions.inl b/MacroLibX/third_party/glm/gtx/functions.inl new file mode 100644 index 0000000..29cbb20 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/functions.inl @@ -0,0 +1,30 @@ +/// @ref gtx_functions + +#include "../exponential.hpp" + +namespace glm +{ + template + GLM_FUNC_QUALIFIER T gauss + ( + T x, + T ExpectedValue, + T StandardDeviation + ) + { + return exp(-((x - ExpectedValue) * (x - ExpectedValue)) / (static_cast(2) * StandardDeviation * StandardDeviation)) / (StandardDeviation * sqrt(static_cast(6.28318530717958647692528676655900576))); + } + + template + GLM_FUNC_QUALIFIER T gauss + ( + vec<2, T, Q> const& Coord, + vec<2, T, Q> const& ExpectedValue, + vec<2, T, Q> const& StandardDeviation + ) + { + vec<2, T, Q> const Squared = ((Coord - ExpectedValue) * (Coord - ExpectedValue)) / (static_cast(2) * StandardDeviation * StandardDeviation); + return exp(-(Squared.x + Squared.y)); + } +}//namespace glm + diff --git a/MacroLibX/third_party/glm/gtx/gradient_paint.hpp b/MacroLibX/third_party/glm/gtx/gradient_paint.hpp new file mode 100644 index 0000000..6f85bf4 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/gradient_paint.hpp @@ -0,0 +1,53 @@ +/// @ref gtx_gradient_paint +/// @file glm/gtx/gradient_paint.hpp +/// +/// @see core (dependence) +/// @see gtx_optimum_pow (dependence) +/// +/// @defgroup gtx_gradient_paint GLM_GTX_gradient_paint +/// @ingroup gtx +/// +/// Include to use the features of this extension. 
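fastCos above first wraps the angle into [0, 2pi) and then folds it into the first quadrant before evaluating the cos_52s polynomial, which is only meant for roughly [0, pi/2]. A standalone sketch of the same reduction; the simpler fabs/fmod wrap differs from wrapAngle for negative inputs but gives the same result under cosine's evenness:

#include <cmath>
#include <cstdio>

// Same polynomial as detail::cos_52s above (valid on roughly [0, pi/2]).
float cos_52s(float x)
{
    float xx = x * x;
    return 0.9999932946f + xx * (-0.4999124376f + xx * (0.0414877472f + xx * -0.0012712095f));
}

// Same quadrant folding as fastCos above.
float fast_cos(float x)
{
    const float pi = 3.14159265358979f, half_pi = 0.5f * pi, two_pi = 2.f * pi;
    float a = std::fabs(std::fmod(x, two_pi)); // range reduction; fine for cos (even)
    if (a < half_pi)       return  cos_52s(a);
    if (a < pi)            return -cos_52s(pi - a);
    if (a < 3.f * half_pi) return -cos_52s(a - pi);
    return cos_52s(two_pi - a);
}

int main()
{
    std::printf("%f vs %f\n", fast_cos(2.5f), std::cos(2.5f)); // ~-0.8011 both
}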
+/// +/// Functions that return the color of procedural gradient for specific coordinates. + +#pragma once + +// Dependency: +#include "../glm.hpp" +#include "../gtx/optimum_pow.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_gradient_paint is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_gradient_paint extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_gradient_paint + /// @{ + + /// Return a color from a radial gradient. + /// @see - gtx_gradient_paint + template + GLM_FUNC_DECL T radialGradient( + vec<2, T, Q> const& Center, + T const& Radius, + vec<2, T, Q> const& Focal, + vec<2, T, Q> const& Position); + + /// Return a color from a linear gradient. + /// @see - gtx_gradient_paint + template + GLM_FUNC_DECL T linearGradient( + vec<2, T, Q> const& Point0, + vec<2, T, Q> const& Point1, + vec<2, T, Q> const& Position); + + /// @} +}// namespace glm + +#include "gradient_paint.inl" diff --git a/MacroLibX/third_party/glm/gtx/gradient_paint.inl b/MacroLibX/third_party/glm/gtx/gradient_paint.inl new file mode 100644 index 0000000..4c495e6 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/gradient_paint.inl @@ -0,0 +1,36 @@ +/// @ref gtx_gradient_paint + +namespace glm +{ + template + GLM_FUNC_QUALIFIER T radialGradient + ( + vec<2, T, Q> const& Center, + T const& Radius, + vec<2, T, Q> const& Focal, + vec<2, T, Q> const& Position + ) + { + vec<2, T, Q> F = Focal - Center; + vec<2, T, Q> D = Position - Focal; + T Radius2 = pow2(Radius); + T Fx2 = pow2(F.x); + T Fy2 = pow2(F.y); + + T Numerator = (D.x * F.x + D.y * F.y) + sqrt(Radius2 * (pow2(D.x) + pow2(D.y)) - pow2(D.x * F.y - D.y * F.x)); + T Denominator = Radius2 - (Fx2 + Fy2); + return Numerator / Denominator; + } + + template + GLM_FUNC_QUALIFIER T linearGradient + ( + vec<2, T, Q> const& Point0, + vec<2, T, Q> const& Point1, + vec<2, T, Q> const& Position + ) + { + vec<2, T, Q> Dist = Point1 - Point0; + return (Dist.x * (Position.x - Point0.x) + Dist.y * (Position.y - Point0.y)) / glm::dot(Dist, Dist); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/handed_coordinate_space.hpp b/MacroLibX/third_party/glm/gtx/handed_coordinate_space.hpp new file mode 100644 index 0000000..3c85968 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/handed_coordinate_space.hpp @@ -0,0 +1,50 @@ +/// @ref gtx_handed_coordinate_space +/// @file glm/gtx/handed_coordinate_space.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_handed_coordinate_space GLM_GTX_handed_coordinate_space +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// To know if a set of three basis vectors defines a right or left-handed coordinate system. + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_handed_coordinate_space is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_handed_coordinate_space extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_handed_coordinate_space + /// @{ + + //! Return if a trihedron right handed or not. + //! From GLM_GTX_handed_coordinate_space extension. 
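linearGradient above projects Position onto the Point0 -> Point1 axis and divides by the squared axis length, so it evaluates to 0 at Point0, 1 at Point1, and runs past [0, 1] beyond the endpoints. The same arithmetic, standalone:

#include <cstdio>

struct Vec2 { float x, y; };

// Same formula as linearGradient above: dot(d, p - p0) / dot(d, d).
float linear_gradient(Vec2 p0, Vec2 p1, Vec2 p)
{
    float dx = p1.x - p0.x, dy = p1.y - p0.y;
    return (dx * (p.x - p0.x) + dy * (p.y - p0.y)) / (dx * dx + dy * dy);
}

int main()
{
    // The midpoint of the axis evaluates to 0.5.
    std::printf("%f\n", linear_gradient({0, 0}, {2, 0}, {1, 0})); // 0.5
}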
+ template + GLM_FUNC_DECL bool rightHanded( + vec<3, T, Q> const& tangent, + vec<3, T, Q> const& binormal, + vec<3, T, Q> const& normal); + + //! Return if a trihedron left handed or not. + //! From GLM_GTX_handed_coordinate_space extension. + template + GLM_FUNC_DECL bool leftHanded( + vec<3, T, Q> const& tangent, + vec<3, T, Q> const& binormal, + vec<3, T, Q> const& normal); + + /// @} +}// namespace glm + +#include "handed_coordinate_space.inl" diff --git a/MacroLibX/third_party/glm/gtx/handed_coordinate_space.inl b/MacroLibX/third_party/glm/gtx/handed_coordinate_space.inl new file mode 100644 index 0000000..e43c17b --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/handed_coordinate_space.inl @@ -0,0 +1,26 @@ +/// @ref gtx_handed_coordinate_space + +namespace glm +{ + template + GLM_FUNC_QUALIFIER bool rightHanded + ( + vec<3, T, Q> const& tangent, + vec<3, T, Q> const& binormal, + vec<3, T, Q> const& normal + ) + { + return dot(cross(normal, tangent), binormal) > T(0); + } + + template + GLM_FUNC_QUALIFIER bool leftHanded + ( + vec<3, T, Q> const& tangent, + vec<3, T, Q> const& binormal, + vec<3, T, Q> const& normal + ) + { + return dot(cross(normal, tangent), binormal) < T(0); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/hash.hpp b/MacroLibX/third_party/glm/gtx/hash.hpp new file mode 100644 index 0000000..05dae9f --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/hash.hpp @@ -0,0 +1,142 @@ +/// @ref gtx_hash +/// @file glm/gtx/hash.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_hash GLM_GTX_hash +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Add std::hash support for glm types + +#pragma once + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_hash is an experimental extension and may change in the future. 
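rightHanded and leftHanded above reduce to the sign of the triple product dot(cross(normal, tangent), binormal). A standalone check confirming the standard basis comes out right-handed:

#include <cstdio>

struct Vec3 { float x, y, z; };

Vec3 cross(Vec3 a, Vec3 b)
{
    return { a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z, a.x * b.y - a.y * b.x };
}
float dot(Vec3 a, Vec3 b) { return a.x * b.x + a.y * b.y + a.z * b.z; }

// Same triple product as rightHanded(tangent, binormal, normal) above.
bool right_handed(Vec3 t, Vec3 b, Vec3 n) { return dot(cross(n, t), b) > 0.f; }

int main()
{
    Vec3 X{1, 0, 0}, Y{0, 1, 0}, Z{0, 0, 1};
    std::printf("%d\n", right_handed(X, Y, Z)); // 1: standard basis is right-handed
}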
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_hash extension included") +# endif +#endif + +#include + +#include "../vec2.hpp" +#include "../vec3.hpp" +#include "../vec4.hpp" +#include "../gtc/vec1.hpp" + +#include "../gtc/quaternion.hpp" +#include "../gtx/dual_quaternion.hpp" + +#include "../mat2x2.hpp" +#include "../mat2x3.hpp" +#include "../mat2x4.hpp" + +#include "../mat3x2.hpp" +#include "../mat3x3.hpp" +#include "../mat3x4.hpp" + +#include "../mat4x2.hpp" +#include "../mat4x3.hpp" +#include "../mat4x4.hpp" + +#if !GLM_HAS_CXX11_STL +# error "GLM_GTX_hash requires C++11 standard library support" +#endif + +namespace std +{ + template + struct hash > + { + GLM_FUNC_DECL size_t operator()(glm::vec<1, T, Q> const& v) const; + }; + + template + struct hash > + { + GLM_FUNC_DECL size_t operator()(glm::vec<2, T, Q> const& v) const; + }; + + template + struct hash > + { + GLM_FUNC_DECL size_t operator()(glm::vec<3, T, Q> const& v) const; + }; + + template + struct hash > + { + GLM_FUNC_DECL size_t operator()(glm::vec<4, T, Q> const& v) const; + }; + + template + struct hash> + { + GLM_FUNC_DECL size_t operator()(glm::qua const& q) const; + }; + + template + struct hash > + { + GLM_FUNC_DECL size_t operator()(glm::tdualquat const& q) const; + }; + + template + struct hash > + { + GLM_FUNC_DECL size_t operator()(glm::mat<2, 2, T,Q> const& m) const; + }; + + template + struct hash > + { + GLM_FUNC_DECL size_t operator()(glm::mat<2, 3, T,Q> const& m) const; + }; + + template + struct hash > + { + GLM_FUNC_DECL size_t operator()(glm::mat<2, 4, T,Q> const& m) const; + }; + + template + struct hash > + { + GLM_FUNC_DECL size_t operator()(glm::mat<3, 2, T,Q> const& m) const; + }; + + template + struct hash > + { + GLM_FUNC_DECL size_t operator()(glm::mat<3, 3, T,Q> const& m) const; + }; + + template + struct hash > + { + GLM_FUNC_DECL size_t operator()(glm::mat<3, 4, T,Q> const& m) const; + }; + + template + struct hash > + { + GLM_FUNC_DECL size_t operator()(glm::mat<4, 2, T,Q> const& m) const; + }; + + template + struct hash > + { + GLM_FUNC_DECL size_t operator()(glm::mat<4, 3, T,Q> const& m) const; + }; + + template + struct hash > + { + GLM_FUNC_DECL size_t operator()(glm::mat<4, 4, T,Q> const& m) const; + }; +} // namespace std + +#include "hash.inl" diff --git a/MacroLibX/third_party/glm/gtx/hash.inl b/MacroLibX/third_party/glm/gtx/hash.inl new file mode 100644 index 0000000..ff71ca9 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/hash.inl @@ -0,0 +1,184 @@ +/// @ref gtx_hash +/// +/// @see core (dependence) +/// +/// @defgroup gtx_hash GLM_GTX_hash +/// @ingroup gtx +/// +/// @brief Add std::hash support for glm types +/// +/// need to be included to use the features of this extension. 
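The practical payoff of these std::hash specializations is that glm types can key the standard unordered containers directly. A minimal usage sketch, assuming GLM is installed (the define is required because GTX extensions are experimental):

#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/hash.hpp>
#include <unordered_map>
#include <cstdio>

int main()
{
    // Works because hash.hpp/hash.inl provide std::hash for glm::vec<3, T, Q>.
    std::unordered_map<glm::ivec3, int> voxels;
    voxels[glm::ivec3(1, 2, 3)] = 42;
    std::printf("%d\n", voxels.at(glm::ivec3(1, 2, 3)));
}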
+ +namespace glm { +namespace detail +{ + GLM_INLINE void hash_combine(size_t &seed, size_t hash) + { + hash += 0x9e3779b9 + (seed << 6) + (seed >> 2); + seed ^= hash; + } +}} + +namespace std +{ + template + GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::vec<1, T, Q> const& v) const + { + hash hasher; + return hasher(v.x); + } + + template + GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::vec<2, T, Q> const& v) const + { + size_t seed = 0; + hash hasher; + glm::detail::hash_combine(seed, hasher(v.x)); + glm::detail::hash_combine(seed, hasher(v.y)); + return seed; + } + + template + GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::vec<3, T, Q> const& v) const + { + size_t seed = 0; + hash hasher; + glm::detail::hash_combine(seed, hasher(v.x)); + glm::detail::hash_combine(seed, hasher(v.y)); + glm::detail::hash_combine(seed, hasher(v.z)); + return seed; + } + + template + GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::vec<4, T, Q> const& v) const + { + size_t seed = 0; + hash hasher; + glm::detail::hash_combine(seed, hasher(v.x)); + glm::detail::hash_combine(seed, hasher(v.y)); + glm::detail::hash_combine(seed, hasher(v.z)); + glm::detail::hash_combine(seed, hasher(v.w)); + return seed; + } + + template + GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::qua const& q) const + { + size_t seed = 0; + hash hasher; + glm::detail::hash_combine(seed, hasher(q.x)); + glm::detail::hash_combine(seed, hasher(q.y)); + glm::detail::hash_combine(seed, hasher(q.z)); + glm::detail::hash_combine(seed, hasher(q.w)); + return seed; + } + + template + GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::tdualquat const& q) const + { + size_t seed = 0; + hash> hasher; + glm::detail::hash_combine(seed, hasher(q.real)); + glm::detail::hash_combine(seed, hasher(q.dual)); + return seed; + } + + template + GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<2, 2, T, Q> const& m) const + { + size_t seed = 0; + hash> hasher; + glm::detail::hash_combine(seed, hasher(m[0])); + glm::detail::hash_combine(seed, hasher(m[1])); + return seed; + } + + template + GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<2, 3, T, Q> const& m) const + { + size_t seed = 0; + hash> hasher; + glm::detail::hash_combine(seed, hasher(m[0])); + glm::detail::hash_combine(seed, hasher(m[1])); + return seed; + } + + template + GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<2, 4, T, Q> const& m) const + { + size_t seed = 0; + hash> hasher; + glm::detail::hash_combine(seed, hasher(m[0])); + glm::detail::hash_combine(seed, hasher(m[1])); + return seed; + } + + template + GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<3, 2, T, Q> const& m) const + { + size_t seed = 0; + hash> hasher; + glm::detail::hash_combine(seed, hasher(m[0])); + glm::detail::hash_combine(seed, hasher(m[1])); + glm::detail::hash_combine(seed, hasher(m[2])); + return seed; + } + + template + GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<3, 3, T, Q> const& m) const + { + size_t seed = 0; + hash> hasher; + glm::detail::hash_combine(seed, hasher(m[0])); + glm::detail::hash_combine(seed, hasher(m[1])); + glm::detail::hash_combine(seed, hasher(m[2])); + return seed; + } + + template + GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<3, 4, T, Q> const& m) const + { + size_t seed = 0; + hash> hasher; + glm::detail::hash_combine(seed, hasher(m[0])); + glm::detail::hash_combine(seed, hasher(m[1])); + glm::detail::hash_combine(seed, hasher(m[2])); + return seed; + } + + template + GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<4, 2, T,Q> const& m) const + { 
+ size_t seed = 0; + hash> hasher; + glm::detail::hash_combine(seed, hasher(m[0])); + glm::detail::hash_combine(seed, hasher(m[1])); + glm::detail::hash_combine(seed, hasher(m[2])); + glm::detail::hash_combine(seed, hasher(m[3])); + return seed; + } + + template + GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<4, 3, T,Q> const& m) const + { + size_t seed = 0; + hash> hasher; + glm::detail::hash_combine(seed, hasher(m[0])); + glm::detail::hash_combine(seed, hasher(m[1])); + glm::detail::hash_combine(seed, hasher(m[2])); + glm::detail::hash_combine(seed, hasher(m[3])); + return seed; + } + + template + GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<4, 4, T, Q> const& m) const + { + size_t seed = 0; + hash> hasher; + glm::detail::hash_combine(seed, hasher(m[0])); + glm::detail::hash_combine(seed, hasher(m[1])); + glm::detail::hash_combine(seed, hasher(m[2])); + glm::detail::hash_combine(seed, hasher(m[3])); + return seed; + } +} diff --git a/MacroLibX/third_party/glm/gtx/integer.hpp b/MacroLibX/third_party/glm/gtx/integer.hpp new file mode 100644 index 0000000..d0b4c61 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/integer.hpp @@ -0,0 +1,76 @@ +/// @ref gtx_integer +/// @file glm/gtx/integer.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_integer GLM_GTX_integer +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Add support for integer for core functions + +#pragma once + +// Dependency: +#include "../glm.hpp" +#include "../gtc/integer.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_integer is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_integer extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_integer + /// @{ + + //! Returns x raised to the y power. + //! From GLM_GTX_integer extension. + GLM_FUNC_DECL int pow(int x, uint y); + + //! Returns the positive square root of x. + //! From GLM_GTX_integer extension. + GLM_FUNC_DECL int sqrt(int x); + + //! Returns the floor log2 of x. + //! From GLM_GTX_integer extension. + GLM_FUNC_DECL unsigned int floor_log2(unsigned int x); + + //! Modulus. Returns x - y * floor(x / y) for each component in x using the floating point value y. + //! From GLM_GTX_integer extension. + GLM_FUNC_DECL int mod(int x, int y); + + //! Return the factorial value of a number (!12 max, integer only) + //! From GLM_GTX_integer extension. + template + GLM_FUNC_DECL genType factorial(genType const& x); + + //! 32bit signed integer. + //! From GLM_GTX_integer extension. + typedef signed int sint; + + //! Returns x raised to the y power. + //! From GLM_GTX_integer extension. + GLM_FUNC_DECL uint pow(uint x, uint y); + + //! Returns the positive square root of x. + //! From GLM_GTX_integer extension. + GLM_FUNC_DECL uint sqrt(uint x); + + //! Modulus. Returns x - y * floor(x / y) for each component in x using the floating point value y. + //! From GLM_GTX_integer extension. + GLM_FUNC_DECL uint mod(uint x, uint y); + + //! Returns the number of leading zeros. + //! From GLM_GTX_integer extension. 
+ GLM_FUNC_DECL uint nlz(uint x); + + /// @} +}//namespace glm + +#include "integer.inl" diff --git a/MacroLibX/third_party/glm/gtx/integer.inl b/MacroLibX/third_party/glm/gtx/integer.inl new file mode 100644 index 0000000..956366b --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/integer.inl @@ -0,0 +1,185 @@ +/// @ref gtx_integer + +namespace glm +{ + // pow + GLM_FUNC_QUALIFIER int pow(int x, uint y) + { + if(y == 0) + return x >= 0 ? 1 : -1; + + int result = x; + for(uint i = 1; i < y; ++i) + result *= x; + return result; + } + + // sqrt: From Christopher J. Musial, An integer square root, Graphics Gems, 1990, page 387 + GLM_FUNC_QUALIFIER int sqrt(int x) + { + if(x <= 1) return x; + + int NextTrial = x >> 1; + int CurrentAnswer; + + do + { + CurrentAnswer = NextTrial; + NextTrial = (NextTrial + x / NextTrial) >> 1; + } while(NextTrial < CurrentAnswer); + + return CurrentAnswer; + } + +// Henry Gordon Dietz: http://aggregate.org/MAGIC/ +namespace detail +{ + GLM_FUNC_QUALIFIER unsigned int ones32(unsigned int x) + { + /* 32-bit recursive reduction using SWAR... + but first step is mapping 2-bit values + into sum of 2 1-bit values in sneaky way + */ + x -= ((x >> 1) & 0x55555555); + x = (((x >> 2) & 0x33333333) + (x & 0x33333333)); + x = (((x >> 4) + x) & 0x0f0f0f0f); + x += (x >> 8); + x += (x >> 16); + return(x & 0x0000003f); + } +}//namespace detail + + // Henry Gordon Dietz: http://aggregate.org/MAGIC/ +/* + GLM_FUNC_QUALIFIER unsigned int floor_log2(unsigned int x) + { + x |= (x >> 1); + x |= (x >> 2); + x |= (x >> 4); + x |= (x >> 8); + x |= (x >> 16); + + return _detail::ones32(x) >> 1; + } +*/ + // mod + GLM_FUNC_QUALIFIER int mod(int x, int y) + { + return ((x % y) + y) % y; + } + + // factorial (!12 max, integer only) + template + GLM_FUNC_QUALIFIER genType factorial(genType const& x) + { + genType Temp = x; + genType Result; + for(Result = 1; Temp > 1; --Temp) + Result *= Temp; + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<2, T, Q> factorial( + vec<2, T, Q> const& x) + { + return vec<2, T, Q>( + factorial(x.x), + factorial(x.y)); + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> factorial( + vec<3, T, Q> const& x) + { + return vec<3, T, Q>( + factorial(x.x), + factorial(x.y), + factorial(x.z)); + } + + template + GLM_FUNC_QUALIFIER vec<4, T, Q> factorial( + vec<4, T, Q> const& x) + { + return vec<4, T, Q>( + factorial(x.x), + factorial(x.y), + factorial(x.z), + factorial(x.w)); + } + + GLM_FUNC_QUALIFIER uint pow(uint x, uint y) + { + if (y == 0) + return 1u; + + uint result = x; + for(uint i = 1; i < y; ++i) + result *= x; + return result; + } + + GLM_FUNC_QUALIFIER uint sqrt(uint x) + { + if(x <= 1) return x; + + uint NextTrial = x >> 1; + uint CurrentAnswer; + + do + { + CurrentAnswer = NextTrial; + NextTrial = (NextTrial + x / NextTrial) >> 1; + } while(NextTrial < CurrentAnswer); + + return CurrentAnswer; + } + + GLM_FUNC_QUALIFIER uint mod(uint x, uint y) + { + return x - y * (x / y); + } + +#if(GLM_COMPILER & (GLM_COMPILER_VC | GLM_COMPILER_GCC)) + + GLM_FUNC_QUALIFIER unsigned int nlz(unsigned int x) + { + return 31u - findMSB(x); + } + +#else + + // Hackers Delight: http://www.hackersdelight.org/HDcode/nlz.c.txt + GLM_FUNC_QUALIFIER unsigned int nlz(unsigned int x) + { + int y, m, n; + + y = -int(x >> 16); // If left half of x is 0, + m = (y >> 16) & 16; // set n = 16. If left half + n = 16 - m; // is nonzero, set n = 0 and + x = x >> m; // shift x right 16. + // Now x is of the form 0000xxxx. 
+ y = x - 0x100; // If positions 8-15 are 0, + m = (y >> 16) & 8; // add 8 to n and shift x left 8. + n = n + m; + x = x << m; + + y = x - 0x1000; // If positions 12-15 are 0, + m = (y >> 16) & 4; // add 4 to n and shift x left 4. + n = n + m; + x = x << m; + + y = x - 0x4000; // If positions 14-15 are 0, + m = (y >> 16) & 2; // add 2 to n and shift x left 2. + n = n + m; + x = x << m; + + y = x >> 14; // Set y = 0, 1, 2, or 3. + m = y & ~(y >> 1); // Set m = 0, 1, 2, or 2 resp. + return unsigned(n + 2 - m); + } + +#endif//(GLM_COMPILER) + +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/intersect.hpp b/MacroLibX/third_party/glm/gtx/intersect.hpp new file mode 100644 index 0000000..3c78f2b --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/intersect.hpp @@ -0,0 +1,92 @@ +/// @ref gtx_intersect +/// @file glm/gtx/intersect.hpp +/// +/// @see core (dependence) +/// @see gtx_closest_point (dependence) +/// +/// @defgroup gtx_intersect GLM_GTX_intersect +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Add intersection functions + +#pragma once + +// Dependency: +#include +#include +#include "../glm.hpp" +#include "../geometric.hpp" +#include "../gtx/closest_point.hpp" +#include "../gtx/vector_query.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_closest_point is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_closest_point extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_intersect + /// @{ + + //! Compute the intersection of a ray and a plane. + //! Ray direction and plane normal must be unit length. + //! From GLM_GTX_intersect extension. + template + GLM_FUNC_DECL bool intersectRayPlane( + genType const& orig, genType const& dir, + genType const& planeOrig, genType const& planeNormal, + typename genType::value_type & intersectionDistance); + + //! Compute the intersection of a ray and a triangle. + /// Based om Tomas Möller implementation http://fileadmin.cs.lth.se/cs/Personal/Tomas_Akenine-Moller/raytri/ + //! From GLM_GTX_intersect extension. + template + GLM_FUNC_DECL bool intersectRayTriangle( + vec<3, T, Q> const& orig, vec<3, T, Q> const& dir, + vec<3, T, Q> const& v0, vec<3, T, Q> const& v1, vec<3, T, Q> const& v2, + vec<2, T, Q>& baryPosition, T& distance); + + //! Compute the intersection of a line and a triangle. + //! From GLM_GTX_intersect extension. + template + GLM_FUNC_DECL bool intersectLineTriangle( + genType const& orig, genType const& dir, + genType const& vert0, genType const& vert1, genType const& vert2, + genType & position); + + //! Compute the intersection distance of a ray and a sphere. + //! The ray direction vector is unit length. + //! From GLM_GTX_intersect extension. + template + GLM_FUNC_DECL bool intersectRaySphere( + genType const& rayStarting, genType const& rayNormalizedDirection, + genType const& sphereCenter, typename genType::value_type const sphereRadiusSquered, + typename genType::value_type & intersectionDistance); + + //! Compute the intersection of a ray and a sphere. + //! From GLM_GTX_intersect extension. 
+ template + GLM_FUNC_DECL bool intersectRaySphere( + genType const& rayStarting, genType const& rayNormalizedDirection, + genType const& sphereCenter, const typename genType::value_type sphereRadius, + genType & intersectionPosition, genType & intersectionNormal); + + //! Compute the intersection of a line and a sphere. + //! From GLM_GTX_intersect extension + template + GLM_FUNC_DECL bool intersectLineSphere( + genType const& point0, genType const& point1, + genType const& sphereCenter, typename genType::value_type sphereRadius, + genType & intersectionPosition1, genType & intersectionNormal1, + genType & intersectionPosition2 = genType(), genType & intersectionNormal2 = genType()); + + /// @} +}//namespace glm + +#include "intersect.inl" diff --git a/MacroLibX/third_party/glm/gtx/intersect.inl b/MacroLibX/third_party/glm/gtx/intersect.inl new file mode 100644 index 0000000..54ecb4d --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/intersect.inl @@ -0,0 +1,200 @@ +/// @ref gtx_intersect + +namespace glm +{ + template + GLM_FUNC_QUALIFIER bool intersectRayPlane + ( + genType const& orig, genType const& dir, + genType const& planeOrig, genType const& planeNormal, + typename genType::value_type & intersectionDistance + ) + { + typename genType::value_type d = glm::dot(dir, planeNormal); + typename genType::value_type Epsilon = std::numeric_limits::epsilon(); + + if(glm::abs(d) > Epsilon) // if dir and planeNormal are not perpendicular + { + typename genType::value_type const tmp_intersectionDistance = glm::dot(planeOrig - orig, planeNormal) / d; + if (tmp_intersectionDistance > static_cast(0)) { // allow only intersections + intersectionDistance = tmp_intersectionDistance; + return true; + } + } + + return false; + } + + template + GLM_FUNC_QUALIFIER bool intersectRayTriangle + ( + vec<3, T, Q> const& orig, vec<3, T, Q> const& dir, + vec<3, T, Q> const& vert0, vec<3, T, Q> const& vert1, vec<3, T, Q> const& vert2, + vec<2, T, Q>& baryPosition, T& distance + ) + { + // find vectors for two edges sharing vert0 + vec<3, T, Q> const edge1 = vert1 - vert0; + vec<3, T, Q> const edge2 = vert2 - vert0; + + // begin calculating determinant - also used to calculate U parameter + vec<3, T, Q> const p = glm::cross(dir, edge2); + + // if determinant is near zero, ray lies in plane of triangle + T const det = glm::dot(edge1, p); + + vec<3, T, Q> Perpendicular(0); + + if(det > std::numeric_limits::epsilon()) + { + // calculate distance from vert0 to ray origin + vec<3, T, Q> const dist = orig - vert0; + + // calculate U parameter and test bounds + baryPosition.x = glm::dot(dist, p); + if(baryPosition.x < static_cast(0) || baryPosition.x > det) + return false; + + // prepare to test V parameter + Perpendicular = glm::cross(dist, edge1); + + // calculate V parameter and test bounds + baryPosition.y = glm::dot(dir, Perpendicular); + if((baryPosition.y < static_cast(0)) || ((baryPosition.x + baryPosition.y) > det)) + return false; + } + else if(det < -std::numeric_limits::epsilon()) + { + // calculate distance from vert0 to ray origin + vec<3, T, Q> const dist = orig - vert0; + + // calculate U parameter and test bounds + baryPosition.x = glm::dot(dist, p); + if((baryPosition.x > static_cast(0)) || (baryPosition.x < det)) + return false; + + // prepare to test V parameter + Perpendicular = glm::cross(dist, edge1); + + // calculate V parameter and test bounds + baryPosition.y = glm::dot(dir, Perpendicular); + if((baryPosition.y > static_cast(0)) || (baryPosition.x + baryPosition.y < det)) + return false; + } + 
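+		// Editor's note: the two branches above are the Moller-Trumbore bounds
+		// tests for det > 0 and det < 0 (hits from either side of the
+		// triangle); the inequalities are flipped between the branches so that
+		// u and v can be range-checked against det before the single division
+		// below.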
else + return false; // ray is parallel to the plane of the triangle + + T inv_det = static_cast(1) / det; + + // calculate distance, ray intersects triangle + distance = glm::dot(edge2, Perpendicular) * inv_det; + baryPosition *= inv_det; + + return true; + } + + template + GLM_FUNC_QUALIFIER bool intersectLineTriangle + ( + genType const& orig, genType const& dir, + genType const& vert0, genType const& vert1, genType const& vert2, + genType & position + ) + { + typename genType::value_type Epsilon = std::numeric_limits::epsilon(); + + genType edge1 = vert1 - vert0; + genType edge2 = vert2 - vert0; + + genType Perpendicular = cross(dir, edge2); + + float det = dot(edge1, Perpendicular); + + if (det > -Epsilon && det < Epsilon) + return false; + typename genType::value_type inv_det = typename genType::value_type(1) / det; + + genType Tengant = orig - vert0; + + position.y = dot(Tengant, Perpendicular) * inv_det; + if (position.y < typename genType::value_type(0) || position.y > typename genType::value_type(1)) + return false; + + genType Cotengant = cross(Tengant, edge1); + + position.z = dot(dir, Cotengant) * inv_det; + if (position.z < typename genType::value_type(0) || position.y + position.z > typename genType::value_type(1)) + return false; + + position.x = dot(edge2, Cotengant) * inv_det; + + return true; + } + + template + GLM_FUNC_QUALIFIER bool intersectRaySphere + ( + genType const& rayStarting, genType const& rayNormalizedDirection, + genType const& sphereCenter, const typename genType::value_type sphereRadiusSquered, + typename genType::value_type & intersectionDistance + ) + { + typename genType::value_type Epsilon = std::numeric_limits::epsilon(); + genType diff = sphereCenter - rayStarting; + typename genType::value_type t0 = dot(diff, rayNormalizedDirection); + typename genType::value_type dSquared = dot(diff, diff) - t0 * t0; + if( dSquared > sphereRadiusSquered ) + { + return false; + } + typename genType::value_type t1 = sqrt( sphereRadiusSquered - dSquared ); + intersectionDistance = t0 > t1 + Epsilon ? 
t0 - t1 : t0 + t1; + return intersectionDistance > Epsilon; + } + + template + GLM_FUNC_QUALIFIER bool intersectRaySphere + ( + genType const& rayStarting, genType const& rayNormalizedDirection, + genType const& sphereCenter, const typename genType::value_type sphereRadius, + genType & intersectionPosition, genType & intersectionNormal + ) + { + typename genType::value_type distance; + if( intersectRaySphere( rayStarting, rayNormalizedDirection, sphereCenter, sphereRadius * sphereRadius, distance ) ) + { + intersectionPosition = rayStarting + rayNormalizedDirection * distance; + intersectionNormal = (intersectionPosition - sphereCenter) / sphereRadius; + return true; + } + return false; + } + + template + GLM_FUNC_QUALIFIER bool intersectLineSphere + ( + genType const& point0, genType const& point1, + genType const& sphereCenter, typename genType::value_type sphereRadius, + genType & intersectionPoint1, genType & intersectionNormal1, + genType & intersectionPoint2, genType & intersectionNormal2 + ) + { + typename genType::value_type Epsilon = std::numeric_limits::epsilon(); + genType dir = normalize(point1 - point0); + genType diff = sphereCenter - point0; + typename genType::value_type t0 = dot(diff, dir); + typename genType::value_type dSquared = dot(diff, diff) - t0 * t0; + if( dSquared > sphereRadius * sphereRadius ) + { + return false; + } + typename genType::value_type t1 = sqrt( sphereRadius * sphereRadius - dSquared ); + if( t0 < t1 + Epsilon ) + t1 = -t1; + intersectionPoint1 = point0 + dir * (t0 - t1); + intersectionNormal1 = (intersectionPoint1 - sphereCenter) / sphereRadius; + intersectionPoint2 = point0 + dir * (t0 + t1); + intersectionNormal2 = (intersectionPoint2 - sphereCenter) / sphereRadius; + return true; + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/io.hpp b/MacroLibX/third_party/glm/gtx/io.hpp new file mode 100644 index 0000000..8d974f0 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/io.hpp @@ -0,0 +1,201 @@ +/// @ref gtx_io +/// @file glm/gtx/io.hpp +/// @author Jan P Springer (regnirpsj@gmail.com) +/// +/// @see core (dependence) +/// @see gtc_matrix_access (dependence) +/// @see gtc_quaternion (dependence) +/// +/// @defgroup gtx_io GLM_GTX_io +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// std::[w]ostream support for glm types +/// +/// std::[w]ostream support for glm types + qualifier/width/etc. manipulators +/// based on howard hinnant's std::chrono io proposal +/// [http://home.roadrunner.com/~hinnant/bloomington/chrono_io.html] + +#pragma once + +// Dependency: +#include "../glm.hpp" +#include "../gtx/quaternion.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_io is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_io extension included") +# endif +#endif + +#include // std::basic_ostream<> (fwd) +#include // std::locale, std::locale::facet, std::locale::id +#include // std::pair<> + +namespace glm +{ + /// @addtogroup gtx_io + /// @{ + + namespace io + { + enum order_type { column_major, row_major}; + + template + class format_punct : public std::locale::facet + { + typedef CTy char_type; + + public: + + static std::locale::id id; + + bool formatted; + unsigned precision; + unsigned width; + char_type separator; + char_type delim_left; + char_type delim_right; + char_type space; + char_type newline; + order_type order; + + GLM_FUNC_DECL explicit format_punct(size_t a = 0); + GLM_FUNC_DECL explicit format_punct(format_punct const&); + }; + + template > + class basic_state_saver { + + public: + + GLM_FUNC_DECL explicit basic_state_saver(std::basic_ios&); + GLM_FUNC_DECL ~basic_state_saver(); + + private: + + typedef ::std::basic_ios state_type; + typedef typename state_type::char_type char_type; + typedef ::std::ios_base::fmtflags flags_type; + typedef ::std::streamsize streamsize_type; + typedef ::std::locale const locale_type; + + state_type& state_; + flags_type flags_; + streamsize_type precision_; + streamsize_type width_; + char_type fill_; + locale_type locale_; + + GLM_FUNC_DECL basic_state_saver& operator=(basic_state_saver const&); + }; + + typedef basic_state_saver state_saver; + typedef basic_state_saver wstate_saver; + + template > + class basic_format_saver + { + public: + + GLM_FUNC_DECL explicit basic_format_saver(std::basic_ios&); + GLM_FUNC_DECL ~basic_format_saver(); + + private: + + basic_state_saver const bss_; + + GLM_FUNC_DECL basic_format_saver& operator=(basic_format_saver const&); + }; + + typedef basic_format_saver format_saver; + typedef basic_format_saver wformat_saver; + + struct precision + { + unsigned value; + + GLM_FUNC_DECL explicit precision(unsigned); + }; + + struct width + { + unsigned value; + + GLM_FUNC_DECL explicit width(unsigned); + }; + + template + struct delimeter + { + CTy value[3]; + + GLM_FUNC_DECL explicit delimeter(CTy /* left */, CTy /* right */, CTy /* separator */ = ','); + }; + + struct order + { + order_type value; + + GLM_FUNC_DECL explicit order(order_type); + }; + + // functions, inlined (inline) + + template + FTy const& get_facet(std::basic_ios&); + template + std::basic_ios& formatted(std::basic_ios&); + template + std::basic_ios& unformattet(std::basic_ios&); + + template + std::basic_ostream& operator<<(std::basic_ostream&, precision const&); + template + std::basic_ostream& operator<<(std::basic_ostream&, width const&); + template + std::basic_ostream& operator<<(std::basic_ostream&, delimeter const&); + template + std::basic_ostream& operator<<(std::basic_ostream&, order const&); + }//namespace io + + template + GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, qua const&); + template + GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, vec<1, T, Q> const&); + template + GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, vec<2, T, Q> const&); + template + GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, vec<3, T, Q> const&); + template + GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, vec<4, T, Q> const&); + template + GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<2, 2, T, Q> const&); + template + 
GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<2, 3, T, Q> const&); + template + GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<2, 4, T, Q> const&); + template + GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<3, 2, T, Q> const&); + template + GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<3, 3, T, Q> const&); + template + GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<3, 4, T, Q> const&); + template + GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<4, 2, T, Q> const&); + template + GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<4, 3, T, Q> const&); + template + GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<4, 4, T, Q> const&); + + template + GLM_FUNC_DECL std::basic_ostream & operator<<(std::basic_ostream &, + std::pair const, mat<4, 4, T, Q> const> const&); + + /// @} +}//namespace glm + +#include "io.inl" diff --git a/MacroLibX/third_party/glm/gtx/io.inl b/MacroLibX/third_party/glm/gtx/io.inl new file mode 100644 index 0000000..a3a1bb6 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/io.inl @@ -0,0 +1,440 @@ +/// @ref gtx_io +/// @author Jan P Springer (regnirpsj@gmail.com) + +#include // std::fixed, std::setfill<>, std::setprecision, std::right, std::setw +#include // std::basic_ostream<> +#include "../gtc/matrix_access.hpp" // glm::col, glm::row +#include "../gtx/type_trait.hpp" // glm::type<> + +namespace glm{ +namespace io +{ + template + GLM_FUNC_QUALIFIER format_punct::format_punct(size_t a) + : std::locale::facet(a) + , formatted(true) + , precision(3) + , width(1 + 4 + 1 + precision) + , separator(',') + , delim_left('[') + , delim_right(']') + , space(' ') + , newline('\n') + , order(column_major) + {} + + template + GLM_FUNC_QUALIFIER format_punct::format_punct(format_punct const& a) + : std::locale::facet(0) + , formatted(a.formatted) + , precision(a.precision) + , width(a.width) + , separator(a.separator) + , delim_left(a.delim_left) + , delim_right(a.delim_right) + , space(a.space) + , newline(a.newline) + , order(a.order) + {} + + template std::locale::id format_punct::id; + + template + GLM_FUNC_QUALIFIER basic_state_saver::basic_state_saver(std::basic_ios& a) + : state_(a) + , flags_(a.flags()) + , precision_(a.precision()) + , width_(a.width()) + , fill_(a.fill()) + , locale_(a.getloc()) + {} + + template + GLM_FUNC_QUALIFIER basic_state_saver::~basic_state_saver() + { + state_.imbue(locale_); + state_.fill(fill_); + state_.width(width_); + state_.precision(precision_); + state_.flags(flags_); + } + + template + GLM_FUNC_QUALIFIER basic_format_saver::basic_format_saver(std::basic_ios& a) + : bss_(a) + { + a.imbue(std::locale(a.getloc(), new format_punct(get_facet >(a)))); + } + + template + GLM_FUNC_QUALIFIER + basic_format_saver::~basic_format_saver() + {} + + GLM_FUNC_QUALIFIER precision::precision(unsigned a) + : value(a) + {} + + GLM_FUNC_QUALIFIER width::width(unsigned a) + : value(a) + {} + + template + GLM_FUNC_QUALIFIER delimeter::delimeter(CTy a, CTy b, CTy c) + : value() + { + value[0] = a; + value[1] = b; + value[2] = c; + } + + GLM_FUNC_QUALIFIER order::order(order_type a) + : value(a) + {} + + template + GLM_FUNC_QUALIFIER FTy const& get_facet(std::basic_ios& ios) + { + if(!std::has_facet(ios.getloc())) + ios.imbue(std::locale(ios.getloc(), new FTy)); + + return std::use_facet(ios.getloc()); + } + + template + GLM_FUNC_QUALIFIER std::basic_ios& formatted(std::basic_ios& ios) + 
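+	// Editor's note: formatted()/unformatted() below toggle the `formatted`
+	// flag on the stream's imbued format_punct facet, switching glm output
+	// between delimited, width/precision-padded form and bare components.
+	// They are plain functions applied directly to a stream, e.g. (a sketch)
+	//     glm::io::unformatted(std::cout);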
{ + const_cast&>(get_facet >(ios)).formatted = true; + return ios; + } + + template + GLM_FUNC_QUALIFIER std::basic_ios& unformatted(std::basic_ios& ios) + { + const_cast&>(get_facet >(ios)).formatted = false; + return ios; + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, precision const& a) + { + const_cast&>(get_facet >(os)).precision = a.value; + return os; + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, width const& a) + { + const_cast&>(get_facet >(os)).width = a.value; + return os; + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, delimeter const& a) + { + format_punct & fmt(const_cast&>(get_facet >(os))); + + fmt.delim_left = a.value[0]; + fmt.delim_right = a.value[1]; + fmt.separator = a.value[2]; + + return os; + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, order const& a) + { + const_cast&>(get_facet >(os)).order = a.value; + return os; + } +} // namespace io + +namespace detail +{ + template + GLM_FUNC_QUALIFIER std::basic_ostream& + print_vector_on(std::basic_ostream& os, V const& a) + { + typename std::basic_ostream::sentry const cerberus(os); + + if(cerberus) + { + io::format_punct const& fmt(io::get_facet >(os)); + + length_t const& components(type::components); + + if(fmt.formatted) + { + io::basic_state_saver const bss(os); + + os << std::fixed << std::right << std::setprecision(fmt.precision) << std::setfill(fmt.space) << fmt.delim_left; + + for(length_t i(0); i < components; ++i) + { + os << std::setw(fmt.width) << a[i]; + if(components-1 != i) + os << fmt.separator; + } + + os << fmt.delim_right; + } + else + { + for(length_t i(0); i < components; ++i) + { + os << a[i]; + + if(components-1 != i) + os << fmt.space; + } + } + } + + return os; + } +}//namespace detail + + template + GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, qua const& a) + { + return detail::print_vector_on(os, a); + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, vec<1, T, Q> const& a) + { + return detail::print_vector_on(os, a); + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, vec<2, T, Q> const& a) + { + return detail::print_vector_on(os, a); + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, vec<3, T, Q> const& a) + { + return detail::print_vector_on(os, a); + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, vec<4, T, Q> const& a) + { + return detail::print_vector_on(os, a); + } + +namespace detail +{ + template class M, length_t C, length_t R, typename T, qualifier Q> + GLM_FUNC_QUALIFIER std::basic_ostream& print_matrix_on(std::basic_ostream& os, M const& a) + { + typename std::basic_ostream::sentry const cerberus(os); + + if(cerberus) + { + io::format_punct const& fmt(io::get_facet >(os)); + + length_t const& cols(type >::cols); + length_t const& rows(type >::rows); + + if(fmt.formatted) + { + os << fmt.newline << fmt.delim_left; + + switch(fmt.order) + { + case io::column_major: + { + for(length_t i(0); i < rows; ++i) + { + if (0 != i) + os << fmt.space; + + os << row(a, i); + + if(rows-1 != i) + os << fmt.newline; + } + } + break; + + case io::row_major: + { + for(length_t i(0); i < cols; ++i) + { + if(0 != i) + os << fmt.space; + + os << column(a, i); + + if(cols-1 != i) + os << fmt.newline; + } + } + break; + } + + os << 
fmt.delim_right; + } + else + { + switch (fmt.order) + { + case io::column_major: + { + for(length_t i(0); i < cols; ++i) + { + os << column(a, i); + + if(cols - 1 != i) + os << fmt.space; + } + } + break; + + case io::row_major: + { + for (length_t i(0); i < rows; ++i) + { + os << row(a, i); + + if (rows-1 != i) + os << fmt.space; + } + } + break; + } + } + } + + return os; + } +}//namespace detail + + template + GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, mat<2, 2, T, Q> const& a) + { + return detail::print_matrix_on(os, a); + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, mat<2, 3, T, Q> const& a) + { + return detail::print_matrix_on(os, a); + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, mat<2, 4, T, Q> const& a) + { + return detail::print_matrix_on(os, a); + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, mat<3, 2, T, Q> const& a) + { + return detail::print_matrix_on(os, a); + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, mat<3, 3, T, Q> const& a) + { + return detail::print_matrix_on(os, a); + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream & operator<<(std::basic_ostream& os, mat<3, 4, T, Q> const& a) + { + return detail::print_matrix_on(os, a); + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream & operator<<(std::basic_ostream& os, mat<4, 2, T, Q> const& a) + { + return detail::print_matrix_on(os, a); + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream & operator<<(std::basic_ostream& os, mat<4, 3, T, Q> const& a) + { + return detail::print_matrix_on(os, a); + } + + template + GLM_FUNC_QUALIFIER std::basic_ostream & operator<<(std::basic_ostream& os, mat<4, 4, T, Q> const& a) + { + return detail::print_matrix_on(os, a); + } + +namespace detail +{ + template class M, length_t C, length_t R, typename T, qualifier Q> + GLM_FUNC_QUALIFIER std::basic_ostream& print_matrix_pair_on(std::basic_ostream& os, std::pair const, M const> const& a) + { + typename std::basic_ostream::sentry const cerberus(os); + + if(cerberus) + { + io::format_punct const& fmt(io::get_facet >(os)); + M const& ml(a.first); + M const& mr(a.second); + length_t const& cols(type >::cols); + length_t const& rows(type >::rows); + + if(fmt.formatted) + { + os << fmt.newline << fmt.delim_left; + + switch(fmt.order) + { + case io::column_major: + { + for(length_t i(0); i < rows; ++i) + { + if(0 != i) + os << fmt.space; + + os << row(ml, i) << ((rows-1 != i) ? fmt.space : fmt.delim_right) << fmt.space << ((0 != i) ? fmt.space : fmt.delim_left) << row(mr, i); + + if(rows-1 != i) + os << fmt.newline; + } + } + break; + case io::row_major: + { + for(length_t i(0); i < cols; ++i) + { + if(0 != i) + os << fmt.space; + + os << column(ml, i) << ((cols-1 != i) ? fmt.space : fmt.delim_right) << fmt.space << ((0 != i) ? 
fmt.space : fmt.delim_left) << column(mr, i); + + if(cols-1 != i) + os << fmt.newline; + } + } + break; + } + + os << fmt.delim_right; + } + else + { + os << ml << fmt.space << mr; + } + } + + return os; + } +}//namespace detail + + template + GLM_FUNC_QUALIFIER std::basic_ostream& operator<<( + std::basic_ostream & os, + std::pair const, + mat<4, 4, T, Q> const> const& a) + { + return detail::print_matrix_pair_on(os, a); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/log_base.hpp b/MacroLibX/third_party/glm/gtx/log_base.hpp new file mode 100644 index 0000000..ba28c9d --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/log_base.hpp @@ -0,0 +1,48 @@ +/// @ref gtx_log_base +/// @file glm/gtx/log_base.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_log_base GLM_GTX_log_base +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Logarithm for any base. base can be a vector or a scalar. + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_log_base is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_log_base extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_log_base + /// @{ + + /// Logarithm for any base. + /// From GLM_GTX_log_base. + template + GLM_FUNC_DECL genType log( + genType const& x, + genType const& base); + + /// Logarithm for any base. + /// From GLM_GTX_log_base. + template + GLM_FUNC_DECL vec sign( + vec const& x, + vec const& base); + + /// @} +}//namespace glm + +#include "log_base.inl" diff --git a/MacroLibX/third_party/glm/gtx/log_base.inl b/MacroLibX/third_party/glm/gtx/log_base.inl new file mode 100644 index 0000000..4bbb8e8 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/log_base.inl @@ -0,0 +1,16 @@ +/// @ref gtx_log_base + +namespace glm +{ + template + GLM_FUNC_QUALIFIER genType log(genType const& x, genType const& base) + { + return glm::log(x) / glm::log(base); + } + + template + GLM_FUNC_QUALIFIER vec log(vec const& x, vec const& base) + { + return glm::log(x) / glm::log(base); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/matrix_cross_product.hpp b/MacroLibX/third_party/glm/gtx/matrix_cross_product.hpp new file mode 100644 index 0000000..1e585f9 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/matrix_cross_product.hpp @@ -0,0 +1,47 @@ +/// @ref gtx_matrix_cross_product +/// @file glm/gtx/matrix_cross_product.hpp +/// +/// @see core (dependence) +/// @see gtx_extented_min_max (dependence) +/// +/// @defgroup gtx_matrix_cross_product GLM_GTX_matrix_cross_product +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Build cross product matrices + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_matrix_cross_product is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_matrix_cross_product extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_matrix_cross_product + /// @{ + + //! Build a cross product matrix. + //! From GLM_GTX_matrix_cross_product extension. 
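+	//! Editor's note: the result M is the skew-symmetric matrix satisfying
+	//!     M * v == glm::cross(x, v)   for every 3-component v,
+	//! which is the defining property of a cross-product matrix (an editorial
+	//! gloss on the component layout written out in the .inl below).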
+ template + GLM_FUNC_DECL mat<3, 3, T, Q> matrixCross3( + vec<3, T, Q> const& x); + + //! Build a cross product matrix. + //! From GLM_GTX_matrix_cross_product extension. + template + GLM_FUNC_DECL mat<4, 4, T, Q> matrixCross4( + vec<3, T, Q> const& x); + + /// @} +}//namespace glm + +#include "matrix_cross_product.inl" diff --git a/MacroLibX/third_party/glm/gtx/matrix_cross_product.inl b/MacroLibX/third_party/glm/gtx/matrix_cross_product.inl new file mode 100644 index 0000000..3a15397 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/matrix_cross_product.inl @@ -0,0 +1,37 @@ +/// @ref gtx_matrix_cross_product + +namespace glm +{ + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> matrixCross3 + ( + vec<3, T, Q> const& x + ) + { + mat<3, 3, T, Q> Result(T(0)); + Result[0][1] = x.z; + Result[1][0] = -x.z; + Result[0][2] = -x.y; + Result[2][0] = x.y; + Result[1][2] = x.x; + Result[2][1] = -x.x; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> matrixCross4 + ( + vec<3, T, Q> const& x + ) + { + mat<4, 4, T, Q> Result(T(0)); + Result[0][1] = x.z; + Result[1][0] = -x.z; + Result[0][2] = -x.y; + Result[2][0] = x.y; + Result[1][2] = x.x; + Result[2][1] = -x.x; + return Result; + } + +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/matrix_decompose.hpp b/MacroLibX/third_party/glm/gtx/matrix_decompose.hpp new file mode 100644 index 0000000..acd7a7f --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/matrix_decompose.hpp @@ -0,0 +1,46 @@ +/// @ref gtx_matrix_decompose +/// @file glm/gtx/matrix_decompose.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_matrix_decompose GLM_GTX_matrix_decompose +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Decomposes a model matrix to translations, rotation and scale components + +#pragma once + +// Dependencies +#include "../mat4x4.hpp" +#include "../vec3.hpp" +#include "../vec4.hpp" +#include "../geometric.hpp" +#include "../gtc/quaternion.hpp" +#include "../gtc/matrix_transform.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_matrix_decompose is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_matrix_decompose extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_matrix_decompose + /// @{ + + /// Decomposes a model matrix to translations, rotation and scale components + /// @see gtx_matrix_decompose + template + GLM_FUNC_DECL bool decompose( + mat<4, 4, T, Q> const& modelMatrix, + vec<3, T, Q> & scale, qua & orientation, vec<3, T, Q> & translation, vec<3, T, Q> & skew, vec<4, T, Q> & perspective); + + /// @} +}//namespace glm + +#include "matrix_decompose.inl" diff --git a/MacroLibX/third_party/glm/gtx/matrix_decompose.inl b/MacroLibX/third_party/glm/gtx/matrix_decompose.inl new file mode 100644 index 0000000..694f5ec --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/matrix_decompose.inl @@ -0,0 +1,186 @@ +/// @ref gtx_matrix_decompose + +#include "../gtc/constants.hpp" +#include "../gtc/epsilon.hpp" + +namespace glm{ +namespace detail +{ + /// Make a linear combination of two vectors and return the result. 
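+	// (Editor's note: decompose() further down uses combine() to subtract
+	// projections during its Gram-Schmidt-style shear extraction, and scale()
+	// to renormalize the rows it has just orthogonalized.)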
+ // result = (a * ascl) + (b * bscl) + template + GLM_FUNC_QUALIFIER vec<3, T, Q> combine( + vec<3, T, Q> const& a, + vec<3, T, Q> const& b, + T ascl, T bscl) + { + return (a * ascl) + (b * bscl); + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> scale(vec<3, T, Q> const& v, T desiredLength) + { + return v * desiredLength / length(v); + } +}//namespace detail + + // Matrix decompose + // http://www.opensource.apple.com/source/WebCore/WebCore-514/platform/graphics/transforms/TransformationMatrix.cpp + // Decomposes the mode matrix to translations,rotation scale components + + template + GLM_FUNC_QUALIFIER bool decompose(mat<4, 4, T, Q> const& ModelMatrix, vec<3, T, Q> & Scale, qua & Orientation, vec<3, T, Q> & Translation, vec<3, T, Q> & Skew, vec<4, T, Q> & Perspective) + { + mat<4, 4, T, Q> LocalMatrix(ModelMatrix); + + // Normalize the matrix. + if(epsilonEqual(LocalMatrix[3][3], static_cast(0), epsilon())) + return false; + + for(length_t i = 0; i < 4; ++i) + for(length_t j = 0; j < 4; ++j) + LocalMatrix[i][j] /= LocalMatrix[3][3]; + + // perspectiveMatrix is used to solve for perspective, but it also provides + // an easy way to test for singularity of the upper 3x3 component. + mat<4, 4, T, Q> PerspectiveMatrix(LocalMatrix); + + for(length_t i = 0; i < 3; i++) + PerspectiveMatrix[i][3] = static_cast(0); + PerspectiveMatrix[3][3] = static_cast(1); + + /// TODO: Fixme! + if(epsilonEqual(determinant(PerspectiveMatrix), static_cast(0), epsilon())) + return false; + + // First, isolate perspective. This is the messiest. + if( + epsilonNotEqual(LocalMatrix[0][3], static_cast(0), epsilon()) || + epsilonNotEqual(LocalMatrix[1][3], static_cast(0), epsilon()) || + epsilonNotEqual(LocalMatrix[2][3], static_cast(0), epsilon())) + { + // rightHandSide is the right hand side of the equation. + vec<4, T, Q> RightHandSide; + RightHandSide[0] = LocalMatrix[0][3]; + RightHandSide[1] = LocalMatrix[1][3]; + RightHandSide[2] = LocalMatrix[2][3]; + RightHandSide[3] = LocalMatrix[3][3]; + + // Solve the equation by inverting PerspectiveMatrix and multiplying + // rightHandSide by the inverse. (This is the easiest way, not + // necessarily the best.) + mat<4, 4, T, Q> InversePerspectiveMatrix = glm::inverse(PerspectiveMatrix);// inverse(PerspectiveMatrix, inversePerspectiveMatrix); + mat<4, 4, T, Q> TransposedInversePerspectiveMatrix = glm::transpose(InversePerspectiveMatrix);// transposeMatrix4(inversePerspectiveMatrix, transposedInversePerspectiveMatrix); + + Perspective = TransposedInversePerspectiveMatrix * RightHandSide; + // v4MulPointByMatrix(rightHandSide, transposedInversePerspectiveMatrix, perspectivePoint); + + // Clear the perspective partition + LocalMatrix[0][3] = LocalMatrix[1][3] = LocalMatrix[2][3] = static_cast(0); + LocalMatrix[3][3] = static_cast(1); + } + else + { + // No perspective. + Perspective = vec<4, T, Q>(0, 0, 0, 1); + } + + // Next take care of translation (easy). + Translation = vec<3, T, Q>(LocalMatrix[3]); + LocalMatrix[3] = vec<4, T, Q>(0, 0, 0, LocalMatrix[3].w); + + vec<3, T, Q> Row[3], Pdum3; + + // Now get scale and shear. + for(length_t i = 0; i < 3; ++i) + for(length_t j = 0; j < 3; ++j) + Row[i][j] = LocalMatrix[i][j]; + + // Compute X scale factor and normalize first row. + Scale.x = length(Row[0]);// v3Length(Row[0]); + + Row[0] = detail::scale(Row[0], static_cast(1)); + + // Compute XY shear factor and make 2nd row orthogonal to 1st. 
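+		// Editor's note: one Gram-Schmidt step. Skew.z first holds the raw
+		// projection dot(Row[0], Row[1]); that projection is removed from
+		// Row[1], and once Row[1] is renormalized below, dividing by Scale.y
+		// turns the stored value into the actual XY shear coefficient.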
+ Skew.z = dot(Row[0], Row[1]); + Row[1] = detail::combine(Row[1], Row[0], static_cast(1), -Skew.z); + + // Now, compute Y scale and normalize 2nd row. + Scale.y = length(Row[1]); + Row[1] = detail::scale(Row[1], static_cast(1)); + Skew.z /= Scale.y; + + // Compute XZ and YZ shears, orthogonalize 3rd row. + Skew.y = glm::dot(Row[0], Row[2]); + Row[2] = detail::combine(Row[2], Row[0], static_cast(1), -Skew.y); + Skew.x = glm::dot(Row[1], Row[2]); + Row[2] = detail::combine(Row[2], Row[1], static_cast(1), -Skew.x); + + // Next, get Z scale and normalize 3rd row. + Scale.z = length(Row[2]); + Row[2] = detail::scale(Row[2], static_cast(1)); + Skew.y /= Scale.z; + Skew.x /= Scale.z; + + // At this point, the matrix (in rows[]) is orthonormal. + // Check for a coordinate system flip. If the determinant + // is -1, then negate the matrix and the scaling factors. + Pdum3 = cross(Row[1], Row[2]); // v3Cross(row[1], row[2], Pdum3); + if(dot(Row[0], Pdum3) < 0) + { + for(length_t i = 0; i < 3; i++) + { + Scale[i] *= static_cast(-1); + Row[i] *= static_cast(-1); + } + } + + // Now, get the rotations out, as described in the gem. + + // FIXME - Add the ability to return either quaternions (which are + // easier to recompose with) or Euler angles (rx, ry, rz), which + // are easier for authors to deal with. The latter will only be useful + // when we fix https://bugs.webkit.org/show_bug.cgi?id=23799, so I + // will leave the Euler angle code here for now. + + // ret.rotateY = asin(-Row[0][2]); + // if (cos(ret.rotateY) != 0) { + // ret.rotateX = atan2(Row[1][2], Row[2][2]); + // ret.rotateZ = atan2(Row[0][1], Row[0][0]); + // } else { + // ret.rotateX = atan2(-Row[2][0], Row[1][1]); + // ret.rotateZ = 0; + // } + + int i, j, k = 0; + T root, trace = Row[0].x + Row[1].y + Row[2].z; + if(trace > static_cast(0)) + { + root = sqrt(trace + static_cast(1.0)); + Orientation.w = static_cast(0.5) * root; + root = static_cast(0.5) / root; + Orientation.x = root * (Row[1].z - Row[2].y); + Orientation.y = root * (Row[2].x - Row[0].z); + Orientation.z = root * (Row[0].y - Row[1].x); + } // End if > 0 + else + { + static int Next[3] = {1, 2, 0}; + i = 0; + if(Row[1].y > Row[0].x) i = 1; + if(Row[2].z > Row[i][i]) i = 2; + j = Next[i]; + k = Next[j]; + + root = sqrt(Row[i][i] - Row[j][j] - Row[k][k] + static_cast(1.0)); + + Orientation[i] = static_cast(0.5) * root; + root = static_cast(0.5) / root; + Orientation[j] = root * (Row[i][j] + Row[j][i]); + Orientation[k] = root * (Row[i][k] + Row[k][i]); + Orientation.w = root * (Row[j][k] - Row[k][j]); + } // End if <= 0 + + return true; + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/matrix_factorisation.hpp b/MacroLibX/third_party/glm/gtx/matrix_factorisation.hpp new file mode 100644 index 0000000..5a975d6 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/matrix_factorisation.hpp @@ -0,0 +1,69 @@ +/// @ref gtx_matrix_factorisation +/// @file glm/gtx/matrix_factorisation.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_matrix_factorisation GLM_GTX_matrix_factorisation +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Functions to factor matrices in various forms + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_matrix_factorisation is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_matrix_factorisation extension included") +# endif +#endif + +/* +Suggestions: + - Move helper functions flipud and fliplr to another file: They may be helpful in more general circumstances. + - Implement other types of matrix factorisation, such as: QL and LQ, L(D)U, eigendecompositions, etc... +*/ + +namespace glm +{ + /// @addtogroup gtx_matrix_factorisation + /// @{ + + /// Flips the matrix rows up and down. + /// + /// From GLM_GTX_matrix_factorisation extension. + template + GLM_FUNC_DECL mat flipud(mat const& in); + + /// Flips the matrix columns right and left. + /// + /// From GLM_GTX_matrix_factorisation extension. + template + GLM_FUNC_DECL mat fliplr(mat const& in); + + /// Performs QR factorisation of a matrix. + /// Returns 2 matrices, q and r, such that the columns of q are orthonormal and span the same subspace than those of the input matrix, r is an upper triangular matrix, and q*r=in. + /// Given an n-by-m input matrix, q has dimensions min(n,m)-by-m, and r has dimensions n-by-min(n,m). + /// + /// From GLM_GTX_matrix_factorisation extension. + template + GLM_FUNC_DECL void qr_decompose(mat const& in, mat<(C < R ? C : R), R, T, Q>& q, mat& r); + + /// Performs RQ factorisation of a matrix. + /// Returns 2 matrices, r and q, such that r is an upper triangular matrix, the rows of q are orthonormal and span the same subspace than those of the input matrix, and r*q=in. + /// Note that in the context of RQ factorisation, the diagonal is seen as starting in the lower-right corner of the matrix, instead of the usual upper-left. + /// Given an n-by-m input matrix, r has dimensions min(n,m)-by-m, and q has dimensions n-by-min(n,m). + /// + /// From GLM_GTX_matrix_factorisation extension. + template + GLM_FUNC_DECL void rq_decompose(mat const& in, mat<(C < R ? C : R), R, T, Q>& r, mat& q); + + /// @} +} + +#include "matrix_factorisation.inl" diff --git a/MacroLibX/third_party/glm/gtx/matrix_factorisation.inl b/MacroLibX/third_party/glm/gtx/matrix_factorisation.inl new file mode 100644 index 0000000..c479b8a --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/matrix_factorisation.inl @@ -0,0 +1,84 @@ +/// @ref gtx_matrix_factorisation + +namespace glm +{ + template + GLM_FUNC_QUALIFIER mat flipud(mat const& in) + { + mat tin = transpose(in); + tin = fliplr(tin); + mat out = transpose(tin); + + return out; + } + + template + GLM_FUNC_QUALIFIER mat fliplr(mat const& in) + { + mat out; + for (length_t i = 0; i < C; i++) + { + out[i] = in[(C - i) - 1]; + } + + return out; + } + + template + GLM_FUNC_QUALIFIER void qr_decompose(mat const& in, mat<(C < R ? C : R), R, T, Q>& q, mat& r) + { + // Uses modified Gram-Schmidt method + // Source: https://en.wikipedia.org/wiki/GramSchmidt_process + // And https://en.wikipedia.org/wiki/QR_decomposition + + //For all the linearly independs columns of the input... + // (there can be no more linearly independents columns than there are rows.) + for (length_t i = 0; i < (C < R ? C : R); i++) + { + //Copy in Q the input's i-th column. + q[i] = in[i]; + + //j = [0,i[ + // Make that column orthogonal to all the previous ones by substracting to it the non-orthogonal projection of all the previous columns. + // Also: Fill the zero elements of R + for (length_t j = 0; j < i; j++) + { + q[i] -= dot(q[i], q[j])*q[j]; + r[j][i] = 0; + } + + //Now, Q i-th column is orthogonal to all the previous columns. Normalize it. 
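+			// (Editor's note: because each projection is subtracted from the
+			// already-updated q[i] against previously normalized columns, this
+			// is the *modified* Gram-Schmidt variant, which is numerically more
+			// stable than subtracting all projections from the original column
+			// at once.)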
+ q[i] = normalize(q[i]); + + //j = [i,C[ + //Finally, compute the corresponding coefficients of R by computing the projection of the resulting column on the other columns of the input. + for (length_t j = i; j < C; j++) + { + r[j][i] = dot(in[j], q[i]); + } + } + } + + template + GLM_FUNC_QUALIFIER void rq_decompose(mat const& in, mat<(C < R ? C : R), R, T, Q>& r, mat& q) + { + // From https://en.wikipedia.org/wiki/QR_decomposition: + // The RQ decomposition transforms a matrix A into the product of an upper triangular matrix R (also known as right-triangular) and an orthogonal matrix Q. The only difference from QR decomposition is the order of these matrices. + // QR decomposition is GramSchmidt orthogonalization of columns of A, started from the first column. + // RQ decomposition is GramSchmidt orthogonalization of rows of A, started from the last row. + + mat tin = transpose(in); + tin = fliplr(tin); + + mat tr; + mat<(C < R ? C : R), C, T, Q> tq; + qr_decompose(tin, tq, tr); + + tr = fliplr(tr); + r = transpose(tr); + r = fliplr(r); + + tq = fliplr(tq); + q = transpose(tq); + } +} //namespace glm diff --git a/MacroLibX/third_party/glm/gtx/matrix_interpolation.hpp b/MacroLibX/third_party/glm/gtx/matrix_interpolation.hpp new file mode 100644 index 0000000..7d5ad4c --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/matrix_interpolation.hpp @@ -0,0 +1,60 @@ +/// @ref gtx_matrix_interpolation +/// @file glm/gtx/matrix_interpolation.hpp +/// @author Ghenadii Ursachi (the.asteroth@gmail.com) +/// +/// @see core (dependence) +/// +/// @defgroup gtx_matrix_interpolation GLM_GTX_matrix_interpolation +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Allows to directly interpolate two matrices. + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_matrix_interpolation is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_matrix_interpolation extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_matrix_interpolation + /// @{ + + /// Get the axis and angle of the rotation from a matrix. + /// From GLM_GTX_matrix_interpolation extension. + template + GLM_FUNC_DECL void axisAngle( + mat<4, 4, T, Q> const& Mat, vec<3, T, Q> & Axis, T & Angle); + + /// Build a matrix from axis and angle. + /// From GLM_GTX_matrix_interpolation extension. + template + GLM_FUNC_DECL mat<4, 4, T, Q> axisAngleMatrix( + vec<3, T, Q> const& Axis, T const Angle); + + /// Extracts the rotation part of a matrix. + /// From GLM_GTX_matrix_interpolation extension. + template + GLM_FUNC_DECL mat<4, 4, T, Q> extractMatrixRotation( + mat<4, 4, T, Q> const& Mat); + + /// Build a interpolation of 4 * 4 matrixes. + /// From GLM_GTX_matrix_interpolation extension. + /// Warning! works only with rotation and/or translation matrixes, scale will generate unexpected results. 
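+	/// Editor's note: a typical (hypothetical) use is blending two camera or
+	/// bone poses, e.g. glm::mat4 Mid = glm::interpolate(PoseA, PoseB, 0.5f);
+	/// the rotation part is interpolated through axis/angle and the
+	/// translation column component-wise, per the warning above.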
+ template + GLM_FUNC_DECL mat<4, 4, T, Q> interpolate( + mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2, T const Delta); + + /// @} +}//namespace glm + +#include "matrix_interpolation.inl" diff --git a/MacroLibX/third_party/glm/gtx/matrix_interpolation.inl b/MacroLibX/third_party/glm/gtx/matrix_interpolation.inl new file mode 100644 index 0000000..de40b7d --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/matrix_interpolation.inl @@ -0,0 +1,129 @@ +/// @ref gtx_matrix_interpolation + +#include "../gtc/constants.hpp" + +namespace glm +{ + template + GLM_FUNC_QUALIFIER void axisAngle(mat<4, 4, T, Q> const& m, vec<3, T, Q> & axis, T& angle) + { + T epsilon = static_cast(0.01); + T epsilon2 = static_cast(0.1); + + if((abs(m[1][0] - m[0][1]) < epsilon) && (abs(m[2][0] - m[0][2]) < epsilon) && (abs(m[2][1] - m[1][2]) < epsilon)) + { + if ((abs(m[1][0] + m[0][1]) < epsilon2) && (abs(m[2][0] + m[0][2]) < epsilon2) && (abs(m[2][1] + m[1][2]) < epsilon2) && (abs(m[0][0] + m[1][1] + m[2][2] - static_cast(3.0)) < epsilon2)) + { + angle = static_cast(0.0); + axis.x = static_cast(1.0); + axis.y = static_cast(0.0); + axis.z = static_cast(0.0); + return; + } + angle = static_cast(3.1415926535897932384626433832795); + T xx = (m[0][0] + static_cast(1.0)) * static_cast(0.5); + T yy = (m[1][1] + static_cast(1.0)) * static_cast(0.5); + T zz = (m[2][2] + static_cast(1.0)) * static_cast(0.5); + T xy = (m[1][0] + m[0][1]) * static_cast(0.25); + T xz = (m[2][0] + m[0][2]) * static_cast(0.25); + T yz = (m[2][1] + m[1][2]) * static_cast(0.25); + if((xx > yy) && (xx > zz)) + { + if(xx < epsilon) + { + axis.x = static_cast(0.0); + axis.y = static_cast(0.7071); + axis.z = static_cast(0.7071); + } + else + { + axis.x = sqrt(xx); + axis.y = xy / axis.x; + axis.z = xz / axis.x; + } + } + else if (yy > zz) + { + if(yy < epsilon) + { + axis.x = static_cast(0.7071); + axis.y = static_cast(0.0); + axis.z = static_cast(0.7071); + } + else + { + axis.y = sqrt(yy); + axis.x = xy / axis.y; + axis.z = yz / axis.y; + } + } + else + { + if (zz < epsilon) + { + axis.x = static_cast(0.7071); + axis.y = static_cast(0.7071); + axis.z = static_cast(0.0); + } + else + { + axis.z = sqrt(zz); + axis.x = xz / axis.z; + axis.y = yz / axis.z; + } + } + return; + } + T s = sqrt((m[2][1] - m[1][2]) * (m[2][1] - m[1][2]) + (m[2][0] - m[0][2]) * (m[2][0] - m[0][2]) + (m[1][0] - m[0][1]) * (m[1][0] - m[0][1])); + if (glm::abs(s) < T(0.001)) + s = static_cast(1); + T const angleCos = (m[0][0] + m[1][1] + m[2][2] - static_cast(1)) * static_cast(0.5); + if(angleCos - static_cast(1) < epsilon) + angle = pi() * static_cast(0.25); + else + angle = acos(angleCos); + axis.x = (m[1][2] - m[2][1]) / s; + axis.y = (m[2][0] - m[0][2]) / s; + axis.z = (m[0][1] - m[1][0]) / s; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> axisAngleMatrix(vec<3, T, Q> const& axis, T const angle) + { + T c = cos(angle); + T s = sin(angle); + T t = static_cast(1) - c; + vec<3, T, Q> n = normalize(axis); + + return mat<4, 4, T, Q>( + t * n.x * n.x + c, t * n.x * n.y + n.z * s, t * n.x * n.z - n.y * s, static_cast(0.0), + t * n.x * n.y - n.z * s, t * n.y * n.y + c, t * n.y * n.z + n.x * s, static_cast(0.0), + t * n.x * n.z + n.y * s, t * n.y * n.z - n.x * s, t * n.z * n.z + c, static_cast(0.0), + static_cast(0.0), static_cast(0.0), static_cast(0.0), static_cast(1.0)); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> extractMatrixRotation(mat<4, 4, T, Q> const& m) + { + return mat<4, 4, T, Q>( + m[0][0], m[0][1], m[0][2], static_cast(0.0), + m[1][0], m[1][1], 
m[1][2], static_cast(0.0), + m[2][0], m[2][1], m[2][2], static_cast(0.0), + static_cast(0.0), static_cast(0.0), static_cast(0.0), static_cast(1.0)); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> interpolate(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2, T const delta) + { + mat<4, 4, T, Q> m1rot = extractMatrixRotation(m1); + mat<4, 4, T, Q> dltRotation = m2 * transpose(m1rot); + vec<3, T, Q> dltAxis; + T dltAngle; + axisAngle(dltRotation, dltAxis, dltAngle); + mat<4, 4, T, Q> out = axisAngleMatrix(dltAxis, dltAngle * delta) * m1rot; + out[3][0] = m1[3][0] + delta * (m2[3][0] - m1[3][0]); + out[3][1] = m1[3][1] + delta * (m2[3][1] - m1[3][1]); + out[3][2] = m1[3][2] + delta * (m2[3][2] - m1[3][2]); + return out; + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/matrix_major_storage.hpp b/MacroLibX/third_party/glm/gtx/matrix_major_storage.hpp new file mode 100644 index 0000000..8c6bc22 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/matrix_major_storage.hpp @@ -0,0 +1,119 @@ +/// @ref gtx_matrix_major_storage +/// @file glm/gtx/matrix_major_storage.hpp +/// +/// @see core (dependence) +/// @see gtx_extented_min_max (dependence) +/// +/// @defgroup gtx_matrix_major_storage GLM_GTX_matrix_major_storage +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Build matrices with specific matrix order, row or column + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_matrix_major_storage is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_matrix_major_storage extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_matrix_major_storage + /// @{ + + //! Build a row major matrix from row vectors. + //! From GLM_GTX_matrix_major_storage extension. + template + GLM_FUNC_DECL mat<2, 2, T, Q> rowMajor2( + vec<2, T, Q> const& v1, + vec<2, T, Q> const& v2); + + //! Build a row major matrix from other matrix. + //! From GLM_GTX_matrix_major_storage extension. + template + GLM_FUNC_DECL mat<2, 2, T, Q> rowMajor2( + mat<2, 2, T, Q> const& m); + + //! Build a row major matrix from row vectors. + //! From GLM_GTX_matrix_major_storage extension. + template + GLM_FUNC_DECL mat<3, 3, T, Q> rowMajor3( + vec<3, T, Q> const& v1, + vec<3, T, Q> const& v2, + vec<3, T, Q> const& v3); + + //! Build a row major matrix from other matrix. + //! From GLM_GTX_matrix_major_storage extension. + template + GLM_FUNC_DECL mat<3, 3, T, Q> rowMajor3( + mat<3, 3, T, Q> const& m); + + //! Build a row major matrix from row vectors. + //! From GLM_GTX_matrix_major_storage extension. + template + GLM_FUNC_DECL mat<4, 4, T, Q> rowMajor4( + vec<4, T, Q> const& v1, + vec<4, T, Q> const& v2, + vec<4, T, Q> const& v3, + vec<4, T, Q> const& v4); + + //! Build a row major matrix from other matrix. + //! From GLM_GTX_matrix_major_storage extension. + template + GLM_FUNC_DECL mat<4, 4, T, Q> rowMajor4( + mat<4, 4, T, Q> const& m); + + //! Build a column major matrix from column vectors. + //! From GLM_GTX_matrix_major_storage extension. + template + GLM_FUNC_DECL mat<2, 2, T, Q> colMajor2( + vec<2, T, Q> const& v1, + vec<2, T, Q> const& v2); + + //! Build a column major matrix from other matrix. + //! From GLM_GTX_matrix_major_storage extension. 
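+	//! Editor's note: glm stores matrices column-major, so the colMajorN
+	//! builders forward their vectors as columns unchanged, while the
+	//! rowMajorN builders effectively transpose; e.g., for 2x2 (a sketch):
+	//!     rowMajor2(v1, v2) == transpose(colMajor2(v1, v2))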
+ template + GLM_FUNC_DECL mat<2, 2, T, Q> colMajor2( + mat<2, 2, T, Q> const& m); + + //! Build a column major matrix from column vectors. + //! From GLM_GTX_matrix_major_storage extension. + template + GLM_FUNC_DECL mat<3, 3, T, Q> colMajor3( + vec<3, T, Q> const& v1, + vec<3, T, Q> const& v2, + vec<3, T, Q> const& v3); + + //! Build a column major matrix from other matrix. + //! From GLM_GTX_matrix_major_storage extension. + template + GLM_FUNC_DECL mat<3, 3, T, Q> colMajor3( + mat<3, 3, T, Q> const& m); + + //! Build a column major matrix from column vectors. + //! From GLM_GTX_matrix_major_storage extension. + template + GLM_FUNC_DECL mat<4, 4, T, Q> colMajor4( + vec<4, T, Q> const& v1, + vec<4, T, Q> const& v2, + vec<4, T, Q> const& v3, + vec<4, T, Q> const& v4); + + //! Build a column major matrix from other matrix. + //! From GLM_GTX_matrix_major_storage extension. + template + GLM_FUNC_DECL mat<4, 4, T, Q> colMajor4( + mat<4, 4, T, Q> const& m); + + /// @} +}//namespace glm + +#include "matrix_major_storage.inl" diff --git a/MacroLibX/third_party/glm/gtx/matrix_major_storage.inl b/MacroLibX/third_party/glm/gtx/matrix_major_storage.inl new file mode 100644 index 0000000..279dd34 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/matrix_major_storage.inl @@ -0,0 +1,166 @@ +/// @ref gtx_matrix_major_storage + +namespace glm +{ + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> rowMajor2 + ( + vec<2, T, Q> const& v1, + vec<2, T, Q> const& v2 + ) + { + mat<2, 2, T, Q> Result; + Result[0][0] = v1.x; + Result[1][0] = v1.y; + Result[0][1] = v2.x; + Result[1][1] = v2.y; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> rowMajor2( + const mat<2, 2, T, Q>& m) + { + mat<2, 2, T, Q> Result; + Result[0][0] = m[0][0]; + Result[0][1] = m[1][0]; + Result[1][0] = m[0][1]; + Result[1][1] = m[1][1]; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> rowMajor3( + const vec<3, T, Q>& v1, + const vec<3, T, Q>& v2, + const vec<3, T, Q>& v3) + { + mat<3, 3, T, Q> Result; + Result[0][0] = v1.x; + Result[1][0] = v1.y; + Result[2][0] = v1.z; + Result[0][1] = v2.x; + Result[1][1] = v2.y; + Result[2][1] = v2.z; + Result[0][2] = v3.x; + Result[1][2] = v3.y; + Result[2][2] = v3.z; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> rowMajor3( + const mat<3, 3, T, Q>& m) + { + mat<3, 3, T, Q> Result; + Result[0][0] = m[0][0]; + Result[0][1] = m[1][0]; + Result[0][2] = m[2][0]; + Result[1][0] = m[0][1]; + Result[1][1] = m[1][1]; + Result[1][2] = m[2][1]; + Result[2][0] = m[0][2]; + Result[2][1] = m[1][2]; + Result[2][2] = m[2][2]; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> rowMajor4( + const vec<4, T, Q>& v1, + const vec<4, T, Q>& v2, + const vec<4, T, Q>& v3, + const vec<4, T, Q>& v4) + { + mat<4, 4, T, Q> Result; + Result[0][0] = v1.x; + Result[1][0] = v1.y; + Result[2][0] = v1.z; + Result[3][0] = v1.w; + Result[0][1] = v2.x; + Result[1][1] = v2.y; + Result[2][1] = v2.z; + Result[3][1] = v2.w; + Result[0][2] = v3.x; + Result[1][2] = v3.y; + Result[2][2] = v3.z; + Result[3][2] = v3.w; + Result[0][3] = v4.x; + Result[1][3] = v4.y; + Result[2][3] = v4.z; + Result[3][3] = v4.w; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> rowMajor4( + const mat<4, 4, T, Q>& m) + { + mat<4, 4, T, Q> Result; + Result[0][0] = m[0][0]; + Result[0][1] = m[1][0]; + Result[0][2] = m[2][0]; + Result[0][3] = m[3][0]; + Result[1][0] = m[0][1]; + Result[1][1] = m[1][1]; + Result[1][2] = m[2][1]; + Result[1][3] = m[3][1]; + Result[2][0] 
= m[0][2]; + Result[2][1] = m[1][2]; + Result[2][2] = m[2][2]; + Result[2][3] = m[3][2]; + Result[3][0] = m[0][3]; + Result[3][1] = m[1][3]; + Result[3][2] = m[2][3]; + Result[3][3] = m[3][3]; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> colMajor2( + const vec<2, T, Q>& v1, + const vec<2, T, Q>& v2) + { + return mat<2, 2, T, Q>(v1, v2); + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> colMajor2( + const mat<2, 2, T, Q>& m) + { + return mat<2, 2, T, Q>(m); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> colMajor3( + const vec<3, T, Q>& v1, + const vec<3, T, Q>& v2, + const vec<3, T, Q>& v3) + { + return mat<3, 3, T, Q>(v1, v2, v3); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> colMajor3( + const mat<3, 3, T, Q>& m) + { + return mat<3, 3, T, Q>(m); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> colMajor4( + const vec<4, T, Q>& v1, + const vec<4, T, Q>& v2, + const vec<4, T, Q>& v3, + const vec<4, T, Q>& v4) + { + return mat<4, 4, T, Q>(v1, v2, v3, v4); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> colMajor4( + const mat<4, 4, T, Q>& m) + { + return mat<4, 4, T, Q>(m); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/matrix_operation.hpp b/MacroLibX/third_party/glm/gtx/matrix_operation.hpp new file mode 100644 index 0000000..de6ff1f --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/matrix_operation.hpp @@ -0,0 +1,103 @@ +/// @ref gtx_matrix_operation +/// @file glm/gtx/matrix_operation.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_matrix_operation GLM_GTX_matrix_operation +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Build diagonal matrices from vectors. + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_matrix_operation is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_matrix_operation extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_matrix_operation + /// @{ + + //! Build a diagonal matrix. + //! From GLM_GTX_matrix_operation extension. + template + GLM_FUNC_DECL mat<2, 2, T, Q> diagonal2x2( + vec<2, T, Q> const& v); + + //! Build a diagonal matrix. + //! From GLM_GTX_matrix_operation extension. + template + GLM_FUNC_DECL mat<2, 3, T, Q> diagonal2x3( + vec<2, T, Q> const& v); + + //! Build a diagonal matrix. + //! From GLM_GTX_matrix_operation extension. + template + GLM_FUNC_DECL mat<2, 4, T, Q> diagonal2x4( + vec<2, T, Q> const& v); + + //! Build a diagonal matrix. + //! From GLM_GTX_matrix_operation extension. + template + GLM_FUNC_DECL mat<3, 2, T, Q> diagonal3x2( + vec<2, T, Q> const& v); + + //! Build a diagonal matrix. + //! From GLM_GTX_matrix_operation extension. + template + GLM_FUNC_DECL mat<3, 3, T, Q> diagonal3x3( + vec<3, T, Q> const& v); + + //! Build a diagonal matrix. + //! From GLM_GTX_matrix_operation extension. + template + GLM_FUNC_DECL mat<3, 4, T, Q> diagonal3x4( + vec<3, T, Q> const& v); + + //! Build a diagonal matrix. + //! From GLM_GTX_matrix_operation extension. + template + GLM_FUNC_DECL mat<4, 2, T, Q> diagonal4x2( + vec<2, T, Q> const& v); + + //! Build a diagonal matrix. + //! From GLM_GTX_matrix_operation extension. + template + GLM_FUNC_DECL mat<4, 3, T, Q> diagonal4x3( + vec<3, T, Q> const& v); + + //! Build a diagonal matrix. 
+ //! From GLM_GTX_matrix_operation extension. + template + GLM_FUNC_DECL mat<4, 4, T, Q> diagonal4x4( + vec<4, T, Q> const& v); + + /// Build an adjugate matrix. + /// From GLM_GTX_matrix_operation extension. + template + GLM_FUNC_DECL mat<2, 2, T, Q> adjugate(mat<2, 2, T, Q> const& m); + + /// Build an adjugate matrix. + /// From GLM_GTX_matrix_operation extension. + template + GLM_FUNC_DECL mat<3, 3, T, Q> adjugate(mat<3, 3, T, Q> const& m); + + /// Build an adjugate matrix. + /// From GLM_GTX_matrix_operation extension. + template + GLM_FUNC_DECL mat<4, 4, T, Q> adjugate(mat<4, 4, T, Q> const& m); + + /// @} +}//namespace glm + +#include "matrix_operation.inl" diff --git a/MacroLibX/third_party/glm/gtx/matrix_operation.inl b/MacroLibX/third_party/glm/gtx/matrix_operation.inl new file mode 100644 index 0000000..9de83f8 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/matrix_operation.inl @@ -0,0 +1,176 @@ +/// @ref gtx_matrix_operation + +namespace glm +{ + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> diagonal2x2 + ( + vec<2, T, Q> const& v + ) + { + mat<2, 2, T, Q> Result(static_cast(1)); + Result[0][0] = v[0]; + Result[1][1] = v[1]; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<2, 3, T, Q> diagonal2x3 + ( + vec<2, T, Q> const& v + ) + { + mat<2, 3, T, Q> Result(static_cast(1)); + Result[0][0] = v[0]; + Result[1][1] = v[1]; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<2, 4, T, Q> diagonal2x4 + ( + vec<2, T, Q> const& v + ) + { + mat<2, 4, T, Q> Result(static_cast(1)); + Result[0][0] = v[0]; + Result[1][1] = v[1]; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 2, T, Q> diagonal3x2 + ( + vec<2, T, Q> const& v + ) + { + mat<3, 2, T, Q> Result(static_cast(1)); + Result[0][0] = v[0]; + Result[1][1] = v[1]; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> diagonal3x3 + ( + vec<3, T, Q> const& v + ) + { + mat<3, 3, T, Q> Result(static_cast(1)); + Result[0][0] = v[0]; + Result[1][1] = v[1]; + Result[2][2] = v[2]; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 4, T, Q> diagonal3x4 + ( + vec<3, T, Q> const& v + ) + { + mat<3, 4, T, Q> Result(static_cast(1)); + Result[0][0] = v[0]; + Result[1][1] = v[1]; + Result[2][2] = v[2]; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> diagonal4x4 + ( + vec<4, T, Q> const& v + ) + { + mat<4, 4, T, Q> Result(static_cast(1)); + Result[0][0] = v[0]; + Result[1][1] = v[1]; + Result[2][2] = v[2]; + Result[3][3] = v[3]; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 3, T, Q> diagonal4x3 + ( + vec<3, T, Q> const& v + ) + { + mat<4, 3, T, Q> Result(static_cast(1)); + Result[0][0] = v[0]; + Result[1][1] = v[1]; + Result[2][2] = v[2]; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 2, T, Q> diagonal4x2 + ( + vec<2, T, Q> const& v + ) + { + mat<4, 2, T, Q> Result(static_cast(1)); + Result[0][0] = v[0]; + Result[1][1] = v[1]; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<2, 2, T, Q> adjugate(mat<2, 2, T, Q> const& m) + { + return mat<2, 2, T, Q>( + +m[1][1], -m[1][0], + -m[0][1], +m[0][0]); + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> adjugate(mat<3, 3, T, Q> const& m) + { + T const m00 = determinant(mat<2, 2, T, Q>(m[1][1], m[2][1], m[1][2], m[2][2])); + T const m01 = determinant(mat<2, 2, T, Q>(m[0][1], m[2][1], m[0][2], m[2][2])); + T const m02 = determinant(mat<2, 2, T, Q>(m[0][1], m[1][1], m[0][2], m[1][2])); + + T const m10 = determinant(mat<2, 2, T, Q>(m[1][0], m[2][0], m[1][2], m[2][2])); + T const m11 = 
determinant(mat<2, 2, T, Q>(m[0][0], m[2][0], m[0][2], m[2][2])); + T const m12 = determinant(mat<2, 2, T, Q>(m[0][0], m[1][0], m[0][2], m[1][2])); + + T const m20 = determinant(mat<2, 2, T, Q>(m[1][0], m[2][0], m[1][1], m[2][1])); + T const m21 = determinant(mat<2, 2, T, Q>(m[0][0], m[2][0], m[0][1], m[2][1])); + T const m22 = determinant(mat<2, 2, T, Q>(m[0][0], m[1][0], m[0][1], m[1][1])); + + return mat<3, 3, T, Q>( + +m00, -m01, +m02, + -m10, +m11, -m12, + +m20, -m21, +m22); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> adjugate(mat<4, 4, T, Q> const& m) + { + T const m00 = determinant(mat<3, 3, T, Q>(m[1][1], m[1][2], m[1][3], m[2][1], m[2][2], m[2][3], m[3][1], m[3][2], m[3][3])); + T const m01 = determinant(mat<3, 3, T, Q>(m[1][0], m[1][2], m[1][3], m[2][0], m[2][2], m[2][3], m[3][0], m[3][2], m[3][3])); + T const m02 = determinant(mat<3, 3, T, Q>(m[1][0], m[1][1], m[1][3], m[2][0], m[2][2], m[2][3], m[3][0], m[3][1], m[3][3])); + T const m03 = determinant(mat<3, 3, T, Q>(m[1][0], m[1][1], m[1][2], m[2][0], m[2][1], m[2][2], m[3][0], m[3][1], m[3][2])); + + T const m10 = determinant(mat<3, 3, T, Q>(m[0][1], m[0][2], m[0][3], m[2][1], m[2][2], m[2][3], m[3][1], m[3][2], m[3][3])); + T const m11 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][2], m[0][3], m[2][0], m[2][2], m[2][3], m[3][0], m[3][2], m[3][3])); + T const m12 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][1], m[0][3], m[2][0], m[2][1], m[2][3], m[3][0], m[3][1], m[3][3])); + T const m13 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][1], m[0][2], m[2][0], m[2][1], m[2][2], m[3][0], m[3][1], m[3][2])); + + T const m20 = determinant(mat<3, 3, T, Q>(m[0][1], m[0][2], m[0][3], m[1][1], m[1][2], m[1][3], m[3][1], m[3][2], m[3][3])); + T const m21 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][2], m[0][3], m[1][0], m[1][2], m[1][3], m[3][0], m[3][2], m[3][3])); + T const m22 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][1], m[0][3], m[1][0], m[1][1], m[1][3], m[3][0], m[3][1], m[3][3])); + T const m23 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][1], m[0][2], m[1][0], m[1][1], m[1][2], m[3][0], m[3][1], m[3][2])); + + T const m30 = determinant(mat<3, 3, T, Q>(m[0][1], m[0][2], m[0][3], m[1][1], m[1][2], m[1][3], m[2][1], m[2][2], m[2][3])); + T const m31 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][2], m[0][3], m[1][0], m[1][2], m[1][3], m[2][0], m[2][2], m[2][3])); + T const m32 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][1], m[0][3], m[1][0], m[1][1], m[1][3], m[2][0], m[2][1], m[2][3])); + T const m33 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][1], m[0][2], m[1][0], m[1][1], m[1][2], m[2][0], m[2][1], m[2][2])); + + return mat<4, 4, T, Q>( + +m00, -m01, +m02, -m03, + -m10, +m11, -m12, +m13, + +m20, -m21, +m22, -m23, + -m30, +m31, -m32, +m33); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/matrix_query.hpp b/MacroLibX/third_party/glm/gtx/matrix_query.hpp new file mode 100644 index 0000000..8011b2b --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/matrix_query.hpp @@ -0,0 +1,77 @@ +/// @ref gtx_matrix_query +/// @file glm/gtx/matrix_query.hpp +/// +/// @see core (dependence) +/// @see gtx_vector_query (dependence) +/// +/// @defgroup gtx_matrix_query GLM_GTX_matrix_query +/// @ingroup gtx +/// +/// Include to use the features of this extension. 
+///
+/// Query to evaluate matrix properties.
+
+#pragma once
+
+// Dependency:
+#include "../glm.hpp"
+#include "../gtx/vector_query.hpp"
+#include <limits>
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	ifndef GLM_ENABLE_EXPERIMENTAL
+#		pragma message("GLM: GLM_GTX_matrix_query is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.")
+#	else
+#		pragma message("GLM: GLM_GTX_matrix_query extension included")
+#	endif
+#endif
+
+namespace glm
+{
+	/// @addtogroup gtx_matrix_query
+	/// @{
+
+	/// Return whether a matrix is a null matrix.
+	/// From GLM_GTX_matrix_query extension.
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL bool isNull(mat<2, 2, T, Q> const& m, T const& epsilon);
+
+	/// Return whether a matrix is a null matrix.
+	/// From GLM_GTX_matrix_query extension.
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL bool isNull(mat<3, 3, T, Q> const& m, T const& epsilon);
+
+	/// Return whether a matrix is a null matrix.
+	/// From GLM_GTX_matrix_query extension.
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL bool isNull(mat<4, 4, T, Q> const& m, T const& epsilon);
+
+	/// Return whether a matrix is an identity matrix.
+	/// From GLM_GTX_matrix_query extension.
+	template<length_t C, length_t R, typename T, qualifier Q, template<length_t, length_t, typename, qualifier> class matType>
+	GLM_FUNC_DECL bool isIdentity(matType<C, R, T, Q> const& m, T const& epsilon);
+
+	/// Return whether a matrix is a normalized matrix.
+	/// From GLM_GTX_matrix_query extension.
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL bool isNormalized(mat<2, 2, T, Q> const& m, T const& epsilon);
+
+	/// Return whether a matrix is a normalized matrix.
+	/// From GLM_GTX_matrix_query extension.
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL bool isNormalized(mat<3, 3, T, Q> const& m, T const& epsilon);
+
+	/// Return whether a matrix is a normalized matrix.
+	/// From GLM_GTX_matrix_query extension.
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL bool isNormalized(mat<4, 4, T, Q> const& m, T const& epsilon);
+
+	/// Return whether a matrix is an orthonormalized matrix.
+	/// From GLM_GTX_matrix_query extension.
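As a usage sketch for the query functions declared above (editorial addition; the wrapper name and epsilon value are illustrative), the epsilon parameter bounds the allowed per-element deviation:

    #define GLM_ENABLE_EXPERIMENTAL
    #include <glm/gtx/matrix_query.hpp>

    // Detect whether an accumulated transform has drifted away from identity.
    bool driftedFromIdentity(const glm::mat4& m)
    {
        return !glm::isIdentity(m, 0.0001f); // element-wise comparison within epsilon
    }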
+ template class matType> + GLM_FUNC_DECL bool isOrthogonal(matType const& m, T const& epsilon); + + /// @} +}//namespace glm + +#include "matrix_query.inl" diff --git a/MacroLibX/third_party/glm/gtx/matrix_query.inl b/MacroLibX/third_party/glm/gtx/matrix_query.inl new file mode 100644 index 0000000..77bd231 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/matrix_query.inl @@ -0,0 +1,113 @@ +/// @ref gtx_matrix_query + +namespace glm +{ + template + GLM_FUNC_QUALIFIER bool isNull(mat<2, 2, T, Q> const& m, T const& epsilon) + { + bool result = true; + for(length_t i = 0; result && i < m.length() ; ++i) + result = isNull(m[i], epsilon); + return result; + } + + template + GLM_FUNC_QUALIFIER bool isNull(mat<3, 3, T, Q> const& m, T const& epsilon) + { + bool result = true; + for(length_t i = 0; result && i < m.length() ; ++i) + result = isNull(m[i], epsilon); + return result; + } + + template + GLM_FUNC_QUALIFIER bool isNull(mat<4, 4, T, Q> const& m, T const& epsilon) + { + bool result = true; + for(length_t i = 0; result && i < m.length() ; ++i) + result = isNull(m[i], epsilon); + return result; + } + + template + GLM_FUNC_QUALIFIER bool isIdentity(mat const& m, T const& epsilon) + { + bool result = true; + for(length_t i = 0; result && i < m[0].length() ; ++i) + { + for(length_t j = 0; result && j < i ; ++j) + result = abs(m[i][j]) <= epsilon; + if(result) + result = abs(m[i][i] - 1) <= epsilon; + for(length_t j = i + 1; result && j < m.length(); ++j) + result = abs(m[i][j]) <= epsilon; + } + return result; + } + + template + GLM_FUNC_QUALIFIER bool isNormalized(mat<2, 2, T, Q> const& m, T const& epsilon) + { + bool result(true); + for(length_t i = 0; result && i < m.length(); ++i) + result = isNormalized(m[i], epsilon); + for(length_t i = 0; result && i < m.length(); ++i) + { + typename mat<2, 2, T, Q>::col_type v; + for(length_t j = 0; j < m.length(); ++j) + v[j] = m[j][i]; + result = isNormalized(v, epsilon); + } + return result; + } + + template + GLM_FUNC_QUALIFIER bool isNormalized(mat<3, 3, T, Q> const& m, T const& epsilon) + { + bool result(true); + for(length_t i = 0; result && i < m.length(); ++i) + result = isNormalized(m[i], epsilon); + for(length_t i = 0; result && i < m.length(); ++i) + { + typename mat<3, 3, T, Q>::col_type v; + for(length_t j = 0; j < m.length(); ++j) + v[j] = m[j][i]; + result = isNormalized(v, epsilon); + } + return result; + } + + template + GLM_FUNC_QUALIFIER bool isNormalized(mat<4, 4, T, Q> const& m, T const& epsilon) + { + bool result(true); + for(length_t i = 0; result && i < m.length(); ++i) + result = isNormalized(m[i], epsilon); + for(length_t i = 0; result && i < m.length(); ++i) + { + typename mat<4, 4, T, Q>::col_type v; + for(length_t j = 0; j < m.length(); ++j) + v[j] = m[j][i]; + result = isNormalized(v, epsilon); + } + return result; + } + + template + GLM_FUNC_QUALIFIER bool isOrthogonal(mat const& m, T const& epsilon) + { + bool result = true; + for(length_t i(0); result && i < m.length() - 1; ++i) + for(length_t j(i + 1); result && j < m.length(); ++j) + result = areOrthogonal(m[i], m[j], epsilon); + + if(result) + { + mat tmp = transpose(m); + for(length_t i(0); result && i < m.length() - 1 ; ++i) + for(length_t j(i + 1); result && j < m.length(); ++j) + result = areOrthogonal(tmp[i], tmp[j], epsilon); + } + return result; + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/matrix_transform_2d.hpp b/MacroLibX/third_party/glm/gtx/matrix_transform_2d.hpp new file mode 100644 index 0000000..5f9c540 --- /dev/null +++ 
b/MacroLibX/third_party/glm/gtx/matrix_transform_2d.hpp @@ -0,0 +1,81 @@ +/// @ref gtx_matrix_transform_2d +/// @file glm/gtx/matrix_transform_2d.hpp +/// @author Miguel Ángel Pérez Martínez +/// +/// @see core (dependence) +/// +/// @defgroup gtx_matrix_transform_2d GLM_GTX_matrix_transform_2d +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Defines functions that generate common 2d transformation matrices. + +#pragma once + +// Dependency: +#include "../mat3x3.hpp" +#include "../vec2.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_matrix_transform_2d is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_matrix_transform_2d extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_matrix_transform_2d + /// @{ + + /// Builds a translation 3 * 3 matrix created from a vector of 2 components. + /// + /// @param m Input matrix multiplied by this translation matrix. + /// @param v Coordinates of a translation vector. + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> translate( + mat<3, 3, T, Q> const& m, + vec<2, T, Q> const& v); + + /// Builds a rotation 3 * 3 matrix created from an angle. + /// + /// @param m Input matrix multiplied by this translation matrix. + /// @param angle Rotation angle expressed in radians. + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> rotate( + mat<3, 3, T, Q> const& m, + T angle); + + /// Builds a scale 3 * 3 matrix created from a vector of 2 components. + /// + /// @param m Input matrix multiplied by this translation matrix. + /// @param v Coordinates of a scale vector. + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> scale( + mat<3, 3, T, Q> const& m, + vec<2, T, Q> const& v); + + /// Builds an horizontal (parallel to the x axis) shear 3 * 3 matrix. + /// + /// @param m Input matrix multiplied by this translation matrix. + /// @param y Shear factor. + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> shearX( + mat<3, 3, T, Q> const& m, + T y); + + /// Builds a vertical (parallel to the y axis) shear 3 * 3 matrix. + /// + /// @param m Input matrix multiplied by this translation matrix. + /// @param x Shear factor. 
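These 2D helpers compose like their 3D counterparts: each call right-multiplies m, so the transform passed last is applied to a point first. A sketch under that assumption (editorial, with illustrative values):

    #define GLM_ENABLE_EXPERIMENTAL
    #include <glm/glm.hpp>
    #include <glm/gtx/matrix_transform_2d.hpp>

    // Scale is applied to the point first, then rotation, then translation.
    glm::vec2 transformPoint(glm::vec2 p)
    {
        glm::mat3 m(1.0f);
        m = glm::translate(m, glm::vec2(10.0f, 20.0f));
        m = glm::rotate(m, glm::radians(45.0f));
        m = glm::scale(m, glm::vec2(2.0f, 2.0f));
        glm::vec3 h = m * glm::vec3(p, 1.0f); // homogeneous 2D point
        return glm::vec2(h);
    }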
+ template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> shearY( + mat<3, 3, T, Q> const& m, + T x); + + /// @} +}//namespace glm + +#include "matrix_transform_2d.inl" diff --git a/MacroLibX/third_party/glm/gtx/matrix_transform_2d.inl b/MacroLibX/third_party/glm/gtx/matrix_transform_2d.inl new file mode 100644 index 0000000..a68d24d --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/matrix_transform_2d.inl @@ -0,0 +1,68 @@ +/// @ref gtx_matrix_transform_2d +/// @author Miguel Ángel Pérez Martínez + +#include "../trigonometric.hpp" + +namespace glm +{ + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> translate( + mat<3, 3, T, Q> const& m, + vec<2, T, Q> const& v) + { + mat<3, 3, T, Q> Result(m); + Result[2] = m[0] * v[0] + m[1] * v[1] + m[2]; + return Result; + } + + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> rotate( + mat<3, 3, T, Q> const& m, + T angle) + { + T const a = angle; + T const c = cos(a); + T const s = sin(a); + + mat<3, 3, T, Q> Result; + Result[0] = m[0] * c + m[1] * s; + Result[1] = m[0] * -s + m[1] * c; + Result[2] = m[2]; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> scale( + mat<3, 3, T, Q> const& m, + vec<2, T, Q> const& v) + { + mat<3, 3, T, Q> Result; + Result[0] = m[0] * v[0]; + Result[1] = m[1] * v[1]; + Result[2] = m[2]; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> shearX( + mat<3, 3, T, Q> const& m, + T y) + { + mat<3, 3, T, Q> Result(1); + Result[0][1] = y; + return m * Result; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> shearY( + mat<3, 3, T, Q> const& m, + T x) + { + mat<3, 3, T, Q> Result(1); + Result[1][0] = x; + return m * Result; + } + +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/mixed_product.hpp b/MacroLibX/third_party/glm/gtx/mixed_product.hpp new file mode 100644 index 0000000..b242e35 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/mixed_product.hpp @@ -0,0 +1,41 @@ +/// @ref gtx_mixed_product +/// @file glm/gtx/mixed_product.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_mixed_product GLM_GTX_mixed_producte +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Mixed product of 3 vectors. + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_mixed_product is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_mixed_product extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_mixed_product + /// @{ + + /// @brief Mixed product of 3 vectors (from GLM_GTX_mixed_product extension) + template + GLM_FUNC_DECL T mixedProduct( + vec<3, T, Q> const& v1, + vec<3, T, Q> const& v2, + vec<3, T, Q> const& v3); + + /// @} +}// namespace glm + +#include "mixed_product.inl" diff --git a/MacroLibX/third_party/glm/gtx/mixed_product.inl b/MacroLibX/third_party/glm/gtx/mixed_product.inl new file mode 100644 index 0000000..e5cdbdb --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/mixed_product.inl @@ -0,0 +1,15 @@ +/// @ref gtx_mixed_product + +namespace glm +{ + template + GLM_FUNC_QUALIFIER T mixedProduct + ( + vec<3, T, Q> const& v1, + vec<3, T, Q> const& v2, + vec<3, T, Q> const& v3 + ) + { + return dot(cross(v1, v2), v3); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/norm.hpp b/MacroLibX/third_party/glm/gtx/norm.hpp new file mode 100644 index 0000000..dfaebb7 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/norm.hpp @@ -0,0 +1,88 @@ +/// @ref gtx_norm +/// @file glm/gtx/norm.hpp +/// +/// @see core (dependence) +/// @see gtx_quaternion (dependence) +/// @see gtx_component_wise (dependence) +/// +/// @defgroup gtx_norm GLM_GTX_norm +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Various ways to compute vector norms. + +#pragma once + +// Dependency: +#include "../geometric.hpp" +#include "../gtx/quaternion.hpp" +#include "../gtx/component_wise.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_norm is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_norm extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_norm + /// @{ + + /// Returns the squared length of x. + /// From GLM_GTX_norm extension. + template + GLM_FUNC_DECL T length2(vec const& x); + + /// Returns the squared distance between p0 and p1, i.e., length2(p0 - p1). + /// From GLM_GTX_norm extension. + template + GLM_FUNC_DECL T distance2(vec const& p0, vec const& p1); + + //! Returns the L1 norm between x and y. + //! From GLM_GTX_norm extension. + template + GLM_FUNC_DECL T l1Norm(vec<3, T, Q> const& x, vec<3, T, Q> const& y); + + //! Returns the L1 norm of v. + //! From GLM_GTX_norm extension. + template + GLM_FUNC_DECL T l1Norm(vec<3, T, Q> const& v); + + //! Returns the L2 norm between x and y. + //! From GLM_GTX_norm extension. + template + GLM_FUNC_DECL T l2Norm(vec<3, T, Q> const& x, vec<3, T, Q> const& y); + + //! Returns the L2 norm of v. + //! From GLM_GTX_norm extension. + template + GLM_FUNC_DECL T l2Norm(vec<3, T, Q> const& x); + + //! Returns the L norm between x and y. + //! From GLM_GTX_norm extension. + template + GLM_FUNC_DECL T lxNorm(vec<3, T, Q> const& x, vec<3, T, Q> const& y, unsigned int Depth); + + //! Returns the L norm of v. + //! From GLM_GTX_norm extension. + template + GLM_FUNC_DECL T lxNorm(vec<3, T, Q> const& x, unsigned int Depth); + + //! Returns the LMax norm between x and y. + //! From GLM_GTX_norm extension. + template + GLM_FUNC_DECL T lMaxNorm(vec<3, T, Q> const& x, vec<3, T, Q> const& y); + + //! Returns the LMax norm of v. + //! From GLM_GTX_norm extension. 
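Two small usage sketches for the helpers above (editorial, hypothetical function names): length2/distance2 skip the square root when only an ordering is needed, and mixedProduct is the signed volume dot(cross(u, v), w):

    #define GLM_ENABLE_EXPERIMENTAL
    #include <glm/gtx/norm.hpp>
    #include <glm/gtx/mixed_product.hpp>

    // Compare distances without computing any square root.
    bool aIsCloser(glm::vec3 p, glm::vec3 a, glm::vec3 b)
    {
        return glm::distance2(p, a) < glm::distance2(p, b);
    }

    // Signed volume of the parallelepiped spanned by three edge vectors.
    float signedVolume(glm::vec3 u, glm::vec3 v, glm::vec3 w)
    {
        return glm::mixedProduct(u, v, w);
    }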
+ template + GLM_FUNC_DECL T lMaxNorm(vec<3, T, Q> const& x); + + /// @} +}//namespace glm + +#include "norm.inl" diff --git a/MacroLibX/third_party/glm/gtx/norm.inl b/MacroLibX/third_party/glm/gtx/norm.inl new file mode 100644 index 0000000..6db561b --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/norm.inl @@ -0,0 +1,95 @@ +/// @ref gtx_norm + +#include "../detail/qualifier.hpp" + +namespace glm{ +namespace detail +{ + template + struct compute_length2 + { + GLM_FUNC_QUALIFIER static T call(vec const& v) + { + return dot(v, v); + } + }; +}//namespace detail + + template + GLM_FUNC_QUALIFIER genType length2(genType x) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'length2' accepts only floating-point inputs"); + return x * x; + } + + template + GLM_FUNC_QUALIFIER T length2(vec const& v) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'length2' accepts only floating-point inputs"); + return detail::compute_length2::value>::call(v); + } + + template + GLM_FUNC_QUALIFIER T distance2(T p0, T p1) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'distance2' accepts only floating-point inputs"); + return length2(p1 - p0); + } + + template + GLM_FUNC_QUALIFIER T distance2(vec const& p0, vec const& p1) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'distance2' accepts only floating-point inputs"); + return length2(p1 - p0); + } + + template + GLM_FUNC_QUALIFIER T l1Norm(vec<3, T, Q> const& a, vec<3, T, Q> const& b) + { + return abs(b.x - a.x) + abs(b.y - a.y) + abs(b.z - a.z); + } + + template + GLM_FUNC_QUALIFIER T l1Norm(vec<3, T, Q> const& v) + { + return abs(v.x) + abs(v.y) + abs(v.z); + } + + template + GLM_FUNC_QUALIFIER T l2Norm(vec<3, T, Q> const& a, vec<3, T, Q> const& b + ) + { + return length(b - a); + } + + template + GLM_FUNC_QUALIFIER T l2Norm(vec<3, T, Q> const& v) + { + return length(v); + } + + template + GLM_FUNC_QUALIFIER T lxNorm(vec<3, T, Q> const& x, vec<3, T, Q> const& y, unsigned int Depth) + { + return pow(pow(abs(y.x - x.x), T(Depth)) + pow(abs(y.y - x.y), T(Depth)) + pow(abs(y.z - x.z), T(Depth)), T(1) / T(Depth)); + } + + template + GLM_FUNC_QUALIFIER T lxNorm(vec<3, T, Q> const& v, unsigned int Depth) + { + return pow(pow(abs(v.x), T(Depth)) + pow(abs(v.y), T(Depth)) + pow(abs(v.z), T(Depth)), T(1) / T(Depth)); + } + + template + GLM_FUNC_QUALIFIER T lMaxNorm(vec<3, T, Q> const& a, vec<3, T, Q> const& b) + { + return compMax(abs(b - a)); + } + + template + GLM_FUNC_QUALIFIER T lMaxNorm(vec<3, T, Q> const& v) + { + return compMax(abs(v)); + } + +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/normal.hpp b/MacroLibX/third_party/glm/gtx/normal.hpp new file mode 100644 index 0000000..068682f --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/normal.hpp @@ -0,0 +1,41 @@ +/// @ref gtx_normal +/// @file glm/gtx/normal.hpp +/// +/// @see core (dependence) +/// @see gtx_extented_min_max (dependence) +/// +/// @defgroup gtx_normal GLM_GTX_normal +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Compute the normal of a triangle. + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_normal is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.")
+#	else
+#		pragma message("GLM: GLM_GTX_normal extension included")
+#	endif
+#endif
+
+namespace glm
+{
+	/// @addtogroup gtx_normal
+	/// @{
+
+	/// Computes the triangle normal from three triangle points.
+	///
+	/// @see gtx_normal
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL vec<3, T, Q> triangleNormal(vec<3, T, Q> const& p1, vec<3, T, Q> const& p2, vec<3, T, Q> const& p3);
+
+	/// @}
+}//namespace glm
+
+#include "normal.inl"
diff --git a/MacroLibX/third_party/glm/gtx/normal.inl b/MacroLibX/third_party/glm/gtx/normal.inl
new file mode 100644
index 0000000..74f9fc9
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/normal.inl
@@ -0,0 +1,15 @@
+/// @ref gtx_normal
+
+namespace glm
+{
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<3, T, Q> triangleNormal
+	(
+		vec<3, T, Q> const& p1,
+		vec<3, T, Q> const& p2,
+		vec<3, T, Q> const& p3
+	)
+	{
+		return normalize(cross(p1 - p2, p1 - p3));
+	}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/gtx/normalize_dot.hpp b/MacroLibX/third_party/glm/gtx/normalize_dot.hpp
new file mode 100644
index 0000000..5195802
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/normalize_dot.hpp
@@ -0,0 +1,49 @@
+/// @ref gtx_normalize_dot
+/// @file glm/gtx/normalize_dot.hpp
+///
+/// @see core (dependence)
+/// @see gtx_fast_square_root (dependence)
+///
+/// @defgroup gtx_normalize_dot GLM_GTX_normalize_dot
+/// @ingroup gtx
+///
+/// Include to use the features of this extension.
+///
+/// Dot product of vectors that need to be normalized, computed with a single square root.
+
+#pragma once
+
+// Dependency:
+#include "../gtx/fast_square_root.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	ifndef GLM_ENABLE_EXPERIMENTAL
+#		pragma message("GLM: GLM_GTX_normalize_dot is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.")
+#	else
+#		pragma message("GLM: GLM_GTX_normalize_dot extension included")
+#	endif
+#endif
+
+namespace glm
+{
+	/// @addtogroup gtx_normalize_dot
+	/// @{
+
+	/// Normalizes the parameters and returns the dot product of x and y.
+	/// It's faster than dot(normalize(x), normalize(y)).
+	///
+	/// @see gtx_normalize_dot extension.
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL T normalizeDot(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
+
+	/// Normalizes the parameters and returns the dot product of x and y.
+	/// Faster than dot(fastNormalize(x), fastNormalize(y)).
+	///
+	/// @see gtx_normalize_dot extension.
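A sketch of the intended use (editorial): normalizeDot yields the cosine of the angle between two unnormalized vectors with a single inversesqrt, matching dot(normalize(x), normalize(y)) in value:

    #define GLM_ENABLE_EXPERIMENTAL
    #include <glm/gtx/normalize_dot.hpp>

    // True when the unnormalized vectors point into the same half-space.
    bool facesSameWay(glm::vec3 a, glm::vec3 b)
    {
        return glm::normalizeDot(a, b) > 0.0f; // cosine of the angle between a and b
    }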
+ template + GLM_FUNC_DECL T fastNormalizeDot(vec const& x, vec const& y); + + /// @} +}//namespace glm + +#include "normalize_dot.inl" diff --git a/MacroLibX/third_party/glm/gtx/normalize_dot.inl b/MacroLibX/third_party/glm/gtx/normalize_dot.inl new file mode 100644 index 0000000..7bcd9a5 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/normalize_dot.inl @@ -0,0 +1,16 @@ +/// @ref gtx_normalize_dot + +namespace glm +{ + template + GLM_FUNC_QUALIFIER T normalizeDot(vec const& x, vec const& y) + { + return glm::dot(x, y) * glm::inversesqrt(glm::dot(x, x) * glm::dot(y, y)); + } + + template + GLM_FUNC_QUALIFIER T fastNormalizeDot(vec const& x, vec const& y) + { + return glm::dot(x, y) * glm::fastInverseSqrt(glm::dot(x, x) * glm::dot(y, y)); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/number_precision.hpp b/MacroLibX/third_party/glm/gtx/number_precision.hpp new file mode 100644 index 0000000..3a606bd --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/number_precision.hpp @@ -0,0 +1,61 @@ +/// @ref gtx_number_precision +/// @file glm/gtx/number_precision.hpp +/// +/// @see core (dependence) +/// @see gtc_type_precision (dependence) +/// @see gtc_quaternion (dependence) +/// +/// @defgroup gtx_number_precision GLM_GTX_number_precision +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Defined size types. + +#pragma once + +// Dependency: +#include "../glm.hpp" +#include "../gtc/type_precision.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_number_precision is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_number_precision extension included") +# endif +#endif + +namespace glm{ +namespace gtx +{ + ///////////////////////////// + // Unsigned int vector types + + /// @addtogroup gtx_number_precision + /// @{ + + typedef u8 u8vec1; //!< \brief 8bit unsigned integer scalar. (from GLM_GTX_number_precision extension) + typedef u16 u16vec1; //!< \brief 16bit unsigned integer scalar. (from GLM_GTX_number_precision extension) + typedef u32 u32vec1; //!< \brief 32bit unsigned integer scalar. (from GLM_GTX_number_precision extension) + typedef u64 u64vec1; //!< \brief 64bit unsigned integer scalar. (from GLM_GTX_number_precision extension) + + ////////////////////// + // Float vector types + + typedef f32 f32vec1; //!< \brief Single-qualifier floating-point scalar. (from GLM_GTX_number_precision extension) + typedef f64 f64vec1; //!< \brief Single-qualifier floating-point scalar. (from GLM_GTX_number_precision extension) + + ////////////////////// + // Float matrix types + + typedef f32 f32mat1; //!< \brief Single-qualifier floating-point scalar. (from GLM_GTX_number_precision extension) + typedef f32 f32mat1x1; //!< \brief Single-qualifier floating-point scalar. (from GLM_GTX_number_precision extension) + typedef f64 f64mat1; //!< \brief Double-qualifier floating-point scalar. (from GLM_GTX_number_precision extension) + typedef f64 f64mat1x1; //!< \brief Double-qualifier floating-point scalar. 
(from GLM_GTX_number_precision extension) + + /// @} +}//namespace gtx +}//namespace glm + +#include "number_precision.inl" diff --git a/MacroLibX/third_party/glm/gtx/number_precision.inl b/MacroLibX/third_party/glm/gtx/number_precision.inl new file mode 100644 index 0000000..b39d71c --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/number_precision.inl @@ -0,0 +1,6 @@ +/// @ref gtx_number_precision + +namespace glm +{ + +} diff --git a/MacroLibX/third_party/glm/gtx/optimum_pow.hpp b/MacroLibX/third_party/glm/gtx/optimum_pow.hpp new file mode 100644 index 0000000..9284a47 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/optimum_pow.hpp @@ -0,0 +1,54 @@ +/// @ref gtx_optimum_pow +/// @file glm/gtx/optimum_pow.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_optimum_pow GLM_GTX_optimum_pow +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Integer exponentiation of power functions. + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_optimum_pow is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_optimum_pow extension included") +# endif +#endif + +namespace glm{ +namespace gtx +{ + /// @addtogroup gtx_optimum_pow + /// @{ + + /// Returns x raised to the power of 2. + /// + /// @see gtx_optimum_pow + template + GLM_FUNC_DECL genType pow2(genType const& x); + + /// Returns x raised to the power of 3. + /// + /// @see gtx_optimum_pow + template + GLM_FUNC_DECL genType pow3(genType const& x); + + /// Returns x raised to the power of 4. + /// + /// @see gtx_optimum_pow + template + GLM_FUNC_DECL genType pow4(genType const& x); + + /// @} +}//namespace gtx +}//namespace glm + +#include "optimum_pow.inl" diff --git a/MacroLibX/third_party/glm/gtx/optimum_pow.inl b/MacroLibX/third_party/glm/gtx/optimum_pow.inl new file mode 100644 index 0000000..a26c19c --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/optimum_pow.inl @@ -0,0 +1,22 @@ +/// @ref gtx_optimum_pow + +namespace glm +{ + template + GLM_FUNC_QUALIFIER genType pow2(genType const& x) + { + return x * x; + } + + template + GLM_FUNC_QUALIFIER genType pow3(genType const& x) + { + return x * x * x; + } + + template + GLM_FUNC_QUALIFIER genType pow4(genType const& x) + { + return (x * x) * (x * x); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/orthonormalize.hpp b/MacroLibX/third_party/glm/gtx/orthonormalize.hpp new file mode 100644 index 0000000..3e004fb --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/orthonormalize.hpp @@ -0,0 +1,49 @@ +/// @ref gtx_orthonormalize +/// @file glm/gtx/orthonormalize.hpp +/// +/// @see core (dependence) +/// @see gtx_extented_min_max (dependence) +/// +/// @defgroup gtx_orthonormalize GLM_GTX_orthonormalize +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Orthonormalize matrices. + +#pragma once + +// Dependency: +#include "../vec3.hpp" +#include "../mat3x3.hpp" +#include "../geometric.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_orthonormalize is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.")
+#	else
+#		pragma message("GLM: GLM_GTX_orthonormalize extension included")
+#	endif
+#endif
+
+namespace glm
+{
+	/// @addtogroup gtx_orthonormalize
+	/// @{
+
+	/// Returns the orthonormalized matrix of m.
+	///
+	/// @see gtx_orthonormalize
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL mat<3, 3, T, Q> orthonormalize(mat<3, 3, T, Q> const& m);
+
+	/// Orthonormalizes x according to y.
+	///
+	/// @see gtx_orthonormalize
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL vec<3, T, Q> orthonormalize(vec<3, T, Q> const& x, vec<3, T, Q> const& y);
+
+	/// @}
+}//namespace glm
+
+#include "orthonormalize.inl"
diff --git a/MacroLibX/third_party/glm/gtx/orthonormalize.inl b/MacroLibX/third_party/glm/gtx/orthonormalize.inl
new file mode 100644
index 0000000..cb553ba
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/orthonormalize.inl
@@ -0,0 +1,29 @@
+/// @ref gtx_orthonormalize
+
+namespace glm
+{
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER mat<3, 3, T, Q> orthonormalize(mat<3, 3, T, Q> const& m)
+	{
+		mat<3, 3, T, Q> r = m;
+
+		r[0] = normalize(r[0]);
+
+		T d0 = dot(r[0], r[1]);
+		r[1] -= r[0] * d0;
+		r[1] = normalize(r[1]);
+
+		T d1 = dot(r[1], r[2]);
+		d0 = dot(r[0], r[2]);
+		r[2] -= r[0] * d0 + r[1] * d1;
+		r[2] = normalize(r[2]);
+
+		return r;
+	}
+
+	template<typename T, qualifier Q>
+	GLM_FUNC_QUALIFIER vec<3, T, Q> orthonormalize(vec<3, T, Q> const& x, vec<3, T, Q> const& y)
+	{
+		return normalize(x - y * dot(y, x));
+	}
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/gtx/perpendicular.hpp b/MacroLibX/third_party/glm/gtx/perpendicular.hpp
new file mode 100644
index 0000000..72b77b6
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/perpendicular.hpp
@@ -0,0 +1,41 @@
+/// @ref gtx_perpendicular
+/// @file glm/gtx/perpendicular.hpp
+///
+/// @see core (dependence)
+/// @see gtx_projection (dependence)
+///
+/// @defgroup gtx_perpendicular GLM_GTX_perpendicular
+/// @ingroup gtx
+///
+/// Include to use the features of this extension.
+///
+/// Perpendicular of a vector from another one.
+
+#pragma once
+
+// Dependency:
+#include "../glm.hpp"
+#include "../gtx/projection.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	ifndef GLM_ENABLE_EXPERIMENTAL
+#		pragma message("GLM: GLM_GTX_perpendicular is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.")
+#	else
+#		pragma message("GLM: GLM_GTX_perpendicular extension included")
+#	endif
+#endif
+
+namespace glm
+{
+	/// @addtogroup gtx_perpendicular
+	/// @{
+
+	//! Projects x onto the plane perpendicular to Normal, i.e. returns the component of x perpendicular to Normal.
+	//! From GLM_GTX_perpendicular extension.
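Returning to GLM_GTX_orthonormalize above: re-orthonormalizing counters the drift a rotation matrix accumulates over many incremental multiplies. A sketch (editorial):

    #define GLM_ENABLE_EXPERIMENTAL
    #include <glm/gtx/orthonormalize.hpp>

    // Repair a rotation matrix after it has accumulated floating-point error.
    void repairRotation(glm::mat3& rotation)
    {
        rotation = glm::orthonormalize(rotation); // Gram-Schmidt over the three columns
    }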
+ template + GLM_FUNC_DECL genType perp(genType const& x, genType const& Normal); + + /// @} +}//namespace glm + +#include "perpendicular.inl" diff --git a/MacroLibX/third_party/glm/gtx/perpendicular.inl b/MacroLibX/third_party/glm/gtx/perpendicular.inl new file mode 100644 index 0000000..1e72f33 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/perpendicular.inl @@ -0,0 +1,10 @@ +/// @ref gtx_perpendicular + +namespace glm +{ + template + GLM_FUNC_QUALIFIER genType perp(genType const& x, genType const& Normal) + { + return x - proj(x, Normal); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/polar_coordinates.hpp b/MacroLibX/third_party/glm/gtx/polar_coordinates.hpp new file mode 100644 index 0000000..b399112 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/polar_coordinates.hpp @@ -0,0 +1,48 @@ +/// @ref gtx_polar_coordinates +/// @file glm/gtx/polar_coordinates.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_polar_coordinates GLM_GTX_polar_coordinates +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Conversion from Euclidean space to polar space and revert. + +#pragma once + +// Dependency: +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_polar_coordinates is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_polar_coordinates extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_polar_coordinates + /// @{ + + /// Convert Euclidean to Polar coordinates, x is the xz distance, y, the latitude and z the longitude. + /// + /// @see gtx_polar_coordinates + template + GLM_FUNC_DECL vec<3, T, Q> polar( + vec<3, T, Q> const& euclidean); + + /// Convert Polar to Euclidean coordinates. + /// + /// @see gtx_polar_coordinates + template + GLM_FUNC_DECL vec<3, T, Q> euclidean( + vec<2, T, Q> const& polar); + + /// @} +}//namespace glm + +#include "polar_coordinates.inl" diff --git a/MacroLibX/third_party/glm/gtx/polar_coordinates.inl b/MacroLibX/third_party/glm/gtx/polar_coordinates.inl new file mode 100644 index 0000000..371c8dd --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/polar_coordinates.inl @@ -0,0 +1,36 @@ +/// @ref gtx_polar_coordinates + +namespace glm +{ + template + GLM_FUNC_QUALIFIER vec<3, T, Q> polar + ( + vec<3, T, Q> const& euclidean + ) + { + T const Length(length(euclidean)); + vec<3, T, Q> const tmp(euclidean / Length); + T const xz_dist(sqrt(tmp.x * tmp.x + tmp.z * tmp.z)); + + return vec<3, T, Q>( + asin(tmp.y), // latitude + atan(tmp.x, tmp.z), // longitude + xz_dist); // xz distance + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> euclidean + ( + vec<2, T, Q> const& polar + ) + { + T const latitude(polar.x); + T const longitude(polar.y); + + return vec<3, T, Q>( + cos(latitude) * sin(longitude), + sin(latitude), + cos(latitude) * cos(longitude)); + } + +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/projection.hpp b/MacroLibX/third_party/glm/gtx/projection.hpp new file mode 100644 index 0000000..678f3ad --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/projection.hpp @@ -0,0 +1,43 @@ +/// @ref gtx_projection +/// @file glm/gtx/projection.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_projection GLM_GTX_projection +/// @ingroup gtx +/// +/// Include to use the features of this extension. 
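For GLM_GTX_polar_coordinates above: polar() maps a direction to (latitude, longitude, xz-distance) and euclidean() maps (latitude, longitude) back to a unit direction. An editorial round-trip sketch:

    #define GLM_ENABLE_EXPERIMENTAL
    #include <glm/gtx/polar_coordinates.hpp>

    // direction -> (latitude, longitude) -> direction (approximately).
    glm::vec3 roundTrip(glm::vec3 dir)
    {
        glm::vec3 p = glm::polar(glm::normalize(dir)); // p.x = latitude, p.y = longitude
        return glm::euclidean(glm::vec2(p.x, p.y));
    }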
+/// +/// Projection of a vector to other one + +#pragma once + +// Dependency: +#include "../geometric.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_projection is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_projection extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_projection + /// @{ + + /// Projects x on Normal. + /// + /// @param[in] x A vector to project + /// @param[in] Normal A normal that doesn't need to be of unit length. + /// + /// @see gtx_projection + template + GLM_FUNC_DECL genType proj(genType const& x, genType const& Normal); + + /// @} +}//namespace glm + +#include "projection.inl" diff --git a/MacroLibX/third_party/glm/gtx/projection.inl b/MacroLibX/third_party/glm/gtx/projection.inl new file mode 100644 index 0000000..f23f884 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/projection.inl @@ -0,0 +1,10 @@ +/// @ref gtx_projection + +namespace glm +{ + template + GLM_FUNC_QUALIFIER genType proj(genType const& x, genType const& Normal) + { + return glm::dot(x, Normal) / glm::dot(Normal, Normal) * Normal; + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/quaternion.hpp b/MacroLibX/third_party/glm/gtx/quaternion.hpp new file mode 100644 index 0000000..5c2b5ad --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/quaternion.hpp @@ -0,0 +1,174 @@ +/// @ref gtx_quaternion +/// @file glm/gtx/quaternion.hpp +/// +/// @see core (dependence) +/// @see gtx_extented_min_max (dependence) +/// +/// @defgroup gtx_quaternion GLM_GTX_quaternion +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Extented quaternion types and functions + +#pragma once + +// Dependency: +#include "../glm.hpp" +#include "../gtc/constants.hpp" +#include "../gtc/quaternion.hpp" +#include "../ext/quaternion_exponential.hpp" +#include "../gtx/norm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_quaternion is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_quaternion extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_quaternion + /// @{ + + /// Create an identity quaternion. + /// + /// @see gtx_quaternion + template + GLM_FUNC_DECL GLM_CONSTEXPR qua quat_identity(); + + /// Compute a cross product between a quaternion and a vector. + /// + /// @see gtx_quaternion + template + GLM_FUNC_DECL vec<3, T, Q> cross( + qua const& q, + vec<3, T, Q> const& v); + + //! Compute a cross product between a vector and a quaternion. + /// + /// @see gtx_quaternion + template + GLM_FUNC_DECL vec<3, T, Q> cross( + vec<3, T, Q> const& v, + qua const& q); + + //! Compute a point on a path according squad equation. + //! q1 and q2 are control points; s1 and s2 are intermediate control points. + /// + /// @see gtx_quaternion + template + GLM_FUNC_DECL qua squad( + qua const& q1, + qua const& q2, + qua const& s1, + qua const& s2, + T const& h); + + //! Returns an intermediate control point for squad interpolation. + /// + /// @see gtx_quaternion + template + GLM_FUNC_DECL qua intermediate( + qua const& prev, + qua const& curr, + qua const& next); + + //! 
Returns the quaternion square root.
+	///
+	/// @see gtx_quaternion
+	//template<typename T, qualifier Q>
+	//qua<T, Q> sqrt(
+	//	qua<T, Q> const& q);
+
+	//! Rotates a 3 components vector by a quaternion.
+	///
+	/// @see gtx_quaternion
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL vec<3, T, Q> rotate(
+		qua<T, Q> const& q,
+		vec<3, T, Q> const& v);
+
+	/// Rotates a 4 components vector by a quaternion.
+	///
+	/// @see gtx_quaternion
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL vec<4, T, Q> rotate(
+		qua<T, Q> const& q,
+		vec<4, T, Q> const& v);
+
+	/// Extract the real component of a quaternion.
+	///
+	/// @see gtx_quaternion
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL T extractRealComponent(
+		qua<T, Q> const& q);
+
+	/// Converts a quaternion to a 3 * 3 matrix.
+	///
+	/// @see gtx_quaternion
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL mat<3, 3, T, Q> toMat3(
+		qua<T, Q> const& x){return mat3_cast(x);}
+
+	/// Converts a quaternion to a 4 * 4 matrix.
+	///
+	/// @see gtx_quaternion
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL mat<4, 4, T, Q> toMat4(
+		qua<T, Q> const& x){return mat4_cast(x);}
+
+	/// Converts a 3 * 3 matrix to a quaternion.
+	///
+	/// @see gtx_quaternion
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL qua<T, Q> toQuat(
+		mat<3, 3, T, Q> const& x){return quat_cast(x);}
+
+	/// Converts a 4 * 4 matrix to a quaternion.
+	///
+	/// @see gtx_quaternion
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL qua<T, Q> toQuat(
+		mat<4, 4, T, Q> const& x){return quat_cast(x);}
+
+	/// Quaternion interpolation using the rotation short path.
+	///
+	/// @see gtx_quaternion
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL qua<T, Q> shortMix(
+		qua<T, Q> const& x,
+		qua<T, Q> const& y,
+		T const& a);
+
+	/// Quaternion normalized linear interpolation.
+	///
+	/// @see gtx_quaternion
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL qua<T, Q> fastMix(
+		qua<T, Q> const& x,
+		qua<T, Q> const& y,
+		T const& a);
+
+	/// Compute the rotation between two vectors.
+	/// @param orig vector, needs to be normalized
+	/// @param dest vector, needs to be normalized
+	///
+	/// @see gtx_quaternion
+	template<typename T, qualifier Q>
+	GLM_FUNC_DECL qua<T, Q> rotation(
+		vec<3, T, Q> const& orig,
+		vec<3, T, Q> const& dest);
+
+	/// Returns the squared length of x.
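An editorial sketch for rotation() declared above: both arguments must be unit length, and the result carries the first direction onto the second (the implementation further below handles the opposite-vectors corner case with a perpendicular fallback axis):

    #define GLM_ENABLE_EXPERIMENTAL
    #include <glm/gtx/quaternion.hpp>

    // Orientation that turns an object's forward axis toward a target direction.
    glm::quat faceDirection(glm::vec3 forward, glm::vec3 target)
    {
        return glm::rotation(glm::normalize(forward), glm::normalize(target));
    }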
+ /// + /// @see gtx_quaternion + template + GLM_FUNC_DECL GLM_CONSTEXPR T length2(qua const& q); + + /// @} +}//namespace glm + +#include "quaternion.inl" diff --git a/MacroLibX/third_party/glm/gtx/quaternion.inl b/MacroLibX/third_party/glm/gtx/quaternion.inl new file mode 100644 index 0000000..d125bcc --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/quaternion.inl @@ -0,0 +1,159 @@ +/// @ref gtx_quaternion + +#include +#include "../gtc/constants.hpp" + +namespace glm +{ + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua quat_identity() + { + return qua(static_cast(1), static_cast(0), static_cast(0), static_cast(0)); + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> cross(vec<3, T, Q> const& v, qua const& q) + { + return inverse(q) * v; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> cross(qua const& q, vec<3, T, Q> const& v) + { + return q * v; + } + + template + GLM_FUNC_QUALIFIER qua squad + ( + qua const& q1, + qua const& q2, + qua const& s1, + qua const& s2, + T const& h) + { + return mix(mix(q1, q2, h), mix(s1, s2, h), static_cast(2) * (static_cast(1) - h) * h); + } + + template + GLM_FUNC_QUALIFIER qua intermediate + ( + qua const& prev, + qua const& curr, + qua const& next + ) + { + qua invQuat = inverse(curr); + return exp((log(next * invQuat) + log(prev * invQuat)) / static_cast(-4)) * curr; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> rotate(qua const& q, vec<3, T, Q> const& v) + { + return q * v; + } + + template + GLM_FUNC_QUALIFIER vec<4, T, Q> rotate(qua const& q, vec<4, T, Q> const& v) + { + return q * v; + } + + template + GLM_FUNC_QUALIFIER T extractRealComponent(qua const& q) + { + T w = static_cast(1) - q.x * q.x - q.y * q.y - q.z * q.z; + if(w < T(0)) + return T(0); + else + return -sqrt(w); + } + + template + GLM_FUNC_QUALIFIER GLM_CONSTEXPR T length2(qua const& q) + { + return q.x * q.x + q.y * q.y + q.z * q.z + q.w * q.w; + } + + template + GLM_FUNC_QUALIFIER qua shortMix(qua const& x, qua const& y, T const& a) + { + if(a <= static_cast(0)) return x; + if(a >= static_cast(1)) return y; + + T fCos = dot(x, y); + qua y2(y); //BUG!!! qua y2; + if(fCos < static_cast(0)) + { + y2 = -y; + fCos = -fCos; + } + + //if(fCos > 1.0f) // problem + T k0, k1; + if(fCos > (static_cast(1) - epsilon())) + { + k0 = static_cast(1) - a; + k1 = static_cast(0) + a; //BUG!!! 1.0f + a; + } + else + { + T fSin = sqrt(T(1) - fCos * fCos); + T fAngle = atan(fSin, fCos); + T fOneOverSin = static_cast(1) / fSin; + k0 = sin((static_cast(1) - a) * fAngle) * fOneOverSin; + k1 = sin((static_cast(0) + a) * fAngle) * fOneOverSin; + } + + return qua( + k0 * x.w + k1 * y2.w, + k0 * x.x + k1 * y2.x, + k0 * x.y + k1 * y2.y, + k0 * x.z + k1 * y2.z); + } + + template + GLM_FUNC_QUALIFIER qua fastMix(qua const& x, qua const& y, T const& a) + { + return glm::normalize(x * (static_cast(1) - a) + (y * a)); + } + + template + GLM_FUNC_QUALIFIER qua rotation(vec<3, T, Q> const& orig, vec<3, T, Q> const& dest) + { + T cosTheta = dot(orig, dest); + vec<3, T, Q> rotationAxis; + + if(cosTheta >= static_cast(1) - epsilon()) { + // orig and dest point in the same direction + return quat_identity(); + } + + if(cosTheta < static_cast(-1) + epsilon()) + { + // special case when vectors in opposite directions : + // there is no "ideal" rotation axis + // So guess one; any will do as long as it's perpendicular to start + // This implementation favors a rotation around the Up axis (Y), + // since it's often what you want to do. 
+ rotationAxis = cross(vec<3, T, Q>(0, 0, 1), orig); + if(length2(rotationAxis) < epsilon()) // bad luck, they were parallel, try again! + rotationAxis = cross(vec<3, T, Q>(1, 0, 0), orig); + + rotationAxis = normalize(rotationAxis); + return angleAxis(pi(), rotationAxis); + } + + // Implementation from Stan Melax's Game Programming Gems 1 article + rotationAxis = cross(orig, dest); + + T s = sqrt((T(1) + cosTheta) * static_cast(2)); + T invs = static_cast(1) / s; + + return qua( + s * static_cast(0.5f), + rotationAxis.x * invs, + rotationAxis.y * invs, + rotationAxis.z * invs); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/range.hpp b/MacroLibX/third_party/glm/gtx/range.hpp new file mode 100644 index 0000000..93bcb9a --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/range.hpp @@ -0,0 +1,98 @@ +/// @ref gtx_range +/// @file glm/gtx/range.hpp +/// @author Joshua Moerman +/// +/// @defgroup gtx_range GLM_GTX_range +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Defines begin and end for vectors and matrices. Useful for range-based for loop. +/// The range is defined over the elements, not over columns or rows (e.g. mat4 has 16 elements). + +#pragma once + +// Dependencies +#include "../detail/setup.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_range is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_range extension included") +# endif +#endif + +#include "../gtc/type_ptr.hpp" +#include "../gtc/vec1.hpp" + +namespace glm +{ + /// @addtogroup gtx_range + /// @{ + +# if GLM_COMPILER & GLM_COMPILER_VC +# pragma warning(push) +# pragma warning(disable : 4100) // unreferenced formal parameter +# endif + + template + inline length_t components(vec<1, T, Q> const& v) + { + return v.length(); + } + + template + inline length_t components(vec<2, T, Q> const& v) + { + return v.length(); + } + + template + inline length_t components(vec<3, T, Q> const& v) + { + return v.length(); + } + + template + inline length_t components(vec<4, T, Q> const& v) + { + return v.length(); + } + + template + inline length_t components(genType const& m) + { + return m.length() * m[0].length(); + } + + template + inline typename genType::value_type const * begin(genType const& v) + { + return value_ptr(v); + } + + template + inline typename genType::value_type const * end(genType const& v) + { + return begin(v) + components(v); + } + + template + inline typename genType::value_type * begin(genType& v) + { + return value_ptr(v); + } + + template + inline typename genType::value_type * end(genType& v) + { + return begin(v) + components(v); + } + +# if GLM_COMPILER & GLM_COMPILER_VC +# pragma warning(pop) +# endif + + /// @} +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/raw_data.hpp b/MacroLibX/third_party/glm/gtx/raw_data.hpp new file mode 100644 index 0000000..86cbe77 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/raw_data.hpp @@ -0,0 +1,51 @@ +/// @ref gtx_raw_data +/// @file glm/gtx/raw_data.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_raw_data GLM_GTX_raw_data +/// @ingroup gtx +/// +/// Include to use the features of this extension. 
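GLM_GTX_range above defines begin()/end() over elements (a mat4 iterates 16 floats in column-major order), enabling range-based for loops. A sketch (editorial; the threshold is illustrative):

    #define GLM_ENABLE_EXPERIMENTAL
    #include <glm/gtx/range.hpp>

    // Snap tiny matrix elements to zero with a range-based for loop.
    void flushTiny(glm::mat4& m)
    {
        for(float& e : m) // visits all 16 elements
            if(e > -1e-6f && e < 1e-6f)
                e = 0.0f;
    }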
+/// +/// Projection of a vector to other one + +#pragma once + +// Dependencies +#include "../ext/scalar_uint_sized.hpp" +#include "../detail/setup.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_raw_data is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_raw_data extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_raw_data + /// @{ + + //! Type for byte numbers. + //! From GLM_GTX_raw_data extension. + typedef detail::uint8 byte; + + //! Type for word numbers. + //! From GLM_GTX_raw_data extension. + typedef detail::uint16 word; + + //! Type for dword numbers. + //! From GLM_GTX_raw_data extension. + typedef detail::uint32 dword; + + //! Type for qword numbers. + //! From GLM_GTX_raw_data extension. + typedef detail::uint64 qword; + + /// @} +}// namespace glm + +#include "raw_data.inl" diff --git a/MacroLibX/third_party/glm/gtx/raw_data.inl b/MacroLibX/third_party/glm/gtx/raw_data.inl new file mode 100644 index 0000000..c740317 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/raw_data.inl @@ -0,0 +1,2 @@ +/// @ref gtx_raw_data + diff --git a/MacroLibX/third_party/glm/gtx/rotate_normalized_axis.hpp b/MacroLibX/third_party/glm/gtx/rotate_normalized_axis.hpp new file mode 100644 index 0000000..2103ca0 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/rotate_normalized_axis.hpp @@ -0,0 +1,68 @@ +/// @ref gtx_rotate_normalized_axis +/// @file glm/gtx/rotate_normalized_axis.hpp +/// +/// @see core (dependence) +/// @see gtc_matrix_transform +/// @see gtc_quaternion +/// +/// @defgroup gtx_rotate_normalized_axis GLM_GTX_rotate_normalized_axis +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Quaternions and matrices rotations around normalized axis. + +#pragma once + +// Dependency: +#include "../glm.hpp" +#include "../gtc/epsilon.hpp" +#include "../gtc/quaternion.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_rotate_normalized_axis is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_rotate_normalized_axis extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_rotate_normalized_axis + /// @{ + + /// Builds a rotation 4 * 4 matrix created from a normalized axis and an angle. + /// + /// @param m Input matrix multiplied by this rotation matrix. + /// @param angle Rotation angle expressed in radians. + /// @param axis Rotation axis, must be normalized. + /// @tparam T Value type used to build the matrix. Currently supported: half (not recommended), float or double. + /// + /// @see gtx_rotate_normalized_axis + /// @see - rotate(T angle, T x, T y, T z) + /// @see - rotate(mat<4, 4, T, Q> const& m, T angle, T x, T y, T z) + /// @see - rotate(T angle, vec<3, T, Q> const& v) + template + GLM_FUNC_DECL mat<4, 4, T, Q> rotateNormalizedAxis( + mat<4, 4, T, Q> const& m, + T const& angle, + vec<3, T, Q> const& axis); + + /// Rotates a quaternion from a vector of 3 components normalized axis and an angle. + /// + /// @param q Source orientation + /// @param angle Angle expressed in radians. + /// @param axis Normalized axis of the rotation, must be normalized. 
+ /// + /// @see gtx_rotate_normalized_axis + template + GLM_FUNC_DECL qua rotateNormalizedAxis( + qua const& q, + T const& angle, + vec<3, T, Q> const& axis); + + /// @} +}//namespace glm + +#include "rotate_normalized_axis.inl" diff --git a/MacroLibX/third_party/glm/gtx/rotate_normalized_axis.inl b/MacroLibX/third_party/glm/gtx/rotate_normalized_axis.inl new file mode 100644 index 0000000..b2e9278 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/rotate_normalized_axis.inl @@ -0,0 +1,58 @@ +/// @ref gtx_rotate_normalized_axis + +namespace glm +{ + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> rotateNormalizedAxis + ( + mat<4, 4, T, Q> const& m, + T const& angle, + vec<3, T, Q> const& v + ) + { + T const a = angle; + T const c = cos(a); + T const s = sin(a); + + vec<3, T, Q> const axis(v); + + vec<3, T, Q> const temp((static_cast(1) - c) * axis); + + mat<4, 4, T, Q> Rotate; + Rotate[0][0] = c + temp[0] * axis[0]; + Rotate[0][1] = 0 + temp[0] * axis[1] + s * axis[2]; + Rotate[0][2] = 0 + temp[0] * axis[2] - s * axis[1]; + + Rotate[1][0] = 0 + temp[1] * axis[0] - s * axis[2]; + Rotate[1][1] = c + temp[1] * axis[1]; + Rotate[1][2] = 0 + temp[1] * axis[2] + s * axis[0]; + + Rotate[2][0] = 0 + temp[2] * axis[0] + s * axis[1]; + Rotate[2][1] = 0 + temp[2] * axis[1] - s * axis[0]; + Rotate[2][2] = c + temp[2] * axis[2]; + + mat<4, 4, T, Q> Result; + Result[0] = m[0] * Rotate[0][0] + m[1] * Rotate[0][1] + m[2] * Rotate[0][2]; + Result[1] = m[0] * Rotate[1][0] + m[1] * Rotate[1][1] + m[2] * Rotate[1][2]; + Result[2] = m[0] * Rotate[2][0] + m[1] * Rotate[2][1] + m[2] * Rotate[2][2]; + Result[3] = m[3]; + return Result; + } + + template + GLM_FUNC_QUALIFIER qua rotateNormalizedAxis + ( + qua const& q, + T const& angle, + vec<3, T, Q> const& v + ) + { + vec<3, T, Q> const Tmp(v); + + T const AngleRad(angle); + T const Sin = sin(AngleRad * T(0.5)); + + return q * qua(cos(AngleRad * static_cast(0.5)), Tmp.x * Sin, Tmp.y * Sin, Tmp.z * Sin); + //return gtc::quaternion::cross(q, tquat(cos(AngleRad * T(0.5)), Tmp.x * fSin, Tmp.y * fSin, Tmp.z * fSin)); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/rotate_vector.hpp b/MacroLibX/third_party/glm/gtx/rotate_vector.hpp new file mode 100644 index 0000000..dcd5b95 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/rotate_vector.hpp @@ -0,0 +1,123 @@ +/// @ref gtx_rotate_vector +/// @file glm/gtx/rotate_vector.hpp +/// +/// @see core (dependence) +/// @see gtx_transform (dependence) +/// +/// @defgroup gtx_rotate_vector GLM_GTX_rotate_vector +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Function to directly rotate a vector + +#pragma once + +// Dependency: +#include "../gtx/transform.hpp" +#include "../gtc/epsilon.hpp" +#include "../ext/vector_relational.hpp" +#include "../glm.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_rotate_vector is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_rotate_vector extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_rotate_vector + /// @{ + + /// Returns Spherical interpolation between two vectors + /// + /// @param x A first vector + /// @param y A second vector + /// @param a Interpolation factor. The interpolation is defined beyond the range [0, 1]. 
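For rotateNormalizedAxis above: unlike glm::rotate, which normalizes the axis internally, these overloads assume the axis is already unit length and skip that work. A sketch (editorial):

    #define GLM_ENABLE_EXPERIMENTAL
    #include <glm/gtx/rotate_normalized_axis.hpp>

    // Rotate around an axis that is known to be unit length already.
    glm::mat4 spinAroundY(const glm::mat4& m, float angleRadians)
    {
        const glm::vec3 axis(0.0f, 1.0f, 0.0f); // already normalized
        return glm::rotateNormalizedAxis(m, angleRadians, axis);
    }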
+ /// + /// @see gtx_rotate_vector + template + GLM_FUNC_DECL vec<3, T, Q> slerp( + vec<3, T, Q> const& x, + vec<3, T, Q> const& y, + T const& a); + + //! Rotate a two dimensional vector. + //! From GLM_GTX_rotate_vector extension. + template + GLM_FUNC_DECL vec<2, T, Q> rotate( + vec<2, T, Q> const& v, + T const& angle); + + //! Rotate a three dimensional vector around an axis. + //! From GLM_GTX_rotate_vector extension. + template + GLM_FUNC_DECL vec<3, T, Q> rotate( + vec<3, T, Q> const& v, + T const& angle, + vec<3, T, Q> const& normal); + + //! Rotate a four dimensional vector around an axis. + //! From GLM_GTX_rotate_vector extension. + template + GLM_FUNC_DECL vec<4, T, Q> rotate( + vec<4, T, Q> const& v, + T const& angle, + vec<3, T, Q> const& normal); + + //! Rotate a three dimensional vector around the X axis. + //! From GLM_GTX_rotate_vector extension. + template + GLM_FUNC_DECL vec<3, T, Q> rotateX( + vec<3, T, Q> const& v, + T const& angle); + + //! Rotate a three dimensional vector around the Y axis. + //! From GLM_GTX_rotate_vector extension. + template + GLM_FUNC_DECL vec<3, T, Q> rotateY( + vec<3, T, Q> const& v, + T const& angle); + + //! Rotate a three dimensional vector around the Z axis. + //! From GLM_GTX_rotate_vector extension. + template + GLM_FUNC_DECL vec<3, T, Q> rotateZ( + vec<3, T, Q> const& v, + T const& angle); + + //! Rotate a four dimensional vector around the X axis. + //! From GLM_GTX_rotate_vector extension. + template + GLM_FUNC_DECL vec<4, T, Q> rotateX( + vec<4, T, Q> const& v, + T const& angle); + + //! Rotate a four dimensional vector around the Y axis. + //! From GLM_GTX_rotate_vector extension. + template + GLM_FUNC_DECL vec<4, T, Q> rotateY( + vec<4, T, Q> const& v, + T const& angle); + + //! Rotate a four dimensional vector around the Z axis. + //! From GLM_GTX_rotate_vector extension. + template + GLM_FUNC_DECL vec<4, T, Q> rotateZ( + vec<4, T, Q> const& v, + T const& angle); + + //! Build a rotation matrix from a normal and a up vector. + //! From GLM_GTX_rotate_vector extension. + template + GLM_FUNC_DECL mat<4, 4, T, Q> orientation( + vec<3, T, Q> const& Normal, + vec<3, T, Q> const& Up); + + /// @} +}//namespace glm + +#include "rotate_vector.inl" diff --git a/MacroLibX/third_party/glm/gtx/rotate_vector.inl b/MacroLibX/third_party/glm/gtx/rotate_vector.inl new file mode 100644 index 0000000..f8136e7 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/rotate_vector.inl @@ -0,0 +1,187 @@ +/// @ref gtx_rotate_vector + +namespace glm +{ + template + GLM_FUNC_QUALIFIER vec<3, T, Q> slerp + ( + vec<3, T, Q> const& x, + vec<3, T, Q> const& y, + T const& a + ) + { + // get cosine of angle between vectors (-1 -> 1) + T CosAlpha = dot(x, y); + // get angle (0 -> pi) + T Alpha = acos(CosAlpha); + // get sine of angle between vectors (0 -> 1) + T SinAlpha = sin(Alpha); + // this breaks down when SinAlpha = 0, i.e. 
Alpha = 0 or pi + T t1 = sin((static_cast(1) - a) * Alpha) / SinAlpha; + T t2 = sin(a * Alpha) / SinAlpha; + + // interpolate src vectors + return x * t1 + y * t2; + } + + template + GLM_FUNC_QUALIFIER vec<2, T, Q> rotate + ( + vec<2, T, Q> const& v, + T const& angle + ) + { + vec<2, T, Q> Result; + T const Cos(cos(angle)); + T const Sin(sin(angle)); + + Result.x = v.x * Cos - v.y * Sin; + Result.y = v.x * Sin + v.y * Cos; + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> rotate + ( + vec<3, T, Q> const& v, + T const& angle, + vec<3, T, Q> const& normal + ) + { + return mat<3, 3, T, Q>(glm::rotate(angle, normal)) * v; + } + /* + template + GLM_FUNC_QUALIFIER vec<3, T, Q> rotateGTX( + const vec<3, T, Q>& x, + T angle, + const vec<3, T, Q>& normal) + { + const T Cos = cos(radians(angle)); + const T Sin = sin(radians(angle)); + return x * Cos + ((x * normal) * (T(1) - Cos)) * normal + cross(x, normal) * Sin; + } + */ + template + GLM_FUNC_QUALIFIER vec<4, T, Q> rotate + ( + vec<4, T, Q> const& v, + T const& angle, + vec<3, T, Q> const& normal + ) + { + return rotate(angle, normal) * v; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> rotateX + ( + vec<3, T, Q> const& v, + T const& angle + ) + { + vec<3, T, Q> Result(v); + T const Cos(cos(angle)); + T const Sin(sin(angle)); + + Result.y = v.y * Cos - v.z * Sin; + Result.z = v.y * Sin + v.z * Cos; + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> rotateY + ( + vec<3, T, Q> const& v, + T const& angle + ) + { + vec<3, T, Q> Result = v; + T const Cos(cos(angle)); + T const Sin(sin(angle)); + + Result.x = v.x * Cos + v.z * Sin; + Result.z = -v.x * Sin + v.z * Cos; + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<3, T, Q> rotateZ + ( + vec<3, T, Q> const& v, + T const& angle + ) + { + vec<3, T, Q> Result = v; + T const Cos(cos(angle)); + T const Sin(sin(angle)); + + Result.x = v.x * Cos - v.y * Sin; + Result.y = v.x * Sin + v.y * Cos; + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<4, T, Q> rotateX + ( + vec<4, T, Q> const& v, + T const& angle + ) + { + vec<4, T, Q> Result = v; + T const Cos(cos(angle)); + T const Sin(sin(angle)); + + Result.y = v.y * Cos - v.z * Sin; + Result.z = v.y * Sin + v.z * Cos; + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<4, T, Q> rotateY + ( + vec<4, T, Q> const& v, + T const& angle + ) + { + vec<4, T, Q> Result = v; + T const Cos(cos(angle)); + T const Sin(sin(angle)); + + Result.x = v.x * Cos + v.z * Sin; + Result.z = -v.x * Sin + v.z * Cos; + return Result; + } + + template + GLM_FUNC_QUALIFIER vec<4, T, Q> rotateZ + ( + vec<4, T, Q> const& v, + T const& angle + ) + { + vec<4, T, Q> Result = v; + T const Cos(cos(angle)); + T const Sin(sin(angle)); + + Result.x = v.x * Cos - v.y * Sin; + Result.y = v.x * Sin + v.y * Cos; + return Result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> orientation + ( + vec<3, T, Q> const& Normal, + vec<3, T, Q> const& Up + ) + { + if(all(equal(Normal, Up, epsilon()))) + return mat<4, 4, T, Q>(static_cast(1)); + + vec<3, T, Q> RotationAxis = cross(Up, Normal); + T Angle = acos(dot(Normal, Up)); + + return rotate(Angle, RotationAxis); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/scalar_multiplication.hpp b/MacroLibX/third_party/glm/gtx/scalar_multiplication.hpp new file mode 100644 index 0000000..496ba19 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/scalar_multiplication.hpp @@ -0,0 +1,75 @@ +/// @ref gtx +/// @file glm/gtx/scalar_multiplication.hpp +/// @author Joshua Moerman +/// +/// 
Include <glm/gtx/scalar_multiplication.hpp> to use the features of this extension.
+///
+/// Enables scalar multiplication for all types
+///
+/// Since GLSL is very strict about types, the following (often used) combinations do not work:
+/// double * vec4
+/// int * vec4
+/// vec4 / int
+/// So we'll fix that! Of course "float * vec4" should remain the same (hence the enable_if magic)
+
+#pragma once
+
+#include "../detail/setup.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+# ifndef GLM_ENABLE_EXPERIMENTAL
+# pragma message("GLM: GLM_GTX_scalar_multiplication is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.")
+# else
+# pragma message("GLM: GLM_GTX_scalar_multiplication extension included")
+# endif
+#endif
+
+#include "../vec2.hpp"
+#include "../vec3.hpp"
+#include "../vec4.hpp"
+#include "../mat2x2.hpp"
+#include <type_traits>
+
+namespace glm
+{
+ template<typename T, typename Vec>
+ using return_type_scalar_multiplication = typename std::enable_if<
+ !std::is_same<T, float>::value // T may not be a float
+ && std::is_arithmetic<T>::value, Vec // But it may be an int or double (no vec3 or mat3, ...)
+ >::type;
+
+#define GLM_IMPLEMENT_SCAL_MULT(Vec) \
+ template<typename T> \
+ return_type_scalar_multiplication<T, Vec> \
+ operator*(T const& s, Vec rh){ \
+ return rh *= static_cast<float>(s); \
+ } \
+ \
+ template<typename T> \
+ return_type_scalar_multiplication<T, Vec> \
+ operator*(Vec lh, T const& s){ \
+ return lh *= static_cast<float>(s); \
+ } \
+ \
+ template<typename T> \
+ return_type_scalar_multiplication<T, Vec> \
+ operator/(Vec lh, T const& s){ \
+ return lh *= 1.0f / static_cast<float>(s); \
+ }
+
+GLM_IMPLEMENT_SCAL_MULT(vec2)
+GLM_IMPLEMENT_SCAL_MULT(vec3)
+GLM_IMPLEMENT_SCAL_MULT(vec4)
+
+GLM_IMPLEMENT_SCAL_MULT(mat2)
+GLM_IMPLEMENT_SCAL_MULT(mat2x3)
+GLM_IMPLEMENT_SCAL_MULT(mat2x4)
+GLM_IMPLEMENT_SCAL_MULT(mat3x2)
+GLM_IMPLEMENT_SCAL_MULT(mat3)
+GLM_IMPLEMENT_SCAL_MULT(mat3x4)
+GLM_IMPLEMENT_SCAL_MULT(mat4x2)
+GLM_IMPLEMENT_SCAL_MULT(mat4x3)
+GLM_IMPLEMENT_SCAL_MULT(mat4)
+
+#undef GLM_IMPLEMENT_SCAL_MULT
+} // namespace glm
diff --git a/MacroLibX/third_party/glm/gtx/scalar_relational.hpp b/MacroLibX/third_party/glm/gtx/scalar_relational.hpp
new file mode 100644
index 0000000..8be9c57
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/scalar_relational.hpp
@@ -0,0 +1,36 @@
+/// @ref gtx_scalar_relational
+/// @file glm/gtx/scalar_relational.hpp
+///
+/// @see core (dependence)
+///
+/// @defgroup gtx_scalar_relational GLM_GTX_scalar_relational
+/// @ingroup gtx
+///
+/// Include <glm/gtx/scalar_relational.hpp> to use the features of this extension.
+///
+/// Extend a position from a source to a position at a defined length.
+
+#pragma once
+
+// Dependency:
+#include "../glm.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+# ifndef GLM_ENABLE_EXPERIMENTAL
+# pragma message("GLM: GLM_GTX_extend is an experimental extension and may change in the future.
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_extend extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_scalar_relational + /// @{ + + + + /// @} +}//namespace glm + +#include "scalar_relational.inl" diff --git a/MacroLibX/third_party/glm/gtx/scalar_relational.inl b/MacroLibX/third_party/glm/gtx/scalar_relational.inl new file mode 100644 index 0000000..c2a121c --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/scalar_relational.inl @@ -0,0 +1,88 @@ +/// @ref gtx_scalar_relational + +namespace glm +{ + template + GLM_FUNC_QUALIFIER bool lessThan + ( + T const& x, + T const& y + ) + { + return x < y; + } + + template + GLM_FUNC_QUALIFIER bool lessThanEqual + ( + T const& x, + T const& y + ) + { + return x <= y; + } + + template + GLM_FUNC_QUALIFIER bool greaterThan + ( + T const& x, + T const& y + ) + { + return x > y; + } + + template + GLM_FUNC_QUALIFIER bool greaterThanEqual + ( + T const& x, + T const& y + ) + { + return x >= y; + } + + template + GLM_FUNC_QUALIFIER bool equal + ( + T const& x, + T const& y + ) + { + return detail::compute_equal::is_iec559>::call(x, y); + } + + template + GLM_FUNC_QUALIFIER bool notEqual + ( + T const& x, + T const& y + ) + { + return !detail::compute_equal::is_iec559>::call(x, y); + } + + GLM_FUNC_QUALIFIER bool any + ( + bool const& x + ) + { + return x; + } + + GLM_FUNC_QUALIFIER bool all + ( + bool const& x + ) + { + return x; + } + + GLM_FUNC_QUALIFIER bool not_ + ( + bool const& x + ) + { + return !x; + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/spline.hpp b/MacroLibX/third_party/glm/gtx/spline.hpp new file mode 100644 index 0000000..731c979 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/spline.hpp @@ -0,0 +1,65 @@ +/// @ref gtx_spline +/// @file glm/gtx/spline.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_spline GLM_GTX_spline +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Spline functions + +#pragma once + +// Dependency: +#include "../glm.hpp" +#include "../gtx/optimum_pow.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_spline is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_spline extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_spline + /// @{ + + /// Return a point from a catmull rom curve. + /// @see gtx_spline extension. + template + GLM_FUNC_DECL genType catmullRom( + genType const& v1, + genType const& v2, + genType const& v3, + genType const& v4, + typename genType::value_type const& s); + + /// Return a point from a hermite curve. + /// @see gtx_spline extension. + template + GLM_FUNC_DECL genType hermite( + genType const& v1, + genType const& t1, + genType const& v2, + genType const& t2, + typename genType::value_type const& s); + + /// Return a point from a cubic curve. + /// @see gtx_spline extension. 
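+ ///
+ /// A minimal usage sketch (assuming GLM_ENABLE_EXPERIMENTAL is defined before
+ /// including <glm/gtx/spline.hpp>; the four control points are hypothetical):
+ /// @code
+ /// glm::vec3 const A(1.0f), B(2.0f), C(3.0f), D(4.0f);
+ /// // evaluates the polynomial ((A * s + B) * s + C) * s + D at s = 0.5
+ /// glm::vec3 const P = glm::cubic(A, B, C, D, 0.5f);
+ /// @endcode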
+ template<typename genType>
+ GLM_FUNC_DECL genType cubic(
+ genType const& v1,
+ genType const& v2,
+ genType const& v3,
+ genType const& v4,
+ typename genType::value_type const& s);
+
+ /// @}
+}//namespace glm
+
+#include "spline.inl"
diff --git a/MacroLibX/third_party/glm/gtx/spline.inl b/MacroLibX/third_party/glm/gtx/spline.inl
new file mode 100644
index 0000000..c3fd056
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/spline.inl
@@ -0,0 +1,60 @@
+/// @ref gtx_spline
+
+namespace glm
+{
+ template<typename genType>
+ GLM_FUNC_QUALIFIER genType catmullRom
+ (
+ genType const& v1,
+ genType const& v2,
+ genType const& v3,
+ genType const& v4,
+ typename genType::value_type const& s
+ )
+ {
+ typename genType::value_type s2 = pow2(s);
+ typename genType::value_type s3 = pow3(s);
+
+ typename genType::value_type f1 = -s3 + typename genType::value_type(2) * s2 - s;
+ typename genType::value_type f2 = typename genType::value_type(3) * s3 - typename genType::value_type(5) * s2 + typename genType::value_type(2);
+ typename genType::value_type f3 = typename genType::value_type(-3) * s3 + typename genType::value_type(4) * s2 + s;
+ typename genType::value_type f4 = s3 - s2;
+
+ return (f1 * v1 + f2 * v2 + f3 * v3 + f4 * v4) / typename genType::value_type(2);
+
+ }
+
+ template<typename genType>
+ GLM_FUNC_QUALIFIER genType hermite
+ (
+ genType const& v1,
+ genType const& t1,
+ genType const& v2,
+ genType const& t2,
+ typename genType::value_type const& s
+ )
+ {
+ typename genType::value_type s2 = pow2(s);
+ typename genType::value_type s3 = pow3(s);
+
+ typename genType::value_type f1 = typename genType::value_type(2) * s3 - typename genType::value_type(3) * s2 + typename genType::value_type(1);
+ typename genType::value_type f2 = typename genType::value_type(-2) * s3 + typename genType::value_type(3) * s2;
+ typename genType::value_type f3 = s3 - typename genType::value_type(2) * s2 + s;
+ typename genType::value_type f4 = s3 - s2;
+
+ return f1 * v1 + f2 * v2 + f3 * t1 + f4 * t2;
+ }
+
+ template<typename genType>
+ GLM_FUNC_QUALIFIER genType cubic
+ (
+ genType const& v1,
+ genType const& v2,
+ genType const& v3,
+ genType const& v4,
+ typename genType::value_type const& s
+ )
+ {
+ return ((v1 * s + v2) * s + v3) * s + v4;
+ }
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/gtx/std_based_type.hpp b/MacroLibX/third_party/glm/gtx/std_based_type.hpp
new file mode 100644
index 0000000..cd3be8c
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/std_based_type.hpp
@@ -0,0 +1,68 @@
+/// @ref gtx_std_based_type
+/// @file glm/gtx/std_based_type.hpp
+///
+/// @see core (dependence)
+/// @see gtx_extented_min_max (dependence)
+///
+/// @defgroup gtx_std_based_type GLM_GTX_std_based_type
+/// @ingroup gtx
+///
+/// Include <glm/gtx/std_based_type.hpp> to use the features of this extension.
+///
+/// Adds vector types based on STL value types.
+
+#pragma once
+
+// Dependency:
+#include "../glm.hpp"
+#include <cstdlib>
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+# ifndef GLM_ENABLE_EXPERIMENTAL
+# pragma message("GLM: GLM_GTX_std_based_type is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.")
+# else
+# pragma message("GLM: GLM_GTX_std_based_type extension included")
+# endif
+#endif
+
+namespace glm
+{
+ /// @addtogroup gtx_std_based_type
+ /// @{
+
+ /// Vector type based of one std::size_t component.
+ /// @see GLM_GTX_std_based_type
+ typedef vec<1, std::size_t, defaultp> size1;
+
+ /// Vector type based of two std::size_t components.
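+ /// A minimal usage sketch for these typedefs (assuming GLM_ENABLE_EXPERIMENTAL is
+ /// defined before including <glm/gtx/std_based_type.hpp>):
+ /// @code
+ /// glm::size2 const Extent(1920, 1080); // two std::size_t components
+ /// std::size_t const TexelCount = Extent.x * Extent.y;
+ /// @endcode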
+ /// @see GLM_GTX_std_based_type + typedef vec<2, std::size_t, defaultp> size2; + + /// Vector type based of three std::size_t components. + /// @see GLM_GTX_std_based_type + typedef vec<3, std::size_t, defaultp> size3; + + /// Vector type based of four std::size_t components. + /// @see GLM_GTX_std_based_type + typedef vec<4, std::size_t, defaultp> size4; + + /// Vector type based of one std::size_t component. + /// @see GLM_GTX_std_based_type + typedef vec<1, std::size_t, defaultp> size1_t; + + /// Vector type based of two std::size_t components. + /// @see GLM_GTX_std_based_type + typedef vec<2, std::size_t, defaultp> size2_t; + + /// Vector type based of three std::size_t components. + /// @see GLM_GTX_std_based_type + typedef vec<3, std::size_t, defaultp> size3_t; + + /// Vector type based of four std::size_t components. + /// @see GLM_GTX_std_based_type + typedef vec<4, std::size_t, defaultp> size4_t; + + /// @} +}//namespace glm + +#include "std_based_type.inl" diff --git a/MacroLibX/third_party/glm/gtx/std_based_type.inl b/MacroLibX/third_party/glm/gtx/std_based_type.inl new file mode 100644 index 0000000..9c34bdb --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/std_based_type.inl @@ -0,0 +1,6 @@ +/// @ref gtx_std_based_type + +namespace glm +{ + +} diff --git a/MacroLibX/third_party/glm/gtx/string_cast.hpp b/MacroLibX/third_party/glm/gtx/string_cast.hpp new file mode 100644 index 0000000..27846bf --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/string_cast.hpp @@ -0,0 +1,52 @@ +/// @ref gtx_string_cast +/// @file glm/gtx/string_cast.hpp +/// +/// @see core (dependence) +/// @see gtx_integer (dependence) +/// @see gtx_quaternion (dependence) +/// +/// @defgroup gtx_string_cast GLM_GTX_string_cast +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Setup strings for GLM type values +/// +/// This extension is not supported with CUDA + +#pragma once + +// Dependency: +#include "../glm.hpp" +#include "../gtc/type_precision.hpp" +#include "../gtc/quaternion.hpp" +#include "../gtx/dual_quaternion.hpp" +#include +#include + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_string_cast is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_string_cast extension included") +# endif +#endif + +#if(GLM_COMPILER & GLM_COMPILER_CUDA) +# error "GLM_GTX_string_cast is not supported on CUDA compiler" +#endif + +namespace glm +{ + /// @addtogroup gtx_string_cast + /// @{ + + /// Create a string from a GLM vector or matrix typed variable. + /// @see gtx_string_cast extension. + template + GLM_FUNC_DECL std::string to_string(genType const& x); + + /// @} +}//namespace glm + +#include "string_cast.inl" diff --git a/MacroLibX/third_party/glm/gtx/string_cast.inl b/MacroLibX/third_party/glm/gtx/string_cast.inl new file mode 100644 index 0000000..f67751d --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/string_cast.inl @@ -0,0 +1,492 @@ +/// @ref gtx_string_cast + +#include +#include + +namespace glm{ +namespace detail +{ + template + struct cast + { + typedef T value_type; + }; + + template <> + struct cast + { + typedef double value_type; + }; + + GLM_FUNC_QUALIFIER std::string format(const char* msg, ...) 
+ { + std::size_t const STRING_BUFFER(4096); + char text[STRING_BUFFER]; + va_list list; + + if(msg == GLM_NULLPTR) + return std::string(); + + va_start(list, msg); +# if (GLM_COMPILER & GLM_COMPILER_VC) + vsprintf_s(text, STRING_BUFFER, msg, list); +# else// + std::vsprintf(text, msg, list); +# endif// + va_end(list); + + return std::string(text); + } + + static const char* LabelTrue = "true"; + static const char* LabelFalse = "false"; + + template + struct literal + { + GLM_FUNC_QUALIFIER static char const * value() {return "%d";} + }; + + template + struct literal + { + GLM_FUNC_QUALIFIER static char const * value() {return "%f";} + }; + +# if GLM_MODEL == GLM_MODEL_32 && GLM_COMPILER && GLM_COMPILER_VC + template<> + struct literal + { + GLM_FUNC_QUALIFIER static char const * value() {return "%lld";} + }; + + template<> + struct literal + { + GLM_FUNC_QUALIFIER static char const * value() {return "%lld";} + }; +# endif//GLM_MODEL == GLM_MODEL_32 && GLM_COMPILER && GLM_COMPILER_VC + + template + struct prefix{}; + + template<> + struct prefix + { + GLM_FUNC_QUALIFIER static char const * value() {return "";} + }; + + template<> + struct prefix + { + GLM_FUNC_QUALIFIER static char const * value() {return "d";} + }; + + template<> + struct prefix + { + GLM_FUNC_QUALIFIER static char const * value() {return "b";} + }; + + template<> + struct prefix + { + GLM_FUNC_QUALIFIER static char const * value() {return "u8";} + }; + + template<> + struct prefix + { + GLM_FUNC_QUALIFIER static char const * value() {return "i8";} + }; + + template<> + struct prefix + { + GLM_FUNC_QUALIFIER static char const * value() {return "u16";} + }; + + template<> + struct prefix + { + GLM_FUNC_QUALIFIER static char const * value() {return "i16";} + }; + + template<> + struct prefix + { + GLM_FUNC_QUALIFIER static char const * value() {return "u";} + }; + + template<> + struct prefix + { + GLM_FUNC_QUALIFIER static char const * value() {return "i";} + }; + + template<> + struct prefix + { + GLM_FUNC_QUALIFIER static char const * value() {return "u64";} + }; + + template<> + struct prefix + { + GLM_FUNC_QUALIFIER static char const * value() {return "i64";} + }; + + template + struct compute_to_string + {}; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(vec<1, bool, Q> const& x) + { + return detail::format("bvec1(%s)", + x[0] ? detail::LabelTrue : detail::LabelFalse); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(vec<2, bool, Q> const& x) + { + return detail::format("bvec2(%s, %s)", + x[0] ? detail::LabelTrue : detail::LabelFalse, + x[1] ? detail::LabelTrue : detail::LabelFalse); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(vec<3, bool, Q> const& x) + { + return detail::format("bvec3(%s, %s, %s)", + x[0] ? detail::LabelTrue : detail::LabelFalse, + x[1] ? detail::LabelTrue : detail::LabelFalse, + x[2] ? detail::LabelTrue : detail::LabelFalse); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(vec<4, bool, Q> const& x) + { + return detail::format("bvec4(%s, %s, %s, %s)", + x[0] ? detail::LabelTrue : detail::LabelFalse, + x[1] ? detail::LabelTrue : detail::LabelFalse, + x[2] ? detail::LabelTrue : detail::LabelFalse, + x[3] ? 
detail::LabelTrue : detail::LabelFalse); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(vec<1, T, Q> const& x) + { + char const * PrefixStr = prefix::value(); + char const * LiteralStr = literal::is_iec559>::value(); + std::string FormatStr(detail::format("%svec1(%s)", + PrefixStr, + LiteralStr)); + + return detail::format(FormatStr.c_str(), + static_cast::value_type>(x[0])); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(vec<2, T, Q> const& x) + { + char const * PrefixStr = prefix::value(); + char const * LiteralStr = literal::is_iec559>::value(); + std::string FormatStr(detail::format("%svec2(%s, %s)", + PrefixStr, + LiteralStr, LiteralStr)); + + return detail::format(FormatStr.c_str(), + static_cast::value_type>(x[0]), + static_cast::value_type>(x[1])); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(vec<3, T, Q> const& x) + { + char const * PrefixStr = prefix::value(); + char const * LiteralStr = literal::is_iec559>::value(); + std::string FormatStr(detail::format("%svec3(%s, %s, %s)", + PrefixStr, + LiteralStr, LiteralStr, LiteralStr)); + + return detail::format(FormatStr.c_str(), + static_cast::value_type>(x[0]), + static_cast::value_type>(x[1]), + static_cast::value_type>(x[2])); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(vec<4, T, Q> const& x) + { + char const * PrefixStr = prefix::value(); + char const * LiteralStr = literal::is_iec559>::value(); + std::string FormatStr(detail::format("%svec4(%s, %s, %s, %s)", + PrefixStr, + LiteralStr, LiteralStr, LiteralStr, LiteralStr)); + + return detail::format(FormatStr.c_str(), + static_cast::value_type>(x[0]), + static_cast::value_type>(x[1]), + static_cast::value_type>(x[2]), + static_cast::value_type>(x[3])); + } + }; + + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(mat<2, 2, T, Q> const& x) + { + char const * PrefixStr = prefix::value(); + char const * LiteralStr = literal::is_iec559>::value(); + std::string FormatStr(detail::format("%smat2x2((%s, %s), (%s, %s))", + PrefixStr, + LiteralStr, LiteralStr, + LiteralStr, LiteralStr)); + + return detail::format(FormatStr.c_str(), + static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), + static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1])); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(mat<2, 3, T, Q> const& x) + { + char const * PrefixStr = prefix::value(); + char const * LiteralStr = literal::is_iec559>::value(); + std::string FormatStr(detail::format("%smat2x3((%s, %s, %s), (%s, %s, %s))", + PrefixStr, + LiteralStr, LiteralStr, LiteralStr, + LiteralStr, LiteralStr, LiteralStr)); + + return detail::format(FormatStr.c_str(), + static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), static_cast::value_type>(x[0][2]), + static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), static_cast::value_type>(x[1][2])); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(mat<2, 4, T, Q> const& x) + { + char const * PrefixStr = prefix::value(); + char const * LiteralStr = literal::is_iec559>::value(); + std::string FormatStr(detail::format("%smat2x4((%s, %s, %s, %s), (%s, %s, %s, %s))", + PrefixStr, + LiteralStr, LiteralStr, LiteralStr, LiteralStr, + LiteralStr, LiteralStr, LiteralStr, 
LiteralStr)); + + return detail::format(FormatStr.c_str(), + static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), static_cast::value_type>(x[0][2]), static_cast::value_type>(x[0][3]), + static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), static_cast::value_type>(x[1][2]), static_cast::value_type>(x[1][3])); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(mat<3, 2, T, Q> const& x) + { + char const * PrefixStr = prefix::value(); + char const * LiteralStr = literal::is_iec559>::value(); + std::string FormatStr(detail::format("%smat3x2((%s, %s), (%s, %s), (%s, %s))", + PrefixStr, + LiteralStr, LiteralStr, + LiteralStr, LiteralStr, + LiteralStr, LiteralStr)); + + return detail::format(FormatStr.c_str(), + static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), + static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), + static_cast::value_type>(x[2][0]), static_cast::value_type>(x[2][1])); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(mat<3, 3, T, Q> const& x) + { + char const * PrefixStr = prefix::value(); + char const * LiteralStr = literal::is_iec559>::value(); + std::string FormatStr(detail::format("%smat3x3((%s, %s, %s), (%s, %s, %s), (%s, %s, %s))", + PrefixStr, + LiteralStr, LiteralStr, LiteralStr, + LiteralStr, LiteralStr, LiteralStr, + LiteralStr, LiteralStr, LiteralStr)); + + return detail::format(FormatStr.c_str(), + static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), static_cast::value_type>(x[0][2]), + static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), static_cast::value_type>(x[1][2]), + static_cast::value_type>(x[2][0]), static_cast::value_type>(x[2][1]), static_cast::value_type>(x[2][2])); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(mat<3, 4, T, Q> const& x) + { + char const * PrefixStr = prefix::value(); + char const * LiteralStr = literal::is_iec559>::value(); + std::string FormatStr(detail::format("%smat3x4((%s, %s, %s, %s), (%s, %s, %s, %s), (%s, %s, %s, %s))", + PrefixStr, + LiteralStr, LiteralStr, LiteralStr, LiteralStr, + LiteralStr, LiteralStr, LiteralStr, LiteralStr, + LiteralStr, LiteralStr, LiteralStr, LiteralStr)); + + return detail::format(FormatStr.c_str(), + static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), static_cast::value_type>(x[0][2]), static_cast::value_type>(x[0][3]), + static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), static_cast::value_type>(x[1][2]), static_cast::value_type>(x[1][3]), + static_cast::value_type>(x[2][0]), static_cast::value_type>(x[2][1]), static_cast::value_type>(x[2][2]), static_cast::value_type>(x[2][3])); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(mat<4, 2, T, Q> const& x) + { + char const * PrefixStr = prefix::value(); + char const * LiteralStr = literal::is_iec559>::value(); + std::string FormatStr(detail::format("%smat4x2((%s, %s), (%s, %s), (%s, %s), (%s, %s))", + PrefixStr, + LiteralStr, LiteralStr, + LiteralStr, LiteralStr, + LiteralStr, LiteralStr, + LiteralStr, LiteralStr)); + + return detail::format(FormatStr.c_str(), + static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), + static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), + static_cast::value_type>(x[2][0]), static_cast::value_type>(x[2][1]), + static_cast::value_type>(x[3][0]), 
static_cast::value_type>(x[3][1])); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(mat<4, 3, T, Q> const& x) + { + char const * PrefixStr = prefix::value(); + char const * LiteralStr = literal::is_iec559>::value(); + std::string FormatStr(detail::format("%smat4x3((%s, %s, %s), (%s, %s, %s), (%s, %s, %s), (%s, %s, %s))", + PrefixStr, + LiteralStr, LiteralStr, LiteralStr, + LiteralStr, LiteralStr, LiteralStr, + LiteralStr, LiteralStr, LiteralStr, + LiteralStr, LiteralStr, LiteralStr)); + + return detail::format(FormatStr.c_str(), + static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), static_cast::value_type>(x[0][2]), + static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), static_cast::value_type>(x[1][2]), + static_cast::value_type>(x[2][0]), static_cast::value_type>(x[2][1]), static_cast::value_type>(x[2][2]), + static_cast::value_type>(x[3][0]), static_cast::value_type>(x[3][1]), static_cast::value_type>(x[3][2])); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(mat<4, 4, T, Q> const& x) + { + char const * PrefixStr = prefix::value(); + char const * LiteralStr = literal::is_iec559>::value(); + std::string FormatStr(detail::format("%smat4x4((%s, %s, %s, %s), (%s, %s, %s, %s), (%s, %s, %s, %s), (%s, %s, %s, %s))", + PrefixStr, + LiteralStr, LiteralStr, LiteralStr, LiteralStr, + LiteralStr, LiteralStr, LiteralStr, LiteralStr, + LiteralStr, LiteralStr, LiteralStr, LiteralStr, + LiteralStr, LiteralStr, LiteralStr, LiteralStr)); + + return detail::format(FormatStr.c_str(), + static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), static_cast::value_type>(x[0][2]), static_cast::value_type>(x[0][3]), + static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), static_cast::value_type>(x[1][2]), static_cast::value_type>(x[1][3]), + static_cast::value_type>(x[2][0]), static_cast::value_type>(x[2][1]), static_cast::value_type>(x[2][2]), static_cast::value_type>(x[2][3]), + static_cast::value_type>(x[3][0]), static_cast::value_type>(x[3][1]), static_cast::value_type>(x[3][2]), static_cast::value_type>(x[3][3])); + } + }; + + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(qua const& q) + { + char const * PrefixStr = prefix::value(); + char const * LiteralStr = literal::is_iec559>::value(); + std::string FormatStr(detail::format("%squat(%s, {%s, %s, %s})", + PrefixStr, + LiteralStr, LiteralStr, LiteralStr, LiteralStr)); + + return detail::format(FormatStr.c_str(), + static_cast::value_type>(q.w), + static_cast::value_type>(q.x), + static_cast::value_type>(q.y), + static_cast::value_type>(q.z)); + } + }; + + template + struct compute_to_string > + { + GLM_FUNC_QUALIFIER static std::string call(tdualquat const& x) + { + char const * PrefixStr = prefix::value(); + char const * LiteralStr = literal::is_iec559>::value(); + std::string FormatStr(detail::format("%sdualquat((%s, {%s, %s, %s}), (%s, {%s, %s, %s}))", + PrefixStr, + LiteralStr, LiteralStr, LiteralStr, LiteralStr, + LiteralStr, LiteralStr, LiteralStr, LiteralStr)); + + return detail::format(FormatStr.c_str(), + static_cast::value_type>(x.real.w), + static_cast::value_type>(x.real.x), + static_cast::value_type>(x.real.y), + static_cast::value_type>(x.real.z), + static_cast::value_type>(x.dual.w), + static_cast::value_type>(x.dual.x), + static_cast::value_type>(x.dual.y), + static_cast::value_type>(x.dual.z)); + } + }; + +}//namespace detail + 
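+// A minimal usage sketch (assuming GLM_ENABLE_EXPERIMENTAL is defined before
+// including <glm/gtx/string_cast.hpp>):
+//
+//   glm::vec3 const v(1.0f, 2.0f, 3.0f);
+//   std::string const s = glm::to_string(v); // "vec3(1.000000, 2.000000, 3.000000)"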
+template<typename matType>
+GLM_FUNC_QUALIFIER std::string to_string(matType const& x)
+{
+ return detail::compute_to_string<matType>::call(x);
+}
+
+}//namespace glm
diff --git a/MacroLibX/third_party/glm/gtx/texture.hpp b/MacroLibX/third_party/glm/gtx/texture.hpp
new file mode 100644
index 0000000..20585e6
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/texture.hpp
@@ -0,0 +1,46 @@
+/// @ref gtx_texture
+/// @file glm/gtx/texture.hpp
+///
+/// @see core (dependence)
+///
+/// @defgroup gtx_texture GLM_GTX_texture
+/// @ingroup gtx
+///
+/// Include <glm/gtx/texture.hpp> to use the features of this extension.
+///
+/// Wrapping mode of texture coordinates.
+
+#pragma once
+
+// Dependency:
+#include "../glm.hpp"
+#include "../gtc/integer.hpp"
+#include "../gtx/component_wise.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+# ifndef GLM_ENABLE_EXPERIMENTAL
+# pragma message("GLM: GLM_GTX_texture is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.")
+# else
+# pragma message("GLM: GLM_GTX_texture extension included")
+# endif
+#endif
+
+namespace glm
+{
+ /// @addtogroup gtx_texture
+ /// @{
+
+ /// Compute the number of mipmaps levels necessary to create a mipmap complete texture
+ ///
+ /// @param Extent Extent of the texture base level mipmap
+ /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+ /// @tparam T Floating-point or signed integer scalar types
+ /// @tparam Q Value from qualifier enum
+ template<length_t L, typename T, qualifier Q>
+ T levels(vec<L, T, Q> const& Extent);
+
+ /// @}
+}// namespace glm
+
+#include "texture.inl"
+
diff --git a/MacroLibX/third_party/glm/gtx/texture.inl b/MacroLibX/third_party/glm/gtx/texture.inl
new file mode 100644
index 0000000..593c826
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/texture.inl
@@ -0,0 +1,17 @@
+/// @ref gtx_texture
+
+namespace glm
+{
+ template<length_t L, typename T, qualifier Q>
+ inline T levels(vec<L, T, Q> const& Extent)
+ {
+ return glm::log2(compMax(Extent)) + static_cast<T>(1);
+ }
+
+ template<typename T>
+ inline T levels(T Extent)
+ {
+ return vec<1, T, defaultp>(Extent).x;
+ }
+}//namespace glm
+
diff --git a/MacroLibX/third_party/glm/gtx/transform.hpp b/MacroLibX/third_party/glm/gtx/transform.hpp
new file mode 100644
index 0000000..0279fc8
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/transform.hpp
@@ -0,0 +1,60 @@
+/// @ref gtx_transform
+/// @file glm/gtx/transform.hpp
+///
+/// @see core (dependence)
+/// @see gtc_matrix_transform (dependence)
+/// @see gtx_transform
+/// @see gtx_transform2
+///
+/// @defgroup gtx_transform GLM_GTX_transform
+/// @ingroup gtx
+///
+/// Include <glm/gtx/transform.hpp> to use the features of this extension.
+///
+/// Add transformation matrices
+
+#pragma once
+
+// Dependency:
+#include "../glm.hpp"
+#include "../gtc/matrix_transform.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+# ifndef GLM_ENABLE_EXPERIMENTAL
+# pragma message("GLM: GLM_GTX_transform is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.")
+# else
+# pragma message("GLM: GLM_GTX_transform extension included")
+# endif
+#endif
+
+namespace glm
+{
+ /// @addtogroup gtx_transform
+ /// @{
+
+ /// Transforms a matrix with a translation 4 * 4 matrix created from 3 scalars.
+ /// @see gtc_matrix_transform
+ /// @see gtx_transform
+ template<typename T, qualifier Q>
+ GLM_FUNC_DECL mat<4, 4, T, Q> translate(
+ vec<3, T, Q> const& v);
+
+ /// Builds a rotation 4 * 4 matrix created from an axis of 3 scalars and an angle expressed in radians.
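+ ///
+ /// A minimal usage sketch (assuming GLM_ENABLE_EXPERIMENTAL is defined before
+ /// including <glm/gtx/transform.hpp>):
+ /// @code
+ /// // 90 degrees around Z; shorthand for glm::rotate(glm::mat4(1.0f), angle, axis)
+ /// glm::mat4 const R = glm::rotate(glm::radians(90.0f), glm::vec3(0.0f, 0.0f, 1.0f));
+ /// @endcode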
+ /// @see gtc_matrix_transform + /// @see gtx_transform + template + GLM_FUNC_DECL mat<4, 4, T, Q> rotate( + T angle, + vec<3, T, Q> const& v); + + /// Transforms a matrix with a scale 4 * 4 matrix created from a vector of 3 components. + /// @see gtc_matrix_transform + /// @see gtx_transform + template + GLM_FUNC_DECL mat<4, 4, T, Q> scale( + vec<3, T, Q> const& v); + + /// @} +}// namespace glm + +#include "transform.inl" diff --git a/MacroLibX/third_party/glm/gtx/transform.inl b/MacroLibX/third_party/glm/gtx/transform.inl new file mode 100644 index 0000000..48ee680 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/transform.inl @@ -0,0 +1,23 @@ +/// @ref gtx_transform + +namespace glm +{ + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> translate(vec<3, T, Q> const& v) + { + return translate(mat<4, 4, T, Q>(static_cast(1)), v); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> rotate(T angle, vec<3, T, Q> const& v) + { + return rotate(mat<4, 4, T, Q>(static_cast(1)), angle, v); + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> scale(vec<3, T, Q> const& v) + { + return scale(mat<4, 4, T, Q>(static_cast(1)), v); + } + +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/transform2.hpp b/MacroLibX/third_party/glm/gtx/transform2.hpp new file mode 100644 index 0000000..0d8ba9d --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/transform2.hpp @@ -0,0 +1,89 @@ +/// @ref gtx_transform2 +/// @file glm/gtx/transform2.hpp +/// +/// @see core (dependence) +/// @see gtx_transform (dependence) +/// +/// @defgroup gtx_transform2 GLM_GTX_transform2 +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Add extra transformation matrices + +#pragma once + +// Dependency: +#include "../glm.hpp" +#include "../gtx/transform.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_transform2 is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_transform2 extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_transform2 + /// @{ + + //! Transforms a matrix with a shearing on X axis. + //! From GLM_GTX_transform2 extension. + template + GLM_FUNC_DECL mat<3, 3, T, Q> shearX2D(mat<3, 3, T, Q> const& m, T y); + + //! Transforms a matrix with a shearing on Y axis. + //! From GLM_GTX_transform2 extension. + template + GLM_FUNC_DECL mat<3, 3, T, Q> shearY2D(mat<3, 3, T, Q> const& m, T x); + + //! Transforms a matrix with a shearing on X axis + //! From GLM_GTX_transform2 extension. + template + GLM_FUNC_DECL mat<4, 4, T, Q> shearX3D(mat<4, 4, T, Q> const& m, T y, T z); + + //! Transforms a matrix with a shearing on Y axis. + //! From GLM_GTX_transform2 extension. + template + GLM_FUNC_DECL mat<4, 4, T, Q> shearY3D(mat<4, 4, T, Q> const& m, T x, T z); + + //! Transforms a matrix with a shearing on Z axis. + //! From GLM_GTX_transform2 extension. 
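+ //! A minimal usage sketch (assuming GLM_ENABLE_EXPERIMENTAL is defined before
+ //! including <glm/gtx/transform2.hpp>):
+ //! @code
+ //! // shears x by 0.5 * z and y by 0.25 * z, starting from the identity
+ //! glm::mat4 const S = glm::shearZ3D(glm::mat4(1.0f), 0.5f, 0.25f);
+ //! @endcode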
+ template + GLM_FUNC_DECL mat<4, 4, T, Q> shearZ3D(mat<4, 4, T, Q> const& m, T x, T y); + + //template GLM_FUNC_QUALIFIER mat<4, 4, T, Q> shear(const mat<4, 4, T, Q> & m, shearPlane, planePoint, angle) + // Identity + tan(angle) * cross(Normal, OnPlaneVector) 0 + // - dot(PointOnPlane, normal) * OnPlaneVector 1 + + // Reflect functions seem to don't work + //template mat<3, 3, T, Q> reflect2D(const mat<3, 3, T, Q> & m, const vec<3, T, Q>& normal){return reflect2DGTX(m, normal);} //!< \brief Build a reflection matrix (from GLM_GTX_transform2 extension) + //template mat<4, 4, T, Q> reflect3D(const mat<4, 4, T, Q> & m, const vec<3, T, Q>& normal){return reflect3DGTX(m, normal);} //!< \brief Build a reflection matrix (from GLM_GTX_transform2 extension) + + //! Build planar projection matrix along normal axis. + //! From GLM_GTX_transform2 extension. + template + GLM_FUNC_DECL mat<3, 3, T, Q> proj2D(mat<3, 3, T, Q> const& m, vec<3, T, Q> const& normal); + + //! Build planar projection matrix along normal axis. + //! From GLM_GTX_transform2 extension. + template + GLM_FUNC_DECL mat<4, 4, T, Q> proj3D(mat<4, 4, T, Q> const & m, vec<3, T, Q> const& normal); + + //! Build a scale bias matrix. + //! From GLM_GTX_transform2 extension. + template + GLM_FUNC_DECL mat<4, 4, T, Q> scaleBias(T scale, T bias); + + //! Build a scale bias matrix. + //! From GLM_GTX_transform2 extension. + template + GLM_FUNC_DECL mat<4, 4, T, Q> scaleBias(mat<4, 4, T, Q> const& m, T scale, T bias); + + /// @} +}// namespace glm + +#include "transform2.inl" diff --git a/MacroLibX/third_party/glm/gtx/transform2.inl b/MacroLibX/third_party/glm/gtx/transform2.inl new file mode 100644 index 0000000..2b53198 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/transform2.inl @@ -0,0 +1,125 @@ +/// @ref gtx_transform2 + +namespace glm +{ + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> shearX2D(mat<3, 3, T, Q> const& m, T s) + { + mat<3, 3, T, Q> r(1); + r[1][0] = s; + return m * r; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> shearY2D(mat<3, 3, T, Q> const& m, T s) + { + mat<3, 3, T, Q> r(1); + r[0][1] = s; + return m * r; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> shearX3D(mat<4, 4, T, Q> const& m, T s, T t) + { + mat<4, 4, T, Q> r(1); + r[0][1] = s; + r[0][2] = t; + return m * r; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> shearY3D(mat<4, 4, T, Q> const& m, T s, T t) + { + mat<4, 4, T, Q> r(1); + r[1][0] = s; + r[1][2] = t; + return m * r; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> shearZ3D(mat<4, 4, T, Q> const& m, T s, T t) + { + mat<4, 4, T, Q> r(1); + r[2][0] = s; + r[2][1] = t; + return m * r; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> reflect2D(mat<3, 3, T, Q> const& m, vec<3, T, Q> const& normal) + { + mat<3, 3, T, Q> r(static_cast(1)); + r[0][0] = static_cast(1) - static_cast(2) * normal.x * normal.x; + r[0][1] = -static_cast(2) * normal.x * normal.y; + r[1][0] = -static_cast(2) * normal.x * normal.y; + r[1][1] = static_cast(1) - static_cast(2) * normal.y * normal.y; + return m * r; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> reflect3D(mat<4, 4, T, Q> const& m, vec<3, T, Q> const& normal) + { + mat<4, 4, T, Q> r(static_cast(1)); + r[0][0] = static_cast(1) - static_cast(2) * normal.x * normal.x; + r[0][1] = -static_cast(2) * normal.x * normal.y; + r[0][2] = -static_cast(2) * normal.x * normal.z; + + r[1][0] = -static_cast(2) * normal.x * normal.y; + r[1][1] = static_cast(1) - static_cast(2) * normal.y * normal.y; + r[1][2] = -static_cast(2) * normal.y * normal.z; 
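+ // (These assignments fill in the upper-left 3x3 of the Householder reflection
+ // R = I - 2 * n * n^T across the plane with unit normal n.)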
+ + r[2][0] = -static_cast(2) * normal.x * normal.z; + r[2][1] = -static_cast(2) * normal.y * normal.z; + r[2][2] = static_cast(1) - static_cast(2) * normal.z * normal.z; + return m * r; + } + + template + GLM_FUNC_QUALIFIER mat<3, 3, T, Q> proj2D( + const mat<3, 3, T, Q>& m, + const vec<3, T, Q>& normal) + { + mat<3, 3, T, Q> r(static_cast(1)); + r[0][0] = static_cast(1) - normal.x * normal.x; + r[0][1] = - normal.x * normal.y; + r[1][0] = - normal.x * normal.y; + r[1][1] = static_cast(1) - normal.y * normal.y; + return m * r; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> proj3D( + const mat<4, 4, T, Q>& m, + const vec<3, T, Q>& normal) + { + mat<4, 4, T, Q> r(static_cast(1)); + r[0][0] = static_cast(1) - normal.x * normal.x; + r[0][1] = - normal.x * normal.y; + r[0][2] = - normal.x * normal.z; + r[1][0] = - normal.x * normal.y; + r[1][1] = static_cast(1) - normal.y * normal.y; + r[1][2] = - normal.y * normal.z; + r[2][0] = - normal.x * normal.z; + r[2][1] = - normal.y * normal.z; + r[2][2] = static_cast(1) - normal.z * normal.z; + return m * r; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> scaleBias(T scale, T bias) + { + mat<4, 4, T, Q> result; + result[3] = vec<4, T, Q>(vec<3, T, Q>(bias), static_cast(1)); + result[0][0] = scale; + result[1][1] = scale; + result[2][2] = scale; + return result; + } + + template + GLM_FUNC_QUALIFIER mat<4, 4, T, Q> scaleBias(mat<4, 4, T, Q> const& m, T scale, T bias) + { + return m * scaleBias(scale, bias); + } +}//namespace glm + diff --git a/MacroLibX/third_party/glm/gtx/type_aligned.hpp b/MacroLibX/third_party/glm/gtx/type_aligned.hpp new file mode 100644 index 0000000..2ae522c --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/type_aligned.hpp @@ -0,0 +1,982 @@ +/// @ref gtx_type_aligned +/// @file glm/gtx/type_aligned.hpp +/// +/// @see core (dependence) +/// @see gtc_quaternion (dependence) +/// +/// @defgroup gtx_type_aligned GLM_GTX_type_aligned +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Defines aligned types. + +#pragma once + +// Dependency: +#include "../gtc/type_precision.hpp" +#include "../gtc/quaternion.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_type_aligned is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_type_aligned extension included") +# endif +#endif + +namespace glm +{ + /////////////////////////// + // Signed int vector types + + /// @addtogroup gtx_type_aligned + /// @{ + + /// Low qualifier 8 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_int8, aligned_lowp_int8, 1); + + /// Low qualifier 16 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_int16, aligned_lowp_int16, 2); + + /// Low qualifier 32 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_int32, aligned_lowp_int32, 4); + + /// Low qualifier 64 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_int64, aligned_lowp_int64, 8); + + + /// Low qualifier 8 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_int8_t, aligned_lowp_int8_t, 1); + + /// Low qualifier 16 bit signed integer aligned scalar type. 
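+ /// A minimal usage sketch for these aligned typedefs (assuming GLM_ENABLE_EXPERIMENTAL
+ /// is defined before including <glm/gtx/type_aligned.hpp>):
+ /// @code
+ /// glm::aligned_ivec4 const v(1, 2, 3, 4); // declared below with 16-byte alignment
+ /// static_assert(alignof(glm::aligned_ivec4) == 16, "16-byte alignment expected");
+ /// @endcode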
+ /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_int16_t, aligned_lowp_int16_t, 2); + + /// Low qualifier 32 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_int32_t, aligned_lowp_int32_t, 4); + + /// Low qualifier 64 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_int64_t, aligned_lowp_int64_t, 8); + + + /// Low qualifier 8 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_i8, aligned_lowp_i8, 1); + + /// Low qualifier 16 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_i16, aligned_lowp_i16, 2); + + /// Low qualifier 32 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_i32, aligned_lowp_i32, 4); + + /// Low qualifier 64 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_i64, aligned_lowp_i64, 8); + + + /// Medium qualifier 8 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_int8, aligned_mediump_int8, 1); + + /// Medium qualifier 16 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_int16, aligned_mediump_int16, 2); + + /// Medium qualifier 32 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_int32, aligned_mediump_int32, 4); + + /// Medium qualifier 64 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_int64, aligned_mediump_int64, 8); + + + /// Medium qualifier 8 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_int8_t, aligned_mediump_int8_t, 1); + + /// Medium qualifier 16 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_int16_t, aligned_mediump_int16_t, 2); + + /// Medium qualifier 32 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_int32_t, aligned_mediump_int32_t, 4); + + /// Medium qualifier 64 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_int64_t, aligned_mediump_int64_t, 8); + + + /// Medium qualifier 8 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_i8, aligned_mediump_i8, 1); + + /// Medium qualifier 16 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_i16, aligned_mediump_i16, 2); + + /// Medium qualifier 32 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_i32, aligned_mediump_i32, 4); + + /// Medium qualifier 64 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_i64, aligned_mediump_i64, 8); + + + /// High qualifier 8 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_int8, aligned_highp_int8, 1); + + /// High qualifier 16 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_int16, aligned_highp_int16, 2); + + /// High qualifier 32 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_int32, aligned_highp_int32, 4); + + /// High qualifier 64 bit signed integer aligned scalar type. 
+ /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_int64, aligned_highp_int64, 8); + + + /// High qualifier 8 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_int8_t, aligned_highp_int8_t, 1); + + /// High qualifier 16 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_int16_t, aligned_highp_int16_t, 2); + + /// High qualifier 32 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_int32_t, aligned_highp_int32_t, 4); + + /// High qualifier 64 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_int64_t, aligned_highp_int64_t, 8); + + + /// High qualifier 8 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_i8, aligned_highp_i8, 1); + + /// High qualifier 16 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_i16, aligned_highp_i16, 2); + + /// High qualifier 32 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_i32, aligned_highp_i32, 4); + + /// High qualifier 64 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_i64, aligned_highp_i64, 8); + + + /// Default qualifier 8 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(int8, aligned_int8, 1); + + /// Default qualifier 16 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(int16, aligned_int16, 2); + + /// Default qualifier 32 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(int32, aligned_int32, 4); + + /// Default qualifier 64 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(int64, aligned_int64, 8); + + + /// Default qualifier 8 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(int8_t, aligned_int8_t, 1); + + /// Default qualifier 16 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(int16_t, aligned_int16_t, 2); + + /// Default qualifier 32 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(int32_t, aligned_int32_t, 4); + + /// Default qualifier 64 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(int64_t, aligned_int64_t, 8); + + + /// Default qualifier 8 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i8, aligned_i8, 1); + + /// Default qualifier 16 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i16, aligned_i16, 2); + + /// Default qualifier 32 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i32, aligned_i32, 4); + + /// Default qualifier 64 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i64, aligned_i64, 8); + + + /// Default qualifier 32 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(ivec1, aligned_ivec1, 4); + + /// Default qualifier 32 bit signed integer aligned vector of 2 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(ivec2, aligned_ivec2, 8); + + /// Default qualifier 32 bit signed integer aligned vector of 3 components type. 
+ /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(ivec3, aligned_ivec3, 16); + + /// Default qualifier 32 bit signed integer aligned vector of 4 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(ivec4, aligned_ivec4, 16); + + + /// Default qualifier 8 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i8vec1, aligned_i8vec1, 1); + + /// Default qualifier 8 bit signed integer aligned vector of 2 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i8vec2, aligned_i8vec2, 2); + + /// Default qualifier 8 bit signed integer aligned vector of 3 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i8vec3, aligned_i8vec3, 4); + + /// Default qualifier 8 bit signed integer aligned vector of 4 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i8vec4, aligned_i8vec4, 4); + + + /// Default qualifier 16 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i16vec1, aligned_i16vec1, 2); + + /// Default qualifier 16 bit signed integer aligned vector of 2 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i16vec2, aligned_i16vec2, 4); + + /// Default qualifier 16 bit signed integer aligned vector of 3 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i16vec3, aligned_i16vec3, 8); + + /// Default qualifier 16 bit signed integer aligned vector of 4 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i16vec4, aligned_i16vec4, 8); + + + /// Default qualifier 32 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i32vec1, aligned_i32vec1, 4); + + /// Default qualifier 32 bit signed integer aligned vector of 2 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i32vec2, aligned_i32vec2, 8); + + /// Default qualifier 32 bit signed integer aligned vector of 3 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i32vec3, aligned_i32vec3, 16); + + /// Default qualifier 32 bit signed integer aligned vector of 4 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i32vec4, aligned_i32vec4, 16); + + + /// Default qualifier 64 bit signed integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i64vec1, aligned_i64vec1, 8); + + /// Default qualifier 64 bit signed integer aligned vector of 2 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i64vec2, aligned_i64vec2, 16); + + /// Default qualifier 64 bit signed integer aligned vector of 3 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i64vec3, aligned_i64vec3, 32); + + /// Default qualifier 64 bit signed integer aligned vector of 4 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(i64vec4, aligned_i64vec4, 32); + + + ///////////////////////////// + // Unsigned int vector types + + /// Low qualifier 8 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_uint8, aligned_lowp_uint8, 1); + + /// Low qualifier 16 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_uint16, aligned_lowp_uint16, 2); + + /// Low qualifier 32 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_uint32, aligned_lowp_uint32, 4); + + /// Low qualifier 64 bit unsigned integer aligned scalar type. 
+ /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_uint64, aligned_lowp_uint64, 8); + + + /// Low qualifier 8 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_uint8_t, aligned_lowp_uint8_t, 1); + + /// Low qualifier 16 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_uint16_t, aligned_lowp_uint16_t, 2); + + /// Low qualifier 32 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_uint32_t, aligned_lowp_uint32_t, 4); + + /// Low qualifier 64 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_uint64_t, aligned_lowp_uint64_t, 8); + + + /// Low qualifier 8 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_u8, aligned_lowp_u8, 1); + + /// Low qualifier 16 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_u16, aligned_lowp_u16, 2); + + /// Low qualifier 32 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_u32, aligned_lowp_u32, 4); + + /// Low qualifier 64 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(lowp_u64, aligned_lowp_u64, 8); + + + /// Medium qualifier 8 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_uint8, aligned_mediump_uint8, 1); + + /// Medium qualifier 16 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_uint16, aligned_mediump_uint16, 2); + + /// Medium qualifier 32 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_uint32, aligned_mediump_uint32, 4); + + /// Medium qualifier 64 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_uint64, aligned_mediump_uint64, 8); + + + /// Medium qualifier 8 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_uint8_t, aligned_mediump_uint8_t, 1); + + /// Medium qualifier 16 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_uint16_t, aligned_mediump_uint16_t, 2); + + /// Medium qualifier 32 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_uint32_t, aligned_mediump_uint32_t, 4); + + /// Medium qualifier 64 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_uint64_t, aligned_mediump_uint64_t, 8); + + + /// Medium qualifier 8 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_u8, aligned_mediump_u8, 1); + + /// Medium qualifier 16 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_u16, aligned_mediump_u16, 2); + + /// Medium qualifier 32 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_u32, aligned_mediump_u32, 4); + + /// Medium qualifier 64 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mediump_u64, aligned_mediump_u64, 8); + + + /// High qualifier 8 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_uint8, aligned_highp_uint8, 1); + + /// High qualifier 16 bit unsigned integer aligned scalar type. 
+ /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_uint16, aligned_highp_uint16, 2); + + /// High qualifier 32 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_uint32, aligned_highp_uint32, 4); + + /// High qualifier 64 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_uint64, aligned_highp_uint64, 8); + + + /// High qualifier 8 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_uint8_t, aligned_highp_uint8_t, 1); + + /// High qualifier 16 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_uint16_t, aligned_highp_uint16_t, 2); + + /// High qualifier 32 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_uint32_t, aligned_highp_uint32_t, 4); + + /// High qualifier 64 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_uint64_t, aligned_highp_uint64_t, 8); + + + /// High qualifier 8 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_u8, aligned_highp_u8, 1); + + /// High qualifier 16 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_u16, aligned_highp_u16, 2); + + /// High qualifier 32 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_u32, aligned_highp_u32, 4); + + /// High qualifier 64 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(highp_u64, aligned_highp_u64, 8); + + + /// Default qualifier 8 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(uint8, aligned_uint8, 1); + + /// Default qualifier 16 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(uint16, aligned_uint16, 2); + + /// Default qualifier 32 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(uint32, aligned_uint32, 4); + + /// Default qualifier 64 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(uint64, aligned_uint64, 8); + + + /// Default qualifier 8 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(uint8_t, aligned_uint8_t, 1); + + /// Default qualifier 16 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(uint16_t, aligned_uint16_t, 2); + + /// Default qualifier 32 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(uint32_t, aligned_uint32_t, 4); + + /// Default qualifier 64 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(uint64_t, aligned_uint64_t, 8); + + + /// Default qualifier 8 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u8, aligned_u8, 1); + + /// Default qualifier 16 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u16, aligned_u16, 2); + + /// Default qualifier 32 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u32, aligned_u32, 4); + + /// Default qualifier 64 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u64, aligned_u64, 8); + + + /// Default qualifier 32 bit unsigned integer aligned scalar type. 
+ /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(uvec1, aligned_uvec1, 4); + + /// Default qualifier 32 bit unsigned integer aligned vector of 2 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(uvec2, aligned_uvec2, 8); + + /// Default qualifier 32 bit unsigned integer aligned vector of 3 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(uvec3, aligned_uvec3, 16); + + /// Default qualifier 32 bit unsigned integer aligned vector of 4 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(uvec4, aligned_uvec4, 16); + + + /// Default qualifier 8 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u8vec1, aligned_u8vec1, 1); + + /// Default qualifier 8 bit unsigned integer aligned vector of 2 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u8vec2, aligned_u8vec2, 2); + + /// Default qualifier 8 bit unsigned integer aligned vector of 3 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u8vec3, aligned_u8vec3, 4); + + /// Default qualifier 8 bit unsigned integer aligned vector of 4 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u8vec4, aligned_u8vec4, 4); + + + /// Default qualifier 16 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u16vec1, aligned_u16vec1, 2); + + /// Default qualifier 16 bit unsigned integer aligned vector of 2 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u16vec2, aligned_u16vec2, 4); + + /// Default qualifier 16 bit unsigned integer aligned vector of 3 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u16vec3, aligned_u16vec3, 8); + + /// Default qualifier 16 bit unsigned integer aligned vector of 4 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u16vec4, aligned_u16vec4, 8); + + + /// Default qualifier 32 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u32vec1, aligned_u32vec1, 4); + + /// Default qualifier 32 bit unsigned integer aligned vector of 2 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u32vec2, aligned_u32vec2, 8); + + /// Default qualifier 32 bit unsigned integer aligned vector of 3 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u32vec3, aligned_u32vec3, 16); + + /// Default qualifier 32 bit unsigned integer aligned vector of 4 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u32vec4, aligned_u32vec4, 16); + + + /// Default qualifier 64 bit unsigned integer aligned scalar type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u64vec1, aligned_u64vec1, 8); + + /// Default qualifier 64 bit unsigned integer aligned vector of 2 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u64vec2, aligned_u64vec2, 16); + + /// Default qualifier 64 bit unsigned integer aligned vector of 3 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u64vec3, aligned_u64vec3, 32); + + /// Default qualifier 64 bit unsigned integer aligned vector of 4 components type. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(u64vec4, aligned_u64vec4, 32); + + + ////////////////////// + // Float vector types + + /// 32 bit single-qualifier floating-point aligned scalar. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(float32, aligned_float32, 4); + + /// 32 bit single-qualifier floating-point aligned scalar. 
+ /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(float32_t, aligned_float32_t, 4); + + /// 32 bit single-qualifier floating-point aligned scalar. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(float32, aligned_f32, 4); + +# ifndef GLM_FORCE_SINGLE_ONLY + + /// 64 bit double-qualifier floating-point aligned scalar. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(float64, aligned_float64, 8); + + /// 64 bit double-qualifier floating-point aligned scalar. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(float64_t, aligned_float64_t, 8); + + /// 64 bit double-qualifier floating-point aligned scalar. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(float64, aligned_f64, 8); + +# endif//GLM_FORCE_SINGLE_ONLY + + + /// Single-qualifier floating-point aligned vector of 1 component. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(vec1, aligned_vec1, 4); + + /// Single-qualifier floating-point aligned vector of 2 components. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(vec2, aligned_vec2, 8); + + /// Single-qualifier floating-point aligned vector of 3 components. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(vec3, aligned_vec3, 16); + + /// Single-qualifier floating-point aligned vector of 4 components. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(vec4, aligned_vec4, 16); + + + /// Single-qualifier floating-point aligned vector of 1 component. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fvec1, aligned_fvec1, 4); + + /// Single-qualifier floating-point aligned vector of 2 components. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fvec2, aligned_fvec2, 8); + + /// Single-qualifier floating-point aligned vector of 3 components. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fvec3, aligned_fvec3, 16); + + /// Single-qualifier floating-point aligned vector of 4 components. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fvec4, aligned_fvec4, 16); + + + /// Single-qualifier floating-point aligned vector of 1 component. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32vec1, aligned_f32vec1, 4); + + /// Single-qualifier floating-point aligned vector of 2 components. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32vec2, aligned_f32vec2, 8); + + /// Single-qualifier floating-point aligned vector of 3 components. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32vec3, aligned_f32vec3, 16); + + /// Single-qualifier floating-point aligned vector of 4 components. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32vec4, aligned_f32vec4, 16); + + + /// Double-qualifier floating-point aligned vector of 1 component. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(dvec1, aligned_dvec1, 8); + + /// Double-qualifier floating-point aligned vector of 2 components. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(dvec2, aligned_dvec2, 16); + + /// Double-qualifier floating-point aligned vector of 3 components. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(dvec3, aligned_dvec3, 32); + + /// Double-qualifier floating-point aligned vector of 4 components. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(dvec4, aligned_dvec4, 32); + + +# ifndef GLM_FORCE_SINGLE_ONLY + + /// Double-qualifier floating-point aligned vector of 1 component. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f64vec1, aligned_f64vec1, 8); + + /// Double-qualifier floating-point aligned vector of 2 components. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f64vec2, aligned_f64vec2, 16); + + /// Double-qualifier floating-point aligned vector of 3 components. 
+ /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f64vec3, aligned_f64vec3, 32); + + /// Double-qualifier floating-point aligned vector of 4 components. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f64vec4, aligned_f64vec4, 32); + +# endif//GLM_FORCE_SINGLE_ONLY + + ////////////////////// + // Float matrix types + + /// Single-qualifier floating-point aligned 1x1 matrix. + /// @see gtx_type_aligned + //typedef detail::tmat1 mat1; + + /// Single-qualifier floating-point aligned 2x2 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mat2, aligned_mat2, 16); + + /// Single-qualifier floating-point aligned 3x3 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mat3, aligned_mat3, 16); + + /// Single-qualifier floating-point aligned 4x4 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mat4, aligned_mat4, 16); + + + /// Single-qualifier floating-point aligned 1x1 matrix. + /// @see gtx_type_aligned + //typedef detail::tmat1x1 mat1; + + /// Single-qualifier floating-point aligned 2x2 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mat2x2, aligned_mat2x2, 16); + + /// Single-qualifier floating-point aligned 3x3 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mat3x3, aligned_mat3x3, 16); + + /// Single-qualifier floating-point aligned 4x4 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(mat4x4, aligned_mat4x4, 16); + + + /// Single-qualifier floating-point aligned 1x1 matrix. + /// @see gtx_type_aligned + //typedef detail::tmat1x1 fmat1; + + /// Single-qualifier floating-point aligned 2x2 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fmat2x2, aligned_fmat2, 16); + + /// Single-qualifier floating-point aligned 3x3 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fmat3x3, aligned_fmat3, 16); + + /// Single-qualifier floating-point aligned 4x4 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fmat4x4, aligned_fmat4, 16); + + + /// Single-qualifier floating-point aligned 1x1 matrix. + /// @see gtx_type_aligned + //typedef f32 fmat1x1; + + /// Single-qualifier floating-point aligned 2x2 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fmat2x2, aligned_fmat2x2, 16); + + /// Single-qualifier floating-point aligned 2x3 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fmat2x3, aligned_fmat2x3, 16); + + /// Single-qualifier floating-point aligned 2x4 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fmat2x4, aligned_fmat2x4, 16); + + /// Single-qualifier floating-point aligned 3x2 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fmat3x2, aligned_fmat3x2, 16); + + /// Single-qualifier floating-point aligned 3x3 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fmat3x3, aligned_fmat3x3, 16); + + /// Single-qualifier floating-point aligned 3x4 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fmat3x4, aligned_fmat3x4, 16); + + /// Single-qualifier floating-point aligned 4x2 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fmat4x2, aligned_fmat4x2, 16); + + /// Single-qualifier floating-point aligned 4x3 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fmat4x3, aligned_fmat4x3, 16); + + /// Single-qualifier floating-point aligned 4x4 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(fmat4x4, aligned_fmat4x4, 16); + + + /// Single-qualifier floating-point aligned 1x1 matrix. + /// @see gtx_type_aligned + //typedef detail::tmat1x1 f32mat1; + + /// Single-qualifier floating-point aligned 2x2 matrix. 
+ /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32mat2x2, aligned_f32mat2, 16); + + /// Single-qualifier floating-point aligned 3x3 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32mat3x3, aligned_f32mat3, 16); + + /// Single-qualifier floating-point aligned 4x4 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32mat4x4, aligned_f32mat4, 16); + + + /// Single-qualifier floating-point aligned 1x1 matrix. + /// @see gtx_type_aligned + //typedef f32 f32mat1x1; + + /// Single-qualifier floating-point aligned 2x2 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32mat2x2, aligned_f32mat2x2, 16); + + /// Single-qualifier floating-point aligned 2x3 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32mat2x3, aligned_f32mat2x3, 16); + + /// Single-qualifier floating-point aligned 2x4 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32mat2x4, aligned_f32mat2x4, 16); + + /// Single-qualifier floating-point aligned 3x2 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32mat3x2, aligned_f32mat3x2, 16); + + /// Single-qualifier floating-point aligned 3x3 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32mat3x3, aligned_f32mat3x3, 16); + + /// Single-qualifier floating-point aligned 3x4 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32mat3x4, aligned_f32mat3x4, 16); + + /// Single-qualifier floating-point aligned 4x2 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32mat4x2, aligned_f32mat4x2, 16); + + /// Single-qualifier floating-point aligned 4x3 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32mat4x3, aligned_f32mat4x3, 16); + + /// Single-qualifier floating-point aligned 4x4 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f32mat4x4, aligned_f32mat4x4, 16); + + +# ifndef GLM_FORCE_SINGLE_ONLY + + /// Double-qualifier floating-point aligned 1x1 matrix. + /// @see gtx_type_aligned + //typedef detail::tmat1x1 f64mat1; + + /// Double-qualifier floating-point aligned 2x2 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f64mat2x2, aligned_f64mat2, 32); + + /// Double-qualifier floating-point aligned 3x3 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f64mat3x3, aligned_f64mat3, 32); + + /// Double-qualifier floating-point aligned 4x4 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f64mat4x4, aligned_f64mat4, 32); + + + /// Double-qualifier floating-point aligned 1x1 matrix. + /// @see gtx_type_aligned + //typedef f64 f64mat1x1; + + /// Double-qualifier floating-point aligned 2x2 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f64mat2x2, aligned_f64mat2x2, 32); + + /// Double-qualifier floating-point aligned 2x3 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f64mat2x3, aligned_f64mat2x3, 32); + + /// Double-qualifier floating-point aligned 2x4 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f64mat2x4, aligned_f64mat2x4, 32); + + /// Double-qualifier floating-point aligned 3x2 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f64mat3x2, aligned_f64mat3x2, 32); + + /// Double-qualifier floating-point aligned 3x3 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f64mat3x3, aligned_f64mat3x3, 32); + + /// Double-qualifier floating-point aligned 3x4 matrix. + /// @see gtx_type_aligned + GLM_ALIGNED_TYPEDEF(f64mat3x4, aligned_f64mat3x4, 32); + + /// Double-qualifier floating-point aligned 4x2 matrix. 
+	/// @see gtx_type_aligned
+	GLM_ALIGNED_TYPEDEF(f64mat4x2, aligned_f64mat4x2, 32);
+
+	/// Double-qualifier floating-point aligned 4x3 matrix.
+	/// @see gtx_type_aligned
+	GLM_ALIGNED_TYPEDEF(f64mat4x3, aligned_f64mat4x3, 32);
+
+	/// Double-qualifier floating-point aligned 4x4 matrix.
+	/// @see gtx_type_aligned
+	GLM_ALIGNED_TYPEDEF(f64mat4x4, aligned_f64mat4x4, 32);
+
+#	endif//GLM_FORCE_SINGLE_ONLY
+
+
+	//////////////////////////
+	// Quaternion types
+
+	/// Single-qualifier floating-point aligned quaternion.
+	/// @see gtx_type_aligned
+	GLM_ALIGNED_TYPEDEF(quat, aligned_quat, 16);
+
+	/// Single-qualifier floating-point aligned quaternion.
+	/// @see gtx_type_aligned
+	GLM_ALIGNED_TYPEDEF(quat, aligned_fquat, 16);
+
+	/// Double-qualifier floating-point aligned quaternion.
+	/// @see gtx_type_aligned
+	GLM_ALIGNED_TYPEDEF(dquat, aligned_dquat, 32);
+
+	/// Single-qualifier floating-point aligned quaternion.
+	/// @see gtx_type_aligned
+	GLM_ALIGNED_TYPEDEF(f32quat, aligned_f32quat, 16);
+
+#	ifndef GLM_FORCE_SINGLE_ONLY
+
+	/// Double-qualifier floating-point aligned quaternion.
+	/// @see gtx_type_aligned
+	GLM_ALIGNED_TYPEDEF(f64quat, aligned_f64quat, 32);
+
+#	endif//GLM_FORCE_SINGLE_ONLY
+
+	/// @}
+}//namespace glm
+
+#include "type_aligned.inl"
diff --git a/MacroLibX/third_party/glm/gtx/type_aligned.inl b/MacroLibX/third_party/glm/gtx/type_aligned.inl
new file mode 100644
index 0000000..54c1b81
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/type_aligned.inl
@@ -0,0 +1,6 @@
+/// @ref gtc_type_aligned
+
+namespace glm
+{
+
+}
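Every `GLM_ALIGNED_TYPEDEF` above follows one pattern: the third argument is the byte alignment forced onto an otherwise unchanged type. A minimal sketch of what that guarantees, hypothetical and not part of the diff (assumes the vendored `glm` directory is on the include path and a compiler that honours GLM's alignment attributes, as GCC, Clang, and MSVC do):

```cpp
// Hypothetical sketch, not part of the diff: what the aligned typedefs give you.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/type_aligned.hpp>

// The third macro argument becomes the type's alignment; layout and component
// access stay those of the base type.
static_assert(alignof(glm::aligned_vec4) == 16, "vec4 forced to 16-byte alignment");
static_assert(alignof(glm::aligned_dquat) == 32, "dquat forced to 32-byte alignment");
static_assert(sizeof(glm::aligned_vec4) == sizeof(glm::vec4), "same layout as the base type");

int main()
{
	glm::aligned_vec4 a(1.0f, 2.0f, 3.0f, 4.0f);
	glm::vec4 b = a; // aligned and plain variants interconvert freely
	return b.w == 4.0f ? 0 : 1;
}
```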
diff --git a/MacroLibX/third_party/glm/gtx/type_trait.hpp b/MacroLibX/third_party/glm/gtx/type_trait.hpp
new file mode 100644
index 0000000..56685c8
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/type_trait.hpp
@@ -0,0 +1,85 @@
+/// @ref gtx_type_trait
+/// @file glm/gtx/type_trait.hpp
+///
+/// @see core (dependence)
+///
+/// @defgroup gtx_type_trait GLM_GTX_type_trait
+/// @ingroup gtx
+///
+/// Include to use the features of this extension.
+///
+/// Defines traits for each type.
+
+#pragma once
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	ifndef GLM_ENABLE_EXPERIMENTAL
+#		pragma message("GLM: GLM_GTX_type_trait is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.")
+#	else
+#		pragma message("GLM: GLM_GTX_type_trait extension included")
+#	endif
+#endif
+
+// Dependency:
+#include "../detail/qualifier.hpp"
+#include "../gtc/quaternion.hpp"
+#include "../gtx/dual_quaternion.hpp"
+
+namespace glm
+{
+	/// @addtogroup gtx_type_trait
+	/// @{
+
+	template<typename T>
+	struct type
+	{
+		static bool const is_vec = false;
+		static bool const is_mat = false;
+		static bool const is_quat = false;
+		static length_t const components = 0;
+		static length_t const cols = 0;
+		static length_t const rows = 0;
+	};
+
+	template<length_t L, typename T, qualifier Q>
+	struct type<vec<L, T, Q> >
+	{
+		static bool const is_vec = true;
+		static bool const is_mat = false;
+		static bool const is_quat = false;
+		static length_t const components = L;
+	};
+
+	template<length_t C, length_t R, typename T, qualifier Q>
+	struct type<mat<C, R, T, Q> >
+	{
+		static bool const is_vec = false;
+		static bool const is_mat = true;
+		static bool const is_quat = false;
+		static length_t const components = C;
+		static length_t const cols = C;
+		static length_t const rows = R;
+	};
+
+	template<typename T, qualifier Q>
+	struct type<qua<T, Q> >
+	{
+		static bool const is_vec = false;
+		static bool const is_mat = false;
+		static bool const is_quat = true;
+		static length_t const components = 4;
+	};
+
+	template<typename T, qualifier Q>
+	struct type<tdualquat<T, Q> >
+	{
+		static bool const is_vec = false;
+		static bool const is_mat = false;
+		static bool const is_quat = true;
+		static length_t const components = 8;
+	};
+
+	/// @}
+}//namespace glm
+
+#include "type_trait.inl"
diff --git a/MacroLibX/third_party/glm/gtx/type_trait.inl b/MacroLibX/third_party/glm/gtx/type_trait.inl
new file mode 100644
index 0000000..045de95
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/type_trait.inl
@@ -0,0 +1,61 @@
+/// @ref gtx_type_trait
+
+namespace glm
+{
+	template<typename T>
+	bool const type<T>::is_vec;
+	template<typename T>
+	bool const type<T>::is_mat;
+	template<typename T>
+	bool const type<T>::is_quat;
+	template<typename T>
+	length_t const type<T>::components;
+	template<typename T>
+	length_t const type<T>::cols;
+	template<typename T>
+	length_t const type<T>::rows;
+
+	// vec
+	template<length_t L, typename T, qualifier Q>
+	bool const type<vec<L, T, Q> >::is_vec;
+	template<length_t L, typename T, qualifier Q>
+	bool const type<vec<L, T, Q> >::is_mat;
+	template<length_t L, typename T, qualifier Q>
+	bool const type<vec<L, T, Q> >::is_quat;
+	template<length_t L, typename T, qualifier Q>
+	length_t const type<vec<L, T, Q> >::components;
+
+	// mat
+	template<length_t C, length_t R, typename T, qualifier Q>
+	bool const type<mat<C, R, T, Q> >::is_vec;
+	template<length_t C, length_t R, typename T, qualifier Q>
+	bool const type<mat<C, R, T, Q> >::is_mat;
+	template<length_t C, length_t R, typename T, qualifier Q>
+	bool const type<mat<C, R, T, Q> >::is_quat;
+	template<length_t C, length_t R, typename T, qualifier Q>
+	length_t const type<mat<C, R, T, Q> >::components;
+	template<length_t C, length_t R, typename T, qualifier Q>
+	length_t const type<mat<C, R, T, Q> >::cols;
+	template<length_t C, length_t R, typename T, qualifier Q>
+	length_t const type<mat<C, R, T, Q> >::rows;
+
+	// tquat
+	template<typename T, qualifier Q>
+	bool const type<qua<T, Q> >::is_vec;
+	template<typename T, qualifier Q>
+	bool const type<qua<T, Q> >::is_mat;
+	template<typename T, qualifier Q>
+	bool const type<qua<T, Q> >::is_quat;
+	template<typename T, qualifier Q>
+	length_t const type<qua<T, Q> >::components;
+
+	// tdualquat
+	template<typename T, qualifier Q>
+	bool const type<tdualquat<T, Q> >::is_vec;
+	template<typename T, qualifier Q>
+	bool const type<tdualquat<T, Q> >::is_mat;
+	template<typename T, qualifier Q>
+	bool const type<tdualquat<T, Q> >::is_quat;
+	template<typename T, qualifier Q>
+	length_t const type<tdualquat<T, Q> >::components;
+}//namespace glm
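Taken together, the primary template and the four partial specializations make `glm::type<T>` a tiny compile-time reflection utility: the primary template answers "no" for everything, and each specialization reports the shape of the GLM type it matches. A hypothetical sanity check, not part of the diff (the `static_assert`s simply mirror the member values declared above):

```cpp
// Hypothetical sketch, not part of the diff: exercising GLM_GTX_type_trait.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/type_trait.hpp>

// Primary template: a scalar is none of vec/mat/quat.
static_assert(!glm::type<float>::is_vec && !glm::type<float>::is_mat, "scalar");
// vec specialization: components comes from the length parameter L.
static_assert(glm::type<glm::vec3>::is_vec && glm::type<glm::vec3>::components == 3, "vec3");
// mat specialization: components == cols, plus separate cols/rows.
static_assert(glm::type<glm::mat4x3>::is_mat
	&& glm::type<glm::mat4x3>::cols == 4
	&& glm::type<glm::mat4x3>::rows == 3, "mat4x3");
// Quaternions and dual quaternions report 4 and 8 components respectively.
static_assert(glm::type<glm::quat>::is_quat && glm::type<glm::quat>::components == 4, "quat");
static_assert(glm::type<glm::dualquat>::components == 8, "dualquat");

int main() { return 0; }
```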
diff --git a/MacroLibX/third_party/glm/gtx/vec_swizzle.hpp b/MacroLibX/third_party/glm/gtx/vec_swizzle.hpp
new file mode 100644
index 0000000..1c49abc
--- /dev/null
+++ b/MacroLibX/third_party/glm/gtx/vec_swizzle.hpp
@@ -0,0 +1,2782 @@
+/// @ref gtx_vec_swizzle
+/// @file glm/gtx/vec_swizzle.hpp
+///
+/// @see core (dependence)
+///
+/// @defgroup gtx_vec_swizzle GLM_GTX_vec_swizzle
+/// @ingroup gtx
+///
+/// Include to use the features of this extension.
+///
+/// Functions to perform swizzle operation.
+
+#pragma once
+
+#include "../glm.hpp"
+
+#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
+#	ifndef GLM_ENABLE_EXPERIMENTAL
+#		pragma message("GLM: GLM_GTX_vec_swizzle is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.")
+#	else
+#		pragma message("GLM: GLM_GTX_vec_swizzle extension included")
+#	endif
+#endif
+
+namespace glm {
+	// xx
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> xx(const glm::vec<1, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.x, v.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> xx(const glm::vec<2, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.x, v.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> xx(const glm::vec<3, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.x, v.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> xx(const glm::vec<4, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.x, v.x);
+	}
+
+	// xy
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> xy(const glm::vec<2, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.x, v.y);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> xy(const glm::vec<3, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.x, v.y);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> xy(const glm::vec<4, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.x, v.y);
+	}
+
+	// xz
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> xz(const glm::vec<3, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.x, v.z);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> xz(const glm::vec<4, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.x, v.z);
+	}
+
+	// xw
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> xw(const glm::vec<4, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.x, v.w);
+	}
+
+	// yx
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> yx(const glm::vec<2, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.y, v.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> yx(const glm::vec<3, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.y, v.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> yx(const glm::vec<4, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.y, v.x);
+	}
+
+	// yy
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> yy(const glm::vec<2, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.y, v.y);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> yy(const glm::vec<3, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.y, v.y);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> yy(const glm::vec<4, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.y, v.y);
+	}
+
+	// yz
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> yz(const glm::vec<3, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.y, v.z);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> yz(const glm::vec<4, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.y, v.z);
+	}
+
+	// yw
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> yw(const glm::vec<4, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.y, v.w);
+	}
+
+	// zx
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> zx(const glm::vec<3, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.z, v.x);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> zx(const glm::vec<4, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.z, v.x);
+	}
+
+	// zy
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> zy(const glm::vec<3, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.z, v.y);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> zy(const glm::vec<4, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.z, v.y);
+	}
+
+	// zz
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> zz(const glm::vec<3, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.z, v.z);
+	}
+
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q> zz(const glm::vec<4, T, Q> &v) {
+		return glm::vec<2, T, Q>(v.z, v.z);
+	}
+
+	// zw
+	template<typename T, qualifier Q>
+	GLM_INLINE glm::vec<2, T, Q>
zw(const glm::vec<4, T, Q> &v) { + return glm::vec<2, T, Q>(v.z, v.w); + } + + // wx + template + GLM_INLINE glm::vec<2, T, Q> wx(const glm::vec<4, T, Q> &v) { + return glm::vec<2, T, Q>(v.w, v.x); + } + + // wy + template + GLM_INLINE glm::vec<2, T, Q> wy(const glm::vec<4, T, Q> &v) { + return glm::vec<2, T, Q>(v.w, v.y); + } + + // wz + template + GLM_INLINE glm::vec<2, T, Q> wz(const glm::vec<4, T, Q> &v) { + return glm::vec<2, T, Q>(v.w, v.z); + } + + // ww + template + GLM_INLINE glm::vec<2, T, Q> ww(const glm::vec<4, T, Q> &v) { + return glm::vec<2, T, Q>(v.w, v.w); + } + + // xxx + template + GLM_INLINE glm::vec<3, T, Q> xxx(const glm::vec<1, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.x, v.x); + } + + template + GLM_INLINE glm::vec<3, T, Q> xxx(const glm::vec<2, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.x, v.x); + } + + template + GLM_INLINE glm::vec<3, T, Q> xxx(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.x, v.x); + } + + template + GLM_INLINE glm::vec<3, T, Q> xxx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.x, v.x); + } + + // xxy + template + GLM_INLINE glm::vec<3, T, Q> xxy(const glm::vec<2, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.x, v.y); + } + + template + GLM_INLINE glm::vec<3, T, Q> xxy(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.x, v.y); + } + + template + GLM_INLINE glm::vec<3, T, Q> xxy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.x, v.y); + } + + // xxz + template + GLM_INLINE glm::vec<3, T, Q> xxz(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.x, v.z); + } + + template + GLM_INLINE glm::vec<3, T, Q> xxz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.x, v.z); + } + + // xxw + template + GLM_INLINE glm::vec<3, T, Q> xxw(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.x, v.w); + } + + // xyx + template + GLM_INLINE glm::vec<3, T, Q> xyx(const glm::vec<2, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.y, v.x); + } + + template + GLM_INLINE glm::vec<3, T, Q> xyx(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.y, v.x); + } + + template + GLM_INLINE glm::vec<3, T, Q> xyx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.y, v.x); + } + + // xyy + template + GLM_INLINE glm::vec<3, T, Q> xyy(const glm::vec<2, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.y, v.y); + } + + template + GLM_INLINE glm::vec<3, T, Q> xyy(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.y, v.y); + } + + template + GLM_INLINE glm::vec<3, T, Q> xyy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.y, v.y); + } + + // xyz + template + GLM_INLINE glm::vec<3, T, Q> xyz(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.y, v.z); + } + + template + GLM_INLINE glm::vec<3, T, Q> xyz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.y, v.z); + } + + // xyw + template + GLM_INLINE glm::vec<3, T, Q> xyw(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.y, v.w); + } + + // xzx + template + GLM_INLINE glm::vec<3, T, Q> xzx(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.z, v.x); + } + + template + GLM_INLINE glm::vec<3, T, Q> xzx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.z, v.x); + } + + // xzy + template + GLM_INLINE glm::vec<3, T, Q> xzy(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.z, v.y); + } + + template + GLM_INLINE glm::vec<3, T, Q> xzy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.z, v.y); + } + + // 
xzz + template + GLM_INLINE glm::vec<3, T, Q> xzz(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.z, v.z); + } + + template + GLM_INLINE glm::vec<3, T, Q> xzz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.z, v.z); + } + + // xzw + template + GLM_INLINE glm::vec<3, T, Q> xzw(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.z, v.w); + } + + // xwx + template + GLM_INLINE glm::vec<3, T, Q> xwx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.w, v.x); + } + + // xwy + template + GLM_INLINE glm::vec<3, T, Q> xwy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.w, v.y); + } + + // xwz + template + GLM_INLINE glm::vec<3, T, Q> xwz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.w, v.z); + } + + // xww + template + GLM_INLINE glm::vec<3, T, Q> xww(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.x, v.w, v.w); + } + + // yxx + template + GLM_INLINE glm::vec<3, T, Q> yxx(const glm::vec<2, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.x, v.x); + } + + template + GLM_INLINE glm::vec<3, T, Q> yxx(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.x, v.x); + } + + template + GLM_INLINE glm::vec<3, T, Q> yxx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.x, v.x); + } + + // yxy + template + GLM_INLINE glm::vec<3, T, Q> yxy(const glm::vec<2, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.x, v.y); + } + + template + GLM_INLINE glm::vec<3, T, Q> yxy(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.x, v.y); + } + + template + GLM_INLINE glm::vec<3, T, Q> yxy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.x, v.y); + } + + // yxz + template + GLM_INLINE glm::vec<3, T, Q> yxz(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.x, v.z); + } + + template + GLM_INLINE glm::vec<3, T, Q> yxz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.x, v.z); + } + + // yxw + template + GLM_INLINE glm::vec<3, T, Q> yxw(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.x, v.w); + } + + // yyx + template + GLM_INLINE glm::vec<3, T, Q> yyx(const glm::vec<2, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.y, v.x); + } + + template + GLM_INLINE glm::vec<3, T, Q> yyx(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.y, v.x); + } + + template + GLM_INLINE glm::vec<3, T, Q> yyx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.y, v.x); + } + + // yyy + template + GLM_INLINE glm::vec<3, T, Q> yyy(const glm::vec<2, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.y, v.y); + } + + template + GLM_INLINE glm::vec<3, T, Q> yyy(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.y, v.y); + } + + template + GLM_INLINE glm::vec<3, T, Q> yyy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.y, v.y); + } + + // yyz + template + GLM_INLINE glm::vec<3, T, Q> yyz(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.y, v.z); + } + + template + GLM_INLINE glm::vec<3, T, Q> yyz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.y, v.z); + } + + // yyw + template + GLM_INLINE glm::vec<3, T, Q> yyw(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.y, v.w); + } + + // yzx + template + GLM_INLINE glm::vec<3, T, Q> yzx(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.z, v.x); + } + + template + GLM_INLINE glm::vec<3, T, Q> yzx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.z, v.x); + } + + // yzy + template + GLM_INLINE glm::vec<3, T, 
Q> yzy(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.z, v.y); + } + + template + GLM_INLINE glm::vec<3, T, Q> yzy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.z, v.y); + } + + // yzz + template + GLM_INLINE glm::vec<3, T, Q> yzz(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.z, v.z); + } + + template + GLM_INLINE glm::vec<3, T, Q> yzz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.z, v.z); + } + + // yzw + template + GLM_INLINE glm::vec<3, T, Q> yzw(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.z, v.w); + } + + // ywx + template + GLM_INLINE glm::vec<3, T, Q> ywx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.w, v.x); + } + + // ywy + template + GLM_INLINE glm::vec<3, T, Q> ywy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.w, v.y); + } + + // ywz + template + GLM_INLINE glm::vec<3, T, Q> ywz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.w, v.z); + } + + // yww + template + GLM_INLINE glm::vec<3, T, Q> yww(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.y, v.w, v.w); + } + + // zxx + template + GLM_INLINE glm::vec<3, T, Q> zxx(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.x, v.x); + } + + template + GLM_INLINE glm::vec<3, T, Q> zxx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.x, v.x); + } + + // zxy + template + GLM_INLINE glm::vec<3, T, Q> zxy(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.x, v.y); + } + + template + GLM_INLINE glm::vec<3, T, Q> zxy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.x, v.y); + } + + // zxz + template + GLM_INLINE glm::vec<3, T, Q> zxz(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.x, v.z); + } + + template + GLM_INLINE glm::vec<3, T, Q> zxz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.x, v.z); + } + + // zxw + template + GLM_INLINE glm::vec<3, T, Q> zxw(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.x, v.w); + } + + // zyx + template + GLM_INLINE glm::vec<3, T, Q> zyx(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.y, v.x); + } + + template + GLM_INLINE glm::vec<3, T, Q> zyx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.y, v.x); + } + + // zyy + template + GLM_INLINE glm::vec<3, T, Q> zyy(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.y, v.y); + } + + template + GLM_INLINE glm::vec<3, T, Q> zyy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.y, v.y); + } + + // zyz + template + GLM_INLINE glm::vec<3, T, Q> zyz(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.y, v.z); + } + + template + GLM_INLINE glm::vec<3, T, Q> zyz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.y, v.z); + } + + // zyw + template + GLM_INLINE glm::vec<3, T, Q> zyw(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.y, v.w); + } + + // zzx + template + GLM_INLINE glm::vec<3, T, Q> zzx(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.z, v.x); + } + + template + GLM_INLINE glm::vec<3, T, Q> zzx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.z, v.x); + } + + // zzy + template + GLM_INLINE glm::vec<3, T, Q> zzy(const glm::vec<3, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.z, v.y); + } + + template + GLM_INLINE glm::vec<3, T, Q> zzy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.z, v.y); + } + + // zzz + template + GLM_INLINE glm::vec<3, T, Q> zzz(const glm::vec<3, 
T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.z, v.z); + } + + template + GLM_INLINE glm::vec<3, T, Q> zzz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.z, v.z); + } + + // zzw + template + GLM_INLINE glm::vec<3, T, Q> zzw(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.z, v.w); + } + + // zwx + template + GLM_INLINE glm::vec<3, T, Q> zwx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.w, v.x); + } + + // zwy + template + GLM_INLINE glm::vec<3, T, Q> zwy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.w, v.y); + } + + // zwz + template + GLM_INLINE glm::vec<3, T, Q> zwz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.w, v.z); + } + + // zww + template + GLM_INLINE glm::vec<3, T, Q> zww(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.z, v.w, v.w); + } + + // wxx + template + GLM_INLINE glm::vec<3, T, Q> wxx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.x, v.x); + } + + // wxy + template + GLM_INLINE glm::vec<3, T, Q> wxy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.x, v.y); + } + + // wxz + template + GLM_INLINE glm::vec<3, T, Q> wxz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.x, v.z); + } + + // wxw + template + GLM_INLINE glm::vec<3, T, Q> wxw(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.x, v.w); + } + + // wyx + template + GLM_INLINE glm::vec<3, T, Q> wyx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.y, v.x); + } + + // wyy + template + GLM_INLINE glm::vec<3, T, Q> wyy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.y, v.y); + } + + // wyz + template + GLM_INLINE glm::vec<3, T, Q> wyz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.y, v.z); + } + + // wyw + template + GLM_INLINE glm::vec<3, T, Q> wyw(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.y, v.w); + } + + // wzx + template + GLM_INLINE glm::vec<3, T, Q> wzx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.z, v.x); + } + + // wzy + template + GLM_INLINE glm::vec<3, T, Q> wzy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.z, v.y); + } + + // wzz + template + GLM_INLINE glm::vec<3, T, Q> wzz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.z, v.z); + } + + // wzw + template + GLM_INLINE glm::vec<3, T, Q> wzw(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.z, v.w); + } + + // wwx + template + GLM_INLINE glm::vec<3, T, Q> wwx(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.w, v.x); + } + + // wwy + template + GLM_INLINE glm::vec<3, T, Q> wwy(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.w, v.y); + } + + // wwz + template + GLM_INLINE glm::vec<3, T, Q> wwz(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.w, v.z); + } + + // www + template + GLM_INLINE glm::vec<3, T, Q> www(const glm::vec<4, T, Q> &v) { + return glm::vec<3, T, Q>(v.w, v.w, v.w); + } + + // xxxx + template + GLM_INLINE glm::vec<4, T, Q> xxxx(const glm::vec<1, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.x, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> xxxx(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.x, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> xxxx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.x, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> xxxx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.x, v.x); + } + + // xxxy + template 
+ GLM_INLINE glm::vec<4, T, Q> xxxy(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.x, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> xxxy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.x, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> xxxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.x, v.y); + } + + // xxxz + template + GLM_INLINE glm::vec<4, T, Q> xxxz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.x, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> xxxz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.x, v.z); + } + + // xxxw + template + GLM_INLINE glm::vec<4, T, Q> xxxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.x, v.w); + } + + // xxyx + template + GLM_INLINE glm::vec<4, T, Q> xxyx(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.y, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> xxyx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.y, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> xxyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.y, v.x); + } + + // xxyy + template + GLM_INLINE glm::vec<4, T, Q> xxyy(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.y, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> xxyy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.y, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> xxyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.y, v.y); + } + + // xxyz + template + GLM_INLINE glm::vec<4, T, Q> xxyz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.y, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> xxyz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.y, v.z); + } + + // xxyw + template + GLM_INLINE glm::vec<4, T, Q> xxyw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.y, v.w); + } + + // xxzx + template + GLM_INLINE glm::vec<4, T, Q> xxzx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.z, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> xxzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.z, v.x); + } + + // xxzy + template + GLM_INLINE glm::vec<4, T, Q> xxzy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.z, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> xxzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.z, v.y); + } + + // xxzz + template + GLM_INLINE glm::vec<4, T, Q> xxzz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.z, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> xxzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.z, v.z); + } + + // xxzw + template + GLM_INLINE glm::vec<4, T, Q> xxzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.z, v.w); + } + + // xxwx + template + GLM_INLINE glm::vec<4, T, Q> xxwx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.w, v.x); + } + + // xxwy + template + GLM_INLINE glm::vec<4, T, Q> xxwy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.w, v.y); + } + + // xxwz + template + GLM_INLINE glm::vec<4, T, Q> xxwz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.w, v.z); + } + + // xxww + template + GLM_INLINE glm::vec<4, T, Q> xxww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.x, v.w, v.w); + } + + // xyxx + 
template + GLM_INLINE glm::vec<4, T, Q> xyxx(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.x, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> xyxx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.x, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> xyxx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.x, v.x); + } + + // xyxy + template + GLM_INLINE glm::vec<4, T, Q> xyxy(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.x, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> xyxy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.x, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> xyxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.x, v.y); + } + + // xyxz + template + GLM_INLINE glm::vec<4, T, Q> xyxz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.x, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> xyxz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.x, v.z); + } + + // xyxw + template + GLM_INLINE glm::vec<4, T, Q> xyxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.x, v.w); + } + + // xyyx + template + GLM_INLINE glm::vec<4, T, Q> xyyx(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.y, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> xyyx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.y, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> xyyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.y, v.x); + } + + // xyyy + template + GLM_INLINE glm::vec<4, T, Q> xyyy(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.y, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> xyyy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.y, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> xyyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.y, v.y); + } + + // xyyz + template + GLM_INLINE glm::vec<4, T, Q> xyyz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.y, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> xyyz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.y, v.z); + } + + // xyyw + template + GLM_INLINE glm::vec<4, T, Q> xyyw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.y, v.w); + } + + // xyzx + template + GLM_INLINE glm::vec<4, T, Q> xyzx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.z, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> xyzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.z, v.x); + } + + // xyzy + template + GLM_INLINE glm::vec<4, T, Q> xyzy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.z, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> xyzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.z, v.y); + } + + // xyzz + template + GLM_INLINE glm::vec<4, T, Q> xyzz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.z, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> xyzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.z, v.z); + } + + // xyzw + template + GLM_INLINE glm::vec<4, T, Q> xyzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.z, v.w); + } + + // xywx + template + GLM_INLINE glm::vec<4, T, Q> xywx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.w, v.x); + } + + // xywy + template + 
GLM_INLINE glm::vec<4, T, Q> xywy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.w, v.y); + } + + // xywz + template + GLM_INLINE glm::vec<4, T, Q> xywz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.w, v.z); + } + + // xyww + template + GLM_INLINE glm::vec<4, T, Q> xyww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.y, v.w, v.w); + } + + // xzxx + template + GLM_INLINE glm::vec<4, T, Q> xzxx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.x, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> xzxx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.x, v.x); + } + + // xzxy + template + GLM_INLINE glm::vec<4, T, Q> xzxy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.x, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> xzxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.x, v.y); + } + + // xzxz + template + GLM_INLINE glm::vec<4, T, Q> xzxz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.x, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> xzxz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.x, v.z); + } + + // xzxw + template + GLM_INLINE glm::vec<4, T, Q> xzxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.x, v.w); + } + + // xzyx + template + GLM_INLINE glm::vec<4, T, Q> xzyx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.y, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> xzyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.y, v.x); + } + + // xzyy + template + GLM_INLINE glm::vec<4, T, Q> xzyy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.y, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> xzyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.y, v.y); + } + + // xzyz + template + GLM_INLINE glm::vec<4, T, Q> xzyz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.y, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> xzyz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.y, v.z); + } + + // xzyw + template + GLM_INLINE glm::vec<4, T, Q> xzyw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.y, v.w); + } + + // xzzx + template + GLM_INLINE glm::vec<4, T, Q> xzzx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.z, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> xzzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.z, v.x); + } + + // xzzy + template + GLM_INLINE glm::vec<4, T, Q> xzzy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.z, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> xzzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.z, v.y); + } + + // xzzz + template + GLM_INLINE glm::vec<4, T, Q> xzzz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.z, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> xzzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.z, v.z); + } + + // xzzw + template + GLM_INLINE glm::vec<4, T, Q> xzzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.z, v.w); + } + + // xzwx + template + GLM_INLINE glm::vec<4, T, Q> xzwx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.w, v.x); + } + + // xzwy + template + GLM_INLINE glm::vec<4, T, Q> xzwy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.w, v.y); + } + + 
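Every group in this file repeats the shape seen above: a free function named for the component pattern, overloaded for each vector size that provides those components, returning a new vector by value. A hypothetical usage sketch, not part of the diff:

```cpp
// Hypothetical sketch, not part of the diff: GLM_GTX_vec_swizzle spells
// GLSL-style swizzles as free functions instead of member accesses.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/vec_swizzle.hpp>

int main()
{
	glm::vec4 v(1.0f, 2.0f, 3.0f, 4.0f);

	glm::vec2 a = glm::xy(v);   // (1, 2), GLSL v.xy
	glm::vec3 b = glm::zyx(v);  // (3, 2, 1), GLSL v.zyx
	glm::vec4 c = glm::xxww(v); // (1, 1, 4, 4), GLSL v.xxww

	return (a.y == 2.0f && b.z == 1.0f && c.w == 4.0f) ? 0 : 1;
}
```

Because each swizzle returns by value, the calls also compose, e.g. `glm::yx(glm::xy(v))`.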
// xzwz + template + GLM_INLINE glm::vec<4, T, Q> xzwz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.w, v.z); + } + + // xzww + template + GLM_INLINE glm::vec<4, T, Q> xzww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.z, v.w, v.w); + } + + // xwxx + template + GLM_INLINE glm::vec<4, T, Q> xwxx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.x, v.x); + } + + // xwxy + template + GLM_INLINE glm::vec<4, T, Q> xwxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.x, v.y); + } + + // xwxz + template + GLM_INLINE glm::vec<4, T, Q> xwxz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.x, v.z); + } + + // xwxw + template + GLM_INLINE glm::vec<4, T, Q> xwxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.x, v.w); + } + + // xwyx + template + GLM_INLINE glm::vec<4, T, Q> xwyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.y, v.x); + } + + // xwyy + template + GLM_INLINE glm::vec<4, T, Q> xwyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.y, v.y); + } + + // xwyz + template + GLM_INLINE glm::vec<4, T, Q> xwyz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.y, v.z); + } + + // xwyw + template + GLM_INLINE glm::vec<4, T, Q> xwyw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.y, v.w); + } + + // xwzx + template + GLM_INLINE glm::vec<4, T, Q> xwzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.z, v.x); + } + + // xwzy + template + GLM_INLINE glm::vec<4, T, Q> xwzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.z, v.y); + } + + // xwzz + template + GLM_INLINE glm::vec<4, T, Q> xwzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.z, v.z); + } + + // xwzw + template + GLM_INLINE glm::vec<4, T, Q> xwzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.z, v.w); + } + + // xwwx + template + GLM_INLINE glm::vec<4, T, Q> xwwx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.w, v.x); + } + + // xwwy + template + GLM_INLINE glm::vec<4, T, Q> xwwy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.w, v.y); + } + + // xwwz + template + GLM_INLINE glm::vec<4, T, Q> xwwz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.w, v.z); + } + + // xwww + template + GLM_INLINE glm::vec<4, T, Q> xwww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.x, v.w, v.w, v.w); + } + + // yxxx + template + GLM_INLINE glm::vec<4, T, Q> yxxx(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.x, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> yxxx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.x, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> yxxx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.x, v.x); + } + + // yxxy + template + GLM_INLINE glm::vec<4, T, Q> yxxy(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.x, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> yxxy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.x, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> yxxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.x, v.y); + } + + // yxxz + template + GLM_INLINE glm::vec<4, T, Q> yxxz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.x, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> yxxz(const glm::vec<4, T, Q> 
&v) { + return glm::vec<4, T, Q>(v.y, v.x, v.x, v.z); + } + + // yxxw + template + GLM_INLINE glm::vec<4, T, Q> yxxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.x, v.w); + } + + // yxyx + template + GLM_INLINE glm::vec<4, T, Q> yxyx(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.y, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> yxyx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.y, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> yxyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.y, v.x); + } + + // yxyy + template + GLM_INLINE glm::vec<4, T, Q> yxyy(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.y, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> yxyy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.y, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> yxyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.y, v.y); + } + + // yxyz + template + GLM_INLINE glm::vec<4, T, Q> yxyz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.y, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> yxyz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.y, v.z); + } + + // yxyw + template + GLM_INLINE glm::vec<4, T, Q> yxyw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.y, v.w); + } + + // yxzx + template + GLM_INLINE glm::vec<4, T, Q> yxzx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.z, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> yxzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.z, v.x); + } + + // yxzy + template + GLM_INLINE glm::vec<4, T, Q> yxzy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.z, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> yxzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.z, v.y); + } + + // yxzz + template + GLM_INLINE glm::vec<4, T, Q> yxzz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.z, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> yxzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.z, v.z); + } + + // yxzw + template + GLM_INLINE glm::vec<4, T, Q> yxzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.z, v.w); + } + + // yxwx + template + GLM_INLINE glm::vec<4, T, Q> yxwx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.w, v.x); + } + + // yxwy + template + GLM_INLINE glm::vec<4, T, Q> yxwy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.w, v.y); + } + + // yxwz + template + GLM_INLINE glm::vec<4, T, Q> yxwz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.w, v.z); + } + + // yxww + template + GLM_INLINE glm::vec<4, T, Q> yxww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.x, v.w, v.w); + } + + // yyxx + template + GLM_INLINE glm::vec<4, T, Q> yyxx(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.x, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> yyxx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.x, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> yyxx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.x, v.x); + } + + // yyxy + template + GLM_INLINE glm::vec<4, T, Q> yyxy(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.x, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> yyxy(const glm::vec<3, T, 
Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.x, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> yyxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.x, v.y); + } + + // yyxz + template + GLM_INLINE glm::vec<4, T, Q> yyxz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.x, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> yyxz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.x, v.z); + } + + // yyxw + template + GLM_INLINE glm::vec<4, T, Q> yyxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.x, v.w); + } + + // yyyx + template + GLM_INLINE glm::vec<4, T, Q> yyyx(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.y, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> yyyx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.y, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> yyyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.y, v.x); + } + + // yyyy + template + GLM_INLINE glm::vec<4, T, Q> yyyy(const glm::vec<2, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.y, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> yyyy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.y, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> yyyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.y, v.y); + } + + // yyyz + template + GLM_INLINE glm::vec<4, T, Q> yyyz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.y, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> yyyz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.y, v.z); + } + + // yyyw + template + GLM_INLINE glm::vec<4, T, Q> yyyw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.y, v.w); + } + + // yyzx + template + GLM_INLINE glm::vec<4, T, Q> yyzx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.z, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> yyzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.z, v.x); + } + + // yyzy + template + GLM_INLINE glm::vec<4, T, Q> yyzy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.z, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> yyzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.z, v.y); + } + + // yyzz + template + GLM_INLINE glm::vec<4, T, Q> yyzz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.z, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> yyzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.z, v.z); + } + + // yyzw + template + GLM_INLINE glm::vec<4, T, Q> yyzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.z, v.w); + } + + // yywx + template + GLM_INLINE glm::vec<4, T, Q> yywx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.w, v.x); + } + + // yywy + template + GLM_INLINE glm::vec<4, T, Q> yywy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.w, v.y); + } + + // yywz + template + GLM_INLINE glm::vec<4, T, Q> yywz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.w, v.z); + } + + // yyww + template + GLM_INLINE glm::vec<4, T, Q> yyww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.y, v.w, v.w); + } + + // yzxx + template + GLM_INLINE glm::vec<4, T, Q> yzxx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.x, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> yzxx(const glm::vec<4, 
T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.x, v.x); + } + + // yzxy + template + GLM_INLINE glm::vec<4, T, Q> yzxy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.x, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> yzxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.x, v.y); + } + + // yzxz + template + GLM_INLINE glm::vec<4, T, Q> yzxz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.x, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> yzxz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.x, v.z); + } + + // yzxw + template + GLM_INLINE glm::vec<4, T, Q> yzxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.x, v.w); + } + + // yzyx + template + GLM_INLINE glm::vec<4, T, Q> yzyx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.y, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> yzyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.y, v.x); + } + + // yzyy + template + GLM_INLINE glm::vec<4, T, Q> yzyy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.y, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> yzyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.y, v.y); + } + + // yzyz + template + GLM_INLINE glm::vec<4, T, Q> yzyz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.y, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> yzyz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.y, v.z); + } + + // yzyw + template + GLM_INLINE glm::vec<4, T, Q> yzyw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.y, v.w); + } + + // yzzx + template + GLM_INLINE glm::vec<4, T, Q> yzzx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.z, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> yzzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.z, v.x); + } + + // yzzy + template + GLM_INLINE glm::vec<4, T, Q> yzzy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.z, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> yzzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.z, v.y); + } + + // yzzz + template + GLM_INLINE glm::vec<4, T, Q> yzzz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.z, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> yzzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.z, v.z); + } + + // yzzw + template + GLM_INLINE glm::vec<4, T, Q> yzzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.z, v.w); + } + + // yzwx + template + GLM_INLINE glm::vec<4, T, Q> yzwx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.w, v.x); + } + + // yzwy + template + GLM_INLINE glm::vec<4, T, Q> yzwy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.w, v.y); + } + + // yzwz + template + GLM_INLINE glm::vec<4, T, Q> yzwz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.w, v.z); + } + + // yzww + template + GLM_INLINE glm::vec<4, T, Q> yzww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.z, v.w, v.w); + } + + // ywxx + template + GLM_INLINE glm::vec<4, T, Q> ywxx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.x, v.x); + } + + // ywxy + template + GLM_INLINE glm::vec<4, T, Q> ywxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.x, v.y); + } + + // ywxz + template + GLM_INLINE 
glm::vec<4, T, Q> ywxz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.x, v.z); + } + + // ywxw + template + GLM_INLINE glm::vec<4, T, Q> ywxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.x, v.w); + } + + // ywyx + template + GLM_INLINE glm::vec<4, T, Q> ywyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.y, v.x); + } + + // ywyy + template + GLM_INLINE glm::vec<4, T, Q> ywyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.y, v.y); + } + + // ywyz + template + GLM_INLINE glm::vec<4, T, Q> ywyz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.y, v.z); + } + + // ywyw + template + GLM_INLINE glm::vec<4, T, Q> ywyw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.y, v.w); + } + + // ywzx + template + GLM_INLINE glm::vec<4, T, Q> ywzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.z, v.x); + } + + // ywzy + template + GLM_INLINE glm::vec<4, T, Q> ywzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.z, v.y); + } + + // ywzz + template + GLM_INLINE glm::vec<4, T, Q> ywzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.z, v.z); + } + + // ywzw + template + GLM_INLINE glm::vec<4, T, Q> ywzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.z, v.w); + } + + // ywwx + template + GLM_INLINE glm::vec<4, T, Q> ywwx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.w, v.x); + } + + // ywwy + template + GLM_INLINE glm::vec<4, T, Q> ywwy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.w, v.y); + } + + // ywwz + template + GLM_INLINE glm::vec<4, T, Q> ywwz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.w, v.z); + } + + // ywww + template + GLM_INLINE glm::vec<4, T, Q> ywww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.y, v.w, v.w, v.w); + } + + // zxxx + template + GLM_INLINE glm::vec<4, T, Q> zxxx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.x, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> zxxx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.x, v.x); + } + + // zxxy + template + GLM_INLINE glm::vec<4, T, Q> zxxy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.x, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> zxxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.x, v.y); + } + + // zxxz + template + GLM_INLINE glm::vec<4, T, Q> zxxz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.x, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> zxxz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.x, v.z); + } + + // zxxw + template + GLM_INLINE glm::vec<4, T, Q> zxxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.x, v.w); + } + + // zxyx + template + GLM_INLINE glm::vec<4, T, Q> zxyx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.y, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> zxyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.y, v.x); + } + + // zxyy + template + GLM_INLINE glm::vec<4, T, Q> zxyy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.y, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> zxyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.y, v.y); + } + + // zxyz + template + GLM_INLINE glm::vec<4, T, Q> zxyz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, 
Q>(v.z, v.x, v.y, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> zxyz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.y, v.z); + } + + // zxyw + template + GLM_INLINE glm::vec<4, T, Q> zxyw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.y, v.w); + } + + // zxzx + template + GLM_INLINE glm::vec<4, T, Q> zxzx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.z, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> zxzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.z, v.x); + } + + // zxzy + template + GLM_INLINE glm::vec<4, T, Q> zxzy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.z, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> zxzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.z, v.y); + } + + // zxzz + template + GLM_INLINE glm::vec<4, T, Q> zxzz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.z, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> zxzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.z, v.z); + } + + // zxzw + template + GLM_INLINE glm::vec<4, T, Q> zxzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.z, v.w); + } + + // zxwx + template + GLM_INLINE glm::vec<4, T, Q> zxwx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.w, v.x); + } + + // zxwy + template + GLM_INLINE glm::vec<4, T, Q> zxwy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.w, v.y); + } + + // zxwz + template + GLM_INLINE glm::vec<4, T, Q> zxwz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.w, v.z); + } + + // zxww + template + GLM_INLINE glm::vec<4, T, Q> zxww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.x, v.w, v.w); + } + + // zyxx + template + GLM_INLINE glm::vec<4, T, Q> zyxx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.x, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> zyxx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.x, v.x); + } + + // zyxy + template + GLM_INLINE glm::vec<4, T, Q> zyxy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.x, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> zyxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.x, v.y); + } + + // zyxz + template + GLM_INLINE glm::vec<4, T, Q> zyxz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.x, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> zyxz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.x, v.z); + } + + // zyxw + template + GLM_INLINE glm::vec<4, T, Q> zyxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.x, v.w); + } + + // zyyx + template + GLM_INLINE glm::vec<4, T, Q> zyyx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.y, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> zyyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.y, v.x); + } + + // zyyy + template + GLM_INLINE glm::vec<4, T, Q> zyyy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.y, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> zyyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.y, v.y); + } + + // zyyz + template + GLM_INLINE glm::vec<4, T, Q> zyyz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.y, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> zyyz(const glm::vec<4, T, Q> &v) { + return 
glm::vec<4, T, Q>(v.z, v.y, v.y, v.z); + } + + // zyyw + template + GLM_INLINE glm::vec<4, T, Q> zyyw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.y, v.w); + } + + // zyzx + template + GLM_INLINE glm::vec<4, T, Q> zyzx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.z, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> zyzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.z, v.x); + } + + // zyzy + template + GLM_INLINE glm::vec<4, T, Q> zyzy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.z, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> zyzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.z, v.y); + } + + // zyzz + template + GLM_INLINE glm::vec<4, T, Q> zyzz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.z, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> zyzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.z, v.z); + } + + // zyzw + template + GLM_INLINE glm::vec<4, T, Q> zyzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.z, v.w); + } + + // zywx + template + GLM_INLINE glm::vec<4, T, Q> zywx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.w, v.x); + } + + // zywy + template + GLM_INLINE glm::vec<4, T, Q> zywy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.w, v.y); + } + + // zywz + template + GLM_INLINE glm::vec<4, T, Q> zywz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.w, v.z); + } + + // zyww + template + GLM_INLINE glm::vec<4, T, Q> zyww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.y, v.w, v.w); + } + + // zzxx + template + GLM_INLINE glm::vec<4, T, Q> zzxx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.x, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> zzxx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.x, v.x); + } + + // zzxy + template + GLM_INLINE glm::vec<4, T, Q> zzxy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.x, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> zzxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.x, v.y); + } + + // zzxz + template + GLM_INLINE glm::vec<4, T, Q> zzxz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.x, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> zzxz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.x, v.z); + } + + // zzxw + template + GLM_INLINE glm::vec<4, T, Q> zzxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.x, v.w); + } + + // zzyx + template + GLM_INLINE glm::vec<4, T, Q> zzyx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.y, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> zzyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.y, v.x); + } + + // zzyy + template + GLM_INLINE glm::vec<4, T, Q> zzyy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.y, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> zzyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.y, v.y); + } + + // zzyz + template + GLM_INLINE glm::vec<4, T, Q> zzyz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.y, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> zzyz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.y, v.z); + } + + // zzyw + template + GLM_INLINE glm::vec<4, T, Q> zzyw(const glm::vec<4, 
T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.y, v.w); + } + + // zzzx + template + GLM_INLINE glm::vec<4, T, Q> zzzx(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.z, v.x); + } + + template + GLM_INLINE glm::vec<4, T, Q> zzzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.z, v.x); + } + + // zzzy + template + GLM_INLINE glm::vec<4, T, Q> zzzy(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.z, v.y); + } + + template + GLM_INLINE glm::vec<4, T, Q> zzzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.z, v.y); + } + + // zzzz + template + GLM_INLINE glm::vec<4, T, Q> zzzz(const glm::vec<3, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.z, v.z); + } + + template + GLM_INLINE glm::vec<4, T, Q> zzzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.z, v.z); + } + + // zzzw + template + GLM_INLINE glm::vec<4, T, Q> zzzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.z, v.w); + } + + // zzwx + template + GLM_INLINE glm::vec<4, T, Q> zzwx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.w, v.x); + } + + // zzwy + template + GLM_INLINE glm::vec<4, T, Q> zzwy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.w, v.y); + } + + // zzwz + template + GLM_INLINE glm::vec<4, T, Q> zzwz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.w, v.z); + } + + // zzww + template + GLM_INLINE glm::vec<4, T, Q> zzww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.z, v.w, v.w); + } + + // zwxx + template + GLM_INLINE glm::vec<4, T, Q> zwxx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.x, v.x); + } + + // zwxy + template + GLM_INLINE glm::vec<4, T, Q> zwxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.x, v.y); + } + + // zwxz + template + GLM_INLINE glm::vec<4, T, Q> zwxz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.x, v.z); + } + + // zwxw + template + GLM_INLINE glm::vec<4, T, Q> zwxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.x, v.w); + } + + // zwyx + template + GLM_INLINE glm::vec<4, T, Q> zwyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.y, v.x); + } + + // zwyy + template + GLM_INLINE glm::vec<4, T, Q> zwyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.y, v.y); + } + + // zwyz + template + GLM_INLINE glm::vec<4, T, Q> zwyz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.y, v.z); + } + + // zwyw + template + GLM_INLINE glm::vec<4, T, Q> zwyw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.y, v.w); + } + + // zwzx + template + GLM_INLINE glm::vec<4, T, Q> zwzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.z, v.x); + } + + // zwzy + template + GLM_INLINE glm::vec<4, T, Q> zwzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.z, v.y); + } + + // zwzz + template + GLM_INLINE glm::vec<4, T, Q> zwzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.z, v.z); + } + + // zwzw + template + GLM_INLINE glm::vec<4, T, Q> zwzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.z, v.w); + } + + // zwwx + template + GLM_INLINE glm::vec<4, T, Q> zwwx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.w, v.x); + } + + // zwwy + template + GLM_INLINE glm::vec<4, T, Q> zwwy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.w, v.y); 
+ } + + // zwwz + template + GLM_INLINE glm::vec<4, T, Q> zwwz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.w, v.z); + } + + // zwww + template + GLM_INLINE glm::vec<4, T, Q> zwww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.z, v.w, v.w, v.w); + } + + // wxxx + template + GLM_INLINE glm::vec<4, T, Q> wxxx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.x, v.x); + } + + // wxxy + template + GLM_INLINE glm::vec<4, T, Q> wxxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.x, v.y); + } + + // wxxz + template + GLM_INLINE glm::vec<4, T, Q> wxxz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.x, v.z); + } + + // wxxw + template + GLM_INLINE glm::vec<4, T, Q> wxxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.x, v.w); + } + + // wxyx + template + GLM_INLINE glm::vec<4, T, Q> wxyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.y, v.x); + } + + // wxyy + template + GLM_INLINE glm::vec<4, T, Q> wxyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.y, v.y); + } + + // wxyz + template + GLM_INLINE glm::vec<4, T, Q> wxyz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.y, v.z); + } + + // wxyw + template + GLM_INLINE glm::vec<4, T, Q> wxyw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.y, v.w); + } + + // wxzx + template + GLM_INLINE glm::vec<4, T, Q> wxzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.z, v.x); + } + + // wxzy + template + GLM_INLINE glm::vec<4, T, Q> wxzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.z, v.y); + } + + // wxzz + template + GLM_INLINE glm::vec<4, T, Q> wxzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.z, v.z); + } + + // wxzw + template + GLM_INLINE glm::vec<4, T, Q> wxzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.z, v.w); + } + + // wxwx + template + GLM_INLINE glm::vec<4, T, Q> wxwx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.w, v.x); + } + + // wxwy + template + GLM_INLINE glm::vec<4, T, Q> wxwy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.w, v.y); + } + + // wxwz + template + GLM_INLINE glm::vec<4, T, Q> wxwz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.w, v.z); + } + + // wxww + template + GLM_INLINE glm::vec<4, T, Q> wxww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.x, v.w, v.w); + } + + // wyxx + template + GLM_INLINE glm::vec<4, T, Q> wyxx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.x, v.x); + } + + // wyxy + template + GLM_INLINE glm::vec<4, T, Q> wyxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.x, v.y); + } + + // wyxz + template + GLM_INLINE glm::vec<4, T, Q> wyxz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.x, v.z); + } + + // wyxw + template + GLM_INLINE glm::vec<4, T, Q> wyxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.x, v.w); + } + + // wyyx + template + GLM_INLINE glm::vec<4, T, Q> wyyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.y, v.x); + } + + // wyyy + template + GLM_INLINE glm::vec<4, T, Q> wyyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.y, v.y); + } + + // wyyz + template + GLM_INLINE glm::vec<4, T, Q> wyyz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.y, v.z); + } + + // wyyw + template + 
GLM_INLINE glm::vec<4, T, Q> wyyw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.y, v.w); + } + + // wyzx + template + GLM_INLINE glm::vec<4, T, Q> wyzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.z, v.x); + } + + // wyzy + template + GLM_INLINE glm::vec<4, T, Q> wyzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.z, v.y); + } + + // wyzz + template + GLM_INLINE glm::vec<4, T, Q> wyzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.z, v.z); + } + + // wyzw + template + GLM_INLINE glm::vec<4, T, Q> wyzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.z, v.w); + } + + // wywx + template + GLM_INLINE glm::vec<4, T, Q> wywx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.w, v.x); + } + + // wywy + template + GLM_INLINE glm::vec<4, T, Q> wywy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.w, v.y); + } + + // wywz + template + GLM_INLINE glm::vec<4, T, Q> wywz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.w, v.z); + } + + // wyww + template + GLM_INLINE glm::vec<4, T, Q> wyww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.y, v.w, v.w); + } + + // wzxx + template + GLM_INLINE glm::vec<4, T, Q> wzxx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.x, v.x); + } + + // wzxy + template + GLM_INLINE glm::vec<4, T, Q> wzxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.x, v.y); + } + + // wzxz + template + GLM_INLINE glm::vec<4, T, Q> wzxz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.x, v.z); + } + + // wzxw + template + GLM_INLINE glm::vec<4, T, Q> wzxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.x, v.w); + } + + // wzyx + template + GLM_INLINE glm::vec<4, T, Q> wzyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.y, v.x); + } + + // wzyy + template + GLM_INLINE glm::vec<4, T, Q> wzyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.y, v.y); + } + + // wzyz + template + GLM_INLINE glm::vec<4, T, Q> wzyz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.y, v.z); + } + + // wzyw + template + GLM_INLINE glm::vec<4, T, Q> wzyw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.y, v.w); + } + + // wzzx + template + GLM_INLINE glm::vec<4, T, Q> wzzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.z, v.x); + } + + // wzzy + template + GLM_INLINE glm::vec<4, T, Q> wzzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.z, v.y); + } + + // wzzz + template + GLM_INLINE glm::vec<4, T, Q> wzzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.z, v.z); + } + + // wzzw + template + GLM_INLINE glm::vec<4, T, Q> wzzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.z, v.w); + } + + // wzwx + template + GLM_INLINE glm::vec<4, T, Q> wzwx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.w, v.x); + } + + // wzwy + template + GLM_INLINE glm::vec<4, T, Q> wzwy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.w, v.y); + } + + // wzwz + template + GLM_INLINE glm::vec<4, T, Q> wzwz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.w, v.z); + } + + // wzww + template + GLM_INLINE glm::vec<4, T, Q> wzww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.z, v.w, v.w); + } + + // wwxx + template + GLM_INLINE glm::vec<4, T, Q> 
wwxx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.x, v.x); + } + + // wwxy + template + GLM_INLINE glm::vec<4, T, Q> wwxy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.x, v.y); + } + + // wwxz + template + GLM_INLINE glm::vec<4, T, Q> wwxz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.x, v.z); + } + + // wwxw + template + GLM_INLINE glm::vec<4, T, Q> wwxw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.x, v.w); + } + + // wwyx + template + GLM_INLINE glm::vec<4, T, Q> wwyx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.y, v.x); + } + + // wwyy + template + GLM_INLINE glm::vec<4, T, Q> wwyy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.y, v.y); + } + + // wwyz + template + GLM_INLINE glm::vec<4, T, Q> wwyz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.y, v.z); + } + + // wwyw + template + GLM_INLINE glm::vec<4, T, Q> wwyw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.y, v.w); + } + + // wwzx + template + GLM_INLINE glm::vec<4, T, Q> wwzx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.z, v.x); + } + + // wwzy + template + GLM_INLINE glm::vec<4, T, Q> wwzy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.z, v.y); + } + + // wwzz + template + GLM_INLINE glm::vec<4, T, Q> wwzz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.z, v.z); + } + + // wwzw + template + GLM_INLINE glm::vec<4, T, Q> wwzw(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.z, v.w); + } + + // wwwx + template + GLM_INLINE glm::vec<4, T, Q> wwwx(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.w, v.x); + } + + // wwwy + template + GLM_INLINE glm::vec<4, T, Q> wwwy(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.w, v.y); + } + + // wwwz + template + GLM_INLINE glm::vec<4, T, Q> wwwz(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.w, v.z); + } + + // wwww + template + GLM_INLINE glm::vec<4, T, Q> wwww(const glm::vec<4, T, Q> &v) { + return glm::vec<4, T, Q>(v.w, v.w, v.w, v.w); + } + +} diff --git a/MacroLibX/third_party/glm/gtx/vector_angle.hpp b/MacroLibX/third_party/glm/gtx/vector_angle.hpp new file mode 100644 index 0000000..9ae4371 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/vector_angle.hpp @@ -0,0 +1,57 @@ +/// @ref gtx_vector_angle +/// @file glm/gtx/vector_angle.hpp +/// +/// @see core (dependence) +/// @see gtx_quaternion (dependence) +/// @see gtx_epsilon (dependence) +/// +/// @defgroup gtx_vector_angle GLM_GTX_vector_angle +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Compute angle between vectors + +#pragma once + +// Dependency: +#include "../glm.hpp" +#include "../gtc/epsilon.hpp" +#include "../gtx/quaternion.hpp" +#include "../gtx/rotate_vector.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_vector_angle is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_vector_angle extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_vector_angle + /// @{ + + //! Returns the absolute angle between two vectors. + //! Parameters need to be normalized. + /// @see gtx_vector_angle extension. 
+ template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL T angle(vec<L, T, Q> const& x, vec<L, T, Q> const& y); + + //! Returns the oriented angle between two 2d vectors. + //! Parameters need to be normalized. + /// @see gtx_vector_angle extension. + template<typename T, qualifier Q> + GLM_FUNC_DECL T orientedAngle(vec<2, T, Q> const& x, vec<2, T, Q> const& y); + + //! Returns the oriented angle between two 3d vectors based from a reference axis. + //! Parameters need to be normalized. + /// @see gtx_vector_angle extension. + template<typename T, qualifier Q> + GLM_FUNC_DECL T orientedAngle(vec<3, T, Q> const& x, vec<3, T, Q> const& y, vec<3, T, Q> const& ref); + + /// @} +}// namespace glm + +#include "vector_angle.inl" diff --git a/MacroLibX/third_party/glm/gtx/vector_angle.inl b/MacroLibX/third_party/glm/gtx/vector_angle.inl new file mode 100644 index 0000000..a1f957a --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/vector_angle.inl @@ -0,0 +1,44 @@ +/// @ref gtx_vector_angle + +namespace glm +{ + template<typename genType> + GLM_FUNC_QUALIFIER genType angle + ( + genType const& x, + genType const& y + ) + { + GLM_STATIC_ASSERT(std::numeric_limits<genType>::is_iec559, "'angle' only accept floating-point inputs"); + return acos(clamp(dot(x, y), genType(-1), genType(1))); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER T angle(vec<L, T, Q> const& x, vec<L, T, Q> const& y) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'angle' only accept floating-point inputs"); + return acos(clamp(dot(x, y), T(-1), T(1))); + } + + //! \todo epsilon is hard coded to 0.01 + template<typename T, qualifier Q> + GLM_FUNC_QUALIFIER T orientedAngle(vec<2, T, Q> const& x, vec<2, T, Q> const& y) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'orientedAngle' only accept floating-point inputs"); + T const Angle(acos(clamp(dot(x, y), T(-1), T(1)))); + + if(all(epsilonEqual(y, glm::rotate(x, Angle), T(0.0001)))) + return Angle; + else + return -Angle; + } + + template<typename T, qualifier Q> + GLM_FUNC_QUALIFIER T orientedAngle(vec<3, T, Q> const& x, vec<3, T, Q> const& y, vec<3, T, Q> const& ref) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'orientedAngle' only accept floating-point inputs"); + + T const Angle(acos(clamp(dot(x, y), T(-1), T(1)))); + return mix(Angle, -Angle, dot(ref, cross(x, y)) < T(0)); + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/vector_query.hpp b/MacroLibX/third_party/glm/gtx/vector_query.hpp new file mode 100644 index 0000000..77c7b97 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/vector_query.hpp @@ -0,0 +1,66 @@ +/// @ref gtx_vector_query +/// @file glm/gtx/vector_query.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_vector_query GLM_GTX_vector_query +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Query informations of vector types + +#pragma once + +// Dependency: +#include "../glm.hpp" +#include <cfloat> +#include <limits> + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_vector_query is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_vector_query extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_vector_query + /// @{ + + //! Check whether two vectors are collinears. + /// @see gtx_vector_query extensions. + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL bool areCollinear(vec<L, T, Q> const& v0, vec<L, T, Q> const& v1, T const& epsilon); + + //! Check whether two vectors are orthogonals. + /// @see gtx_vector_query extensions.
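// --- Editor's aside: illustrative usage of gtx_vector_angle (a minimal sketch,
// not part of the vendored GLM sources). glm::angle returns the unsigned angle
// between two normalized vectors; glm::orientedAngle also encodes the winding
// sign. Both expect normalized inputs, as the doc comments above note.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/vector_angle.hpp>

float demo_vector_angle()
{
    glm::vec2 right(1.0f, 0.0f);
    glm::vec2 up(0.0f, 1.0f);
    float a  = glm::angle(right, up);          // ~pi/2, always non-negative
    float o  = glm::orientedAngle(right, up);  // +pi/2 (counter-clockwise)
    float o2 = glm::orientedAngle(up, right);  // -pi/2 (clockwise)
    return a + o + o2;
}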
+ template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL bool areOrthogonal(vec<L, T, Q> const& v0, vec<L, T, Q> const& v1, T const& epsilon); + + //! Check whether a vector is normalized. + /// @see gtx_vector_query extensions. + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL bool isNormalized(vec<L, T, Q> const& v, T const& epsilon); + + //! Check whether a vector is null. + /// @see gtx_vector_query extensions. + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL bool isNull(vec<L, T, Q> const& v, T const& epsilon); + + //! Check whether a each component of a vector is null. + /// @see gtx_vector_query extensions. + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, bool, Q> isCompNull(vec<L, T, Q> const& v, T const& epsilon); + + //! Check whether two vectors are orthonormal. + /// @see gtx_vector_query extensions. + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL bool areOrthonormal(vec<L, T, Q> const& v0, vec<L, T, Q> const& v1, T const& epsilon); + + /// @} +}// namespace glm + +#include "vector_query.inl" diff --git a/MacroLibX/third_party/glm/gtx/vector_query.inl b/MacroLibX/third_party/glm/gtx/vector_query.inl new file mode 100644 index 0000000..d1a5c9b --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/vector_query.inl @@ -0,0 +1,154 @@ +/// @ref gtx_vector_query + +#include <cassert> + +namespace glm{ +namespace detail +{ + template<length_t L, typename T, qualifier Q> + struct compute_areCollinear{}; + + template<typename T, qualifier Q> + struct compute_areCollinear<2, T, Q> + { + GLM_FUNC_QUALIFIER static bool call(vec<2, T, Q> const& v0, vec<2, T, Q> const& v1, T const& epsilon) + { + return length(cross(vec<3, T, Q>(v0, static_cast<T>(0)), vec<3, T, Q>(v1, static_cast<T>(0)))) < epsilon; + } + }; + + template<typename T, qualifier Q> + struct compute_areCollinear<3, T, Q> + { + GLM_FUNC_QUALIFIER static bool call(vec<3, T, Q> const& v0, vec<3, T, Q> const& v1, T const& epsilon) + { + return length(cross(v0, v1)) < epsilon; + } + }; + + template<typename T, qualifier Q> + struct compute_areCollinear<4, T, Q> + { + GLM_FUNC_QUALIFIER static bool call(vec<4, T, Q> const& v0, vec<4, T, Q> const& v1, T const& epsilon) + { + return length(cross(vec<3, T, Q>(v0), vec<3, T, Q>(v1))) < epsilon; + } + }; + + template<length_t L, typename T, qualifier Q> + struct compute_isCompNull{}; + + template<typename T, qualifier Q> + struct compute_isCompNull<2, T, Q> + { + GLM_FUNC_QUALIFIER static vec<2, bool, Q> call(vec<2, T, Q> const& v, T const& epsilon) + { + return vec<2, bool, Q>( + (abs(v.x) < epsilon), + (abs(v.y) < epsilon)); + } + }; + + template<typename T, qualifier Q> + struct compute_isCompNull<3, T, Q> + { + GLM_FUNC_QUALIFIER static vec<3, bool, Q> call(vec<3, T, Q> const& v, T const& epsilon) + { + return vec<3, bool, Q>( + (abs(v.x) < epsilon), + (abs(v.y) < epsilon), + (abs(v.z) < epsilon)); + } + }; + + template<typename T, qualifier Q> + struct compute_isCompNull<4, T, Q> + { + GLM_FUNC_QUALIFIER static vec<4, bool, Q> call(vec<4, T, Q> const& v, T const& epsilon) + { + return vec<4, bool, Q>( + (abs(v.x) < epsilon), + (abs(v.y) < epsilon), + (abs(v.z) < epsilon), + (abs(v.w) < epsilon)); + } + }; + +}//namespace detail + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER bool areCollinear(vec<L, T, Q> const& v0, vec<L, T, Q> const& v1, T const& epsilon) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'areCollinear' only accept floating-point inputs"); + + return detail::compute_areCollinear<L, T, Q>::call(v0, v1, epsilon); + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER bool areOrthogonal(vec<L, T, Q> const& v0, vec<L, T, Q> const& v1, T const& epsilon) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'areOrthogonal' only accept floating-point inputs"); + + return abs(dot(v0, v1)) <= max( + static_cast<T>(1), + length(v0)) * max(static_cast<T>(1), length(v1)) * epsilon; + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER bool isNormalized(vec<L, T, Q> const& v, T const& epsilon) + { + GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559, "'isNormalized' only accept floating-point
inputs"); + + return abs(length(v) - static_cast(1)) <= static_cast(2) * epsilon; + } + + template + GLM_FUNC_QUALIFIER bool isNull(vec const& v, T const& epsilon) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isNull' only accept floating-point inputs"); + + return length(v) <= epsilon; + } + + template + GLM_FUNC_QUALIFIER vec isCompNull(vec const& v, T const& epsilon) + { + GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isCompNull' only accept floating-point inputs"); + + return detail::compute_isCompNull::call(v, epsilon); + } + + template + GLM_FUNC_QUALIFIER vec<2, bool, Q> isCompNull(vec<2, T, Q> const& v, T const& epsilon) + { + return vec<2, bool, Q>( + abs(v.x) < epsilon, + abs(v.y) < epsilon); + } + + template + GLM_FUNC_QUALIFIER vec<3, bool, Q> isCompNull(vec<3, T, Q> const& v, T const& epsilon) + { + return vec<3, bool, Q>( + abs(v.x) < epsilon, + abs(v.y) < epsilon, + abs(v.z) < epsilon); + } + + template + GLM_FUNC_QUALIFIER vec<4, bool, Q> isCompNull(vec<4, T, Q> const& v, T const& epsilon) + { + return vec<4, bool, Q>( + abs(v.x) < epsilon, + abs(v.y) < epsilon, + abs(v.z) < epsilon, + abs(v.w) < epsilon); + } + + template + GLM_FUNC_QUALIFIER bool areOrthonormal(vec const& v0, vec const& v1, T const& epsilon) + { + return isNormalized(v0, epsilon) && isNormalized(v1, epsilon) && (abs(dot(v0, v1)) <= epsilon); + } + +}//namespace glm diff --git a/MacroLibX/third_party/glm/gtx/wrap.hpp b/MacroLibX/third_party/glm/gtx/wrap.hpp new file mode 100644 index 0000000..02c5196 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/wrap.hpp @@ -0,0 +1,55 @@ +/// @ref gtx_wrap +/// @file glm/gtx/wrap.hpp +/// +/// @see core (dependence) +/// +/// @defgroup gtx_wrap GLM_GTX_wrap +/// @ingroup gtx +/// +/// Include to use the features of this extension. +/// +/// Wrapping mode of texture coordinates. + +#pragma once + +// Dependency: +#include "../glm.hpp" +#include "../gtc/vec1.hpp" + +#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) +# ifndef GLM_ENABLE_EXPERIMENTAL +# pragma message("GLM: GLM_GTX_wrap is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") +# else +# pragma message("GLM: GLM_GTX_wrap extension included") +# endif +#endif + +namespace glm +{ + /// @addtogroup gtx_wrap + /// @{ + + /// Simulate GL_CLAMP OpenGL wrap mode + /// @see gtx_wrap extension. + template + GLM_FUNC_DECL genType clamp(genType const& Texcoord); + + /// Simulate GL_REPEAT OpenGL wrap mode + /// @see gtx_wrap extension. + template + GLM_FUNC_DECL genType repeat(genType const& Texcoord); + + /// Simulate GL_MIRRORED_REPEAT OpenGL wrap mode + /// @see gtx_wrap extension. + template + GLM_FUNC_DECL genType mirrorClamp(genType const& Texcoord); + + /// Simulate GL_MIRROR_REPEAT OpenGL wrap mode + /// @see gtx_wrap extension. 
+ template<typename genType> + GLM_FUNC_DECL genType mirrorRepeat(genType const& Texcoord); + + /// @} +}// namespace glm + +#include "wrap.inl" diff --git a/MacroLibX/third_party/glm/gtx/wrap.inl b/MacroLibX/third_party/glm/gtx/wrap.inl new file mode 100644 index 0000000..409a316 --- /dev/null +++ b/MacroLibX/third_party/glm/gtx/wrap.inl @@ -0,0 +1,57 @@ +/// @ref gtx_wrap + +namespace glm +{ + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> clamp(vec<L, T, Q> const& Texcoord) + { + return glm::clamp(Texcoord, vec<L, T, Q>(0), vec<L, T, Q>(1)); + } + + template<typename genType> + GLM_FUNC_QUALIFIER genType clamp(genType const& Texcoord) + { + return clamp(vec<1, genType, defaultp>(Texcoord)).x; + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> repeat(vec<L, T, Q> const& Texcoord) + { + return glm::fract(Texcoord); + } + + template<typename genType> + GLM_FUNC_QUALIFIER genType repeat(genType const& Texcoord) + { + return repeat(vec<1, genType, defaultp>(Texcoord)).x; + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> mirrorClamp(vec<L, T, Q> const& Texcoord) + { + return glm::fract(glm::abs(Texcoord)); + } + + template<typename genType> + GLM_FUNC_QUALIFIER genType mirrorClamp(genType const& Texcoord) + { + return mirrorClamp(vec<1, genType, defaultp>(Texcoord)).x; + } + + template<length_t L, typename T, qualifier Q> + GLM_FUNC_QUALIFIER vec<L, T, Q> mirrorRepeat(vec<L, T, Q> const& Texcoord) + { + vec<L, T, Q> const Abs = glm::abs(Texcoord); + vec<L, T, Q> const Clamp = glm::mod(glm::floor(Abs), vec<L, T, Q>(2)); + vec<L, T, Q> const Floor = glm::floor(Abs); + vec<L, T, Q> const Rest = Abs - Floor; + vec<L, T, Q> const Mirror = Clamp + Rest; + return mix(Rest, vec<L, T, Q>(1) - Rest, glm::greaterThanEqual(Mirror, vec<L, T, Q>(1))); + } + + template<typename genType> + GLM_FUNC_QUALIFIER genType mirrorRepeat(genType const& Texcoord) + { + return mirrorRepeat(vec<1, genType, defaultp>(Texcoord)).x; + } +}//namespace glm diff --git a/MacroLibX/third_party/glm/integer.hpp b/MacroLibX/third_party/glm/integer.hpp new file mode 100644 index 0000000..8817db3 --- /dev/null +++ b/MacroLibX/third_party/glm/integer.hpp @@ -0,0 +1,212 @@ +/// @ref core +/// @file glm/integer.hpp +/// +/// @see GLSL 4.20.8 specification, section 8.8 Integer Functions +/// +/// @defgroup core_func_integer Integer functions +/// @ingroup core +/// +/// Provides GLSL functions on integer types +/// +/// These all operate component-wise. The description is per component. +/// The notation [a, b] means the set of bits from bit-number a through bit-number +/// b, inclusive. The lowest-order bit is bit 0. +/// +/// Include to use these core features. + +#pragma once + +#include "detail/qualifier.hpp" +#include "common.hpp" +#include "vector_relational.hpp" + +namespace glm +{ + /// @addtogroup core_func_integer + /// @{ + + /// Adds 32-bit unsigned integer x and y, returning the sum + /// modulo pow(2, 32). The value carry is set to 0 if the sum was + /// less than pow(2, 32), or to 1 otherwise. + /// + /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. + /// + /// @see GLSL uaddCarry man page + /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions + template<length_t L, qualifier Q> + GLM_FUNC_DECL vec<L, uint, Q> uaddCarry( + vec<L, uint, Q> const& x, + vec<L, uint, Q> const& y, + vec<L, uint, Q>& carry); + + /// Subtracts the 32-bit unsigned integer y from x, returning + /// the difference if non-negative, or pow(2, 32) plus the difference + /// otherwise. The value borrow is set to 0 if x >= y, or to 1 otherwise. + /// + /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
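// --- Editor's aside: a worked example for the gtx_wrap functions implemented
// above (a minimal sketch, not part of the vendored GLM sources). They map an
// arbitrary texture coordinate into [0, 1]; mirrorRepeat reflects every other
// unit interval, so 0.25 -> 0.25, 1.25 -> 0.75, 2.25 -> 0.25, and so on.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/wrap.hpp>

glm::vec2 demo_wrap()
{
    glm::vec2 uv(1.25f, -0.25f);
    glm::vec2 r = glm::repeat(uv);       // fractional part: (0.25, 0.75)
    glm::vec2 m = glm::mirrorRepeat(uv); // reflected:       (0.75, 0.25)
    return r + m;
}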
+ /// + /// @see GLSL usubBorrow man page + /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions + template<length_t L, qualifier Q> + GLM_FUNC_DECL vec<L, uint, Q> usubBorrow( + vec<L, uint, Q> const& x, + vec<L, uint, Q> const& y, + vec<L, uint, Q>& borrow); + + /// Multiplies 32-bit integers x and y, producing a 64-bit + /// result. The 32 least-significant bits are returned in lsb. + /// The 32 most-significant bits are returned in msb. + /// + /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. + /// + /// @see GLSL umulExtended man page + /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions + template<length_t L, qualifier Q> + GLM_FUNC_DECL void umulExtended( + vec<L, uint, Q> const& x, + vec<L, uint, Q> const& y, + vec<L, uint, Q>& msb, + vec<L, uint, Q>& lsb); + + /// Multiplies 32-bit integers x and y, producing a 64-bit + /// result. The 32 least-significant bits are returned in lsb. + /// The 32 most-significant bits are returned in msb. + /// + /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. + /// + /// @see GLSL imulExtended man page + /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions + template<length_t L, qualifier Q> + GLM_FUNC_DECL void imulExtended( + vec<L, int, Q> const& x, + vec<L, int, Q> const& y, + vec<L, int, Q>& msb, + vec<L, int, Q>& lsb); + + /// Extracts bits [offset, offset + bits - 1] from value, + /// returning them in the least significant bits of the result. + /// For unsigned data types, the most significant bits of the + /// result will be set to zero. For signed data types, the + /// most significant bits will be set to the value of bit offset + base - 1. + /// + /// If bits is zero, the result will be zero. The result will be + /// undefined if offset or bits is negative, or if the sum of + /// offset and bits is greater than the number of bits used + /// to store the operand. + /// + /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. + /// @tparam T Signed or unsigned integer scalar types. + /// + /// @see GLSL bitfieldExtract man page + /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> bitfieldExtract( + vec<L, T, Q> const& Value, + int Offset, + int Bits); + + /// Returns the insertion the bits least-significant bits of insert into base. + /// + /// The result will have bits [offset, offset + bits - 1] taken + /// from bits [0, bits - 1] of insert, and all other bits taken + /// directly from the corresponding bits of base. If bits is + /// zero, the result will simply be base. The result will be + /// undefined if offset or bits is negative, or if the sum of + /// offset and bits is greater than the number of bits used to + /// store the operand. + /// + /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. + /// @tparam T Signed or unsigned integer scalar or vector types. + /// + /// @see GLSL bitfieldInsert man page + /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> bitfieldInsert( + vec<L, T, Q> const& Base, + vec<L, T, Q> const& Insert, + int Offset, + int Bits); + + /// Returns the reversal of the bits of value. + /// The bit numbered n of the result will be taken from bit (bits - 1) - n of value, + /// where bits is the total number of bits used to represent value. + /// + /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. + /// @tparam T Signed or unsigned integer scalar or vector types.
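// --- Editor's aside: illustrative usage of the bitfield functions declared
// above (a minimal sketch, not part of the vendored GLM sources). Both operate
// on the bit window [offset, offset + bits - 1]: extracting 8 bits at offset 8
// from 0xAABBCCDD yields 0xCC, and inserting 0xEE into that window yields
// 0xAABBEEDD.
#include <glm/glm.hpp>

glm::uvec2 demo_bitfields()
{
    glm::uvec2 v(0xAABBCCDDu, 0x12345678u);
    glm::uvec2 field  = glm::bitfieldExtract(v, 8, 8);                   // (0xCC, 0x56)
    glm::uvec2 merged = glm::bitfieldInsert(v, glm::uvec2(0xEEu), 8, 8); // (0xAABBEEDD, 0x1234EE78)
    return field + merged;
}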
+ /// + /// @see GLSL bitfieldReverse man page + /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, T, Q> bitfieldReverse(vec<L, T, Q> const& v); + + /// Returns the number of bits set to 1 in the binary representation of value. + /// + /// @tparam genType Signed or unsigned integer scalar or vector types. + /// + /// @see GLSL bitCount man page + /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions + template<typename genType> + GLM_FUNC_DECL int bitCount(genType v); + + /// Returns the number of bits set to 1 in the binary representation of value. + /// + /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. + /// @tparam T Signed or unsigned integer scalar or vector types. + /// + /// @see GLSL bitCount man page + /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, int, Q> bitCount(vec<L, T, Q> const& v); + + /// Returns the bit number of the least significant bit set to + /// 1 in the binary representation of value. + /// If value is zero, -1 will be returned. + /// + /// @tparam genIUType Signed or unsigned integer scalar types. + /// + /// @see GLSL findLSB man page + /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions + template<typename genIUType> + GLM_FUNC_DECL int findLSB(genIUType x); + + /// Returns the bit number of the least significant bit set to + /// 1 in the binary representation of value. + /// If value is zero, -1 will be returned. + /// + /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. + /// @tparam T Signed or unsigned integer scalar types. + /// + /// @see GLSL findLSB man page + /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, int, Q> findLSB(vec<L, T, Q> const& v); + + /// Returns the bit number of the most significant bit in the binary representation of value. + /// For positive integers, the result will be the bit number of the most significant bit set to 1. + /// For negative integers, the result will be the bit number of the most significant + /// bit set to 0. For a value of zero or negative one, -1 will be returned. + /// + /// @tparam genIUType Signed or unsigned integer scalar types. + /// + /// @see GLSL findMSB man page + /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions + template<typename genIUType> + GLM_FUNC_DECL int findMSB(genIUType x); + + /// Returns the bit number of the most significant bit in the binary representation of value. + /// For positive integers, the result will be the bit number of the most significant bit set to 1. + /// For negative integers, the result will be the bit number of the most significant + /// bit set to 0. For a value of zero or negative one, -1 will be returned. + /// + /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. + /// @tparam T Signed or unsigned integer scalar types.
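// --- Editor's aside: illustrative usage of the bit-query functions declared
// above (a minimal sketch, not part of the vendored GLM sources). For 0x50
// (binary 0101'0000): two bits are set, the lowest set bit is bit 4 and the
// highest is bit 6; findLSB returns -1 for zero, matching the GLSL semantics.
#include <glm/glm.hpp>

int demo_bit_queries()
{
    int set  = glm::bitCount(0x50); // 2
    int low  = glm::findLSB(0x50);  // 4
    int high = glm::findMSB(0x50);  // 6
    int none = glm::findLSB(0);     // -1
    return set + low + high + none;
}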
+ /// + /// @see GLSL findMSB man page + /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions + template<length_t L, typename T, qualifier Q> + GLM_FUNC_DECL vec<L, int, Q> findMSB(vec<L, T, Q> const& v); + + /// @} +}//namespace glm + +#include "detail/func_integer.inl" diff --git a/MacroLibX/third_party/glm/mat2x2.hpp b/MacroLibX/third_party/glm/mat2x2.hpp new file mode 100644 index 0000000..96bec96 --- /dev/null +++ b/MacroLibX/third_party/glm/mat2x2.hpp @@ -0,0 +1,9 @@ +/// @ref core +/// @file glm/mat2x2.hpp + +#pragma once +#include "./ext/matrix_double2x2.hpp" +#include "./ext/matrix_double2x2_precision.hpp" +#include "./ext/matrix_float2x2.hpp" +#include "./ext/matrix_float2x2_precision.hpp" + diff --git a/MacroLibX/third_party/glm/mat2x3.hpp b/MacroLibX/third_party/glm/mat2x3.hpp new file mode 100644 index 0000000..d68dc25 --- /dev/null +++ b/MacroLibX/third_party/glm/mat2x3.hpp @@ -0,0 +1,9 @@ +/// @ref core +/// @file glm/mat2x3.hpp + +#pragma once +#include "./ext/matrix_double2x3.hpp" +#include "./ext/matrix_double2x3_precision.hpp" +#include "./ext/matrix_float2x3.hpp" +#include "./ext/matrix_float2x3_precision.hpp" + diff --git a/MacroLibX/third_party/glm/mat2x4.hpp b/MacroLibX/third_party/glm/mat2x4.hpp new file mode 100644 index 0000000..b04b738 --- /dev/null +++ b/MacroLibX/third_party/glm/mat2x4.hpp @@ -0,0 +1,9 @@ +/// @ref core +/// @file glm/mat2x4.hpp + +#pragma once +#include "./ext/matrix_double2x4.hpp" +#include "./ext/matrix_double2x4_precision.hpp" +#include "./ext/matrix_float2x4.hpp" +#include "./ext/matrix_float2x4_precision.hpp" + diff --git a/MacroLibX/third_party/glm/mat3x2.hpp b/MacroLibX/third_party/glm/mat3x2.hpp new file mode 100644 index 0000000..c853153 --- /dev/null +++ b/MacroLibX/third_party/glm/mat3x2.hpp @@ -0,0 +1,9 @@ +/// @ref core +/// @file glm/mat3x2.hpp + +#pragma once +#include "./ext/matrix_double3x2.hpp" +#include "./ext/matrix_double3x2_precision.hpp" +#include "./ext/matrix_float3x2.hpp" +#include "./ext/matrix_float3x2_precision.hpp" + diff --git a/MacroLibX/third_party/glm/mat3x3.hpp b/MacroLibX/third_party/glm/mat3x3.hpp new file mode 100644 index 0000000..fd4fa31 --- /dev/null +++ b/MacroLibX/third_party/glm/mat3x3.hpp @@ -0,0 +1,8 @@ +/// @ref core +/// @file glm/mat3x3.hpp + +#pragma once +#include "./ext/matrix_double3x3.hpp" +#include "./ext/matrix_double3x3_precision.hpp" +#include "./ext/matrix_float3x3.hpp" +#include "./ext/matrix_float3x3_precision.hpp" diff --git a/MacroLibX/third_party/glm/mat3x4.hpp b/MacroLibX/third_party/glm/mat3x4.hpp new file mode 100644 index 0000000..6342bf5 --- /dev/null +++ b/MacroLibX/third_party/glm/mat3x4.hpp @@ -0,0 +1,8 @@ +/// @ref core +/// @file glm/mat3x4.hpp + +#pragma once +#include "./ext/matrix_double3x4.hpp" +#include "./ext/matrix_double3x4_precision.hpp" +#include "./ext/matrix_float3x4.hpp" +#include "./ext/matrix_float3x4_precision.hpp" diff --git a/MacroLibX/third_party/glm/mat4x2.hpp b/MacroLibX/third_party/glm/mat4x2.hpp new file mode 100644 index 0000000..e013e46 --- /dev/null +++ b/MacroLibX/third_party/glm/mat4x2.hpp @@ -0,0 +1,9 @@ +/// @ref core +/// @file glm/mat4x2.hpp + +#pragma once +#include "./ext/matrix_double4x2.hpp" +#include "./ext/matrix_double4x2_precision.hpp" +#include "./ext/matrix_float4x2.hpp" +#include "./ext/matrix_float4x2_precision.hpp" + diff --git a/MacroLibX/third_party/glm/mat4x3.hpp b/MacroLibX/third_party/glm/mat4x3.hpp new file mode 100644 index 0000000..205725a --- /dev/null +++ b/MacroLibX/third_party/glm/mat4x3.hpp @@ -0,0 +1,8 @@ +/// @ref core +/// @file glm/mat4x3.hpp + +#pragma
once +#include "./ext/matrix_double4x3.hpp" +#include "./ext/matrix_double4x3_precision.hpp" +#include "./ext/matrix_float4x3.hpp" +#include "./ext/matrix_float4x3_precision.hpp" diff --git a/MacroLibX/third_party/glm/mat4x4.hpp b/MacroLibX/third_party/glm/mat4x4.hpp new file mode 100644 index 0000000..3515f7f --- /dev/null +++ b/MacroLibX/third_party/glm/mat4x4.hpp @@ -0,0 +1,9 @@ +/// @ref core +/// @file glm/mat4x4.hpp + +#pragma once +#include "./ext/matrix_double4x4.hpp" +#include "./ext/matrix_double4x4_precision.hpp" +#include "./ext/matrix_float4x4.hpp" +#include "./ext/matrix_float4x4_precision.hpp" + diff --git a/MacroLibX/third_party/glm/matrix.hpp b/MacroLibX/third_party/glm/matrix.hpp new file mode 100644 index 0000000..6badf53 --- /dev/null +++ b/MacroLibX/third_party/glm/matrix.hpp @@ -0,0 +1,161 @@ +/// @ref core +/// @file glm/matrix.hpp +/// +/// @see GLSL 4.20.8 specification, section 8.6 Matrix Functions +/// +/// @defgroup core_func_matrix Matrix functions +/// @ingroup core +/// +/// Provides GLSL matrix functions. +/// +/// Include to use these core features. + +#pragma once + +// Dependencies +#include "detail/qualifier.hpp" +#include "detail/setup.hpp" +#include "vec2.hpp" +#include "vec3.hpp" +#include "vec4.hpp" +#include "mat2x2.hpp" +#include "mat2x3.hpp" +#include "mat2x4.hpp" +#include "mat3x2.hpp" +#include "mat3x3.hpp" +#include "mat3x4.hpp" +#include "mat4x2.hpp" +#include "mat4x3.hpp" +#include "mat4x4.hpp" + +namespace glm { +namespace detail +{ + template + struct outerProduct_trait{}; + + template + struct outerProduct_trait<2, 2, T, Q> + { + typedef mat<2, 2, T, Q> type; + }; + + template + struct outerProduct_trait<2, 3, T, Q> + { + typedef mat<3, 2, T, Q> type; + }; + + template + struct outerProduct_trait<2, 4, T, Q> + { + typedef mat<4, 2, T, Q> type; + }; + + template + struct outerProduct_trait<3, 2, T, Q> + { + typedef mat<2, 3, T, Q> type; + }; + + template + struct outerProduct_trait<3, 3, T, Q> + { + typedef mat<3, 3, T, Q> type; + }; + + template + struct outerProduct_trait<3, 4, T, Q> + { + typedef mat<4, 3, T, Q> type; + }; + + template + struct outerProduct_trait<4, 2, T, Q> + { + typedef mat<2, 4, T, Q> type; + }; + + template + struct outerProduct_trait<4, 3, T, Q> + { + typedef mat<3, 4, T, Q> type; + }; + + template + struct outerProduct_trait<4, 4, T, Q> + { + typedef mat<4, 4, T, Q> type; + }; +}//namespace detail + + /// @addtogroup core_func_matrix + /// @{ + + /// Multiply matrix x by matrix y component-wise, i.e., + /// result[i][j] is the scalar product of x[i][j] and y[i][j]. + /// + /// @tparam C Integer between 1 and 4 included that qualify the number a column + /// @tparam R Integer between 1 and 4 included that qualify the number a row + /// @tparam T Floating-point or signed integer scalar types + /// @tparam Q Value from qualifier enum + /// + /// @see GLSL matrixCompMult man page + /// @see GLSL 4.20.8 specification, section 8.6 Matrix Functions + template + GLM_FUNC_DECL mat matrixCompMult(mat const& x, mat const& y); + + /// Treats the first parameter c as a column vector + /// and the second parameter r as a row vector + /// and does a linear algebraic matrix multiply c * r. 
diff --git a/MacroLibX/third_party/glm/packing.hpp b/MacroLibX/third_party/glm/packing.hpp new file mode 100644 index 0000000..ca83ac1 --- /dev/null +++ b/MacroLibX/third_party/glm/packing.hpp @@ -0,0 +1,173 @@ +/// @ref core +/// @file glm/packing.hpp +/// +/// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions +/// @see gtc_packing +/// +/// @defgroup core_func_packing Floating-Point Pack and Unpack Functions +/// @ingroup core +/// +/// Provides GLSL functions to pack and unpack half, single and double-precision floating point values into more compact integer types. +/// +/// These functions do not operate component-wise, rather as described in each case. +/// +/// Include <glm/packing.hpp> to use these core features. + +#pragma once + +#include "./ext/vector_uint2.hpp" +#include "./ext/vector_float2.hpp" +#include "./ext/vector_float4.hpp" + +namespace glm +{ + /// @addtogroup core_func_packing + /// @{ + + /// First, converts each component of the normalized floating-point value v into 8- or 16-bit integer values. + /// Then, the results are packed into the returned 32-bit unsigned integer. + /// + /// The conversion for component c of v to fixed point is done as follows: + /// packUnorm2x16: round(clamp(c, 0, +1) * 65535.0) + /// + /// The first component of the vector will be written to the least significant bits of the output; + /// the last component will be written to the most significant bits.
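// Editorial sketch, not part of the diff: the unorm16 conversion described
// above, spelled out in scalar form. The helper name is hypothetical.
#include <cmath>
#include <cstdint>
std::uint32_t packUnorm2x16_sketch(float x, float y)
{
    auto fixed16 = [](float c) -> std::uint32_t
    {
        c = c < 0.0f ? 0.0f : (c > 1.0f ? 1.0f : c); // clamp(c, 0, +1)
        return static_cast<std::uint32_t>(std::round(c * 65535.0f));
    };
    // x lands in the least significant half, y in the most significant half,
    // matching the bit layout the doc comment above describes.
    return fixed16(x) | (fixed16(y) << 16);
}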
+ /// + /// @see GLSL packUnorm2x16 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uint packUnorm2x16(vec2 const& v); + + /// First, converts each component of the normalized floating-point value v into 8- or 16-bit integer values. + /// Then, the results are packed into the returned 32-bit unsigned integer. + /// + /// The conversion for component c of v to fixed point is done as follows: + /// packSnorm2x16: round(clamp(v, -1, +1) * 32767.0) + /// + /// The first component of the vector will be written to the least significant bits of the output; + /// the last component will be written to the most significant bits. + /// + /// @see GLSL packSnorm2x16 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uint packSnorm2x16(vec2 const& v); + + /// First, converts each component of the normalized floating-point value v into 8- or 16-bit integer values. + /// Then, the results are packed into the returned 32-bit unsigned integer. + /// + /// The conversion for component c of v to fixed point is done as follows: + /// packUnorm4x8: round(clamp(c, 0, +1) * 255.0) + /// + /// The first component of the vector will be written to the least significant bits of the output; + /// the last component will be written to the most significant bits. + /// + /// @see GLSL packUnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uint packUnorm4x8(vec4 const& v); + + /// First, converts each component of the normalized floating-point value v into 8- or 16-bit integer values. + /// Then, the results are packed into the returned 32-bit unsigned integer. + /// + /// The conversion for component c of v to fixed point is done as follows: + /// packSnorm4x8: round(clamp(c, -1, +1) * 127.0) + /// + /// The first component of the vector will be written to the least significant bits of the output; + /// the last component will be written to the most significant bits. + /// + /// @see GLSL packSnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uint packSnorm4x8(vec4 const& v); + + /// First, unpacks a single 32-bit unsigned integer p into a pair of 16-bit unsigned integers, four 8-bit unsigned integers, or four 8-bit signed integers. + /// Then, each component is converted to a normalized floating-point value to generate the returned two- or four-component vector. + /// + /// The conversion for unpacked fixed-point value f to floating point is done as follows: + /// unpackUnorm2x16: f / 65535.0 + /// + /// The first component of the returned vector will be extracted from the least significant bits of the input; + /// the last component will be extracted from the most significant bits. + /// + /// @see GLSL unpackUnorm2x16 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL vec2 unpackUnorm2x16(uint p); + + /// First, unpacks a single 32-bit unsigned integer p into a pair of 16-bit unsigned integers, four 8-bit unsigned integers, or four 8-bit signed integers. + /// Then, each component is converted to a normalized floating-point value to generate the returned two- or four-component vector. 
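// Editorial sketch, not part of the diff: scalar form of the snorm16 decode
// this doc comment describes, clamp(f / 32767.0, -1, +1) per component,
// taking the 16-bit halves least significant first. The helper name is
// hypothetical.
#include <cstdint>
float unpackSnorm16_sketch(std::uint32_t p, bool high_half)
{
    std::int16_t const f = static_cast<std::int16_t>(high_half ? (p >> 16) : (p & 0xFFFFu));
    float const r = static_cast<float>(f) / 32767.0f;
    // only the encoding -32768 falls outside [-1, +1] before clamping
    return r < -1.0f ? -1.0f : (r > 1.0f ? 1.0f : r);
}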
+ /// + /// The conversion for unpacked fixed-point value f to floating point is done as follows: + /// unpackSnorm2x16: clamp(f / 32767.0, -1, +1) + /// + /// The first component of the returned vector will be extracted from the least significant bits of the input; + /// the last component will be extracted from the most significant bits. + /// + /// @see GLSL unpackSnorm2x16 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL vec2 unpackSnorm2x16(uint p); + + /// First, unpacks a single 32-bit unsigned integer p into a pair of 16-bit unsigned integers, four 8-bit unsigned integers, or four 8-bit signed integers. + /// Then, each component is converted to a normalized floating-point value to generate the returned two- or four-component vector. + /// + /// The conversion for unpacked fixed-point value f to floating point is done as follows: + /// unpackUnorm4x8: f / 255.0 + /// + /// The first component of the returned vector will be extracted from the least significant bits of the input; + /// the last component will be extracted from the most significant bits. + /// + /// @see GLSL unpackUnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL vec4 unpackUnorm4x8(uint p); + + /// First, unpacks a single 32-bit unsigned integer p into a pair of 16-bit unsigned integers, four 8-bit unsigned integers, or four 8-bit signed integers. + /// Then, each component is converted to a normalized floating-point value to generate the returned two- or four-component vector. + /// + /// The conversion for unpacked fixed-point value f to floating point is done as follows: + /// unpackSnorm4x8: clamp(f / 127.0, -1, +1) + /// + /// The first component of the returned vector will be extracted from the least significant bits of the input; + /// the last component will be extracted from the most significant bits. + /// + /// @see GLSL unpackSnorm4x8 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL vec4 unpackSnorm4x8(uint p); + + /// Returns a double-precision value obtained by packing the components of v into a 64-bit value. + /// If an IEEE 754 Inf or NaN is created, it will not signal, and the resulting floating point value is unspecified. + /// Otherwise, the bit-level representation of v is preserved. + /// The first vector component specifies the 32 least significant bits; + /// the second component specifies the 32 most significant bits. + /// + /// @see GLSL packDouble2x32 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL double packDouble2x32(uvec2 const& v); + + /// Returns a two-component unsigned integer vector representation of v. + /// The bit-level representation of v is preserved. + /// The first component of the vector contains the 32 least significant bits of the double; + /// the second component contains the 32 most significant bits. + /// + /// @see GLSL unpackDouble2x32 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uvec2 unpackDouble2x32(double v); + + /// Returns an unsigned integer obtained by converting the components of a two-component floating-point vector + /// to the 16-bit floating-point representation found in the OpenGL Specification, + /// and then packing these two 16-bit integers into a 32-bit unsigned integer.
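// Editorial sketch, not part of the diff: packDouble2x32, as documented above,
// is a pure bit-level repack; a portable scalar equivalent goes through
// memcpy. The helper name is hypothetical.
#include <cstdint>
#include <cstring>
double packDouble2x32_sketch(std::uint32_t lo, std::uint32_t hi)
{
    std::uint64_t const bits = static_cast<std::uint64_t>(lo)
                             | (static_cast<std::uint64_t>(hi) << 32); // lo = 32 LSBs
    double d;
    std::memcpy(&d, &bits, sizeof d); // preserves the bit-level representation
    return d;
}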
+ /// The first vector component specifies the 16 least-significant bits of the result; + /// the second component specifies the 16 most-significant bits. + /// + /// @see GLSL packHalf2x16 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL uint packHalf2x16(vec2 const& v); + + /// Returns a two-component floating-point vector with components obtained by unpacking a 32-bit unsigned integer into a pair of 16-bit values, + /// interpreting those values as 16-bit floating-point numbers according to the OpenGL Specification, + /// and converting them to 32-bit floating-point values. + /// The first component of the vector is obtained from the 16 least-significant bits of v; + /// the second component is obtained from the 16 most-significant bits of v. + /// + /// @see GLSL unpackHalf2x16 man page + /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions + GLM_FUNC_DECL vec2 unpackHalf2x16(uint v); + + /// @} +}//namespace glm + +#include "detail/func_packing.inl" diff --git a/MacroLibX/third_party/glm/simd/common.h b/MacroLibX/third_party/glm/simd/common.h new file mode 100644 index 0000000..9b017cb --- /dev/null +++ b/MacroLibX/third_party/glm/simd/common.h @@ -0,0 +1,240 @@ +/// @ref simd +/// @file glm/simd/common.h + +#pragma once + +#include "platform.h" + +#if GLM_ARCH & GLM_ARCH_SSE2_BIT + +GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_add(glm_f32vec4 a, glm_f32vec4 b) +{ + return _mm_add_ps(a, b); +} + +GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec1_add(glm_f32vec4 a, glm_f32vec4 b) +{ + return _mm_add_ss(a, b); +} + +GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_sub(glm_f32vec4 a, glm_f32vec4 b) +{ + return _mm_sub_ps(a, b); +} + +GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec1_sub(glm_f32vec4 a, glm_f32vec4 b) +{ + return _mm_sub_ss(a, b); +} + +GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_mul(glm_f32vec4 a, glm_f32vec4 b) +{ + return _mm_mul_ps(a, b); +} + +GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec1_mul(glm_f32vec4 a, glm_f32vec4 b) +{ + return _mm_mul_ss(a, b); +} + +GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_div(glm_f32vec4 a, glm_f32vec4 b) +{ + return _mm_div_ps(a, b); +} + +GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec1_div(glm_f32vec4 a, glm_f32vec4 b) +{ + return _mm_div_ss(a, b); +} + +GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_div_lowp(glm_f32vec4 a, glm_f32vec4 b) +{ + return glm_vec4_mul(a, _mm_rcp_ps(b)); +} + +GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_swizzle_xyzw(glm_f32vec4 a) +{ +# if GLM_ARCH & GLM_ARCH_AVX2_BIT + return _mm_permute_ps(a, _MM_SHUFFLE(3, 2, 1, 0)); +# else + return _mm_shuffle_ps(a, a, _MM_SHUFFLE(3, 2, 1, 0)); +# endif +} + +GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec1_fma(glm_f32vec4 a, glm_f32vec4 b, glm_f32vec4 c) +{ +# if (GLM_ARCH & GLM_ARCH_AVX2_BIT) && !(GLM_COMPILER & GLM_COMPILER_CLANG) + return _mm_fmadd_ss(a, b, c); +# else + return _mm_add_ss(_mm_mul_ss(a, b), c); +# endif +} + +GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_fma(glm_f32vec4 a, glm_f32vec4 b, glm_f32vec4 c) +{ +# if (GLM_ARCH & GLM_ARCH_AVX2_BIT) && !(GLM_COMPILER & GLM_COMPILER_CLANG) + return _mm_fmadd_ps(a, b, c); +# else + return glm_vec4_add(glm_vec4_mul(a, b), c); +# endif +} + +GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_abs(glm_f32vec4 x) +{ + return _mm_and_ps(x, _mm_castsi128_ps(_mm_set1_epi32(0x7FFFFFFF))); +} + +GLM_FUNC_QUALIFIER glm_ivec4 glm_ivec4_abs(glm_ivec4 x) +{ +# if GLM_ARCH & GLM_ARCH_SSSE3_BIT + return _mm_sign_epi32(x, x); +# else + glm_ivec4 const sgn0 = _mm_srai_epi32(x, 31); + glm_ivec4 const inv0 = 
_mm_xor_si128(x, sgn0); // flip the bits when x is negative (sgn0 is all ones) + glm_ivec4 const sub0 = _mm_sub_epi32(inv0, sgn0); // then subtract -1, i.e. add 1: two's complement negate + return sub0; +# endif +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_sign(glm_vec4 x) +{ + glm_vec4 const zro0 = _mm_setzero_ps(); + glm_vec4 const cmp0 = _mm_cmplt_ps(x, zro0); + glm_vec4 const cmp1 = _mm_cmpgt_ps(x, zro0); + glm_vec4 const and0 = _mm_and_ps(cmp0, _mm_set1_ps(-1.0f)); + glm_vec4 const and1 = _mm_and_ps(cmp1, _mm_set1_ps(1.0f)); + glm_vec4 const or0 = _mm_or_ps(and0, and1); + return or0; +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_round(glm_vec4 x) +{ +# if GLM_ARCH & GLM_ARCH_SSE41_BIT + return _mm_round_ps(x, _MM_FROUND_TO_NEAREST_INT); +# else + glm_vec4 const sgn0 = _mm_castsi128_ps(_mm_set1_epi32(int(0x80000000))); // sign-bit mask + glm_vec4 const and0 = _mm_and_ps(sgn0, x); // sign of each component + glm_vec4 const or0 = _mm_or_ps(and0, _mm_set_ps1(8388608.0f)); // +/-2^23, carrying x's sign + glm_vec4 const add0 = glm_vec4_add(x, or0); // floats >= 2^23 hold no fraction bits, + glm_vec4 const sub0 = glm_vec4_sub(add0, or0); // so the add/subtract pair rounds to an integer + return sub0; +# endif +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_floor(glm_vec4 x) +{ +# if GLM_ARCH & GLM_ARCH_SSE41_BIT + return _mm_floor_ps(x); +# else + glm_vec4 const rnd0 = glm_vec4_round(x); + glm_vec4 const cmp0 = _mm_cmplt_ps(x, rnd0); + glm_vec4 const and0 = _mm_and_ps(cmp0, _mm_set1_ps(1.0f)); + glm_vec4 const sub0 = glm_vec4_sub(rnd0, and0); + return sub0; +# endif +}
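// Editorial sketch, not part of the diff: the SSE2 rounding trick used by
// glm_vec4_round above, in scalar form. Adding a sign-matched 2^23 pushes the
// value into a range where float carries no fraction bits, so the addition
// itself rounds; subtracting the constant back leaves the rounded value.
// Valid for |x| < 2^23; the helper name is hypothetical.
float round_via_2p23_sketch(float x)
{
    float const magic = 8388608.0f;                   // 2^23
    float const offset = (x < 0.0f) ? -magic : magic; // carry x's sign, like the mask ops above
    return (x + offset) - offset; // rounds with the current FP mode (nearest even by default)
}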
+ +/* trunc TODO +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_trunc(glm_vec4 x) +{ + return glm_vec4(); +} +*/ + +//roundEven +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_roundEven(glm_vec4 x) +{ + glm_vec4 const sgn0 = _mm_castsi128_ps(_mm_set1_epi32(int(0x80000000))); + glm_vec4 const and0 = _mm_and_ps(sgn0, x); + glm_vec4 const or0 = _mm_or_ps(and0, _mm_set_ps1(8388608.0f)); + glm_vec4 const add0 = glm_vec4_add(x, or0); + glm_vec4 const sub0 = glm_vec4_sub(add0, or0); + return sub0; +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_ceil(glm_vec4 x) +{ +# if GLM_ARCH & GLM_ARCH_SSE41_BIT + return _mm_ceil_ps(x); +# else + glm_vec4 const rnd0 = glm_vec4_round(x); + glm_vec4 const cmp0 = _mm_cmpgt_ps(x, rnd0); + glm_vec4 const and0 = _mm_and_ps(cmp0, _mm_set1_ps(1.0f)); + glm_vec4 const add0 = glm_vec4_add(rnd0, and0); + return add0; +# endif +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_fract(glm_vec4 x) +{ + glm_vec4 const flr0 = glm_vec4_floor(x); + glm_vec4 const sub0 = glm_vec4_sub(x, flr0); + return sub0; +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_mod(glm_vec4 x, glm_vec4 y) +{ + glm_vec4 const div0 = glm_vec4_div(x, y); + glm_vec4 const flr0 = glm_vec4_floor(div0); + glm_vec4 const mul0 = glm_vec4_mul(y, flr0); + glm_vec4 const sub0 = glm_vec4_sub(x, mul0); + return sub0; +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_clamp(glm_vec4 v, glm_vec4 minVal, glm_vec4 maxVal) +{ + glm_vec4 const min0 = _mm_min_ps(v, maxVal); + glm_vec4 const max0 = _mm_max_ps(min0, minVal); + return max0; +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_mix(glm_vec4 v1, glm_vec4 v2, glm_vec4 a) +{ + glm_vec4 const sub0 = glm_vec4_sub(_mm_set1_ps(1.0f), a); + glm_vec4 const mul0 = glm_vec4_mul(v1, sub0); + glm_vec4 const mad0 = glm_vec4_fma(v2, a, mul0); + return mad0; +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_step(glm_vec4 edge, glm_vec4 x) +{ + glm_vec4 const cmp = _mm_cmple_ps(x, edge); + return _mm_movemask_ps(cmp) == 0 ? _mm_set1_ps(1.0f) : _mm_setzero_ps(); // all-or-nothing: 1 in every lane only when every x > edge +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_smoothstep(glm_vec4 edge0, glm_vec4 edge1, glm_vec4 x) +{ + glm_vec4 const sub0 = glm_vec4_sub(x, edge0); + glm_vec4 const sub1 = glm_vec4_sub(edge1, edge0); + glm_vec4 const div0 = glm_vec4_div(sub0, sub1); // t = (x - edge0) / (edge1 - edge0) + glm_vec4 const clp0 = glm_vec4_clamp(div0, _mm_setzero_ps(), _mm_set1_ps(1.0f)); + glm_vec4 const mul0 = glm_vec4_mul(_mm_set1_ps(2.0f), clp0); + glm_vec4 const sub2 = glm_vec4_sub(_mm_set1_ps(3.0f), mul0); + glm_vec4 const mul1 = glm_vec4_mul(clp0, clp0); + glm_vec4 const mul2 = glm_vec4_mul(mul1, sub2); // t * t * (3 - 2 * t) + return mul2; +} + +// Agner Fog method +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_nan(glm_vec4 x) +{ + glm_ivec4 const t1 = _mm_castps_si128(x); // reinterpret as 32-bit integer + glm_ivec4 const t2 = _mm_sll_epi32(t1, _mm_cvtsi32_si128(1)); // shift out sign bit + glm_ivec4 const t3 = _mm_set1_epi32(int(0xFF000000)); // exponent mask + glm_ivec4 const t4 = _mm_and_si128(t2, t3); // exponent + glm_ivec4 const t5 = _mm_andnot_si128(t3, t2); // fraction + glm_ivec4 const Equal = _mm_cmpeq_epi32(t3, t4); // exponent is all 1s + glm_ivec4 const FracZero = _mm_cmpeq_epi32(t5, _mm_setzero_si128()); // fraction == 0 + glm_ivec4 const And = _mm_andnot_si128(FracZero, Equal); + return _mm_castsi128_ps(And); // exponent = all 1s and fraction != 0 +} + +// Agner Fog method +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_inf(glm_vec4 x) +{ + glm_ivec4 const t1 = _mm_castps_si128(x); // reinterpret as 32-bit integer + glm_ivec4 const t2 = _mm_sll_epi32(t1, _mm_cvtsi32_si128(1)); // shift out sign bit + return _mm_castsi128_ps(_mm_cmpeq_epi32(t2, _mm_set1_epi32(int(0xFF000000)))); // exponent is all 1s, fraction is 0 +} + +#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/MacroLibX/third_party/glm/simd/exponential.h b/MacroLibX/third_party/glm/simd/exponential.h new file mode 100644 index 0000000..bc351d0 --- /dev/null +++ b/MacroLibX/third_party/glm/simd/exponential.h @@ -0,0 +1,20 @@ +/// @ref simd +/// @file glm/simd/exponential.h + +#pragma once + +#include "platform.h" + +#if GLM_ARCH & GLM_ARCH_SSE2_BIT + +GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec1_sqrt_lowp(glm_f32vec4 x) +{ + return _mm_mul_ss(_mm_rsqrt_ss(x), x); +} + +GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_sqrt_lowp(glm_f32vec4 x) +{ + return _mm_mul_ps(_mm_rsqrt_ps(x), x); +} + +#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/MacroLibX/third_party/glm/simd/geometric.h b/MacroLibX/third_party/glm/simd/geometric.h new file mode 100644 index 0000000..07d7cbc --- /dev/null +++ b/MacroLibX/third_party/glm/simd/geometric.h @@ -0,0 +1,124 @@ +/// @ref simd +/// @file glm/simd/geometric.h + +#pragma once + +#include "common.h" + +#if GLM_ARCH & GLM_ARCH_SSE2_BIT + +GLM_FUNC_DECL glm_vec4 glm_vec4_dot(glm_vec4 v1, glm_vec4 v2); +GLM_FUNC_DECL glm_vec4 glm_vec1_dot(glm_vec4 v1, glm_vec4 v2); + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_length(glm_vec4 x) +{ + glm_vec4 const dot0 = glm_vec4_dot(x, x); + glm_vec4 const sqt0 = _mm_sqrt_ps(dot0); + return sqt0; +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_distance(glm_vec4 p0, glm_vec4 p1) +{ + glm_vec4 const sub0 = _mm_sub_ps(p0, p1); + glm_vec4 const len0 = glm_vec4_length(sub0); + return len0; +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_dot(glm_vec4 v1, glm_vec4 v2) +{ +# if GLM_ARCH & GLM_ARCH_AVX_BIT + return _mm_dp_ps(v1, v2, 0xff); +# elif GLM_ARCH & GLM_ARCH_SSE3_BIT + glm_vec4 const mul0 = _mm_mul_ps(v1, v2); + glm_vec4 const hadd0 = _mm_hadd_ps(mul0, mul0); + glm_vec4 const hadd1 = _mm_hadd_ps(hadd0, hadd0); + return hadd1; +# else + glm_vec4 const mul0 = _mm_mul_ps(v1, v2); 
+ glm_vec4 const swp0 = _mm_shuffle_ps(mul0, mul0, _MM_SHUFFLE(2, 3, 0, 1)); + glm_vec4 const add0 = _mm_add_ps(mul0, swp0); + glm_vec4 const swp1 = _mm_shuffle_ps(add0, add0, _MM_SHUFFLE(0, 1, 2, 3)); + glm_vec4 const add1 = _mm_add_ps(add0, swp1); + return add1; +# endif +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec1_dot(glm_vec4 v1, glm_vec4 v2) +{ +# if GLM_ARCH & GLM_ARCH_AVX_BIT + return _mm_dp_ps(v1, v2, 0xff); +# elif GLM_ARCH & GLM_ARCH_SSE3_BIT + glm_vec4 const mul0 = _mm_mul_ps(v1, v2); + glm_vec4 const had0 = _mm_hadd_ps(mul0, mul0); + glm_vec4 const had1 = _mm_hadd_ps(had0, had0); + return had1; +# else + glm_vec4 const mul0 = _mm_mul_ps(v1, v2); + glm_vec4 const mov0 = _mm_movehl_ps(mul0, mul0); + glm_vec4 const add0 = _mm_add_ps(mov0, mul0); + glm_vec4 const swp1 = _mm_shuffle_ps(add0, add0, 1); + glm_vec4 const add1 = _mm_add_ss(add0, swp1); + return add1; +# endif +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_cross(glm_vec4 v1, glm_vec4 v2) +{ + glm_vec4 const swp0 = _mm_shuffle_ps(v1, v1, _MM_SHUFFLE(3, 0, 2, 1)); + glm_vec4 const swp1 = _mm_shuffle_ps(v1, v1, _MM_SHUFFLE(3, 1, 0, 2)); + glm_vec4 const swp2 = _mm_shuffle_ps(v2, v2, _MM_SHUFFLE(3, 0, 2, 1)); + glm_vec4 const swp3 = _mm_shuffle_ps(v2, v2, _MM_SHUFFLE(3, 1, 0, 2)); + glm_vec4 const mul0 = _mm_mul_ps(swp0, swp3); + glm_vec4 const mul1 = _mm_mul_ps(swp1, swp2); + glm_vec4 const sub0 = _mm_sub_ps(mul0, mul1); + return sub0; +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_normalize(glm_vec4 v) +{ + glm_vec4 const dot0 = glm_vec4_dot(v, v); + glm_vec4 const isr0 = _mm_rsqrt_ps(dot0); + glm_vec4 const mul0 = _mm_mul_ps(v, isr0); + return mul0; +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_faceforward(glm_vec4 N, glm_vec4 I, glm_vec4 Nref) +{ + glm_vec4 const dot0 = glm_vec4_dot(Nref, I); + glm_vec4 const sgn0 = glm_vec4_sign(dot0); + glm_vec4 const mul0 = _mm_mul_ps(sgn0, _mm_set1_ps(-1.0f)); + glm_vec4 const mul1 = _mm_mul_ps(N, mul0); + return mul1; +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_reflect(glm_vec4 I, glm_vec4 N) +{ + glm_vec4 const dot0 = glm_vec4_dot(N, I); + glm_vec4 const mul0 = _mm_mul_ps(N, dot0); + glm_vec4 const mul1 = _mm_mul_ps(mul0, _mm_set1_ps(2.0f)); + glm_vec4 const sub0 = _mm_sub_ps(I, mul1); + return sub0; +} + +GLM_FUNC_QUALIFIER __m128 glm_vec4_refract(glm_vec4 I, glm_vec4 N, glm_vec4 eta) +{ + glm_vec4 const dot0 = glm_vec4_dot(N, I); + glm_vec4 const mul0 = _mm_mul_ps(eta, eta); + glm_vec4 const mul1 = _mm_mul_ps(dot0, dot0); + glm_vec4 const sub0 = _mm_sub_ps(_mm_set1_ps(1.0f), mul0); + glm_vec4 const sub1 = _mm_sub_ps(_mm_set1_ps(1.0f), mul1); + glm_vec4 const mul2 = _mm_mul_ps(sub0, sub1); + + if(_mm_movemask_ps(_mm_cmplt_ss(mul2, _mm_set1_ps(0.0f))) == 0) + return _mm_set1_ps(0.0f); + + glm_vec4 const sqt0 = _mm_sqrt_ps(mul2); + glm_vec4 const mad0 = glm_vec4_fma(eta, dot0, sqt0); + glm_vec4 const mul4 = _mm_mul_ps(mad0, N); + glm_vec4 const mul5 = _mm_mul_ps(eta, I); + glm_vec4 const sub2 = _mm_sub_ps(mul5, mul4); + + return sub2; +} + +#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/MacroLibX/third_party/glm/simd/integer.h b/MacroLibX/third_party/glm/simd/integer.h new file mode 100644 index 0000000..9381418 --- /dev/null +++ b/MacroLibX/third_party/glm/simd/integer.h @@ -0,0 +1,115 @@ +/// @ref simd +/// @file glm/simd/integer.h + +#pragma once + +#if GLM_ARCH & GLM_ARCH_SSE2_BIT + +GLM_FUNC_QUALIFIER glm_uvec4 glm_i128_interleave(glm_uvec4 x) +{ + glm_uvec4 const Mask4 = _mm_set1_epi32(0x0000FFFF); + glm_uvec4 const Mask3 = _mm_set1_epi32(0x00FF00FF); + glm_uvec4 const Mask2 = 
_mm_set1_epi32(0x0F0F0F0F); + glm_uvec4 const Mask1 = _mm_set1_epi32(0x33333333); + glm_uvec4 const Mask0 = _mm_set1_epi32(0x55555555); + + glm_uvec4 Reg1; + glm_uvec4 Reg2; + + // REG1 = x; + // REG2 = y; + //Reg1 = _mm_unpacklo_epi64(x, y); + Reg1 = x; + + //REG1 = ((REG1 << 16) | REG1) & glm::uint64(0x0000FFFF0000FFFF); + //REG2 = ((REG2 << 16) | REG2) & glm::uint64(0x0000FFFF0000FFFF); + Reg2 = _mm_slli_si128(Reg1, 2); + Reg1 = _mm_or_si128(Reg2, Reg1); + Reg1 = _mm_and_si128(Reg1, Mask4); + + //REG1 = ((REG1 << 8) | REG1) & glm::uint64(0x00FF00FF00FF00FF); + //REG2 = ((REG2 << 8) | REG2) & glm::uint64(0x00FF00FF00FF00FF); + Reg2 = _mm_slli_si128(Reg1, 1); + Reg1 = _mm_or_si128(Reg2, Reg1); + Reg1 = _mm_and_si128(Reg1, Mask3); + + //REG1 = ((REG1 << 4) | REG1) & glm::uint64(0x0F0F0F0F0F0F0F0F); + //REG2 = ((REG2 << 4) | REG2) & glm::uint64(0x0F0F0F0F0F0F0F0F); + Reg2 = _mm_slli_epi32(Reg1, 4); + Reg1 = _mm_or_si128(Reg2, Reg1); + Reg1 = _mm_and_si128(Reg1, Mask2); + + //REG1 = ((REG1 << 2) | REG1) & glm::uint64(0x3333333333333333); + //REG2 = ((REG2 << 2) | REG2) & glm::uint64(0x3333333333333333); + Reg2 = _mm_slli_epi32(Reg1, 2); + Reg1 = _mm_or_si128(Reg2, Reg1); + Reg1 = _mm_and_si128(Reg1, Mask1); + + //REG1 = ((REG1 << 1) | REG1) & glm::uint64(0x5555555555555555); + //REG2 = ((REG2 << 1) | REG2) & glm::uint64(0x5555555555555555); + Reg2 = _mm_slli_epi32(Reg1, 1); + Reg1 = _mm_or_si128(Reg2, Reg1); + Reg1 = _mm_and_si128(Reg1, Mask0); + + //return REG1 | (REG2 << 1); + Reg2 = _mm_slli_epi32(Reg1, 1); + Reg2 = _mm_srli_si128(Reg2, 8); + Reg1 = _mm_or_si128(Reg1, Reg2); + + return Reg1; +} + +GLM_FUNC_QUALIFIER glm_uvec4 glm_i128_interleave2(glm_uvec4 x, glm_uvec4 y) +{ + glm_uvec4 const Mask4 = _mm_set1_epi32(0x0000FFFF); + glm_uvec4 const Mask3 = _mm_set1_epi32(0x00FF00FF); + glm_uvec4 const Mask2 = _mm_set1_epi32(0x0F0F0F0F); + glm_uvec4 const Mask1 = _mm_set1_epi32(0x33333333); + glm_uvec4 const Mask0 = _mm_set1_epi32(0x55555555); + + glm_uvec4 Reg1; + glm_uvec4 Reg2; + + // REG1 = x; + // REG2 = y; + Reg1 = _mm_unpacklo_epi64(x, y); + + //REG1 = ((REG1 << 16) | REG1) & glm::uint64(0x0000FFFF0000FFFF); + //REG2 = ((REG2 << 16) | REG2) & glm::uint64(0x0000FFFF0000FFFF); + Reg2 = _mm_slli_si128(Reg1, 2); + Reg1 = _mm_or_si128(Reg2, Reg1); + Reg1 = _mm_and_si128(Reg1, Mask4); + + //REG1 = ((REG1 << 8) | REG1) & glm::uint64(0x00FF00FF00FF00FF); + //REG2 = ((REG2 << 8) | REG2) & glm::uint64(0x00FF00FF00FF00FF); + Reg2 = _mm_slli_si128(Reg1, 1); + Reg1 = _mm_or_si128(Reg2, Reg1); + Reg1 = _mm_and_si128(Reg1, Mask3); + + //REG1 = ((REG1 << 4) | REG1) & glm::uint64(0x0F0F0F0F0F0F0F0F); + //REG2 = ((REG2 << 4) | REG2) & glm::uint64(0x0F0F0F0F0F0F0F0F); + Reg2 = _mm_slli_epi32(Reg1, 4); + Reg1 = _mm_or_si128(Reg2, Reg1); + Reg1 = _mm_and_si128(Reg1, Mask2); + + //REG1 = ((REG1 << 2) | REG1) & glm::uint64(0x3333333333333333); + //REG2 = ((REG2 << 2) | REG2) & glm::uint64(0x3333333333333333); + Reg2 = _mm_slli_epi32(Reg1, 2); + Reg1 = _mm_or_si128(Reg2, Reg1); + Reg1 = _mm_and_si128(Reg1, Mask1); + + //REG1 = ((REG1 << 1) | REG1) & glm::uint64(0x5555555555555555); + //REG2 = ((REG2 << 1) | REG2) & glm::uint64(0x5555555555555555); + Reg2 = _mm_slli_epi32(Reg1, 1); + Reg1 = _mm_or_si128(Reg2, Reg1); + Reg1 = _mm_and_si128(Reg1, Mask0); + + //return REG1 | (REG2 << 1); + Reg2 = _mm_slli_epi32(Reg1, 1); + Reg2 = _mm_srli_si128(Reg2, 8); + Reg1 = _mm_or_si128(Reg1, Reg2); + + return Reg1; +} + +#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/MacroLibX/third_party/glm/simd/matrix.h 
b/MacroLibX/third_party/glm/simd/matrix.h new file mode 100644 index 0000000..b6c42ea --- /dev/null +++ b/MacroLibX/third_party/glm/simd/matrix.h @@ -0,0 +1,1028 @@ +/// @ref simd +/// @file glm/simd/matrix.h + +#pragma once + +#include "geometric.h" + +#if GLM_ARCH & GLM_ARCH_SSE2_BIT + +GLM_FUNC_QUALIFIER void glm_mat4_matrixCompMult(glm_vec4 const in1[4], glm_vec4 const in2[4], glm_vec4 out[4]) +{ + out[0] = _mm_mul_ps(in1[0], in2[0]); + out[1] = _mm_mul_ps(in1[1], in2[1]); + out[2] = _mm_mul_ps(in1[2], in2[2]); + out[3] = _mm_mul_ps(in1[3], in2[3]); +} + +GLM_FUNC_QUALIFIER void glm_mat4_add(glm_vec4 const in1[4], glm_vec4 const in2[4], glm_vec4 out[4]) +{ + out[0] = _mm_add_ps(in1[0], in2[0]); + out[1] = _mm_add_ps(in1[1], in2[1]); + out[2] = _mm_add_ps(in1[2], in2[2]); + out[3] = _mm_add_ps(in1[3], in2[3]); +} + +GLM_FUNC_QUALIFIER void glm_mat4_sub(glm_vec4 const in1[4], glm_vec4 const in2[4], glm_vec4 out[4]) +{ + out[0] = _mm_sub_ps(in1[0], in2[0]); + out[1] = _mm_sub_ps(in1[1], in2[1]); + out[2] = _mm_sub_ps(in1[2], in2[2]); + out[3] = _mm_sub_ps(in1[3], in2[3]); +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_mat4_mul_vec4(glm_vec4 const m[4], glm_vec4 v) +{ + __m128 v0 = _mm_shuffle_ps(v, v, _MM_SHUFFLE(0, 0, 0, 0)); + __m128 v1 = _mm_shuffle_ps(v, v, _MM_SHUFFLE(1, 1, 1, 1)); + __m128 v2 = _mm_shuffle_ps(v, v, _MM_SHUFFLE(2, 2, 2, 2)); + __m128 v3 = _mm_shuffle_ps(v, v, _MM_SHUFFLE(3, 3, 3, 3)); + + __m128 m0 = _mm_mul_ps(m[0], v0); + __m128 m1 = _mm_mul_ps(m[1], v1); + __m128 m2 = _mm_mul_ps(m[2], v2); + __m128 m3 = _mm_mul_ps(m[3], v3); + + __m128 a0 = _mm_add_ps(m0, m1); + __m128 a1 = _mm_add_ps(m2, m3); + __m128 a2 = _mm_add_ps(a0, a1); + + return a2; +} + +GLM_FUNC_QUALIFIER __m128 glm_vec4_mul_mat4(glm_vec4 v, glm_vec4 const m[4]) +{ + __m128 i0 = m[0]; + __m128 i1 = m[1]; + __m128 i2 = m[2]; + __m128 i3 = m[3]; + + __m128 m0 = _mm_mul_ps(v, i0); + __m128 m1 = _mm_mul_ps(v, i1); + __m128 m2 = _mm_mul_ps(v, i2); + __m128 m3 = _mm_mul_ps(v, i3); + + __m128 u0 = _mm_unpacklo_ps(m0, m1); + __m128 u1 = _mm_unpackhi_ps(m0, m1); + __m128 a0 = _mm_add_ps(u0, u1); + + __m128 u2 = _mm_unpacklo_ps(m2, m3); + __m128 u3 = _mm_unpackhi_ps(m2, m3); + __m128 a1 = _mm_add_ps(u2, u3); + + __m128 f0 = _mm_movelh_ps(a0, a1); + __m128 f1 = _mm_movehl_ps(a1, a0); + __m128 f2 = _mm_add_ps(f0, f1); + + return f2; +} + +GLM_FUNC_QUALIFIER void glm_mat4_mul(glm_vec4 const in1[4], glm_vec4 const in2[4], glm_vec4 out[4]) +{ + { + __m128 e0 = _mm_shuffle_ps(in2[0], in2[0], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 e1 = _mm_shuffle_ps(in2[0], in2[0], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 e2 = _mm_shuffle_ps(in2[0], in2[0], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 e3 = _mm_shuffle_ps(in2[0], in2[0], _MM_SHUFFLE(3, 3, 3, 3)); + + __m128 m0 = _mm_mul_ps(in1[0], e0); + __m128 m1 = _mm_mul_ps(in1[1], e1); + __m128 m2 = _mm_mul_ps(in1[2], e2); + __m128 m3 = _mm_mul_ps(in1[3], e3); + + __m128 a0 = _mm_add_ps(m0, m1); + __m128 a1 = _mm_add_ps(m2, m3); + __m128 a2 = _mm_add_ps(a0, a1); + + out[0] = a2; + } + + { + __m128 e0 = _mm_shuffle_ps(in2[1], in2[1], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 e1 = _mm_shuffle_ps(in2[1], in2[1], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 e2 = _mm_shuffle_ps(in2[1], in2[1], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 e3 = _mm_shuffle_ps(in2[1], in2[1], _MM_SHUFFLE(3, 3, 3, 3)); + + __m128 m0 = _mm_mul_ps(in1[0], e0); + __m128 m1 = _mm_mul_ps(in1[1], e1); + __m128 m2 = _mm_mul_ps(in1[2], e2); + __m128 m3 = _mm_mul_ps(in1[3], e3); + + __m128 a0 = _mm_add_ps(m0, m1); + __m128 a1 = _mm_add_ps(m2, m3); + __m128 a2 = 
_mm_add_ps(a0, a1); + + out[1] = a2; + } + + { + __m128 e0 = _mm_shuffle_ps(in2[2], in2[2], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 e1 = _mm_shuffle_ps(in2[2], in2[2], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 e2 = _mm_shuffle_ps(in2[2], in2[2], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 e3 = _mm_shuffle_ps(in2[2], in2[2], _MM_SHUFFLE(3, 3, 3, 3)); + + __m128 m0 = _mm_mul_ps(in1[0], e0); + __m128 m1 = _mm_mul_ps(in1[1], e1); + __m128 m2 = _mm_mul_ps(in1[2], e2); + __m128 m3 = _mm_mul_ps(in1[3], e3); + + __m128 a0 = _mm_add_ps(m0, m1); + __m128 a1 = _mm_add_ps(m2, m3); + __m128 a2 = _mm_add_ps(a0, a1); + + out[2] = a2; + } + + { + //(__m128&)_mm_shuffle_epi32(__m128i&)in2[0], _MM_SHUFFLE(3, 3, 3, 3)) + __m128 e0 = _mm_shuffle_ps(in2[3], in2[3], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 e1 = _mm_shuffle_ps(in2[3], in2[3], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 e2 = _mm_shuffle_ps(in2[3], in2[3], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 e3 = _mm_shuffle_ps(in2[3], in2[3], _MM_SHUFFLE(3, 3, 3, 3)); + + __m128 m0 = _mm_mul_ps(in1[0], e0); + __m128 m1 = _mm_mul_ps(in1[1], e1); + __m128 m2 = _mm_mul_ps(in1[2], e2); + __m128 m3 = _mm_mul_ps(in1[3], e3); + + __m128 a0 = _mm_add_ps(m0, m1); + __m128 a1 = _mm_add_ps(m2, m3); + __m128 a2 = _mm_add_ps(a0, a1); + + out[3] = a2; + } +} + +GLM_FUNC_QUALIFIER void glm_mat4_transpose(glm_vec4 const in[4], glm_vec4 out[4]) +{ + __m128 tmp0 = _mm_shuffle_ps(in[0], in[1], 0x44); + __m128 tmp2 = _mm_shuffle_ps(in[0], in[1], 0xEE); + __m128 tmp1 = _mm_shuffle_ps(in[2], in[3], 0x44); + __m128 tmp3 = _mm_shuffle_ps(in[2], in[3], 0xEE); + + out[0] = _mm_shuffle_ps(tmp0, tmp1, 0x88); + out[1] = _mm_shuffle_ps(tmp0, tmp1, 0xDD); + out[2] = _mm_shuffle_ps(tmp2, tmp3, 0x88); + out[3] = _mm_shuffle_ps(tmp2, tmp3, 0xDD); +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_mat4_determinant_highp(glm_vec4 const in[4]) +{ + __m128 Fac0; + { + // valType SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; + // valType SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; + // valType SubFactor06 = m[1][2] * m[3][3] - m[3][2] * m[1][3]; + // valType SubFactor13 = m[1][2] * m[2][3] - m[2][2] * m[1][3]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac0 = _mm_sub_ps(Mul00, Mul01); + } + + __m128 Fac1; + { + // valType SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; + // valType SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; + // valType SubFactor07 = m[1][1] * m[3][3] - m[3][1] * m[1][3]; + // valType SubFactor14 = m[1][1] * m[2][3] - m[2][1] * m[1][3]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac1 = _mm_sub_ps(Mul00, Mul01); + } + + + __m128 Fac2; + { + // valType 
SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; + // valType SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; + // valType SubFactor08 = m[1][1] * m[3][2] - m[3][1] * m[1][2]; + // valType SubFactor15 = m[1][1] * m[2][2] - m[2][1] * m[1][2]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac2 = _mm_sub_ps(Mul00, Mul01); + } + + __m128 Fac3; + { + // valType SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; + // valType SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; + // valType SubFactor09 = m[1][0] * m[3][3] - m[3][0] * m[1][3]; + // valType SubFactor16 = m[1][0] * m[2][3] - m[2][0] * m[1][3]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac3 = _mm_sub_ps(Mul00, Mul01); + } + + __m128 Fac4; + { + // valType SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; + // valType SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; + // valType SubFactor10 = m[1][0] * m[3][2] - m[3][0] * m[1][2]; + // valType SubFactor17 = m[1][0] * m[2][2] - m[2][0] * m[1][2]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac4 = _mm_sub_ps(Mul00, Mul01); + } + + __m128 Fac5; + { + // valType SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; + // valType SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; + // valType SubFactor12 = m[1][0] * m[3][1] - m[3][0] * m[1][1]; + // valType SubFactor18 = m[1][0] * m[2][1] - m[2][0] * m[1][1]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac5 = _mm_sub_ps(Mul00, Mul01); + } + + __m128 SignA = _mm_set_ps( 1.0f,-1.0f, 1.0f,-1.0f); + __m128 SignB = _mm_set_ps(-1.0f, 1.0f,-1.0f, 1.0f); + + // m[1][0] + // m[0][0] + // m[0][0] + // m[0][0] + __m128 Temp0 = 
_mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Vec0 = _mm_shuffle_ps(Temp0, Temp0, _MM_SHUFFLE(2, 2, 2, 0)); + + // m[1][1] + // m[0][1] + // m[0][1] + // m[0][1] + __m128 Temp1 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 Vec1 = _mm_shuffle_ps(Temp1, Temp1, _MM_SHUFFLE(2, 2, 2, 0)); + + // m[1][2] + // m[0][2] + // m[0][2] + // m[0][2] + __m128 Temp2 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 Vec2 = _mm_shuffle_ps(Temp2, Temp2, _MM_SHUFFLE(2, 2, 2, 0)); + + // m[1][3] + // m[0][3] + // m[0][3] + // m[0][3] + __m128 Temp3 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(3, 3, 3, 3)); + __m128 Vec3 = _mm_shuffle_ps(Temp3, Temp3, _MM_SHUFFLE(2, 2, 2, 0)); + + // col0 + // + (Vec1[0] * Fac0[0] - Vec2[0] * Fac1[0] + Vec3[0] * Fac2[0]), + // - (Vec1[1] * Fac0[1] - Vec2[1] * Fac1[1] + Vec3[1] * Fac2[1]), + // + (Vec1[2] * Fac0[2] - Vec2[2] * Fac1[2] + Vec3[2] * Fac2[2]), + // - (Vec1[3] * Fac0[3] - Vec2[3] * Fac1[3] + Vec3[3] * Fac2[3]), + __m128 Mul00 = _mm_mul_ps(Vec1, Fac0); + __m128 Mul01 = _mm_mul_ps(Vec2, Fac1); + __m128 Mul02 = _mm_mul_ps(Vec3, Fac2); + __m128 Sub00 = _mm_sub_ps(Mul00, Mul01); + __m128 Add00 = _mm_add_ps(Sub00, Mul02); + __m128 Inv0 = _mm_mul_ps(SignB, Add00); + + // col1 + // - (Vec0[0] * Fac0[0] - Vec2[0] * Fac3[0] + Vec3[0] * Fac4[0]), + // + (Vec0[0] * Fac0[1] - Vec2[1] * Fac3[1] + Vec3[1] * Fac4[1]), + // - (Vec0[0] * Fac0[2] - Vec2[2] * Fac3[2] + Vec3[2] * Fac4[2]), + // + (Vec0[0] * Fac0[3] - Vec2[3] * Fac3[3] + Vec3[3] * Fac4[3]), + __m128 Mul03 = _mm_mul_ps(Vec0, Fac0); + __m128 Mul04 = _mm_mul_ps(Vec2, Fac3); + __m128 Mul05 = _mm_mul_ps(Vec3, Fac4); + __m128 Sub01 = _mm_sub_ps(Mul03, Mul04); + __m128 Add01 = _mm_add_ps(Sub01, Mul05); + __m128 Inv1 = _mm_mul_ps(SignA, Add01); + + // col2 + // + (Vec0[0] * Fac1[0] - Vec1[0] * Fac3[0] + Vec3[0] * Fac5[0]), + // - (Vec0[0] * Fac1[1] - Vec1[1] * Fac3[1] + Vec3[1] * Fac5[1]), + // + (Vec0[0] * Fac1[2] - Vec1[2] * Fac3[2] + Vec3[2] * Fac5[2]), + // - (Vec0[0] * Fac1[3] - Vec1[3] * Fac3[3] + Vec3[3] * Fac5[3]), + __m128 Mul06 = _mm_mul_ps(Vec0, Fac1); + __m128 Mul07 = _mm_mul_ps(Vec1, Fac3); + __m128 Mul08 = _mm_mul_ps(Vec3, Fac5); + __m128 Sub02 = _mm_sub_ps(Mul06, Mul07); + __m128 Add02 = _mm_add_ps(Sub02, Mul08); + __m128 Inv2 = _mm_mul_ps(SignB, Add02); + + // col3 + // - (Vec1[0] * Fac2[0] - Vec1[0] * Fac4[0] + Vec2[0] * Fac5[0]), + // + (Vec1[0] * Fac2[1] - Vec1[1] * Fac4[1] + Vec2[1] * Fac5[1]), + // - (Vec1[0] * Fac2[2] - Vec1[2] * Fac4[2] + Vec2[2] * Fac5[2]), + // + (Vec1[0] * Fac2[3] - Vec1[3] * Fac4[3] + Vec2[3] * Fac5[3])); + __m128 Mul09 = _mm_mul_ps(Vec0, Fac2); + __m128 Mul10 = _mm_mul_ps(Vec1, Fac4); + __m128 Mul11 = _mm_mul_ps(Vec2, Fac5); + __m128 Sub03 = _mm_sub_ps(Mul09, Mul10); + __m128 Add03 = _mm_add_ps(Sub03, Mul11); + __m128 Inv3 = _mm_mul_ps(SignA, Add03); + + __m128 Row0 = _mm_shuffle_ps(Inv0, Inv1, _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Row1 = _mm_shuffle_ps(Inv2, Inv3, _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Row2 = _mm_shuffle_ps(Row0, Row1, _MM_SHUFFLE(2, 0, 2, 0)); + + // valType Determinant = m[0][0] * Inverse[0][0] + // + m[0][1] * Inverse[1][0] + // + m[0][2] * Inverse[2][0] + // + m[0][3] * Inverse[3][0]; + __m128 Det0 = glm_vec4_dot(in[0], Row2); + return Det0; +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_mat4_determinant_lowp(glm_vec4 const m[4]) +{ + // _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128( + + //T SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; + //T SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; + //T SubFactor02 = 
m[2][1] * m[3][2] - m[3][1] * m[2][2]; + //T SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; + //T SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; + //T SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; + + // First 2 columns + __m128 Swp2A = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[2]), _MM_SHUFFLE(0, 1, 1, 2))); + __m128 Swp3A = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[3]), _MM_SHUFFLE(3, 2, 3, 3))); + __m128 MulA = _mm_mul_ps(Swp2A, Swp3A); + + // Second 2 columns + __m128 Swp2B = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[2]), _MM_SHUFFLE(3, 2, 3, 3))); + __m128 Swp3B = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[3]), _MM_SHUFFLE(0, 1, 1, 2))); + __m128 MulB = _mm_mul_ps(Swp2B, Swp3B); + + // Columns subtraction + __m128 SubE = _mm_sub_ps(MulA, MulB); + + // Last 2 rows + __m128 Swp2C = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[2]), _MM_SHUFFLE(0, 0, 1, 2))); + __m128 Swp3C = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[3]), _MM_SHUFFLE(1, 2, 0, 0))); + __m128 MulC = _mm_mul_ps(Swp2C, Swp3C); + __m128 SubF = _mm_sub_ps(_mm_movehl_ps(MulC, MulC), MulC); + + //vec<4, T, Q> DetCof( + // + (m[1][1] * SubFactor00 - m[1][2] * SubFactor01 + m[1][3] * SubFactor02), + // - (m[1][0] * SubFactor00 - m[1][2] * SubFactor03 + m[1][3] * SubFactor04), + // + (m[1][0] * SubFactor01 - m[1][1] * SubFactor03 + m[1][3] * SubFactor05), + // - (m[1][0] * SubFactor02 - m[1][1] * SubFactor04 + m[1][2] * SubFactor05)); + + __m128 SubFacA = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(SubE), _MM_SHUFFLE(2, 1, 0, 0))); + __m128 SwpFacA = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[1]), _MM_SHUFFLE(0, 0, 0, 1))); + __m128 MulFacA = _mm_mul_ps(SwpFacA, SubFacA); + + __m128 SubTmpB = _mm_shuffle_ps(SubE, SubF, _MM_SHUFFLE(0, 0, 3, 1)); + __m128 SubFacB = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(SubTmpB), _MM_SHUFFLE(3, 1, 1, 0)));//SubF[0], SubE[3], SubE[3], SubE[1]; + __m128 SwpFacB = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[1]), _MM_SHUFFLE(1, 1, 2, 2))); + __m128 MulFacB = _mm_mul_ps(SwpFacB, SubFacB); + + __m128 SubRes = _mm_sub_ps(MulFacA, MulFacB); + + __m128 SubTmpC = _mm_shuffle_ps(SubE, SubF, _MM_SHUFFLE(1, 0, 2, 2)); + __m128 SubFacC = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(SubTmpC), _MM_SHUFFLE(3, 3, 2, 0))); + __m128 SwpFacC = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[1]), _MM_SHUFFLE(2, 3, 3, 3))); + __m128 MulFacC = _mm_mul_ps(SwpFacC, SubFacC); + + __m128 AddRes = _mm_add_ps(SubRes, MulFacC); + __m128 DetCof = _mm_mul_ps(AddRes, _mm_setr_ps( 1.0f,-1.0f, 1.0f,-1.0f)); + + //return m[0][0] * DetCof[0] + // + m[0][1] * DetCof[1] + // + m[0][2] * DetCof[2] + // + m[0][3] * DetCof[3]; + + return glm_vec4_dot(m[0], DetCof); +} + +GLM_FUNC_QUALIFIER glm_vec4 glm_mat4_determinant(glm_vec4 const m[4]) +{ + // _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(add) + + //T SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; + //T SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; + //T SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; + //T SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; + //T SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; + //T SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; + + // First 2 columns + __m128 Swp2A = _mm_shuffle_ps(m[2], m[2], _MM_SHUFFLE(0, 1, 1, 2)); + __m128 Swp3A = _mm_shuffle_ps(m[3], m[3], _MM_SHUFFLE(3, 2, 3, 3)); + __m128 MulA = _mm_mul_ps(Swp2A, Swp3A); + + // Second 2 columns + __m128 Swp2B = 
_mm_shuffle_ps(m[2], m[2], _MM_SHUFFLE(3, 2, 3, 3)); + __m128 Swp3B = _mm_shuffle_ps(m[3], m[3], _MM_SHUFFLE(0, 1, 1, 2)); + __m128 MulB = _mm_mul_ps(Swp2B, Swp3B); + + // Columns subtraction + __m128 SubE = _mm_sub_ps(MulA, MulB); + + // Last 2 rows + __m128 Swp2C = _mm_shuffle_ps(m[2], m[2], _MM_SHUFFLE(0, 0, 1, 2)); + __m128 Swp3C = _mm_shuffle_ps(m[3], m[3], _MM_SHUFFLE(1, 2, 0, 0)); + __m128 MulC = _mm_mul_ps(Swp2C, Swp3C); + __m128 SubF = _mm_sub_ps(_mm_movehl_ps(MulC, MulC), MulC); + + //vec<4, T, Q> DetCof( + // + (m[1][1] * SubFactor00 - m[1][2] * SubFactor01 + m[1][3] * SubFactor02), + // - (m[1][0] * SubFactor00 - m[1][2] * SubFactor03 + m[1][3] * SubFactor04), + // + (m[1][0] * SubFactor01 - m[1][1] * SubFactor03 + m[1][3] * SubFactor05), + // - (m[1][0] * SubFactor02 - m[1][1] * SubFactor04 + m[1][2] * SubFactor05)); + + __m128 SubFacA = _mm_shuffle_ps(SubE, SubE, _MM_SHUFFLE(2, 1, 0, 0)); + __m128 SwpFacA = _mm_shuffle_ps(m[1], m[1], _MM_SHUFFLE(0, 0, 0, 1)); + __m128 MulFacA = _mm_mul_ps(SwpFacA, SubFacA); + + __m128 SubTmpB = _mm_shuffle_ps(SubE, SubF, _MM_SHUFFLE(0, 0, 3, 1)); + __m128 SubFacB = _mm_shuffle_ps(SubTmpB, SubTmpB, _MM_SHUFFLE(3, 1, 1, 0));//SubF[0], SubE[3], SubE[3], SubE[1]; + __m128 SwpFacB = _mm_shuffle_ps(m[1], m[1], _MM_SHUFFLE(1, 1, 2, 2)); + __m128 MulFacB = _mm_mul_ps(SwpFacB, SubFacB); + + __m128 SubRes = _mm_sub_ps(MulFacA, MulFacB); + + __m128 SubTmpC = _mm_shuffle_ps(SubE, SubF, _MM_SHUFFLE(1, 0, 2, 2)); + __m128 SubFacC = _mm_shuffle_ps(SubTmpC, SubTmpC, _MM_SHUFFLE(3, 3, 2, 0)); + __m128 SwpFacC = _mm_shuffle_ps(m[1], m[1], _MM_SHUFFLE(2, 3, 3, 3)); + __m128 MulFacC = _mm_mul_ps(SwpFacC, SubFacC); + + __m128 AddRes = _mm_add_ps(SubRes, MulFacC); + __m128 DetCof = _mm_mul_ps(AddRes, _mm_setr_ps( 1.0f,-1.0f, 1.0f,-1.0f)); + + //return m[0][0] * DetCof[0] + // + m[0][1] * DetCof[1] + // + m[0][2] * DetCof[2] + // + m[0][3] * DetCof[3]; + + return glm_vec4_dot(m[0], DetCof); +} + +GLM_FUNC_QUALIFIER void glm_mat4_inverse(glm_vec4 const in[4], glm_vec4 out[4]) +{ + __m128 Fac0; + { + // valType SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; + // valType SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; + // valType SubFactor06 = m[1][2] * m[3][3] - m[3][2] * m[1][3]; + // valType SubFactor13 = m[1][2] * m[2][3] - m[2][2] * m[1][3]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac0 = _mm_sub_ps(Mul00, Mul01); + } + + __m128 Fac1; + { + // valType SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; + // valType SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; + // valType SubFactor07 = m[1][1] * m[3][3] - m[3][1] * m[1][3]; + // valType SubFactor14 = m[1][1] * m[2][3] - m[2][1] * m[1][3]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = 
_mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac1 = _mm_sub_ps(Mul00, Mul01); + } + + + __m128 Fac2; + { + // valType SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; + // valType SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; + // valType SubFactor08 = m[1][1] * m[3][2] - m[3][1] * m[1][2]; + // valType SubFactor15 = m[1][1] * m[2][2] - m[2][1] * m[1][2]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac2 = _mm_sub_ps(Mul00, Mul01); + } + + __m128 Fac3; + { + // valType SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; + // valType SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; + // valType SubFactor09 = m[1][0] * m[3][3] - m[3][0] * m[1][3]; + // valType SubFactor16 = m[1][0] * m[2][3] - m[2][0] * m[1][3]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac3 = _mm_sub_ps(Mul00, Mul01); + } + + __m128 Fac4; + { + // valType SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; + // valType SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; + // valType SubFactor10 = m[1][0] * m[3][2] - m[3][0] * m[1][2]; + // valType SubFactor17 = m[1][0] * m[2][2] - m[2][0] * m[1][2]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac4 = _mm_sub_ps(Mul00, Mul01); + } + + __m128 Fac5; + { + // valType SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; + // valType SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; + // valType SubFactor12 = m[1][0] * m[3][1] - m[3][0] * m[1][1]; + // valType SubFactor18 = m[1][0] * m[2][1] - m[2][0] * m[1][1]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac5 = 
_mm_sub_ps(Mul00, Mul01); + } + + __m128 SignA = _mm_set_ps( 1.0f,-1.0f, 1.0f,-1.0f); + __m128 SignB = _mm_set_ps(-1.0f, 1.0f,-1.0f, 1.0f); + + // m[1][0] + // m[0][0] + // m[0][0] + // m[0][0] + __m128 Temp0 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Vec0 = _mm_shuffle_ps(Temp0, Temp0, _MM_SHUFFLE(2, 2, 2, 0)); + + // m[1][1] + // m[0][1] + // m[0][1] + // m[0][1] + __m128 Temp1 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 Vec1 = _mm_shuffle_ps(Temp1, Temp1, _MM_SHUFFLE(2, 2, 2, 0)); + + // m[1][2] + // m[0][2] + // m[0][2] + // m[0][2] + __m128 Temp2 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 Vec2 = _mm_shuffle_ps(Temp2, Temp2, _MM_SHUFFLE(2, 2, 2, 0)); + + // m[1][3] + // m[0][3] + // m[0][3] + // m[0][3] + __m128 Temp3 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(3, 3, 3, 3)); + __m128 Vec3 = _mm_shuffle_ps(Temp3, Temp3, _MM_SHUFFLE(2, 2, 2, 0)); + + // col0 + // + (Vec1[0] * Fac0[0] - Vec2[0] * Fac1[0] + Vec3[0] * Fac2[0]), + // - (Vec1[1] * Fac0[1] - Vec2[1] * Fac1[1] + Vec3[1] * Fac2[1]), + // + (Vec1[2] * Fac0[2] - Vec2[2] * Fac1[2] + Vec3[2] * Fac2[2]), + // - (Vec1[3] * Fac0[3] - Vec2[3] * Fac1[3] + Vec3[3] * Fac2[3]), + __m128 Mul00 = _mm_mul_ps(Vec1, Fac0); + __m128 Mul01 = _mm_mul_ps(Vec2, Fac1); + __m128 Mul02 = _mm_mul_ps(Vec3, Fac2); + __m128 Sub00 = _mm_sub_ps(Mul00, Mul01); + __m128 Add00 = _mm_add_ps(Sub00, Mul02); + __m128 Inv0 = _mm_mul_ps(SignB, Add00); + + // col1 + // - (Vec0[0] * Fac0[0] - Vec2[0] * Fac3[0] + Vec3[0] * Fac4[0]), + // + (Vec0[0] * Fac0[1] - Vec2[1] * Fac3[1] + Vec3[1] * Fac4[1]), + // - (Vec0[0] * Fac0[2] - Vec2[2] * Fac3[2] + Vec3[2] * Fac4[2]), + // + (Vec0[0] * Fac0[3] - Vec2[3] * Fac3[3] + Vec3[3] * Fac4[3]), + __m128 Mul03 = _mm_mul_ps(Vec0, Fac0); + __m128 Mul04 = _mm_mul_ps(Vec2, Fac3); + __m128 Mul05 = _mm_mul_ps(Vec3, Fac4); + __m128 Sub01 = _mm_sub_ps(Mul03, Mul04); + __m128 Add01 = _mm_add_ps(Sub01, Mul05); + __m128 Inv1 = _mm_mul_ps(SignA, Add01); + + // col2 + // + (Vec0[0] * Fac1[0] - Vec1[0] * Fac3[0] + Vec3[0] * Fac5[0]), + // - (Vec0[0] * Fac1[1] - Vec1[1] * Fac3[1] + Vec3[1] * Fac5[1]), + // + (Vec0[0] * Fac1[2] - Vec1[2] * Fac3[2] + Vec3[2] * Fac5[2]), + // - (Vec0[0] * Fac1[3] - Vec1[3] * Fac3[3] + Vec3[3] * Fac5[3]), + __m128 Mul06 = _mm_mul_ps(Vec0, Fac1); + __m128 Mul07 = _mm_mul_ps(Vec1, Fac3); + __m128 Mul08 = _mm_mul_ps(Vec3, Fac5); + __m128 Sub02 = _mm_sub_ps(Mul06, Mul07); + __m128 Add02 = _mm_add_ps(Sub02, Mul08); + __m128 Inv2 = _mm_mul_ps(SignB, Add02); + + // col3 + // - (Vec1[0] * Fac2[0] - Vec1[0] * Fac4[0] + Vec2[0] * Fac5[0]), + // + (Vec1[0] * Fac2[1] - Vec1[1] * Fac4[1] + Vec2[1] * Fac5[1]), + // - (Vec1[0] * Fac2[2] - Vec1[2] * Fac4[2] + Vec2[2] * Fac5[2]), + // + (Vec1[0] * Fac2[3] - Vec1[3] * Fac4[3] + Vec2[3] * Fac5[3])); + __m128 Mul09 = _mm_mul_ps(Vec0, Fac2); + __m128 Mul10 = _mm_mul_ps(Vec1, Fac4); + __m128 Mul11 = _mm_mul_ps(Vec2, Fac5); + __m128 Sub03 = _mm_sub_ps(Mul09, Mul10); + __m128 Add03 = _mm_add_ps(Sub03, Mul11); + __m128 Inv3 = _mm_mul_ps(SignA, Add03); + + __m128 Row0 = _mm_shuffle_ps(Inv0, Inv1, _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Row1 = _mm_shuffle_ps(Inv2, Inv3, _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Row2 = _mm_shuffle_ps(Row0, Row1, _MM_SHUFFLE(2, 0, 2, 0)); + + // valType Determinant = m[0][0] * Inverse[0][0] + // + m[0][1] * Inverse[1][0] + // + m[0][2] * Inverse[2][0] + // + m[0][3] * Inverse[3][0]; + __m128 Det0 = glm_vec4_dot(in[0], Row2); + __m128 Rcp0 = _mm_div_ps(_mm_set1_ps(1.0f), Det0); + //__m128 Rcp0 = 
_mm_rcp_ps(Det0); + + // Inverse /= Determinant; + out[0] = _mm_mul_ps(Inv0, Rcp0); + out[1] = _mm_mul_ps(Inv1, Rcp0); + out[2] = _mm_mul_ps(Inv2, Rcp0); + out[3] = _mm_mul_ps(Inv3, Rcp0); +} + +GLM_FUNC_QUALIFIER void glm_mat4_inverse_lowp(glm_vec4 const in[4], glm_vec4 out[4]) +{ + __m128 Fac0; + { + // valType SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; + // valType SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; + // valType SubFactor06 = m[1][2] * m[3][3] - m[3][2] * m[1][3]; + // valType SubFactor13 = m[1][2] * m[2][3] - m[2][2] * m[1][3]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac0 = _mm_sub_ps(Mul00, Mul01); + } + + __m128 Fac1; + { + // valType SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; + // valType SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; + // valType SubFactor07 = m[1][1] * m[3][3] - m[3][1] * m[1][3]; + // valType SubFactor14 = m[1][1] * m[2][3] - m[2][1] * m[1][3]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac1 = _mm_sub_ps(Mul00, Mul01); + } + + + __m128 Fac2; + { + // valType SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; + // valType SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; + // valType SubFactor08 = m[1][1] * m[3][2] - m[3][1] * m[1][2]; + // valType SubFactor15 = m[1][1] * m[2][2] - m[2][1] * m[1][2]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac2 = _mm_sub_ps(Mul00, Mul01); + } + + __m128 Fac3; + { + // valType SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; + // valType SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; + // valType SubFactor09 = m[1][0] * m[3][3] - m[3][0] * m[1][3]; + // valType SubFactor16 = m[1][0] * m[2][3] - m[2][0] * m[1][3]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); + + 
__m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac3 = _mm_sub_ps(Mul00, Mul01); + } + + __m128 Fac4; + { + // valType SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; + // valType SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; + // valType SubFactor10 = m[1][0] * m[3][2] - m[3][0] * m[1][2]; + // valType SubFactor17 = m[1][0] * m[2][2] - m[2][0] * m[1][2]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac4 = _mm_sub_ps(Mul00, Mul01); + } + + __m128 Fac5; + { + // valType SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; + // valType SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; + // valType SubFactor12 = m[1][0] * m[3][1] - m[3][0] * m[1][1]; + // valType SubFactor18 = m[1][0] * m[2][1] - m[2][0] * m[1][1]; + + __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); + + __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); + __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); + + __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); + __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); + Fac5 = _mm_sub_ps(Mul00, Mul01); + } + + __m128 SignA = _mm_set_ps( 1.0f,-1.0f, 1.0f,-1.0f); + __m128 SignB = _mm_set_ps(-1.0f, 1.0f,-1.0f, 1.0f); + + // m[1][0] + // m[0][0] + // m[0][0] + // m[0][0] + __m128 Temp0 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Vec0 = _mm_shuffle_ps(Temp0, Temp0, _MM_SHUFFLE(2, 2, 2, 0)); + + // m[1][1] + // m[0][1] + // m[0][1] + // m[0][1] + __m128 Temp1 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(1, 1, 1, 1)); + __m128 Vec1 = _mm_shuffle_ps(Temp1, Temp1, _MM_SHUFFLE(2, 2, 2, 0)); + + // m[1][2] + // m[0][2] + // m[0][2] + // m[0][2] + __m128 Temp2 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(2, 2, 2, 2)); + __m128 Vec2 = _mm_shuffle_ps(Temp2, Temp2, _MM_SHUFFLE(2, 2, 2, 0)); + + // m[1][3] + // m[0][3] + // m[0][3] + // m[0][3] + __m128 Temp3 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(3, 3, 3, 3)); + __m128 Vec3 = _mm_shuffle_ps(Temp3, Temp3, _MM_SHUFFLE(2, 2, 2, 0)); + + // col0 + // + (Vec1[0] * Fac0[0] - Vec2[0] * Fac1[0] + Vec3[0] * Fac2[0]), + // - (Vec1[1] * Fac0[1] - Vec2[1] * Fac1[1] + Vec3[1] * Fac2[1]), + // + (Vec1[2] * Fac0[2] - Vec2[2] * Fac1[2] + Vec3[2] * Fac2[2]), + // - (Vec1[3] * Fac0[3] - Vec2[3] * Fac1[3] + Vec3[3] * Fac2[3]), + __m128 Mul00 = _mm_mul_ps(Vec1, Fac0); + __m128 Mul01 = _mm_mul_ps(Vec2, Fac1); + __m128 Mul02 = _mm_mul_ps(Vec3, Fac2); + __m128 Sub00 = _mm_sub_ps(Mul00, Mul01); + __m128 Add00 = _mm_add_ps(Sub00, Mul02); + __m128 Inv0 = _mm_mul_ps(SignB, Add00); + + // col1 + // - (Vec0[0] * Fac0[0] - Vec2[0] * Fac3[0] + Vec3[0] * Fac4[0]), + // + (Vec0[0] * Fac0[1] - Vec2[1] * Fac3[1] + Vec3[1] * Fac4[1]), + // - (Vec0[0] * Fac0[2] - Vec2[2] * Fac3[2] + Vec3[2] * Fac4[2]), + // + (Vec0[0] * Fac0[3] - Vec2[3] * Fac3[3] + Vec3[3] * Fac4[3]), + __m128 Mul03 = 
_mm_mul_ps(Vec0, Fac0); + __m128 Mul04 = _mm_mul_ps(Vec2, Fac3); + __m128 Mul05 = _mm_mul_ps(Vec3, Fac4); + __m128 Sub01 = _mm_sub_ps(Mul03, Mul04); + __m128 Add01 = _mm_add_ps(Sub01, Mul05); + __m128 Inv1 = _mm_mul_ps(SignA, Add01); + + // col2 + // + (Vec0[0] * Fac1[0] - Vec1[0] * Fac3[0] + Vec3[0] * Fac5[0]), + // - (Vec0[0] * Fac1[1] - Vec1[1] * Fac3[1] + Vec3[1] * Fac5[1]), + // + (Vec0[0] * Fac1[2] - Vec1[2] * Fac3[2] + Vec3[2] * Fac5[2]), + // - (Vec0[0] * Fac1[3] - Vec1[3] * Fac3[3] + Vec3[3] * Fac5[3]), + __m128 Mul06 = _mm_mul_ps(Vec0, Fac1); + __m128 Mul07 = _mm_mul_ps(Vec1, Fac3); + __m128 Mul08 = _mm_mul_ps(Vec3, Fac5); + __m128 Sub02 = _mm_sub_ps(Mul06, Mul07); + __m128 Add02 = _mm_add_ps(Sub02, Mul08); + __m128 Inv2 = _mm_mul_ps(SignB, Add02); + + // col3 + // - (Vec1[0] * Fac2[0] - Vec1[0] * Fac4[0] + Vec2[0] * Fac5[0]), + // + (Vec1[0] * Fac2[1] - Vec1[1] * Fac4[1] + Vec2[1] * Fac5[1]), + // - (Vec1[0] * Fac2[2] - Vec1[2] * Fac4[2] + Vec2[2] * Fac5[2]), + // + (Vec1[0] * Fac2[3] - Vec1[3] * Fac4[3] + Vec2[3] * Fac5[3])); + __m128 Mul09 = _mm_mul_ps(Vec0, Fac2); + __m128 Mul10 = _mm_mul_ps(Vec1, Fac4); + __m128 Mul11 = _mm_mul_ps(Vec2, Fac5); + __m128 Sub03 = _mm_sub_ps(Mul09, Mul10); + __m128 Add03 = _mm_add_ps(Sub03, Mul11); + __m128 Inv3 = _mm_mul_ps(SignA, Add03); + + __m128 Row0 = _mm_shuffle_ps(Inv0, Inv1, _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Row1 = _mm_shuffle_ps(Inv2, Inv3, _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Row2 = _mm_shuffle_ps(Row0, Row1, _MM_SHUFFLE(2, 0, 2, 0)); + + // valType Determinant = m[0][0] * Inverse[0][0] + // + m[0][1] * Inverse[1][0] + // + m[0][2] * Inverse[2][0] + // + m[0][3] * Inverse[3][0]; + __m128 Det0 = glm_vec4_dot(in[0], Row2); + __m128 Rcp0 = _mm_rcp_ps(Det0); + //__m128 Rcp0 = _mm_div_ps(one, Det0); + // Inverse /= Determinant; + out[0] = _mm_mul_ps(Inv0, Rcp0); + out[1] = _mm_mul_ps(Inv1, Rcp0); + out[2] = _mm_mul_ps(Inv2, Rcp0); + out[3] = _mm_mul_ps(Inv3, Rcp0); +} +/* +GLM_FUNC_QUALIFIER void glm_mat4_rotate(__m128 const in[4], float Angle, float const v[3], __m128 out[4]) +{ + float a = glm::radians(Angle); + float c = cos(a); + float s = sin(a); + + glm::vec4 AxisA(v[0], v[1], v[2], float(0)); + __m128 AxisB = _mm_set_ps(AxisA.w, AxisA.z, AxisA.y, AxisA.x); + __m128 AxisC = detail::sse_nrm_ps(AxisB); + + __m128 Cos0 = _mm_set_ss(c); + __m128 CosA = _mm_shuffle_ps(Cos0, Cos0, _MM_SHUFFLE(0, 0, 0, 0)); + __m128 Sin0 = _mm_set_ss(s); + __m128 SinA = _mm_shuffle_ps(Sin0, Sin0, _MM_SHUFFLE(0, 0, 0, 0)); + + // vec<3, T, Q> temp = (valType(1) - c) * axis; + __m128 Temp0 = _mm_sub_ps(one, CosA); + __m128 Temp1 = _mm_mul_ps(Temp0, AxisC); + + //Rotate[0][0] = c + temp[0] * axis[0]; + //Rotate[0][1] = 0 + temp[0] * axis[1] + s * axis[2]; + //Rotate[0][2] = 0 + temp[0] * axis[2] - s * axis[1]; + __m128 Axis0 = _mm_shuffle_ps(AxisC, AxisC, _MM_SHUFFLE(0, 0, 0, 0)); + __m128 TmpA0 = _mm_mul_ps(Axis0, AxisC); + __m128 CosA0 = _mm_shuffle_ps(Cos0, Cos0, _MM_SHUFFLE(1, 1, 1, 0)); + __m128 TmpA1 = _mm_add_ps(CosA0, TmpA0); + __m128 SinA0 = SinA;//_mm_set_ps(0.0f, s, -s, 0.0f); + __m128 TmpA2 = _mm_shuffle_ps(AxisC, AxisC, _MM_SHUFFLE(3, 1, 2, 3)); + __m128 TmpA3 = _mm_mul_ps(SinA0, TmpA2); + __m128 TmpA4 = _mm_add_ps(TmpA1, TmpA3); + + //Rotate[1][0] = 0 + temp[1] * axis[0] - s * axis[2]; + //Rotate[1][1] = c + temp[1] * axis[1]; + //Rotate[1][2] = 0 + temp[1] * axis[2] + s * axis[0]; + __m128 Axis1 = _mm_shuffle_ps(AxisC, AxisC, _MM_SHUFFLE(1, 1, 1, 1)); + __m128 TmpB0 = _mm_mul_ps(Axis1, AxisC); + __m128 CosA1 = _mm_shuffle_ps(Cos0, Cos0, 
_MM_SHUFFLE(1, 1, 0, 1)); + __m128 TmpB1 = _mm_add_ps(CosA1, TmpB0); + __m128 SinB0 = SinA;//_mm_set_ps(-s, 0.0f, s, 0.0f); + __m128 TmpB2 = _mm_shuffle_ps(AxisC, AxisC, _MM_SHUFFLE(3, 0, 3, 2)); + __m128 TmpB3 = _mm_mul_ps(SinA0, TmpB2); + __m128 TmpB4 = _mm_add_ps(TmpB1, TmpB3); + + //Rotate[2][0] = 0 + temp[2] * axis[0] + s * axis[1]; + //Rotate[2][1] = 0 + temp[2] * axis[1] - s * axis[0]; + //Rotate[2][2] = c + temp[2] * axis[2]; + __m128 Axis2 = _mm_shuffle_ps(AxisC, AxisC, _MM_SHUFFLE(2, 2, 2, 2)); + __m128 TmpC0 = _mm_mul_ps(Axis2, AxisC); + __m128 CosA2 = _mm_shuffle_ps(Cos0, Cos0, _MM_SHUFFLE(1, 0, 1, 1)); + __m128 TmpC1 = _mm_add_ps(CosA2, TmpC0); + __m128 SinC0 = SinA;//_mm_set_ps(s, -s, 0.0f, 0.0f); + __m128 TmpC2 = _mm_shuffle_ps(AxisC, AxisC, _MM_SHUFFLE(3, 3, 0, 1)); + __m128 TmpC3 = _mm_mul_ps(SinA0, TmpC2); + __m128 TmpC4 = _mm_add_ps(TmpC1, TmpC3); + + __m128 Result[4]; + Result[0] = TmpA4; + Result[1] = TmpB4; + Result[2] = TmpC4; + Result[3] = _mm_set_ps(1, 0, 0, 0); + + //mat<4, 4, valType> Result; + //Result[0] = m[0] * Rotate[0][0] + m[1] * Rotate[0][1] + m[2] * Rotate[0][2]; + //Result[1] = m[0] * Rotate[1][0] + m[1] * Rotate[1][1] + m[2] * Rotate[1][2]; + //Result[2] = m[0] * Rotate[2][0] + m[1] * Rotate[2][1] + m[2] * Rotate[2][2]; + //Result[3] = m[3]; + //return Result; + sse_mul_ps(in, Result, out); +} +*/ +GLM_FUNC_QUALIFIER void glm_mat4_outerProduct(__m128 const& c, __m128 const& r, __m128 out[4]) +{ + out[0] = _mm_mul_ps(c, _mm_shuffle_ps(r, r, _MM_SHUFFLE(0, 0, 0, 0))); + out[1] = _mm_mul_ps(c, _mm_shuffle_ps(r, r, _MM_SHUFFLE(1, 1, 1, 1))); + out[2] = _mm_mul_ps(c, _mm_shuffle_ps(r, r, _MM_SHUFFLE(2, 2, 2, 2))); + out[3] = _mm_mul_ps(c, _mm_shuffle_ps(r, r, _MM_SHUFFLE(3, 3, 3, 3))); +} + +#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/MacroLibX/third_party/glm/simd/neon.h b/MacroLibX/third_party/glm/simd/neon.h new file mode 100644 index 0000000..6c38b06 --- /dev/null +++ b/MacroLibX/third_party/glm/simd/neon.h @@ -0,0 +1,155 @@ +/// @ref simd_neon +/// @file glm/simd/neon.h + +#pragma once + +#if GLM_ARCH & GLM_ARCH_NEON_BIT +#include + +namespace glm { + namespace neon { + static float32x4_t dupq_lane(float32x4_t vsrc, int lane) { + switch(lane) { +#if GLM_ARCH & GLM_ARCH_ARMV8_BIT + case 0: return vdupq_laneq_f32(vsrc, 0); + case 1: return vdupq_laneq_f32(vsrc, 1); + case 2: return vdupq_laneq_f32(vsrc, 2); + case 3: return vdupq_laneq_f32(vsrc, 3); +#else + case 0: return vdupq_n_f32(vgetq_lane_f32(vsrc, 0)); + case 1: return vdupq_n_f32(vgetq_lane_f32(vsrc, 1)); + case 2: return vdupq_n_f32(vgetq_lane_f32(vsrc, 2)); + case 3: return vdupq_n_f32(vgetq_lane_f32(vsrc, 3)); +#endif + } + assert(!"Unreachable code executed!"); + return vdupq_n_f32(0.0f); + } + + static float32x2_t dup_lane(float32x4_t vsrc, int lane) { + switch(lane) { +#if GLM_ARCH & GLM_ARCH_ARMV8_BIT + case 0: return vdup_laneq_f32(vsrc, 0); + case 1: return vdup_laneq_f32(vsrc, 1); + case 2: return vdup_laneq_f32(vsrc, 2); + case 3: return vdup_laneq_f32(vsrc, 3); +#else + case 0: return vdup_n_f32(vgetq_lane_f32(vsrc, 0)); + case 1: return vdup_n_f32(vgetq_lane_f32(vsrc, 1)); + case 2: return vdup_n_f32(vgetq_lane_f32(vsrc, 2)); + case 3: return vdup_n_f32(vgetq_lane_f32(vsrc, 3)); +#endif + } + assert(!"Unreachable code executed!"); + return vdup_n_f32(0.0f); + } + + static float32x4_t copy_lane(float32x4_t vdst, int dlane, float32x4_t vsrc, int slane) { +#if GLM_ARCH & GLM_ARCH_ARMV8_BIT + switch(dlane) { + case 0: + switch(slane) { + case 0: return vcopyq_laneq_f32(vdst, 0, 
vsrc, 0); + case 1: return vcopyq_laneq_f32(vdst, 0, vsrc, 1); + case 2: return vcopyq_laneq_f32(vdst, 0, vsrc, 2); + case 3: return vcopyq_laneq_f32(vdst, 0, vsrc, 3); + } + assert(!"Unreachable code executed!"); + case 1: + switch(slane) { + case 0: return vcopyq_laneq_f32(vdst, 1, vsrc, 0); + case 1: return vcopyq_laneq_f32(vdst, 1, vsrc, 1); + case 2: return vcopyq_laneq_f32(vdst, 1, vsrc, 2); + case 3: return vcopyq_laneq_f32(vdst, 1, vsrc, 3); + } + assert(!"Unreachable code executed!"); + case 2: + switch(slane) { + case 0: return vcopyq_laneq_f32(vdst, 2, vsrc, 0); + case 1: return vcopyq_laneq_f32(vdst, 2, vsrc, 1); + case 2: return vcopyq_laneq_f32(vdst, 2, vsrc, 2); + case 3: return vcopyq_laneq_f32(vdst, 2, vsrc, 3); + } + assert(!"Unreachable code executed!"); + case 3: + switch(slane) { + case 0: return vcopyq_laneq_f32(vdst, 3, vsrc, 0); + case 1: return vcopyq_laneq_f32(vdst, 3, vsrc, 1); + case 2: return vcopyq_laneq_f32(vdst, 3, vsrc, 2); + case 3: return vcopyq_laneq_f32(vdst, 3, vsrc, 3); + } + assert(!"Unreachable code executed!"); + } +#else + + float l; + switch(slane) { + case 0: l = vgetq_lane_f32(vsrc, 0); break; + case 1: l = vgetq_lane_f32(vsrc, 1); break; + case 2: l = vgetq_lane_f32(vsrc, 2); break; + case 3: l = vgetq_lane_f32(vsrc, 3); break; + default: + assert(!"Unreachable code executed!"); + } + switch(dlane) { + case 0: return vsetq_lane_f32(l, vdst, 0); + case 1: return vsetq_lane_f32(l, vdst, 1); + case 2: return vsetq_lane_f32(l, vdst, 2); + case 3: return vsetq_lane_f32(l, vdst, 3); + } +#endif + assert(!"Unreachable code executed!"); + return vdupq_n_f32(0.0f); + } + + static float32x4_t mul_lane(float32x4_t v, float32x4_t vlane, int lane) { +#if GLM_ARCH & GLM_ARCH_ARMV8_BIT + switch(lane) { + case 0: return vmulq_laneq_f32(v, vlane, 0); break; + case 1: return vmulq_laneq_f32(v, vlane, 1); break; + case 2: return vmulq_laneq_f32(v, vlane, 2); break; + case 3: return vmulq_laneq_f32(v, vlane, 3); break; + default: + assert(!"Unreachable code executed!"); + } + assert(!"Unreachable code executed!"); + return vdupq_n_f32(0.0f); +#else + return vmulq_f32(v, dupq_lane(vlane, lane)); +#endif + } + + static float32x4_t madd_lane(float32x4_t acc, float32x4_t v, float32x4_t vlane, int lane) { +#if GLM_ARCH & GLM_ARCH_ARMV8_BIT +#ifdef GLM_CONFIG_FORCE_FMA +# define FMADD_LANE(acc, x, y, L) do { asm volatile ("fmla %0.4s, %1.4s, %2.4s" : "+w"(acc) : "w"(x), "w"(dup_lane(y, L))); } while(0) +#else +# define FMADD_LANE(acc, x, y, L) do { acc = vmlaq_laneq_f32(acc, x, y, L); } while(0) +#endif + + switch(lane) { + case 0: + FMADD_LANE(acc, v, vlane, 0); + return acc; + case 1: + FMADD_LANE(acc, v, vlane, 1); + return acc; + case 2: + FMADD_LANE(acc, v, vlane, 2); + return acc; + case 3: + FMADD_LANE(acc, v, vlane, 3); + return acc; + default: + assert(!"Unreachable code executed!"); + } + assert(!"Unreachable code executed!"); + return vdupq_n_f32(0.0f); +# undef FMADD_LANE +#else + return vaddq_f32(acc, vmulq_f32(v, dupq_lane(vlane, lane))); +#endif + } + } //namespace neon +} // namespace glm +#endif // GLM_ARCH & GLM_ARCH_NEON_BIT diff --git a/MacroLibX/third_party/glm/simd/packing.h b/MacroLibX/third_party/glm/simd/packing.h new file mode 100644 index 0000000..609163e --- /dev/null +++ b/MacroLibX/third_party/glm/simd/packing.h @@ -0,0 +1,8 @@ +/// @ref simd +/// @file glm/simd/packing.h + +#pragma once + +#if GLM_ARCH & GLM_ARCH_SSE2_BIT + +#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/MacroLibX/third_party/glm/simd/platform.h 
b/MacroLibX/third_party/glm/simd/platform.h new file mode 100644 index 0000000..ad25cc1 --- /dev/null +++ b/MacroLibX/third_party/glm/simd/platform.h @@ -0,0 +1,398 @@ +#pragma once + +/////////////////////////////////////////////////////////////////////////////////// +// Platform + +#define GLM_PLATFORM_UNKNOWN 0x00000000 +#define GLM_PLATFORM_WINDOWS 0x00010000 +#define GLM_PLATFORM_LINUX 0x00020000 +#define GLM_PLATFORM_APPLE 0x00040000 +//#define GLM_PLATFORM_IOS 0x00080000 +#define GLM_PLATFORM_ANDROID 0x00100000 +#define GLM_PLATFORM_CHROME_NACL 0x00200000 +#define GLM_PLATFORM_UNIX 0x00400000 +#define GLM_PLATFORM_QNXNTO 0x00800000 +#define GLM_PLATFORM_WINCE 0x01000000 +#define GLM_PLATFORM_CYGWIN 0x02000000 + +#ifdef GLM_FORCE_PLATFORM_UNKNOWN +# define GLM_PLATFORM GLM_PLATFORM_UNKNOWN +#elif defined(__CYGWIN__) +# define GLM_PLATFORM GLM_PLATFORM_CYGWIN +#elif defined(__QNXNTO__) +# define GLM_PLATFORM GLM_PLATFORM_QNXNTO +#elif defined(__APPLE__) +# define GLM_PLATFORM GLM_PLATFORM_APPLE +#elif defined(WINCE) +# define GLM_PLATFORM GLM_PLATFORM_WINCE +#elif defined(_WIN32) +# define GLM_PLATFORM GLM_PLATFORM_WINDOWS +#elif defined(__native_client__) +# define GLM_PLATFORM GLM_PLATFORM_CHROME_NACL +#elif defined(__ANDROID__) +# define GLM_PLATFORM GLM_PLATFORM_ANDROID +#elif defined(__linux) +# define GLM_PLATFORM GLM_PLATFORM_LINUX +#elif defined(__unix) +# define GLM_PLATFORM GLM_PLATFORM_UNIX +#else +# define GLM_PLATFORM GLM_PLATFORM_UNKNOWN +#endif// + +/////////////////////////////////////////////////////////////////////////////////// +// Compiler + +#define GLM_COMPILER_UNKNOWN 0x00000000 + +// Intel +#define GLM_COMPILER_INTEL 0x00100000 +#define GLM_COMPILER_INTEL14 0x00100040 +#define GLM_COMPILER_INTEL15 0x00100050 +#define GLM_COMPILER_INTEL16 0x00100060 +#define GLM_COMPILER_INTEL17 0x00100070 + +// Visual C++ defines +#define GLM_COMPILER_VC 0x01000000 +#define GLM_COMPILER_VC12 0x01000001 +#define GLM_COMPILER_VC14 0x01000002 +#define GLM_COMPILER_VC15 0x01000003 +#define GLM_COMPILER_VC15_3 0x01000004 +#define GLM_COMPILER_VC15_5 0x01000005 +#define GLM_COMPILER_VC15_6 0x01000006 +#define GLM_COMPILER_VC15_7 0x01000007 +#define GLM_COMPILER_VC15_8 0x01000008 +#define GLM_COMPILER_VC15_9 0x01000009 +#define GLM_COMPILER_VC16 0x0100000A + +// GCC defines +#define GLM_COMPILER_GCC 0x02000000 +#define GLM_COMPILER_GCC46 0x020000D0 +#define GLM_COMPILER_GCC47 0x020000E0 +#define GLM_COMPILER_GCC48 0x020000F0 +#define GLM_COMPILER_GCC49 0x02000100 +#define GLM_COMPILER_GCC5 0x02000200 +#define GLM_COMPILER_GCC6 0x02000300 +#define GLM_COMPILER_GCC7 0x02000400 +#define GLM_COMPILER_GCC8 0x02000500 + +// CUDA +#define GLM_COMPILER_CUDA 0x10000000 +#define GLM_COMPILER_CUDA75 0x10000001 +#define GLM_COMPILER_CUDA80 0x10000002 +#define GLM_COMPILER_CUDA90 0x10000004 + +// SYCL +#define GLM_COMPILER_SYCL 0x00300000 + +// Clang +#define GLM_COMPILER_CLANG 0x20000000 +#define GLM_COMPILER_CLANG34 0x20000050 +#define GLM_COMPILER_CLANG35 0x20000060 +#define GLM_COMPILER_CLANG36 0x20000070 +#define GLM_COMPILER_CLANG37 0x20000080 +#define GLM_COMPILER_CLANG38 0x20000090 +#define GLM_COMPILER_CLANG39 0x200000A0 +#define GLM_COMPILER_CLANG40 0x200000B0 +#define GLM_COMPILER_CLANG41 0x200000C0 +#define GLM_COMPILER_CLANG42 0x200000D0 + +// Build model +#define GLM_MODEL_32 0x00000010 +#define GLM_MODEL_64 0x00000020 + +// Force generic C++ compiler +#ifdef GLM_FORCE_COMPILER_UNKNOWN +# define GLM_COMPILER GLM_COMPILER_UNKNOWN + +#elif defined(__INTEL_COMPILER) +# if 
__INTEL_COMPILER >= 1700
+#		define GLM_COMPILER GLM_COMPILER_INTEL17
+#	elif __INTEL_COMPILER >= 1600
+#		define GLM_COMPILER GLM_COMPILER_INTEL16
+#	elif __INTEL_COMPILER >= 1500
+#		define GLM_COMPILER GLM_COMPILER_INTEL15
+#	elif __INTEL_COMPILER >= 1400
+#		define GLM_COMPILER GLM_COMPILER_INTEL14
+#	elif __INTEL_COMPILER < 1400
+#		error "GLM requires ICC 2013 SP1 or newer"
+#	endif
+
+// CUDA
+#elif defined(__CUDACC__)
+#	if !defined(CUDA_VERSION) && !defined(GLM_FORCE_CUDA)
+#		include <cuda.h> // make sure version is defined since nvcc does not define it itself!
+#	endif
+#	if CUDA_VERSION >= 8000
+#		define GLM_COMPILER GLM_COMPILER_CUDA80
+#	elif CUDA_VERSION >= 7500
+#		define GLM_COMPILER GLM_COMPILER_CUDA75
+#	elif CUDA_VERSION >= 7000
+#		define GLM_COMPILER GLM_COMPILER_CUDA70
+#	elif CUDA_VERSION < 7000
+#		error "GLM requires CUDA 7.0 or higher"
+#	endif
+
+// SYCL
+#elif defined(__SYCL_DEVICE_ONLY__)
+#	define GLM_COMPILER GLM_COMPILER_SYCL
+
+// Clang
+#elif defined(__clang__)
+#	if defined(__apple_build_version__)
+#		if (__clang_major__ < 6)
+#			error "GLM requires Clang 3.4 / Apple Clang 6.0 or higher"
+#		elif __clang_major__ == 6 && __clang_minor__ == 0
+#			define GLM_COMPILER GLM_COMPILER_CLANG35
+#		elif __clang_major__ == 6 && __clang_minor__ >= 1
+#			define GLM_COMPILER GLM_COMPILER_CLANG36
+#		elif __clang_major__ >= 7
+#			define GLM_COMPILER GLM_COMPILER_CLANG37
+#		endif
+#	else
+#		if ((__clang_major__ == 3) && (__clang_minor__ < 4)) || (__clang_major__ < 3)
+#			error "GLM requires Clang 3.4 or higher"
+#		elif __clang_major__ == 3 && __clang_minor__ == 4
+#			define GLM_COMPILER GLM_COMPILER_CLANG34
+#		elif __clang_major__ == 3 && __clang_minor__ == 5
+#			define GLM_COMPILER GLM_COMPILER_CLANG35
+#		elif __clang_major__ == 3 && __clang_minor__ == 6
+#			define GLM_COMPILER GLM_COMPILER_CLANG36
+#		elif __clang_major__ == 3 && __clang_minor__ == 7
+#			define GLM_COMPILER GLM_COMPILER_CLANG37
+#		elif __clang_major__ == 3 && __clang_minor__ == 8
+#			define GLM_COMPILER GLM_COMPILER_CLANG38
+#		elif __clang_major__ == 3 && __clang_minor__ >= 9
+#			define GLM_COMPILER GLM_COMPILER_CLANG39
+#		elif __clang_major__ == 4 && __clang_minor__ == 0
+#			define GLM_COMPILER GLM_COMPILER_CLANG40
+#		elif __clang_major__ == 4 && __clang_minor__ == 1
+#			define GLM_COMPILER GLM_COMPILER_CLANG41
+#		elif __clang_major__ == 4 && __clang_minor__ >= 2
+#			define GLM_COMPILER GLM_COMPILER_CLANG42
+#		elif __clang_major__ >= 4
+#			define GLM_COMPILER GLM_COMPILER_CLANG42
+#		endif
+#	endif
+
+// Visual C++
+#elif defined(_MSC_VER)
+#	if _MSC_VER >= 1920
+#		define GLM_COMPILER GLM_COMPILER_VC16
+#	elif _MSC_VER >= 1916
+#		define GLM_COMPILER GLM_COMPILER_VC15_9
+#	elif _MSC_VER >= 1915
+#		define GLM_COMPILER GLM_COMPILER_VC15_8
+#	elif _MSC_VER >= 1914
+#		define GLM_COMPILER GLM_COMPILER_VC15_7
+#	elif _MSC_VER >= 1913
+#		define GLM_COMPILER GLM_COMPILER_VC15_6
+#	elif _MSC_VER >= 1912
+#		define GLM_COMPILER GLM_COMPILER_VC15_5
+#	elif _MSC_VER >= 1911
+#		define GLM_COMPILER GLM_COMPILER_VC15_3
+#	elif _MSC_VER >= 1910
+#		define GLM_COMPILER GLM_COMPILER_VC15
+#	elif _MSC_VER >= 1900
+#		define GLM_COMPILER GLM_COMPILER_VC14
+#	elif _MSC_VER >= 1800
+#		define GLM_COMPILER GLM_COMPILER_VC12
+#	elif _MSC_VER < 1800
+#		error "GLM requires Visual C++ 12 - 2013 or higher"
+#	endif//_MSC_VER
+
+// G++
+#elif defined(__GNUC__) || defined(__MINGW32__)
+#	if __GNUC__ >= 8
+#		define GLM_COMPILER GLM_COMPILER_GCC8
+#	elif __GNUC__ >= 7
+#		define GLM_COMPILER GLM_COMPILER_GCC7
+#	elif __GNUC__ >= 6
+#		define GLM_COMPILER GLM_COMPILER_GCC6
+#	
elif __GNUC__ >= 5 +# define GLM_COMPILER GLM_COMPILER_GCC5 +# elif __GNUC__ == 4 && __GNUC_MINOR__ >= 9 +# define GLM_COMPILER GLM_COMPILER_GCC49 +# elif __GNUC__ == 4 && __GNUC_MINOR__ >= 8 +# define GLM_COMPILER GLM_COMPILER_GCC48 +# elif __GNUC__ == 4 && __GNUC_MINOR__ >= 7 +# define GLM_COMPILER GLM_COMPILER_GCC47 +# elif __GNUC__ == 4 && __GNUC_MINOR__ >= 6 +# define GLM_COMPILER GLM_COMPILER_GCC46 +# elif ((__GNUC__ == 4) && (__GNUC_MINOR__ < 6)) || (__GNUC__ < 4) +# error "GLM requires GCC 4.6 or higher" +# endif + +#else +# define GLM_COMPILER GLM_COMPILER_UNKNOWN +#endif + +#ifndef GLM_COMPILER +# error "GLM_COMPILER undefined, your compiler may not be supported by GLM. Add #define GLM_COMPILER 0 to ignore this message." +#endif//GLM_COMPILER + +/////////////////////////////////////////////////////////////////////////////////// +// Instruction sets + +// User defines: GLM_FORCE_PURE GLM_FORCE_INTRINSICS GLM_FORCE_SSE2 GLM_FORCE_SSE3 GLM_FORCE_AVX GLM_FORCE_AVX2 GLM_FORCE_AVX2 + +#define GLM_ARCH_MIPS_BIT (0x10000000) +#define GLM_ARCH_PPC_BIT (0x20000000) +#define GLM_ARCH_ARM_BIT (0x40000000) +#define GLM_ARCH_ARMV8_BIT (0x01000000) +#define GLM_ARCH_X86_BIT (0x80000000) + +#define GLM_ARCH_SIMD_BIT (0x00001000) + +#define GLM_ARCH_NEON_BIT (0x00000001) +#define GLM_ARCH_SSE_BIT (0x00000002) +#define GLM_ARCH_SSE2_BIT (0x00000004) +#define GLM_ARCH_SSE3_BIT (0x00000008) +#define GLM_ARCH_SSSE3_BIT (0x00000010) +#define GLM_ARCH_SSE41_BIT (0x00000020) +#define GLM_ARCH_SSE42_BIT (0x00000040) +#define GLM_ARCH_AVX_BIT (0x00000080) +#define GLM_ARCH_AVX2_BIT (0x00000100) + +#define GLM_ARCH_UNKNOWN (0) +#define GLM_ARCH_X86 (GLM_ARCH_X86_BIT) +#define GLM_ARCH_SSE (GLM_ARCH_SSE_BIT | GLM_ARCH_SIMD_BIT | GLM_ARCH_X86) +#define GLM_ARCH_SSE2 (GLM_ARCH_SSE2_BIT | GLM_ARCH_SSE) +#define GLM_ARCH_SSE3 (GLM_ARCH_SSE3_BIT | GLM_ARCH_SSE2) +#define GLM_ARCH_SSSE3 (GLM_ARCH_SSSE3_BIT | GLM_ARCH_SSE3) +#define GLM_ARCH_SSE41 (GLM_ARCH_SSE41_BIT | GLM_ARCH_SSSE3) +#define GLM_ARCH_SSE42 (GLM_ARCH_SSE42_BIT | GLM_ARCH_SSE41) +#define GLM_ARCH_AVX (GLM_ARCH_AVX_BIT | GLM_ARCH_SSE42) +#define GLM_ARCH_AVX2 (GLM_ARCH_AVX2_BIT | GLM_ARCH_AVX) +#define GLM_ARCH_ARM (GLM_ARCH_ARM_BIT) +#define GLM_ARCH_ARMV8 (GLM_ARCH_NEON_BIT | GLM_ARCH_SIMD_BIT | GLM_ARCH_ARM | GLM_ARCH_ARMV8_BIT) +#define GLM_ARCH_NEON (GLM_ARCH_NEON_BIT | GLM_ARCH_SIMD_BIT | GLM_ARCH_ARM) +#define GLM_ARCH_MIPS (GLM_ARCH_MIPS_BIT) +#define GLM_ARCH_PPC (GLM_ARCH_PPC_BIT) + +#if defined(GLM_FORCE_ARCH_UNKNOWN) || defined(GLM_FORCE_PURE) +# define GLM_ARCH GLM_ARCH_UNKNOWN +#elif defined(GLM_FORCE_NEON) +# if __ARM_ARCH >= 8 +# define GLM_ARCH (GLM_ARCH_ARMV8) +# else +# define GLM_ARCH (GLM_ARCH_NEON) +# endif +# define GLM_FORCE_INTRINSICS +#elif defined(GLM_FORCE_AVX2) +# define GLM_ARCH (GLM_ARCH_AVX2) +# define GLM_FORCE_INTRINSICS +#elif defined(GLM_FORCE_AVX) +# define GLM_ARCH (GLM_ARCH_AVX) +# define GLM_FORCE_INTRINSICS +#elif defined(GLM_FORCE_SSE42) +# define GLM_ARCH (GLM_ARCH_SSE42) +# define GLM_FORCE_INTRINSICS +#elif defined(GLM_FORCE_SSE41) +# define GLM_ARCH (GLM_ARCH_SSE41) +# define GLM_FORCE_INTRINSICS +#elif defined(GLM_FORCE_SSSE3) +# define GLM_ARCH (GLM_ARCH_SSSE3) +# define GLM_FORCE_INTRINSICS +#elif defined(GLM_FORCE_SSE3) +# define GLM_ARCH (GLM_ARCH_SSE3) +# define GLM_FORCE_INTRINSICS +#elif defined(GLM_FORCE_SSE2) +# define GLM_ARCH (GLM_ARCH_SSE2) +# define GLM_FORCE_INTRINSICS +#elif defined(GLM_FORCE_SSE) +# define GLM_ARCH (GLM_ARCH_SSE) +# define GLM_FORCE_INTRINSICS +#elif 
defined(GLM_FORCE_INTRINSICS) && !defined(GLM_FORCE_XYZW_ONLY)
+#	if defined(__AVX2__)
+#		define GLM_ARCH (GLM_ARCH_AVX2)
+#	elif defined(__AVX__)
+#		define GLM_ARCH (GLM_ARCH_AVX)
+#	elif defined(__SSE4_2__)
+#		define GLM_ARCH (GLM_ARCH_SSE42)
+#	elif defined(__SSE4_1__)
+#		define GLM_ARCH (GLM_ARCH_SSE41)
+#	elif defined(__SSSE3__)
+#		define GLM_ARCH (GLM_ARCH_SSSE3)
+#	elif defined(__SSE3__)
+#		define GLM_ARCH (GLM_ARCH_SSE3)
+#	elif defined(__SSE2__) || defined(__x86_64__) || defined(_M_X64) || defined(_M_IX86_FP)
+#		define GLM_ARCH (GLM_ARCH_SSE2)
+#	elif defined(__i386__)
+#		define GLM_ARCH (GLM_ARCH_X86)
+#	elif defined(__ARM_ARCH) && (__ARM_ARCH >= 8)
+#		define GLM_ARCH (GLM_ARCH_ARMV8)
+#	elif defined(__ARM_NEON)
+#		define GLM_ARCH (GLM_ARCH_ARM | GLM_ARCH_NEON)
+#	elif defined(__arm__ ) || defined(_M_ARM)
+#		define GLM_ARCH (GLM_ARCH_ARM)
+#	elif defined(__mips__ )
+#		define GLM_ARCH (GLM_ARCH_MIPS)
+#	elif defined(__powerpc__ ) || defined(_M_PPC)
+#		define GLM_ARCH (GLM_ARCH_PPC)
+#	else
+#		define GLM_ARCH (GLM_ARCH_UNKNOWN)
+#	endif
+#else
+#	if defined(__x86_64__) || defined(_M_X64) || defined(_M_IX86) || defined(__i386__)
+#		define GLM_ARCH (GLM_ARCH_X86)
+#	elif defined(__arm__) || defined(_M_ARM)
+#		define GLM_ARCH (GLM_ARCH_ARM)
+#	elif defined(__powerpc__) || defined(_M_PPC)
+#		define GLM_ARCH (GLM_ARCH_PPC)
+#	elif defined(__mips__)
+#		define GLM_ARCH (GLM_ARCH_MIPS)
+#	else
+#		define GLM_ARCH (GLM_ARCH_UNKNOWN)
+#	endif
+#endif
+
+#if GLM_ARCH & GLM_ARCH_AVX2_BIT
+#	include <immintrin.h>
+#elif GLM_ARCH & GLM_ARCH_AVX_BIT
+#	include <immintrin.h>
+#elif GLM_ARCH & GLM_ARCH_SSE42_BIT
+#	if GLM_COMPILER & GLM_COMPILER_CLANG
+#		include <popcntintrin.h>
+#	endif
+#	include <nmmintrin.h>
+#elif GLM_ARCH & GLM_ARCH_SSE41_BIT
+#	include <smmintrin.h>
+#elif GLM_ARCH & GLM_ARCH_SSSE3_BIT
+#	include <tmmintrin.h>
+#elif GLM_ARCH & GLM_ARCH_SSE3_BIT
+#	include <pmmintrin.h>
+#elif GLM_ARCH & GLM_ARCH_SSE2_BIT
+#	include <emmintrin.h>
+#elif GLM_ARCH & GLM_ARCH_NEON_BIT
+#	include "neon.h"
+#endif//GLM_ARCH
+
+#if GLM_ARCH & GLM_ARCH_SSE2_BIT
+	typedef __m128 glm_f32vec4;
+	typedef __m128i glm_i32vec4;
+	typedef __m128i glm_u32vec4;
+	typedef __m128d glm_f64vec2;
+	typedef __m128i glm_i64vec2;
+	typedef __m128i glm_u64vec2;
+
+	typedef glm_f32vec4 glm_vec4;
+	typedef glm_i32vec4 glm_ivec4;
+	typedef glm_u32vec4 glm_uvec4;
+	typedef glm_f64vec2 glm_dvec2;
+#endif
+
+#if GLM_ARCH & GLM_ARCH_AVX_BIT
+	typedef __m256d glm_f64vec4;
+	typedef glm_f64vec4 glm_dvec4;
+#endif
+
+#if GLM_ARCH & GLM_ARCH_AVX2_BIT
+	typedef __m256i glm_i64vec4;
+	typedef __m256i glm_u64vec4;
+#endif
+
+#if GLM_ARCH & GLM_ARCH_NEON_BIT
+	typedef float32x4_t glm_f32vec4;
+	typedef int32x4_t glm_i32vec4;
+	typedef uint32x4_t glm_u32vec4;
+#endif
diff --git a/MacroLibX/third_party/glm/simd/trigonometric.h b/MacroLibX/third_party/glm/simd/trigonometric.h
new file mode 100644
index 0000000..739b796
--- /dev/null
+++ b/MacroLibX/third_party/glm/simd/trigonometric.h
@@ -0,0 +1,9 @@
+/// @ref simd
+/// @file glm/simd/trigonometric.h
+
+#pragma once
+
+#if GLM_ARCH & GLM_ARCH_SSE2_BIT
+
+#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT
+
diff --git a/MacroLibX/third_party/glm/simd/vector_relational.h b/MacroLibX/third_party/glm/simd/vector_relational.h
new file mode 100644
index 0000000..f7385e9
--- /dev/null
+++ b/MacroLibX/third_party/glm/simd/vector_relational.h
@@ -0,0 +1,8 @@
+/// @ref simd
+/// @file glm/simd/vector_relational.h
+
+#pragma once
+
+#if GLM_ARCH & GLM_ARCH_SSE2_BIT
+
+#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT
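(Editor's aside, not part of the vendored sources: the GLM_FORCE_* switches and
GLM_ARCH bits defined in platform.h above are meant to be driven from client
code. A minimal sketch, assuming the glm/ directory is on the include path and
an x86 target; the message strings are illustrative:)

    // Force the SSE2 code path before any GLM header is included.
    #define GLM_FORCE_SSE2
    #include <cstdio>
    #include <glm/simd/platform.h>

    int main()
    {
    #if GLM_ARCH & GLM_ARCH_SSE2_BIT
        std::printf("GLM selected an SSE2 (or wider) code path\n");
    #else
        std::printf("GLM fell back to the pure C++ code path\n");
    #endif
        return 0;
    }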
diff --git a/MacroLibX/third_party/glm/trigonometric.hpp b/MacroLibX/third_party/glm/trigonometric.hpp
new file mode 100644
index 0000000..fcf07f8
--- /dev/null
+++ b/MacroLibX/third_party/glm/trigonometric.hpp
@@ -0,0 +1,210 @@
+/// @ref core
+/// @file glm/trigonometric.hpp
+///
+/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+///
+/// @defgroup core_func_trigonometric Angle and Trigonometry Functions
+/// @ingroup core
+///
+/// Function parameters specified as angle are assumed to be in units of radians.
+/// In no case will any of these functions result in a divide by zero error. If
+/// the divisor of a ratio is 0, then results will be undefined.
+///
+/// These all operate component-wise. The description is per component.
+///
+/// Include <glm/glm.hpp> to use these core features.
+///
+/// @see ext_vector_trigonometric
+
+#pragma once
+
+#include "detail/setup.hpp"
+#include "detail/qualifier.hpp"
+
+namespace glm
+{
+	/// @addtogroup core_func_trigonometric
+	/// @{
+
+	/// Converts degrees to radians and returns the result.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see GLSL radians man page
+	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, T, Q> radians(vec<L, T, Q> const& degrees);
+
+	/// Converts radians to degrees and returns the result.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see GLSL degrees man page
+	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, T, Q> degrees(vec<L, T, Q> const& radians);
+
+	/// The standard trigonometric sine function.
+	/// The values returned by this function will range from [-1, 1].
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see GLSL sin man page
+	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> sin(vec<L, T, Q> const& angle);
+
+	/// The standard trigonometric cosine function.
+	/// The values returned by this function will range from [-1, 1].
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see GLSL cos man page
+	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> cos(vec<L, T, Q> const& angle);
+
+	/// The standard trigonometric tangent function.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see GLSL tan man page
+	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> tan(vec<L, T, Q> const& angle);
+
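// Editor's note (not upstream code): a hedged usage sketch for radians() and
// sin() declared above, with illustrative values:
//
//     glm::vec3 deg(0.0f, 90.0f, 180.0f);
//     glm::vec3 rad = glm::radians(deg);  // (0, pi/2, pi)
//     glm::vec3 s   = glm::sin(rad);      // component-wise sine: (0, 1, ~0)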
+	/// Arc sine. Returns an angle whose sine is x.
+	/// The range of values returned by this function is [-PI/2, PI/2].
+	/// Results are undefined if |x| > 1.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see GLSL asin man page
+	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> asin(vec<L, T, Q> const& x);
+
+	/// Arc cosine. Returns an angle whose cosine is x.
+	/// The range of values returned by this function is [0, PI].
+	/// Results are undefined if |x| > 1.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see GLSL acos man page
+	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> acos(vec<L, T, Q> const& x);
+
+	/// Arc tangent. Returns an angle whose tangent is y/x.
+	/// The signs of x and y are used to determine what
+	/// quadrant the angle is in. The range of values returned
+	/// by this function is [-PI, PI]. Results are undefined
+	/// if x and y are both 0.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see GLSL atan man page
+	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> atan(vec<L, T, Q> const& y, vec<L, T, Q> const& x);
+
+	/// Arc tangent. Returns an angle whose tangent is y_over_x.
+	/// The range of values returned by this function is [-PI/2, PI/2].
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see GLSL atan man page
+	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> atan(vec<L, T, Q> const& y_over_x);
+
+	/// Returns the hyperbolic sine function, (exp(x) - exp(-x)) / 2
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see GLSL sinh man page
+	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> sinh(vec<L, T, Q> const& angle);
+
+	/// Returns the hyperbolic cosine function, (exp(x) + exp(-x)) / 2
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see GLSL cosh man page
+	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> cosh(vec<L, T, Q> const& angle);
+
+	/// Returns the hyperbolic tangent function, sinh(angle) / cosh(angle)
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see GLSL tanh man page
+	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> tanh(vec<L, T, Q> const& angle);
+
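// Editor's note (not upstream code): the two-argument atan above is the
// GLSL-style, quadrant-aware atan2. A hedged sketch with illustrative values:
//
//     glm::vec2 y( 1.0f, -1.0f);
//     glm::vec2 x(-1.0f, -1.0f);
//     glm::vec2 a = glm::atan(y, x);  // ( 3*pi/4, -3*pi/4 )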
+	/// Arc hyperbolic sine; returns the inverse of sinh.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see GLSL asinh man page
+	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> asinh(vec<L, T, Q> const& x);
+
+	/// Arc hyperbolic cosine; returns the non-negative inverse
+	/// of cosh. Results are undefined if x < 1.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see GLSL acosh man page
+	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> acosh(vec<L, T, Q> const& x);
+
+	/// Arc hyperbolic tangent; returns the inverse of tanh.
+	/// Results are undefined if abs(x) >= 1.
+	///
+	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
+	/// @tparam T Floating-point scalar types
+	/// @tparam Q Value from qualifier enum
+	///
+	/// @see GLSL atanh man page
+	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL vec<L, T, Q> atanh(vec<L, T, Q> const& x);
+
+	/// @}
+}//namespace glm
+
+#include "detail/func_trigonometric.inl"
diff --git a/MacroLibX/third_party/glm/vec2.hpp b/MacroLibX/third_party/glm/vec2.hpp
new file mode 100644
index 0000000..be768bf
--- /dev/null
+++ b/MacroLibX/third_party/glm/vec2.hpp
@@ -0,0 +1,14 @@
+/// @ref core
+/// @file glm/vec2.hpp
+
+#pragma once
+#include "./ext/vector_bool2.hpp"
+#include "./ext/vector_bool2_precision.hpp"
+#include "./ext/vector_float2.hpp"
+#include "./ext/vector_float2_precision.hpp"
+#include "./ext/vector_double2.hpp"
+#include "./ext/vector_double2_precision.hpp"
+#include "./ext/vector_int2.hpp"
+#include "./ext/vector_int2_precision.hpp"
+#include "./ext/vector_uint2.hpp"
+#include "./ext/vector_uint2_precision.hpp"
diff --git a/MacroLibX/third_party/glm/vec3.hpp b/MacroLibX/third_party/glm/vec3.hpp
new file mode 100644
index 0000000..f570722
--- /dev/null
+++ b/MacroLibX/third_party/glm/vec3.hpp
@@ -0,0 +1,14 @@
+/// @ref core
+/// @file glm/vec3.hpp
+
+#pragma once
+#include "./ext/vector_bool3.hpp"
+#include "./ext/vector_bool3_precision.hpp"
+#include "./ext/vector_float3.hpp"
+#include "./ext/vector_float3_precision.hpp"
+#include "./ext/vector_double3.hpp"
+#include "./ext/vector_double3_precision.hpp"
+#include "./ext/vector_int3.hpp"
+#include "./ext/vector_int3_precision.hpp"
+#include "./ext/vector_uint3.hpp"
+#include "./ext/vector_uint3_precision.hpp"
diff --git a/MacroLibX/third_party/glm/vec4.hpp b/MacroLibX/third_party/glm/vec4.hpp
new file mode 100644
index 0000000..9117020
--- /dev/null
+++ b/MacroLibX/third_party/glm/vec4.hpp
@@ -0,0 +1,15 @@
+/// @ref core
+/// @file glm/vec4.hpp
+
+#pragma once
+#include "./ext/vector_bool4.hpp"
+#include "./ext/vector_bool4_precision.hpp"
+#include "./ext/vector_float4.hpp"
+#include "./ext/vector_float4_precision.hpp"
+#include "./ext/vector_double4.hpp"
+#include "./ext/vector_double4_precision.hpp"
+#include "./ext/vector_int4.hpp"
+#include "./ext/vector_int4_precision.hpp"
+#include "./ext/vector_uint4.hpp"
+#include "./ext/vector_uint4_precision.hpp"
+
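(Editor's aside, not part of the vendored sources: the vec2/vec3/vec4 headers
above are pure aggregators -- including one pulls in every element type and
precision variant of that dimension. A minimal sketch, assuming the glm include
root is on the include path:)

    #include <glm/vec3.hpp> // glm::vec3, glm::ivec3, glm::bvec3, ...

    int main()
    {
        glm::vec3  a(1.0f, 2.0f, 3.0f);
        glm::ivec3 b(1, 2, 3);              // integer variant from the same header
        glm::vec3  c = a * 2.0f;            // component-wise scale: (2, 4, 6)
        return static_cast<int>(c.x) + b.y; // 2 + 2
    }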
diff --git a/MacroLibX/third_party/glm/vector_relational.hpp b/MacroLibX/third_party/glm/vector_relational.hpp
new file mode 100644
index 0000000..a0fe17e
--- /dev/null
+++ b/MacroLibX/third_party/glm/vector_relational.hpp
@@ -0,0 +1,121 @@
+/// @ref core
+/// @file glm/vector_relational.hpp
+///
+/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
+///
+/// @defgroup core_func_vector_relational Vector Relational Functions
+/// @ingroup core
+///
+/// Relational and equality operators (<, <=, >, >=, ==, !=) are defined to
+/// operate on scalars and produce scalar Boolean results. For vector results,
+/// use the following built-in functions.
+///
+/// In all cases, the sizes of all the input and return vectors for any particular
+/// call must match.
+///
+/// Include <glm/glm.hpp> to use these core features.
+///
+/// @see ext_vector_relational
+
+#pragma once
+
+#include "detail/qualifier.hpp"
+#include "detail/setup.hpp"
+
+namespace glm
+{
+	/// @addtogroup core_func_vector_relational
+	/// @{
+
+	/// Returns the component-wise comparison result of x < y.
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	/// @tparam T A floating-point or integer scalar type.
+	///
+	/// @see GLSL lessThan man page
+	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> lessThan(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
+
+	/// Returns the component-wise comparison result of x <= y.
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	/// @tparam T A floating-point or integer scalar type.
+	///
+	/// @see GLSL lessThanEqual man page
+	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> lessThanEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
+
+	/// Returns the component-wise comparison result of x > y.
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	/// @tparam T A floating-point or integer scalar type.
+	///
+	/// @see GLSL greaterThan man page
+	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> greaterThan(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
+
+	/// Returns the component-wise comparison result of x >= y.
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	/// @tparam T A floating-point or integer scalar type.
+	///
+	/// @see GLSL greaterThanEqual man page
+	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> greaterThanEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
+
+	/// Returns the component-wise comparison result of x == y.
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	/// @tparam T A floating-point, integer or bool scalar type.
+	///
+	/// @see GLSL equal man page
+	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> equal(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
+
+	/// Returns the component-wise comparison result of x != y.
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	/// @tparam T A floating-point, integer or bool scalar type.
+	///
+	/// @see GLSL notEqual man page
+	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
+	template<length_t L, typename T, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> notEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
+
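// Editor's note (not upstream code): a hedged sketch combining the comparisons
// above with any()/all() declared just below, illustrative values:
//
//     glm::vec3  x(1.0f, 2.0f, 3.0f);
//     glm::vec3  y(2.0f, 2.0f, 2.0f);
//     glm::bvec3 lt = glm::lessThan(x, y); // (true, false, false)
//     bool anyLess  = glm::any(lt);        // true
//     bool allLess  = glm::all(lt);        // false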
+	/// Returns true if any component of x is true.
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	///
+	/// @see GLSL any man page
+	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
+	template<length_t L, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR bool any(vec<L, bool, Q> const& v);
+
+	/// Returns true if all components of x are true.
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	///
+	/// @see GLSL all man page
+	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
+	template<length_t L, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR bool all(vec<L, bool, Q> const& v);
+
+	/// Returns the component-wise logical complement of x.
+	/// /!\ Because of language incompatibilities between C++ and GLSL, GLM defines the function not_ instead of not.
+	///
+	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
+	///
+	/// @see GLSL not man page
+	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
+	template<length_t L, qualifier Q>
+	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> not_(vec<L, bool, Q> const& v);
+
+	/// @}
+}//namespace glm
+
+#include "detail/func_vector_relational.inl"
diff --git a/MacroLibX/third_party/stb_image.h b/MacroLibX/third_party/stb_image.h
new file mode 100644
index 0000000..5e807a0
--- /dev/null
+++ b/MacroLibX/third_party/stb_image.h
@@ -0,0 +1,7987 @@
+/* stb_image - v2.28 - public domain image loader - http://nothings.org/stb
+                                  no warranty implied; use at your own risk
+
+   Do this:
+      #define STB_IMAGE_IMPLEMENTATION
+   before you include this file in *one* C or C++ file to create the implementation.
+
+   // i.e. it should look like this:
+   #include ...
+   #include ...
+   #include ...
+   #define STB_IMAGE_IMPLEMENTATION
+   #include "stb_image.h"
+
+   You can #define STBI_ASSERT(x) before the #include to avoid using assert.h.
+   And #define STBI_MALLOC, STBI_REALLOC, and STBI_FREE to avoid using malloc,realloc,free
+
+
+   QUICK NOTES:
+      Primarily of interest to game developers and other people who can
+      avoid problematic images and only need the trivial interface
+
+      JPEG baseline & progressive (12 bpc/arithmetic not supported, same as stock IJG lib)
+      PNG 1/2/4/8/16-bit-per-channel
+
+      TGA (not sure what subset, if a subset)
+      BMP non-1bpp, non-RLE
+      PSD (composited view only, no extra channels, 8/16 bit-per-channel)
+
+      GIF (*comp always reports as 4-channel)
+      HDR (radiance rgbE format)
+      PIC (Softimage PIC)
+      PNM (PPM and PGM binary only)
+
+      Animated GIF still needs a proper API, but here's one way to do it:
+          http://gist.github.com/urraka/685d9a6340b26b830d49
+
+      - decode from memory or through FILE (define STBI_NO_STDIO to remove code)
+      - decode from arbitrary I/O callbacks
+      - SIMD acceleration on x86/x64 (SSE2) and ARM (NEON)
+
+   Full documentation under "DOCUMENTATION" below.
+
+
+LICENSE
+
+   See end of file for license information.
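   [Editor's note, not upstream text: the "Do this" recipe above is the entire
    integration story. A hedged sketch of a minimal loader -- "test.png" is a
    hypothetical input file:

        #define STB_IMAGE_IMPLEMENTATION
        #include "stb_image.h"
        #include <stdio.h>

        int main(void)
        {
            int x, y, n;
            unsigned char *data = stbi_load("test.png", &x, &y, &n, 0);
            if (data != NULL) {
                printf("%d x %d, %d channels\n", x, y, n);
                stbi_image_free(data);
            }
            return 0;
        }
   ]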
+ +RECENT REVISION HISTORY: + + 2.28 (2023-01-29) many error fixes, security errors, just tons of stuff + 2.27 (2021-07-11) document stbi_info better, 16-bit PNM support, bug fixes + 2.26 (2020-07-13) many minor fixes + 2.25 (2020-02-02) fix warnings + 2.24 (2020-02-02) fix warnings; thread-local failure_reason and flip_vertically + 2.23 (2019-08-11) fix clang static analysis warning + 2.22 (2019-03-04) gif fixes, fix warnings + 2.21 (2019-02-25) fix typo in comment + 2.20 (2019-02-07) support utf8 filenames in Windows; fix warnings and platform ifdefs + 2.19 (2018-02-11) fix warning + 2.18 (2018-01-30) fix warnings + 2.17 (2018-01-29) bugfix, 1-bit BMP, 16-bitness query, fix warnings + 2.16 (2017-07-23) all functions have 16-bit variants; optimizations; bugfixes + 2.15 (2017-03-18) fix png-1,2,4; all Imagenet JPGs; no runtime SSE detection on GCC + 2.14 (2017-03-03) remove deprecated STBI_JPEG_OLD; fixes for Imagenet JPGs + 2.13 (2016-12-04) experimental 16-bit API, only for PNG so far; fixes + 2.12 (2016-04-02) fix typo in 2.11 PSD fix that caused crashes + 2.11 (2016-04-02) 16-bit PNGS; enable SSE2 in non-gcc x64 + RGB-format JPEG; remove white matting in PSD; + allocate large structures on the stack; + correct channel count for PNG & BMP + 2.10 (2016-01-22) avoid warning introduced in 2.09 + 2.09 (2016-01-16) 16-bit TGA; comments in PNM files; STBI_REALLOC_SIZED + + See end of file for full revision history. + + + ============================ Contributors ========================= + + Image formats Extensions, features + Sean Barrett (jpeg, png, bmp) Jetro Lauha (stbi_info) + Nicolas Schulz (hdr, psd) Martin "SpartanJ" Golini (stbi_info) + Jonathan Dummer (tga) James "moose2000" Brown (iPhone PNG) + Jean-Marc Lienher (gif) Ben "Disch" Wenger (io callbacks) + Tom Seddon (pic) Omar Cornut (1/2/4-bit PNG) + Thatcher Ulrich (psd) Nicolas Guillemot (vertical flip) + Ken Miller (pgm, ppm) Richard Mitton (16-bit PSD) + github:urraka (animated gif) Junggon Kim (PNM comments) + Christopher Forseth (animated gif) Daniel Gibson (16-bit TGA) + socks-the-fox (16-bit PNG) + Jeremy Sawicki (handle all ImageNet JPGs) + Optimizations & bugfixes Mikhail Morozov (1-bit BMP) + Fabian "ryg" Giesen Anael Seghezzi (is-16-bit query) + Arseny Kapoulkine Simon Breuss (16-bit PNM) + John-Mark Allen + Carmelo J Fdez-Aguera + + Bug & warning fixes + Marc LeBlanc David Woo Guillaume George Martins Mozeiko + Christpher Lloyd Jerry Jansson Joseph Thomson Blazej Dariusz Roszkowski + Phil Jordan Dave Moore Roy Eltham + Hayaki Saito Nathan Reed Won Chun + Luke Graham Johan Duparc Nick Verigakis the Horde3D community + Thomas Ruf Ronny Chevalier github:rlyeh + Janez Zemva John Bartholomew Michal Cichon github:romigrou + Jonathan Blow Ken Hamada Tero Hanninen github:svdijk + Eugene Golushkov Laurent Gomila Cort Stratton github:snagar + Aruelien Pocheville Sergio Gonzalez Thibault Reuille github:Zelex + Cass Everitt Ryamond Barbiero github:grim210 + Paul Du Bois Engin Manap Aldo Culquicondor github:sammyhw + Philipp Wiesemann Dale Weiler Oriol Ferrer Mesia github:phprus + Josh Tobin Neil Bickford Matthew Gregan github:poppolopoppo + Julian Raschke Gregory Mullen Christian Floisand github:darealshinji + Baldur Karlsson Kevin Schmidt JR Smith github:Michaelangel007 + Brad Weinberger Matvey Cherevko github:mosra + Luca Sas Alexander Veselov Zack Middleton [reserved] + Ryan C. 
Gordon [reserved] [reserved] + DO NOT ADD YOUR NAME HERE + + Jacko Dirks + + To add your name to the credits, pick a random blank space in the middle and fill it. + 80% of merge conflicts on stb PRs are due to people adding their name at the end + of the credits. +*/ + +#ifndef STBI_INCLUDE_STB_IMAGE_H +#define STBI_INCLUDE_STB_IMAGE_H + +// DOCUMENTATION +// +// Limitations: +// - no 12-bit-per-channel JPEG +// - no JPEGs with arithmetic coding +// - GIF always returns *comp=4 +// +// Basic usage (see HDR discussion below for HDR usage): +// int x,y,n; +// unsigned char *data = stbi_load(filename, &x, &y, &n, 0); +// // ... process data if not NULL ... +// // ... x = width, y = height, n = # 8-bit components per pixel ... +// // ... replace '0' with '1'..'4' to force that many components per pixel +// // ... but 'n' will always be the number that it would have been if you said 0 +// stbi_image_free(data); +// +// Standard parameters: +// int *x -- outputs image width in pixels +// int *y -- outputs image height in pixels +// int *channels_in_file -- outputs # of image components in image file +// int desired_channels -- if non-zero, # of image components requested in result +// +// The return value from an image loader is an 'unsigned char *' which points +// to the pixel data, or NULL on an allocation failure or if the image is +// corrupt or invalid. The pixel data consists of *y scanlines of *x pixels, +// with each pixel consisting of N interleaved 8-bit components; the first +// pixel pointed to is top-left-most in the image. There is no padding between +// image scanlines or between pixels, regardless of format. The number of +// components N is 'desired_channels' if desired_channels is non-zero, or +// *channels_in_file otherwise. If desired_channels is non-zero, +// *channels_in_file has the number of components that _would_ have been +// output otherwise. E.g. if you set desired_channels to 4, you will always +// get RGBA output, but you can check *channels_in_file to see if it's trivially +// opaque because e.g. there were only 3 channels in the source image. +// +// An output image with N components has the following components interleaved +// in this order in each pixel: +// +// N=#comp components +// 1 grey +// 2 grey, alpha +// 3 red, green, blue +// 4 red, green, blue, alpha +// +// If image loading fails for any reason, the return value will be NULL, +// and *x, *y, *channels_in_file will be unchanged. The function +// stbi_failure_reason() can be queried for an extremely brief, end-user +// unfriendly explanation of why the load failed. Define STBI_NO_FAILURE_STRINGS +// to avoid compiling these strings at all, and STBI_FAILURE_USERMSG to get slightly +// more user-friendly ones. +// +// Paletted PNG, BMP, GIF, and PIC images are automatically depalettized. +// +// To query the width, height and component count of an image without having to +// decode the full file, you can use the stbi_info family of functions: +// +// int x,y,n,ok; +// ok = stbi_info(filename, &x, &y, &n); +// // returns ok=1 and sets x, y, n if image is a supported format, +// // 0 otherwise. +// +// Note that stb_image pervasively uses ints in its public API for sizes, +// including sizes of memory buffers. This is now part of the API and thus +// hard to change without causing breakage. As a result, the various image +// loaders all have certain limits on image size; these differ somewhat +// by format but generally boil down to either just under 2GB or just under +// 1GB. 
When the decoded image would be larger than this, stb_image decoding +// will fail. +// +// Additionally, stb_image will reject image files that have any of their +// dimensions set to a larger value than the configurable STBI_MAX_DIMENSIONS, +// which defaults to 2**24 = 16777216 pixels. Due to the above memory limit, +// the only way to have an image with such dimensions load correctly +// is for it to have a rather extreme aspect ratio. Either way, the +// assumption here is that such larger images are likely to be malformed +// or malicious. If you do need to load an image with individual dimensions +// larger than that, and it still fits in the overall size limit, you can +// #define STBI_MAX_DIMENSIONS on your own to be something larger. +// +// =========================================================================== +// +// UNICODE: +// +// If compiling for Windows and you wish to use Unicode filenames, compile +// with +// #define STBI_WINDOWS_UTF8 +// and pass utf8-encoded filenames. Call stbi_convert_wchar_to_utf8 to convert +// Windows wchar_t filenames to utf8. +// +// =========================================================================== +// +// Philosophy +// +// stb libraries are designed with the following priorities: +// +// 1. easy to use +// 2. easy to maintain +// 3. good performance +// +// Sometimes I let "good performance" creep up in priority over "easy to maintain", +// and for best performance I may provide less-easy-to-use APIs that give higher +// performance, in addition to the easy-to-use ones. Nevertheless, it's important +// to keep in mind that from the standpoint of you, a client of this library, +// all you care about is #1 and #3, and stb libraries DO NOT emphasize #3 above all. +// +// Some secondary priorities arise directly from the first two, some of which +// provide more explicit reasons why performance can't be emphasized. +// +// - Portable ("ease of use") +// - Small source code footprint ("easy to maintain") +// - No dependencies ("ease of use") +// +// =========================================================================== +// +// I/O callbacks +// +// I/O callbacks allow you to read from arbitrary sources, like packaged +// files or some other source. Data read from callbacks are processed +// through a small internal buffer (currently 128 bytes) to try to reduce +// overhead. +// +// The three functions you must define are "read" (reads some bytes of data), +// "skip" (skips some bytes of data), "eof" (reports if the stream is at the end). +// +// =========================================================================== +// +// SIMD support +// +// The JPEG decoder will try to automatically use SIMD kernels on x86 when +// supported by the compiler. For ARM Neon support, you must explicitly +// request it. +// +// (The old do-it-yourself SIMD API is no longer supported in the current +// code.) +// +// On x86, SSE2 will automatically be used when available based on a run-time +// test; if not, the generic C versions are used as a fall-back. On ARM targets, +// the typical path is to have separate builds for NEON and non-NEON devices +// (at least this is true for iOS and Android). Therefore, the NEON support is +// toggled by a build flag: define STBI_NEON to get NEON loops. +// +// If for some reason you do not want to use any of SIMD code, or if +// you have issues compiling it, you can disable it entirely by +// defining STBI_NO_SIMD. 
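// [Editor's note, not upstream text: a hedged sketch of the read/skip/eof
//  callback trio described above, reading from an in-memory buffer; it needs
//  <string.h> for memcpy, and stbi_load_from_callbacks is declared further down:]
//
//    typedef struct { const unsigned char *buf; int size, pos; } mem_src;
//    static int  mem_read(void *u, char *out, int n) {
//       mem_src *m = (mem_src *) u;
//       if (n > m->size - m->pos) n = m->size - m->pos;
//       memcpy(out, m->buf + m->pos, n);
//       m->pos += n;
//       return n;
//    }
//    static void mem_skip(void *u, int n) { ((mem_src *) u)->pos += n; }
//    static int  mem_eof (void *u) { mem_src *m = (mem_src *) u; return m->pos >= m->size; }
//
//    stbi_io_callbacks cb = { mem_read, mem_skip, mem_eof };
//    // unsigned char *px = stbi_load_from_callbacks(&cb, &src, &x, &y, &n, 0);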
+// +// =========================================================================== +// +// HDR image support (disable by defining STBI_NO_HDR) +// +// stb_image supports loading HDR images in general, and currently the Radiance +// .HDR file format specifically. You can still load any file through the existing +// interface; if you attempt to load an HDR file, it will be automatically remapped +// to LDR, assuming gamma 2.2 and an arbitrary scale factor defaulting to 1; +// both of these constants can be reconfigured through this interface: +// +// stbi_hdr_to_ldr_gamma(2.2f); +// stbi_hdr_to_ldr_scale(1.0f); +// +// (note, do not use _inverse_ constants; stbi_image will invert them +// appropriately). +// +// Additionally, there is a new, parallel interface for loading files as +// (linear) floats to preserve the full dynamic range: +// +// float *data = stbi_loadf(filename, &x, &y, &n, 0); +// +// If you load LDR images through this interface, those images will +// be promoted to floating point values, run through the inverse of +// constants corresponding to the above: +// +// stbi_ldr_to_hdr_scale(1.0f); +// stbi_ldr_to_hdr_gamma(2.2f); +// +// Finally, given a filename (or an open file or memory block--see header +// file for details) containing image data, you can query for the "most +// appropriate" interface to use (that is, whether the image is HDR or +// not), using: +// +// stbi_is_hdr(char *filename); +// +// =========================================================================== +// +// iPhone PNG support: +// +// We optionally support converting iPhone-formatted PNGs (which store +// premultiplied BGRA) back to RGB, even though they're internally encoded +// differently. To enable this conversion, call +// stbi_convert_iphone_png_to_rgb(1). +// +// Call stbi_set_unpremultiply_on_load(1) as well to force a divide per +// pixel to remove any premultiplied alpha *only* if the image file explicitly +// says there's premultiplied data (currently only happens in iPhone images, +// and only if iPhone convert-to-rgb processing is on). +// +// =========================================================================== +// +// ADDITIONAL CONFIGURATION +// +// - You can suppress implementation of any of the decoders to reduce +// your code footprint by #defining one or more of the following +// symbols before creating the implementation. +// +// STBI_NO_JPEG +// STBI_NO_PNG +// STBI_NO_BMP +// STBI_NO_PSD +// STBI_NO_TGA +// STBI_NO_GIF +// STBI_NO_HDR +// STBI_NO_PIC +// STBI_NO_PNM (.ppm and .pgm) +// +// - You can request *only* certain decoders and suppress all other ones +// (this will be more forward-compatible, as addition of new decoders +// doesn't require you to disable them explicitly): +// +// STBI_ONLY_JPEG +// STBI_ONLY_PNG +// STBI_ONLY_BMP +// STBI_ONLY_PSD +// STBI_ONLY_TGA +// STBI_ONLY_GIF +// STBI_ONLY_HDR +// STBI_ONLY_PIC +// STBI_ONLY_PNM (.ppm and .pgm) +// +// - If you use STBI_NO_PNG (or _ONLY_ without PNG), and you still +// want the zlib decoder to be available, #define STBI_SUPPORT_ZLIB +// +// - If you define STBI_MAX_DIMENSIONS, stb_image will reject images greater +// than that size (in either width or height) without further processing. +// This is to let programs in the wild set an upper bound to prevent +// denial-of-service attacks on untrusted data, as one could generate a +// valid image of gigantic dimensions and force stb_image to allocate a +// huge block of memory and spend disproportionate time decoding it. 
By
+// default this is set to (1 << 24), which is 16777216, but that's still
+// very big.
+
+#ifndef STBI_NO_STDIO
+#include <stdio.h>
+#endif // STBI_NO_STDIO
+
+#define STBI_VERSION 1
+
+enum
+{
+   STBI_default = 0, // only used for desired_channels
+
+   STBI_grey       = 1,
+   STBI_grey_alpha = 2,
+   STBI_rgb        = 3,
+   STBI_rgb_alpha  = 4
+};
+
+#include <stdlib.h>
+typedef unsigned char stbi_uc;
+typedef unsigned short stbi_us;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef STBIDEF
+#ifdef STB_IMAGE_STATIC
+#define STBIDEF static
+#else
+#define STBIDEF extern
+#endif
+#endif
+
+//////////////////////////////////////////////////////////////////////////////
+//
+// PRIMARY API - works on images of any type
+//
+
+//
+// load image by filename, open file, or memory buffer
+//
+
+typedef struct
+{
+   int  (*read) (void *user,char *data,int size); // fill 'data' with 'size' bytes. return number of bytes actually read
+   void (*skip) (void *user,int n);               // skip the next 'n' bytes, or 'unget' the last -n bytes if negative
+   int  (*eof)  (void *user);                     // returns nonzero if we are at end of file/data
+} stbi_io_callbacks;
+
+////////////////////////////////////
+//
+// 8-bits-per-channel interface
+//
+
+STBIDEF stbi_uc *stbi_load_from_memory   (stbi_uc const *buffer, int len, int *x, int *y, int *channels_in_file, int desired_channels);
+STBIDEF stbi_uc *stbi_load_from_callbacks(stbi_io_callbacks const *clbk, void *user, int *x, int *y, int *channels_in_file, int desired_channels);
+
+#ifndef STBI_NO_STDIO
+STBIDEF stbi_uc *stbi_load          (char const *filename, int *x, int *y, int *channels_in_file, int desired_channels);
+STBIDEF stbi_uc *stbi_load_from_file(FILE *f, int *x, int *y, int *channels_in_file, int desired_channels);
+// for stbi_load_from_file, file pointer is left pointing immediately after image
+#endif
+
+#ifndef STBI_NO_GIF
+STBIDEF stbi_uc *stbi_load_gif_from_memory(stbi_uc const *buffer, int len, int **delays, int *x, int *y, int *z, int *comp, int req_comp);
+#endif
+
+#ifdef STBI_WINDOWS_UTF8
+STBIDEF int stbi_convert_wchar_to_utf8(char *buffer, size_t bufferlen, const wchar_t* input);
+#endif
+
+////////////////////////////////////
+//
+// 16-bits-per-channel interface
+//
+
+STBIDEF stbi_us *stbi_load_16_from_memory   (stbi_uc const *buffer, int len, int *x, int *y, int *channels_in_file, int desired_channels);
+STBIDEF stbi_us *stbi_load_16_from_callbacks(stbi_io_callbacks const *clbk, void *user, int *x, int *y, int *channels_in_file, int desired_channels);
+
+#ifndef STBI_NO_STDIO
+STBIDEF stbi_us *stbi_load_16          (char const *filename, int *x, int *y, int *channels_in_file, int desired_channels);
+STBIDEF stbi_us *stbi_load_from_file_16(FILE *f, int *x, int *y, int *channels_in_file, int desired_channels);
+#endif
+
+////////////////////////////////////
+//
+// float-per-channel interface
+//
+#ifndef STBI_NO_LINEAR
+   STBIDEF float *stbi_loadf_from_memory   (stbi_uc const *buffer, int len, int *x, int *y, int *channels_in_file, int desired_channels);
+   STBIDEF float *stbi_loadf_from_callbacks(stbi_io_callbacks const *clbk, void *user, int *x, int *y, int *channels_in_file, int desired_channels);
+
+   #ifndef STBI_NO_STDIO
+   STBIDEF float *stbi_loadf          (char const *filename, int *x, int *y, int *channels_in_file, int desired_channels);
+   STBIDEF float *stbi_loadf_from_file(FILE *f, int *x, int *y, int *channels_in_file, int desired_channels);
+   #endif
+#endif
+
+#ifndef STBI_NO_HDR
+   STBIDEF void stbi_hdr_to_ldr_gamma(float gamma);
+   STBIDEF void stbi_hdr_to_ldr_scale(float scale);
+#endif // STBI_NO_HDR
+
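+
+// As a quick, non-normative sketch of the interfaces declared above: load a
+// file as 8-bit RGBA, then (when STBI_NO_LINEAR is not defined) as linear
+// floats. The file names are placeholders and error handling is minimal:
+//
+//    int w, h, n;
+//    stbi_uc *pixels = stbi_load("img.png", &w, &h, &n, 4); // force 4 channels
+//    if (pixels) {
+//       // ... use w*h*4 bytes of RGBA data ...
+//       stbi_image_free(pixels); // stbi_image_free works for all loaders
+//    }
+//
+//    float *linear = stbi_loadf("img.hdr", &w, &h, &n, 0);  // channels as-is
+//    if (linear) {
+//       // ... w*h*n floats preserving the full dynamic range ...
+//       stbi_image_free(linear);
+//    }
+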
+#ifndef STBI_NO_LINEAR
+   STBIDEF void stbi_ldr_to_hdr_gamma(float gamma);
+   STBIDEF void stbi_ldr_to_hdr_scale(float scale);
+#endif // STBI_NO_LINEAR
+
+// stbi_is_hdr is always defined, but always returns false if STBI_NO_HDR
+STBIDEF int stbi_is_hdr_from_callbacks(stbi_io_callbacks const *clbk, void *user);
+STBIDEF int stbi_is_hdr_from_memory(stbi_uc const *buffer, int len);
+#ifndef STBI_NO_STDIO
+STBIDEF int stbi_is_hdr          (char const *filename);
+STBIDEF int stbi_is_hdr_from_file(FILE *f);
+#endif // STBI_NO_STDIO
+
+
+// get a VERY brief reason for failure
+// on most compilers (and ALL modern mainstream compilers) this is threadsafe
+STBIDEF const char *stbi_failure_reason(void);
+
+// free the loaded image -- this is just free()
+STBIDEF void stbi_image_free(void *retval_from_stbi_load);
+
+// get image dimensions & components without fully decoding
+STBIDEF int stbi_info_from_memory(stbi_uc const *buffer, int len, int *x, int *y, int *comp);
+STBIDEF int stbi_info_from_callbacks(stbi_io_callbacks const *clbk, void *user, int *x, int *y, int *comp);
+STBIDEF int stbi_is_16_bit_from_memory(stbi_uc const *buffer, int len);
+STBIDEF int stbi_is_16_bit_from_callbacks(stbi_io_callbacks const *clbk, void *user);
+
+#ifndef STBI_NO_STDIO
+STBIDEF int stbi_info            (char const *filename, int *x, int *y, int *comp);
+STBIDEF int stbi_info_from_file  (FILE *f, int *x, int *y, int *comp);
+STBIDEF int stbi_is_16_bit          (char const *filename);
+STBIDEF int stbi_is_16_bit_from_file(FILE *f);
+#endif
+
+
+
+// for image formats that explicitly notate that they have premultiplied alpha,
+// we just return the colors as stored in the file. set this flag to force
+// unpremultiplication. results are undefined if the unpremultiply overflows.
+STBIDEF void stbi_set_unpremultiply_on_load(int flag_true_if_should_unpremultiply);
+
+// indicate whether we should process iphone images back to canonical format,
+// or just pass them through "as-is"
+STBIDEF void stbi_convert_iphone_png_to_rgb(int flag_true_if_should_convert);
+
+// flip the image vertically, so the first pixel in the output array is the bottom left
+STBIDEF void stbi_set_flip_vertically_on_load(int flag_true_if_should_flip);
+
+// as above, but only applies to images loaded on the thread that calls the function
+// this function is only available if your compiler supports thread-local variables;
+// calling it will fail to link if your compiler doesn't
+STBIDEF void stbi_set_unpremultiply_on_load_thread(int flag_true_if_should_unpremultiply);
+STBIDEF void stbi_convert_iphone_png_to_rgb_thread(int flag_true_if_should_convert);
+STBIDEF void stbi_set_flip_vertically_on_load_thread(int flag_true_if_should_flip);
+
+// ZLIB client - used by PNG, available for other purposes
+
+STBIDEF char *stbi_zlib_decode_malloc_guesssize(const char *buffer, int len, int initial_size, int *outlen);
+STBIDEF char *stbi_zlib_decode_malloc_guesssize_headerflag(const char *buffer, int len, int initial_size, int *outlen, int parse_header);
+STBIDEF char *stbi_zlib_decode_malloc(const char *buffer, int len, int *outlen);
+STBIDEF int   stbi_zlib_decode_buffer(char *obuffer, int olen, const char *ibuffer, int ilen);
+
+STBIDEF char *stbi_zlib_decode_noheader_malloc(const char *buffer, int len, int *outlen);
+STBIDEF int   stbi_zlib_decode_noheader_buffer(char *obuffer, int olen, const char *ibuffer, int ilen);
+
+
+#ifdef __cplusplus
+}
+#endif
+
+//
+//
+////   end header file   /////////////////////////////////////////////////////
+#endif // STBI_INCLUDE_STB_IMAGE_H
+
+#ifdef STB_IMAGE_IMPLEMENTATION
+
+#if defined(STBI_ONLY_JPEG) || defined(STBI_ONLY_PNG) || defined(STBI_ONLY_BMP) \
+  || defined(STBI_ONLY_TGA) || defined(STBI_ONLY_GIF) || defined(STBI_ONLY_PSD) \
+  || defined(STBI_ONLY_HDR) || defined(STBI_ONLY_PIC) || defined(STBI_ONLY_PNM) \
+  || defined(STBI_ONLY_ZLIB)
+   #ifndef STBI_ONLY_JPEG
+   #define STBI_NO_JPEG
+   #endif
+   #ifndef STBI_ONLY_PNG
+   #define STBI_NO_PNG
+   #endif
+   #ifndef STBI_ONLY_BMP
+   #define STBI_NO_BMP
+   #endif
+   #ifndef STBI_ONLY_PSD
+   #define STBI_NO_PSD
+   #endif
+   #ifndef STBI_ONLY_TGA
+   #define STBI_NO_TGA
+   #endif
+   #ifndef STBI_ONLY_GIF
+   #define STBI_NO_GIF
+   #endif
+   #ifndef STBI_ONLY_HDR
+   #define STBI_NO_HDR
+   #endif
+   #ifndef STBI_ONLY_PIC
+   #define STBI_NO_PIC
+   #endif
+   #ifndef STBI_ONLY_PNM
+   #define STBI_NO_PNM
+   #endif
+#endif
+
+#if defined(STBI_NO_PNG) && !defined(STBI_SUPPORT_ZLIB) && !defined(STBI_NO_ZLIB)
+#define STBI_NO_ZLIB
+#endif
+
+
+#include <stdarg.h>
+#include <stddef.h> // ptrdiff_t on osx
+#include <stdlib.h>
+#include <string.h>
+#include <limits.h>
+
+#if !defined(STBI_NO_LINEAR) || !defined(STBI_NO_HDR)
+#include <math.h> // ldexp, pow
+#endif
+
+#ifndef STBI_NO_STDIO
+#include <stdio.h>
+#endif
+
+#ifndef STBI_ASSERT
+#include <assert.h>
+#define STBI_ASSERT(x) assert(x)
+#endif
+
+#ifdef __cplusplus
+#define STBI_EXTERN extern "C"
+#else
+#define STBI_EXTERN extern
+#endif
+
+
+#ifndef _MSC_VER
+   #ifdef __cplusplus
+   #define stbi_inline inline
+   #else
+   #define stbi_inline
+   #endif
+#else
+   #define stbi_inline __forceinline
+#endif
+
+#ifndef STBI_NO_THREAD_LOCALS
+   #if defined(__cplusplus) && __cplusplus >= 201103L
+      #define STBI_THREAD_LOCAL thread_local
+   #elif defined(__GNUC__) && __GNUC__ < 5
+      #define STBI_THREAD_LOCAL __thread
+   #elif defined(_MSC_VER)
+      #define STBI_THREAD_LOCAL __declspec(thread)
+   #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112L && !defined(__STDC_NO_THREADS__)
+      #define STBI_THREAD_LOCAL _Thread_local
+   #endif
+
+   #ifndef STBI_THREAD_LOCAL
+      #if defined(__GNUC__)
+         #define STBI_THREAD_LOCAL __thread
+      #endif
+   #endif
+#endif
+
+#if defined(_MSC_VER) || defined(__SYMBIAN32__)
+typedef unsigned short stbi__uint16;
+typedef   signed short stbi__int16;
+typedef unsigned int   stbi__uint32;
+typedef   signed int   stbi__int32;
+#else
+#include <stdint.h>
+typedef uint16_t stbi__uint16;
+typedef int16_t  stbi__int16;
+typedef uint32_t stbi__uint32;
+typedef int32_t  stbi__int32;
+#endif
+
+// should produce compiler error if size is wrong
+typedef unsigned char validate_uint32[sizeof(stbi__uint32)==4 ? 1 : -1];
+
+#ifdef _MSC_VER
+#define STBI_NOTUSED(v) (void)(v)
+#else
+#define STBI_NOTUSED(v) (void)sizeof(v)
+#endif
+
+#ifdef _MSC_VER
+#define STBI_HAS_LROTL
+#endif
+
+#ifdef STBI_HAS_LROTL
+   #define stbi_lrot(x,y) _lrotl(x,y)
+#else
+   #define stbi_lrot(x,y) (((x) << (y)) | ((x) >> (-(y) & 31)))
+#endif
+
+#if defined(STBI_MALLOC) && defined(STBI_FREE) && (defined(STBI_REALLOC) || defined(STBI_REALLOC_SIZED))
+// ok
+#elif !defined(STBI_MALLOC) && !defined(STBI_FREE) && !defined(STBI_REALLOC) && !defined(STBI_REALLOC_SIZED)
+// ok
+#else
+#error "Must define all or none of STBI_MALLOC, STBI_FREE, and STBI_REALLOC (or STBI_REALLOC_SIZED)."
+#endif
+
+#ifndef STBI_MALLOC
+#define STBI_MALLOC(sz)       malloc(sz)
+#define STBI_REALLOC(p,newsz) realloc(p,newsz)
+#define STBI_FREE(p)          free(p)
+#endif
+
+#ifndef STBI_REALLOC_SIZED
+#define STBI_REALLOC_SIZED(p,oldsz,newsz) STBI_REALLOC(p,newsz)
+#endif
+
+// x86/x64 detection
+#if defined(__x86_64__) || defined(_M_X64)
+#define STBI__X64_TARGET
+#elif defined(__i386) || defined(_M_IX86)
+#define STBI__X86_TARGET
+#endif
+
+#if defined(__GNUC__) && defined(STBI__X86_TARGET) && !defined(__SSE2__) && !defined(STBI_NO_SIMD)
+// gcc doesn't support sse2 intrinsics unless you compile with -msse2,
+// which in turn means it gets to use SSE2 everywhere. This is unfortunate,
+// but previous attempts to provide the SSE2 functions with runtime
+// detection caused numerous issues. The way architecture extensions are
+// exposed in GCC/Clang is, sadly, not really suited for one-file libs.
+// New behavior: if compiled with -msse2, we use SSE2 without any
+// detection; if not, we don't use it at all.
+#define STBI_NO_SIMD
+#endif
+
+#if defined(__MINGW32__) && defined(STBI__X86_TARGET) && !defined(STBI_MINGW_ENABLE_SSE2) && !defined(STBI_NO_SIMD)
+// Note that __MINGW32__ doesn't actually mean 32-bit, so we have to avoid STBI__X64_TARGET
+//
+// 32-bit MinGW wants ESP to be 16-byte aligned, but this is not in the
+// Windows ABI and VC++ as well as Windows DLLs don't maintain that invariant.
+// As a result, enabling SSE2 on 32-bit MinGW is dangerous when not
+// simultaneously enabling "-mstackrealign".
+//
+// See https://github.com/nothings/stb/issues/81 for more information.
+//
+// So default to no SSE2 on 32-bit MinGW. If you've read this far and added
+// -mstackrealign to your build settings, feel free to #define STBI_MINGW_ENABLE_SSE2.
+#define STBI_NO_SIMD
+#endif
+
+#if !defined(STBI_NO_SIMD) && (defined(STBI__X86_TARGET) || defined(STBI__X64_TARGET))
+#define STBI_SSE2
+#include <emmintrin.h>
+
+#ifdef _MSC_VER
+
+#if _MSC_VER >= 1400 // not VC6
+#include <intrin.h> // __cpuid
+static int stbi__cpuid3(void)
+{
+   int info[4];
+   __cpuid(info,1);
+   return info[3];
+}
+#else
+static int stbi__cpuid3(void)
+{
+   int res;
+   __asm {
+      mov  eax,1
+      cpuid
+      mov  res,edx
+   }
+   return res;
+}
+#endif
+
+#define STBI_SIMD_ALIGN(type, name) __declspec(align(16)) type name
+
+#if !defined(STBI_NO_JPEG) && defined(STBI_SSE2)
+static int stbi__sse2_available(void)
+{
+   int info3 = stbi__cpuid3();
+   return ((info3 >> 26) & 1) != 0;
+}
+#endif
+
+#else // assume GCC-style if not VC++
+#define STBI_SIMD_ALIGN(type, name) type name __attribute__((aligned(16)))
+
+#if !defined(STBI_NO_JPEG) && defined(STBI_SSE2)
+static int stbi__sse2_available(void)
+{
+   // If we're even attempting to compile this on GCC/Clang, that means
+   // -msse2 is on, which means the compiler is allowed to use SSE2
+   // instructions at will, and so are we.
+   return 1;
+}
+#endif
+
+#endif
+#endif
+
+// ARM NEON
+#if defined(STBI_NO_SIMD) && defined(STBI_NEON)
+#undef STBI_NEON
+#endif
+
+#ifdef STBI_NEON
+#include <arm_neon.h>
+#ifdef _MSC_VER
+#define STBI_SIMD_ALIGN(type, name) __declspec(align(16)) type name
+#else
+#define STBI_SIMD_ALIGN(type, name) type name __attribute__((aligned(16)))
+#endif
+#endif
+
+#ifndef STBI_SIMD_ALIGN
+#define STBI_SIMD_ALIGN(type, name) type name
+#endif
+
+#ifndef STBI_MAX_DIMENSIONS
+#define STBI_MAX_DIMENSIONS (1 << 24)
+#endif
+
+///////////////////////////////////////////////
+//
+// stbi__context struct and start_xxx functions
+
+// stbi__context structure is our basic context used by all images, so it
+// contains all the IO context, plus some basic image information
+typedef struct
+{
+   stbi__uint32 img_x, img_y;
+   int img_n, img_out_n;
+
+   stbi_io_callbacks io;
+   void *io_user_data;
+
+   int read_from_callbacks;
+   int buflen;
+   stbi_uc buffer_start[128];
+   int callback_already_read;
+
+   stbi_uc *img_buffer, *img_buffer_end;
+   stbi_uc *img_buffer_original, *img_buffer_original_end;
+} stbi__context;
+
+
+static void stbi__refill_buffer(stbi__context *s);
+
+// initialize a memory-decode context
+static void stbi__start_mem(stbi__context *s, stbi_uc const *buffer, int len)
+{
+   s->io.read = NULL;
+   s->read_from_callbacks = 0;
+   s->callback_already_read = 0;
+   s->img_buffer = s->img_buffer_original = (stbi_uc *) buffer;
+   s->img_buffer_end = s->img_buffer_original_end = (stbi_uc *) buffer+len;
+}
+
+// initialize a callback-based context
+static void stbi__start_callbacks(stbi__context *s, stbi_io_callbacks *c, void *user)
+{
+   s->io = *c;
+   s->io_user_data = user;
+   s->buflen = sizeof(s->buffer_start);
+   s->read_from_callbacks = 1;
+   s->callback_already_read = 0;
+   s->img_buffer = s->img_buffer_original = s->buffer_start;
+   stbi__refill_buffer(s);
+   s->img_buffer_original_end = s->img_buffer_end;
+}
+
+#ifndef STBI_NO_STDIO
+
+static int stbi__stdio_read(void *user, char *data, int size)
+{
+   return (int) fread(data,1,size,(FILE*) user);
+}
+
+static void stbi__stdio_skip(void *user, int n)
+{
+   int ch;
+   fseek((FILE*) user, n, SEEK_CUR);
+   ch = fgetc((FILE*) user); /* have to read a byte to reset feof()'s flag */
+   if (ch != EOF) {
+      ungetc(ch, (FILE *) user); /* push byte back onto stream if valid.
*/ + } +} + +static int stbi__stdio_eof(void *user) +{ + return feof((FILE*) user) || ferror((FILE *) user); +} + +static stbi_io_callbacks stbi__stdio_callbacks = +{ + stbi__stdio_read, + stbi__stdio_skip, + stbi__stdio_eof, +}; + +static void stbi__start_file(stbi__context *s, FILE *f) +{ + stbi__start_callbacks(s, &stbi__stdio_callbacks, (void *) f); +} + +//static void stop_file(stbi__context *s) { } + +#endif // !STBI_NO_STDIO + +static void stbi__rewind(stbi__context *s) +{ + // conceptually rewind SHOULD rewind to the beginning of the stream, + // but we just rewind to the beginning of the initial buffer, because + // we only use it after doing 'test', which only ever looks at at most 92 bytes + s->img_buffer = s->img_buffer_original; + s->img_buffer_end = s->img_buffer_original_end; +} + +enum +{ + STBI_ORDER_RGB, + STBI_ORDER_BGR +}; + +typedef struct +{ + int bits_per_channel; + int num_channels; + int channel_order; +} stbi__result_info; + +#ifndef STBI_NO_JPEG +static int stbi__jpeg_test(stbi__context *s); +static void *stbi__jpeg_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri); +static int stbi__jpeg_info(stbi__context *s, int *x, int *y, int *comp); +#endif + +#ifndef STBI_NO_PNG +static int stbi__png_test(stbi__context *s); +static void *stbi__png_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri); +static int stbi__png_info(stbi__context *s, int *x, int *y, int *comp); +static int stbi__png_is16(stbi__context *s); +#endif + +#ifndef STBI_NO_BMP +static int stbi__bmp_test(stbi__context *s); +static void *stbi__bmp_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri); +static int stbi__bmp_info(stbi__context *s, int *x, int *y, int *comp); +#endif + +#ifndef STBI_NO_TGA +static int stbi__tga_test(stbi__context *s); +static void *stbi__tga_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri); +static int stbi__tga_info(stbi__context *s, int *x, int *y, int *comp); +#endif + +#ifndef STBI_NO_PSD +static int stbi__psd_test(stbi__context *s); +static void *stbi__psd_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri, int bpc); +static int stbi__psd_info(stbi__context *s, int *x, int *y, int *comp); +static int stbi__psd_is16(stbi__context *s); +#endif + +#ifndef STBI_NO_HDR +static int stbi__hdr_test(stbi__context *s); +static float *stbi__hdr_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri); +static int stbi__hdr_info(stbi__context *s, int *x, int *y, int *comp); +#endif + +#ifndef STBI_NO_PIC +static int stbi__pic_test(stbi__context *s); +static void *stbi__pic_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri); +static int stbi__pic_info(stbi__context *s, int *x, int *y, int *comp); +#endif + +#ifndef STBI_NO_GIF +static int stbi__gif_test(stbi__context *s); +static void *stbi__gif_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri); +static void *stbi__load_gif_main(stbi__context *s, int **delays, int *x, int *y, int *z, int *comp, int req_comp); +static int stbi__gif_info(stbi__context *s, int *x, int *y, int *comp); +#endif + +#ifndef STBI_NO_PNM +static int stbi__pnm_test(stbi__context *s); +static void *stbi__pnm_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri); +static int stbi__pnm_info(stbi__context *s, int *x, int *y, int *comp); +static int 
stbi__pnm_is16(stbi__context *s);
+#endif
+
+static
+#ifdef STBI_THREAD_LOCAL
+STBI_THREAD_LOCAL
+#endif
+const char *stbi__g_failure_reason;
+
+STBIDEF const char *stbi_failure_reason(void)
+{
+   return stbi__g_failure_reason;
+}
+
+#ifndef STBI_NO_FAILURE_STRINGS
+static int stbi__err(const char *str)
+{
+   stbi__g_failure_reason = str;
+   return 0;
+}
+#endif
+
+static void *stbi__malloc(size_t size)
+{
+   return STBI_MALLOC(size);
+}
+
+// stb_image uses ints pervasively, including for offset calculations.
+// therefore the largest decoded image size we can support with the
+// current code, even on 64-bit targets, is INT_MAX. this is not a
+// significant limitation for the intended use case.
+//
+// we do, however, need to make sure our size calculations don't
+// overflow. hence a few helper functions for size calculations that
+// multiply integers together, making sure that they're non-negative
+// and no overflow occurs.
+
+// return 1 if the sum is valid, 0 on overflow.
+// negative terms are considered invalid.
+static int stbi__addsizes_valid(int a, int b)
+{
+   if (b < 0) return 0;
+   // now 0 <= b <= INT_MAX, hence also
+   // 0 <= INT_MAX - b <= INT_MAX.
+   // And "a + b <= INT_MAX" (which might overflow) is the
+   // same as a <= INT_MAX - b (no overflow)
+   return a <= INT_MAX - b;
+}
+
+// returns 1 if the product is valid, 0 on overflow.
+// negative factors are considered invalid.
+static int stbi__mul2sizes_valid(int a, int b)
+{
+   if (a < 0 || b < 0) return 0;
+   if (b == 0) return 1; // mul-by-0 is always safe
+   // portable way to check for no overflows in a*b
+   return a <= INT_MAX/b;
+}
+
+#if !defined(STBI_NO_JPEG) || !defined(STBI_NO_PNG) || !defined(STBI_NO_TGA) || !defined(STBI_NO_HDR)
+// returns 1 if "a*b + add" has no negative terms/factors and doesn't overflow
+static int stbi__mad2sizes_valid(int a, int b, int add)
+{
+   return stbi__mul2sizes_valid(a, b) && stbi__addsizes_valid(a*b, add);
+}
+#endif
+
+// returns 1 if "a*b*c + add" has no negative terms/factors and doesn't overflow
+static int stbi__mad3sizes_valid(int a, int b, int c, int add)
+{
+   return stbi__mul2sizes_valid(a, b) && stbi__mul2sizes_valid(a*b, c) &&
+      stbi__addsizes_valid(a*b*c, add);
+}
+
+// returns 1 if "a*b*c*d + add" has no negative terms/factors and doesn't overflow
+#if !defined(STBI_NO_LINEAR) || !defined(STBI_NO_HDR) || !defined(STBI_NO_PNM)
+static int stbi__mad4sizes_valid(int a, int b, int c, int d, int add)
+{
+   return stbi__mul2sizes_valid(a, b) && stbi__mul2sizes_valid(a*b, c) &&
+      stbi__mul2sizes_valid(a*b*c, d) && stbi__addsizes_valid(a*b*c*d, add);
+}
+#endif
+
+#if !defined(STBI_NO_JPEG) || !defined(STBI_NO_PNG) || !defined(STBI_NO_TGA) || !defined(STBI_NO_HDR)
+// mallocs with size overflow checking
+static void *stbi__malloc_mad2(int a, int b, int add)
+{
+   if (!stbi__mad2sizes_valid(a, b, add)) return NULL;
+   return stbi__malloc(a*b + add);
+}
+#endif
+
+static void *stbi__malloc_mad3(int a, int b, int c, int add)
+{
+   if (!stbi__mad3sizes_valid(a, b, c, add)) return NULL;
+   return stbi__malloc(a*b*c + add);
+}
+
+#if !defined(STBI_NO_LINEAR) || !defined(STBI_NO_HDR) || !defined(STBI_NO_PNM)
+static void *stbi__malloc_mad4(int a, int b, int c, int d, int add)
+{
+   if (!stbi__mad4sizes_valid(a, b, c, d, add)) return NULL;
+   return stbi__malloc(a*b*c*d + add);
+}
+#endif
+
+// returns 1 if the sum of two signed ints is valid (between -2^31 and 2^31-1 inclusive), 0 on overflow.
+static int stbi__addints_valid(int a, int b) +{ + if ((a >= 0) != (b >= 0)) return 1; // a and b have different signs, so no overflow + if (a < 0 && b < 0) return a >= INT_MIN - b; // same as a + b >= INT_MIN; INT_MIN - b cannot overflow since b < 0. + return a <= INT_MAX - b; +} + +// returns 1 if the product of two signed shorts is valid, 0 on overflow. +static int stbi__mul2shorts_valid(short a, short b) +{ + if (b == 0 || b == -1) return 1; // multiplication by 0 is always 0; check for -1 so SHRT_MIN/b doesn't overflow + if ((a >= 0) == (b >= 0)) return a <= SHRT_MAX/b; // product is positive, so similar to mul2sizes_valid + if (b < 0) return a <= SHRT_MIN / b; // same as a * b >= SHRT_MIN + return a >= SHRT_MIN / b; +} + +// stbi__err - error +// stbi__errpf - error returning pointer to float +// stbi__errpuc - error returning pointer to unsigned char + +#ifdef STBI_NO_FAILURE_STRINGS + #define stbi__err(x,y) 0 +#elif defined(STBI_FAILURE_USERMSG) + #define stbi__err(x,y) stbi__err(y) +#else + #define stbi__err(x,y) stbi__err(x) +#endif + +#define stbi__errpf(x,y) ((float *)(size_t) (stbi__err(x,y)?NULL:NULL)) +#define stbi__errpuc(x,y) ((unsigned char *)(size_t) (stbi__err(x,y)?NULL:NULL)) + +STBIDEF void stbi_image_free(void *retval_from_stbi_load) +{ + STBI_FREE(retval_from_stbi_load); +} + +#ifndef STBI_NO_LINEAR +static float *stbi__ldr_to_hdr(stbi_uc *data, int x, int y, int comp); +#endif + +#ifndef STBI_NO_HDR +static stbi_uc *stbi__hdr_to_ldr(float *data, int x, int y, int comp); +#endif + +static int stbi__vertically_flip_on_load_global = 0; + +STBIDEF void stbi_set_flip_vertically_on_load(int flag_true_if_should_flip) +{ + stbi__vertically_flip_on_load_global = flag_true_if_should_flip; +} + +#ifndef STBI_THREAD_LOCAL +#define stbi__vertically_flip_on_load stbi__vertically_flip_on_load_global +#else +static STBI_THREAD_LOCAL int stbi__vertically_flip_on_load_local, stbi__vertically_flip_on_load_set; + +STBIDEF void stbi_set_flip_vertically_on_load_thread(int flag_true_if_should_flip) +{ + stbi__vertically_flip_on_load_local = flag_true_if_should_flip; + stbi__vertically_flip_on_load_set = 1; +} + +#define stbi__vertically_flip_on_load (stbi__vertically_flip_on_load_set \ + ? 
stbi__vertically_flip_on_load_local \ + : stbi__vertically_flip_on_load_global) +#endif // STBI_THREAD_LOCAL + +static void *stbi__load_main(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri, int bpc) +{ + memset(ri, 0, sizeof(*ri)); // make sure it's initialized if we add new fields + ri->bits_per_channel = 8; // default is 8 so most paths don't have to be changed + ri->channel_order = STBI_ORDER_RGB; // all current input & output are this, but this is here so we can add BGR order + ri->num_channels = 0; + + // test the formats with a very explicit header first (at least a FOURCC + // or distinctive magic number first) + #ifndef STBI_NO_PNG + if (stbi__png_test(s)) return stbi__png_load(s,x,y,comp,req_comp, ri); + #endif + #ifndef STBI_NO_BMP + if (stbi__bmp_test(s)) return stbi__bmp_load(s,x,y,comp,req_comp, ri); + #endif + #ifndef STBI_NO_GIF + if (stbi__gif_test(s)) return stbi__gif_load(s,x,y,comp,req_comp, ri); + #endif + #ifndef STBI_NO_PSD + if (stbi__psd_test(s)) return stbi__psd_load(s,x,y,comp,req_comp, ri, bpc); + #else + STBI_NOTUSED(bpc); + #endif + #ifndef STBI_NO_PIC + if (stbi__pic_test(s)) return stbi__pic_load(s,x,y,comp,req_comp, ri); + #endif + + // then the formats that can end up attempting to load with just 1 or 2 + // bytes matching expectations; these are prone to false positives, so + // try them later + #ifndef STBI_NO_JPEG + if (stbi__jpeg_test(s)) return stbi__jpeg_load(s,x,y,comp,req_comp, ri); + #endif + #ifndef STBI_NO_PNM + if (stbi__pnm_test(s)) return stbi__pnm_load(s,x,y,comp,req_comp, ri); + #endif + + #ifndef STBI_NO_HDR + if (stbi__hdr_test(s)) { + float *hdr = stbi__hdr_load(s, x,y,comp,req_comp, ri); + return stbi__hdr_to_ldr(hdr, *x, *y, req_comp ? req_comp : *comp); + } + #endif + + #ifndef STBI_NO_TGA + // test tga last because it's a crappy test! + if (stbi__tga_test(s)) + return stbi__tga_load(s,x,y,comp,req_comp, ri); + #endif + + return stbi__errpuc("unknown image type", "Image not of any known type, or corrupt"); +} + +static stbi_uc *stbi__convert_16_to_8(stbi__uint16 *orig, int w, int h, int channels) +{ + int i; + int img_len = w * h * channels; + stbi_uc *reduced; + + reduced = (stbi_uc *) stbi__malloc(img_len); + if (reduced == NULL) return stbi__errpuc("outofmem", "Out of memory"); + + for (i = 0; i < img_len; ++i) + reduced[i] = (stbi_uc)((orig[i] >> 8) & 0xFF); // top half of each byte is sufficient approx of 16->8 bit scaling + + STBI_FREE(orig); + return reduced; +} + +static stbi__uint16 *stbi__convert_8_to_16(stbi_uc *orig, int w, int h, int channels) +{ + int i; + int img_len = w * h * channels; + stbi__uint16 *enlarged; + + enlarged = (stbi__uint16 *) stbi__malloc(img_len*2); + if (enlarged == NULL) return (stbi__uint16 *) stbi__errpuc("outofmem", "Out of memory"); + + for (i = 0; i < img_len; ++i) + enlarged[i] = (stbi__uint16)((orig[i] << 8) + orig[i]); // replicate to high and low byte, maps 0->0, 255->0xffff + + STBI_FREE(orig); + return enlarged; +} + +static void stbi__vertical_flip(void *image, int w, int h, int bytes_per_pixel) +{ + int row; + size_t bytes_per_row = (size_t)w * bytes_per_pixel; + stbi_uc temp[2048]; + stbi_uc *bytes = (stbi_uc *)image; + + for (row = 0; row < (h>>1); row++) { + stbi_uc *row0 = bytes + row*bytes_per_row; + stbi_uc *row1 = bytes + (h - row - 1)*bytes_per_row; + // swap row0 with row1 + size_t bytes_left = bytes_per_row; + while (bytes_left) { + size_t bytes_copy = (bytes_left < sizeof(temp)) ? 
bytes_left : sizeof(temp); + memcpy(temp, row0, bytes_copy); + memcpy(row0, row1, bytes_copy); + memcpy(row1, temp, bytes_copy); + row0 += bytes_copy; + row1 += bytes_copy; + bytes_left -= bytes_copy; + } + } +} + +#ifndef STBI_NO_GIF +static void stbi__vertical_flip_slices(void *image, int w, int h, int z, int bytes_per_pixel) +{ + int slice; + int slice_size = w * h * bytes_per_pixel; + + stbi_uc *bytes = (stbi_uc *)image; + for (slice = 0; slice < z; ++slice) { + stbi__vertical_flip(bytes, w, h, bytes_per_pixel); + bytes += slice_size; + } +} +#endif + +static unsigned char *stbi__load_and_postprocess_8bit(stbi__context *s, int *x, int *y, int *comp, int req_comp) +{ + stbi__result_info ri; + void *result = stbi__load_main(s, x, y, comp, req_comp, &ri, 8); + + if (result == NULL) + return NULL; + + // it is the responsibility of the loaders to make sure we get either 8 or 16 bit. + STBI_ASSERT(ri.bits_per_channel == 8 || ri.bits_per_channel == 16); + + if (ri.bits_per_channel != 8) { + result = stbi__convert_16_to_8((stbi__uint16 *) result, *x, *y, req_comp == 0 ? *comp : req_comp); + ri.bits_per_channel = 8; + } + + // @TODO: move stbi__convert_format to here + + if (stbi__vertically_flip_on_load) { + int channels = req_comp ? req_comp : *comp; + stbi__vertical_flip(result, *x, *y, channels * sizeof(stbi_uc)); + } + + return (unsigned char *) result; +} + +static stbi__uint16 *stbi__load_and_postprocess_16bit(stbi__context *s, int *x, int *y, int *comp, int req_comp) +{ + stbi__result_info ri; + void *result = stbi__load_main(s, x, y, comp, req_comp, &ri, 16); + + if (result == NULL) + return NULL; + + // it is the responsibility of the loaders to make sure we get either 8 or 16 bit. + STBI_ASSERT(ri.bits_per_channel == 8 || ri.bits_per_channel == 16); + + if (ri.bits_per_channel != 16) { + result = stbi__convert_8_to_16((stbi_uc *) result, *x, *y, req_comp == 0 ? *comp : req_comp); + ri.bits_per_channel = 16; + } + + // @TODO: move stbi__convert_format16 to here + // @TODO: special case RGB-to-Y (and RGBA-to-YA) for 8-bit-to-16-bit case to keep more precision + + if (stbi__vertically_flip_on_load) { + int channels = req_comp ? req_comp : *comp; + stbi__vertical_flip(result, *x, *y, channels * sizeof(stbi__uint16)); + } + + return (stbi__uint16 *) result; +} + +#if !defined(STBI_NO_HDR) && !defined(STBI_NO_LINEAR) +static void stbi__float_postprocess(float *result, int *x, int *y, int *comp, int req_comp) +{ + if (stbi__vertically_flip_on_load && result != NULL) { + int channels = req_comp ? 
req_comp : *comp; + stbi__vertical_flip(result, *x, *y, channels * sizeof(float)); + } +} +#endif + +#ifndef STBI_NO_STDIO + +#if defined(_WIN32) && defined(STBI_WINDOWS_UTF8) +STBI_EXTERN __declspec(dllimport) int __stdcall MultiByteToWideChar(unsigned int cp, unsigned long flags, const char *str, int cbmb, wchar_t *widestr, int cchwide); +STBI_EXTERN __declspec(dllimport) int __stdcall WideCharToMultiByte(unsigned int cp, unsigned long flags, const wchar_t *widestr, int cchwide, char *str, int cbmb, const char *defchar, int *used_default); +#endif + +#if defined(_WIN32) && defined(STBI_WINDOWS_UTF8) +STBIDEF int stbi_convert_wchar_to_utf8(char *buffer, size_t bufferlen, const wchar_t* input) +{ + return WideCharToMultiByte(65001 /* UTF8 */, 0, input, -1, buffer, (int) bufferlen, NULL, NULL); +} +#endif + +static FILE *stbi__fopen(char const *filename, char const *mode) +{ + FILE *f; +#if defined(_WIN32) && defined(STBI_WINDOWS_UTF8) + wchar_t wMode[64]; + wchar_t wFilename[1024]; + if (0 == MultiByteToWideChar(65001 /* UTF8 */, 0, filename, -1, wFilename, sizeof(wFilename)/sizeof(*wFilename))) + return 0; + + if (0 == MultiByteToWideChar(65001 /* UTF8 */, 0, mode, -1, wMode, sizeof(wMode)/sizeof(*wMode))) + return 0; + +#if defined(_MSC_VER) && _MSC_VER >= 1400 + if (0 != _wfopen_s(&f, wFilename, wMode)) + f = 0; +#else + f = _wfopen(wFilename, wMode); +#endif + +#elif defined(_MSC_VER) && _MSC_VER >= 1400 + if (0 != fopen_s(&f, filename, mode)) + f=0; +#else + f = fopen(filename, mode); +#endif + return f; +} + + +STBIDEF stbi_uc *stbi_load(char const *filename, int *x, int *y, int *comp, int req_comp) +{ + FILE *f = stbi__fopen(filename, "rb"); + unsigned char *result; + if (!f) return stbi__errpuc("can't fopen", "Unable to open file"); + result = stbi_load_from_file(f,x,y,comp,req_comp); + fclose(f); + return result; +} + +STBIDEF stbi_uc *stbi_load_from_file(FILE *f, int *x, int *y, int *comp, int req_comp) +{ + unsigned char *result; + stbi__context s; + stbi__start_file(&s,f); + result = stbi__load_and_postprocess_8bit(&s,x,y,comp,req_comp); + if (result) { + // need to 'unget' all the characters in the IO buffer + fseek(f, - (int) (s.img_buffer_end - s.img_buffer), SEEK_CUR); + } + return result; +} + +STBIDEF stbi__uint16 *stbi_load_from_file_16(FILE *f, int *x, int *y, int *comp, int req_comp) +{ + stbi__uint16 *result; + stbi__context s; + stbi__start_file(&s,f); + result = stbi__load_and_postprocess_16bit(&s,x,y,comp,req_comp); + if (result) { + // need to 'unget' all the characters in the IO buffer + fseek(f, - (int) (s.img_buffer_end - s.img_buffer), SEEK_CUR); + } + return result; +} + +STBIDEF stbi_us *stbi_load_16(char const *filename, int *x, int *y, int *comp, int req_comp) +{ + FILE *f = stbi__fopen(filename, "rb"); + stbi__uint16 *result; + if (!f) return (stbi_us *) stbi__errpuc("can't fopen", "Unable to open file"); + result = stbi_load_from_file_16(f,x,y,comp,req_comp); + fclose(f); + return result; +} + + +#endif //!STBI_NO_STDIO + +STBIDEF stbi_us *stbi_load_16_from_memory(stbi_uc const *buffer, int len, int *x, int *y, int *channels_in_file, int desired_channels) +{ + stbi__context s; + stbi__start_mem(&s,buffer,len); + return stbi__load_and_postprocess_16bit(&s,x,y,channels_in_file,desired_channels); +} + +STBIDEF stbi_us *stbi_load_16_from_callbacks(stbi_io_callbacks const *clbk, void *user, int *x, int *y, int *channels_in_file, int desired_channels) +{ + stbi__context s; + stbi__start_callbacks(&s, (stbi_io_callbacks *)clbk, user); + return 
stbi__load_and_postprocess_16bit(&s,x,y,channels_in_file,desired_channels);
+}
+
+STBIDEF stbi_uc *stbi_load_from_memory(stbi_uc const *buffer, int len, int *x, int *y, int *comp, int req_comp)
+{
+   stbi__context s;
+   stbi__start_mem(&s,buffer,len);
+   return stbi__load_and_postprocess_8bit(&s,x,y,comp,req_comp);
+}
+
+STBIDEF stbi_uc *stbi_load_from_callbacks(stbi_io_callbacks const *clbk, void *user, int *x, int *y, int *comp, int req_comp)
+{
+   stbi__context s;
+   stbi__start_callbacks(&s, (stbi_io_callbacks *) clbk, user);
+   return stbi__load_and_postprocess_8bit(&s,x,y,comp,req_comp);
+}
+
+#ifndef STBI_NO_GIF
+STBIDEF stbi_uc *stbi_load_gif_from_memory(stbi_uc const *buffer, int len, int **delays, int *x, int *y, int *z, int *comp, int req_comp)
+{
+   unsigned char *result;
+   stbi__context s;
+   stbi__start_mem(&s,buffer,len);
+
+   result = (unsigned char*) stbi__load_gif_main(&s, delays, x, y, z, comp, req_comp);
+   if (stbi__vertically_flip_on_load) {
+      stbi__vertical_flip_slices( result, *x, *y, *z, *comp );
+   }
+
+   return result;
+}
+#endif
+
+#ifndef STBI_NO_LINEAR
+static float *stbi__loadf_main(stbi__context *s, int *x, int *y, int *comp, int req_comp)
+{
+   unsigned char *data;
+   #ifndef STBI_NO_HDR
+   if (stbi__hdr_test(s)) {
+      stbi__result_info ri;
+      float *hdr_data = stbi__hdr_load(s,x,y,comp,req_comp, &ri);
+      if (hdr_data)
+         stbi__float_postprocess(hdr_data,x,y,comp,req_comp);
+      return hdr_data;
+   }
+   #endif
+   data = stbi__load_and_postprocess_8bit(s, x, y, comp, req_comp);
+   if (data)
+      return stbi__ldr_to_hdr(data, *x, *y, req_comp ? req_comp : *comp);
+   return stbi__errpf("unknown image type", "Image not of any known type, or corrupt");
+}
+
+STBIDEF float *stbi_loadf_from_memory(stbi_uc const *buffer, int len, int *x, int *y, int *comp, int req_comp)
+{
+   stbi__context s;
+   stbi__start_mem(&s,buffer,len);
+   return stbi__loadf_main(&s,x,y,comp,req_comp);
+}
+
+STBIDEF float *stbi_loadf_from_callbacks(stbi_io_callbacks const *clbk, void *user, int *x, int *y, int *comp, int req_comp)
+{
+   stbi__context s;
+   stbi__start_callbacks(&s, (stbi_io_callbacks *) clbk, user);
+   return stbi__loadf_main(&s,x,y,comp,req_comp);
+}
+
+#ifndef STBI_NO_STDIO
+STBIDEF float *stbi_loadf(char const *filename, int *x, int *y, int *comp, int req_comp)
+{
+   float *result;
+   FILE *f = stbi__fopen(filename, "rb");
+   if (!f) return stbi__errpf("can't fopen", "Unable to open file");
+   result = stbi_loadf_from_file(f,x,y,comp,req_comp);
+   fclose(f);
+   return result;
+}
+
+STBIDEF float *stbi_loadf_from_file(FILE *f, int *x, int *y, int *comp, int req_comp)
+{
+   stbi__context s;
+   stbi__start_file(&s,f);
+   return stbi__loadf_main(&s,x,y,comp,req_comp);
+}
+#endif // !STBI_NO_STDIO
+
+#endif // !STBI_NO_LINEAR
+
+// these is-hdr-or-not queries are defined independent of whether STBI_NO_LINEAR
+// is defined, for API simplicity; if STBI_NO_LINEAR is defined, they always
+// report false!
+ +STBIDEF int stbi_is_hdr_from_memory(stbi_uc const *buffer, int len) +{ + #ifndef STBI_NO_HDR + stbi__context s; + stbi__start_mem(&s,buffer,len); + return stbi__hdr_test(&s); + #else + STBI_NOTUSED(buffer); + STBI_NOTUSED(len); + return 0; + #endif +} + +#ifndef STBI_NO_STDIO +STBIDEF int stbi_is_hdr (char const *filename) +{ + FILE *f = stbi__fopen(filename, "rb"); + int result=0; + if (f) { + result = stbi_is_hdr_from_file(f); + fclose(f); + } + return result; +} + +STBIDEF int stbi_is_hdr_from_file(FILE *f) +{ + #ifndef STBI_NO_HDR + long pos = ftell(f); + int res; + stbi__context s; + stbi__start_file(&s,f); + res = stbi__hdr_test(&s); + fseek(f, pos, SEEK_SET); + return res; + #else + STBI_NOTUSED(f); + return 0; + #endif +} +#endif // !STBI_NO_STDIO + +STBIDEF int stbi_is_hdr_from_callbacks(stbi_io_callbacks const *clbk, void *user) +{ + #ifndef STBI_NO_HDR + stbi__context s; + stbi__start_callbacks(&s, (stbi_io_callbacks *) clbk, user); + return stbi__hdr_test(&s); + #else + STBI_NOTUSED(clbk); + STBI_NOTUSED(user); + return 0; + #endif +} + +#ifndef STBI_NO_LINEAR +static float stbi__l2h_gamma=2.2f, stbi__l2h_scale=1.0f; + +STBIDEF void stbi_ldr_to_hdr_gamma(float gamma) { stbi__l2h_gamma = gamma; } +STBIDEF void stbi_ldr_to_hdr_scale(float scale) { stbi__l2h_scale = scale; } +#endif + +static float stbi__h2l_gamma_i=1.0f/2.2f, stbi__h2l_scale_i=1.0f; + +STBIDEF void stbi_hdr_to_ldr_gamma(float gamma) { stbi__h2l_gamma_i = 1/gamma; } +STBIDEF void stbi_hdr_to_ldr_scale(float scale) { stbi__h2l_scale_i = 1/scale; } + + +////////////////////////////////////////////////////////////////////////////// +// +// Common code used by all image loaders +// + +enum +{ + STBI__SCAN_load=0, + STBI__SCAN_type, + STBI__SCAN_header +}; + +static void stbi__refill_buffer(stbi__context *s) +{ + int n = (s->io.read)(s->io_user_data,(char*)s->buffer_start,s->buflen); + s->callback_already_read += (int) (s->img_buffer - s->img_buffer_original); + if (n == 0) { + // at end of file, treat same as if from memory, but need to handle case + // where s->img_buffer isn't pointing to safe memory, e.g. 0-byte file + s->read_from_callbacks = 0; + s->img_buffer = s->buffer_start; + s->img_buffer_end = s->buffer_start+1; + *s->img_buffer = 0; + } else { + s->img_buffer = s->buffer_start; + s->img_buffer_end = s->buffer_start + n; + } +} + +stbi_inline static stbi_uc stbi__get8(stbi__context *s) +{ + if (s->img_buffer < s->img_buffer_end) + return *s->img_buffer++; + if (s->read_from_callbacks) { + stbi__refill_buffer(s); + return *s->img_buffer++; + } + return 0; +} + +#if defined(STBI_NO_JPEG) && defined(STBI_NO_HDR) && defined(STBI_NO_PIC) && defined(STBI_NO_PNM) +// nothing +#else +stbi_inline static int stbi__at_eof(stbi__context *s) +{ + if (s->io.read) { + if (!(s->io.eof)(s->io_user_data)) return 0; + // if feof() is true, check if buffer = end + // special case: we've only got the special 0 character at the end + if (s->read_from_callbacks == 0) return 1; + } + + return s->img_buffer >= s->img_buffer_end; +} +#endif + +#if defined(STBI_NO_JPEG) && defined(STBI_NO_PNG) && defined(STBI_NO_BMP) && defined(STBI_NO_PSD) && defined(STBI_NO_TGA) && defined(STBI_NO_GIF) && defined(STBI_NO_PIC) +// nothing +#else +static void stbi__skip(stbi__context *s, int n) +{ + if (n == 0) return; // already there! 
+ if (n < 0) { + s->img_buffer = s->img_buffer_end; + return; + } + if (s->io.read) { + int blen = (int) (s->img_buffer_end - s->img_buffer); + if (blen < n) { + s->img_buffer = s->img_buffer_end; + (s->io.skip)(s->io_user_data, n - blen); + return; + } + } + s->img_buffer += n; +} +#endif + +#if defined(STBI_NO_PNG) && defined(STBI_NO_TGA) && defined(STBI_NO_HDR) && defined(STBI_NO_PNM) +// nothing +#else +static int stbi__getn(stbi__context *s, stbi_uc *buffer, int n) +{ + if (s->io.read) { + int blen = (int) (s->img_buffer_end - s->img_buffer); + if (blen < n) { + int res, count; + + memcpy(buffer, s->img_buffer, blen); + + count = (s->io.read)(s->io_user_data, (char*) buffer + blen, n - blen); + res = (count == (n-blen)); + s->img_buffer = s->img_buffer_end; + return res; + } + } + + if (s->img_buffer+n <= s->img_buffer_end) { + memcpy(buffer, s->img_buffer, n); + s->img_buffer += n; + return 1; + } else + return 0; +} +#endif + +#if defined(STBI_NO_JPEG) && defined(STBI_NO_PNG) && defined(STBI_NO_PSD) && defined(STBI_NO_PIC) +// nothing +#else +static int stbi__get16be(stbi__context *s) +{ + int z = stbi__get8(s); + return (z << 8) + stbi__get8(s); +} +#endif + +#if defined(STBI_NO_PNG) && defined(STBI_NO_PSD) && defined(STBI_NO_PIC) +// nothing +#else +static stbi__uint32 stbi__get32be(stbi__context *s) +{ + stbi__uint32 z = stbi__get16be(s); + return (z << 16) + stbi__get16be(s); +} +#endif + +#if defined(STBI_NO_BMP) && defined(STBI_NO_TGA) && defined(STBI_NO_GIF) +// nothing +#else +static int stbi__get16le(stbi__context *s) +{ + int z = stbi__get8(s); + return z + (stbi__get8(s) << 8); +} +#endif + +#ifndef STBI_NO_BMP +static stbi__uint32 stbi__get32le(stbi__context *s) +{ + stbi__uint32 z = stbi__get16le(s); + z += (stbi__uint32)stbi__get16le(s) << 16; + return z; +} +#endif + +#define STBI__BYTECAST(x) ((stbi_uc) ((x) & 255)) // truncate int to byte without warnings + +#if defined(STBI_NO_JPEG) && defined(STBI_NO_PNG) && defined(STBI_NO_BMP) && defined(STBI_NO_PSD) && defined(STBI_NO_TGA) && defined(STBI_NO_GIF) && defined(STBI_NO_PIC) && defined(STBI_NO_PNM) +// nothing +#else +////////////////////////////////////////////////////////////////////////////// +// +// generic converter from built-in img_n to req_comp +// individual types do this automatically as much as possible (e.g. jpeg +// does all cases internally since it needs to colorspace convert anyway, +// and it never has alpha, so very few cases ). 
png can automatically +// interleave an alpha=255 channel, but falls back to this for other cases +// +// assume data buffer is malloced, so malloc a new one and free that one +// only failure mode is malloc failing + +static stbi_uc stbi__compute_y(int r, int g, int b) +{ + return (stbi_uc) (((r*77) + (g*150) + (29*b)) >> 8); +} +#endif + +#if defined(STBI_NO_PNG) && defined(STBI_NO_BMP) && defined(STBI_NO_PSD) && defined(STBI_NO_TGA) && defined(STBI_NO_GIF) && defined(STBI_NO_PIC) && defined(STBI_NO_PNM) +// nothing +#else +static unsigned char *stbi__convert_format(unsigned char *data, int img_n, int req_comp, unsigned int x, unsigned int y) +{ + int i,j; + unsigned char *good; + + if (req_comp == img_n) return data; + STBI_ASSERT(req_comp >= 1 && req_comp <= 4); + + good = (unsigned char *) stbi__malloc_mad3(req_comp, x, y, 0); + if (good == NULL) { + STBI_FREE(data); + return stbi__errpuc("outofmem", "Out of memory"); + } + + for (j=0; j < (int) y; ++j) { + unsigned char *src = data + j * x * img_n ; + unsigned char *dest = good + j * x * req_comp; + + #define STBI__COMBO(a,b) ((a)*8+(b)) + #define STBI__CASE(a,b) case STBI__COMBO(a,b): for(i=x-1; i >= 0; --i, src += a, dest += b) + // convert source image with img_n components to one with req_comp components; + // avoid switch per pixel, so use switch per scanline and massive macros + switch (STBI__COMBO(img_n, req_comp)) { + STBI__CASE(1,2) { dest[0]=src[0]; dest[1]=255; } break; + STBI__CASE(1,3) { dest[0]=dest[1]=dest[2]=src[0]; } break; + STBI__CASE(1,4) { dest[0]=dest[1]=dest[2]=src[0]; dest[3]=255; } break; + STBI__CASE(2,1) { dest[0]=src[0]; } break; + STBI__CASE(2,3) { dest[0]=dest[1]=dest[2]=src[0]; } break; + STBI__CASE(2,4) { dest[0]=dest[1]=dest[2]=src[0]; dest[3]=src[1]; } break; + STBI__CASE(3,4) { dest[0]=src[0];dest[1]=src[1];dest[2]=src[2];dest[3]=255; } break; + STBI__CASE(3,1) { dest[0]=stbi__compute_y(src[0],src[1],src[2]); } break; + STBI__CASE(3,2) { dest[0]=stbi__compute_y(src[0],src[1],src[2]); dest[1] = 255; } break; + STBI__CASE(4,1) { dest[0]=stbi__compute_y(src[0],src[1],src[2]); } break; + STBI__CASE(4,2) { dest[0]=stbi__compute_y(src[0],src[1],src[2]); dest[1] = src[3]; } break; + STBI__CASE(4,3) { dest[0]=src[0];dest[1]=src[1];dest[2]=src[2]; } break; + default: STBI_ASSERT(0); STBI_FREE(data); STBI_FREE(good); return stbi__errpuc("unsupported", "Unsupported format conversion"); + } + #undef STBI__CASE + } + + STBI_FREE(data); + return good; +} +#endif + +#if defined(STBI_NO_PNG) && defined(STBI_NO_PSD) +// nothing +#else +static stbi__uint16 stbi__compute_y_16(int r, int g, int b) +{ + return (stbi__uint16) (((r*77) + (g*150) + (29*b)) >> 8); +} +#endif + +#if defined(STBI_NO_PNG) && defined(STBI_NO_PSD) +// nothing +#else +static stbi__uint16 *stbi__convert_format16(stbi__uint16 *data, int img_n, int req_comp, unsigned int x, unsigned int y) +{ + int i,j; + stbi__uint16 *good; + + if (req_comp == img_n) return data; + STBI_ASSERT(req_comp >= 1 && req_comp <= 4); + + good = (stbi__uint16 *) stbi__malloc(req_comp * x * y * 2); + if (good == NULL) { + STBI_FREE(data); + return (stbi__uint16 *) stbi__errpuc("outofmem", "Out of memory"); + } + + for (j=0; j < (int) y; ++j) { + stbi__uint16 *src = data + j * x * img_n ; + stbi__uint16 *dest = good + j * x * req_comp; + + #define STBI__COMBO(a,b) ((a)*8+(b)) + #define STBI__CASE(a,b) case STBI__COMBO(a,b): for(i=x-1; i >= 0; --i, src += a, dest += b) + // convert source image with img_n components to one with req_comp components; + // avoid switch per pixel, 
so use switch per scanline and massive macros + switch (STBI__COMBO(img_n, req_comp)) { + STBI__CASE(1,2) { dest[0]=src[0]; dest[1]=0xffff; } break; + STBI__CASE(1,3) { dest[0]=dest[1]=dest[2]=src[0]; } break; + STBI__CASE(1,4) { dest[0]=dest[1]=dest[2]=src[0]; dest[3]=0xffff; } break; + STBI__CASE(2,1) { dest[0]=src[0]; } break; + STBI__CASE(2,3) { dest[0]=dest[1]=dest[2]=src[0]; } break; + STBI__CASE(2,4) { dest[0]=dest[1]=dest[2]=src[0]; dest[3]=src[1]; } break; + STBI__CASE(3,4) { dest[0]=src[0];dest[1]=src[1];dest[2]=src[2];dest[3]=0xffff; } break; + STBI__CASE(3,1) { dest[0]=stbi__compute_y_16(src[0],src[1],src[2]); } break; + STBI__CASE(3,2) { dest[0]=stbi__compute_y_16(src[0],src[1],src[2]); dest[1] = 0xffff; } break; + STBI__CASE(4,1) { dest[0]=stbi__compute_y_16(src[0],src[1],src[2]); } break; + STBI__CASE(4,2) { dest[0]=stbi__compute_y_16(src[0],src[1],src[2]); dest[1] = src[3]; } break; + STBI__CASE(4,3) { dest[0]=src[0];dest[1]=src[1];dest[2]=src[2]; } break; + default: STBI_ASSERT(0); STBI_FREE(data); STBI_FREE(good); return (stbi__uint16*) stbi__errpuc("unsupported", "Unsupported format conversion"); + } + #undef STBI__CASE + } + + STBI_FREE(data); + return good; +} +#endif + +#ifndef STBI_NO_LINEAR +static float *stbi__ldr_to_hdr(stbi_uc *data, int x, int y, int comp) +{ + int i,k,n; + float *output; + if (!data) return NULL; + output = (float *) stbi__malloc_mad4(x, y, comp, sizeof(float), 0); + if (output == NULL) { STBI_FREE(data); return stbi__errpf("outofmem", "Out of memory"); } + // compute number of non-alpha components + if (comp & 1) n = comp; else n = comp-1; + for (i=0; i < x*y; ++i) { + for (k=0; k < n; ++k) { + output[i*comp + k] = (float) (pow(data[i*comp+k]/255.0f, stbi__l2h_gamma) * stbi__l2h_scale); + } + } + if (n < comp) { + for (i=0; i < x*y; ++i) { + output[i*comp + n] = data[i*comp + n]/255.0f; + } + } + STBI_FREE(data); + return output; +} +#endif + +#ifndef STBI_NO_HDR +#define stbi__float2int(x) ((int) (x)) +static stbi_uc *stbi__hdr_to_ldr(float *data, int x, int y, int comp) +{ + int i,k,n; + stbi_uc *output; + if (!data) return NULL; + output = (stbi_uc *) stbi__malloc_mad3(x, y, comp, 0); + if (output == NULL) { STBI_FREE(data); return stbi__errpuc("outofmem", "Out of memory"); } + // compute number of non-alpha components + if (comp & 1) n = comp; else n = comp-1; + for (i=0; i < x*y; ++i) { + for (k=0; k < n; ++k) { + float z = (float) pow(data[i*comp+k]*stbi__h2l_scale_i, stbi__h2l_gamma_i) * 255 + 0.5f; + if (z < 0) z = 0; + if (z > 255) z = 255; + output[i*comp + k] = (stbi_uc) stbi__float2int(z); + } + if (k < comp) { + float z = data[i*comp+k] * 255 + 0.5f; + if (z < 0) z = 0; + if (z > 255) z = 255; + output[i*comp + k] = (stbi_uc) stbi__float2int(z); + } + } + STBI_FREE(data); + return output; +} +#endif + +////////////////////////////////////////////////////////////////////////////// +// +// "baseline" JPEG/JFIF decoder +// +// simple implementation +// - doesn't support delayed output of y-dimension +// - simple interface (only one output format: 8-bit interleaved RGB) +// - doesn't try to recover corrupt jpegs +// - doesn't allow partial loading, loading multiple at once +// - still fast on x86 (copying globals into locals doesn't help x86) +// - allocates lots of intermediate memory (full size of all components) +// - non-interleaved case requires this anyway +// - allows good upsampling (see next) +// high-quality +// - upsampled channels are bilinearly interpolated, even across blocks +// - quality integer IDCT derived from IJG's 
'slow' +// performance +// - fast huffman; reasonable integer IDCT +// - some SIMD kernels for common paths on targets with SSE2/NEON +// - uses a lot of intermediate memory, could cache poorly + +#ifndef STBI_NO_JPEG + +// huffman decoding acceleration +#define FAST_BITS 9 // larger handles more cases; smaller stomps less cache + +typedef struct +{ + stbi_uc fast[1 << FAST_BITS]; + // weirdly, repacking this into AoS is a 10% speed loss, instead of a win + stbi__uint16 code[256]; + stbi_uc values[256]; + stbi_uc size[257]; + unsigned int maxcode[18]; + int delta[17]; // old 'firstsymbol' - old 'firstcode' +} stbi__huffman; + +typedef struct +{ + stbi__context *s; + stbi__huffman huff_dc[4]; + stbi__huffman huff_ac[4]; + stbi__uint16 dequant[4][64]; + stbi__int16 fast_ac[4][1 << FAST_BITS]; + +// sizes for components, interleaved MCUs + int img_h_max, img_v_max; + int img_mcu_x, img_mcu_y; + int img_mcu_w, img_mcu_h; + +// definition of jpeg image component + struct + { + int id; + int h,v; + int tq; + int hd,ha; + int dc_pred; + + int x,y,w2,h2; + stbi_uc *data; + void *raw_data, *raw_coeff; + stbi_uc *linebuf; + short *coeff; // progressive only + int coeff_w, coeff_h; // number of 8x8 coefficient blocks + } img_comp[4]; + + stbi__uint32 code_buffer; // jpeg entropy-coded buffer + int code_bits; // number of valid bits + unsigned char marker; // marker seen while filling entropy buffer + int nomore; // flag if we saw a marker so must stop + + int progressive; + int spec_start; + int spec_end; + int succ_high; + int succ_low; + int eob_run; + int jfif; + int app14_color_transform; // Adobe APP14 tag + int rgb; + + int scan_n, order[4]; + int restart_interval, todo; + +// kernels + void (*idct_block_kernel)(stbi_uc *out, int out_stride, short data[64]); + void (*YCbCr_to_RGB_kernel)(stbi_uc *out, const stbi_uc *y, const stbi_uc *pcb, const stbi_uc *pcr, int count, int step); + stbi_uc *(*resample_row_hv_2_kernel)(stbi_uc *out, stbi_uc *in_near, stbi_uc *in_far, int w, int hs); +} stbi__jpeg; + +static int stbi__build_huffman(stbi__huffman *h, int *count) +{ + int i,j,k=0; + unsigned int code; + // build size list for each symbol (from JPEG spec) + for (i=0; i < 16; ++i) { + for (j=0; j < count[i]; ++j) { + h->size[k++] = (stbi_uc) (i+1); + if(k >= 257) return stbi__err("bad size list","Corrupt JPEG"); + } + } + h->size[k] = 0; + + // compute actual symbols (from jpeg spec) + code = 0; + k = 0; + for(j=1; j <= 16; ++j) { + // compute delta to add to code to compute symbol id + h->delta[j] = k - code; + if (h->size[k] == j) { + while (h->size[k] == j) + h->code[k++] = (stbi__uint16) (code++); + if (code-1 >= (1u << j)) return stbi__err("bad code lengths","Corrupt JPEG"); + } + // compute largest code + 1 for this size, preshifted as needed later + h->maxcode[j] = code << (16-j); + code <<= 1; + } + h->maxcode[j] = 0xffffffff; + + // build non-spec acceleration table; 255 is flag for not-accelerated + memset(h->fast, 255, 1 << FAST_BITS); + for (i=0; i < k; ++i) { + int s = h->size[i]; + if (s <= FAST_BITS) { + int c = h->code[i] << (FAST_BITS-s); + int m = 1 << (FAST_BITS-s); + for (j=0; j < m; ++j) { + h->fast[c+j] = (stbi_uc) i; + } + } + } + return 1; +} + +// build a table that decodes both magnitude and value of small ACs in +// one go. 
+static void stbi__build_fast_ac(stbi__int16 *fast_ac, stbi__huffman *h) +{ + int i; + for (i=0; i < (1 << FAST_BITS); ++i) { + stbi_uc fast = h->fast[i]; + fast_ac[i] = 0; + if (fast < 255) { + int rs = h->values[fast]; + int run = (rs >> 4) & 15; + int magbits = rs & 15; + int len = h->size[fast]; + + if (magbits && len + magbits <= FAST_BITS) { + // magnitude code followed by receive_extend code + int k = ((i << len) & ((1 << FAST_BITS) - 1)) >> (FAST_BITS - magbits); + int m = 1 << (magbits - 1); + if (k < m) k += (~0U << magbits) + 1; + // if the result is small enough, we can fit it in fast_ac table + if (k >= -128 && k <= 127) + fast_ac[i] = (stbi__int16) ((k * 256) + (run * 16) + (len + magbits)); + } + } + } +} + +static void stbi__grow_buffer_unsafe(stbi__jpeg *j) +{ + do { + unsigned int b = j->nomore ? 0 : stbi__get8(j->s); + if (b == 0xff) { + int c = stbi__get8(j->s); + while (c == 0xff) c = stbi__get8(j->s); // consume fill bytes + if (c != 0) { + j->marker = (unsigned char) c; + j->nomore = 1; + return; + } + } + j->code_buffer |= b << (24 - j->code_bits); + j->code_bits += 8; + } while (j->code_bits <= 24); +} + +// (1 << n) - 1 +static const stbi__uint32 stbi__bmask[17]={0,1,3,7,15,31,63,127,255,511,1023,2047,4095,8191,16383,32767,65535}; + +// decode a jpeg huffman value from the bitstream +stbi_inline static int stbi__jpeg_huff_decode(stbi__jpeg *j, stbi__huffman *h) +{ + unsigned int temp; + int c,k; + + if (j->code_bits < 16) stbi__grow_buffer_unsafe(j); + + // look at the top FAST_BITS and determine what symbol ID it is, + // if the code is <= FAST_BITS + c = (j->code_buffer >> (32 - FAST_BITS)) & ((1 << FAST_BITS)-1); + k = h->fast[c]; + if (k < 255) { + int s = h->size[k]; + if (s > j->code_bits) + return -1; + j->code_buffer <<= s; + j->code_bits -= s; + return h->values[k]; + } + + // naive test is to shift the code_buffer down so k bits are + // valid, then test against maxcode. To speed this up, we've + // preshifted maxcode left so that it has (16-k) 0s at the + // end; in other words, regardless of the number of bits, it + // wants to be compared against something shifted to have 16; + // that way we don't need to shift inside the loop. + temp = j->code_buffer >> 16; + for (k=FAST_BITS+1 ; ; ++k) + if (temp < h->maxcode[k]) + break; + if (k == 17) { + // error! code not found + j->code_bits -= 16; + return -1; + } + + if (k > j->code_bits) + return -1; + + // convert the huffman code to the symbol id + c = ((j->code_buffer >> (32 - k)) & stbi__bmask[k]) + h->delta[k]; + if(c < 0 || c >= 256) // symbol id out of bounds! 
+      return -1;
+   STBI_ASSERT((((j->code_buffer) >> (32 - h->size[c])) & stbi__bmask[h->size[c]]) == h->code[c]);
+
+   // convert the id to a symbol
+   j->code_bits -= k;
+   j->code_buffer <<= k;
+   return h->values[c];
+}
+
+// bias[n] = (-1<<n) + 1
+static const int stbi__jbias[16] = {0,-1,-3,-7,-15,-31,-63,-127,-255,-511,-1023,-2047,-4095,-8191,-16383,-32767};
+
+// combined JPEG 'receive' and JPEG 'extend', since baseline
+// always extends everything it receives.
+stbi_inline static int stbi__extend_receive(stbi__jpeg *j, int n)
+{
+   unsigned int k;
+   int sgn;
+   if (j->code_bits < n) stbi__grow_buffer_unsafe(j);
+   if (j->code_bits < n) return 0; // ran out of bits from stream, return 0s instead of continuing
+
+   sgn = j->code_buffer >> 31; // sign bit always in MSB; 0 if MSB clear (positive), 1 if MSB set (negative)
+   k = stbi_lrot(j->code_buffer, n);
+   j->code_buffer = k & ~stbi__bmask[n];
+   k &= stbi__bmask[n];
+   j->code_bits -= n;
+   return k + (stbi__jbias[n] & (sgn - 1));
+}
+
+// get some unsigned bits
+stbi_inline static int stbi__jpeg_get_bits(stbi__jpeg *j, int n)
+{
+   unsigned int k;
+   if (j->code_bits < n) stbi__grow_buffer_unsafe(j);
+   if (j->code_bits < n) return 0; // ran out of bits from stream, return 0s instead of continuing
+   k = stbi_lrot(j->code_buffer, n);
+   j->code_buffer = k & ~stbi__bmask[n];
+   k &= stbi__bmask[n];
+   j->code_bits -= n;
+   return k;
+}
+
+stbi_inline static int stbi__jpeg_get_bit(stbi__jpeg *j)
+{
+   unsigned int k;
+   if (j->code_bits < 1) stbi__grow_buffer_unsafe(j);
+   if (j->code_bits < 1) return 0; // ran out of bits from stream, return 0s instead of continuing
+   k = j->code_buffer;
+   j->code_buffer <<= 1;
+   --j->code_bits;
+   return k & 0x80000000;
+}
+
+// given a value that's at position X in the zigzag stream,
+// where does it appear in the 8x8 matrix coded as row-major?
+static const stbi_uc stbi__jpeg_dezigzag[64+15] =
+{
+    0,  1,  8, 16,  9,  2,  3, 10,
+   17, 24, 32, 25, 18, 11,  4,  5,
+   12, 19, 26, 33, 40, 48, 41, 34,
+   27, 20, 13,  6,  7, 14, 21, 28,
+   35, 42, 49, 56, 57, 50, 43, 36,
+   29, 22, 15, 23, 30, 37, 44, 51,
+   58, 59, 52, 45, 38, 31, 39, 46,
+   53, 60, 61, 54, 47, 55, 62, 63,
+   // let corrupt input sample past end
+   63, 63, 63, 63, 63, 63, 63, 63,
+   63, 63, 63, 63, 63, 63, 63
+};
+
+// decode one 64-entry block--
+static int stbi__jpeg_decode_block(stbi__jpeg *j, short data[64], stbi__huffman *hdc, stbi__huffman *hac, stbi__int16 *fac, int b, stbi__uint16 *dequant)
+{
+   int diff,dc,k;
+   int t;
+
+   if (j->code_bits < 16) stbi__grow_buffer_unsafe(j);
+   t = stbi__jpeg_huff_decode(j, hdc);
+   if (t < 0 || t > 15) return stbi__err("bad huffman code","Corrupt JPEG");
+
+   // 0 all the ac values now so we can do it 32-bits at a time
+   memset(data,0,64*sizeof(data[0]));
+
+   diff = t ?
stbi__extend_receive(j, t) : 0; + if (!stbi__addints_valid(j->img_comp[b].dc_pred, diff)) return stbi__err("bad delta","Corrupt JPEG"); + dc = j->img_comp[b].dc_pred + diff; + j->img_comp[b].dc_pred = dc; + if (!stbi__mul2shorts_valid(dc, dequant[0])) return stbi__err("can't merge dc and ac", "Corrupt JPEG"); + data[0] = (short) (dc * dequant[0]); + + // decode AC components, see JPEG spec + k = 1; + do { + unsigned int zig; + int c,r,s; + if (j->code_bits < 16) stbi__grow_buffer_unsafe(j); + c = (j->code_buffer >> (32 - FAST_BITS)) & ((1 << FAST_BITS)-1); + r = fac[c]; + if (r) { // fast-AC path + k += (r >> 4) & 15; // run + s = r & 15; // combined length + if (s > j->code_bits) return stbi__err("bad huffman code", "Combined length longer than code bits available"); + j->code_buffer <<= s; + j->code_bits -= s; + // decode into unzigzag'd location + zig = stbi__jpeg_dezigzag[k++]; + data[zig] = (short) ((r >> 8) * dequant[zig]); + } else { + int rs = stbi__jpeg_huff_decode(j, hac); + if (rs < 0) return stbi__err("bad huffman code","Corrupt JPEG"); + s = rs & 15; + r = rs >> 4; + if (s == 0) { + if (rs != 0xf0) break; // end block + k += 16; + } else { + k += r; + // decode into unzigzag'd location + zig = stbi__jpeg_dezigzag[k++]; + data[zig] = (short) (stbi__extend_receive(j,s) * dequant[zig]); + } + } + } while (k < 64); + return 1; +} + +static int stbi__jpeg_decode_block_prog_dc(stbi__jpeg *j, short data[64], stbi__huffman *hdc, int b) +{ + int diff,dc; + int t; + if (j->spec_end != 0) return stbi__err("can't merge dc and ac", "Corrupt JPEG"); + + if (j->code_bits < 16) stbi__grow_buffer_unsafe(j); + + if (j->succ_high == 0) { + // first scan for DC coefficient, must be first + memset(data,0,64*sizeof(data[0])); // 0 all the ac values now + t = stbi__jpeg_huff_decode(j, hdc); + if (t < 0 || t > 15) return stbi__err("can't merge dc and ac", "Corrupt JPEG"); + diff = t ? 
stbi__extend_receive(j, t) : 0; + + if (!stbi__addints_valid(j->img_comp[b].dc_pred, diff)) return stbi__err("bad delta", "Corrupt JPEG"); + dc = j->img_comp[b].dc_pred + diff; + j->img_comp[b].dc_pred = dc; + if (!stbi__mul2shorts_valid(dc, 1 << j->succ_low)) return stbi__err("can't merge dc and ac", "Corrupt JPEG"); + data[0] = (short) (dc * (1 << j->succ_low)); + } else { + // refinement scan for DC coefficient + if (stbi__jpeg_get_bit(j)) + data[0] += (short) (1 << j->succ_low); + } + return 1; +} + +// @OPTIMIZE: store non-zigzagged during the decode passes, +// and only de-zigzag when dequantizing +static int stbi__jpeg_decode_block_prog_ac(stbi__jpeg *j, short data[64], stbi__huffman *hac, stbi__int16 *fac) +{ + int k; + if (j->spec_start == 0) return stbi__err("can't merge dc and ac", "Corrupt JPEG"); + + if (j->succ_high == 0) { + int shift = j->succ_low; + + if (j->eob_run) { + --j->eob_run; + return 1; + } + + k = j->spec_start; + do { + unsigned int zig; + int c,r,s; + if (j->code_bits < 16) stbi__grow_buffer_unsafe(j); + c = (j->code_buffer >> (32 - FAST_BITS)) & ((1 << FAST_BITS)-1); + r = fac[c]; + if (r) { // fast-AC path + k += (r >> 4) & 15; // run + s = r & 15; // combined length + if (s > j->code_bits) return stbi__err("bad huffman code", "Combined length longer than code bits available"); + j->code_buffer <<= s; + j->code_bits -= s; + zig = stbi__jpeg_dezigzag[k++]; + data[zig] = (short) ((r >> 8) * (1 << shift)); + } else { + int rs = stbi__jpeg_huff_decode(j, hac); + if (rs < 0) return stbi__err("bad huffman code","Corrupt JPEG"); + s = rs & 15; + r = rs >> 4; + if (s == 0) { + if (r < 15) { + j->eob_run = (1 << r); + if (r) + j->eob_run += stbi__jpeg_get_bits(j, r); + --j->eob_run; + break; + } + k += 16; + } else { + k += r; + zig = stbi__jpeg_dezigzag[k++]; + data[zig] = (short) (stbi__extend_receive(j,s) * (1 << shift)); + } + } + } while (k <= j->spec_end); + } else { + // refinement scan for these AC coefficients + + short bit = (short) (1 << j->succ_low); + + if (j->eob_run) { + --j->eob_run; + for (k = j->spec_start; k <= j->spec_end; ++k) { + short *p = &data[stbi__jpeg_dezigzag[k]]; + if (*p != 0) + if (stbi__jpeg_get_bit(j)) + if ((*p & bit)==0) { + if (*p > 0) + *p += bit; + else + *p -= bit; + } + } + } else { + k = j->spec_start; + do { + int r,s; + int rs = stbi__jpeg_huff_decode(j, hac); // @OPTIMIZE see if we can use the fast path here, advance-by-r is so slow, eh + if (rs < 0) return stbi__err("bad huffman code","Corrupt JPEG"); + s = rs & 15; + r = rs >> 4; + if (s == 0) { + if (r < 15) { + j->eob_run = (1 << r) - 1; + if (r) + j->eob_run += stbi__jpeg_get_bits(j, r); + r = 64; // force end of block + } else { + // r=15 s=0 should write 16 0s, so we just do + // a run of 15 0s and then write s (which is 0), + // so we don't have to do anything special here + } + } else { + if (s != 1) return stbi__err("bad huffman code", "Corrupt JPEG"); + // sign bit + if (stbi__jpeg_get_bit(j)) + s = bit; + else + s = -bit; + } + + // advance by r + while (k <= j->spec_end) { + short *p = &data[stbi__jpeg_dezigzag[k++]]; + if (*p != 0) { + if (stbi__jpeg_get_bit(j)) + if ((*p & bit)==0) { + if (*p > 0) + *p += bit; + else + *p -= bit; + } + } else { + if (r == 0) { + *p = (short) s; + break; + } + --r; + } + } + } while (k <= j->spec_end); + } + } + return 1; +} + +// take a -128..127 value and stbi__clamp it and convert to 0..255 +stbi_inline static stbi_uc stbi__clamp(int x) +{ + // trick to use a single test to catch both cases + if ((unsigned int) x > 255) { 
+ if (x < 0) return 0; + if (x > 255) return 255; + } + return (stbi_uc) x; +} + +#define stbi__f2f(x) ((int) (((x) * 4096 + 0.5))) +#define stbi__fsh(x) ((x) * 4096) + +// derived from jidctint -- DCT_ISLOW +#define STBI__IDCT_1D(s0,s1,s2,s3,s4,s5,s6,s7) \ + int t0,t1,t2,t3,p1,p2,p3,p4,p5,x0,x1,x2,x3; \ + p2 = s2; \ + p3 = s6; \ + p1 = (p2+p3) * stbi__f2f(0.5411961f); \ + t2 = p1 + p3*stbi__f2f(-1.847759065f); \ + t3 = p1 + p2*stbi__f2f( 0.765366865f); \ + p2 = s0; \ + p3 = s4; \ + t0 = stbi__fsh(p2+p3); \ + t1 = stbi__fsh(p2-p3); \ + x0 = t0+t3; \ + x3 = t0-t3; \ + x1 = t1+t2; \ + x2 = t1-t2; \ + t0 = s7; \ + t1 = s5; \ + t2 = s3; \ + t3 = s1; \ + p3 = t0+t2; \ + p4 = t1+t3; \ + p1 = t0+t3; \ + p2 = t1+t2; \ + p5 = (p3+p4)*stbi__f2f( 1.175875602f); \ + t0 = t0*stbi__f2f( 0.298631336f); \ + t1 = t1*stbi__f2f( 2.053119869f); \ + t2 = t2*stbi__f2f( 3.072711026f); \ + t3 = t3*stbi__f2f( 1.501321110f); \ + p1 = p5 + p1*stbi__f2f(-0.899976223f); \ + p2 = p5 + p2*stbi__f2f(-2.562915447f); \ + p3 = p3*stbi__f2f(-1.961570560f); \ + p4 = p4*stbi__f2f(-0.390180644f); \ + t3 += p1+p4; \ + t2 += p2+p3; \ + t1 += p2+p4; \ + t0 += p1+p3; + +static void stbi__idct_block(stbi_uc *out, int out_stride, short data[64]) +{ + int i,val[64],*v=val; + stbi_uc *o; + short *d = data; + + // columns + for (i=0; i < 8; ++i,++d, ++v) { + // if all zeroes, shortcut -- this avoids dequantizing 0s and IDCTing + if (d[ 8]==0 && d[16]==0 && d[24]==0 && d[32]==0 + && d[40]==0 && d[48]==0 && d[56]==0) { + // no shortcut 0 seconds + // (1|2|3|4|5|6|7)==0 0 seconds + // all separate -0.047 seconds + // 1 && 2|3 && 4|5 && 6|7: -0.047 seconds + int dcterm = d[0]*4; + v[0] = v[8] = v[16] = v[24] = v[32] = v[40] = v[48] = v[56] = dcterm; + } else { + STBI__IDCT_1D(d[ 0],d[ 8],d[16],d[24],d[32],d[40],d[48],d[56]) + // constants scaled things up by 1<<12; let's bring them back + // down, but keep 2 extra bits of precision + x0 += 512; x1 += 512; x2 += 512; x3 += 512; + v[ 0] = (x0+t3) >> 10; + v[56] = (x0-t3) >> 10; + v[ 8] = (x1+t2) >> 10; + v[48] = (x1-t2) >> 10; + v[16] = (x2+t1) >> 10; + v[40] = (x2-t1) >> 10; + v[24] = (x3+t0) >> 10; + v[32] = (x3-t0) >> 10; + } + } + + for (i=0, v=val, o=out; i < 8; ++i,v+=8,o+=out_stride) { + // no fast case since the first 1D IDCT spread components out + STBI__IDCT_1D(v[0],v[1],v[2],v[3],v[4],v[5],v[6],v[7]) + // constants scaled things up by 1<<12, plus we had 1<<2 from first + // loop, plus horizontal and vertical each scale by sqrt(8) so together + // we've got an extra 1<<3, so 1<<17 total we need to remove. + // so we want to round that, which means adding 0.5 * 1<<17, + // aka 65536. Also, we'll end up with -128 to 127 that we want + // to encode as 0..255 by adding 128, so we'll add that before the shift + x0 += 65536 + (128<<17); + x1 += 65536 + (128<<17); + x2 += 65536 + (128<<17); + x3 += 65536 + (128<<17); + // tried computing the shifts into temps, or'ing the temps to see + // if any were out of range, but that was slower + o[0] = stbi__clamp((x0+t3) >> 17); + o[7] = stbi__clamp((x0-t3) >> 17); + o[1] = stbi__clamp((x1+t2) >> 17); + o[6] = stbi__clamp((x1-t2) >> 17); + o[2] = stbi__clamp((x2+t1) >> 17); + o[5] = stbi__clamp((x2-t1) >> 17); + o[3] = stbi__clamp((x3+t0) >> 17); + o[4] = stbi__clamp((x3-t0) >> 17); + } +} + +#ifdef STBI_SSE2 +// sse2 integer IDCT. not the fastest possible implementation but it +// produces bit-identical results to the generic C version so it's +// fully "transparent". 
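+// Recap of the shared fixed-point bookkeeping as one formula (an
+// illustrative sketch of the comments above -- the scalar path and the
+// SIMD paths below implement exactly this arithmetic):
+//
+//    stbi__f2f scales every constant by 1<<12, so the column pass rounds
+//    with +512 and shifts by 10, keeping 2 extra fraction bits:
+//       v = (x + 512) >> 10        // x carries 12 fraction bits
+//    the row pass then carries 12 + 2 + 3 bits (the extra 3 being the
+//    sqrt(8)*sqrt(8) DCT normalization), so it rounds and recenters with
+//       o = (x + 65536 + (128<<17)) >> 17   // 65536 = 0.5 * 2^17
+//    mapping the nominal -128..127 result into 0..255.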
+static void stbi__idct_simd(stbi_uc *out, int out_stride, short data[64]) +{ + // This is constructed to match our regular (generic) integer IDCT exactly. + __m128i row0, row1, row2, row3, row4, row5, row6, row7; + __m128i tmp; + + // dot product constant: even elems=x, odd elems=y + #define dct_const(x,y) _mm_setr_epi16((x),(y),(x),(y),(x),(y),(x),(y)) + + // out(0) = c0[even]*x + c0[odd]*y (c0, x, y 16-bit, out 32-bit) + // out(1) = c1[even]*x + c1[odd]*y + #define dct_rot(out0,out1, x,y,c0,c1) \ + __m128i c0##lo = _mm_unpacklo_epi16((x),(y)); \ + __m128i c0##hi = _mm_unpackhi_epi16((x),(y)); \ + __m128i out0##_l = _mm_madd_epi16(c0##lo, c0); \ + __m128i out0##_h = _mm_madd_epi16(c0##hi, c0); \ + __m128i out1##_l = _mm_madd_epi16(c0##lo, c1); \ + __m128i out1##_h = _mm_madd_epi16(c0##hi, c1) + + // out = in << 12 (in 16-bit, out 32-bit) + #define dct_widen(out, in) \ + __m128i out##_l = _mm_srai_epi32(_mm_unpacklo_epi16(_mm_setzero_si128(), (in)), 4); \ + __m128i out##_h = _mm_srai_epi32(_mm_unpackhi_epi16(_mm_setzero_si128(), (in)), 4) + + // wide add + #define dct_wadd(out, a, b) \ + __m128i out##_l = _mm_add_epi32(a##_l, b##_l); \ + __m128i out##_h = _mm_add_epi32(a##_h, b##_h) + + // wide sub + #define dct_wsub(out, a, b) \ + __m128i out##_l = _mm_sub_epi32(a##_l, b##_l); \ + __m128i out##_h = _mm_sub_epi32(a##_h, b##_h) + + // butterfly a/b, add bias, then shift by "s" and pack + #define dct_bfly32o(out0, out1, a,b,bias,s) \ + { \ + __m128i abiased_l = _mm_add_epi32(a##_l, bias); \ + __m128i abiased_h = _mm_add_epi32(a##_h, bias); \ + dct_wadd(sum, abiased, b); \ + dct_wsub(dif, abiased, b); \ + out0 = _mm_packs_epi32(_mm_srai_epi32(sum_l, s), _mm_srai_epi32(sum_h, s)); \ + out1 = _mm_packs_epi32(_mm_srai_epi32(dif_l, s), _mm_srai_epi32(dif_h, s)); \ + } + + // 8-bit interleave step (for transposes) + #define dct_interleave8(a, b) \ + tmp = a; \ + a = _mm_unpacklo_epi8(a, b); \ + b = _mm_unpackhi_epi8(tmp, b) + + // 16-bit interleave step (for transposes) + #define dct_interleave16(a, b) \ + tmp = a; \ + a = _mm_unpacklo_epi16(a, b); \ + b = _mm_unpackhi_epi16(tmp, b) + + #define dct_pass(bias,shift) \ + { \ + /* even part */ \ + dct_rot(t2e,t3e, row2,row6, rot0_0,rot0_1); \ + __m128i sum04 = _mm_add_epi16(row0, row4); \ + __m128i dif04 = _mm_sub_epi16(row0, row4); \ + dct_widen(t0e, sum04); \ + dct_widen(t1e, dif04); \ + dct_wadd(x0, t0e, t3e); \ + dct_wsub(x3, t0e, t3e); \ + dct_wadd(x1, t1e, t2e); \ + dct_wsub(x2, t1e, t2e); \ + /* odd part */ \ + dct_rot(y0o,y2o, row7,row3, rot2_0,rot2_1); \ + dct_rot(y1o,y3o, row5,row1, rot3_0,rot3_1); \ + __m128i sum17 = _mm_add_epi16(row1, row7); \ + __m128i sum35 = _mm_add_epi16(row3, row5); \ + dct_rot(y4o,y5o, sum17,sum35, rot1_0,rot1_1); \ + dct_wadd(x4, y0o, y4o); \ + dct_wadd(x5, y1o, y5o); \ + dct_wadd(x6, y2o, y5o); \ + dct_wadd(x7, y3o, y4o); \ + dct_bfly32o(row0,row7, x0,x7,bias,shift); \ + dct_bfly32o(row1,row6, x1,x6,bias,shift); \ + dct_bfly32o(row2,row5, x2,x5,bias,shift); \ + dct_bfly32o(row3,row4, x3,x4,bias,shift); \ + } + + __m128i rot0_0 = dct_const(stbi__f2f(0.5411961f), stbi__f2f(0.5411961f) + stbi__f2f(-1.847759065f)); + __m128i rot0_1 = dct_const(stbi__f2f(0.5411961f) + stbi__f2f( 0.765366865f), stbi__f2f(0.5411961f)); + __m128i rot1_0 = dct_const(stbi__f2f(1.175875602f) + stbi__f2f(-0.899976223f), stbi__f2f(1.175875602f)); + __m128i rot1_1 = dct_const(stbi__f2f(1.175875602f), stbi__f2f(1.175875602f) + stbi__f2f(-2.562915447f)); + __m128i rot2_0 = dct_const(stbi__f2f(-1.961570560f) + stbi__f2f( 0.298631336f), 
stbi__f2f(-1.961570560f)); + __m128i rot2_1 = dct_const(stbi__f2f(-1.961570560f), stbi__f2f(-1.961570560f) + stbi__f2f( 3.072711026f)); + __m128i rot3_0 = dct_const(stbi__f2f(-0.390180644f) + stbi__f2f( 2.053119869f), stbi__f2f(-0.390180644f)); + __m128i rot3_1 = dct_const(stbi__f2f(-0.390180644f), stbi__f2f(-0.390180644f) + stbi__f2f( 1.501321110f)); + + // rounding biases in column/row passes, see stbi__idct_block for explanation. + __m128i bias_0 = _mm_set1_epi32(512); + __m128i bias_1 = _mm_set1_epi32(65536 + (128<<17)); + + // load + row0 = _mm_load_si128((const __m128i *) (data + 0*8)); + row1 = _mm_load_si128((const __m128i *) (data + 1*8)); + row2 = _mm_load_si128((const __m128i *) (data + 2*8)); + row3 = _mm_load_si128((const __m128i *) (data + 3*8)); + row4 = _mm_load_si128((const __m128i *) (data + 4*8)); + row5 = _mm_load_si128((const __m128i *) (data + 5*8)); + row6 = _mm_load_si128((const __m128i *) (data + 6*8)); + row7 = _mm_load_si128((const __m128i *) (data + 7*8)); + + // column pass + dct_pass(bias_0, 10); + + { + // 16bit 8x8 transpose pass 1 + dct_interleave16(row0, row4); + dct_interleave16(row1, row5); + dct_interleave16(row2, row6); + dct_interleave16(row3, row7); + + // transpose pass 2 + dct_interleave16(row0, row2); + dct_interleave16(row1, row3); + dct_interleave16(row4, row6); + dct_interleave16(row5, row7); + + // transpose pass 3 + dct_interleave16(row0, row1); + dct_interleave16(row2, row3); + dct_interleave16(row4, row5); + dct_interleave16(row6, row7); + } + + // row pass + dct_pass(bias_1, 17); + + { + // pack + __m128i p0 = _mm_packus_epi16(row0, row1); // a0a1a2a3...a7b0b1b2b3...b7 + __m128i p1 = _mm_packus_epi16(row2, row3); + __m128i p2 = _mm_packus_epi16(row4, row5); + __m128i p3 = _mm_packus_epi16(row6, row7); + + // 8bit 8x8 transpose pass 1 + dct_interleave8(p0, p2); // a0e0a1e1... + dct_interleave8(p1, p3); // c0g0c1g1... + + // transpose pass 2 + dct_interleave8(p0, p1); // a0c0e0g0... + dct_interleave8(p2, p3); // b0d0f0h0... + + // transpose pass 3 + dct_interleave8(p0, p2); // a0b0c0d0... + dct_interleave8(p1, p3); // a4b4c4d4... + + // store + _mm_storel_epi64((__m128i *) out, p0); out += out_stride; + _mm_storel_epi64((__m128i *) out, _mm_shuffle_epi32(p0, 0x4e)); out += out_stride; + _mm_storel_epi64((__m128i *) out, p2); out += out_stride; + _mm_storel_epi64((__m128i *) out, _mm_shuffle_epi32(p2, 0x4e)); out += out_stride; + _mm_storel_epi64((__m128i *) out, p1); out += out_stride; + _mm_storel_epi64((__m128i *) out, _mm_shuffle_epi32(p1, 0x4e)); out += out_stride; + _mm_storel_epi64((__m128i *) out, p3); out += out_stride; + _mm_storel_epi64((__m128i *) out, _mm_shuffle_epi32(p3, 0x4e)); + } + +#undef dct_const +#undef dct_rot +#undef dct_widen +#undef dct_wadd +#undef dct_wsub +#undef dct_bfly32o +#undef dct_interleave8 +#undef dct_interleave16 +#undef dct_pass +} + +#endif // STBI_SSE2 + +#ifdef STBI_NEON + +// NEON integer IDCT. should produce bit-identical +// results to the generic C version. 
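+// (sketch of how the bit-exactness is kept, inferred from the code below:
+// the column pass uses vrshrn_n_s32, which computes (x + 512) >> 10 in a
+// single rounding narrow, and instead of adding 128<<17 during the row
+// pass the +128 recentering is folded in up front by adding 1024 = 128*8
+// to the DC coefficient -- the transform's overall 1/8 scale turns that
+// into +128 on every output sample.)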
+static void stbi__idct_simd(stbi_uc *out, int out_stride, short data[64]) +{ + int16x8_t row0, row1, row2, row3, row4, row5, row6, row7; + + int16x4_t rot0_0 = vdup_n_s16(stbi__f2f(0.5411961f)); + int16x4_t rot0_1 = vdup_n_s16(stbi__f2f(-1.847759065f)); + int16x4_t rot0_2 = vdup_n_s16(stbi__f2f( 0.765366865f)); + int16x4_t rot1_0 = vdup_n_s16(stbi__f2f( 1.175875602f)); + int16x4_t rot1_1 = vdup_n_s16(stbi__f2f(-0.899976223f)); + int16x4_t rot1_2 = vdup_n_s16(stbi__f2f(-2.562915447f)); + int16x4_t rot2_0 = vdup_n_s16(stbi__f2f(-1.961570560f)); + int16x4_t rot2_1 = vdup_n_s16(stbi__f2f(-0.390180644f)); + int16x4_t rot3_0 = vdup_n_s16(stbi__f2f( 0.298631336f)); + int16x4_t rot3_1 = vdup_n_s16(stbi__f2f( 2.053119869f)); + int16x4_t rot3_2 = vdup_n_s16(stbi__f2f( 3.072711026f)); + int16x4_t rot3_3 = vdup_n_s16(stbi__f2f( 1.501321110f)); + +#define dct_long_mul(out, inq, coeff) \ + int32x4_t out##_l = vmull_s16(vget_low_s16(inq), coeff); \ + int32x4_t out##_h = vmull_s16(vget_high_s16(inq), coeff) + +#define dct_long_mac(out, acc, inq, coeff) \ + int32x4_t out##_l = vmlal_s16(acc##_l, vget_low_s16(inq), coeff); \ + int32x4_t out##_h = vmlal_s16(acc##_h, vget_high_s16(inq), coeff) + +#define dct_widen(out, inq) \ + int32x4_t out##_l = vshll_n_s16(vget_low_s16(inq), 12); \ + int32x4_t out##_h = vshll_n_s16(vget_high_s16(inq), 12) + +// wide add +#define dct_wadd(out, a, b) \ + int32x4_t out##_l = vaddq_s32(a##_l, b##_l); \ + int32x4_t out##_h = vaddq_s32(a##_h, b##_h) + +// wide sub +#define dct_wsub(out, a, b) \ + int32x4_t out##_l = vsubq_s32(a##_l, b##_l); \ + int32x4_t out##_h = vsubq_s32(a##_h, b##_h) + +// butterfly a/b, then shift using "shiftop" by "s" and pack +#define dct_bfly32o(out0,out1, a,b,shiftop,s) \ + { \ + dct_wadd(sum, a, b); \ + dct_wsub(dif, a, b); \ + out0 = vcombine_s16(shiftop(sum_l, s), shiftop(sum_h, s)); \ + out1 = vcombine_s16(shiftop(dif_l, s), shiftop(dif_h, s)); \ + } + +#define dct_pass(shiftop, shift) \ + { \ + /* even part */ \ + int16x8_t sum26 = vaddq_s16(row2, row6); \ + dct_long_mul(p1e, sum26, rot0_0); \ + dct_long_mac(t2e, p1e, row6, rot0_1); \ + dct_long_mac(t3e, p1e, row2, rot0_2); \ + int16x8_t sum04 = vaddq_s16(row0, row4); \ + int16x8_t dif04 = vsubq_s16(row0, row4); \ + dct_widen(t0e, sum04); \ + dct_widen(t1e, dif04); \ + dct_wadd(x0, t0e, t3e); \ + dct_wsub(x3, t0e, t3e); \ + dct_wadd(x1, t1e, t2e); \ + dct_wsub(x2, t1e, t2e); \ + /* odd part */ \ + int16x8_t sum15 = vaddq_s16(row1, row5); \ + int16x8_t sum17 = vaddq_s16(row1, row7); \ + int16x8_t sum35 = vaddq_s16(row3, row5); \ + int16x8_t sum37 = vaddq_s16(row3, row7); \ + int16x8_t sumodd = vaddq_s16(sum17, sum35); \ + dct_long_mul(p5o, sumodd, rot1_0); \ + dct_long_mac(p1o, p5o, sum17, rot1_1); \ + dct_long_mac(p2o, p5o, sum35, rot1_2); \ + dct_long_mul(p3o, sum37, rot2_0); \ + dct_long_mul(p4o, sum15, rot2_1); \ + dct_wadd(sump13o, p1o, p3o); \ + dct_wadd(sump24o, p2o, p4o); \ + dct_wadd(sump23o, p2o, p3o); \ + dct_wadd(sump14o, p1o, p4o); \ + dct_long_mac(x4, sump13o, row7, rot3_0); \ + dct_long_mac(x5, sump24o, row5, rot3_1); \ + dct_long_mac(x6, sump23o, row3, rot3_2); \ + dct_long_mac(x7, sump14o, row1, rot3_3); \ + dct_bfly32o(row0,row7, x0,x7,shiftop,shift); \ + dct_bfly32o(row1,row6, x1,x6,shiftop,shift); \ + dct_bfly32o(row2,row5, x2,x5,shiftop,shift); \ + dct_bfly32o(row3,row4, x3,x4,shiftop,shift); \ + } + + // load + row0 = vld1q_s16(data + 0*8); + row1 = vld1q_s16(data + 1*8); + row2 = vld1q_s16(data + 2*8); + row3 = vld1q_s16(data + 3*8); + row4 = vld1q_s16(data + 4*8); + row5 = 
vld1q_s16(data + 5*8); + row6 = vld1q_s16(data + 6*8); + row7 = vld1q_s16(data + 7*8); + + // add DC bias + row0 = vaddq_s16(row0, vsetq_lane_s16(1024, vdupq_n_s16(0), 0)); + + // column pass + dct_pass(vrshrn_n_s32, 10); + + // 16bit 8x8 transpose + { +// these three map to a single VTRN.16, VTRN.32, and VSWP, respectively. +// whether compilers actually get this is another story, sadly. +#define dct_trn16(x, y) { int16x8x2_t t = vtrnq_s16(x, y); x = t.val[0]; y = t.val[1]; } +#define dct_trn32(x, y) { int32x4x2_t t = vtrnq_s32(vreinterpretq_s32_s16(x), vreinterpretq_s32_s16(y)); x = vreinterpretq_s16_s32(t.val[0]); y = vreinterpretq_s16_s32(t.val[1]); } +#define dct_trn64(x, y) { int16x8_t x0 = x; int16x8_t y0 = y; x = vcombine_s16(vget_low_s16(x0), vget_low_s16(y0)); y = vcombine_s16(vget_high_s16(x0), vget_high_s16(y0)); } + + // pass 1 + dct_trn16(row0, row1); // a0b0a2b2a4b4a6b6 + dct_trn16(row2, row3); + dct_trn16(row4, row5); + dct_trn16(row6, row7); + + // pass 2 + dct_trn32(row0, row2); // a0b0c0d0a4b4c4d4 + dct_trn32(row1, row3); + dct_trn32(row4, row6); + dct_trn32(row5, row7); + + // pass 3 + dct_trn64(row0, row4); // a0b0c0d0e0f0g0h0 + dct_trn64(row1, row5); + dct_trn64(row2, row6); + dct_trn64(row3, row7); + +#undef dct_trn16 +#undef dct_trn32 +#undef dct_trn64 + } + + // row pass + // vrshrn_n_s32 only supports shifts up to 16, we need + // 17. so do a non-rounding shift of 16 first then follow + // up with a rounding shift by 1. + dct_pass(vshrn_n_s32, 16); + + { + // pack and round + uint8x8_t p0 = vqrshrun_n_s16(row0, 1); + uint8x8_t p1 = vqrshrun_n_s16(row1, 1); + uint8x8_t p2 = vqrshrun_n_s16(row2, 1); + uint8x8_t p3 = vqrshrun_n_s16(row3, 1); + uint8x8_t p4 = vqrshrun_n_s16(row4, 1); + uint8x8_t p5 = vqrshrun_n_s16(row5, 1); + uint8x8_t p6 = vqrshrun_n_s16(row6, 1); + uint8x8_t p7 = vqrshrun_n_s16(row7, 1); + + // again, these can translate into one instruction, but often don't. +#define dct_trn8_8(x, y) { uint8x8x2_t t = vtrn_u8(x, y); x = t.val[0]; y = t.val[1]; } +#define dct_trn8_16(x, y) { uint16x4x2_t t = vtrn_u16(vreinterpret_u16_u8(x), vreinterpret_u16_u8(y)); x = vreinterpret_u8_u16(t.val[0]); y = vreinterpret_u8_u16(t.val[1]); } +#define dct_trn8_32(x, y) { uint32x2x2_t t = vtrn_u32(vreinterpret_u32_u8(x), vreinterpret_u32_u8(y)); x = vreinterpret_u8_u32(t.val[0]); y = vreinterpret_u8_u32(t.val[1]); } + + // sadly can't use interleaved stores here since we only write + // 8 bytes to each scan line! + + // 8x8 8-bit transpose pass 1 + dct_trn8_8(p0, p1); + dct_trn8_8(p2, p3); + dct_trn8_8(p4, p5); + dct_trn8_8(p6, p7); + + // pass 2 + dct_trn8_16(p0, p2); + dct_trn8_16(p1, p3); + dct_trn8_16(p4, p6); + dct_trn8_16(p5, p7); + + // pass 3 + dct_trn8_32(p0, p4); + dct_trn8_32(p1, p5); + dct_trn8_32(p2, p6); + dct_trn8_32(p3, p7); + + // store + vst1_u8(out, p0); out += out_stride; + vst1_u8(out, p1); out += out_stride; + vst1_u8(out, p2); out += out_stride; + vst1_u8(out, p3); out += out_stride; + vst1_u8(out, p4); out += out_stride; + vst1_u8(out, p5); out += out_stride; + vst1_u8(out, p6); out += out_stride; + vst1_u8(out, p7); + +#undef dct_trn8_8 +#undef dct_trn8_16 +#undef dct_trn8_32 + } + +#undef dct_long_mul +#undef dct_long_mac +#undef dct_widen +#undef dct_wadd +#undef dct_wsub +#undef dct_bfly32o +#undef dct_pass +} + +#endif // STBI_NEON + +#define STBI__MARKER_none 0xff +// if there's a pending marker from the entropy stream, return that +// otherwise, fetch from the stream and get a marker. 
if there's no
+// marker, return 0xff, which is never a valid marker value
+static stbi_uc stbi__get_marker(stbi__jpeg *j)
+{
+   stbi_uc x;
+   if (j->marker != STBI__MARKER_none) { x = j->marker; j->marker = STBI__MARKER_none; return x; }
+   x = stbi__get8(j->s);
+   if (x != 0xff) return STBI__MARKER_none;
+   while (x == 0xff)
+      x = stbi__get8(j->s); // consume repeated 0xff fill bytes
+   return x;
+}
+
+// in each scan, we'll have scan_n components, and the order
+// of the components is specified by order[]
+#define STBI__RESTART(x) ((x) >= 0xd0 && (x) <= 0xd7)
+
+// after a restart interval, stbi__jpeg_reset the entropy decoder and
+// the dc prediction
+static void stbi__jpeg_reset(stbi__jpeg *j)
+{
+   j->code_bits = 0;
+   j->code_buffer = 0;
+   j->nomore = 0;
+   j->img_comp[0].dc_pred = j->img_comp[1].dc_pred = j->img_comp[2].dc_pred = j->img_comp[3].dc_pred = 0;
+   j->marker = STBI__MARKER_none;
+   j->todo = j->restart_interval ? j->restart_interval : 0x7fffffff;
+   j->eob_run = 0;
+   // no more than 1<<31 MCUs if no restart_interval? that's plenty safe,
+   // since we don't even allow 1<<30 pixels
+}
+
+static int stbi__parse_entropy_coded_data(stbi__jpeg *z)
+{
+   stbi__jpeg_reset(z);
+   if (!z->progressive) {
+      if (z->scan_n == 1) {
+         int i,j;
+         STBI_SIMD_ALIGN(short, data[64]);
+         int n = z->order[0];
+         // non-interleaved data, we just need to process one block at a time,
+         // in trivial scanline order
+         // number of blocks to do just depends on how many actual "pixels" this
+         // component has, independent of interleaved MCU blocking and such
+         int w = (z->img_comp[n].x+7) >> 3;
+         int h = (z->img_comp[n].y+7) >> 3;
+         for (j=0; j < h; ++j) {
+            for (i=0; i < w; ++i) {
+               int ha = z->img_comp[n].ha;
+               if (!stbi__jpeg_decode_block(z, data, z->huff_dc+z->img_comp[n].hd, z->huff_ac+ha, z->fast_ac[ha], n, z->dequant[z->img_comp[n].tq])) return 0;
+               z->idct_block_kernel(z->img_comp[n].data+z->img_comp[n].w2*j*8+i*8, z->img_comp[n].w2, data);
+               // every data block is an MCU, so countdown the restart interval
+               if (--z->todo <= 0) {
+                  if (z->code_bits < 24) stbi__grow_buffer_unsafe(z);
+                  // if it's NOT a restart, then just bail, so we get corrupt data
+                  // rather than no data
+                  if (!STBI__RESTART(z->marker)) return 1;
+                  stbi__jpeg_reset(z);
+               }
+            }
+         }
+         return 1;
+      } else { // interleaved
+         int i,j,k,x,y;
+         STBI_SIMD_ALIGN(short, data[64]);
+         for (j=0; j < z->img_mcu_y; ++j) {
+            for (i=0; i < z->img_mcu_x; ++i) {
+               // scan an interleaved mcu...
process scan_n components in order + for (k=0; k < z->scan_n; ++k) { + int n = z->order[k]; + // scan out an mcu's worth of this component; that's just determined + // by the basic H and V specified for the component + for (y=0; y < z->img_comp[n].v; ++y) { + for (x=0; x < z->img_comp[n].h; ++x) { + int x2 = (i*z->img_comp[n].h + x)*8; + int y2 = (j*z->img_comp[n].v + y)*8; + int ha = z->img_comp[n].ha; + if (!stbi__jpeg_decode_block(z, data, z->huff_dc+z->img_comp[n].hd, z->huff_ac+ha, z->fast_ac[ha], n, z->dequant[z->img_comp[n].tq])) return 0; + z->idct_block_kernel(z->img_comp[n].data+z->img_comp[n].w2*y2+x2, z->img_comp[n].w2, data); + } + } + } + // after all interleaved components, that's an interleaved MCU, + // so now count down the restart interval + if (--z->todo <= 0) { + if (z->code_bits < 24) stbi__grow_buffer_unsafe(z); + if (!STBI__RESTART(z->marker)) return 1; + stbi__jpeg_reset(z); + } + } + } + return 1; + } + } else { + if (z->scan_n == 1) { + int i,j; + int n = z->order[0]; + // non-interleaved data, we just need to process one block at a time, + // in trivial scanline order + // number of blocks to do just depends on how many actual "pixels" this + // component has, independent of interleaved MCU blocking and such + int w = (z->img_comp[n].x+7) >> 3; + int h = (z->img_comp[n].y+7) >> 3; + for (j=0; j < h; ++j) { + for (i=0; i < w; ++i) { + short *data = z->img_comp[n].coeff + 64 * (i + j * z->img_comp[n].coeff_w); + if (z->spec_start == 0) { + if (!stbi__jpeg_decode_block_prog_dc(z, data, &z->huff_dc[z->img_comp[n].hd], n)) + return 0; + } else { + int ha = z->img_comp[n].ha; + if (!stbi__jpeg_decode_block_prog_ac(z, data, &z->huff_ac[ha], z->fast_ac[ha])) + return 0; + } + // every data block is an MCU, so countdown the restart interval + if (--z->todo <= 0) { + if (z->code_bits < 24) stbi__grow_buffer_unsafe(z); + if (!STBI__RESTART(z->marker)) return 1; + stbi__jpeg_reset(z); + } + } + } + return 1; + } else { // interleaved + int i,j,k,x,y; + for (j=0; j < z->img_mcu_y; ++j) { + for (i=0; i < z->img_mcu_x; ++i) { + // scan an interleaved mcu... 
process scan_n components in order + for (k=0; k < z->scan_n; ++k) { + int n = z->order[k]; + // scan out an mcu's worth of this component; that's just determined + // by the basic H and V specified for the component + for (y=0; y < z->img_comp[n].v; ++y) { + for (x=0; x < z->img_comp[n].h; ++x) { + int x2 = (i*z->img_comp[n].h + x); + int y2 = (j*z->img_comp[n].v + y); + short *data = z->img_comp[n].coeff + 64 * (x2 + y2 * z->img_comp[n].coeff_w); + if (!stbi__jpeg_decode_block_prog_dc(z, data, &z->huff_dc[z->img_comp[n].hd], n)) + return 0; + } + } + } + // after all interleaved components, that's an interleaved MCU, + // so now count down the restart interval + if (--z->todo <= 0) { + if (z->code_bits < 24) stbi__grow_buffer_unsafe(z); + if (!STBI__RESTART(z->marker)) return 1; + stbi__jpeg_reset(z); + } + } + } + return 1; + } + } +} + +static void stbi__jpeg_dequantize(short *data, stbi__uint16 *dequant) +{ + int i; + for (i=0; i < 64; ++i) + data[i] *= dequant[i]; +} + +static void stbi__jpeg_finish(stbi__jpeg *z) +{ + if (z->progressive) { + // dequantize and idct the data + int i,j,n; + for (n=0; n < z->s->img_n; ++n) { + int w = (z->img_comp[n].x+7) >> 3; + int h = (z->img_comp[n].y+7) >> 3; + for (j=0; j < h; ++j) { + for (i=0; i < w; ++i) { + short *data = z->img_comp[n].coeff + 64 * (i + j * z->img_comp[n].coeff_w); + stbi__jpeg_dequantize(data, z->dequant[z->img_comp[n].tq]); + z->idct_block_kernel(z->img_comp[n].data+z->img_comp[n].w2*j*8+i*8, z->img_comp[n].w2, data); + } + } + } + } +} + +static int stbi__process_marker(stbi__jpeg *z, int m) +{ + int L; + switch (m) { + case STBI__MARKER_none: // no marker found + return stbi__err("expected marker","Corrupt JPEG"); + + case 0xDD: // DRI - specify restart interval + if (stbi__get16be(z->s) != 4) return stbi__err("bad DRI len","Corrupt JPEG"); + z->restart_interval = stbi__get16be(z->s); + return 1; + + case 0xDB: // DQT - define quantization table + L = stbi__get16be(z->s)-2; + while (L > 0) { + int q = stbi__get8(z->s); + int p = q >> 4, sixteen = (p != 0); + int t = q & 15,i; + if (p != 0 && p != 1) return stbi__err("bad DQT type","Corrupt JPEG"); + if (t > 3) return stbi__err("bad DQT table","Corrupt JPEG"); + + for (i=0; i < 64; ++i) + z->dequant[t][stbi__jpeg_dezigzag[i]] = (stbi__uint16)(sixteen ? stbi__get16be(z->s) : stbi__get8(z->s)); + L -= (sixteen ? 129 : 65); + } + return L==0; + + case 0xC4: // DHT - define huffman table + L = stbi__get16be(z->s)-2; + while (L > 0) { + stbi_uc *v; + int sizes[16],i,n=0; + int q = stbi__get8(z->s); + int tc = q >> 4; + int th = q & 15; + if (tc > 1 || th > 3) return stbi__err("bad DHT header","Corrupt JPEG"); + for (i=0; i < 16; ++i) { + sizes[i] = stbi__get8(z->s); + n += sizes[i]; + } + if(n > 256) return stbi__err("bad DHT header","Corrupt JPEG"); // Loop over i < n would write past end of values! 
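+         // segment layout recap (illustrative, per the JPEG spec): one table
+         // class/id byte, sixteen code-length counts, then n = sum(sizes)
+         // symbol values -- hence the fixed 17 bytes accounted for below and
+         // the n value bytes subtracted once they have been read.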
+ L -= 17; + if (tc == 0) { + if (!stbi__build_huffman(z->huff_dc+th, sizes)) return 0; + v = z->huff_dc[th].values; + } else { + if (!stbi__build_huffman(z->huff_ac+th, sizes)) return 0; + v = z->huff_ac[th].values; + } + for (i=0; i < n; ++i) + v[i] = stbi__get8(z->s); + if (tc != 0) + stbi__build_fast_ac(z->fast_ac[th], z->huff_ac + th); + L -= n; + } + return L==0; + } + + // check for comment block or APP blocks + if ((m >= 0xE0 && m <= 0xEF) || m == 0xFE) { + L = stbi__get16be(z->s); + if (L < 2) { + if (m == 0xFE) + return stbi__err("bad COM len","Corrupt JPEG"); + else + return stbi__err("bad APP len","Corrupt JPEG"); + } + L -= 2; + + if (m == 0xE0 && L >= 5) { // JFIF APP0 segment + static const unsigned char tag[5] = {'J','F','I','F','\0'}; + int ok = 1; + int i; + for (i=0; i < 5; ++i) + if (stbi__get8(z->s) != tag[i]) + ok = 0; + L -= 5; + if (ok) + z->jfif = 1; + } else if (m == 0xEE && L >= 12) { // Adobe APP14 segment + static const unsigned char tag[6] = {'A','d','o','b','e','\0'}; + int ok = 1; + int i; + for (i=0; i < 6; ++i) + if (stbi__get8(z->s) != tag[i]) + ok = 0; + L -= 6; + if (ok) { + stbi__get8(z->s); // version + stbi__get16be(z->s); // flags0 + stbi__get16be(z->s); // flags1 + z->app14_color_transform = stbi__get8(z->s); // color transform + L -= 6; + } + } + + stbi__skip(z->s, L); + return 1; + } + + return stbi__err("unknown marker","Corrupt JPEG"); +} + +// after we see SOS +static int stbi__process_scan_header(stbi__jpeg *z) +{ + int i; + int Ls = stbi__get16be(z->s); + z->scan_n = stbi__get8(z->s); + if (z->scan_n < 1 || z->scan_n > 4 || z->scan_n > (int) z->s->img_n) return stbi__err("bad SOS component count","Corrupt JPEG"); + if (Ls != 6+2*z->scan_n) return stbi__err("bad SOS len","Corrupt JPEG"); + for (i=0; i < z->scan_n; ++i) { + int id = stbi__get8(z->s), which; + int q = stbi__get8(z->s); + for (which = 0; which < z->s->img_n; ++which) + if (z->img_comp[which].id == id) + break; + if (which == z->s->img_n) return 0; // no match + z->img_comp[which].hd = q >> 4; if (z->img_comp[which].hd > 3) return stbi__err("bad DC huff","Corrupt JPEG"); + z->img_comp[which].ha = q & 15; if (z->img_comp[which].ha > 3) return stbi__err("bad AC huff","Corrupt JPEG"); + z->order[i] = which; + } + + { + int aa; + z->spec_start = stbi__get8(z->s); + z->spec_end = stbi__get8(z->s); // should be 63, but might be 0 + aa = stbi__get8(z->s); + z->succ_high = (aa >> 4); + z->succ_low = (aa & 15); + if (z->progressive) { + if (z->spec_start > 63 || z->spec_end > 63 || z->spec_start > z->spec_end || z->succ_high > 13 || z->succ_low > 13) + return stbi__err("bad SOS", "Corrupt JPEG"); + } else { + if (z->spec_start != 0) return stbi__err("bad SOS","Corrupt JPEG"); + if (z->succ_high != 0 || z->succ_low != 0) return stbi__err("bad SOS","Corrupt JPEG"); + z->spec_end = 63; + } + } + + return 1; +} + +static int stbi__free_jpeg_components(stbi__jpeg *z, int ncomp, int why) +{ + int i; + for (i=0; i < ncomp; ++i) { + if (z->img_comp[i].raw_data) { + STBI_FREE(z->img_comp[i].raw_data); + z->img_comp[i].raw_data = NULL; + z->img_comp[i].data = NULL; + } + if (z->img_comp[i].raw_coeff) { + STBI_FREE(z->img_comp[i].raw_coeff); + z->img_comp[i].raw_coeff = 0; + z->img_comp[i].coeff = 0; + } + if (z->img_comp[i].linebuf) { + STBI_FREE(z->img_comp[i].linebuf); + z->img_comp[i].linebuf = NULL; + } + } + return why; +} + +static int stbi__process_frame_header(stbi__jpeg *z, int scan) +{ + stbi__context *s = z->s; + int Lf,p,i,q, h_max=1,v_max=1,c; + Lf = stbi__get16be(s); if (Lf < 11) 
return stbi__err("bad SOF len","Corrupt JPEG"); // JPEG + p = stbi__get8(s); if (p != 8) return stbi__err("only 8-bit","JPEG format not supported: 8-bit only"); // JPEG baseline + s->img_y = stbi__get16be(s); if (s->img_y == 0) return stbi__err("no header height", "JPEG format not supported: delayed height"); // Legal, but we don't handle it--but neither does IJG + s->img_x = stbi__get16be(s); if (s->img_x == 0) return stbi__err("0 width","Corrupt JPEG"); // JPEG requires + if (s->img_y > STBI_MAX_DIMENSIONS) return stbi__err("too large","Very large image (corrupt?)"); + if (s->img_x > STBI_MAX_DIMENSIONS) return stbi__err("too large","Very large image (corrupt?)"); + c = stbi__get8(s); + if (c != 3 && c != 1 && c != 4) return stbi__err("bad component count","Corrupt JPEG"); + s->img_n = c; + for (i=0; i < c; ++i) { + z->img_comp[i].data = NULL; + z->img_comp[i].linebuf = NULL; + } + + if (Lf != 8+3*s->img_n) return stbi__err("bad SOF len","Corrupt JPEG"); + + z->rgb = 0; + for (i=0; i < s->img_n; ++i) { + static const unsigned char rgb[3] = { 'R', 'G', 'B' }; + z->img_comp[i].id = stbi__get8(s); + if (s->img_n == 3 && z->img_comp[i].id == rgb[i]) + ++z->rgb; + q = stbi__get8(s); + z->img_comp[i].h = (q >> 4); if (!z->img_comp[i].h || z->img_comp[i].h > 4) return stbi__err("bad H","Corrupt JPEG"); + z->img_comp[i].v = q & 15; if (!z->img_comp[i].v || z->img_comp[i].v > 4) return stbi__err("bad V","Corrupt JPEG"); + z->img_comp[i].tq = stbi__get8(s); if (z->img_comp[i].tq > 3) return stbi__err("bad TQ","Corrupt JPEG"); + } + + if (scan != STBI__SCAN_load) return 1; + + if (!stbi__mad3sizes_valid(s->img_x, s->img_y, s->img_n, 0)) return stbi__err("too large", "Image too large to decode"); + + for (i=0; i < s->img_n; ++i) { + if (z->img_comp[i].h > h_max) h_max = z->img_comp[i].h; + if (z->img_comp[i].v > v_max) v_max = z->img_comp[i].v; + } + + // check that plane subsampling factors are integer ratios; our resamplers can't deal with fractional ratios + // and I've never seen a non-corrupted JPEG file actually use them + for (i=0; i < s->img_n; ++i) { + if (h_max % z->img_comp[i].h != 0) return stbi__err("bad H","Corrupt JPEG"); + if (v_max % z->img_comp[i].v != 0) return stbi__err("bad V","Corrupt JPEG"); + } + + // compute interleaved mcu info + z->img_h_max = h_max; + z->img_v_max = v_max; + z->img_mcu_w = h_max * 8; + z->img_mcu_h = v_max * 8; + // these sizes can't be more than 17 bits + z->img_mcu_x = (s->img_x + z->img_mcu_w-1) / z->img_mcu_w; + z->img_mcu_y = (s->img_y + z->img_mcu_h-1) / z->img_mcu_h; + + for (i=0; i < s->img_n; ++i) { + // number of effective pixels (e.g. for non-interleaved MCU) + z->img_comp[i].x = (s->img_x * z->img_comp[i].h + h_max-1) / h_max; + z->img_comp[i].y = (s->img_y * z->img_comp[i].v + v_max-1) / v_max; + // to simplify generation, we'll allocate enough memory to decode + // the bogus oversized data from using interleaved MCUs and their + // big blocks (e.g. 
a 16x16 iMCU on an image of width 33); we won't + // discard the extra data until colorspace conversion + // + // img_mcu_x, img_mcu_y: <=17 bits; comp[i].h and .v are <=4 (checked earlier) + // so these muls can't overflow with 32-bit ints (which we require) + z->img_comp[i].w2 = z->img_mcu_x * z->img_comp[i].h * 8; + z->img_comp[i].h2 = z->img_mcu_y * z->img_comp[i].v * 8; + z->img_comp[i].coeff = 0; + z->img_comp[i].raw_coeff = 0; + z->img_comp[i].linebuf = NULL; + z->img_comp[i].raw_data = stbi__malloc_mad2(z->img_comp[i].w2, z->img_comp[i].h2, 15); + if (z->img_comp[i].raw_data == NULL) + return stbi__free_jpeg_components(z, i+1, stbi__err("outofmem", "Out of memory")); + // align blocks for idct using mmx/sse + z->img_comp[i].data = (stbi_uc*) (((size_t) z->img_comp[i].raw_data + 15) & ~15); + if (z->progressive) { + // w2, h2 are multiples of 8 (see above) + z->img_comp[i].coeff_w = z->img_comp[i].w2 / 8; + z->img_comp[i].coeff_h = z->img_comp[i].h2 / 8; + z->img_comp[i].raw_coeff = stbi__malloc_mad3(z->img_comp[i].w2, z->img_comp[i].h2, sizeof(short), 15); + if (z->img_comp[i].raw_coeff == NULL) + return stbi__free_jpeg_components(z, i+1, stbi__err("outofmem", "Out of memory")); + z->img_comp[i].coeff = (short*) (((size_t) z->img_comp[i].raw_coeff + 15) & ~15); + } + } + + return 1; +} + +// use comparisons since in some cases we handle more than one case (e.g. SOF) +#define stbi__DNL(x) ((x) == 0xdc) +#define stbi__SOI(x) ((x) == 0xd8) +#define stbi__EOI(x) ((x) == 0xd9) +#define stbi__SOF(x) ((x) == 0xc0 || (x) == 0xc1 || (x) == 0xc2) +#define stbi__SOS(x) ((x) == 0xda) + +#define stbi__SOF_progressive(x) ((x) == 0xc2) + +static int stbi__decode_jpeg_header(stbi__jpeg *z, int scan) +{ + int m; + z->jfif = 0; + z->app14_color_transform = -1; // valid values are 0,1,2 + z->marker = STBI__MARKER_none; // initialize cached marker to empty + m = stbi__get_marker(z); + if (!stbi__SOI(m)) return stbi__err("no SOI","Corrupt JPEG"); + if (scan == STBI__SCAN_type) return 1; + m = stbi__get_marker(z); + while (!stbi__SOF(m)) { + if (!stbi__process_marker(z,m)) return 0; + m = stbi__get_marker(z); + while (m == STBI__MARKER_none) { + // some files have extra padding after their blocks, so ok, we'll scan + if (stbi__at_eof(z->s)) return stbi__err("no SOF", "Corrupt JPEG"); + m = stbi__get_marker(z); + } + } + z->progressive = stbi__SOF_progressive(m); + if (!stbi__process_frame_header(z, scan)) return 0; + return 1; +} + +static int stbi__skip_jpeg_junk_at_end(stbi__jpeg *j) +{ + // some JPEGs have junk at end, skip over it but if we find what looks + // like a valid marker, resume there + while (!stbi__at_eof(j->s)) { + int x = stbi__get8(j->s); + while (x == 255) { // might be a marker + if (stbi__at_eof(j->s)) return STBI__MARKER_none; + x = stbi__get8(j->s); + if (x != 0x00 && x != 0xff) { + // not a stuffed zero or lead-in to another marker, looks + // like an actual marker, return it + return x; + } + // stuffed zero has x=0 now which ends the loop, meaning we go + // back to regular scan loop. + // repeated 0xff keeps trying to read the next byte of the marker. 
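+         // worked example (illustrative): "FF 00" is a stuffed zero inside
+         // entropy data, so x becomes 0 and we fall back to the outer scan;
+         // "FF FF D9" collapses the fill byte and returns the EOI marker D9.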
+ } + } + return STBI__MARKER_none; +} + +// decode image to YCbCr format +static int stbi__decode_jpeg_image(stbi__jpeg *j) +{ + int m; + for (m = 0; m < 4; m++) { + j->img_comp[m].raw_data = NULL; + j->img_comp[m].raw_coeff = NULL; + } + j->restart_interval = 0; + if (!stbi__decode_jpeg_header(j, STBI__SCAN_load)) return 0; + m = stbi__get_marker(j); + while (!stbi__EOI(m)) { + if (stbi__SOS(m)) { + if (!stbi__process_scan_header(j)) return 0; + if (!stbi__parse_entropy_coded_data(j)) return 0; + if (j->marker == STBI__MARKER_none ) { + j->marker = stbi__skip_jpeg_junk_at_end(j); + // if we reach eof without hitting a marker, stbi__get_marker() below will fail and we'll eventually return 0 + } + m = stbi__get_marker(j); + if (STBI__RESTART(m)) + m = stbi__get_marker(j); + } else if (stbi__DNL(m)) { + int Ld = stbi__get16be(j->s); + stbi__uint32 NL = stbi__get16be(j->s); + if (Ld != 4) return stbi__err("bad DNL len", "Corrupt JPEG"); + if (NL != j->s->img_y) return stbi__err("bad DNL height", "Corrupt JPEG"); + m = stbi__get_marker(j); + } else { + if (!stbi__process_marker(j, m)) return 1; + m = stbi__get_marker(j); + } + } + if (j->progressive) + stbi__jpeg_finish(j); + return 1; +} + +// static jfif-centered resampling (across block boundaries) + +typedef stbi_uc *(*resample_row_func)(stbi_uc *out, stbi_uc *in0, stbi_uc *in1, + int w, int hs); + +#define stbi__div4(x) ((stbi_uc) ((x) >> 2)) + +static stbi_uc *resample_row_1(stbi_uc *out, stbi_uc *in_near, stbi_uc *in_far, int w, int hs) +{ + STBI_NOTUSED(out); + STBI_NOTUSED(in_far); + STBI_NOTUSED(w); + STBI_NOTUSED(hs); + return in_near; +} + +static stbi_uc* stbi__resample_row_v_2(stbi_uc *out, stbi_uc *in_near, stbi_uc *in_far, int w, int hs) +{ + // need to generate two samples vertically for every one in input + int i; + STBI_NOTUSED(hs); + for (i=0; i < w; ++i) + out[i] = stbi__div4(3*in_near[i] + in_far[i] + 2); + return out; +} + +static stbi_uc* stbi__resample_row_h_2(stbi_uc *out, stbi_uc *in_near, stbi_uc *in_far, int w, int hs) +{ + // need to generate two samples horizontally for every one in input + int i; + stbi_uc *input = in_near; + + if (w == 1) { + // if only one sample, can't do any interpolation + out[0] = out[1] = input[0]; + return out; + } + + out[0] = input[0]; + out[1] = stbi__div4(input[0]*3 + input[1] + 2); + for (i=1; i < w-1; ++i) { + int n = 3*input[i]+2; + out[i*2+0] = stbi__div4(n+input[i-1]); + out[i*2+1] = stbi__div4(n+input[i+1]); + } + out[i*2+0] = stbi__div4(input[w-2]*3 + input[w-1] + 2); + out[i*2+1] = input[w-1]; + + STBI_NOTUSED(in_far); + STBI_NOTUSED(hs); + + return out; +} + +#define stbi__div16(x) ((stbi_uc) ((x) >> 4)) + +static stbi_uc *stbi__resample_row_hv_2(stbi_uc *out, stbi_uc *in_near, stbi_uc *in_far, int w, int hs) +{ + // need to generate 2x2 samples for every one in input + int i,t0,t1; + if (w == 1) { + out[0] = out[1] = stbi__div4(3*in_near[0] + in_far[0] + 2); + return out; + } + + t1 = 3*in_near[0] + in_far[0]; + out[0] = stbi__div4(t1+2); + for (i=1; i < w; ++i) { + t0 = t1; + t1 = 3*in_near[i]+in_far[i]; + out[i*2-1] = stbi__div16(3*t0 + t1 + 8); + out[i*2 ] = stbi__div16(3*t1 + t0 + 8); + } + out[w*2-1] = stbi__div4(t1+2); + + STBI_NOTUSED(hs); + + return out; +} + +#if defined(STBI_SSE2) || defined(STBI_NEON) +static stbi_uc *stbi__resample_row_hv_2_simd(stbi_uc *out, stbi_uc *in_near, stbi_uc *in_far, int w, int hs) +{ + // need to generate 2x2 samples for every one in input + int i=0,t0,t1; + + if (w == 1) { + out[0] = out[1] = stbi__div4(3*in_near[0] + in_far[0] + 
2); + return out; + } + + t1 = 3*in_near[0] + in_far[0]; + // process groups of 8 pixels for as long as we can. + // note we can't handle the last pixel in a row in this loop + // because we need to handle the filter boundary conditions. + for (; i < ((w-1) & ~7); i += 8) { +#if defined(STBI_SSE2) + // load and perform the vertical filtering pass + // this uses 3*x + y = 4*x + (y - x) + __m128i zero = _mm_setzero_si128(); + __m128i farb = _mm_loadl_epi64((__m128i *) (in_far + i)); + __m128i nearb = _mm_loadl_epi64((__m128i *) (in_near + i)); + __m128i farw = _mm_unpacklo_epi8(farb, zero); + __m128i nearw = _mm_unpacklo_epi8(nearb, zero); + __m128i diff = _mm_sub_epi16(farw, nearw); + __m128i nears = _mm_slli_epi16(nearw, 2); + __m128i curr = _mm_add_epi16(nears, diff); // current row + + // horizontal filter works the same based on shifted vers of current + // row. "prev" is current row shifted right by 1 pixel; we need to + // insert the previous pixel value (from t1). + // "next" is current row shifted left by 1 pixel, with first pixel + // of next block of 8 pixels added in. + __m128i prv0 = _mm_slli_si128(curr, 2); + __m128i nxt0 = _mm_srli_si128(curr, 2); + __m128i prev = _mm_insert_epi16(prv0, t1, 0); + __m128i next = _mm_insert_epi16(nxt0, 3*in_near[i+8] + in_far[i+8], 7); + + // horizontal filter, polyphase implementation since it's convenient: + // even pixels = 3*cur + prev = cur*4 + (prev - cur) + // odd pixels = 3*cur + next = cur*4 + (next - cur) + // note the shared term. + __m128i bias = _mm_set1_epi16(8); + __m128i curs = _mm_slli_epi16(curr, 2); + __m128i prvd = _mm_sub_epi16(prev, curr); + __m128i nxtd = _mm_sub_epi16(next, curr); + __m128i curb = _mm_add_epi16(curs, bias); + __m128i even = _mm_add_epi16(prvd, curb); + __m128i odd = _mm_add_epi16(nxtd, curb); + + // interleave even and odd pixels, then undo scaling. + __m128i int0 = _mm_unpacklo_epi16(even, odd); + __m128i int1 = _mm_unpackhi_epi16(even, odd); + __m128i de0 = _mm_srli_epi16(int0, 4); + __m128i de1 = _mm_srli_epi16(int1, 4); + + // pack and write output + __m128i outv = _mm_packus_epi16(de0, de1); + _mm_storeu_si128((__m128i *) (out + i*2), outv); +#elif defined(STBI_NEON) + // load and perform the vertical filtering pass + // this uses 3*x + y = 4*x + (y - x) + uint8x8_t farb = vld1_u8(in_far + i); + uint8x8_t nearb = vld1_u8(in_near + i); + int16x8_t diff = vreinterpretq_s16_u16(vsubl_u8(farb, nearb)); + int16x8_t nears = vreinterpretq_s16_u16(vshll_n_u8(nearb, 2)); + int16x8_t curr = vaddq_s16(nears, diff); // current row + + // horizontal filter works the same based on shifted vers of current + // row. "prev" is current row shifted right by 1 pixel; we need to + // insert the previous pixel value (from t1). + // "next" is current row shifted left by 1 pixel, with first pixel + // of next block of 8 pixels added in. + int16x8_t prv0 = vextq_s16(curr, curr, 7); + int16x8_t nxt0 = vextq_s16(curr, curr, 1); + int16x8_t prev = vsetq_lane_s16(t1, prv0, 0); + int16x8_t next = vsetq_lane_s16(3*in_near[i+8] + in_far[i+8], nxt0, 7); + + // horizontal filter, polyphase implementation since it's convenient: + // even pixels = 3*cur + prev = cur*4 + (prev - cur) + // odd pixels = 3*cur + next = cur*4 + (next - cur) + // note the shared term. 
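+      // concretely (a sketch of the lanes computed below): with p = prev,
+      // c = curr, n = next, all outputs of the vertical filter,
+      //    even = 4*c + (p - c) = 3*c + p
+      //    odd  = 4*c + (n - c) = 3*c + n
+      // and the rounding >>4 applied when storing matches the scalar
+      // stbi__div16(3*t1 + t0 + 8) path.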
+ int16x8_t curs = vshlq_n_s16(curr, 2); + int16x8_t prvd = vsubq_s16(prev, curr); + int16x8_t nxtd = vsubq_s16(next, curr); + int16x8_t even = vaddq_s16(curs, prvd); + int16x8_t odd = vaddq_s16(curs, nxtd); + + // undo scaling and round, then store with even/odd phases interleaved + uint8x8x2_t o; + o.val[0] = vqrshrun_n_s16(even, 4); + o.val[1] = vqrshrun_n_s16(odd, 4); + vst2_u8(out + i*2, o); +#endif + + // "previous" value for next iter + t1 = 3*in_near[i+7] + in_far[i+7]; + } + + t0 = t1; + t1 = 3*in_near[i] + in_far[i]; + out[i*2] = stbi__div16(3*t1 + t0 + 8); + + for (++i; i < w; ++i) { + t0 = t1; + t1 = 3*in_near[i]+in_far[i]; + out[i*2-1] = stbi__div16(3*t0 + t1 + 8); + out[i*2 ] = stbi__div16(3*t1 + t0 + 8); + } + out[w*2-1] = stbi__div4(t1+2); + + STBI_NOTUSED(hs); + + return out; +} +#endif + +static stbi_uc *stbi__resample_row_generic(stbi_uc *out, stbi_uc *in_near, stbi_uc *in_far, int w, int hs) +{ + // resample with nearest-neighbor + int i,j; + STBI_NOTUSED(in_far); + for (i=0; i < w; ++i) + for (j=0; j < hs; ++j) + out[i*hs+j] = in_near[i]; + return out; +} + +// this is a reduced-precision calculation of YCbCr-to-RGB introduced +// to make sure the code produces the same results in both SIMD and scalar +#define stbi__float2fixed(x) (((int) ((x) * 4096.0f + 0.5f)) << 8) +static void stbi__YCbCr_to_RGB_row(stbi_uc *out, const stbi_uc *y, const stbi_uc *pcb, const stbi_uc *pcr, int count, int step) +{ + int i; + for (i=0; i < count; ++i) { + int y_fixed = (y[i] << 20) + (1<<19); // rounding + int r,g,b; + int cr = pcr[i] - 128; + int cb = pcb[i] - 128; + r = y_fixed + cr* stbi__float2fixed(1.40200f); + g = y_fixed + (cr*-stbi__float2fixed(0.71414f)) + ((cb*-stbi__float2fixed(0.34414f)) & 0xffff0000); + b = y_fixed + cb* stbi__float2fixed(1.77200f); + r >>= 20; + g >>= 20; + b >>= 20; + if ((unsigned) r > 255) { if (r < 0) r = 0; else r = 255; } + if ((unsigned) g > 255) { if (g < 0) g = 0; else g = 255; } + if ((unsigned) b > 255) { if (b < 0) b = 0; else b = 255; } + out[0] = (stbi_uc)r; + out[1] = (stbi_uc)g; + out[2] = (stbi_uc)b; + out[3] = 255; + out += step; + } +} + +#if defined(STBI_SSE2) || defined(STBI_NEON) +static void stbi__YCbCr_to_RGB_simd(stbi_uc *out, stbi_uc const *y, stbi_uc const *pcb, stbi_uc const *pcr, int count, int step) +{ + int i = 0; + +#ifdef STBI_SSE2 + // step == 3 is pretty ugly on the final interleave, and i'm not convinced + // it's useful in practice (you wouldn't use it for textures, for example). + // so just accelerate step == 4 case. + if (step == 4) { + // this is a fairly straightforward implementation and not super-optimized. 
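+      // arithmetic sketch: each coefficient is stored as round(c * 4096),
+      // i.e. signed 4.12 fixed point. cr/cb are unpacked into the high
+      // byte (pre-scaled by 1<<8), so _mm_mulhi_epi16 -- which returns
+      // (a*b) >> 16 -- leaves products scaled by 16; y is unpacked over
+      // the 128 bias and shifted down to the same scale, and the final
+      // arithmetic >>4 brings everything back to 0..255.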
+ __m128i signflip = _mm_set1_epi8(-0x80); + __m128i cr_const0 = _mm_set1_epi16( (short) ( 1.40200f*4096.0f+0.5f)); + __m128i cr_const1 = _mm_set1_epi16( - (short) ( 0.71414f*4096.0f+0.5f)); + __m128i cb_const0 = _mm_set1_epi16( - (short) ( 0.34414f*4096.0f+0.5f)); + __m128i cb_const1 = _mm_set1_epi16( (short) ( 1.77200f*4096.0f+0.5f)); + __m128i y_bias = _mm_set1_epi8((char) (unsigned char) 128); + __m128i xw = _mm_set1_epi16(255); // alpha channel + + for (; i+7 < count; i += 8) { + // load + __m128i y_bytes = _mm_loadl_epi64((__m128i *) (y+i)); + __m128i cr_bytes = _mm_loadl_epi64((__m128i *) (pcr+i)); + __m128i cb_bytes = _mm_loadl_epi64((__m128i *) (pcb+i)); + __m128i cr_biased = _mm_xor_si128(cr_bytes, signflip); // -128 + __m128i cb_biased = _mm_xor_si128(cb_bytes, signflip); // -128 + + // unpack to short (and left-shift cr, cb by 8) + __m128i yw = _mm_unpacklo_epi8(y_bias, y_bytes); + __m128i crw = _mm_unpacklo_epi8(_mm_setzero_si128(), cr_biased); + __m128i cbw = _mm_unpacklo_epi8(_mm_setzero_si128(), cb_biased); + + // color transform + __m128i yws = _mm_srli_epi16(yw, 4); + __m128i cr0 = _mm_mulhi_epi16(cr_const0, crw); + __m128i cb0 = _mm_mulhi_epi16(cb_const0, cbw); + __m128i cb1 = _mm_mulhi_epi16(cbw, cb_const1); + __m128i cr1 = _mm_mulhi_epi16(crw, cr_const1); + __m128i rws = _mm_add_epi16(cr0, yws); + __m128i gwt = _mm_add_epi16(cb0, yws); + __m128i bws = _mm_add_epi16(yws, cb1); + __m128i gws = _mm_add_epi16(gwt, cr1); + + // descale + __m128i rw = _mm_srai_epi16(rws, 4); + __m128i bw = _mm_srai_epi16(bws, 4); + __m128i gw = _mm_srai_epi16(gws, 4); + + // back to byte, set up for transpose + __m128i brb = _mm_packus_epi16(rw, bw); + __m128i gxb = _mm_packus_epi16(gw, xw); + + // transpose to interleave channels + __m128i t0 = _mm_unpacklo_epi8(brb, gxb); + __m128i t1 = _mm_unpackhi_epi8(brb, gxb); + __m128i o0 = _mm_unpacklo_epi16(t0, t1); + __m128i o1 = _mm_unpackhi_epi16(t0, t1); + + // store + _mm_storeu_si128((__m128i *) (out + 0), o0); + _mm_storeu_si128((__m128i *) (out + 16), o1); + out += 32; + } + } +#endif + +#ifdef STBI_NEON + // in this version, step=3 support would be easy to add. but is there demand? + if (step == 4) { + // this is a fairly straightforward implementation and not super-optimized. 
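+      // same fixed-point scheme as the SSE2 branch (sketch): vqdmulhq_s16
+      // returns a saturating (2*a*b) >> 16, so cr/cb are widened with <<7
+      // rather than <<8 to compensate for the doubling, y is widened with
+      // <<4 to the matching scale, and vqrshrun_n_s16(x, 4) performs the
+      // final rounding descale to 0..255.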
+ uint8x8_t signflip = vdup_n_u8(0x80); + int16x8_t cr_const0 = vdupq_n_s16( (short) ( 1.40200f*4096.0f+0.5f)); + int16x8_t cr_const1 = vdupq_n_s16( - (short) ( 0.71414f*4096.0f+0.5f)); + int16x8_t cb_const0 = vdupq_n_s16( - (short) ( 0.34414f*4096.0f+0.5f)); + int16x8_t cb_const1 = vdupq_n_s16( (short) ( 1.77200f*4096.0f+0.5f)); + + for (; i+7 < count; i += 8) { + // load + uint8x8_t y_bytes = vld1_u8(y + i); + uint8x8_t cr_bytes = vld1_u8(pcr + i); + uint8x8_t cb_bytes = vld1_u8(pcb + i); + int8x8_t cr_biased = vreinterpret_s8_u8(vsub_u8(cr_bytes, signflip)); + int8x8_t cb_biased = vreinterpret_s8_u8(vsub_u8(cb_bytes, signflip)); + + // expand to s16 + int16x8_t yws = vreinterpretq_s16_u16(vshll_n_u8(y_bytes, 4)); + int16x8_t crw = vshll_n_s8(cr_biased, 7); + int16x8_t cbw = vshll_n_s8(cb_biased, 7); + + // color transform + int16x8_t cr0 = vqdmulhq_s16(crw, cr_const0); + int16x8_t cb0 = vqdmulhq_s16(cbw, cb_const0); + int16x8_t cr1 = vqdmulhq_s16(crw, cr_const1); + int16x8_t cb1 = vqdmulhq_s16(cbw, cb_const1); + int16x8_t rws = vaddq_s16(yws, cr0); + int16x8_t gws = vaddq_s16(vaddq_s16(yws, cb0), cr1); + int16x8_t bws = vaddq_s16(yws, cb1); + + // undo scaling, round, convert to byte + uint8x8x4_t o; + o.val[0] = vqrshrun_n_s16(rws, 4); + o.val[1] = vqrshrun_n_s16(gws, 4); + o.val[2] = vqrshrun_n_s16(bws, 4); + o.val[3] = vdup_n_u8(255); + + // store, interleaving r/g/b/a + vst4_u8(out, o); + out += 8*4; + } + } +#endif + + for (; i < count; ++i) { + int y_fixed = (y[i] << 20) + (1<<19); // rounding + int r,g,b; + int cr = pcr[i] - 128; + int cb = pcb[i] - 128; + r = y_fixed + cr* stbi__float2fixed(1.40200f); + g = y_fixed + cr*-stbi__float2fixed(0.71414f) + ((cb*-stbi__float2fixed(0.34414f)) & 0xffff0000); + b = y_fixed + cb* stbi__float2fixed(1.77200f); + r >>= 20; + g >>= 20; + b >>= 20; + if ((unsigned) r > 255) { if (r < 0) r = 0; else r = 255; } + if ((unsigned) g > 255) { if (g < 0) g = 0; else g = 255; } + if ((unsigned) b > 255) { if (b < 0) b = 0; else b = 255; } + out[0] = (stbi_uc)r; + out[1] = (stbi_uc)g; + out[2] = (stbi_uc)b; + out[3] = 255; + out += step; + } +} +#endif + +// set up the kernels +static void stbi__setup_jpeg(stbi__jpeg *j) +{ + j->idct_block_kernel = stbi__idct_block; + j->YCbCr_to_RGB_kernel = stbi__YCbCr_to_RGB_row; + j->resample_row_hv_2_kernel = stbi__resample_row_hv_2; + +#ifdef STBI_SSE2 + if (stbi__sse2_available()) { + j->idct_block_kernel = stbi__idct_simd; + j->YCbCr_to_RGB_kernel = stbi__YCbCr_to_RGB_simd; + j->resample_row_hv_2_kernel = stbi__resample_row_hv_2_simd; + } +#endif + +#ifdef STBI_NEON + j->idct_block_kernel = stbi__idct_simd; + j->YCbCr_to_RGB_kernel = stbi__YCbCr_to_RGB_simd; + j->resample_row_hv_2_kernel = stbi__resample_row_hv_2_simd; +#endif +} + +// clean up the temporary component buffers +static void stbi__cleanup_jpeg(stbi__jpeg *j) +{ + stbi__free_jpeg_components(j, j->s->img_n, 0); +} + +typedef struct +{ + resample_row_func resample; + stbi_uc *line0,*line1; + int hs,vs; // expansion factor in each axis + int w_lores; // horizontal pixels pre-expansion + int ystep; // how far through vertical expansion we are + int ypos; // which pre-expansion row we're on +} stbi__resample; + +// fast 0..255 * 0..255 => 0..255 rounded multiplication +static stbi_uc stbi__blinn_8x8(stbi_uc x, stbi_uc y) +{ + unsigned int t = x*y + 128; + return (stbi_uc) ((t + (t >>8)) >> 8); +} + +static stbi_uc *load_jpeg_image(stbi__jpeg *z, int *out_x, int *out_y, int *comp, int req_comp) +{ + int n, decode_n, is_rgb; + z->s->img_n = 0; // make 
stbi__cleanup_jpeg safe + + // validate req_comp + if (req_comp < 0 || req_comp > 4) return stbi__errpuc("bad req_comp", "Internal error"); + + // load a jpeg image from whichever source, but leave in YCbCr format + if (!stbi__decode_jpeg_image(z)) { stbi__cleanup_jpeg(z); return NULL; } + + // determine actual number of components to generate + n = req_comp ? req_comp : z->s->img_n >= 3 ? 3 : 1; + + is_rgb = z->s->img_n == 3 && (z->rgb == 3 || (z->app14_color_transform == 0 && !z->jfif)); + + if (z->s->img_n == 3 && n < 3 && !is_rgb) + decode_n = 1; + else + decode_n = z->s->img_n; + + // nothing to do if no components requested; check this now to avoid + // accessing uninitialized coutput[0] later + if (decode_n <= 0) { stbi__cleanup_jpeg(z); return NULL; } + + // resample and color-convert + { + int k; + unsigned int i,j; + stbi_uc *output; + stbi_uc *coutput[4] = { NULL, NULL, NULL, NULL }; + + stbi__resample res_comp[4]; + + for (k=0; k < decode_n; ++k) { + stbi__resample *r = &res_comp[k]; + + // allocate line buffer big enough for upsampling off the edges + // with upsample factor of 4 + z->img_comp[k].linebuf = (stbi_uc *) stbi__malloc(z->s->img_x + 3); + if (!z->img_comp[k].linebuf) { stbi__cleanup_jpeg(z); return stbi__errpuc("outofmem", "Out of memory"); } + + r->hs = z->img_h_max / z->img_comp[k].h; + r->vs = z->img_v_max / z->img_comp[k].v; + r->ystep = r->vs >> 1; + r->w_lores = (z->s->img_x + r->hs-1) / r->hs; + r->ypos = 0; + r->line0 = r->line1 = z->img_comp[k].data; + + if (r->hs == 1 && r->vs == 1) r->resample = resample_row_1; + else if (r->hs == 1 && r->vs == 2) r->resample = stbi__resample_row_v_2; + else if (r->hs == 2 && r->vs == 1) r->resample = stbi__resample_row_h_2; + else if (r->hs == 2 && r->vs == 2) r->resample = z->resample_row_hv_2_kernel; + else r->resample = stbi__resample_row_generic; + } + + // can't error after this so, this is safe + output = (stbi_uc *) stbi__malloc_mad3(n, z->s->img_x, z->s->img_y, 1); + if (!output) { stbi__cleanup_jpeg(z); return stbi__errpuc("outofmem", "Out of memory"); } + + // now go ahead and resample + for (j=0; j < z->s->img_y; ++j) { + stbi_uc *out = output + n * z->s->img_x * j; + for (k=0; k < decode_n; ++k) { + stbi__resample *r = &res_comp[k]; + int y_bot = r->ystep >= (r->vs >> 1); + coutput[k] = r->resample(z->img_comp[k].linebuf, + y_bot ? r->line1 : r->line0, + y_bot ? 
r->line0 : r->line1, + r->w_lores, r->hs); + if (++r->ystep >= r->vs) { + r->ystep = 0; + r->line0 = r->line1; + if (++r->ypos < z->img_comp[k].y) + r->line1 += z->img_comp[k].w2; + } + } + if (n >= 3) { + stbi_uc *y = coutput[0]; + if (z->s->img_n == 3) { + if (is_rgb) { + for (i=0; i < z->s->img_x; ++i) { + out[0] = y[i]; + out[1] = coutput[1][i]; + out[2] = coutput[2][i]; + out[3] = 255; + out += n; + } + } else { + z->YCbCr_to_RGB_kernel(out, y, coutput[1], coutput[2], z->s->img_x, n); + } + } else if (z->s->img_n == 4) { + if (z->app14_color_transform == 0) { // CMYK + for (i=0; i < z->s->img_x; ++i) { + stbi_uc m = coutput[3][i]; + out[0] = stbi__blinn_8x8(coutput[0][i], m); + out[1] = stbi__blinn_8x8(coutput[1][i], m); + out[2] = stbi__blinn_8x8(coutput[2][i], m); + out[3] = 255; + out += n; + } + } else if (z->app14_color_transform == 2) { // YCCK + z->YCbCr_to_RGB_kernel(out, y, coutput[1], coutput[2], z->s->img_x, n); + for (i=0; i < z->s->img_x; ++i) { + stbi_uc m = coutput[3][i]; + out[0] = stbi__blinn_8x8(255 - out[0], m); + out[1] = stbi__blinn_8x8(255 - out[1], m); + out[2] = stbi__blinn_8x8(255 - out[2], m); + out += n; + } + } else { // YCbCr + alpha? Ignore the fourth channel for now + z->YCbCr_to_RGB_kernel(out, y, coutput[1], coutput[2], z->s->img_x, n); + } + } else + for (i=0; i < z->s->img_x; ++i) { + out[0] = out[1] = out[2] = y[i]; + out[3] = 255; // not used if n==3 + out += n; + } + } else { + if (is_rgb) { + if (n == 1) + for (i=0; i < z->s->img_x; ++i) + *out++ = stbi__compute_y(coutput[0][i], coutput[1][i], coutput[2][i]); + else { + for (i=0; i < z->s->img_x; ++i, out += 2) { + out[0] = stbi__compute_y(coutput[0][i], coutput[1][i], coutput[2][i]); + out[1] = 255; + } + } + } else if (z->s->img_n == 4 && z->app14_color_transform == 0) { + for (i=0; i < z->s->img_x; ++i) { + stbi_uc m = coutput[3][i]; + stbi_uc r = stbi__blinn_8x8(coutput[0][i], m); + stbi_uc g = stbi__blinn_8x8(coutput[1][i], m); + stbi_uc b = stbi__blinn_8x8(coutput[2][i], m); + out[0] = stbi__compute_y(r, g, b); + out[1] = 255; + out += n; + } + } else if (z->s->img_n == 4 && z->app14_color_transform == 2) { + for (i=0; i < z->s->img_x; ++i) { + out[0] = stbi__blinn_8x8(255 - coutput[0][i], coutput[3][i]); + out[1] = 255; + out += n; + } + } else { + stbi_uc *y = coutput[0]; + if (n == 1) + for (i=0; i < z->s->img_x; ++i) out[i] = y[i]; + else + for (i=0; i < z->s->img_x; ++i) { *out++ = y[i]; *out++ = 255; } + } + } + } + stbi__cleanup_jpeg(z); + *out_x = z->s->img_x; + *out_y = z->s->img_y; + if (comp) *comp = z->s->img_n >= 3 ? 
3 : 1; // report original components, not output + return output; + } +} + +static void *stbi__jpeg_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri) +{ + unsigned char* result; + stbi__jpeg* j = (stbi__jpeg*) stbi__malloc(sizeof(stbi__jpeg)); + if (!j) return stbi__errpuc("outofmem", "Out of memory"); + memset(j, 0, sizeof(stbi__jpeg)); + STBI_NOTUSED(ri); + j->s = s; + stbi__setup_jpeg(j); + result = load_jpeg_image(j, x,y,comp,req_comp); + STBI_FREE(j); + return result; +} + +static int stbi__jpeg_test(stbi__context *s) +{ + int r; + stbi__jpeg* j = (stbi__jpeg*)stbi__malloc(sizeof(stbi__jpeg)); + if (!j) return stbi__err("outofmem", "Out of memory"); + memset(j, 0, sizeof(stbi__jpeg)); + j->s = s; + stbi__setup_jpeg(j); + r = stbi__decode_jpeg_header(j, STBI__SCAN_type); + stbi__rewind(s); + STBI_FREE(j); + return r; +} + +static int stbi__jpeg_info_raw(stbi__jpeg *j, int *x, int *y, int *comp) +{ + if (!stbi__decode_jpeg_header(j, STBI__SCAN_header)) { + stbi__rewind( j->s ); + return 0; + } + if (x) *x = j->s->img_x; + if (y) *y = j->s->img_y; + if (comp) *comp = j->s->img_n >= 3 ? 3 : 1; + return 1; +} + +static int stbi__jpeg_info(stbi__context *s, int *x, int *y, int *comp) +{ + int result; + stbi__jpeg* j = (stbi__jpeg*) (stbi__malloc(sizeof(stbi__jpeg))); + if (!j) return stbi__err("outofmem", "Out of memory"); + memset(j, 0, sizeof(stbi__jpeg)); + j->s = s; + result = stbi__jpeg_info_raw(j, x, y, comp); + STBI_FREE(j); + return result; +} +#endif + +// public domain zlib decode v0.2 Sean Barrett 2006-11-18 +// simple implementation +// - all input must be provided in an upfront buffer +// - all output is written to a single output buffer (can malloc/realloc) +// performance +// - fast huffman + +#ifndef STBI_NO_ZLIB + +// fast-way is faster to check than jpeg huffman, but slow way is slower +#define STBI__ZFAST_BITS 9 // accelerate all cases in default tables +#define STBI__ZFAST_MASK ((1 << STBI__ZFAST_BITS) - 1) +#define STBI__ZNSYMS 288 // number of symbols in literal/length alphabet + +// zlib-style huffman encoding +// (jpegs packs from left, zlib from right, so can't share code) +typedef struct +{ + stbi__uint16 fast[1 << STBI__ZFAST_BITS]; + stbi__uint16 firstcode[16]; + int maxcode[17]; + stbi__uint16 firstsymbol[16]; + stbi_uc size[STBI__ZNSYMS]; + stbi__uint16 value[STBI__ZNSYMS]; +} stbi__zhuffman; + +stbi_inline static int stbi__bitreverse16(int n) +{ + n = ((n & 0xAAAA) >> 1) | ((n & 0x5555) << 1); + n = ((n & 0xCCCC) >> 2) | ((n & 0x3333) << 2); + n = ((n & 0xF0F0) >> 4) | ((n & 0x0F0F) << 4); + n = ((n & 0xFF00) >> 8) | ((n & 0x00FF) << 8); + return n; +} + +stbi_inline static int stbi__bit_reverse(int v, int bits) +{ + STBI_ASSERT(bits <= 16); + // to bit reverse n bits, reverse 16 and shift + // e.g. 
11 bits, bit reverse and shift away 5 + return stbi__bitreverse16(v) >> (16-bits); +} + +static int stbi__zbuild_huffman(stbi__zhuffman *z, const stbi_uc *sizelist, int num) +{ + int i,k=0; + int code, next_code[16], sizes[17]; + + // DEFLATE spec for generating codes + memset(sizes, 0, sizeof(sizes)); + memset(z->fast, 0, sizeof(z->fast)); + for (i=0; i < num; ++i) + ++sizes[sizelist[i]]; + sizes[0] = 0; + for (i=1; i < 16; ++i) + if (sizes[i] > (1 << i)) + return stbi__err("bad sizes", "Corrupt PNG"); + code = 0; + for (i=1; i < 16; ++i) { + next_code[i] = code; + z->firstcode[i] = (stbi__uint16) code; + z->firstsymbol[i] = (stbi__uint16) k; + code = (code + sizes[i]); + if (sizes[i]) + if (code-1 >= (1 << i)) return stbi__err("bad codelengths","Corrupt PNG"); + z->maxcode[i] = code << (16-i); // preshift for inner loop + code <<= 1; + k += sizes[i]; + } + z->maxcode[16] = 0x10000; // sentinel + for (i=0; i < num; ++i) { + int s = sizelist[i]; + if (s) { + int c = next_code[s] - z->firstcode[s] + z->firstsymbol[s]; + stbi__uint16 fastv = (stbi__uint16) ((s << 9) | i); + z->size [c] = (stbi_uc ) s; + z->value[c] = (stbi__uint16) i; + if (s <= STBI__ZFAST_BITS) { + int j = stbi__bit_reverse(next_code[s],s); + while (j < (1 << STBI__ZFAST_BITS)) { + z->fast[j] = fastv; + j += (1 << s); + } + } + ++next_code[s]; + } + } + return 1; +} + +// zlib-from-memory implementation for PNG reading +// because PNG allows splitting the zlib stream arbitrarily, +// and it's annoying structurally to have PNG call ZLIB call PNG, +// we require PNG read all the IDATs and combine them into a single +// memory buffer + +typedef struct +{ + stbi_uc *zbuffer, *zbuffer_end; + int num_bits; + stbi__uint32 code_buffer; + + char *zout; + char *zout_start; + char *zout_end; + int z_expandable; + + stbi__zhuffman z_length, z_distance; +} stbi__zbuf; + +stbi_inline static int stbi__zeof(stbi__zbuf *z) +{ + return (z->zbuffer >= z->zbuffer_end); +} + +stbi_inline static stbi_uc stbi__zget8(stbi__zbuf *z) +{ + return stbi__zeof(z) ? 0 : *z->zbuffer++; +} + +static void stbi__fill_bits(stbi__zbuf *z) +{ + do { + if (z->code_buffer >= (1U << z->num_bits)) { + z->zbuffer = z->zbuffer_end; /* treat this as EOF so we fail. */ + return; + } + z->code_buffer |= (unsigned int) stbi__zget8(z) << z->num_bits; + z->num_bits += 8; + } while (z->num_bits <= 24); +} + +stbi_inline static unsigned int stbi__zreceive(stbi__zbuf *z, int n) +{ + unsigned int k; + if (z->num_bits < n) stbi__fill_bits(z); + k = z->code_buffer & ((1 << n) - 1); + z->code_buffer >>= n; + z->num_bits -= n; + return k; +} + +static int stbi__zhuffman_decode_slowpath(stbi__zbuf *a, stbi__zhuffman *z) +{ + int b,s,k; + // not resolved by fast table, so compute it the slow way + // use jpeg approach, which requires MSbits at top + k = stbi__bit_reverse(a->code_buffer, 16); + for (s=STBI__ZFAST_BITS+1; ; ++s) + if (k < z->maxcode[s]) + break; + if (s >= 16) return -1; // invalid code! + // code size is s, so: + b = (k >> (16-s)) - z->firstcode[s] + z->firstsymbol[s]; + if (b >= STBI__ZNSYMS) return -1; // some data was corrupt somewhere! + if (z->size[b] != s) return -1; // was originally an assert, but report failure instead. + a->code_buffer >>= s; + a->num_bits -= s; + return z->value[b]; +} + +stbi_inline static int stbi__zhuffman_decode(stbi__zbuf *a, stbi__zhuffman *z) +{ + int b,s; + if (a->num_bits < 16) { + if (stbi__zeof(a)) { + return -1; /* report error for unexpected end of data. 
*/ + } + stbi__fill_bits(a); + } + b = z->fast[a->code_buffer & STBI__ZFAST_MASK]; + if (b) { + s = b >> 9; + a->code_buffer >>= s; + a->num_bits -= s; + return b & 511; + } + return stbi__zhuffman_decode_slowpath(a, z); +} + +static int stbi__zexpand(stbi__zbuf *z, char *zout, int n) // need to make room for n bytes +{ + char *q; + unsigned int cur, limit, old_limit; + z->zout = zout; + if (!z->z_expandable) return stbi__err("output buffer limit","Corrupt PNG"); + cur = (unsigned int) (z->zout - z->zout_start); + limit = old_limit = (unsigned) (z->zout_end - z->zout_start); + if (UINT_MAX - cur < (unsigned) n) return stbi__err("outofmem", "Out of memory"); + while (cur + n > limit) { + if(limit > UINT_MAX / 2) return stbi__err("outofmem", "Out of memory"); + limit *= 2; + } + q = (char *) STBI_REALLOC_SIZED(z->zout_start, old_limit, limit); + STBI_NOTUSED(old_limit); + if (q == NULL) return stbi__err("outofmem", "Out of memory"); + z->zout_start = q; + z->zout = q + cur; + z->zout_end = q + limit; + return 1; +} + +static const int stbi__zlength_base[31] = { + 3,4,5,6,7,8,9,10,11,13, + 15,17,19,23,27,31,35,43,51,59, + 67,83,99,115,131,163,195,227,258,0,0 }; + +static const int stbi__zlength_extra[31]= +{ 0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0,0,0 }; + +static const int stbi__zdist_base[32] = { 1,2,3,4,5,7,9,13,17,25,33,49,65,97,129,193, +257,385,513,769,1025,1537,2049,3073,4097,6145,8193,12289,16385,24577,0,0}; + +static const int stbi__zdist_extra[32] = +{ 0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13}; + +static int stbi__parse_huffman_block(stbi__zbuf *a) +{ + char *zout = a->zout; + for(;;) { + int z = stbi__zhuffman_decode(a, &a->z_length); + if (z < 256) { + if (z < 0) return stbi__err("bad huffman code","Corrupt PNG"); // error in huffman codes + if (zout >= a->zout_end) { + if (!stbi__zexpand(a, zout, 1)) return 0; + zout = a->zout; + } + *zout++ = (char) z; + } else { + stbi_uc *p; + int len,dist; + if (z == 256) { + a->zout = zout; + return 1; + } + if (z >= 286) return stbi__err("bad huffman code","Corrupt PNG"); // per DEFLATE, length codes 286 and 287 must not appear in compressed data + z -= 257; + len = stbi__zlength_base[z]; + if (stbi__zlength_extra[z]) len += stbi__zreceive(a, stbi__zlength_extra[z]); + z = stbi__zhuffman_decode(a, &a->z_distance); + if (z < 0 || z >= 30) return stbi__err("bad huffman code","Corrupt PNG"); // per DEFLATE, distance codes 30 and 31 must not appear in compressed data + dist = stbi__zdist_base[z]; + if (stbi__zdist_extra[z]) dist += stbi__zreceive(a, stbi__zdist_extra[z]); + if (zout - a->zout_start < dist) return stbi__err("bad dist","Corrupt PNG"); + if (zout + len > a->zout_end) { + if (!stbi__zexpand(a, zout, len)) return 0; + zout = a->zout; + } + p = (stbi_uc *) (zout - dist); + if (dist == 1) { // run of one byte; common in images. 
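+            // dist == 1 means every copied byte is the same source byte, i.e. a
+            // memset-style fill: e.g. len=4 over "A" appends "AAAA" to the output.
+            // reading *p once is what makes this case worth special-casing.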
+ stbi_uc v = *p; + if (len) { do *zout++ = v; while (--len); } + } else { + if (len) { do *zout++ = *p++; while (--len); } + } + } + } +} + +static int stbi__compute_huffman_codes(stbi__zbuf *a) +{ + static const stbi_uc length_dezigzag[19] = { 16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15 }; + stbi__zhuffman z_codelength; + stbi_uc lencodes[286+32+137];//padding for maximum single op + stbi_uc codelength_sizes[19]; + int i,n; + + int hlit = stbi__zreceive(a,5) + 257; + int hdist = stbi__zreceive(a,5) + 1; + int hclen = stbi__zreceive(a,4) + 4; + int ntot = hlit + hdist; + + memset(codelength_sizes, 0, sizeof(codelength_sizes)); + for (i=0; i < hclen; ++i) { + int s = stbi__zreceive(a,3); + codelength_sizes[length_dezigzag[i]] = (stbi_uc) s; + } + if (!stbi__zbuild_huffman(&z_codelength, codelength_sizes, 19)) return 0; + + n = 0; + while (n < ntot) { + int c = stbi__zhuffman_decode(a, &z_codelength); + if (c < 0 || c >= 19) return stbi__err("bad codelengths", "Corrupt PNG"); + if (c < 16) + lencodes[n++] = (stbi_uc) c; + else { + stbi_uc fill = 0; + if (c == 16) { + c = stbi__zreceive(a,2)+3; + if (n == 0) return stbi__err("bad codelengths", "Corrupt PNG"); + fill = lencodes[n-1]; + } else if (c == 17) { + c = stbi__zreceive(a,3)+3; + } else if (c == 18) { + c = stbi__zreceive(a,7)+11; + } else { + return stbi__err("bad codelengths", "Corrupt PNG"); + } + if (ntot - n < c) return stbi__err("bad codelengths", "Corrupt PNG"); + memset(lencodes+n, fill, c); + n += c; + } + } + if (n != ntot) return stbi__err("bad codelengths","Corrupt PNG"); + if (!stbi__zbuild_huffman(&a->z_length, lencodes, hlit)) return 0; + if (!stbi__zbuild_huffman(&a->z_distance, lencodes+hlit, hdist)) return 0; + return 1; +} + +static int stbi__parse_uncompressed_block(stbi__zbuf *a) +{ + stbi_uc header[4]; + int len,nlen,k; + if (a->num_bits & 7) + stbi__zreceive(a, a->num_bits & 7); // discard + // drain the bit-packed data into header + k = 0; + while (a->num_bits > 0) { + header[k++] = (stbi_uc) (a->code_buffer & 255); // suppress MSVC run-time check + a->code_buffer >>= 8; + a->num_bits -= 8; + } + if (a->num_bits < 0) return stbi__err("zlib corrupt","Corrupt PNG"); + // now fill header the normal way + while (k < 4) + header[k++] = stbi__zget8(a); + len = header[1] * 256 + header[0]; + nlen = header[3] * 256 + header[2]; + if (nlen != (len ^ 0xffff)) return stbi__err("zlib corrupt","Corrupt PNG"); + if (a->zbuffer + len > a->zbuffer_end) return stbi__err("read past buffer","Corrupt PNG"); + if (a->zout + len > a->zout_end) + if (!stbi__zexpand(a, a->zout, len)) return 0; + memcpy(a->zout, a->zbuffer, len); + a->zbuffer += len; + a->zout += len; + return 1; +} + +static int stbi__parse_zlib_header(stbi__zbuf *a) +{ + int cmf = stbi__zget8(a); + int cm = cmf & 15; + /* int cinfo = cmf >> 4; */ + int flg = stbi__zget8(a); + if (stbi__zeof(a)) return stbi__err("bad zlib header","Corrupt PNG"); // zlib spec + if ((cmf*256+flg) % 31 != 0) return stbi__err("bad zlib header","Corrupt PNG"); // zlib spec + if (flg & 32) return stbi__err("no preset dict","Corrupt PNG"); // preset dictionary not allowed in png + if (cm != 8) return stbi__err("bad compression","Corrupt PNG"); // DEFLATE required for png + // window = 1 << (8 + cinfo)... 
but who cares, we fully buffer output + return 1; +} + +static const stbi_uc stbi__zdefault_length[STBI__ZNSYMS] = +{ + 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, + 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, + 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, + 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, + 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8, 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, + 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, + 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, + 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, + 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7,8,8,8,8,8,8,8,8 +}; +static const stbi_uc stbi__zdefault_distance[32] = +{ + 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5 +}; +/* +Init algorithm: +{ + int i; // use <= to match clearly with spec + for (i=0; i <= 143; ++i) stbi__zdefault_length[i] = 8; + for ( ; i <= 255; ++i) stbi__zdefault_length[i] = 9; + for ( ; i <= 279; ++i) stbi__zdefault_length[i] = 7; + for ( ; i <= 287; ++i) stbi__zdefault_length[i] = 8; + + for (i=0; i <= 31; ++i) stbi__zdefault_distance[i] = 5; +} +*/ + +static int stbi__parse_zlib(stbi__zbuf *a, int parse_header) +{ + int final, type; + if (parse_header) + if (!stbi__parse_zlib_header(a)) return 0; + a->num_bits = 0; + a->code_buffer = 0; + do { + final = stbi__zreceive(a,1); + type = stbi__zreceive(a,2); + if (type == 0) { + if (!stbi__parse_uncompressed_block(a)) return 0; + } else if (type == 3) { + return 0; + } else { + if (type == 1) { + // use fixed code lengths + if (!stbi__zbuild_huffman(&a->z_length , stbi__zdefault_length , STBI__ZNSYMS)) return 0; + if (!stbi__zbuild_huffman(&a->z_distance, stbi__zdefault_distance, 32)) return 0; + } else { + if (!stbi__compute_huffman_codes(a)) return 0; + } + if (!stbi__parse_huffman_block(a)) return 0; + } + } while (!final); + return 1; +} + +static int stbi__do_zlib(stbi__zbuf *a, char *obuf, int olen, int exp, int parse_header) +{ + a->zout_start = obuf; + a->zout = obuf; + a->zout_end = obuf + olen; + a->z_expandable = exp; + + return stbi__parse_zlib(a, parse_header); +} + +STBIDEF char *stbi_zlib_decode_malloc_guesssize(const char *buffer, int len, int initial_size, int *outlen) +{ + stbi__zbuf a; + char *p = (char *) stbi__malloc(initial_size); + if (p == NULL) return NULL; + a.zbuffer = (stbi_uc *) buffer; + a.zbuffer_end = (stbi_uc *) buffer + len; + if (stbi__do_zlib(&a, p, initial_size, 1, 1)) { + if (outlen) *outlen = (int) (a.zout - a.zout_start); + return a.zout_start; + } else { + STBI_FREE(a.zout_start); + return NULL; + } +} + +STBIDEF char *stbi_zlib_decode_malloc(char const *buffer, int len, int *outlen) +{ + return stbi_zlib_decode_malloc_guesssize(buffer, len, 16384, outlen); +} + +STBIDEF char *stbi_zlib_decode_malloc_guesssize_headerflag(const char *buffer, int len, int initial_size, int *outlen, int parse_header) +{ + stbi__zbuf a; + char *p = (char *) stbi__malloc(initial_size); + if (p == NULL) return NULL; + a.zbuffer = (stbi_uc *) buffer; + a.zbuffer_end = (stbi_uc *) buffer + len; + if (stbi__do_zlib(&a, p, initial_size, 1, parse_header)) { + if (outlen) *outlen = (int) (a.zout - a.zout_start); + return a.zout_start; + } else { + STBI_FREE(a.zout_start); + return NULL; + } +} + +STBIDEF int stbi_zlib_decode_buffer(char *obuffer, int olen, char const *ibuffer, int ilen) +{ + stbi__zbuf a; + a.zbuffer = (stbi_uc *) ibuffer; + a.zbuffer_end = (stbi_uc *) ibuffer + 
ilen; + if (stbi__do_zlib(&a, obuffer, olen, 0, 1)) + return (int) (a.zout - a.zout_start); + else + return -1; +} + +STBIDEF char *stbi_zlib_decode_noheader_malloc(char const *buffer, int len, int *outlen) +{ + stbi__zbuf a; + char *p = (char *) stbi__malloc(16384); + if (p == NULL) return NULL; + a.zbuffer = (stbi_uc *) buffer; + a.zbuffer_end = (stbi_uc *) buffer+len; + if (stbi__do_zlib(&a, p, 16384, 1, 0)) { + if (outlen) *outlen = (int) (a.zout - a.zout_start); + return a.zout_start; + } else { + STBI_FREE(a.zout_start); + return NULL; + } +} + +STBIDEF int stbi_zlib_decode_noheader_buffer(char *obuffer, int olen, const char *ibuffer, int ilen) +{ + stbi__zbuf a; + a.zbuffer = (stbi_uc *) ibuffer; + a.zbuffer_end = (stbi_uc *) ibuffer + ilen; + if (stbi__do_zlib(&a, obuffer, olen, 0, 0)) + return (int) (a.zout - a.zout_start); + else + return -1; +} +#endif + +// public domain "baseline" PNG decoder v0.10 Sean Barrett 2006-11-18 +// simple implementation +// - only 8-bit samples +// - no CRC checking +// - allocates lots of intermediate memory +// - avoids problem of streaming data between subsystems +// - avoids explicit window management +// performance +// - uses stb_zlib, a PD zlib implementation with fast huffman decoding + +#ifndef STBI_NO_PNG +typedef struct +{ + stbi__uint32 length; + stbi__uint32 type; +} stbi__pngchunk; + +static stbi__pngchunk stbi__get_chunk_header(stbi__context *s) +{ + stbi__pngchunk c; + c.length = stbi__get32be(s); + c.type = stbi__get32be(s); + return c; +} + +static int stbi__check_png_header(stbi__context *s) +{ + static const stbi_uc png_sig[8] = { 137,80,78,71,13,10,26,10 }; + int i; + for (i=0; i < 8; ++i) + if (stbi__get8(s) != png_sig[i]) return stbi__err("bad png sig","Not a PNG"); + return 1; +} + +typedef struct +{ + stbi__context *s; + stbi_uc *idata, *expanded, *out; + int depth; +} stbi__png; + + +enum { + STBI__F_none=0, + STBI__F_sub=1, + STBI__F_up=2, + STBI__F_avg=3, + STBI__F_paeth=4, + // synthetic filters used for first scanline to avoid needing a dummy row of 0s + STBI__F_avg_first, + STBI__F_paeth_first +}; + +static stbi_uc first_row_filter[5] = +{ + STBI__F_none, + STBI__F_sub, + STBI__F_none, + STBI__F_avg_first, + STBI__F_paeth_first +}; + +static int stbi__paeth(int a, int b, int c) +{ + int p = a + b - c; + int pa = abs(p-a); + int pb = abs(p-b); + int pc = abs(p-c); + if (pa <= pb && pa <= pc) return a; + if (pb <= pc) return b; + return c; +} + +static const stbi_uc stbi__depth_scale_table[9] = { 0, 0xff, 0x55, 0, 0x11, 0,0,0, 0x01 }; + +// create the png data from post-deflated data +static int stbi__create_png_image_raw(stbi__png *a, stbi_uc *raw, stbi__uint32 raw_len, int out_n, stbi__uint32 x, stbi__uint32 y, int depth, int color) +{ + int bytes = (depth == 16? 
2 : 1); + stbi__context *s = a->s; + stbi__uint32 i,j,stride = x*out_n*bytes; + stbi__uint32 img_len, img_width_bytes; + int k; + int img_n = s->img_n; // copy it into a local for later + + int output_bytes = out_n*bytes; + int filter_bytes = img_n*bytes; + int width = x; + + STBI_ASSERT(out_n == s->img_n || out_n == s->img_n+1); + a->out = (stbi_uc *) stbi__malloc_mad3(x, y, output_bytes, 0); // extra bytes to write off the end into + if (!a->out) return stbi__err("outofmem", "Out of memory"); + + if (!stbi__mad3sizes_valid(img_n, x, depth, 7)) return stbi__err("too large", "Corrupt PNG"); + img_width_bytes = (((img_n * x * depth) + 7) >> 3); + img_len = (img_width_bytes + 1) * y; + + // we used to check for exact match between raw_len and img_len on non-interlaced PNGs, + // but issue #276 reported a PNG in the wild that had extra data at the end (all zeros), + // so just check for raw_len < img_len always. + if (raw_len < img_len) return stbi__err("not enough pixels","Corrupt PNG"); + + for (j=0; j < y; ++j) { + stbi_uc *cur = a->out + stride*j; + stbi_uc *prior; + int filter = *raw++; + + if (filter > 4) + return stbi__err("invalid filter","Corrupt PNG"); + + if (depth < 8) { + if (img_width_bytes > x) return stbi__err("invalid width","Corrupt PNG"); + cur += x*out_n - img_width_bytes; // store output to the rightmost img_len bytes, so we can decode in place + filter_bytes = 1; + width = img_width_bytes; + } + prior = cur - stride; // bugfix: need to compute this after 'cur +=' computation above + + // if first row, use special filter that doesn't sample previous row + if (j == 0) filter = first_row_filter[filter]; + + // handle first byte explicitly + for (k=0; k < filter_bytes; ++k) { + switch (filter) { + case STBI__F_none : cur[k] = raw[k]; break; + case STBI__F_sub : cur[k] = raw[k]; break; + case STBI__F_up : cur[k] = STBI__BYTECAST(raw[k] + prior[k]); break; + case STBI__F_avg : cur[k] = STBI__BYTECAST(raw[k] + (prior[k]>>1)); break; + case STBI__F_paeth : cur[k] = STBI__BYTECAST(raw[k] + stbi__paeth(0,prior[k],0)); break; + case STBI__F_avg_first : cur[k] = raw[k]; break; + case STBI__F_paeth_first: cur[k] = raw[k]; break; + } + } + + if (depth == 8) { + if (img_n != out_n) + cur[img_n] = 255; // first pixel + raw += img_n; + cur += out_n; + prior += out_n; + } else if (depth == 16) { + if (img_n != out_n) { + cur[filter_bytes] = 255; // first pixel top byte + cur[filter_bytes+1] = 255; // first pixel bottom byte + } + raw += filter_bytes; + cur += output_bytes; + prior += output_bytes; + } else { + raw += 1; + cur += 1; + prior += 1; + } + + // this is a little gross, so that we don't switch per-pixel or per-component + if (depth < 8 || img_n == out_n) { + int nk = (width - 1)*filter_bytes; + #define STBI__CASE(f) \ + case f: \ + for (k=0; k < nk; ++k) + switch (filter) { + // "none" filter turns into a memcpy here; make that explicit. 
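+            // here cur[k-filter_bytes] is the byte to the left and prior[k] the byte
+            // above; "avg" adds their floor-average, and "paeth" adds whichever of the
+            // three neighbors is closest to left+above-upperleft (see stbi__paeth).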
+            case STBI__F_none:               memcpy(cur, raw, nk); break;
+            STBI__CASE(STBI__F_sub)          { cur[k] = STBI__BYTECAST(raw[k] + cur[k-filter_bytes]); } break;
+            STBI__CASE(STBI__F_up)           { cur[k] = STBI__BYTECAST(raw[k] + prior[k]); } break;
+            STBI__CASE(STBI__F_avg)          { cur[k] = STBI__BYTECAST(raw[k] + ((prior[k] + cur[k-filter_bytes])>>1)); } break;
+            STBI__CASE(STBI__F_paeth)        { cur[k] = STBI__BYTECAST(raw[k] + stbi__paeth(cur[k-filter_bytes],prior[k],prior[k-filter_bytes])); } break;
+            STBI__CASE(STBI__F_avg_first)    { cur[k] = STBI__BYTECAST(raw[k] + (cur[k-filter_bytes] >> 1)); } break;
+            STBI__CASE(STBI__F_paeth_first)  { cur[k] = STBI__BYTECAST(raw[k] + stbi__paeth(cur[k-filter_bytes],0,0)); } break;
+         }
+         #undef STBI__CASE
+         raw += nk;
+      } else {
+         STBI_ASSERT(img_n+1 == out_n);
+         #define STBI__CASE(f) \
+             case f: \
+                for (i=x-1; i >= 1; --i, cur[filter_bytes]=255,raw+=filter_bytes,cur+=output_bytes,prior+=output_bytes) \
+                   for (k=0; k < filter_bytes; ++k)
+         switch (filter) {
+            STBI__CASE(STBI__F_none)         { cur[k] = raw[k]; } break;
+            STBI__CASE(STBI__F_sub)          { cur[k] = STBI__BYTECAST(raw[k] + cur[k- output_bytes]); } break;
+            STBI__CASE(STBI__F_up)           { cur[k] = STBI__BYTECAST(raw[k] + prior[k]); } break;
+            STBI__CASE(STBI__F_avg)          { cur[k] = STBI__BYTECAST(raw[k] + ((prior[k] + cur[k- output_bytes])>>1)); } break;
+            STBI__CASE(STBI__F_paeth)        { cur[k] = STBI__BYTECAST(raw[k] + stbi__paeth(cur[k- output_bytes],prior[k],prior[k- output_bytes])); } break;
+            STBI__CASE(STBI__F_avg_first)    { cur[k] = STBI__BYTECAST(raw[k] + (cur[k- output_bytes] >> 1)); } break;
+            STBI__CASE(STBI__F_paeth_first)  { cur[k] = STBI__BYTECAST(raw[k] + stbi__paeth(cur[k- output_bytes],0,0)); } break;
+         }
+         #undef STBI__CASE
+
+         // the loop above sets the high byte of the pixels' alpha, but for
+         // 16-bit png files we also need the low byte set. we'll do that here.
+         if (depth == 16) {
+            cur = a->out + stride*j; // start at the beginning of the row again
+            for (i=0; i < x; ++i,cur+=output_bytes) {
+               cur[filter_bytes+1] = 255;
+            }
+         }
+      }
+   }
+
+   // we make a separate pass to expand bits to pixels; for performance,
+   // this could run two scanlines behind the above code, so it won't
+   // interfere with filtering but will still be in the cache.
+   if (depth < 8) {
+      for (j=0; j < y; ++j) {
+         stbi_uc *cur = a->out + stride*j;
+         stbi_uc *in  = a->out + stride*j + x*out_n - img_width_bytes;
+         // unpack 1/2/4-bit into an 8-bit buffer. allows us to keep the common 8-bit path optimal at minimal cost for 1/2/4-bit
+         // png guarantees byte alignment; if width is not a multiple of 8/4/2 we'll decode dummy trailing data that will be skipped in the later loop
+         stbi_uc scale = (color == 0) ? stbi__depth_scale_table[depth] : 1; // scale grayscale values to 0..255 range
+
+         // note that the final byte might overshoot and write more data than desired.
+         // we can allocate enough data that this never writes out of memory, but it
+         // could also overwrite the next scanline. can it overwrite non-empty data
+         // on the next scanline? yes, consider 1-pixel-wide scanlines with 1-bit-per-pixel.
+ // so we need to explicitly clamp the final ones + + if (depth == 4) { + for (k=x*img_n; k >= 2; k-=2, ++in) { + *cur++ = scale * ((*in >> 4) ); + *cur++ = scale * ((*in ) & 0x0f); + } + if (k > 0) *cur++ = scale * ((*in >> 4) ); + } else if (depth == 2) { + for (k=x*img_n; k >= 4; k-=4, ++in) { + *cur++ = scale * ((*in >> 6) ); + *cur++ = scale * ((*in >> 4) & 0x03); + *cur++ = scale * ((*in >> 2) & 0x03); + *cur++ = scale * ((*in ) & 0x03); + } + if (k > 0) *cur++ = scale * ((*in >> 6) ); + if (k > 1) *cur++ = scale * ((*in >> 4) & 0x03); + if (k > 2) *cur++ = scale * ((*in >> 2) & 0x03); + } else if (depth == 1) { + for (k=x*img_n; k >= 8; k-=8, ++in) { + *cur++ = scale * ((*in >> 7) ); + *cur++ = scale * ((*in >> 6) & 0x01); + *cur++ = scale * ((*in >> 5) & 0x01); + *cur++ = scale * ((*in >> 4) & 0x01); + *cur++ = scale * ((*in >> 3) & 0x01); + *cur++ = scale * ((*in >> 2) & 0x01); + *cur++ = scale * ((*in >> 1) & 0x01); + *cur++ = scale * ((*in ) & 0x01); + } + if (k > 0) *cur++ = scale * ((*in >> 7) ); + if (k > 1) *cur++ = scale * ((*in >> 6) & 0x01); + if (k > 2) *cur++ = scale * ((*in >> 5) & 0x01); + if (k > 3) *cur++ = scale * ((*in >> 4) & 0x01); + if (k > 4) *cur++ = scale * ((*in >> 3) & 0x01); + if (k > 5) *cur++ = scale * ((*in >> 2) & 0x01); + if (k > 6) *cur++ = scale * ((*in >> 1) & 0x01); + } + if (img_n != out_n) { + int q; + // insert alpha = 255 + cur = a->out + stride*j; + if (img_n == 1) { + for (q=x-1; q >= 0; --q) { + cur[q*2+1] = 255; + cur[q*2+0] = cur[q]; + } + } else { + STBI_ASSERT(img_n == 3); + for (q=x-1; q >= 0; --q) { + cur[q*4+3] = 255; + cur[q*4+2] = cur[q*3+2]; + cur[q*4+1] = cur[q*3+1]; + cur[q*4+0] = cur[q*3+0]; + } + } + } + } + } else if (depth == 16) { + // force the image data from big-endian to platform-native. + // this is done in a separate pass due to the decoding relying + // on the data being untouched, but could probably be done + // per-line during decode if care is taken. + stbi_uc *cur = a->out; + stbi__uint16 *cur16 = (stbi__uint16*)cur; + + for(i=0; i < x*y*out_n; ++i,cur16++,cur+=2) { + *cur16 = (cur[0] << 8) | cur[1]; + } + } + + return 1; +} + +static int stbi__create_png_image(stbi__png *a, stbi_uc *image_data, stbi__uint32 image_data_len, int out_n, int depth, int color, int interlaced) +{ + int bytes = (depth == 16 ? 
2 : 1); + int out_bytes = out_n * bytes; + stbi_uc *final; + int p; + if (!interlaced) + return stbi__create_png_image_raw(a, image_data, image_data_len, out_n, a->s->img_x, a->s->img_y, depth, color); + + // de-interlacing + final = (stbi_uc *) stbi__malloc_mad3(a->s->img_x, a->s->img_y, out_bytes, 0); + if (!final) return stbi__err("outofmem", "Out of memory"); + for (p=0; p < 7; ++p) { + int xorig[] = { 0,4,0,2,0,1,0 }; + int yorig[] = { 0,0,4,0,2,0,1 }; + int xspc[] = { 8,8,4,4,2,2,1 }; + int yspc[] = { 8,8,8,4,4,2,2 }; + int i,j,x,y; + // pass1_x[4] = 0, pass1_x[5] = 1, pass1_x[12] = 1 + x = (a->s->img_x - xorig[p] + xspc[p]-1) / xspc[p]; + y = (a->s->img_y - yorig[p] + yspc[p]-1) / yspc[p]; + if (x && y) { + stbi__uint32 img_len = ((((a->s->img_n * x * depth) + 7) >> 3) + 1) * y; + if (!stbi__create_png_image_raw(a, image_data, image_data_len, out_n, x, y, depth, color)) { + STBI_FREE(final); + return 0; + } + for (j=0; j < y; ++j) { + for (i=0; i < x; ++i) { + int out_y = j*yspc[p]+yorig[p]; + int out_x = i*xspc[p]+xorig[p]; + memcpy(final + out_y*a->s->img_x*out_bytes + out_x*out_bytes, + a->out + (j*x+i)*out_bytes, out_bytes); + } + } + STBI_FREE(a->out); + image_data += img_len; + image_data_len -= img_len; + } + } + a->out = final; + + return 1; +} + +static int stbi__compute_transparency(stbi__png *z, stbi_uc tc[3], int out_n) +{ + stbi__context *s = z->s; + stbi__uint32 i, pixel_count = s->img_x * s->img_y; + stbi_uc *p = z->out; + + // compute color-based transparency, assuming we've + // already got 255 as the alpha value in the output + STBI_ASSERT(out_n == 2 || out_n == 4); + + if (out_n == 2) { + for (i=0; i < pixel_count; ++i) { + p[1] = (p[0] == tc[0] ? 0 : 255); + p += 2; + } + } else { + for (i=0; i < pixel_count; ++i) { + if (p[0] == tc[0] && p[1] == tc[1] && p[2] == tc[2]) + p[3] = 0; + p += 4; + } + } + return 1; +} + +static int stbi__compute_transparency16(stbi__png *z, stbi__uint16 tc[3], int out_n) +{ + stbi__context *s = z->s; + stbi__uint32 i, pixel_count = s->img_x * s->img_y; + stbi__uint16 *p = (stbi__uint16*) z->out; + + // compute color-based transparency, assuming we've + // already got 65535 as the alpha value in the output + STBI_ASSERT(out_n == 2 || out_n == 4); + + if (out_n == 2) { + for (i = 0; i < pixel_count; ++i) { + p[1] = (p[0] == tc[0] ? 
0 : 65535); + p += 2; + } + } else { + for (i = 0; i < pixel_count; ++i) { + if (p[0] == tc[0] && p[1] == tc[1] && p[2] == tc[2]) + p[3] = 0; + p += 4; + } + } + return 1; +} + +static int stbi__expand_png_palette(stbi__png *a, stbi_uc *palette, int len, int pal_img_n) +{ + stbi__uint32 i, pixel_count = a->s->img_x * a->s->img_y; + stbi_uc *p, *temp_out, *orig = a->out; + + p = (stbi_uc *) stbi__malloc_mad2(pixel_count, pal_img_n, 0); + if (p == NULL) return stbi__err("outofmem", "Out of memory"); + + // between here and free(out) below, exitting would leak + temp_out = p; + + if (pal_img_n == 3) { + for (i=0; i < pixel_count; ++i) { + int n = orig[i]*4; + p[0] = palette[n ]; + p[1] = palette[n+1]; + p[2] = palette[n+2]; + p += 3; + } + } else { + for (i=0; i < pixel_count; ++i) { + int n = orig[i]*4; + p[0] = palette[n ]; + p[1] = palette[n+1]; + p[2] = palette[n+2]; + p[3] = palette[n+3]; + p += 4; + } + } + STBI_FREE(a->out); + a->out = temp_out; + + STBI_NOTUSED(len); + + return 1; +} + +static int stbi__unpremultiply_on_load_global = 0; +static int stbi__de_iphone_flag_global = 0; + +STBIDEF void stbi_set_unpremultiply_on_load(int flag_true_if_should_unpremultiply) +{ + stbi__unpremultiply_on_load_global = flag_true_if_should_unpremultiply; +} + +STBIDEF void stbi_convert_iphone_png_to_rgb(int flag_true_if_should_convert) +{ + stbi__de_iphone_flag_global = flag_true_if_should_convert; +} + +#ifndef STBI_THREAD_LOCAL +#define stbi__unpremultiply_on_load stbi__unpremultiply_on_load_global +#define stbi__de_iphone_flag stbi__de_iphone_flag_global +#else +static STBI_THREAD_LOCAL int stbi__unpremultiply_on_load_local, stbi__unpremultiply_on_load_set; +static STBI_THREAD_LOCAL int stbi__de_iphone_flag_local, stbi__de_iphone_flag_set; + +STBIDEF void stbi_set_unpremultiply_on_load_thread(int flag_true_if_should_unpremultiply) +{ + stbi__unpremultiply_on_load_local = flag_true_if_should_unpremultiply; + stbi__unpremultiply_on_load_set = 1; +} + +STBIDEF void stbi_convert_iphone_png_to_rgb_thread(int flag_true_if_should_convert) +{ + stbi__de_iphone_flag_local = flag_true_if_should_convert; + stbi__de_iphone_flag_set = 1; +} + +#define stbi__unpremultiply_on_load (stbi__unpremultiply_on_load_set \ + ? stbi__unpremultiply_on_load_local \ + : stbi__unpremultiply_on_load_global) +#define stbi__de_iphone_flag (stbi__de_iphone_flag_set \ + ? 
stbi__de_iphone_flag_local \ + : stbi__de_iphone_flag_global) +#endif // STBI_THREAD_LOCAL + +static void stbi__de_iphone(stbi__png *z) +{ + stbi__context *s = z->s; + stbi__uint32 i, pixel_count = s->img_x * s->img_y; + stbi_uc *p = z->out; + + if (s->img_out_n == 3) { // convert bgr to rgb + for (i=0; i < pixel_count; ++i) { + stbi_uc t = p[0]; + p[0] = p[2]; + p[2] = t; + p += 3; + } + } else { + STBI_ASSERT(s->img_out_n == 4); + if (stbi__unpremultiply_on_load) { + // convert bgr to rgb and unpremultiply + for (i=0; i < pixel_count; ++i) { + stbi_uc a = p[3]; + stbi_uc t = p[0]; + if (a) { + stbi_uc half = a / 2; + p[0] = (p[2] * 255 + half) / a; + p[1] = (p[1] * 255 + half) / a; + p[2] = ( t * 255 + half) / a; + } else { + p[0] = p[2]; + p[2] = t; + } + p += 4; + } + } else { + // convert bgr to rgb + for (i=0; i < pixel_count; ++i) { + stbi_uc t = p[0]; + p[0] = p[2]; + p[2] = t; + p += 4; + } + } + } +} + +#define STBI__PNG_TYPE(a,b,c,d) (((unsigned) (a) << 24) + ((unsigned) (b) << 16) + ((unsigned) (c) << 8) + (unsigned) (d)) + +static int stbi__parse_png_file(stbi__png *z, int scan, int req_comp) +{ + stbi_uc palette[1024], pal_img_n=0; + stbi_uc has_trans=0, tc[3]={0}; + stbi__uint16 tc16[3]; + stbi__uint32 ioff=0, idata_limit=0, i, pal_len=0; + int first=1,k,interlace=0, color=0, is_iphone=0; + stbi__context *s = z->s; + + z->expanded = NULL; + z->idata = NULL; + z->out = NULL; + + if (!stbi__check_png_header(s)) return 0; + + if (scan == STBI__SCAN_type) return 1; + + for (;;) { + stbi__pngchunk c = stbi__get_chunk_header(s); + switch (c.type) { + case STBI__PNG_TYPE('C','g','B','I'): + is_iphone = 1; + stbi__skip(s, c.length); + break; + case STBI__PNG_TYPE('I','H','D','R'): { + int comp,filter; + if (!first) return stbi__err("multiple IHDR","Corrupt PNG"); + first = 0; + if (c.length != 13) return stbi__err("bad IHDR len","Corrupt PNG"); + s->img_x = stbi__get32be(s); + s->img_y = stbi__get32be(s); + if (s->img_y > STBI_MAX_DIMENSIONS) return stbi__err("too large","Very large image (corrupt?)"); + if (s->img_x > STBI_MAX_DIMENSIONS) return stbi__err("too large","Very large image (corrupt?)"); + z->depth = stbi__get8(s); if (z->depth != 1 && z->depth != 2 && z->depth != 4 && z->depth != 8 && z->depth != 16) return stbi__err("1/2/4/8/16-bit only","PNG not supported: 1/2/4/8/16-bit only"); + color = stbi__get8(s); if (color > 6) return stbi__err("bad ctype","Corrupt PNG"); + if (color == 3 && z->depth == 16) return stbi__err("bad ctype","Corrupt PNG"); + if (color == 3) pal_img_n = 3; else if (color & 1) return stbi__err("bad ctype","Corrupt PNG"); + comp = stbi__get8(s); if (comp) return stbi__err("bad comp method","Corrupt PNG"); + filter= stbi__get8(s); if (filter) return stbi__err("bad filter method","Corrupt PNG"); + interlace = stbi__get8(s); if (interlace>1) return stbi__err("bad interlace method","Corrupt PNG"); + if (!s->img_x || !s->img_y) return stbi__err("0-pixel image","Corrupt PNG"); + if (!pal_img_n) { + s->img_n = (color & 2 ? 3 : 1) + (color & 4 ? 1 : 0); + if ((1 << 30) / s->img_x / s->img_n < s->img_y) return stbi__err("too large", "Image too large to decode"); + } else { + // if paletted, then pal_n is our final components, and + // img_n is # components to decompress/filter. 
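+            // (the palette index stream is filtered as 1 byte per pixel; expansion
+            // to pal_img_n channels happens later, in stbi__expand_png_palette)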
+ s->img_n = 1; + if ((1 << 30) / s->img_x / 4 < s->img_y) return stbi__err("too large","Corrupt PNG"); + } + // even with SCAN_header, have to scan to see if we have a tRNS + break; + } + + case STBI__PNG_TYPE('P','L','T','E'): { + if (first) return stbi__err("first not IHDR", "Corrupt PNG"); + if (c.length > 256*3) return stbi__err("invalid PLTE","Corrupt PNG"); + pal_len = c.length / 3; + if (pal_len * 3 != c.length) return stbi__err("invalid PLTE","Corrupt PNG"); + for (i=0; i < pal_len; ++i) { + palette[i*4+0] = stbi__get8(s); + palette[i*4+1] = stbi__get8(s); + palette[i*4+2] = stbi__get8(s); + palette[i*4+3] = 255; + } + break; + } + + case STBI__PNG_TYPE('t','R','N','S'): { + if (first) return stbi__err("first not IHDR", "Corrupt PNG"); + if (z->idata) return stbi__err("tRNS after IDAT","Corrupt PNG"); + if (pal_img_n) { + if (scan == STBI__SCAN_header) { s->img_n = 4; return 1; } + if (pal_len == 0) return stbi__err("tRNS before PLTE","Corrupt PNG"); + if (c.length > pal_len) return stbi__err("bad tRNS len","Corrupt PNG"); + pal_img_n = 4; + for (i=0; i < c.length; ++i) + palette[i*4+3] = stbi__get8(s); + } else { + if (!(s->img_n & 1)) return stbi__err("tRNS with alpha","Corrupt PNG"); + if (c.length != (stbi__uint32) s->img_n*2) return stbi__err("bad tRNS len","Corrupt PNG"); + has_trans = 1; + // non-paletted with tRNS = constant alpha. if header-scanning, we can stop now. + if (scan == STBI__SCAN_header) { ++s->img_n; return 1; } + if (z->depth == 16) { + for (k = 0; k < s->img_n; ++k) tc16[k] = (stbi__uint16)stbi__get16be(s); // copy the values as-is + } else { + for (k = 0; k < s->img_n; ++k) tc[k] = (stbi_uc)(stbi__get16be(s) & 255) * stbi__depth_scale_table[z->depth]; // non 8-bit images will be larger + } + } + break; + } + + case STBI__PNG_TYPE('I','D','A','T'): { + if (first) return stbi__err("first not IHDR", "Corrupt PNG"); + if (pal_img_n && !pal_len) return stbi__err("no PLTE","Corrupt PNG"); + if (scan == STBI__SCAN_header) { + // header scan definitely stops at first IDAT + if (pal_img_n) + s->img_n = pal_img_n; + return 1; + } + if (c.length > (1u << 30)) return stbi__err("IDAT size limit", "IDAT section larger than 2^30 bytes"); + if ((int)(ioff + c.length) < (int)ioff) return 0; + if (ioff + c.length > idata_limit) { + stbi__uint32 idata_limit_old = idata_limit; + stbi_uc *p; + if (idata_limit == 0) idata_limit = c.length > 4096 ? 
c.length : 4096; + while (ioff + c.length > idata_limit) + idata_limit *= 2; + STBI_NOTUSED(idata_limit_old); + p = (stbi_uc *) STBI_REALLOC_SIZED(z->idata, idata_limit_old, idata_limit); if (p == NULL) return stbi__err("outofmem", "Out of memory"); + z->idata = p; + } + if (!stbi__getn(s, z->idata+ioff,c.length)) return stbi__err("outofdata","Corrupt PNG"); + ioff += c.length; + break; + } + + case STBI__PNG_TYPE('I','E','N','D'): { + stbi__uint32 raw_len, bpl; + if (first) return stbi__err("first not IHDR", "Corrupt PNG"); + if (scan != STBI__SCAN_load) return 1; + if (z->idata == NULL) return stbi__err("no IDAT","Corrupt PNG"); + // initial guess for decoded data size to avoid unnecessary reallocs + bpl = (s->img_x * z->depth + 7) / 8; // bytes per line, per component + raw_len = bpl * s->img_y * s->img_n /* pixels */ + s->img_y /* filter mode per row */; + z->expanded = (stbi_uc *) stbi_zlib_decode_malloc_guesssize_headerflag((char *) z->idata, ioff, raw_len, (int *) &raw_len, !is_iphone); + if (z->expanded == NULL) return 0; // zlib should set error + STBI_FREE(z->idata); z->idata = NULL; + if ((req_comp == s->img_n+1 && req_comp != 3 && !pal_img_n) || has_trans) + s->img_out_n = s->img_n+1; + else + s->img_out_n = s->img_n; + if (!stbi__create_png_image(z, z->expanded, raw_len, s->img_out_n, z->depth, color, interlace)) return 0; + if (has_trans) { + if (z->depth == 16) { + if (!stbi__compute_transparency16(z, tc16, s->img_out_n)) return 0; + } else { + if (!stbi__compute_transparency(z, tc, s->img_out_n)) return 0; + } + } + if (is_iphone && stbi__de_iphone_flag && s->img_out_n > 2) + stbi__de_iphone(z); + if (pal_img_n) { + // pal_img_n == 3 or 4 + s->img_n = pal_img_n; // record the actual colors we had + s->img_out_n = pal_img_n; + if (req_comp >= 3) s->img_out_n = req_comp; + if (!stbi__expand_png_palette(z, palette, pal_len, s->img_out_n)) + return 0; + } else if (has_trans) { + // non-paletted image with tRNS -> source image has (constant) alpha + ++s->img_n; + } + STBI_FREE(z->expanded); z->expanded = NULL; + // end of PNG chunk, read and skip CRC + stbi__get32be(s); + return 1; + } + + default: + // if critical, fail + if (first) return stbi__err("first not IHDR", "Corrupt PNG"); + if ((c.type & (1 << 29)) == 0) { + #ifndef STBI_NO_FAILURE_STRINGS + // not threadsafe + static char invalid_chunk[] = "XXXX PNG chunk not known"; + invalid_chunk[0] = STBI__BYTECAST(c.type >> 24); + invalid_chunk[1] = STBI__BYTECAST(c.type >> 16); + invalid_chunk[2] = STBI__BYTECAST(c.type >> 8); + invalid_chunk[3] = STBI__BYTECAST(c.type >> 0); + #endif + return stbi__err(invalid_chunk, "PNG not supported: unknown PNG chunk type"); + } + stbi__skip(s, c.length); + break; + } + // end of PNG chunk, read and skip CRC + stbi__get32be(s); + } +} + +static void *stbi__do_png(stbi__png *p, int *x, int *y, int *n, int req_comp, stbi__result_info *ri) +{ + void *result=NULL; + if (req_comp < 0 || req_comp > 4) return stbi__errpuc("bad req_comp", "Internal error"); + if (stbi__parse_png_file(p, STBI__SCAN_load, req_comp)) { + if (p->depth <= 8) + ri->bits_per_channel = 8; + else if (p->depth == 16) + ri->bits_per_channel = 16; + else + return stbi__errpuc("bad bits_per_channel", "PNG not supported: unsupported color depth"); + result = p->out; + p->out = NULL; + if (req_comp && req_comp != p->s->img_out_n) { + if (ri->bits_per_channel == 8) + result = stbi__convert_format((unsigned char *) result, p->s->img_out_n, req_comp, p->s->img_x, p->s->img_y); + else + result = stbi__convert_format16((stbi__uint16 
*) result, p->s->img_out_n, req_comp, p->s->img_x, p->s->img_y); + p->s->img_out_n = req_comp; + if (result == NULL) return result; + } + *x = p->s->img_x; + *y = p->s->img_y; + if (n) *n = p->s->img_n; + } + STBI_FREE(p->out); p->out = NULL; + STBI_FREE(p->expanded); p->expanded = NULL; + STBI_FREE(p->idata); p->idata = NULL; + + return result; +} + +static void *stbi__png_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri) +{ + stbi__png p; + p.s = s; + return stbi__do_png(&p, x,y,comp,req_comp, ri); +} + +static int stbi__png_test(stbi__context *s) +{ + int r; + r = stbi__check_png_header(s); + stbi__rewind(s); + return r; +} + +static int stbi__png_info_raw(stbi__png *p, int *x, int *y, int *comp) +{ + if (!stbi__parse_png_file(p, STBI__SCAN_header, 0)) { + stbi__rewind( p->s ); + return 0; + } + if (x) *x = p->s->img_x; + if (y) *y = p->s->img_y; + if (comp) *comp = p->s->img_n; + return 1; +} + +static int stbi__png_info(stbi__context *s, int *x, int *y, int *comp) +{ + stbi__png p; + p.s = s; + return stbi__png_info_raw(&p, x, y, comp); +} + +static int stbi__png_is16(stbi__context *s) +{ + stbi__png p; + p.s = s; + if (!stbi__png_info_raw(&p, NULL, NULL, NULL)) + return 0; + if (p.depth != 16) { + stbi__rewind(p.s); + return 0; + } + return 1; +} +#endif + +// Microsoft/Windows BMP image + +#ifndef STBI_NO_BMP +static int stbi__bmp_test_raw(stbi__context *s) +{ + int r; + int sz; + if (stbi__get8(s) != 'B') return 0; + if (stbi__get8(s) != 'M') return 0; + stbi__get32le(s); // discard filesize + stbi__get16le(s); // discard reserved + stbi__get16le(s); // discard reserved + stbi__get32le(s); // discard data offset + sz = stbi__get32le(s); + r = (sz == 12 || sz == 40 || sz == 56 || sz == 108 || sz == 124); + return r; +} + +static int stbi__bmp_test(stbi__context *s) +{ + int r = stbi__bmp_test_raw(s); + stbi__rewind(s); + return r; +} + + +// returns 0..31 for the highest set bit +static int stbi__high_bit(unsigned int z) +{ + int n=0; + if (z == 0) return -1; + if (z >= 0x10000) { n += 16; z >>= 16; } + if (z >= 0x00100) { n += 8; z >>= 8; } + if (z >= 0x00010) { n += 4; z >>= 4; } + if (z >= 0x00004) { n += 2; z >>= 2; } + if (z >= 0x00002) { n += 1;/* >>= 1;*/ } + return n; +} + +static int stbi__bitcount(unsigned int a) +{ + a = (a & 0x55555555) + ((a >> 1) & 0x55555555); // max 2 + a = (a & 0x33333333) + ((a >> 2) & 0x33333333); // max 4 + a = (a + (a >> 4)) & 0x0f0f0f0f; // max 8 per 4, now 8 bits + a = (a + (a >> 8)); // max 16 per 8 bits + a = (a + (a >> 16)); // max 32 per 8 bits + return a & 0xff; +} + +// extract an arbitrarily-aligned N-bit value (N=bits) +// from v, and then make it 8-bits long and fractionally +// extend it to full full range. 
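+// the mul/shift pair replicates the field's bits across the byte so the output
+// spans the whole 0..255 range; e.g. for 5 bits, (v*0x21)>>2 == (v<<3)|(v>>2):
+// v=31 -> 255, v=16 -> 132 (binary 10000 -> 10000100).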
+static int stbi__shiftsigned(unsigned int v, int shift, int bits) +{ + static unsigned int mul_table[9] = { + 0, + 0xff/*0b11111111*/, 0x55/*0b01010101*/, 0x49/*0b01001001*/, 0x11/*0b00010001*/, + 0x21/*0b00100001*/, 0x41/*0b01000001*/, 0x81/*0b10000001*/, 0x01/*0b00000001*/, + }; + static unsigned int shift_table[9] = { + 0, 0,0,1,0,2,4,6,0, + }; + if (shift < 0) + v <<= -shift; + else + v >>= shift; + STBI_ASSERT(v < 256); + v >>= (8-bits); + STBI_ASSERT(bits >= 0 && bits <= 8); + return (int) ((unsigned) v * mul_table[bits]) >> shift_table[bits]; +} + +typedef struct +{ + int bpp, offset, hsz; + unsigned int mr,mg,mb,ma, all_a; + int extra_read; +} stbi__bmp_data; + +static int stbi__bmp_set_mask_defaults(stbi__bmp_data *info, int compress) +{ + // BI_BITFIELDS specifies masks explicitly, don't override + if (compress == 3) + return 1; + + if (compress == 0) { + if (info->bpp == 16) { + info->mr = 31u << 10; + info->mg = 31u << 5; + info->mb = 31u << 0; + } else if (info->bpp == 32) { + info->mr = 0xffu << 16; + info->mg = 0xffu << 8; + info->mb = 0xffu << 0; + info->ma = 0xffu << 24; + info->all_a = 0; // if all_a is 0 at end, then we loaded alpha channel but it was all 0 + } else { + // otherwise, use defaults, which is all-0 + info->mr = info->mg = info->mb = info->ma = 0; + } + return 1; + } + return 0; // error +} + +static void *stbi__bmp_parse_header(stbi__context *s, stbi__bmp_data *info) +{ + int hsz; + if (stbi__get8(s) != 'B' || stbi__get8(s) != 'M') return stbi__errpuc("not BMP", "Corrupt BMP"); + stbi__get32le(s); // discard filesize + stbi__get16le(s); // discard reserved + stbi__get16le(s); // discard reserved + info->offset = stbi__get32le(s); + info->hsz = hsz = stbi__get32le(s); + info->mr = info->mg = info->mb = info->ma = 0; + info->extra_read = 14; + + if (info->offset < 0) return stbi__errpuc("bad BMP", "bad BMP"); + + if (hsz != 12 && hsz != 40 && hsz != 56 && hsz != 108 && hsz != 124) return stbi__errpuc("unknown BMP", "BMP type not supported: unknown"); + if (hsz == 12) { + s->img_x = stbi__get16le(s); + s->img_y = stbi__get16le(s); + } else { + s->img_x = stbi__get32le(s); + s->img_y = stbi__get32le(s); + } + if (stbi__get16le(s) != 1) return stbi__errpuc("bad BMP", "bad BMP"); + info->bpp = stbi__get16le(s); + if (hsz != 12) { + int compress = stbi__get32le(s); + if (compress == 1 || compress == 2) return stbi__errpuc("BMP RLE", "BMP type not supported: RLE"); + if (compress >= 4) return stbi__errpuc("BMP JPEG/PNG", "BMP type not supported: unsupported compression"); // this includes PNG/JPEG modes + if (compress == 3 && info->bpp != 16 && info->bpp != 32) return stbi__errpuc("bad BMP", "bad BMP"); // bitfields requires 16 or 32 bits/pixel + stbi__get32le(s); // discard sizeof + stbi__get32le(s); // discard hres + stbi__get32le(s); // discard vres + stbi__get32le(s); // discard colorsused + stbi__get32le(s); // discard max important + if (hsz == 40 || hsz == 56) { + if (hsz == 56) { + stbi__get32le(s); + stbi__get32le(s); + stbi__get32le(s); + stbi__get32le(s); + } + if (info->bpp == 16 || info->bpp == 32) { + if (compress == 0) { + stbi__bmp_set_mask_defaults(info, compress); + } else if (compress == 3) { + info->mr = stbi__get32le(s); + info->mg = stbi__get32le(s); + info->mb = stbi__get32le(s); + info->extra_read += 12; + // not documented, but generated by photoshop and handled by mspaint + if (info->mr == info->mg && info->mg == info->mb) { + // ?!?!? 
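+                  // three identical masks can't describe three distinct channels,
+                  // so treat the file as malformed rather than guess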
+ return stbi__errpuc("bad BMP", "bad BMP"); + } + } else + return stbi__errpuc("bad BMP", "bad BMP"); + } + } else { + // V4/V5 header + int i; + if (hsz != 108 && hsz != 124) + return stbi__errpuc("bad BMP", "bad BMP"); + info->mr = stbi__get32le(s); + info->mg = stbi__get32le(s); + info->mb = stbi__get32le(s); + info->ma = stbi__get32le(s); + if (compress != 3) // override mr/mg/mb unless in BI_BITFIELDS mode, as per docs + stbi__bmp_set_mask_defaults(info, compress); + stbi__get32le(s); // discard color space + for (i=0; i < 12; ++i) + stbi__get32le(s); // discard color space parameters + if (hsz == 124) { + stbi__get32le(s); // discard rendering intent + stbi__get32le(s); // discard offset of profile data + stbi__get32le(s); // discard size of profile data + stbi__get32le(s); // discard reserved + } + } + } + return (void *) 1; +} + + +static void *stbi__bmp_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri) +{ + stbi_uc *out; + unsigned int mr=0,mg=0,mb=0,ma=0, all_a; + stbi_uc pal[256][4]; + int psize=0,i,j,width; + int flip_vertically, pad, target; + stbi__bmp_data info; + STBI_NOTUSED(ri); + + info.all_a = 255; + if (stbi__bmp_parse_header(s, &info) == NULL) + return NULL; // error code already set + + flip_vertically = ((int) s->img_y) > 0; + s->img_y = abs((int) s->img_y); + + if (s->img_y > STBI_MAX_DIMENSIONS) return stbi__errpuc("too large","Very large image (corrupt?)"); + if (s->img_x > STBI_MAX_DIMENSIONS) return stbi__errpuc("too large","Very large image (corrupt?)"); + + mr = info.mr; + mg = info.mg; + mb = info.mb; + ma = info.ma; + all_a = info.all_a; + + if (info.hsz == 12) { + if (info.bpp < 24) + psize = (info.offset - info.extra_read - 24) / 3; + } else { + if (info.bpp < 16) + psize = (info.offset - info.extra_read - info.hsz) >> 2; + } + if (psize == 0) { + // accept some number of extra bytes after the header, but if the offset points either to before + // the header ends or implies a large amount of extra data, reject the file as malformed + int bytes_read_so_far = s->callback_already_read + (int)(s->img_buffer - s->img_buffer_original); + int header_limit = 1024; // max we actually read is below 256 bytes currently. + int extra_data_limit = 256*4; // what ordinarily goes here is a palette; 256 entries*4 bytes is its max size. + if (bytes_read_so_far <= 0 || bytes_read_so_far > header_limit) { + return stbi__errpuc("bad header", "Corrupt BMP"); + } + // we established that bytes_read_so_far is positive and sensible. + // the first half of this test rejects offsets that are either too small positives, or + // negative, and guarantees that info.offset >= bytes_read_so_far > 0. this in turn + // ensures the number computed in the second half of the test can't overflow. + if (info.offset < bytes_read_so_far || info.offset - bytes_read_so_far > extra_data_limit) { + return stbi__errpuc("bad offset", "Corrupt BMP"); + } else { + stbi__skip(s, info.offset - bytes_read_so_far); + } + } + + if (info.bpp == 24 && ma == 0xff000000) + s->img_n = 3; + else + s->img_n = ma ? 
4 : 3; + if (req_comp && req_comp >= 3) // we can directly decode 3 or 4 + target = req_comp; + else + target = s->img_n; // if they want monochrome, we'll post-convert + + // sanity-check size + if (!stbi__mad3sizes_valid(target, s->img_x, s->img_y, 0)) + return stbi__errpuc("too large", "Corrupt BMP"); + + out = (stbi_uc *) stbi__malloc_mad3(target, s->img_x, s->img_y, 0); + if (!out) return stbi__errpuc("outofmem", "Out of memory"); + if (info.bpp < 16) { + int z=0; + if (psize == 0 || psize > 256) { STBI_FREE(out); return stbi__errpuc("invalid", "Corrupt BMP"); } + for (i=0; i < psize; ++i) { + pal[i][2] = stbi__get8(s); + pal[i][1] = stbi__get8(s); + pal[i][0] = stbi__get8(s); + if (info.hsz != 12) stbi__get8(s); + pal[i][3] = 255; + } + stbi__skip(s, info.offset - info.extra_read - info.hsz - psize * (info.hsz == 12 ? 3 : 4)); + if (info.bpp == 1) width = (s->img_x + 7) >> 3; + else if (info.bpp == 4) width = (s->img_x + 1) >> 1; + else if (info.bpp == 8) width = s->img_x; + else { STBI_FREE(out); return stbi__errpuc("bad bpp", "Corrupt BMP"); } + pad = (-width)&3; + if (info.bpp == 1) { + for (j=0; j < (int) s->img_y; ++j) { + int bit_offset = 7, v = stbi__get8(s); + for (i=0; i < (int) s->img_x; ++i) { + int color = (v>>bit_offset)&0x1; + out[z++] = pal[color][0]; + out[z++] = pal[color][1]; + out[z++] = pal[color][2]; + if (target == 4) out[z++] = 255; + if (i+1 == (int) s->img_x) break; + if((--bit_offset) < 0) { + bit_offset = 7; + v = stbi__get8(s); + } + } + stbi__skip(s, pad); + } + } else { + for (j=0; j < (int) s->img_y; ++j) { + for (i=0; i < (int) s->img_x; i += 2) { + int v=stbi__get8(s),v2=0; + if (info.bpp == 4) { + v2 = v & 15; + v >>= 4; + } + out[z++] = pal[v][0]; + out[z++] = pal[v][1]; + out[z++] = pal[v][2]; + if (target == 4) out[z++] = 255; + if (i+1 == (int) s->img_x) break; + v = (info.bpp == 8) ? stbi__get8(s) : v2; + out[z++] = pal[v][0]; + out[z++] = pal[v][1]; + out[z++] = pal[v][2]; + if (target == 4) out[z++] = 255; + } + stbi__skip(s, pad); + } + } + } else { + int rshift=0,gshift=0,bshift=0,ashift=0,rcount=0,gcount=0,bcount=0,acount=0; + int z = 0; + int easy=0; + stbi__skip(s, info.offset - info.extra_read - info.hsz); + if (info.bpp == 24) width = 3 * s->img_x; + else if (info.bpp == 16) width = 2*s->img_x; + else /* bpp = 32 and pad = 0 */ width=0; + pad = (-width) & 3; + if (info.bpp == 24) { + easy = 1; + } else if (info.bpp == 32) { + if (mb == 0xff && mg == 0xff00 && mr == 0x00ff0000 && ma == 0xff000000) + easy = 2; + } + if (!easy) { + if (!mr || !mg || !mb) { STBI_FREE(out); return stbi__errpuc("bad masks", "Corrupt BMP"); } + // right shift amt to put high bit in position #7 + rshift = stbi__high_bit(mr)-7; rcount = stbi__bitcount(mr); + gshift = stbi__high_bit(mg)-7; gcount = stbi__bitcount(mg); + bshift = stbi__high_bit(mb)-7; bcount = stbi__bitcount(mb); + ashift = stbi__high_bit(ma)-7; acount = stbi__bitcount(ma); + if (rcount > 8 || gcount > 8 || bcount > 8 || acount > 8) { STBI_FREE(out); return stbi__errpuc("bad masks", "Corrupt BMP"); } + } + for (j=0; j < (int) s->img_y; ++j) { + if (easy) { + for (i=0; i < (int) s->img_x; ++i) { + unsigned char a; + out[z+2] = stbi__get8(s); + out[z+1] = stbi__get8(s); + out[z+0] = stbi__get8(s); + z += 3; + a = (easy == 2 ? stbi__get8(s) : 255); + all_a |= a; + if (target == 4) out[z++] = a; + } + } else { + int bpp = info.bpp; + for (i=0; i < (int) s->img_x; ++i) { + stbi__uint32 v = (bpp == 16 ? 
(stbi__uint32) stbi__get16le(s) : stbi__get32le(s)); + unsigned int a; + out[z++] = STBI__BYTECAST(stbi__shiftsigned(v & mr, rshift, rcount)); + out[z++] = STBI__BYTECAST(stbi__shiftsigned(v & mg, gshift, gcount)); + out[z++] = STBI__BYTECAST(stbi__shiftsigned(v & mb, bshift, bcount)); + a = (ma ? stbi__shiftsigned(v & ma, ashift, acount) : 255); + all_a |= a; + if (target == 4) out[z++] = STBI__BYTECAST(a); + } + } + stbi__skip(s, pad); + } + } + + // if alpha channel is all 0s, replace with all 255s + if (target == 4 && all_a == 0) + for (i=4*s->img_x*s->img_y-1; i >= 0; i -= 4) + out[i] = 255; + + if (flip_vertically) { + stbi_uc t; + for (j=0; j < (int) s->img_y>>1; ++j) { + stbi_uc *p1 = out + j *s->img_x*target; + stbi_uc *p2 = out + (s->img_y-1-j)*s->img_x*target; + for (i=0; i < (int) s->img_x*target; ++i) { + t = p1[i]; p1[i] = p2[i]; p2[i] = t; + } + } + } + + if (req_comp && req_comp != target) { + out = stbi__convert_format(out, target, req_comp, s->img_x, s->img_y); + if (out == NULL) return out; // stbi__convert_format frees input on failure + } + + *x = s->img_x; + *y = s->img_y; + if (comp) *comp = s->img_n; + return out; +} +#endif + +// Targa Truevision - TGA +// by Jonathan Dummer +#ifndef STBI_NO_TGA +// returns STBI_rgb or whatever, 0 on error +static int stbi__tga_get_comp(int bits_per_pixel, int is_grey, int* is_rgb16) +{ + // only RGB or RGBA (incl. 16bit) or grey allowed + if (is_rgb16) *is_rgb16 = 0; + switch(bits_per_pixel) { + case 8: return STBI_grey; + case 16: if(is_grey) return STBI_grey_alpha; + // fallthrough + case 15: if(is_rgb16) *is_rgb16 = 1; + return STBI_rgb; + case 24: // fallthrough + case 32: return bits_per_pixel/8; + default: return 0; + } +} + +static int stbi__tga_info(stbi__context *s, int *x, int *y, int *comp) +{ + int tga_w, tga_h, tga_comp, tga_image_type, tga_bits_per_pixel, tga_colormap_bpp; + int sz, tga_colormap_type; + stbi__get8(s); // discard Offset + tga_colormap_type = stbi__get8(s); // colormap type + if( tga_colormap_type > 1 ) { + stbi__rewind(s); + return 0; // only RGB or indexed allowed + } + tga_image_type = stbi__get8(s); // image type + if ( tga_colormap_type == 1 ) { // colormapped (paletted) image + if (tga_image_type != 1 && tga_image_type != 9) { + stbi__rewind(s); + return 0; + } + stbi__skip(s,4); // skip index of first colormap entry and number of entries + sz = stbi__get8(s); // check bits per palette color entry + if ( (sz != 8) && (sz != 15) && (sz != 16) && (sz != 24) && (sz != 32) ) { + stbi__rewind(s); + return 0; + } + stbi__skip(s,4); // skip image x and y origin + tga_colormap_bpp = sz; + } else { // "normal" image w/o colormap - only RGB or grey allowed, +/- RLE + if ( (tga_image_type != 2) && (tga_image_type != 3) && (tga_image_type != 10) && (tga_image_type != 11) ) { + stbi__rewind(s); + return 0; // only RGB or grey allowed, +/- RLE + } + stbi__skip(s,9); // skip colormap specification and image x/y origin + tga_colormap_bpp = 0; + } + tga_w = stbi__get16le(s); + if( tga_w < 1 ) { + stbi__rewind(s); + return 0; // test width + } + tga_h = stbi__get16le(s); + if( tga_h < 1 ) { + stbi__rewind(s); + return 0; // test height + } + tga_bits_per_pixel = stbi__get8(s); // bits per pixel + stbi__get8(s); // ignore alpha bits + if (tga_colormap_bpp != 0) { + if((tga_bits_per_pixel != 8) && (tga_bits_per_pixel != 16)) { + // when using a colormap, tga_bits_per_pixel is the size of the indexes + // I don't think anything but 8 or 16bit indexes makes sense + stbi__rewind(s); + return 0; + } + tga_comp = 
stbi__tga_get_comp(tga_colormap_bpp, 0, NULL); + } else { + tga_comp = stbi__tga_get_comp(tga_bits_per_pixel, (tga_image_type == 3) || (tga_image_type == 11), NULL); + } + if(!tga_comp) { + stbi__rewind(s); + return 0; + } + if (x) *x = tga_w; + if (y) *y = tga_h; + if (comp) *comp = tga_comp; + return 1; // seems to have passed everything +} + +static int stbi__tga_test(stbi__context *s) +{ + int res = 0; + int sz, tga_color_type; + stbi__get8(s); // discard Offset + tga_color_type = stbi__get8(s); // color type + if ( tga_color_type > 1 ) goto errorEnd; // only RGB or indexed allowed + sz = stbi__get8(s); // image type + if ( tga_color_type == 1 ) { // colormapped (paletted) image + if (sz != 1 && sz != 9) goto errorEnd; // colortype 1 demands image type 1 or 9 + stbi__skip(s,4); // skip index of first colormap entry and number of entries + sz = stbi__get8(s); // check bits per palette color entry + if ( (sz != 8) && (sz != 15) && (sz != 16) && (sz != 24) && (sz != 32) ) goto errorEnd; + stbi__skip(s,4); // skip image x and y origin + } else { // "normal" image w/o colormap + if ( (sz != 2) && (sz != 3) && (sz != 10) && (sz != 11) ) goto errorEnd; // only RGB or grey allowed, +/- RLE + stbi__skip(s,9); // skip colormap specification and image x/y origin + } + if ( stbi__get16le(s) < 1 ) goto errorEnd; // test width + if ( stbi__get16le(s) < 1 ) goto errorEnd; // test height + sz = stbi__get8(s); // bits per pixel + if ( (tga_color_type == 1) && (sz != 8) && (sz != 16) ) goto errorEnd; // for colormapped images, bpp is size of an index + if ( (sz != 8) && (sz != 15) && (sz != 16) && (sz != 24) && (sz != 32) ) goto errorEnd; + + res = 1; // if we got this far, everything's good and we can return 1 instead of 0 + +errorEnd: + stbi__rewind(s); + return res; +} + +// read 16bit value and convert to 24bit RGB +static void stbi__tga_read_rgb16(stbi__context *s, stbi_uc* out) +{ + stbi__uint16 px = (stbi__uint16)stbi__get16le(s); + stbi__uint16 fiveBitMask = 31; + // we have 3 channels with 5bits each + int r = (px >> 10) & fiveBitMask; + int g = (px >> 5) & fiveBitMask; + int b = px & fiveBitMask; + // Note that this saves the data in RGB(A) order, so it doesn't need to be swapped later + out[0] = (stbi_uc)((r * 255)/31); + out[1] = (stbi_uc)((g * 255)/31); + out[2] = (stbi_uc)((b * 255)/31); + + // some people claim that the most significant bit might be used for alpha + // (possibly if an alpha-bit is set in the "image descriptor byte") + // but that only made 16bit test images completely translucent.. + // so let's treat all 15 and 16bit TGAs as RGB with no alpha. +} + +static void *stbi__tga_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri) +{ + // read in the TGA header stuff + int tga_offset = stbi__get8(s); + int tga_indexed = stbi__get8(s); + int tga_image_type = stbi__get8(s); + int tga_is_RLE = 0; + int tga_palette_start = stbi__get16le(s); + int tga_palette_len = stbi__get16le(s); + int tga_palette_bits = stbi__get8(s); + int tga_x_origin = stbi__get16le(s); + int tga_y_origin = stbi__get16le(s); + int tga_width = stbi__get16le(s); + int tga_height = stbi__get16le(s); + int tga_bits_per_pixel = stbi__get8(s); + int tga_comp, tga_rgb16=0; + int tga_inverted = stbi__get8(s); + // int tga_alpha_bits = tga_inverted & 15; // the 4 lowest bits - unused (useless?) 
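+   // For orientation: the reads above consume the fixed 18-byte TGA header,
+   // which (per the Truevision spec) is laid out roughly as:
+   //   byte  0      id-field length (the "offset" skipped further down)
+   //   byte  1      colormap type (0 = none, 1 = paletted)
+   //   byte  2      image type (1/2/3 = raw, 9/10/11 = the same plus RLE)
+   //   bytes 3-7    colormap spec: first entry, entry count, bits per entry
+   //   bytes 8-11   x/y origin (little-endian 16-bit each)
+   //   bytes 12-15  width and height (little-endian 16-bit each)
+   //   byte  16     bits per pixel
+   //   byte  17     image descriptor (bit 5 = top-to-bottom flag, tested below)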
+ // image data + unsigned char *tga_data; + unsigned char *tga_palette = NULL; + int i, j; + unsigned char raw_data[4] = {0}; + int RLE_count = 0; + int RLE_repeating = 0; + int read_next_pixel = 1; + STBI_NOTUSED(ri); + STBI_NOTUSED(tga_x_origin); // @TODO + STBI_NOTUSED(tga_y_origin); // @TODO + + if (tga_height > STBI_MAX_DIMENSIONS) return stbi__errpuc("too large","Very large image (corrupt?)"); + if (tga_width > STBI_MAX_DIMENSIONS) return stbi__errpuc("too large","Very large image (corrupt?)"); + + // do a tiny bit of precessing + if ( tga_image_type >= 8 ) + { + tga_image_type -= 8; + tga_is_RLE = 1; + } + tga_inverted = 1 - ((tga_inverted >> 5) & 1); + + // If I'm paletted, then I'll use the number of bits from the palette + if ( tga_indexed ) tga_comp = stbi__tga_get_comp(tga_palette_bits, 0, &tga_rgb16); + else tga_comp = stbi__tga_get_comp(tga_bits_per_pixel, (tga_image_type == 3), &tga_rgb16); + + if(!tga_comp) // shouldn't really happen, stbi__tga_test() should have ensured basic consistency + return stbi__errpuc("bad format", "Can't find out TGA pixelformat"); + + // tga info + *x = tga_width; + *y = tga_height; + if (comp) *comp = tga_comp; + + if (!stbi__mad3sizes_valid(tga_width, tga_height, tga_comp, 0)) + return stbi__errpuc("too large", "Corrupt TGA"); + + tga_data = (unsigned char*)stbi__malloc_mad3(tga_width, tga_height, tga_comp, 0); + if (!tga_data) return stbi__errpuc("outofmem", "Out of memory"); + + // skip to the data's starting position (offset usually = 0) + stbi__skip(s, tga_offset ); + + if ( !tga_indexed && !tga_is_RLE && !tga_rgb16 ) { + for (i=0; i < tga_height; ++i) { + int row = tga_inverted ? tga_height -i - 1 : i; + stbi_uc *tga_row = tga_data + row*tga_width*tga_comp; + stbi__getn(s, tga_row, tga_width * tga_comp); + } + } else { + // do I need to load a palette? + if ( tga_indexed) + { + if (tga_palette_len == 0) { /* you have to have at least one entry! */ + STBI_FREE(tga_data); + return stbi__errpuc("bad palette", "Corrupt TGA"); + } + + // any data to skip? (offset usually = 0) + stbi__skip(s, tga_palette_start ); + // load the palette + tga_palette = (unsigned char*)stbi__malloc_mad2(tga_palette_len, tga_comp, 0); + if (!tga_palette) { + STBI_FREE(tga_data); + return stbi__errpuc("outofmem", "Out of memory"); + } + if (tga_rgb16) { + stbi_uc *pal_entry = tga_palette; + STBI_ASSERT(tga_comp == STBI_rgb); + for (i=0; i < tga_palette_len; ++i) { + stbi__tga_read_rgb16(s, pal_entry); + pal_entry += tga_comp; + } + } else if (!stbi__getn(s, tga_palette, tga_palette_len * tga_comp)) { + STBI_FREE(tga_data); + STBI_FREE(tga_palette); + return stbi__errpuc("bad palette", "Corrupt TGA"); + } + } + // load the data + for (i=0; i < tga_width * tga_height; ++i) + { + // if I'm in RLE mode, do I need to get a RLE stbi__pngchunk? + if ( tga_is_RLE ) + { + if ( RLE_count == 0 ) + { + // yep, get the next byte as a RLE command + int RLE_cmd = stbi__get8(s); + RLE_count = 1 + (RLE_cmd & 127); + RLE_repeating = RLE_cmd >> 7; + read_next_pixel = 1; + } else if ( !RLE_repeating ) + { + read_next_pixel = 1; + } + } else + { + read_next_pixel = 1; + } + // OK, if I need to read a pixel, do it now + if ( read_next_pixel ) + { + // load however much data we did have + if ( tga_indexed ) + { + // read in index, then perform the lookup + int pal_idx = (tga_bits_per_pixel == 8) ? 
stbi__get8(s) : stbi__get16le(s); + if ( pal_idx >= tga_palette_len ) { + // invalid index + pal_idx = 0; + } + pal_idx *= tga_comp; + for (j = 0; j < tga_comp; ++j) { + raw_data[j] = tga_palette[pal_idx+j]; + } + } else if(tga_rgb16) { + STBI_ASSERT(tga_comp == STBI_rgb); + stbi__tga_read_rgb16(s, raw_data); + } else { + // read in the data raw + for (j = 0; j < tga_comp; ++j) { + raw_data[j] = stbi__get8(s); + } + } + // clear the reading flag for the next pixel + read_next_pixel = 0; + } // end of reading a pixel + + // copy data + for (j = 0; j < tga_comp; ++j) + tga_data[i*tga_comp+j] = raw_data[j]; + + // in case we're in RLE mode, keep counting down + --RLE_count; + } + // do I need to invert the image? + if ( tga_inverted ) + { + for (j = 0; j*2 < tga_height; ++j) + { + int index1 = j * tga_width * tga_comp; + int index2 = (tga_height - 1 - j) * tga_width * tga_comp; + for (i = tga_width * tga_comp; i > 0; --i) + { + unsigned char temp = tga_data[index1]; + tga_data[index1] = tga_data[index2]; + tga_data[index2] = temp; + ++index1; + ++index2; + } + } + } + // clear my palette, if I had one + if ( tga_palette != NULL ) + { + STBI_FREE( tga_palette ); + } + } + + // swap RGB - if the source data was RGB16, it already is in the right order + if (tga_comp >= 3 && !tga_rgb16) + { + unsigned char* tga_pixel = tga_data; + for (i=0; i < tga_width * tga_height; ++i) + { + unsigned char temp = tga_pixel[0]; + tga_pixel[0] = tga_pixel[2]; + tga_pixel[2] = temp; + tga_pixel += tga_comp; + } + } + + // convert to target component count + if (req_comp && req_comp != tga_comp) + tga_data = stbi__convert_format(tga_data, tga_comp, req_comp, tga_width, tga_height); + + // the things I do to get rid of an error message, and yet keep + // Microsoft's C compilers happy... [8^( + tga_palette_start = tga_palette_len = tga_palette_bits = + tga_x_origin = tga_y_origin = 0; + STBI_NOTUSED(tga_palette_start); + // OK, done + return tga_data; +} +#endif + +// ************************************************************************************************* +// Photoshop PSD loader -- PD by Thatcher Ulrich, integration by Nicolas Schulz, tweaked by STB + +#ifndef STBI_NO_PSD +static int stbi__psd_test(stbi__context *s) +{ + int r = (stbi__get32be(s) == 0x38425053); + stbi__rewind(s); + return r; +} + +static int stbi__psd_decode_rle(stbi__context *s, stbi_uc *p, int pixelCount) +{ + int count, nleft, len; + + count = 0; + while ((nleft = pixelCount - count) > 0) { + len = stbi__get8(s); + if (len == 128) { + // No-op. + } else if (len < 128) { + // Copy next len+1 bytes literally. + len++; + if (len > nleft) return 0; // corrupt data + count += len; + while (len) { + *p = stbi__get8(s); + p += 4; + len--; + } + } else if (len > 128) { + stbi_uc val; + // Next -len+1 bytes in the dest are replicated from next source byte. + // (Interpret len as a negative 8-bit int.) + len = 257 - len; + if (len > nleft) return 0; // corrupt data + val = stbi__get8(s); + count += len; + while (len) { + *p = val; + p += 4; + len--; + } + } + } + + return 1; +} + +static void *stbi__psd_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri, int bpc) +{ + int pixelCount; + int channelCount, compression; + int channel, i; + int bitdepth; + int w,h; + stbi_uc *out; + STBI_NOTUSED(ri); + + // Check identifier + if (stbi__get32be(s) != 0x38425053) // "8BPS" + return stbi__errpuc("not PSD", "Corrupt PSD image"); + + // Check file type version. 
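+   // (Version 1 is the classic PSD container; version 2 is the large-document
+   // PSB variant, which this loader does not attempt to parse.)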
+ if (stbi__get16be(s) != 1) + return stbi__errpuc("wrong version", "Unsupported version of PSD image"); + + // Skip 6 reserved bytes. + stbi__skip(s, 6 ); + + // Read the number of channels (R, G, B, A, etc). + channelCount = stbi__get16be(s); + if (channelCount < 0 || channelCount > 16) + return stbi__errpuc("wrong channel count", "Unsupported number of channels in PSD image"); + + // Read the rows and columns of the image. + h = stbi__get32be(s); + w = stbi__get32be(s); + + if (h > STBI_MAX_DIMENSIONS) return stbi__errpuc("too large","Very large image (corrupt?)"); + if (w > STBI_MAX_DIMENSIONS) return stbi__errpuc("too large","Very large image (corrupt?)"); + + // Make sure the depth is 8 bits. + bitdepth = stbi__get16be(s); + if (bitdepth != 8 && bitdepth != 16) + return stbi__errpuc("unsupported bit depth", "PSD bit depth is not 8 or 16 bit"); + + // Make sure the color mode is RGB. + // Valid options are: + // 0: Bitmap + // 1: Grayscale + // 2: Indexed color + // 3: RGB color + // 4: CMYK color + // 7: Multichannel + // 8: Duotone + // 9: Lab color + if (stbi__get16be(s) != 3) + return stbi__errpuc("wrong color format", "PSD is not in RGB color format"); + + // Skip the Mode Data. (It's the palette for indexed color; other info for other modes.) + stbi__skip(s,stbi__get32be(s) ); + + // Skip the image resources. (resolution, pen tool paths, etc) + stbi__skip(s, stbi__get32be(s) ); + + // Skip the reserved data. + stbi__skip(s, stbi__get32be(s) ); + + // Find out if the data is compressed. + // Known values: + // 0: no compression + // 1: RLE compressed + compression = stbi__get16be(s); + if (compression > 1) + return stbi__errpuc("bad compression", "PSD has an unknown compression format"); + + // Check size + if (!stbi__mad3sizes_valid(4, w, h, 0)) + return stbi__errpuc("too large", "Corrupt PSD"); + + // Create the destination image. + + if (!compression && bitdepth == 16 && bpc == 16) { + out = (stbi_uc *) stbi__malloc_mad3(8, w, h, 0); + ri->bits_per_channel = 16; + } else + out = (stbi_uc *) stbi__malloc(4 * w*h); + + if (!out) return stbi__errpuc("outofmem", "Out of memory"); + pixelCount = w*h; + + // Initialize the data to zero. + //memset( out, 0, pixelCount * 4 ); + + // Finally, the image data. + if (compression) { + // RLE as used by .PSD and .TIFF + // Loop until you get the number of unpacked bytes you are expecting: + // Read the next source byte into n. + // If n is between 0 and 127 inclusive, copy the next n+1 bytes literally. + // Else if n is between -127 and -1 inclusive, copy the next byte -n+1 times. + // Else if n is 128, noop. + // Endloop + + // The RLE-compressed data is preceded by a 2-byte data count for each row in the data, + // which we're going to just skip. + stbi__skip(s, h * channelCount * 2 ); + + // Read the RLE data by channel. + for (channel = 0; channel < 4; channel++) { + stbi_uc *p; + + p = out+channel; + if (channel >= channelCount) { + // Fill this channel with default data. + for (i = 0; i < pixelCount; i++, p += 4) + *p = (channel == 3 ? 255 : 0); + } else { + // Read the RLE data. + if (!stbi__psd_decode_rle(s, p, pixelCount)) { + STBI_FREE(out); + return stbi__errpuc("corrupt", "bad RLE data"); + } + } + } + + } else { + // We're at the raw image data. It's each channel in order (Red, Green, Blue, Alpha, ...) + // where each channel consists of an 8-bit (or 16-bit) value for each pixel in the image. + + // Read the data by channel. 
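+      // For example, a 2x1 RGB PSD stores its samples planar as R0 R1 G0 G1 B0 B1;
+      // the loop below interleaves them into out[] as R0 G0 B0 A0 R1 G1 B1 A1,
+      // stepping the destination by 4 per pixel and synthesizing the absent alpha.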
+ for (channel = 0; channel < 4; channel++) { + if (channel >= channelCount) { + // Fill this channel with default data. + if (bitdepth == 16 && bpc == 16) { + stbi__uint16 *q = ((stbi__uint16 *) out) + channel; + stbi__uint16 val = channel == 3 ? 65535 : 0; + for (i = 0; i < pixelCount; i++, q += 4) + *q = val; + } else { + stbi_uc *p = out+channel; + stbi_uc val = channel == 3 ? 255 : 0; + for (i = 0; i < pixelCount; i++, p += 4) + *p = val; + } + } else { + if (ri->bits_per_channel == 16) { // output bpc + stbi__uint16 *q = ((stbi__uint16 *) out) + channel; + for (i = 0; i < pixelCount; i++, q += 4) + *q = (stbi__uint16) stbi__get16be(s); + } else { + stbi_uc *p = out+channel; + if (bitdepth == 16) { // input bpc + for (i = 0; i < pixelCount; i++, p += 4) + *p = (stbi_uc) (stbi__get16be(s) >> 8); + } else { + for (i = 0; i < pixelCount; i++, p += 4) + *p = stbi__get8(s); + } + } + } + } + } + + // remove weird white matte from PSD + if (channelCount >= 4) { + if (ri->bits_per_channel == 16) { + for (i=0; i < w*h; ++i) { + stbi__uint16 *pixel = (stbi__uint16 *) out + 4*i; + if (pixel[3] != 0 && pixel[3] != 65535) { + float a = pixel[3] / 65535.0f; + float ra = 1.0f / a; + float inv_a = 65535.0f * (1 - ra); + pixel[0] = (stbi__uint16) (pixel[0]*ra + inv_a); + pixel[1] = (stbi__uint16) (pixel[1]*ra + inv_a); + pixel[2] = (stbi__uint16) (pixel[2]*ra + inv_a); + } + } + } else { + for (i=0; i < w*h; ++i) { + unsigned char *pixel = out + 4*i; + if (pixel[3] != 0 && pixel[3] != 255) { + float a = pixel[3] / 255.0f; + float ra = 1.0f / a; + float inv_a = 255.0f * (1 - ra); + pixel[0] = (unsigned char) (pixel[0]*ra + inv_a); + pixel[1] = (unsigned char) (pixel[1]*ra + inv_a); + pixel[2] = (unsigned char) (pixel[2]*ra + inv_a); + } + } + } + } + + // convert to desired output format + if (req_comp && req_comp != 4) { + if (ri->bits_per_channel == 16) + out = (stbi_uc *) stbi__convert_format16((stbi__uint16 *) out, 4, req_comp, w, h); + else + out = stbi__convert_format(out, 4, req_comp, w, h); + if (out == NULL) return out; // stbi__convert_format frees input on failure + } + + if (comp) *comp = 4; + *y = h; + *x = w; + + return out; +} +#endif + +// ************************************************************************************************* +// Softimage PIC loader +// by Tom Seddon +// +// See http://softimage.wiki.softimage.com/index.php/INFO:_PIC_file_format +// See http://ozviz.wasp.uwa.edu.au/~pbourke/dataformats/softimagepic/ + +#ifndef STBI_NO_PIC +static int stbi__pic_is4(stbi__context *s,const char *str) +{ + int i; + for (i=0; i<4; ++i) + if (stbi__get8(s) != (stbi_uc)str[i]) + return 0; + + return 1; +} + +static int stbi__pic_test_core(stbi__context *s) +{ + int i; + + if (!stbi__pic_is4(s,"\x53\x80\xF6\x34")) + return 0; + + for(i=0;i<84;++i) + stbi__get8(s); + + if (!stbi__pic_is4(s,"PICT")) + return 0; + + return 1; +} + +typedef struct +{ + stbi_uc size,type,channel; +} stbi__pic_packet; + +static stbi_uc *stbi__readval(stbi__context *s, int channel, stbi_uc *dest) +{ + int mask=0x80, i; + + for (i=0; i<4; ++i, mask>>=1) { + if (channel & mask) { + if (stbi__at_eof(s)) return stbi__errpuc("bad file","PIC file too short"); + dest[i]=stbi__get8(s); + } + } + + return dest; +} + +static void stbi__copyval(int channel,stbi_uc *dest,const stbi_uc *src) +{ + int mask=0x80,i; + + for (i=0;i<4; ++i, mask>>=1) + if (channel&mask) + dest[i]=src[i]; +} + +static stbi_uc *stbi__pic_load_core(stbi__context *s,int width,int height,int *comp, stbi_uc *result) +{ + int 
act_comp=0,num_packets=0,y,chained;
+   stbi__pic_packet packets[10];
+
+   // this will (should...) cater for even some bizarre stuff like having data
+   // for the same channel in multiple packets.
+   do {
+      stbi__pic_packet *packet;
+
+      if (num_packets==sizeof(packets)/sizeof(packets[0]))
+         return stbi__errpuc("bad format","too many packets");
+
+      packet = &packets[num_packets++];
+
+      chained = stbi__get8(s);
+      packet->size    = stbi__get8(s);
+      packet->type    = stbi__get8(s);
+      packet->channel = stbi__get8(s);
+
+      act_comp |= packet->channel;
+
+      if (stbi__at_eof(s))    return stbi__errpuc("bad file","file too short (reading packets)");
+      if (packet->size != 8)  return stbi__errpuc("bad format","packet isn't 8bpp");
+   } while (chained);
+
+   *comp = (act_comp & 0x10 ? 4 : 3); // has alpha channel?
+
+   for(y=0; y<height; ++y) {
+      int packet_idx;
+
+      for(packet_idx=0; packet_idx < num_packets; ++packet_idx) {
+         stbi__pic_packet *packet = &packets[packet_idx];
+         stbi_uc *dest = result+y*width*4;
+
+         switch (packet->type) {
+            default:
+               return stbi__errpuc("bad format","packet has bad compression type");
+
+            case 0: {//uncompressed
+               int x;
+
+               for(x=0;x<width;++x, dest+=4)
+                  if (!stbi__readval(s,packet->channel,dest))
+                     return 0;
+               break;
+            }
+
+            case 1://Pure RLE
+               {
+                  int left=width, i;
+
+                  while (left>0) {
+                     stbi_uc count,value[4];
+
+                     count=stbi__get8(s);
+                     if (stbi__at_eof(s))   return stbi__errpuc("bad file","file too short (pure read count)");
+
+                     if (count > left)
+                        count = (stbi_uc) left;
+
+                     if (!stbi__readval(s,packet->channel,value))  return 0;
+
+                     for(i=0; i<count; ++i,dest+=4)
+                        stbi__copyval(packet->channel,dest,value);
+                     left -= count;
+                  }
+               }
+               break;
+
+            case 2: {//Mixed RLE
+               int left=width;
+               while (left>0) {
+                  int count = stbi__get8(s), i;
+                  if (stbi__at_eof(s))  return stbi__errpuc("bad file","file too short (mixed read count)");
+
+                  if (count >= 128) { // Repeated
+                     stbi_uc value[4];
+
+                     if (count==128)
+                        count = stbi__get16be(s);
+                     else
+                        count -= 127;
+                     if (count > left)
+                        return stbi__errpuc("bad file","scanline overrun");
+
+                     if (!stbi__readval(s,packet->channel,value))
+                        return 0;
+
+                     for(i=0;i<count;++i, dest += 4)
+                        stbi__copyval(packet->channel,dest,value);
+                  } else { // Raw
+                     ++count;
+                     if (count>left) return stbi__errpuc("bad file","scanline overrun");
+
+                     for(i=0;i<count;++i, dest+=4)
+                        if (!stbi__readval(s,packet->channel,dest))
+                           return 0;
+                  }
+                  left-=count;
+               }
+               break;
+            }
+         }
+      }
+   }
+
+   return result;
+}
+
+static void *stbi__pic_load(stbi__context *s,int *px,int *py,int *comp,int req_comp, stbi__result_info *ri)
+{
+   stbi_uc *result;
+   int i, x,y, internal_comp;
+   STBI_NOTUSED(ri);
+
+   if (!comp) comp = &internal_comp;
+
+   for (i=0; i<92; ++i)
+      stbi__get8(s);
+
+   x = stbi__get16be(s);
+   y = stbi__get16be(s);
+
+   if (y > STBI_MAX_DIMENSIONS) return stbi__errpuc("too large","Very large image (corrupt?)");
+   if (x > STBI_MAX_DIMENSIONS) return stbi__errpuc("too large","Very large image (corrupt?)");
+
+   if (stbi__at_eof(s))  return stbi__errpuc("bad file","file too short (pic header)");
+   if (!stbi__mad3sizes_valid(x, y, 4, 0)) return stbi__errpuc("too large", "PIC image too large to decode");
+
+   stbi__get32be(s); //skip `ratio'
+   stbi__get16be(s); //skip `fields'
+   stbi__get16be(s); //skip `pad'
+
+   // intermediate buffer is RGBA
+   result = (stbi_uc *) stbi__malloc_mad3(x, y, 4, 0);
+   if (!result) return stbi__errpuc("outofmem", "Out of memory");
+   memset(result, 0xff, x*y*4);
+
+   if (!stbi__pic_load_core(s,x,y,comp, result)) {
+      STBI_FREE(result);
+      result=0;
+   }
+   *px = x;
+   *py = y;
+   if (req_comp == 0) req_comp = *comp;
+   result=stbi__convert_format(result,4,req_comp,x,y);
+
+   return result;
+}
+
+static int stbi__pic_test(stbi__context *s)
+{
+   int r = stbi__pic_test_core(s);
+   stbi__rewind(s);
+   return r;
+}
+#endif
+
+// *************************************************************************************************
+// GIF loader -- public domain by 
Jean-Marc Lienher -- simplified/shrunk by stb + +#ifndef STBI_NO_GIF +typedef struct +{ + stbi__int16 prefix; + stbi_uc first; + stbi_uc suffix; +} stbi__gif_lzw; + +typedef struct +{ + int w,h; + stbi_uc *out; // output buffer (always 4 components) + stbi_uc *background; // The current "background" as far as a gif is concerned + stbi_uc *history; + int flags, bgindex, ratio, transparent, eflags; + stbi_uc pal[256][4]; + stbi_uc lpal[256][4]; + stbi__gif_lzw codes[8192]; + stbi_uc *color_table; + int parse, step; + int lflags; + int start_x, start_y; + int max_x, max_y; + int cur_x, cur_y; + int line_size; + int delay; +} stbi__gif; + +static int stbi__gif_test_raw(stbi__context *s) +{ + int sz; + if (stbi__get8(s) != 'G' || stbi__get8(s) != 'I' || stbi__get8(s) != 'F' || stbi__get8(s) != '8') return 0; + sz = stbi__get8(s); + if (sz != '9' && sz != '7') return 0; + if (stbi__get8(s) != 'a') return 0; + return 1; +} + +static int stbi__gif_test(stbi__context *s) +{ + int r = stbi__gif_test_raw(s); + stbi__rewind(s); + return r; +} + +static void stbi__gif_parse_colortable(stbi__context *s, stbi_uc pal[256][4], int num_entries, int transp) +{ + int i; + for (i=0; i < num_entries; ++i) { + pal[i][2] = stbi__get8(s); + pal[i][1] = stbi__get8(s); + pal[i][0] = stbi__get8(s); + pal[i][3] = transp == i ? 0 : 255; + } +} + +static int stbi__gif_header(stbi__context *s, stbi__gif *g, int *comp, int is_info) +{ + stbi_uc version; + if (stbi__get8(s) != 'G' || stbi__get8(s) != 'I' || stbi__get8(s) != 'F' || stbi__get8(s) != '8') + return stbi__err("not GIF", "Corrupt GIF"); + + version = stbi__get8(s); + if (version != '7' && version != '9') return stbi__err("not GIF", "Corrupt GIF"); + if (stbi__get8(s) != 'a') return stbi__err("not GIF", "Corrupt GIF"); + + stbi__g_failure_reason = ""; + g->w = stbi__get16le(s); + g->h = stbi__get16le(s); + g->flags = stbi__get8(s); + g->bgindex = stbi__get8(s); + g->ratio = stbi__get8(s); + g->transparent = -1; + + if (g->w > STBI_MAX_DIMENSIONS) return stbi__err("too large","Very large image (corrupt?)"); + if (g->h > STBI_MAX_DIMENSIONS) return stbi__err("too large","Very large image (corrupt?)"); + + if (comp != 0) *comp = 4; // can't actually tell whether it's 3 or 4 until we parse the comments + + if (is_info) return 1; + + if (g->flags & 0x80) + stbi__gif_parse_colortable(s,g->pal, 2 << (g->flags & 7), -1); + + return 1; +} + +static int stbi__gif_info_raw(stbi__context *s, int *x, int *y, int *comp) +{ + stbi__gif* g = (stbi__gif*) stbi__malloc(sizeof(stbi__gif)); + if (!g) return stbi__err("outofmem", "Out of memory"); + if (!stbi__gif_header(s, g, comp, 1)) { + STBI_FREE(g); + stbi__rewind( s ); + return 0; + } + if (x) *x = g->w; + if (y) *y = g->h; + STBI_FREE(g); + return 1; +} + +static void stbi__out_gif_code(stbi__gif *g, stbi__uint16 code) +{ + stbi_uc *p, *c; + int idx; + + // recurse to decode the prefixes, since the linked-list is backwards, + // and working backwards through an interleaved image would be nasty + if (g->codes[code].prefix >= 0) + stbi__out_gif_code(g, g->codes[code].prefix); + + if (g->cur_y >= g->max_y) return; + + idx = g->cur_x + g->cur_y; + p = &g->out[idx]; + g->history[idx / 4] = 1; + + c = &g->color_table[g->codes[code].suffix * 4]; + if (c[3] > 128) { // don't render transparent pixels; + p[0] = c[2]; + p[1] = c[1]; + p[2] = c[0]; + p[3] = c[3]; + } + g->cur_x += 4; + + if (g->cur_x >= g->max_x) { + g->cur_x = g->start_x; + g->cur_y += g->step; + + while (g->cur_y >= g->max_y && g->parse > 0) { + g->step = (1 << 
g->parse) * g->line_size; + g->cur_y = g->start_y + (g->step >> 1); + --g->parse; + } + } +} + +static stbi_uc *stbi__process_gif_raster(stbi__context *s, stbi__gif *g) +{ + stbi_uc lzw_cs; + stbi__int32 len, init_code; + stbi__uint32 first; + stbi__int32 codesize, codemask, avail, oldcode, bits, valid_bits, clear; + stbi__gif_lzw *p; + + lzw_cs = stbi__get8(s); + if (lzw_cs > 12) return NULL; + clear = 1 << lzw_cs; + first = 1; + codesize = lzw_cs + 1; + codemask = (1 << codesize) - 1; + bits = 0; + valid_bits = 0; + for (init_code = 0; init_code < clear; init_code++) { + g->codes[init_code].prefix = -1; + g->codes[init_code].first = (stbi_uc) init_code; + g->codes[init_code].suffix = (stbi_uc) init_code; + } + + // support no starting clear code + avail = clear+2; + oldcode = -1; + + len = 0; + for(;;) { + if (valid_bits < codesize) { + if (len == 0) { + len = stbi__get8(s); // start new block + if (len == 0) + return g->out; + } + --len; + bits |= (stbi__int32) stbi__get8(s) << valid_bits; + valid_bits += 8; + } else { + stbi__int32 code = bits & codemask; + bits >>= codesize; + valid_bits -= codesize; + // @OPTIMIZE: is there some way we can accelerate the non-clear path? + if (code == clear) { // clear code + codesize = lzw_cs + 1; + codemask = (1 << codesize) - 1; + avail = clear + 2; + oldcode = -1; + first = 0; + } else if (code == clear + 1) { // end of stream code + stbi__skip(s, len); + while ((len = stbi__get8(s)) > 0) + stbi__skip(s,len); + return g->out; + } else if (code <= avail) { + if (first) { + return stbi__errpuc("no clear code", "Corrupt GIF"); + } + + if (oldcode >= 0) { + p = &g->codes[avail++]; + if (avail > 8192) { + return stbi__errpuc("too many codes", "Corrupt GIF"); + } + + p->prefix = (stbi__int16) oldcode; + p->first = g->codes[oldcode].first; + p->suffix = (code == avail) ? p->first : g->codes[code].first; + } else if (code == avail) + return stbi__errpuc("illegal code in raster", "Corrupt GIF"); + + stbi__out_gif_code(g, (stbi__uint16) code); + + if ((avail & codemask) == 0 && avail <= 0x0FFF) { + codesize++; + codemask = (1 << codesize) - 1; + } + + oldcode = code; + } else { + return stbi__errpuc("illegal code in raster", "Corrupt GIF"); + } + } + } +} + +// this function is designed to support animated gifs, although stb_image doesn't support it +// two back is the image from two frames ago, used for a very specific disposal format +static stbi_uc *stbi__gif_load_next(stbi__context *s, stbi__gif *g, int *comp, int req_comp, stbi_uc *two_back) +{ + int dispose; + int first_frame; + int pi; + int pcount; + STBI_NOTUSED(req_comp); + + // on first frame, any non-written pixels get the background colour (non-transparent) + first_frame = 0; + if (g->out == 0) { + if (!stbi__gif_header(s, g, comp,0)) return 0; // stbi__g_failure_reason set by stbi__gif_header + if (!stbi__mad3sizes_valid(4, g->w, g->h, 0)) + return stbi__errpuc("too large", "GIF image is too large"); + pcount = g->w * g->h; + g->out = (stbi_uc *) stbi__malloc(4 * pcount); + g->background = (stbi_uc *) stbi__malloc(4 * pcount); + g->history = (stbi_uc *) stbi__malloc(pcount); + if (!g->out || !g->background || !g->history) + return stbi__errpuc("outofmem", "Out of memory"); + + // image is treated as "transparent" at the start - ie, nothing overwrites the current background; + // background colour is only used for pixels that are not rendered first frame, after that "background" + // color refers to the color that was there the previous frame. 
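+      // The dispose values handled below come from the GIF89a Graphic Control
+      // Extension disposal method: 0 = unspecified, 1 = keep this frame,
+      // 2 = restore to background, 3 = restore to the state before the
+      // previous frame (which is why this function is handed two_back).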
+      memset(g->out, 0x00, 4 * pcount);
+      memset(g->background, 0x00, 4 * pcount); // state of the background (starts transparent)
+      memset(g->history, 0x00, pcount);        // pixels that were affected previous frame
+      first_frame = 1;
+   } else {
+      // second frame - how do we dispose of the previous one?
+      dispose = (g->eflags & 0x1C) >> 2;
+      pcount = g->w * g->h;
+
+      if ((dispose == 3) && (two_back == 0)) {
+         dispose = 2; // if I don't have an image to revert back to, default to the old background
+      }
+
+      if (dispose == 3) { // use previous graphic
+         for (pi = 0; pi < pcount; ++pi) {
+            if (g->history[pi]) {
+               memcpy( &g->out[pi * 4], &two_back[pi * 4], 4 );
+            }
+         }
+      } else if (dispose == 2) {
+         // restore what was changed last frame to background before that frame;
+         for (pi = 0; pi < pcount; ++pi) {
+            if (g->history[pi]) {
+               memcpy( &g->out[pi * 4], &g->background[pi * 4], 4 );
+            }
+         }
+      } else {
+         // This is a non-disposal case either way, so just
+         // leave the pixels as is, and they will become the new background
+         // 1: do not dispose
+         // 0: not specified.
+      }
+
+      // background is what out is after the undoing of the previous frame;
+      memcpy( g->background, g->out, 4 * g->w * g->h );
+   }
+
+   // clear my history;
+   memset( g->history, 0x00, g->w * g->h );        // pixels that were affected previous frame
+
+   for (;;) {
+      int tag = stbi__get8(s);
+      switch (tag) {
+         case 0x2C: /* Image Descriptor */
+         {
+            stbi__int32 x, y, w, h;
+            stbi_uc *o;
+
+            x = stbi__get16le(s);
+            y = stbi__get16le(s);
+            w = stbi__get16le(s);
+            h = stbi__get16le(s);
+            if (((x + w) > (g->w)) || ((y + h) > (g->h)))
+               return stbi__errpuc("bad Image Descriptor", "Corrupt GIF");
+
+            g->line_size = g->w * 4;
+            g->start_x = x * 4;
+            g->start_y = y * g->line_size;
+            g->max_x = g->start_x + w * 4;
+            g->max_y = g->start_y + h * g->line_size;
+            g->cur_x = g->start_x;
+            g->cur_y = g->start_y;
+
+            // if the width of the specified rectangle is 0, that means
+            // we may not see *any* pixels or the image is malformed;
+            // to make sure this is caught, move the current y down to
+            // max_y (which is what out_gif_code checks).
+            if (w == 0)
+               g->cur_y = g->max_y;
+
+            g->lflags = stbi__get8(s);
+
+            if (g->lflags & 0x40) {
+               g->step = 8 * g->line_size; // first interlaced spacing
+               g->parse = 3;
+            } else {
+               g->step = g->line_size;
+               g->parse = 0;
+            }
+
+            if (g->lflags & 0x80) {
+               stbi__gif_parse_colortable(s,g->lpal, 2 << (g->lflags & 7), g->eflags & 0x01 ? g->transparent : -1);
+               g->color_table = (stbi_uc *) g->lpal;
+            } else if (g->flags & 0x80) {
+               g->color_table = (stbi_uc *) g->pal;
+            } else
+               return stbi__errpuc("missing color table", "Corrupt GIF");
+
+            o = stbi__process_gif_raster(s, g);
+            if (!o) return NULL;
+
+            // if this was the first frame,
+            pcount = g->w * g->h;
+            if (first_frame && (g->bgindex > 0)) {
+               // if first frame, any pixel not drawn to gets the background color
+               for (pi = 0; pi < pcount; ++pi) {
+                  if (g->history[pi] == 0) {
+                     g->pal[g->bgindex][3] = 255; // just in case it was made transparent, undo that; it will be reset next frame if need be;
+                     memcpy( &g->out[pi * 4], &g->pal[g->bgindex], 4 );
+                  }
+               }
+            }
+
+            return o;
+         }
+
+         case 0x21: // Comment Extension.
+         {
+            int len;
+            int ext = stbi__get8(s);
+            if (ext == 0xF9) { // Graphic Control Extension.
+               len = stbi__get8(s);
+               if (len == 4) {
+                  g->eflags = stbi__get8(s);
+                  g->delay = 10 * stbi__get16le(s); // delay - 1/100th of a second, saving as 1/1000ths. 
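+                  // (so a GCE delay field of 10 -- ten hundredths of a second --
+                  // is returned through g->delay as 100 milliseconds)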
+ + // unset old transparent + if (g->transparent >= 0) { + g->pal[g->transparent][3] = 255; + } + if (g->eflags & 0x01) { + g->transparent = stbi__get8(s); + if (g->transparent >= 0) { + g->pal[g->transparent][3] = 0; + } + } else { + // don't need transparent + stbi__skip(s, 1); + g->transparent = -1; + } + } else { + stbi__skip(s, len); + break; + } + } + while ((len = stbi__get8(s)) != 0) { + stbi__skip(s, len); + } + break; + } + + case 0x3B: // gif stream termination code + return (stbi_uc *) s; // using '1' causes warning on some compilers + + default: + return stbi__errpuc("unknown code", "Corrupt GIF"); + } + } +} + +static void *stbi__load_gif_main_outofmem(stbi__gif *g, stbi_uc *out, int **delays) +{ + STBI_FREE(g->out); + STBI_FREE(g->history); + STBI_FREE(g->background); + + if (out) STBI_FREE(out); + if (delays && *delays) STBI_FREE(*delays); + return stbi__errpuc("outofmem", "Out of memory"); +} + +static void *stbi__load_gif_main(stbi__context *s, int **delays, int *x, int *y, int *z, int *comp, int req_comp) +{ + if (stbi__gif_test(s)) { + int layers = 0; + stbi_uc *u = 0; + stbi_uc *out = 0; + stbi_uc *two_back = 0; + stbi__gif g; + int stride; + int out_size = 0; + int delays_size = 0; + + STBI_NOTUSED(out_size); + STBI_NOTUSED(delays_size); + + memset(&g, 0, sizeof(g)); + if (delays) { + *delays = 0; + } + + do { + u = stbi__gif_load_next(s, &g, comp, req_comp, two_back); + if (u == (stbi_uc *) s) u = 0; // end of animated gif marker + + if (u) { + *x = g.w; + *y = g.h; + ++layers; + stride = g.w * g.h * 4; + + if (out) { + void *tmp = (stbi_uc*) STBI_REALLOC_SIZED( out, out_size, layers * stride ); + if (!tmp) + return stbi__load_gif_main_outofmem(&g, out, delays); + else { + out = (stbi_uc*) tmp; + out_size = layers * stride; + } + + if (delays) { + int *new_delays = (int*) STBI_REALLOC_SIZED( *delays, delays_size, sizeof(int) * layers ); + if (!new_delays) + return stbi__load_gif_main_outofmem(&g, out, delays); + *delays = new_delays; + delays_size = layers * sizeof(int); + } + } else { + out = (stbi_uc*)stbi__malloc( layers * stride ); + if (!out) + return stbi__load_gif_main_outofmem(&g, out, delays); + out_size = layers * stride; + if (delays) { + *delays = (int*) stbi__malloc( layers * sizeof(int) ); + if (!*delays) + return stbi__load_gif_main_outofmem(&g, out, delays); + delays_size = layers * sizeof(int); + } + } + memcpy( out + ((layers - 1) * stride), u, stride ); + if (layers >= 2) { + two_back = out - 2 * stride; + } + + if (delays) { + (*delays)[layers - 1U] = g.delay; + } + } + } while (u != 0); + + // free temp buffer; + STBI_FREE(g.out); + STBI_FREE(g.history); + STBI_FREE(g.background); + + // do the final conversion after loading everything; + if (req_comp && req_comp != 4) + out = stbi__convert_format(out, 4, req_comp, layers * g.w, g.h); + + *z = layers; + return out; + } else { + return stbi__errpuc("not GIF", "Image was not as a gif type."); + } +} + +static void *stbi__gif_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri) +{ + stbi_uc *u = 0; + stbi__gif g; + memset(&g, 0, sizeof(g)); + STBI_NOTUSED(ri); + + u = stbi__gif_load_next(s, &g, comp, req_comp, 0); + if (u == (stbi_uc *) s) u = 0; // end of animated gif marker + if (u) { + *x = g.w; + *y = g.h; + + // moved conversion to after successful load so that the same + // can be done for multiple frames. 
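+   // (As elsewhere in this file, stbi__convert_format frees its input and
+   // returns NULL on failure, so a conversion error simply propagates to the
+   // caller as a NULL frame.)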
+ if (req_comp && req_comp != 4) + u = stbi__convert_format(u, 4, req_comp, g.w, g.h); + } else if (g.out) { + // if there was an error and we allocated an image buffer, free it! + STBI_FREE(g.out); + } + + // free buffers needed for multiple frame loading; + STBI_FREE(g.history); + STBI_FREE(g.background); + + return u; +} + +static int stbi__gif_info(stbi__context *s, int *x, int *y, int *comp) +{ + return stbi__gif_info_raw(s,x,y,comp); +} +#endif + +// ************************************************************************************************* +// Radiance RGBE HDR loader +// originally by Nicolas Schulz +#ifndef STBI_NO_HDR +static int stbi__hdr_test_core(stbi__context *s, const char *signature) +{ + int i; + for (i=0; signature[i]; ++i) + if (stbi__get8(s) != signature[i]) + return 0; + stbi__rewind(s); + return 1; +} + +static int stbi__hdr_test(stbi__context* s) +{ + int r = stbi__hdr_test_core(s, "#?RADIANCE\n"); + stbi__rewind(s); + if(!r) { + r = stbi__hdr_test_core(s, "#?RGBE\n"); + stbi__rewind(s); + } + return r; +} + +#define STBI__HDR_BUFLEN 1024 +static char *stbi__hdr_gettoken(stbi__context *z, char *buffer) +{ + int len=0; + char c = '\0'; + + c = (char) stbi__get8(z); + + while (!stbi__at_eof(z) && c != '\n') { + buffer[len++] = c; + if (len == STBI__HDR_BUFLEN-1) { + // flush to end of line + while (!stbi__at_eof(z) && stbi__get8(z) != '\n') + ; + break; + } + c = (char) stbi__get8(z); + } + + buffer[len] = 0; + return buffer; +} + +static void stbi__hdr_convert(float *output, stbi_uc *input, int req_comp) +{ + if ( input[3] != 0 ) { + float f1; + // Exponent + f1 = (float) ldexp(1.0f, input[3] - (int)(128 + 8)); + if (req_comp <= 2) + output[0] = (input[0] + input[1] + input[2]) * f1 / 3; + else { + output[0] = input[0] * f1; + output[1] = input[1] * f1; + output[2] = input[2] * f1; + } + if (req_comp == 2) output[1] = 1; + if (req_comp == 4) output[3] = 1; + } else { + switch (req_comp) { + case 4: output[3] = 1; /* fallthrough */ + case 3: output[0] = output[1] = output[2] = 0; + break; + case 2: output[1] = 1; /* fallthrough */ + case 1: output[0] = 0; + break; + } + } +} + +static float *stbi__hdr_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri) +{ + char buffer[STBI__HDR_BUFLEN]; + char *token; + int valid = 0; + int width, height; + stbi_uc *scanline; + float *hdr_data; + int len; + unsigned char count, value; + int i, j, k, c1,c2, z; + const char *headerToken; + STBI_NOTUSED(ri); + + // Check identifier + headerToken = stbi__hdr_gettoken(s,buffer); + if (strcmp(headerToken, "#?RADIANCE") != 0 && strcmp(headerToken, "#?RGBE") != 0) + return stbi__errpf("not HDR", "Corrupt HDR image"); + + // Parse header + for(;;) { + token = stbi__hdr_gettoken(s,buffer); + if (token[0] == 0) break; + if (strcmp(token, "FORMAT=32-bit_rle_rgbe") == 0) valid = 1; + } + + if (!valid) return stbi__errpf("unsupported format", "Unsupported HDR format"); + + // Parse width and height + // can't use sscanf() if we're not using stdio! 
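+   // A conforming resolution line reads e.g. "-Y 512 +X 768", i.e. 768 pixels
+   // wide by 512 tall with scanlines stored top-to-bottom; of the eight
+   // orientations Radiance allows, only this one is accepted below.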
+   token = stbi__hdr_gettoken(s,buffer);
+   if (strncmp(token, "-Y ", 3))  return stbi__errpf("unsupported data layout", "Unsupported HDR format");
+   token += 3;
+   height = (int) strtol(token, &token, 10);
+   while (*token == ' ') ++token;
+   if (strncmp(token, "+X ", 3))  return stbi__errpf("unsupported data layout", "Unsupported HDR format");
+   token += 3;
+   width = (int) strtol(token, NULL, 10);
+
+   if (height > STBI_MAX_DIMENSIONS) return stbi__errpf("too large","Very large image (corrupt?)");
+   if (width > STBI_MAX_DIMENSIONS) return stbi__errpf("too large","Very large image (corrupt?)");
+
+   *x = width;
+   *y = height;
+
+   if (comp) *comp = 3;
+   if (req_comp == 0) req_comp = 3;
+
+   if (!stbi__mad4sizes_valid(width, height, req_comp, sizeof(float), 0))
+      return stbi__errpf("too large", "HDR image is too large");
+
+   // Read data
+   hdr_data = (float *) stbi__malloc_mad4(width, height, req_comp, sizeof(float), 0);
+   if (!hdr_data)
+      return stbi__errpf("outofmem", "Out of memory");
+
+   // Load image data
+   // image data is stored as some number of scan lines
+   if ( width < 8 || width >= 32768) {
+      // Read flat data
+      for (j=0; j < height; ++j) {
+         for (i=0; i < width; ++i) {
+            stbi_uc rgbe[4];
+           main_decode_loop:
+            stbi__getn(s, rgbe, 4);
+            stbi__hdr_convert(hdr_data + j * width * req_comp + i * req_comp, rgbe, req_comp);
+         }
+      }
+   } else {
+      // Read RLE-encoded data
+      scanline = NULL;
+
+      for (j = 0; j < height; ++j) {
+         c1 = stbi__get8(s);
+         c2 = stbi__get8(s);
+         len = stbi__get8(s);
+         if (c1 != 2 || c2 != 2 || (len & 0x80)) {
+            // not run-length encoded, so we have to actually use THIS data as a decoded
+            // pixel (note this can't be a valid pixel--one of RGB must be >= 128)
+            stbi_uc rgbe[4];
+            rgbe[0] = (stbi_uc) c1;
+            rgbe[1] = (stbi_uc) c2;
+            rgbe[2] = (stbi_uc) len;
+            rgbe[3] = (stbi_uc) stbi__get8(s);
+            stbi__hdr_convert(hdr_data, rgbe, req_comp);
+            i = 1;
+            j = 0;
+            STBI_FREE(scanline);
+            goto main_decode_loop; // yes, this makes no sense
+         }
+         len <<= 8;
+         len |= stbi__get8(s);
+         if (len != width) { STBI_FREE(hdr_data); STBI_FREE(scanline); return stbi__errpf("invalid decoded scanline length", "corrupt HDR"); }
+         if (scanline == NULL) {
+            scanline = (stbi_uc *) stbi__malloc_mad2(width, 4, 0);
+            if (!scanline) {
+               STBI_FREE(hdr_data);
+               return stbi__errpf("outofmem", "Out of memory");
+            }
+         }
+
+         for (k = 0; k < 4; ++k) {
+            int nleft;
+            i = 0;
+            while ((nleft = width - i) > 0) {
+               count = stbi__get8(s);
+               if (count > 128) {
+                  // Run
+                  value = stbi__get8(s);
+                  count -= 128;
+                  if ((count == 0) || (count > nleft)) { STBI_FREE(hdr_data); STBI_FREE(scanline); return stbi__errpf("corrupt", "bad RLE data in HDR"); }
+                  for (z = 0; z < count; ++z)
+                     scanline[i++ * 4 + k] = value;
+               } else {
+                  // Dump
+                  if ((count == 0) || (count > nleft)) { STBI_FREE(hdr_data); STBI_FREE(scanline); return stbi__errpf("corrupt", "bad RLE data in HDR"); }
+                  for (z = 0; z < count; ++z)
+                     scanline[i++ * 4 + k] = stbi__get8(s);
+               }
+            }
+         }
+         for (i=0; i < width; ++i)
+            stbi__hdr_convert(hdr_data+(j*width + i)*req_comp, scanline + i*4, req_comp);
+      }
+      if (scanline)
+         STBI_FREE(scanline);
+   }
+
+   return hdr_data;
+}
+
+static int stbi__hdr_info(stbi__context *s, int *x, int *y, int *comp)
+{
+   char buffer[STBI__HDR_BUFLEN];
+   char *token;
+   int valid = 0;
+   int dummy;
+
+   if (!x) x = &dummy;
+   if (!y) y = &dummy;
+   if (!comp) comp = &dummy;
+
+   if (stbi__hdr_test(s) == 0) {
+      stbi__rewind( s );
+      return 0;
+   }
+
+   for(;;) {
+      token = stbi__hdr_gettoken(s,buffer);
+      if (token[0] == 0) break;
+      if (strcmp(token, 
"FORMAT=32-bit_rle_rgbe") == 0) valid = 1; + } + + if (!valid) { + stbi__rewind( s ); + return 0; + } + token = stbi__hdr_gettoken(s,buffer); + if (strncmp(token, "-Y ", 3)) { + stbi__rewind( s ); + return 0; + } + token += 3; + *y = (int) strtol(token, &token, 10); + while (*token == ' ') ++token; + if (strncmp(token, "+X ", 3)) { + stbi__rewind( s ); + return 0; + } + token += 3; + *x = (int) strtol(token, NULL, 10); + *comp = 3; + return 1; +} +#endif // STBI_NO_HDR + +#ifndef STBI_NO_BMP +static int stbi__bmp_info(stbi__context *s, int *x, int *y, int *comp) +{ + void *p; + stbi__bmp_data info; + + info.all_a = 255; + p = stbi__bmp_parse_header(s, &info); + if (p == NULL) { + stbi__rewind( s ); + return 0; + } + if (x) *x = s->img_x; + if (y) *y = s->img_y; + if (comp) { + if (info.bpp == 24 && info.ma == 0xff000000) + *comp = 3; + else + *comp = info.ma ? 4 : 3; + } + return 1; +} +#endif + +#ifndef STBI_NO_PSD +static int stbi__psd_info(stbi__context *s, int *x, int *y, int *comp) +{ + int channelCount, dummy, depth; + if (!x) x = &dummy; + if (!y) y = &dummy; + if (!comp) comp = &dummy; + if (stbi__get32be(s) != 0x38425053) { + stbi__rewind( s ); + return 0; + } + if (stbi__get16be(s) != 1) { + stbi__rewind( s ); + return 0; + } + stbi__skip(s, 6); + channelCount = stbi__get16be(s); + if (channelCount < 0 || channelCount > 16) { + stbi__rewind( s ); + return 0; + } + *y = stbi__get32be(s); + *x = stbi__get32be(s); + depth = stbi__get16be(s); + if (depth != 8 && depth != 16) { + stbi__rewind( s ); + return 0; + } + if (stbi__get16be(s) != 3) { + stbi__rewind( s ); + return 0; + } + *comp = 4; + return 1; +} + +static int stbi__psd_is16(stbi__context *s) +{ + int channelCount, depth; + if (stbi__get32be(s) != 0x38425053) { + stbi__rewind( s ); + return 0; + } + if (stbi__get16be(s) != 1) { + stbi__rewind( s ); + return 0; + } + stbi__skip(s, 6); + channelCount = stbi__get16be(s); + if (channelCount < 0 || channelCount > 16) { + stbi__rewind( s ); + return 0; + } + STBI_NOTUSED(stbi__get32be(s)); + STBI_NOTUSED(stbi__get32be(s)); + depth = stbi__get16be(s); + if (depth != 16) { + stbi__rewind( s ); + return 0; + } + return 1; +} +#endif + +#ifndef STBI_NO_PIC +static int stbi__pic_info(stbi__context *s, int *x, int *y, int *comp) +{ + int act_comp=0,num_packets=0,chained,dummy; + stbi__pic_packet packets[10]; + + if (!x) x = &dummy; + if (!y) y = &dummy; + if (!comp) comp = &dummy; + + if (!stbi__pic_is4(s,"\x53\x80\xF6\x34")) { + stbi__rewind(s); + return 0; + } + + stbi__skip(s, 88); + + *x = stbi__get16be(s); + *y = stbi__get16be(s); + if (stbi__at_eof(s)) { + stbi__rewind( s); + return 0; + } + if ( (*x) != 0 && (1 << 28) / (*x) < (*y)) { + stbi__rewind( s ); + return 0; + } + + stbi__skip(s, 8); + + do { + stbi__pic_packet *packet; + + if (num_packets==sizeof(packets)/sizeof(packets[0])) + return 0; + + packet = &packets[num_packets++]; + chained = stbi__get8(s); + packet->size = stbi__get8(s); + packet->type = stbi__get8(s); + packet->channel = stbi__get8(s); + act_comp |= packet->channel; + + if (stbi__at_eof(s)) { + stbi__rewind( s ); + return 0; + } + if (packet->size != 8) { + stbi__rewind( s ); + return 0; + } + } while (chained); + + *comp = (act_comp & 0x10 ? 
4 : 3); + + return 1; +} +#endif + +// ************************************************************************************************* +// Portable Gray Map and Portable Pixel Map loader +// by Ken Miller +// +// PGM: http://netpbm.sourceforge.net/doc/pgm.html +// PPM: http://netpbm.sourceforge.net/doc/ppm.html +// +// Known limitations: +// Does not support comments in the header section +// Does not support ASCII image data (formats P2 and P3) + +#ifndef STBI_NO_PNM + +static int stbi__pnm_test(stbi__context *s) +{ + char p, t; + p = (char) stbi__get8(s); + t = (char) stbi__get8(s); + if (p != 'P' || (t != '5' && t != '6')) { + stbi__rewind( s ); + return 0; + } + return 1; +} + +static void *stbi__pnm_load(stbi__context *s, int *x, int *y, int *comp, int req_comp, stbi__result_info *ri) +{ + stbi_uc *out; + STBI_NOTUSED(ri); + + ri->bits_per_channel = stbi__pnm_info(s, (int *)&s->img_x, (int *)&s->img_y, (int *)&s->img_n); + if (ri->bits_per_channel == 0) + return 0; + + if (s->img_y > STBI_MAX_DIMENSIONS) return stbi__errpuc("too large","Very large image (corrupt?)"); + if (s->img_x > STBI_MAX_DIMENSIONS) return stbi__errpuc("too large","Very large image (corrupt?)"); + + *x = s->img_x; + *y = s->img_y; + if (comp) *comp = s->img_n; + + if (!stbi__mad4sizes_valid(s->img_n, s->img_x, s->img_y, ri->bits_per_channel / 8, 0)) + return stbi__errpuc("too large", "PNM too large"); + + out = (stbi_uc *) stbi__malloc_mad4(s->img_n, s->img_x, s->img_y, ri->bits_per_channel / 8, 0); + if (!out) return stbi__errpuc("outofmem", "Out of memory"); + if (!stbi__getn(s, out, s->img_n * s->img_x * s->img_y * (ri->bits_per_channel / 8))) { + STBI_FREE(out); + return stbi__errpuc("bad PNM", "PNM file truncated"); + } + + if (req_comp && req_comp != s->img_n) { + if (ri->bits_per_channel == 16) { + out = (stbi_uc *) stbi__convert_format16((stbi__uint16 *) out, s->img_n, req_comp, s->img_x, s->img_y); + } else { + out = stbi__convert_format(out, s->img_n, req_comp, s->img_x, s->img_y); + } + if (out == NULL) return out; // stbi__convert_format frees input on failure + } + return out; +} + +static int stbi__pnm_isspace(char c) +{ + return c == ' ' || c == '\t' || c == '\n' || c == '\v' || c == '\f' || c == '\r'; +} + +static void stbi__pnm_skip_whitespace(stbi__context *s, char *c) +{ + for (;;) { + while (!stbi__at_eof(s) && stbi__pnm_isspace(*c)) + *c = (char) stbi__get8(s); + + if (stbi__at_eof(s) || *c != '#') + break; + + while (!stbi__at_eof(s) && *c != '\n' && *c != '\r' ) + *c = (char) stbi__get8(s); + } +} + +static int stbi__pnm_isdigit(char c) +{ + return c >= '0' && c <= '9'; +} + +static int stbi__pnm_getinteger(stbi__context *s, char *c) +{ + int value = 0; + + while (!stbi__at_eof(s) && stbi__pnm_isdigit(*c)) { + value = value*10 + (*c - '0'); + *c = (char) stbi__get8(s); + if((value > 214748364) || (value == 214748364 && *c > '7')) + return stbi__err("integer parse overflow", "Parsing an integer in the PPM header overflowed a 32-bit int"); + } + + return value; +} + +static int stbi__pnm_info(stbi__context *s, int *x, int *y, int *comp) +{ + int maxv, dummy; + char c, p, t; + + if (!x) x = &dummy; + if (!y) y = &dummy; + if (!comp) comp = &dummy; + + stbi__rewind(s); + + // Get identifier + p = (char) stbi__get8(s); + t = (char) stbi__get8(s); + if (p != 'P' || (t != '5' && t != '6')) { + stbi__rewind(s); + return 0; + } + + *comp = (t == '6') ? 
3 : 1; // '5' is 1-component .pgm; '6' is 3-component .ppm
+
+   c = (char) stbi__get8(s);
+   stbi__pnm_skip_whitespace(s, &c);
+
+   *x = stbi__pnm_getinteger(s, &c); // read width
+   if(*x == 0)
+      return stbi__err("invalid width", "PPM image header had zero or overflowing width");
+   stbi__pnm_skip_whitespace(s, &c);
+
+   *y = stbi__pnm_getinteger(s, &c); // read height
+   if (*y == 0)
+      return stbi__err("invalid height", "PPM image header had zero or overflowing height");
+   stbi__pnm_skip_whitespace(s, &c);
+
+   maxv = stbi__pnm_getinteger(s, &c);  // read max value
+   if (maxv > 65535)
+      return stbi__err("max value > 65535", "PPM image supports only 8-bit and 16-bit images");
+   else if (maxv > 255)
+      return 16;
+   else
+      return 8;
+}
+
+static int stbi__pnm_is16(stbi__context *s)
+{
+   if (stbi__pnm_info(s, NULL, NULL, NULL) == 16)
+      return 1;
+   return 0;
+}
+#endif
+
+static int stbi__info_main(stbi__context *s, int *x, int *y, int *comp)
+{
+   #ifndef STBI_NO_JPEG
+   if (stbi__jpeg_info(s, x, y, comp)) return 1;
+   #endif
+
+   #ifndef STBI_NO_PNG
+   if (stbi__png_info(s, x, y, comp))  return 1;
+   #endif
+
+   #ifndef STBI_NO_GIF
+   if (stbi__gif_info(s, x, y, comp))  return 1;
+   #endif
+
+   #ifndef STBI_NO_BMP
+   if (stbi__bmp_info(s, x, y, comp))  return 1;
+   #endif
+
+   #ifndef STBI_NO_PSD
+   if (stbi__psd_info(s, x, y, comp))  return 1;
+   #endif
+
+   #ifndef STBI_NO_PIC
+   if (stbi__pic_info(s, x, y, comp))  return 1;
+   #endif
+
+   #ifndef STBI_NO_PNM
+   if (stbi__pnm_info(s, x, y, comp))  return 1;
+   #endif
+
+   #ifndef STBI_NO_HDR
+   if (stbi__hdr_info(s, x, y, comp))  return 1;
+   #endif
+
+   // test tga last because it's a crappy test!
+   #ifndef STBI_NO_TGA
+   if (stbi__tga_info(s, x, y, comp))
+       return 1;
+   #endif
+   return stbi__err("unknown image type", "Image not of any known type, or corrupt");
+}
+
+static int stbi__is_16_main(stbi__context *s)
+{
+   #ifndef STBI_NO_PNG
+   if (stbi__png_is16(s))  return 1;
+   #endif
+
+   #ifndef STBI_NO_PSD
+   if (stbi__psd_is16(s))  return 1;
+   #endif
+
+   #ifndef STBI_NO_PNM
+   if (stbi__pnm_is16(s))  return 1;
+   #endif
+   return 0;
+}
+
+#ifndef STBI_NO_STDIO
+STBIDEF int stbi_info(char const *filename, int *x, int *y, int *comp)
+{
+    FILE *f = stbi__fopen(filename, "rb");
+    int result;
+    if (!f) return stbi__err("can't fopen", "Unable to open file");
+    result = stbi_info_from_file(f, x, y, comp);
+    fclose(f);
+    return result;
+}
+
+STBIDEF int stbi_info_from_file(FILE *f, int *x, int *y, int *comp)
+{
+   int r;
+   stbi__context s;
+   long pos = ftell(f);
+   stbi__start_file(&s, f);
+   r = stbi__info_main(&s,x,y,comp);
+   fseek(f,pos,SEEK_SET);
+   return r;
+}
+
+STBIDEF int stbi_is_16_bit(char const *filename)
+{
+    FILE *f = stbi__fopen(filename, "rb");
+    int result;
+    if (!f) return stbi__err("can't fopen", "Unable to open file");
+    result = stbi_is_16_bit_from_file(f);
+    fclose(f);
+    return result;
+}
+
+STBIDEF int stbi_is_16_bit_from_file(FILE *f)
+{
+   int r;
+   stbi__context s;
+   long pos = ftell(f);
+   stbi__start_file(&s, f);
+   r = stbi__is_16_main(&s);
+   fseek(f,pos,SEEK_SET);
+   return r;
+}
+#endif // !STBI_NO_STDIO
+
+STBIDEF int stbi_info_from_memory(stbi_uc const *buffer, int len, int *x, int *y, int *comp)
+{
+   stbi__context s;
+   stbi__start_mem(&s,buffer,len);
+   return stbi__info_main(&s,x,y,comp);
+}
+
+STBIDEF int stbi_info_from_callbacks(stbi_io_callbacks const *c, void *user, int *x, int *y, int *comp)
+{
+   stbi__context s;
+   stbi__start_callbacks(&s, (stbi_io_callbacks *) c, user);
+   return stbi__info_main(&s,x,y,comp);
+}
+
+STBIDEF int stbi_is_16_bit_from_memory(stbi_uc const 
*buffer, int len) +{ + stbi__context s; + stbi__start_mem(&s,buffer,len); + return stbi__is_16_main(&s); +} + +STBIDEF int stbi_is_16_bit_from_callbacks(stbi_io_callbacks const *c, void *user) +{ + stbi__context s; + stbi__start_callbacks(&s, (stbi_io_callbacks *) c, user); + return stbi__is_16_main(&s); +} + +#endif // STB_IMAGE_IMPLEMENTATION + +/* + revision history: + 2.20 (2019-02-07) support utf8 filenames in Windows; fix warnings and platform ifdefs + 2.19 (2018-02-11) fix warning + 2.18 (2018-01-30) fix warnings + 2.17 (2018-01-29) change sbti__shiftsigned to avoid clang -O2 bug + 1-bit BMP + *_is_16_bit api + avoid warnings + 2.16 (2017-07-23) all functions have 16-bit variants; + STBI_NO_STDIO works again; + compilation fixes; + fix rounding in unpremultiply; + optimize vertical flip; + disable raw_len validation; + documentation fixes + 2.15 (2017-03-18) fix png-1,2,4 bug; now all Imagenet JPGs decode; + warning fixes; disable run-time SSE detection on gcc; + uniform handling of optional "return" values; + thread-safe initialization of zlib tables + 2.14 (2017-03-03) remove deprecated STBI_JPEG_OLD; fixes for Imagenet JPGs + 2.13 (2016-11-29) add 16-bit API, only supported for PNG right now + 2.12 (2016-04-02) fix typo in 2.11 PSD fix that caused crashes + 2.11 (2016-04-02) allocate large structures on the stack + remove white matting for transparent PSD + fix reported channel count for PNG & BMP + re-enable SSE2 in non-gcc 64-bit + support RGB-formatted JPEG + read 16-bit PNGs (only as 8-bit) + 2.10 (2016-01-22) avoid warning introduced in 2.09 by STBI_REALLOC_SIZED + 2.09 (2016-01-16) allow comments in PNM files + 16-bit-per-pixel TGA (not bit-per-component) + info() for TGA could break due to .hdr handling + info() for BMP to shares code instead of sloppy parse + can use STBI_REALLOC_SIZED if allocator doesn't support realloc + code cleanup + 2.08 (2015-09-13) fix to 2.07 cleanup, reading RGB PSD as RGBA + 2.07 (2015-09-13) fix compiler warnings + partial animated GIF support + limited 16-bpc PSD support + #ifdef unused functions + bug with < 92 byte PIC,PNM,HDR,TGA + 2.06 (2015-04-19) fix bug where PSD returns wrong '*comp' value + 2.05 (2015-04-19) fix bug in progressive JPEG handling, fix warning + 2.04 (2015-04-15) try to re-enable SIMD on MinGW 64-bit + 2.03 (2015-04-12) extra corruption checking (mmozeiko) + stbi_set_flip_vertically_on_load (nguillemot) + fix NEON support; fix mingw support + 2.02 (2015-01-19) fix incorrect assert, fix warning + 2.01 (2015-01-17) fix various warnings; suppress SIMD on gcc 32-bit without -msse2 + 2.00b (2014-12-25) fix STBI_MALLOC in progressive JPEG + 2.00 (2014-12-25) optimize JPG, including x86 SSE2 & NEON SIMD (ryg) + progressive JPEG (stb) + PGM/PPM support (Ken Miller) + STBI_MALLOC,STBI_REALLOC,STBI_FREE + GIF bugfix -- seemingly never worked + STBI_NO_*, STBI_ONLY_* + 1.48 (2014-12-14) fix incorrectly-named assert() + 1.47 (2014-12-14) 1/2/4-bit PNG support, both direct and paletted (Omar Cornut & stb) + optimize PNG (ryg) + fix bug in interlaced PNG with user-specified channel count (stb) + 1.46 (2014-08-26) + fix broken tRNS chunk (colorkey-style transparency) in non-paletted PNG + 1.45 (2014-08-16) + fix MSVC-ARM internal compiler error by wrapping malloc + 1.44 (2014-08-07) + various warning fixes from Ronny Chevalier + 1.43 (2014-07-15) + fix MSVC-only compiler problem in code changed in 1.42 + 1.42 (2014-07-09) + don't define _CRT_SECURE_NO_WARNINGS (affects user code) + fixes to stbi__cleanup_jpeg path + added STBI_ASSERT 
to avoid requiring assert.h + 1.41 (2014-06-25) + fix search&replace from 1.36 that messed up comments/error messages + 1.40 (2014-06-22) + fix gcc struct-initialization warning + 1.39 (2014-06-15) + fix to TGA optimization when req_comp != number of components in TGA; + fix to GIF loading because BMP wasn't rewinding (whoops, no GIFs in my test suite) + add support for BMP version 5 (more ignored fields) + 1.38 (2014-06-06) + suppress MSVC warnings on integer casts truncating values + fix accidental rename of 'skip' field of I/O + 1.37 (2014-06-04) + remove duplicate typedef + 1.36 (2014-06-03) + convert to header file single-file library + if de-iphone isn't set, load iphone images color-swapped instead of returning NULL + 1.35 (2014-05-27) + various warnings + fix broken STBI_SIMD path + fix bug where stbi_load_from_file no longer left file pointer in correct place + fix broken non-easy path for 32-bit BMP (possibly never used) + TGA optimization by Arseny Kapoulkine + 1.34 (unknown) + use STBI_NOTUSED in stbi__resample_row_generic(), fix one more leak in tga failure case + 1.33 (2011-07-14) + make stbi_is_hdr work in STBI_NO_HDR (as specified), minor compiler-friendly improvements + 1.32 (2011-07-13) + support for "info" function for all supported filetypes (SpartanJ) + 1.31 (2011-06-20) + a few more leak fixes, bug in PNG handling (SpartanJ) + 1.30 (2011-06-11) + added ability to load files via callbacks to accomidate custom input streams (Ben Wenger) + removed deprecated format-specific test/load functions + removed support for installable file formats (stbi_loader) -- would have been broken for IO callbacks anyway + error cases in bmp and tga give messages and don't leak (Raymond Barbiero, grisha) + fix inefficiency in decoding 32-bit BMP (David Woo) + 1.29 (2010-08-16) + various warning fixes from Aurelien Pocheville + 1.28 (2010-08-01) + fix bug in GIF palette transparency (SpartanJ) + 1.27 (2010-08-01) + cast-to-stbi_uc to fix warnings + 1.26 (2010-07-24) + fix bug in file buffering for PNG reported by SpartanJ + 1.25 (2010-07-17) + refix trans_data warning (Won Chun) + 1.24 (2010-07-12) + perf improvements reading from files on platforms with lock-heavy fgetc() + minor perf improvements for jpeg + deprecated type-specific functions so we'll get feedback if they're needed + attempt to fix trans_data warning (Won Chun) + 1.23 fixed bug in iPhone support + 1.22 (2010-07-10) + removed image *writing* support + stbi_info support from Jetro Lauha + GIF support from Jean-Marc Lienher + iPhone PNG-extensions from James Brown + warning-fixes from Nicolas Schulz and Janez Zemva (i.stbi__err. Janez (U+017D)emva) + 1.21 fix use of 'stbi_uc' in header (reported by jon blow) + 1.20 added support for Softimage PIC, by Tom Seddon + 1.19 bug in interlaced PNG corruption check (found by ryg) + 1.18 (2008-08-02) + fix a threading bug (local mutable static) + 1.17 support interlaced PNG + 1.16 major bugfix - stbi__convert_format converted one too many pixels + 1.15 initialize some fields for thread safety + 1.14 fix threadsafe conversion bug + header-file-only version (#define STBI_HEADER_FILE_ONLY before including) + 1.13 threadsafe + 1.12 const qualifiers in the API + 1.11 Support installable IDCT, colorspace conversion routines + 1.10 Fixes for 64-bit (don't use "unsigned long") + optimized upsampling by Fabian "ryg" Giesen + 1.09 Fix format-conversion for PSD code (bad global variables!) 
+ 1.08 Thatcher Ulrich's PSD code integrated by Nicolas Schulz + 1.07 attempt to fix C++ warning/errors again + 1.06 attempt to fix C++ warning/errors again + 1.05 fix TGA loading to return correct *comp and use good luminance calc + 1.04 default float alpha is 1, not 255; use 'void *' for stbi_image_free + 1.03 bugfixes to STBI_NO_STDIO, STBI_NO_HDR + 1.02 support for (subset of) HDR files, float interface for preferred access to them + 1.01 fix bug: possible bug in handling right-side up bmps... not sure + fix bug: the stbi__bmp_load() and stbi__tga_load() functions didn't work at all + 1.00 interface to zlib that skips zlib header + 0.99 correct handling of alpha in palette + 0.98 TGA loader by lonesock; dynamically add loaders (untested) + 0.97 jpeg errors on too large a file; also catch another malloc failure + 0.96 fix detection of invalid v value - particleman@mollyrocket forum + 0.95 during header scan, seek to markers in case of padding + 0.94 STBI_NO_STDIO to disable stdio usage; rename all #defines the same + 0.93 handle jpegtran output; verbose errors + 0.92 read 4,8,16,24,32-bit BMP files of several formats + 0.91 output 24-bit Windows 3.0 BMP files + 0.90 fix a few more warnings; bump version number to approach 1.0 + 0.61 bugfixes due to Marc LeBlanc, Christopher Lloyd + 0.60 fix compiling as c++ + 0.59 fix warnings: merge Dave Moore's -Wall fixes + 0.58 fix bug: zlib uncompressed mode len/nlen was wrong endian + 0.57 fix bug: jpg last huffman symbol before marker was >9 bits but less than 16 available + 0.56 fix bug: zlib uncompressed mode len vs. nlen + 0.55 fix bug: restart_interval not initialized to 0 + 0.54 allow NULL for 'int *comp' + 0.53 fix bug in png 3->4; speedup png decoding + 0.52 png handles req_comp=3,4 directly; minor cleanup; jpeg comments + 0.51 obey req_comp requests, 1-component jpegs return as 1-component, + on 'test' only check type, not whether we support this variant + 0.50 (2006-11-19) + first released version +*/ + + +/* +------------------------------------------------------------------------------ +This software is available under 2 licenses -- choose whichever you prefer. +------------------------------------------------------------------------------ +ALTERNATIVE A - MIT License +Copyright (c) 2017 Sean Barrett +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +------------------------------------------------------------------------------ +ALTERNATIVE B - Public Domain (www.unlicense.org) +This is free and unencumbered software released into the public domain. 
+Anyone is free to copy, modify, publish, use, compile, sell, or distribute this +software, either in source code form or as a compiled binary, for any purpose, +commercial or non-commercial, and by any means. +In jurisdictions that recognize copyright laws, the author or authors of this +software dedicate any and all copyright interest in the software to the public +domain. We make this dedication for the benefit of the public at large and to +the detriment of our heirs and successors. We intend this dedication to be an +overt act of relinquishment in perpetuity of all present and future rights to +this software under copyright law. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +------------------------------------------------------------------------------ +*/ diff --git a/MacroLibX/third_party/stb_rect_pack.h b/MacroLibX/third_party/stb_rect_pack.h new file mode 100644 index 0000000..6a633ce --- /dev/null +++ b/MacroLibX/third_party/stb_rect_pack.h @@ -0,0 +1,623 @@ +// stb_rect_pack.h - v1.01 - public domain - rectangle packing +// Sean Barrett 2014 +// +// Useful for e.g. packing rectangular textures into an atlas. +// Does not do rotation. +// +// Before #including, +// +// #define STB_RECT_PACK_IMPLEMENTATION +// +// in the file that you want to have the implementation. +// +// Not necessarily the awesomest packing method, but better than +// the totally naive one in stb_truetype (which is primarily what +// this is meant to replace). +// +// Has only had a few tests run, may have issues. +// +// More docs to come. +// +// No memory allocations; uses qsort() and assert() from stdlib. +// Can override those by defining STBRP_SORT and STBRP_ASSERT. +// +// This library currently uses the Skyline Bottom-Left algorithm. +// +// Please note: better rectangle packers are welcome! Please +// implement them to the same API, but with a different init +// function. +// +// Credits +// +// Library +// Sean Barrett +// Minor features +// Martins Mozeiko +// github:IntellectualKitty +// +// Bugfixes / warning fixes +// Jeremy Jaussaud +// Fabian Giesen +// +// Version history: +// +// 1.01 (2021-07-11) always use large rect mode, expose STBRP__MAXVAL in public section +// 1.00 (2019-02-25) avoid small space waste; gracefully fail too-wide rectangles +// 0.99 (2019-02-07) warning fixes +// 0.11 (2017-03-03) return packing success/fail result +// 0.10 (2016-10-25) remove cast-away-const to avoid warnings +// 0.09 (2016-08-27) fix compiler warnings +// 0.08 (2015-09-13) really fix bug with empty rects (w=0 or h=0) +// 0.07 (2015-09-13) fix bug with empty rects (w=0 or h=0) +// 0.06 (2015-04-15) added STBRP_SORT to allow replacing qsort +// 0.05: added STBRP_ASSERT to allow replacing assert +// 0.04: fixed minor bug in STBRP_LARGE_RECTS support +// 0.01: initial release +// +// LICENSE +// +// See end of file for license information. 
+
+//////////////////////////////////////////////////////////////////////////////
+//
+//       INCLUDE SECTION
+//
+
+#ifndef STB_INCLUDE_STB_RECT_PACK_H
+#define STB_INCLUDE_STB_RECT_PACK_H
+
+#define STB_RECT_PACK_VERSION  1
+
+#ifdef STBRP_STATIC
+#define STBRP_DEF static
+#else
+#define STBRP_DEF extern
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct stbrp_context stbrp_context;
+typedef struct stbrp_node    stbrp_node;
+typedef struct stbrp_rect    stbrp_rect;
+
+typedef int            stbrp_coord;
+
+#define STBRP__MAXVAL  0x7fffffff
+// Mostly for internal use, but this is the maximum supported coordinate value.
+
+STBRP_DEF int stbrp_pack_rects (stbrp_context *context, stbrp_rect *rects, int num_rects);
+// Assign packed locations to rectangles. The rectangles are of type
+// 'stbrp_rect' defined below, stored in the array 'rects', and there
+// are 'num_rects' many of them.
+//
+// Rectangles which are successfully packed have the 'was_packed' flag
+// set to a non-zero value and 'x' and 'y' store the minimum location
+// on each axis (i.e. bottom-left in cartesian coordinates, top-left
+// if you imagine y increasing downwards). Rectangles which do not fit
+// have the 'was_packed' flag set to 0.
+//
+// You should not try to access the 'rects' array from another thread
+// while this function is running, as the function temporarily reorders
+// the array while it executes.
+//
+// To pack into another rectangle, you need to call stbrp_init_target
+// again. To continue packing into the same rectangle, you can call
+// this function again. Calling this multiple times with multiple rect
+// arrays will probably produce worse packing results than calling it
+// a single time with the full rectangle array, but the option is
+// available.
+//
+// The function returns 1 if all of the rectangles were successfully
+// packed and 0 otherwise.
+
+struct stbrp_rect
+{
+   // reserved for your use:
+   int            id;
+
+   // input:
+   stbrp_coord    w, h;
+
+   // output:
+   stbrp_coord    x, y;
+   int            was_packed;  // non-zero if valid packing
+
+}; // 16 bytes, nominally
+
+
+STBRP_DEF void stbrp_init_target (stbrp_context *context, int width, int height, stbrp_node *nodes, int num_nodes);
+// Initialize a rectangle packer to:
+//    pack a rectangle that is 'width' by 'height' in dimensions
+//    using temporary storage provided by the array 'nodes', which is 'num_nodes' long
+//
+// You must call this function every time you start packing into a new target.
+//
+// There is no "shutdown" function. The 'nodes' memory must stay valid for
+// the following stbrp_pack_rects() call (or calls), but can be freed after
+// the call (or calls) finish.
+//
+// Note: to guarantee best results, either:
+//       1. make sure 'num_nodes' >= 'width'
+//   or  2. call stbrp_setup_allow_out_of_mem() defined below with 'allow_out_of_mem = 1'
+//
+// If you don't do either of the above things, widths will be quantized to multiples
+// of small integers to guarantee the algorithm doesn't run out of temporary storage.
+//
+// If you do #2, then the non-quantized algorithm will be used, but the algorithm
+// may run out of temporary storage and be unable to pack some rectangles.
+
+STBRP_DEF void stbrp_setup_allow_out_of_mem (stbrp_context *context, int allow_out_of_mem);
+// Optionally call this function after init but before doing any packing to
+// change the handling of the out-of-temp-memory scenario, described above.
+// If you call init again, this will be reset to the default (false).
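+
+// For orientation, a minimal usage sketch of the API above; the 128x128
+// target and the rectangle sizes are arbitrary, and 'num_nodes' matches the
+// target width so no quantization occurs (see the note above).
+#if 0
+#include <stdio.h>
+#define STB_RECT_PACK_IMPLEMENTATION
+#include "stb_rect_pack.h"
+
+int main(void)
+{
+   stbrp_context ctx;
+   stbrp_node    nodes[128];            // num_nodes >= width
+   stbrp_rect    rects[3] = {
+      { 0, 20, 30, 0, 0, 0 },           // id, w, h, x, y, was_packed
+      { 1, 50, 10, 0, 0, 0 },
+      { 2, 64, 64, 0, 0, 0 },
+   };
+   int i;
+
+   stbrp_init_target(&ctx, 128, 128, nodes, 128);
+   if (!stbrp_pack_rects(&ctx, rects, 3))
+      printf("not all rects fit\n");
+   for (i=0; i < 3; ++i)
+      if (rects[i].was_packed)
+         printf("rect %d -> (%d,%d)\n", rects[i].id, rects[i].x, rects[i].y);
+   return 0;
+}
+#endif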
+
+
+STBRP_DEF void stbrp_setup_heuristic (stbrp_context *context, int heuristic);
+// Optionally select which packing heuristic the library should use. Different
+// heuristics will produce better/worse results for different data sets.
+// If you call init again, this will be reset to the default.
+
+enum
+{
+   STBRP_HEURISTIC_Skyline_default=0,
+   STBRP_HEURISTIC_Skyline_BL_sortHeight = STBRP_HEURISTIC_Skyline_default,
+   STBRP_HEURISTIC_Skyline_BF_sortHeight
+};
+
+
+//////////////////////////////////////////////////////////////////////////////
+//
+// the details of the following structures don't matter to you, but they must
+// be visible so you can handle the memory allocations for them
+
+struct stbrp_node
+{
+   stbrp_coord  x,y;
+   stbrp_node  *next;
+};
+
+struct stbrp_context
+{
+   int width;
+   int height;
+   int align;
+   int init_mode;
+   int heuristic;
+   int num_nodes;
+   stbrp_node *active_head;
+   stbrp_node *free_head;
+   stbrp_node extra[2]; // we allocate two extra nodes so optimal user-node-count is 'width' not 'width+2'
+};
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
+
+//////////////////////////////////////////////////////////////////////////////
+//
+//     IMPLEMENTATION SECTION
+//
+
+#ifdef STB_RECT_PACK_IMPLEMENTATION
+#ifndef STBRP_SORT
+#include <stdlib.h>
+#define STBRP_SORT qsort
+#endif
+
+#ifndef STBRP_ASSERT
+#include <assert.h>
+#define STBRP_ASSERT assert
+#endif
+
+#ifdef _MSC_VER
+#define STBRP__NOTUSED(v)  (void)(v)
+#define STBRP__CDECL __cdecl
+#else
+#define STBRP__NOTUSED(v)  (void)sizeof(v)
+#define STBRP__CDECL
+#endif
+
+enum
+{
+   STBRP__INIT_skyline = 1
+};
+
+STBRP_DEF void stbrp_setup_heuristic(stbrp_context *context, int heuristic)
+{
+   switch (context->init_mode) {
+      case STBRP__INIT_skyline:
+         STBRP_ASSERT(heuristic == STBRP_HEURISTIC_Skyline_BL_sortHeight || heuristic == STBRP_HEURISTIC_Skyline_BF_sortHeight);
+         context->heuristic = heuristic;
+         break;
+      default:
+         STBRP_ASSERT(0);
+   }
+}
+
+STBRP_DEF void stbrp_setup_allow_out_of_mem(stbrp_context *context, int allow_out_of_mem)
+{
+   if (allow_out_of_mem)
+      // if it's ok to run out of memory, then don't bother aligning them;
+      // this gives better packing, but may fail due to OOM (even though
+      // the rectangles easily fit). @TODO a smarter approach would be to only
+      // quantize once we've hit OOM, then we could get rid of this parameter.
+      context->align = 1;
+   else {
+      // if it's not ok to run out of memory, then quantize the widths
+      // so that num_nodes is always enough nodes.
+      //
+      // I.e.
num_nodes * align >= width
+      //                  align >= width / num_nodes
+      //                  align = ceil(width/num_nodes)
+
+      context->align = (context->width + context->num_nodes-1) / context->num_nodes;
+   }
+}
+
+STBRP_DEF void stbrp_init_target(stbrp_context *context, int width, int height, stbrp_node *nodes, int num_nodes)
+{
+   int i;
+
+   for (i=0; i < num_nodes-1; ++i)
+      nodes[i].next = &nodes[i+1];
+   nodes[i].next = NULL;
+   context->init_mode = STBRP__INIT_skyline;
+   context->heuristic = STBRP_HEURISTIC_Skyline_default;
+   context->free_head = &nodes[0];
+   context->active_head = &context->extra[0];
+   context->width = width;
+   context->height = height;
+   context->num_nodes = num_nodes;
+   stbrp_setup_allow_out_of_mem(context, 0);
+
+   // node 0 is the full width, node 1 is the sentinel (lets us not store width explicitly)
+   context->extra[0].x = 0;
+   context->extra[0].y = 0;
+   context->extra[0].next = &context->extra[1];
+   context->extra[1].x = (stbrp_coord) width;
+   context->extra[1].y = (1<<30);
+   context->extra[1].next = NULL;
+}
+
+// find minimum y position if it starts at x1
+static int stbrp__skyline_find_min_y(stbrp_context *c, stbrp_node *first, int x0, int width, int *pwaste)
+{
+   stbrp_node *node = first;
+   int x1 = x0 + width;
+   int min_y, visited_width, waste_area;
+
+   STBRP__NOTUSED(c);
+
+   STBRP_ASSERT(first->x <= x0);
+
+   #if 0
+   // skip in case we're past the node
+   while (node->next->x <= x0)
+      ++node;
+   #else
+   STBRP_ASSERT(node->next->x > x0); // we ended up handling this in the caller for efficiency
+   #endif
+
+   STBRP_ASSERT(node->x <= x0);
+
+   min_y = 0;
+   waste_area = 0;
+   visited_width = 0;
+   while (node->x < x1) {
+      if (node->y > min_y) {
+         // raise min_y higher.
+         // we've accounted for all waste up to min_y,
+         // but we'll now add more waste for everything we've visited
+         waste_area += visited_width * (node->y - min_y);
+         min_y = node->y;
+         // the first time through, visited_width might be reduced
+         if (node->x < x0)
+            visited_width += node->next->x - x0;
+         else
+            visited_width += node->next->x - node->x;
+      } else {
+         // add waste area
+         int under_width = node->next->x - node->x;
+         if (under_width + visited_width > width)
+            under_width = width - visited_width;
+         waste_area += under_width * (min_y - node->y);
+         visited_width += under_width;
+      }
+      node = node->next;
+   }
+
+   *pwaste = waste_area;
+   return min_y;
+}
+
+typedef struct
+{
+   int x,y;
+   stbrp_node **prev_link;
+} stbrp__findresult;
+
+static stbrp__findresult stbrp__skyline_find_best_pos(stbrp_context *c, int width, int height)
+{
+   int best_waste = (1<<30), best_x, best_y = (1 << 30);
+   stbrp__findresult fr;
+   stbrp_node **prev, *node, *tail, **best = NULL;
+
+   // align to multiple of c->align
+   width = (width + c->align - 1);
+   width -= width % c->align;
+   STBRP_ASSERT(width % c->align == 0);
+
+   // if it can't possibly fit, bail immediately
+   if (width > c->width || height > c->height) {
+      fr.prev_link = NULL;
+      fr.x = fr.y = 0;
+      return fr;
+   }
+
+   node = c->active_head;
+   prev = &c->active_head;
+   while (node->x + width <= c->width) {
+      int y,waste;
+      y = stbrp__skyline_find_min_y(c, node, node->x, width, &waste);
+      if (c->heuristic == STBRP_HEURISTIC_Skyline_BL_sortHeight) { // actually just want to test BL
+         // bottom left
+         if (y < best_y) {
+            best_y = y;
+            best = prev;
+         }
+      } else {
+         // best-fit
+         if (y + height <= c->height) {
+            // can only use it if it fits vertically
+            if (y < best_y || (y == best_y && waste < best_waste)) {
+               best_y = y;
+               best_waste = waste;
+               best = prev;
+            }
+         }
+      }
+      prev = &node->next;
+      node = node->next;
+   }
+
+   best_x = (best == NULL) ? 0 : (*best)->x;
+
+   // if doing best-fit (BF), we also have to try aligning right edge to each node position
+   //
+   // e.g., if fitting
+   //
+   //     ____________________
+   //    |____________________|
+   //
+   //            into
+   //
+   //   |                         |
+   //   |             ____________|
+   //   |____________|
+   //
+   // then right-aligned reduces waste, but bottom-left (BL) always chooses left-aligned
+   //
+   // This makes BF take about 2x the time
+
+   if (c->heuristic == STBRP_HEURISTIC_Skyline_BF_sortHeight) {
+      tail = c->active_head;
+      node = c->active_head;
+      prev = &c->active_head;
+      // find first node that's admissible
+      while (tail->x < width)
+         tail = tail->next;
+      while (tail) {
+         int xpos = tail->x - width;
+         int y,waste;
+         STBRP_ASSERT(xpos >= 0);
+         // find the left position that matches this
+         while (node->next->x <= xpos) {
+            prev = &node->next;
+            node = node->next;
+         }
+         STBRP_ASSERT(node->next->x > xpos && node->x <= xpos);
+         y = stbrp__skyline_find_min_y(c, node, xpos, width, &waste);
+         if (y + height <= c->height) {
+            if (y <= best_y) {
+               if (y < best_y || waste < best_waste || (waste==best_waste && xpos < best_x)) {
+                  best_x = xpos;
+                  STBRP_ASSERT(y <= best_y);
+                  best_y = y;
+                  best_waste = waste;
+                  best = prev;
+               }
+            }
+         }
+         tail = tail->next;
+      }
+   }
+
+   fr.prev_link = best;
+   fr.x = best_x;
+   fr.y = best_y;
+   return fr;
+}
+
+static stbrp__findresult stbrp__skyline_pack_rectangle(stbrp_context *context, int width, int height)
+{
+   // find best position according to heuristic
+   stbrp__findresult res = stbrp__skyline_find_best_pos(context, width, height);
+   stbrp_node *node, *cur;
+
+   // bail if:
+   //    1. it failed
+   //    2. the best node doesn't fit (we don't always check this)
+   //    3. we're out of memory
+   if (res.prev_link == NULL || res.y + height > context->height || context->free_head == NULL) {
+      res.prev_link = NULL;
+      return res;
+   }
+
+   // on success, create new node
+   node = context->free_head;
+   node->x = (stbrp_coord) res.x;
+   node->y = (stbrp_coord) (res.y + height);
+
+   context->free_head = node->next;
+
+   // insert the new node into the right starting point, and
+   // let 'cur' point to the remaining nodes needing to be
+   // stitched back in
+
+   cur = *res.prev_link;
+   if (cur->x < res.x) {
+      // preserve the existing one, so start testing with the next one
+      stbrp_node *next = cur->next;
+      cur->next = node;
+      cur = next;
+   } else {
+      *res.prev_link = node;
+   }
+
+   // from here, traverse cur and free the nodes, until we get to one
+   // that shouldn't be freed
+   while (cur->next && cur->next->x <= res.x + width) {
+      stbrp_node *next = cur->next;
+      // move the current node to the free list
+      cur->next = context->free_head;
+      context->free_head = cur;
+      cur = next;
+   }
+
+   // stitch the list back in
+   node->next = cur;
+
+   if (cur->x < res.x + width)
+      cur->x = (stbrp_coord) (res.x + width);
+
+#ifdef _DEBUG
+   cur = context->active_head;
+   while (cur->x < context->width) {
+      STBRP_ASSERT(cur->x < cur->next->x);
+      cur = cur->next;
+   }
+   STBRP_ASSERT(cur->next == NULL);
+
+   {
+      int count=0;
+      cur = context->active_head;
+      while (cur) {
+         cur = cur->next;
+         ++count;
+      }
+      cur = context->free_head;
+      while (cur) {
+         cur = cur->next;
+         ++count;
+      }
+      STBRP_ASSERT(count == context->num_nodes+2);
+   }
+#endif
+
+   return res;
+}
+
+static int STBRP__CDECL rect_height_compare(const void *a, const void *b)
+{
+   const stbrp_rect *p = (const stbrp_rect *) a;
+   const stbrp_rect *q = (const stbrp_rect *) b;
+   if (p->h > q->h)
+      return -1;
+   if (p->h < q->h)
+      return  1;
+   return
(p->w > q->w) ? -1 : (p->w < q->w); +} + +static int STBRP__CDECL rect_original_order(const void *a, const void *b) +{ + const stbrp_rect *p = (const stbrp_rect *) a; + const stbrp_rect *q = (const stbrp_rect *) b; + return (p->was_packed < q->was_packed) ? -1 : (p->was_packed > q->was_packed); +} + +STBRP_DEF int stbrp_pack_rects(stbrp_context *context, stbrp_rect *rects, int num_rects) +{ + int i, all_rects_packed = 1; + + // we use the 'was_packed' field internally to allow sorting/unsorting + for (i=0; i < num_rects; ++i) { + rects[i].was_packed = i; + } + + // sort according to heuristic + STBRP_SORT(rects, num_rects, sizeof(rects[0]), rect_height_compare); + + for (i=0; i < num_rects; ++i) { + if (rects[i].w == 0 || rects[i].h == 0) { + rects[i].x = rects[i].y = 0; // empty rect needs no space + } else { + stbrp__findresult fr = stbrp__skyline_pack_rectangle(context, rects[i].w, rects[i].h); + if (fr.prev_link) { + rects[i].x = (stbrp_coord) fr.x; + rects[i].y = (stbrp_coord) fr.y; + } else { + rects[i].x = rects[i].y = STBRP__MAXVAL; + } + } + } + + // unsort + STBRP_SORT(rects, num_rects, sizeof(rects[0]), rect_original_order); + + // set was_packed flags and all_rects_packed status + for (i=0; i < num_rects; ++i) { + rects[i].was_packed = !(rects[i].x == STBRP__MAXVAL && rects[i].y == STBRP__MAXVAL); + if (!rects[i].was_packed) + all_rects_packed = 0; + } + + // return the all_rects_packed status + return all_rects_packed; +} +#endif + +/* +------------------------------------------------------------------------------ +This software is available under 2 licenses -- choose whichever you prefer. +------------------------------------------------------------------------------ +ALTERNATIVE A - MIT License +Copyright (c) 2017 Sean Barrett +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +------------------------------------------------------------------------------ +ALTERNATIVE B - Public Domain (www.unlicense.org) +This is free and unencumbered software released into the public domain. +Anyone is free to copy, modify, publish, use, compile, sell, or distribute this +software, either in source code form or as a compiled binary, for any purpose, +commercial or non-commercial, and by any means. +In jurisdictions that recognize copyright laws, the author or authors of this +software dedicate any and all copyright interest in the software to the public +domain. We make this dedication for the benefit of the public at large and to +the detriment of our heirs and successors. 
We intend this dedication to be an +overt act of relinquishment in perpetuity of all present and future rights to +this software under copyright law. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +------------------------------------------------------------------------------ +*/ diff --git a/MacroLibX/third_party/stb_truetype.h b/MacroLibX/third_party/stb_truetype.h new file mode 100644 index 0000000..bbf2284 --- /dev/null +++ b/MacroLibX/third_party/stb_truetype.h @@ -0,0 +1,5077 @@ +// stb_truetype.h - v1.26 - public domain +// authored from 2009-2021 by Sean Barrett / RAD Game Tools +// +// ======================================================================= +// +// NO SECURITY GUARANTEE -- DO NOT USE THIS ON UNTRUSTED FONT FILES +// +// This library does no range checking of the offsets found in the file, +// meaning an attacker can use it to read arbitrary memory. +// +// ======================================================================= +// +// This library processes TrueType files: +// parse files +// extract glyph metrics +// extract glyph shapes +// render glyphs to one-channel bitmaps with antialiasing (box filter) +// render glyphs to one-channel SDF bitmaps (signed-distance field/function) +// +// Todo: +// non-MS cmaps +// crashproof on bad data +// hinting? (no longer patented) +// cleartype-style AA? +// optimize: use simple memory allocator for intermediates +// optimize: build edge-list directly from curves +// optimize: rasterize directly from curves? +// +// ADDITIONAL CONTRIBUTORS +// +// Mikko Mononen: compound shape support, more cmap formats +// Tor Andersson: kerning, subpixel rendering +// Dougall Johnson: OpenType / Type 2 font handling +// Daniel Ribeiro Maciel: basic GPOS-based kerning +// +// Misc other: +// Ryan Gordon +// Simon Glass +// github:IntellectualKitty +// Imanol Celaya +// Daniel Ribeiro Maciel +// +// Bug/warning reports/fixes: +// "Zer" on mollyrocket Fabian "ryg" Giesen github:NiLuJe +// Cass Everitt Martins Mozeiko github:aloucks +// stoiko (Haemimont Games) Cap Petschulat github:oyvindjam +// Brian Hook Omar Cornut github:vassvik +// Walter van Niftrik Ryan Griege +// David Gow Peter LaValle +// David Given Sergey Popov +// Ivan-Assen Ivanov Giumo X. Clanjor +// Anthony Pesch Higor Euripedes +// Johan Duparc Thomas Fields +// Hou Qiming Derek Vinyard +// Rob Loach Cort Stratton +// Kenney Phillis Jr. 
Brian Costabile +// Ken Voskuil (kaesve) +// +// VERSION HISTORY +// +// 1.26 (2021-08-28) fix broken rasterizer +// 1.25 (2021-07-11) many fixes +// 1.24 (2020-02-05) fix warning +// 1.23 (2020-02-02) query SVG data for glyphs; query whole kerning table (but only kern not GPOS) +// 1.22 (2019-08-11) minimize missing-glyph duplication; fix kerning if both 'GPOS' and 'kern' are defined +// 1.21 (2019-02-25) fix warning +// 1.20 (2019-02-07) PackFontRange skips missing codepoints; GetScaleFontVMetrics() +// 1.19 (2018-02-11) GPOS kerning, STBTT_fmod +// 1.18 (2018-01-29) add missing function +// 1.17 (2017-07-23) make more arguments const; doc fix +// 1.16 (2017-07-12) SDF support +// 1.15 (2017-03-03) make more arguments const +// 1.14 (2017-01-16) num-fonts-in-TTC function +// 1.13 (2017-01-02) support OpenType fonts, certain Apple fonts +// 1.12 (2016-10-25) suppress warnings about casting away const with -Wcast-qual +// 1.11 (2016-04-02) fix unused-variable warning +// 1.10 (2016-04-02) user-defined fabs(); rare memory leak; remove duplicate typedef +// 1.09 (2016-01-16) warning fix; avoid crash on outofmem; use allocation userdata properly +// 1.08 (2015-09-13) document stbtt_Rasterize(); fixes for vertical & horizontal edges +// 1.07 (2015-08-01) allow PackFontRanges to accept arrays of sparse codepoints; +// variant PackFontRanges to pack and render in separate phases; +// fix stbtt_GetFontOFfsetForIndex (never worked for non-0 input?); +// fixed an assert() bug in the new rasterizer +// replace assert() with STBTT_assert() in new rasterizer +// +// Full history can be found at the end of this file. +// +// LICENSE +// +// See end of file for license information. +// +// USAGE +// +// Include this file in whatever places need to refer to it. In ONE C/C++ +// file, write: +// #define STB_TRUETYPE_IMPLEMENTATION +// before the #include of this file. This expands out the actual +// implementation into that C/C++ file. +// +// To make the implementation private to the file that generates the implementation, +// #define STBTT_STATIC +// +// Simple 3D API (don't ship this, but it's fine for tools and quick start) +// stbtt_BakeFontBitmap() -- bake a font to a bitmap for use as texture +// stbtt_GetBakedQuad() -- compute quad to draw for a given char +// +// Improved 3D API (more shippable): +// #include "stb_rect_pack.h" -- optional, but you really want it +// stbtt_PackBegin() +// stbtt_PackSetOversampling() -- for improved quality on small fonts +// stbtt_PackFontRanges() -- pack and renders +// stbtt_PackEnd() +// stbtt_GetPackedQuad() +// +// "Load" a font file from a memory buffer (you have to keep the buffer loaded) +// stbtt_InitFont() +// stbtt_GetFontOffsetForIndex() -- indexing for TTC font collections +// stbtt_GetNumberOfFonts() -- number of fonts for TTC font collections +// +// Render a unicode codepoint to a bitmap +// stbtt_GetCodepointBitmap() -- allocates and returns a bitmap +// stbtt_MakeCodepointBitmap() -- renders into bitmap you provide +// stbtt_GetCodepointBitmapBox() -- how big the bitmap must be +// +// Character advance/positioning +// stbtt_GetCodepointHMetrics() +// stbtt_GetFontVMetrics() +// stbtt_GetFontVMetricsOS2() +// stbtt_GetCodepointKernAdvance() +// +// Starting with version 1.06, the rasterizer was replaced with a new, +// faster and generally-more-precise rasterizer. 
The new rasterizer more +// accurately measures pixel coverage for anti-aliasing, except in the case +// where multiple shapes overlap, in which case it overestimates the AA pixel +// coverage. Thus, anti-aliasing of intersecting shapes may look wrong. If +// this turns out to be a problem, you can re-enable the old rasterizer with +// #define STBTT_RASTERIZER_VERSION 1 +// which will incur about a 15% speed hit. +// +// ADDITIONAL DOCUMENTATION +// +// Immediately after this block comment are a series of sample programs. +// +// After the sample programs is the "header file" section. This section +// includes documentation for each API function. +// +// Some important concepts to understand to use this library: +// +// Codepoint +// Characters are defined by unicode codepoints, e.g. 65 is +// uppercase A, 231 is lowercase c with a cedilla, 0x7e30 is +// the hiragana for "ma". +// +// Glyph +// A visual character shape (every codepoint is rendered as +// some glyph) +// +// Glyph index +// A font-specific integer ID representing a glyph +// +// Baseline +// Glyph shapes are defined relative to a baseline, which is the +// bottom of uppercase characters. Characters extend both above +// and below the baseline. +// +// Current Point +// As you draw text to the screen, you keep track of a "current point" +// which is the origin of each character. The current point's vertical +// position is the baseline. Even "baked fonts" use this model. +// +// Vertical Font Metrics +// The vertical qualities of the font, used to vertically position +// and space the characters. See docs for stbtt_GetFontVMetrics. +// +// Font Size in Pixels or Points +// The preferred interface for specifying font sizes in stb_truetype +// is to specify how tall the font's vertical extent should be in pixels. +// If that sounds good enough, skip the next paragraph. +// +// Most font APIs instead use "points", which are a common typographic +// measurement for describing font size, defined as 72 points per inch. +// stb_truetype provides a point API for compatibility. However, true +// "per inch" conventions don't make much sense on computer displays +// since different monitors have different number of pixels per +// inch. For example, Windows traditionally uses a convention that +// there are 96 pixels per inch, thus making 'inch' measurements have +// nothing to do with inches, and thus effectively defining a point to +// be 1.333 pixels. Additionally, the TrueType font data provides +// an explicit scale factor to scale a given font's glyphs to points, +// but the author has observed that this scale factor is often wrong +// for non-commercial fonts, thus making fonts scaled in points +// according to the TrueType spec incoherently sized in practice. +// +// DETAILED USAGE: +// +// Scale: +// Select how high you want the font to be, in points or pixels. +// Call ScaleForPixelHeight or ScaleForMappingEmToPixels to compute +// a scale factor SF that will be used by all other functions. +// +// Baseline: +// You need to select a y-coordinate that is the baseline of where +// your text will appear. Call GetFontBoundingBox to get the baseline-relative +// bounding box for all characters. SF*-y0 will be the distance in pixels +// that the worst-case character could extend above the baseline, so if +// you want the top edge of characters to appear at the top of the +// screen where y=0, then you would set the baseline to SF*-y0. +// +// Current point: +// Set the current point where the first character will appear. 
The
+// first character could extend left of the current point; this is font
+// dependent. You can either choose a current point that is the leftmost
+// point and hope, or add some padding, or check the bounding box or
+// left-side-bearing of the first character to be displayed and set
+// the current point based on that.
+//
+//   Displaying a character:
+//       Compute the bounding box of the character. It will contain signed values
+//       relative to <current_point, baseline>. I.e. if it returns x0,y0,x1,y1,
+//       then the character should be displayed in the rectangle from
+//       <current_point+SF*x0, baseline+SF*y0> to <current_point+SF*x1, baseline+SF*y1>.
+//
+//   Advancing for the next character:
+//       Call GlyphHMetrics, and compute 'current_point += SF * advance'.
+//
+//
+// ADVANCED USAGE
+//
+//   Quality:
+//
+//    - Use the functions with Subpixel at the end to allow your characters
+//      to have subpixel positioning. Since the font is anti-aliased, not
+//      hinted, this is very important for quality. (This is not possible with
+//      baked fonts.)
+//
+//    - Kerning is now supported, and if you're supporting subpixel rendering
+//      then kerning is worth using to give your text a polished look.
+//
+//   Performance:
+//
+//    - Convert Unicode codepoints to glyph indexes and operate on the glyphs;
+//      if you don't do this, stb_truetype is forced to do the conversion on
+//      every call.
+//
+//    - There are a lot of memory allocations. We should modify it to take
+//      a temp buffer and allocate from the temp buffer (without freeing),
+//      should help performance a lot.
+//
+// NOTES
+//
+//   The system uses the raw data found in the .ttf file without changing it
+//   and without building auxiliary data structures. This is a bit inefficient
+//   on little-endian systems (the data is big-endian), but assuming you're
+//   caching the bitmaps or glyph shapes this shouldn't be a big deal.
+//
+//   It appears to be very hard to programmatically determine what font a
+//   given file is in a general way. I provide an API for this, but I don't
+//   recommend it.
+//
+//
+// PERFORMANCE MEASUREMENTS FOR 1.06:
+//
+//                      32-bit     64-bit
+//   Previous release:  8.83 s     7.68 s
+//   Pool of 100 fonts: 4.27 s     3.99 s
+//   Single font:       3.92 s     3.95 s
+//
+//
+//////////////////////////////////////////////////////////////////////////////
+//////////////////////////////////////////////////////////////////////////////
+////
+////  SAMPLE PROGRAMS
+////
+//
+//  Incomplete text-in-3d-api example, which draws quads properly aligned to be lossless.
+//  See "tests/truetype_demo_win32.c" for a complete version.
+#if 0
+#define STB_TRUETYPE_IMPLEMENTATION  // force following include to generate implementation
+#include "stb_truetype.h"
+
+unsigned char ttf_buffer[1<<20];
+unsigned char temp_bitmap[512*512];
+
+stbtt_bakedchar cdata[96]; // ASCII 32..126 is 95 glyphs
+GLuint ftex;
+
+void my_stbtt_initfont(void)
+{
+   fread(ttf_buffer, 1, 1<<20, fopen("c:/windows/fonts/times.ttf", "rb"));
+   stbtt_BakeFontBitmap(ttf_buffer,0, 32.0, temp_bitmap,512,512, 32,96, cdata); // no guarantee this fits!
+   // can free ttf_buffer at this point
+   glGenTextures(1, &ftex);
+   glBindTexture(GL_TEXTURE_2D, ftex);
+   glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, 512,512, 0, GL_ALPHA, GL_UNSIGNED_BYTE, temp_bitmap);
+   // can free temp_bitmap at this point
+   glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+}
+
+void my_stbtt_print(float x, float y, char *text)
+{
+   // assume orthographic projection with units = screen pixels, origin at top left
+   glEnable(GL_BLEND);
+   glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
+   glEnable(GL_TEXTURE_2D);
+   glBindTexture(GL_TEXTURE_2D, ftex);
+   glBegin(GL_QUADS);
+   while (*text) {
+      if (*text >= 32 && *text < 128) {
+         stbtt_aligned_quad q;
+         stbtt_GetBakedQuad(cdata, 512,512, *text-32, &x,&y,&q,1);//1=opengl & d3d10+,0=d3d9
+         glTexCoord2f(q.s0,q.t0); glVertex2f(q.x0,q.y0);
+         glTexCoord2f(q.s1,q.t0); glVertex2f(q.x1,q.y0);
+         glTexCoord2f(q.s1,q.t1); glVertex2f(q.x1,q.y1);
+         glTexCoord2f(q.s0,q.t1); glVertex2f(q.x0,q.y1);
+      }
+      ++text;
+   }
+   glEnd();
+}
+#endif
+//
+//
+//////////////////////////////////////////////////////////////////////////////
+//
+// Complete program (this compiles): get a single bitmap, print as ASCII art
+//
+#if 0
+#include <stdio.h>
+#define STB_TRUETYPE_IMPLEMENTATION  // force following include to generate implementation
+#include "stb_truetype.h"
+
+char ttf_buffer[1<<25];
+
+int main(int argc, char **argv)
+{
+   stbtt_fontinfo font;
+   unsigned char *bitmap;
+   int w,h,i,j,c = (argc > 1 ? atoi(argv[1]) : 'a'), s = (argc > 2 ? atoi(argv[2]) : 20);
+
+   fread(ttf_buffer, 1, 1<<25, fopen(argc > 3 ? argv[3] : "c:/windows/fonts/arialbd.ttf", "rb"));
+
+   stbtt_InitFont(&font, ttf_buffer, stbtt_GetFontOffsetForIndex(ttf_buffer,0));
+   bitmap = stbtt_GetCodepointBitmap(&font, 0,stbtt_ScaleForPixelHeight(&font, s), c, &w, &h, 0,0);
+
+   for (j=0; j < h; ++j) {
+      for (i=0; i < w; ++i)
+         putchar(" .:ioVM@"[bitmap[j*w+i]>>5]);
+      putchar('\n');
+   }
+   return 0;
+}
+#endif
+//
+// Output:
+//
+//     .ii.
+//    @@@@@@.
+//   V@Mio@@o
+//   :i.  V@V
+//     :oM@@M
+//   :@@@MM@M
+//   @@o  o@M
+//  :@@.  M@M
+//   @@@o@@@@
+//   :M@@V:@@.
+//
+//////////////////////////////////////////////////////////////////////////////
+//
+// Complete program: print "Hello World!" banner, with bugs
+//
+#if 0
+char buffer[24<<20];
+unsigned char screen[20][79];
+
+int main(int arg, char **argv)
+{
+   stbtt_fontinfo font;
+   int i,j,ascent,baseline,ch=0;
+   float scale, xpos=2; // leave a little padding in case the character extends left
+   char *text = "Heljo World!"; // intentionally misspelled to show 'lj' brokenness
+
+   fread(buffer, 1, 1000000, fopen("c:/windows/fonts/arialbd.ttf", "rb"));
+   stbtt_InitFont(&font, buffer, 0);
+
+   scale = stbtt_ScaleForPixelHeight(&font, 15);
+   stbtt_GetFontVMetrics(&font, &ascent,0,0);
+   baseline = (int) (ascent*scale);
+
+   while (text[ch]) {
+      int advance,lsb,x0,y0,x1,y1;
+      float x_shift = xpos - (float) floor(xpos);
+      stbtt_GetCodepointHMetrics(&font, text[ch], &advance, &lsb);
+      stbtt_GetCodepointBitmapBoxSubpixel(&font, text[ch], scale,scale,x_shift,0, &x0,&y0,&x1,&y1);
+      stbtt_MakeCodepointBitmapSubpixel(&font, &screen[baseline + y0][(int) xpos + x0], x1-x0,y1-y0, 79, scale,scale,x_shift,0, text[ch]);
+      // note that this stomps the old data, so where character boxes overlap (e.g. 'lj') it's wrong
+      // because this API is really for baking character bitmaps into textures.
if you want to render
+      // a sequence of characters, you really need to render each bitmap to a temp buffer, then
+      // "alpha blend" that into the working buffer
+      xpos += (advance * scale);
+      if (text[ch+1])
+         xpos += scale*stbtt_GetCodepointKernAdvance(&font, text[ch],text[ch+1]);
+      ++ch;
+   }
+
+   for (j=0; j < 20; ++j) {
+      for (i=0; i < 78; ++i)
+         putchar(" .:ioVM@"[screen[j][i]>>5]);
+      putchar('\n');
+   }
+
+   return 0;
+}
+#endif
+
+
+//////////////////////////////////////////////////////////////////////////////
+//////////////////////////////////////////////////////////////////////////////
+////
+////   INTEGRATION WITH YOUR CODEBASE
+////
+////   The following sections allow you to supply alternate definitions
+////   of C library functions used by stb_truetype, e.g. if you don't
+////   link with the C runtime library.
+
+#ifdef STB_TRUETYPE_IMPLEMENTATION
+   // #define your own (u)stbtt_int8/16/32 before including to override this
+   #ifndef stbtt_uint8
+   typedef unsigned char   stbtt_uint8;
+   typedef signed   char   stbtt_int8;
+   typedef unsigned short  stbtt_uint16;
+   typedef signed   short  stbtt_int16;
+   typedef unsigned int    stbtt_uint32;
+   typedef signed   int    stbtt_int32;
+   #endif
+
+   typedef char stbtt__check_size32[sizeof(stbtt_int32)==4 ? 1 : -1];
+   typedef char stbtt__check_size16[sizeof(stbtt_int16)==2 ? 1 : -1];
+
+   // e.g. #define your own STBTT_ifloor/STBTT_iceil() to avoid math.h
+   #ifndef STBTT_ifloor
+   #include <math.h>
+   #define STBTT_ifloor(x)   ((int) floor(x))
+   #define STBTT_iceil(x)    ((int) ceil(x))
+   #endif
+
+   #ifndef STBTT_sqrt
+   #include <math.h>
+   #define STBTT_sqrt(x)      sqrt(x)
+   #define STBTT_pow(x,y)     pow(x,y)
+   #endif
+
+   #ifndef STBTT_fmod
+   #include <math.h>
+   #define STBTT_fmod(x,y)    fmod(x,y)
+   #endif
+
+   #ifndef STBTT_cos
+   #include <math.h>
+   #define STBTT_cos(x)       cos(x)
+   #define STBTT_acos(x)      acos(x)
+   #endif
+
+   #ifndef STBTT_fabs
+   #include <math.h>
+   #define STBTT_fabs(x)      fabs(x)
+   #endif
+
+   // #define your own functions "STBTT_malloc" / "STBTT_free" to avoid malloc.h
+   #ifndef STBTT_malloc
+   #include <stdlib.h>
+   #define STBTT_malloc(x,u)  ((void)(u),malloc(x))
+   #define STBTT_free(x,u)    ((void)(u),free(x))
+   #endif
+
+   #ifndef STBTT_assert
+   #include <assert.h>
+   #define STBTT_assert(x)    assert(x)
+   #endif
+
+   #ifndef STBTT_strlen
+   #include <string.h>
+   #define STBTT_strlen(x)    strlen(x)
+   #endif
+
+   #ifndef STBTT_memcpy
+   #include <string.h>
+   #define STBTT_memcpy       memcpy
+   #define STBTT_memset       memset
+   #endif
+#endif
+
+///////////////////////////////////////////////////////////////////////////////
+///////////////////////////////////////////////////////////////////////////////
+////
+////   INTERFACE
+////
+////
+
+#ifndef __STB_INCLUDE_STB_TRUETYPE_H__
+#define __STB_INCLUDE_STB_TRUETYPE_H__
+
+#ifdef STBTT_STATIC
+#define STBTT_DEF static
+#else
+#define STBTT_DEF extern
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// private structure
+typedef struct
+{
+   unsigned char *data;
+   int cursor;
+   int size;
+} stbtt__buf;
+
+//////////////////////////////////////////////////////////////////////////////
+//
+// TEXTURE BAKING API
+//
+// If you use this API, you only have to call two functions ever.
+// + +typedef struct +{ + unsigned short x0,y0,x1,y1; // coordinates of bbox in bitmap + float xoff,yoff,xadvance; +} stbtt_bakedchar; + +STBTT_DEF int stbtt_BakeFontBitmap(const unsigned char *data, int offset, // font location (use offset=0 for plain .ttf) + float pixel_height, // height of font in pixels + unsigned char *pixels, int pw, int ph, // bitmap to be filled in + int first_char, int num_chars, // characters to bake + stbtt_bakedchar *chardata); // you allocate this, it's num_chars long +// if return is positive, the first unused row of the bitmap +// if return is negative, returns the negative of the number of characters that fit +// if return is 0, no characters fit and no rows were used +// This uses a very crappy packing. + +typedef struct +{ + float x0,y0,s0,t0; // top-left + float x1,y1,s1,t1; // bottom-right +} stbtt_aligned_quad; + +STBTT_DEF void stbtt_GetBakedQuad(const stbtt_bakedchar *chardata, int pw, int ph, // same data as above + int char_index, // character to display + float *xpos, float *ypos, // pointers to current position in screen pixel space + stbtt_aligned_quad *q, // output: quad to draw + int opengl_fillrule); // true if opengl fill rule; false if DX9 or earlier +// Call GetBakedQuad with char_index = 'character - first_char', and it +// creates the quad you need to draw and advances the current position. +// +// The coordinate system used assumes y increases downwards. +// +// Characters will extend both above and below the current position; +// see discussion of "BASELINE" above. +// +// It's inefficient; you might want to c&p it and optimize it. + +STBTT_DEF void stbtt_GetScaledFontVMetrics(const unsigned char *fontdata, int index, float size, float *ascent, float *descent, float *lineGap); +// Query the font vertical metrics without having to create a font first. + + +////////////////////////////////////////////////////////////////////////////// +// +// NEW TEXTURE BAKING API +// +// This provides options for packing multiple fonts into one atlas, not +// perfectly but better than nothing. + +typedef struct +{ + unsigned short x0,y0,x1,y1; // coordinates of bbox in bitmap + float xoff,yoff,xadvance; + float xoff2,yoff2; +} stbtt_packedchar; + +typedef struct stbtt_pack_context stbtt_pack_context; +typedef struct stbtt_fontinfo stbtt_fontinfo; +#ifndef STB_RECT_PACK_VERSION +typedef struct stbrp_rect stbrp_rect; +#endif + +STBTT_DEF int stbtt_PackBegin(stbtt_pack_context *spc, unsigned char *pixels, int width, int height, int stride_in_bytes, int padding, void *alloc_context); +// Initializes a packing context stored in the passed-in stbtt_pack_context. +// Future calls using this context will pack characters into the bitmap passed +// in here: a 1-channel bitmap that is width * height. stride_in_bytes is +// the distance from one row to the next (or 0 to mean they are packed tightly +// together). "padding" is the amount of padding to leave between each +// character (normally you want '1' for bitmaps you'll use as textures with +// bilinear filtering). +// +// Returns 0 on failure, 1 on success. + +STBTT_DEF void stbtt_PackEnd (stbtt_pack_context *spc); +// Cleans up the packing context and frees all memory. 
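+
+// As a sketch of the stbtt_BakeFontBitmap() return convention documented
+// above ('ttf', 'pixels', and 'cdata' assumed allocated elsewhere):
+#if 0
+int res = stbtt_BakeFontBitmap(ttf, 0, 32.0f, pixels, 512,512, 32,96, cdata);
+if (res > 0) {
+   // everything fit; rows [res..512) of 'pixels' were never written
+} else if (res < 0) {
+   // only the first -res characters fit; bake the rest into another bitmap
+} else {
+   // res == 0: no characters fit and no rows were used
+}
+#endif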
+
+#define STBTT_POINT_SIZE(x)   (-(x))
+
+STBTT_DEF int  stbtt_PackFontRange(stbtt_pack_context *spc, const unsigned char *fontdata, int font_index, float font_size,
+                                int first_unicode_char_in_range, int num_chars_in_range, stbtt_packedchar *chardata_for_range);
+// Creates character bitmaps from the font_index'th font found in fontdata (use
+// font_index=0 if you don't know what that is). It creates num_chars_in_range
+// bitmaps for characters with unicode values starting at first_unicode_char_in_range
+// and increasing. Data for how to render them is stored in chardata_for_range;
+// pass these to stbtt_GetPackedQuad to get back renderable quads.
+//
+// font_size is the full height of the character from ascender to descender,
+// as computed by stbtt_ScaleForPixelHeight. To use a point size as computed
+// by stbtt_ScaleForMappingEmToPixels, wrap the point size in STBTT_POINT_SIZE()
+// and pass that result as 'font_size':
+//       ...,                  20 , ... // font max minus min y is 20 pixels tall
+//       ..., STBTT_POINT_SIZE(20), ... // 'M' is 20 pixels tall
+
+typedef struct
+{
+   float font_size;
+   int first_unicode_codepoint_in_range;  // if non-zero, then the chars are continuous, and this is the first codepoint
+   int *array_of_unicode_codepoints;      // if non-zero, then this is an array of unicode codepoints
+   int num_chars;
+   stbtt_packedchar *chardata_for_range;  // output
+   unsigned char h_oversample, v_oversample; // don't set these, they're used internally
+} stbtt_pack_range;
+
+STBTT_DEF int  stbtt_PackFontRanges(stbtt_pack_context *spc, const unsigned char *fontdata, int font_index, stbtt_pack_range *ranges, int num_ranges);
+// Creates character bitmaps from multiple ranges of characters stored in
+// ranges. This will usually create a better-packed bitmap than multiple
+// calls to stbtt_PackFontRange. Note that you can call this multiple
+// times within a single PackBegin/PackEnd.
+
+STBTT_DEF void stbtt_PackSetOversampling(stbtt_pack_context *spc, unsigned int h_oversample, unsigned int v_oversample);
+// Oversampling a font increases the quality by allowing higher-quality subpixel
+// positioning, and is especially valuable at smaller text sizes.
+//
+// This function sets the amount of oversampling for all following calls to
+// stbtt_PackFontRange(s) or stbtt_PackFontRangesGatherRects for a given
+// pack context. The default (no oversampling) is achieved by h_oversample=1
+// and v_oversample=1. The total number of pixels required is
+// h_oversample*v_oversample larger than the default; for example, 2x2
+// oversampling requires 4x the storage of 1x1. For best results, render
+// oversampled textures with bilinear filtering. Look at the readme in
+// stb/tests/oversample for information about oversampled fonts
+//
+// To use with PackFontRangesGather etc., you must set it before calls to
+// PackFontRangesGatherRects.
+
+STBTT_DEF void stbtt_PackSetSkipMissingCodepoints(stbtt_pack_context *spc, int skip);
+// If skip != 0, this tells stb_truetype to skip any codepoints for which
+// there is no corresponding glyph. If skip=0, which is the default, then
+// codepoints without a glyph receive the font's "missing character" glyph,
+// typically an empty box by convention.
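+
+// A minimal end-to-end sketch of the packing API above (atlas size, font
+// path, and the ASCII range are arbitrary assumptions):
+#if 0
+#include <stdio.h>
+#define STB_TRUETYPE_IMPLEMENTATION
+#include "stb_truetype.h"
+
+unsigned char ttf[1<<20];
+unsigned char atlas[512*512];   // 1-channel atlas bitmap
+stbtt_packedchar chars[95];     // ASCII 32..126
+
+int main(void)
+{
+   stbtt_pack_context spc;
+   fread(ttf, 1, 1<<20, fopen("c:/windows/fonts/arialbd.ttf", "rb"));
+   if (!stbtt_PackBegin(&spc, atlas, 512, 512, 0, 1, NULL))
+      return 1;
+   stbtt_PackSetOversampling(&spc, 2, 2);              // 2x2 oversampling
+   stbtt_PackFontRange(&spc, ttf, 0, 24.0f, 32, 95, chars);
+   stbtt_PackEnd(&spc);
+   // 'atlas' can now be uploaded as a texture; draw with stbtt_GetPackedQuad.
+   return 0;
+}
+#endif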
+
+STBTT_DEF void stbtt_GetPackedQuad(const stbtt_packedchar *chardata, int pw, int ph,  // same data as above
+                               int char_index,             // character to display
+                               float *xpos, float *ypos,   // pointers to current position in screen pixel space
+                               stbtt_aligned_quad *q,      // output: quad to draw
+                               int align_to_integer);
+
+STBTT_DEF int  stbtt_PackFontRangesGatherRects(stbtt_pack_context *spc, const stbtt_fontinfo *info, stbtt_pack_range *ranges, int num_ranges, stbrp_rect *rects);
+STBTT_DEF void stbtt_PackFontRangesPackRects(stbtt_pack_context *spc, stbrp_rect *rects, int num_rects);
+STBTT_DEF int  stbtt_PackFontRangesRenderIntoRects(stbtt_pack_context *spc, const stbtt_fontinfo *info, stbtt_pack_range *ranges, int num_ranges, stbrp_rect *rects);
+// Calling these functions in sequence is roughly equivalent to calling
+// stbtt_PackFontRanges(). If you want more control over the packing of multiple
+// fonts, or if you want to pack custom data into a font texture, take a look
+// at the source of stbtt_PackFontRanges() and create a custom version
+// using these functions, e.g. call GatherRects multiple times,
+// building up a single array of rects, then call PackRects once,
+// then call RenderIntoRects repeatedly. This may result in a
+// better packing than calling PackFontRanges multiple times
+// (or it may not).
+
+// this is an opaque structure that you shouldn't mess with which holds
+// all the context needed from PackBegin to PackEnd.
+struct stbtt_pack_context {
+   void *user_allocator_context;
+   void *pack_info;
+   int   width;
+   int   height;
+   int   stride_in_bytes;
+   int   padding;
+   int   skip_missing;
+   unsigned int   h_oversample, v_oversample;
+   unsigned char *pixels;
+   void  *nodes;
+};
+
+//////////////////////////////////////////////////////////////////////////////
+//
+// FONT LOADING
+//
+//
+
+STBTT_DEF int stbtt_GetNumberOfFonts(const unsigned char *data);
+// This function will determine the number of fonts in a font file. TrueType
+// collection (.ttc) files may contain multiple fonts, while TrueType font
+// (.ttf) files only contain one font. The number of fonts can be used for
+// indexing with the following function, where the index is between zero and one
+// less than the total fonts. If an error occurs, -1 is returned.
+
+STBTT_DEF int stbtt_GetFontOffsetForIndex(const unsigned char *data, int index);
+// Each .ttf/.ttc file may have more than one font. Each font has a sequential
+// index number starting from 0. Call this function to get the font offset for
+// a given index; it returns -1 if the index is out of range. A regular .ttf
+// file will only define one font and it will always be at offset 0, so it will
+// return '0' for index 0, and -1 for all other indices.
+
+// The following structure is defined publicly so you can declare one on
+// the stack or as a global or etc, but you should treat it as opaque.
+struct stbtt_fontinfo
+{
+   void           * userdata;
+   unsigned char  * data;             // pointer to .ttf file
+   int              fontstart;        // offset of start of font
+
+   int numGlyphs;                     // number of glyphs, needed for range checking
+
+   int loca,head,glyf,hhea,hmtx,kern,gpos,svg; // table locations as offset from start of .ttf
+   int index_map;                     // a cmap mapping for our chosen character encoding
+   int indexToLocFormat;              // format needed to map from glyph index to glyph
+
+   stbtt__buf cff;                    // cff font data
+   stbtt__buf charstrings;            // the charstring index
+   stbtt__buf gsubrs;                 // global charstring subroutines index
+   stbtt__buf subrs;                  // private charstring subroutines index
+   stbtt__buf fontdicts;              // array of font dicts
+   stbtt__buf fdselect;               // map from glyph to fontdict
+};
+
+STBTT_DEF int stbtt_InitFont(stbtt_fontinfo *info, const unsigned char *data, int offset);
+// Given an offset into the file that defines a font, this function builds
+// the necessary cached info for the rest of the system. You must allocate
+// the stbtt_fontinfo yourself, and stbtt_InitFont will fill it out. You don't
+// need to do anything special to free it, because the contents are pure
+// value data with no additional data structures. Returns 0 on failure.
+
+
+//////////////////////////////////////////////////////////////////////////////
+//
+// CHARACTER TO GLYPH-INDEX CONVERSION
+
+STBTT_DEF int stbtt_FindGlyphIndex(const stbtt_fontinfo *info, int unicode_codepoint);
+// If you're going to perform multiple operations on the same character
+// and you want a speed-up, call this function with the character you're
+// going to process, then use glyph-based functions instead of the
+// codepoint-based functions.
+// Returns 0 if the character codepoint is not defined in the font.
+
+
+//////////////////////////////////////////////////////////////////////////////
+//
+// CHARACTER PROPERTIES
+//
+
+STBTT_DEF float stbtt_ScaleForPixelHeight(const stbtt_fontinfo *info, float pixels);
+// computes a scale factor to produce a font whose "height" is 'pixels' tall.
+// Height is measured as the distance from the highest ascender to the lowest
+// descender; in other words, it's equivalent to calling stbtt_GetFontVMetrics
+// and computing:
+//       scale = pixels / (ascent - descent)
+// so if you prefer to measure height by the ascent only, use a similar calculation.
+
+STBTT_DEF float stbtt_ScaleForMappingEmToPixels(const stbtt_fontinfo *info, float pixels);
+// computes a scale factor to produce a font whose EM size is mapped to
+// 'pixels' tall. This is probably what traditional APIs compute, but
+// I'm not positive.
+
+STBTT_DEF void stbtt_GetFontVMetrics(const stbtt_fontinfo *info, int *ascent, int *descent, int *lineGap);
+// ascent is the coordinate above the baseline the font extends; descent
+// is the coordinate below the baseline the font extends (i.e. it is typically negative)
+// lineGap is the spacing between one row's descent and the next row's ascent...
+// so you should advance the vertical position by "*ascent - *descent + *lineGap"
+// these are expressed in unscaled coordinates, so you must multiply by
+// the scale factor for a given size
+
+STBTT_DEF int  stbtt_GetFontVMetricsOS2(const stbtt_fontinfo *info, int *typoAscent, int *typoDescent, int *typoLineGap);
+// analogous to GetFontVMetrics, but returns the "typographic" values from the OS/2
+// table (specific to MS/Windows TTF files).
+//
+// Returns 1 on success (table present), 0 on failure.
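+
+// A short sketch tying the functions above together (the buffer size and
+// font path are arbitrary assumptions):
+#if 0
+static unsigned char ttf_buffer[1<<20];
+
+void metrics_example(void)
+{
+   stbtt_fontinfo font;
+   int ascent, descent, lineGap;
+   float scale, line_advance;
+
+   fread(ttf_buffer, 1, 1<<20, fopen("c:/windows/fonts/arialbd.ttf", "rb"));
+   if (!stbtt_InitFont(&font, ttf_buffer, stbtt_GetFontOffsetForIndex(ttf_buffer,0)))
+      return; // not a font we can use
+
+   scale = stbtt_ScaleForPixelHeight(&font, 20.0f);   // 20px from ascent to descent
+   stbtt_GetFontVMetrics(&font, &ascent, &descent, &lineGap);
+   // move the baseline down by this much between rows of text:
+   line_advance = scale * (ascent - descent + lineGap);
+}
+#endif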
+ +STBTT_DEF void stbtt_GetFontBoundingBox(const stbtt_fontinfo *info, int *x0, int *y0, int *x1, int *y1); +// the bounding box around all possible characters + +STBTT_DEF void stbtt_GetCodepointHMetrics(const stbtt_fontinfo *info, int codepoint, int *advanceWidth, int *leftSideBearing); +// leftSideBearing is the offset from the current horizontal position to the left edge of the character +// advanceWidth is the offset from the current horizontal position to the next horizontal position +// these are expressed in unscaled coordinates + +STBTT_DEF int stbtt_GetCodepointKernAdvance(const stbtt_fontinfo *info, int ch1, int ch2); +// an additional amount to add to the 'advance' value between ch1 and ch2 + +STBTT_DEF int stbtt_GetCodepointBox(const stbtt_fontinfo *info, int codepoint, int *x0, int *y0, int *x1, int *y1); +// Gets the bounding box of the visible part of the glyph, in unscaled coordinates + +STBTT_DEF void stbtt_GetGlyphHMetrics(const stbtt_fontinfo *info, int glyph_index, int *advanceWidth, int *leftSideBearing); +STBTT_DEF int stbtt_GetGlyphKernAdvance(const stbtt_fontinfo *info, int glyph1, int glyph2); +STBTT_DEF int stbtt_GetGlyphBox(const stbtt_fontinfo *info, int glyph_index, int *x0, int *y0, int *x1, int *y1); +// as above, but takes one or more glyph indices for greater efficiency + +typedef struct stbtt_kerningentry +{ + int glyph1; // use stbtt_FindGlyphIndex + int glyph2; + int advance; +} stbtt_kerningentry; + +STBTT_DEF int stbtt_GetKerningTableLength(const stbtt_fontinfo *info); +STBTT_DEF int stbtt_GetKerningTable(const stbtt_fontinfo *info, stbtt_kerningentry* table, int table_length); +// Retrieves a complete list of all of the kerning pairs provided by the font +// stbtt_GetKerningTable never writes more than table_length entries and returns how many entries it did write. +// The table will be sorted by (a.glyph1 == b.glyph1)?(a.glyph2 < b.glyph2):(a.glyph1 < b.glyph1) + +////////////////////////////////////////////////////////////////////////////// +// +// GLYPH SHAPES (you probably don't need these, but they have to go before +// the bitmaps for C declaration-order reasons) +// + +#ifndef STBTT_vmove // you can predefine these to use different values (but why?) + enum { + STBTT_vmove=1, + STBTT_vline, + STBTT_vcurve, + STBTT_vcubic + }; +#endif + +#ifndef stbtt_vertex // you can predefine this to use different values + // (we share this with other code at RAD) + #define stbtt_vertex_type short // can't use stbtt_int16 because that's not visible in the header file + typedef struct + { + stbtt_vertex_type x,y,cx,cy,cx1,cy1; + unsigned char type,padding; + } stbtt_vertex; +#endif + +STBTT_DEF int stbtt_IsGlyphEmpty(const stbtt_fontinfo *info, int glyph_index); +// returns non-zero if nothing is drawn for this glyph + +STBTT_DEF int stbtt_GetCodepointShape(const stbtt_fontinfo *info, int unicode_codepoint, stbtt_vertex **vertices); +STBTT_DEF int stbtt_GetGlyphShape(const stbtt_fontinfo *info, int glyph_index, stbtt_vertex **vertices); +// returns # of vertices and fills *vertices with the pointer to them +// these are expressed in "unscaled" coordinates +// +// The shape is a series of contours. Each one starts with +// a STBTT_moveto, then consists of a series of mixed +// STBTT_lineto and STBTT_curveto segments. A lineto +// draws a line from previous endpoint to its x,y; a curveto +// draws a quadratic bezier from previous endpoint to +// its x,y, using cx,cy as the bezier control point. 
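+
+// A sketch of walking a shape returned by the functions above (assumes a
+// 'font' initialized with stbtt_InitFont; stbtt_FreeShape is declared just below):
+#if 0
+stbtt_vertex *verts;
+int i, n = stbtt_GetCodepointShape(&font, 'A', &verts);
+for (i=0; i < n; ++i) {
+   switch (verts[i].type) {
+      case STBTT_vmove:  /* begin a new contour at (x,y) */                    break;
+      case STBTT_vline:  /* line from the previous point to (x,y) */           break;
+      case STBTT_vcurve: /* quadratic bezier to (x,y), control (cx,cy) */      break;
+      case STBTT_vcubic: /* cubic bezier (CFF), controls (cx,cy),(cx1,cy1) */  break;
+   }
+}
+stbtt_FreeShape(&font, verts);
+#endif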
+
+STBTT_DEF void stbtt_FreeShape(const stbtt_fontinfo *info, stbtt_vertex *vertices);
+// frees the data allocated above
+
+STBTT_DEF unsigned char *stbtt_FindSVGDoc(const stbtt_fontinfo *info, int gl);
+STBTT_DEF int stbtt_GetCodepointSVG(const stbtt_fontinfo *info, int unicode_codepoint, const char **svg);
+STBTT_DEF int stbtt_GetGlyphSVG(const stbtt_fontinfo *info, int gl, const char **svg);
+// fills svg with the character's SVG data.
+// returns data size or 0 if SVG not found.
+
+//////////////////////////////////////////////////////////////////////////////
+//
+// BITMAP RENDERING
+//
+
+STBTT_DEF void stbtt_FreeBitmap(unsigned char *bitmap, void *userdata);
+// frees the bitmap allocated below
+
+STBTT_DEF unsigned char *stbtt_GetCodepointBitmap(const stbtt_fontinfo *info, float scale_x, float scale_y, int codepoint, int *width, int *height, int *xoff, int *yoff);
+// allocates a large-enough single-channel 8bpp bitmap and renders the
+// specified character/glyph at the specified scale into it, with
+// antialiasing. 0 is no coverage (transparent), 255 is fully covered (opaque).
+// *width & *height are filled out with the width & height of the bitmap,
+// which is stored left-to-right, top-to-bottom.
+//
+// xoff/yoff are the offset in pixel space from the glyph origin to the top-left of the bitmap
+
+STBTT_DEF unsigned char *stbtt_GetCodepointBitmapSubpixel(const stbtt_fontinfo *info, float scale_x, float scale_y, float shift_x, float shift_y, int codepoint, int *width, int *height, int *xoff, int *yoff);
+// the same as stbtt_GetCodepointBitmap, but you can specify a subpixel
+// shift for the character
+
+STBTT_DEF void stbtt_MakeCodepointBitmap(const stbtt_fontinfo *info, unsigned char *output, int out_w, int out_h, int out_stride, float scale_x, float scale_y, int codepoint);
+// the same as stbtt_GetCodepointBitmap, but you pass in storage for the bitmap
+// in the form of 'output', with row spacing of 'out_stride' bytes. The bitmap
+// is clipped to out_w/out_h bytes. Call stbtt_GetCodepointBitmapBox to get the
+// width and height and positioning info for it first.
+
+STBTT_DEF void stbtt_MakeCodepointBitmapSubpixel(const stbtt_fontinfo *info, unsigned char *output, int out_w, int out_h, int out_stride, float scale_x, float scale_y, float shift_x, float shift_y, int codepoint);
+// same as stbtt_MakeCodepointBitmap, but you can specify a subpixel
+// shift for the character
+
+STBTT_DEF void stbtt_MakeCodepointBitmapSubpixelPrefilter(const stbtt_fontinfo *info, unsigned char *output, int out_w, int out_h, int out_stride, float scale_x, float scale_y, float shift_x, float shift_y, int oversample_x, int oversample_y, float *sub_x, float *sub_y, int codepoint);
+// same as stbtt_MakeCodepointBitmapSubpixel, but prefiltering
+// is performed (see stbtt_PackSetOversampling)
+
+STBTT_DEF void stbtt_GetCodepointBitmapBox(const stbtt_fontinfo *font, int codepoint, float scale_x, float scale_y, int *ix0, int *iy0, int *ix1, int *iy1);
+// get the bbox of the bitmap centered around the glyph origin; so the
+// bitmap width is ix1-ix0, height is iy1-iy0, and location to place
+// the bitmap top left is (leftSideBearing*scale,iy0).
+// (Note that the bitmap uses y-increases-down, but the shape uses
+// y-increases-up, so CodepointBitmapBox and CodepointBox are inverted.)
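+
+// A sketch of rendering into caller-provided storage, per the doc above:
+// size the bitmap with stbtt_GetCodepointBitmapBox, then render with
+// stbtt_MakeCodepointBitmap (assumes an initialized 'font'; 20px is arbitrary):
+#if 0
+int x0,y0,x1,y1, w,h;
+float scale = stbtt_ScaleForPixelHeight(&font, 20.0f);
+unsigned char *out;
+
+stbtt_GetCodepointBitmapBox(&font, 'g', scale, scale, &x0,&y0,&x1,&y1);
+w = x1-x0;
+h = y1-y0;
+out = (unsigned char *) malloc(w*h);
+stbtt_MakeCodepointBitmap(&font, out, w, h, w /*stride*/, scale, scale, 'g');
+// 'out' now holds an 8bpp coverage bitmap; place its top-left at
+// (pen_x + x0, baseline + y0) when drawing.
+free(out);
+#endif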
+
+STBTT_DEF void stbtt_GetCodepointBitmapBoxSubpixel(const stbtt_fontinfo *font, int codepoint, float scale_x, float scale_y, float shift_x, float shift_y, int *ix0, int *iy0, int *ix1, int *iy1);
+// same as stbtt_GetCodepointBitmapBox, but you can specify a subpixel
+// shift for the character
+
+// the following functions are equivalent to the above functions, but operate
+// on glyph indices instead of Unicode codepoints (for efficiency)
+STBTT_DEF unsigned char *stbtt_GetGlyphBitmap(const stbtt_fontinfo *info, float scale_x, float scale_y, int glyph, int *width, int *height, int *xoff, int *yoff);
+STBTT_DEF unsigned char *stbtt_GetGlyphBitmapSubpixel(const stbtt_fontinfo *info, float scale_x, float scale_y, float shift_x, float shift_y, int glyph, int *width, int *height, int *xoff, int *yoff);
+STBTT_DEF void stbtt_MakeGlyphBitmap(const stbtt_fontinfo *info, unsigned char *output, int out_w, int out_h, int out_stride, float scale_x, float scale_y, int glyph);
+STBTT_DEF void stbtt_MakeGlyphBitmapSubpixel(const stbtt_fontinfo *info, unsigned char *output, int out_w, int out_h, int out_stride, float scale_x, float scale_y, float shift_x, float shift_y, int glyph);
+STBTT_DEF void stbtt_MakeGlyphBitmapSubpixelPrefilter(const stbtt_fontinfo *info, unsigned char *output, int out_w, int out_h, int out_stride, float scale_x, float scale_y, float shift_x, float shift_y, int oversample_x, int oversample_y, float *sub_x, float *sub_y, int glyph);
+STBTT_DEF void stbtt_GetGlyphBitmapBox(const stbtt_fontinfo *font, int glyph, float scale_x, float scale_y, int *ix0, int *iy0, int *ix1, int *iy1);
+STBTT_DEF void stbtt_GetGlyphBitmapBoxSubpixel(const stbtt_fontinfo *font, int glyph, float scale_x, float scale_y,float shift_x, float shift_y, int *ix0, int *iy0, int *ix1, int *iy1);
+
+
+// @TODO: don't expose this structure
+typedef struct
+{
+   int w,h,stride;
+   unsigned char *pixels;
+} stbtt__bitmap;
+
+// rasterize a shape with quadratic beziers into a bitmap
+STBTT_DEF void stbtt_Rasterize(stbtt__bitmap *result,        // 1-channel bitmap to draw into
+                               float flatness_in_pixels,     // allowable error of curve in pixels
+                               stbtt_vertex *vertices,       // array of vertices defining shape
+                               int num_verts,                // number of vertices in above array
+                               float scale_x, float scale_y, // scale applied to input vertices
+                               float shift_x, float shift_y, // translation applied to input vertices
+                               int x_off, int y_off,         // another translation applied to input
+                               int invert,                   // if non-zero, vertically flip shape
+                               void *userdata);              // context for STBTT_MALLOC
+
+//////////////////////////////////////////////////////////////////////////////
+//
+// Signed Distance Function (or Field) rendering
+
+STBTT_DEF void stbtt_FreeSDF(unsigned char *bitmap, void *userdata);
+// frees the SDF bitmap allocated below
+
+STBTT_DEF unsigned char * stbtt_GetGlyphSDF(const stbtt_fontinfo *info, float scale, int glyph, int padding, unsigned char onedge_value, float pixel_dist_scale, int *width, int *height, int *xoff, int *yoff);
+STBTT_DEF unsigned char * stbtt_GetCodepointSDF(const stbtt_fontinfo *info, float scale, int codepoint, int padding, unsigned char onedge_value, float pixel_dist_scale, int *width, int *height, int *xoff, int *yoff);
+// These functions compute a discretized SDF field for a single character, suitable for storing
+// in a single-channel texture, sampling with bilinear filtering, and testing whether the sampled
+// value is larger than some threshold to produce scalable fonts.
+// info              --  the font
+// scale             --  controls the size of the resulting SDF bitmap, same as it would be when creating a regular bitmap
+// glyph/codepoint   --  the character to generate the SDF for
+// padding           --  extra "pixels" around the character which are filled with the distance to the character (not 0),
+//                          which allows effects like outlines
+// onedge_value      --  value 0-255 to test the SDF against to reconstruct the character (i.e. the isocontour of the character)
+// pixel_dist_scale  --  what value the SDF should increase by when moving one SDF "pixel" away from the edge (on the 0..255 scale)
+//                          if positive, > onedge_value is inside; if negative, < onedge_value is inside
+// width,height      --  output height & width of the SDF bitmap (including padding)
+// xoff,yoff         --  output origin of the character
+// return value      --  a 2D array of bytes 0..255, width*height in size
+//
+// pixel_dist_scale & onedge_value are a scale & bias that allow you to make
+// optimal use of the limited 0..255 range for your application, trading off
+// precision and special effects. SDF values outside the range 0..255 are
+// clamped to 0..255.
+//
+// Example:
+//      scale = stbtt_ScaleForPixelHeight(22)
+//      padding = 5
+//      onedge_value = 180
+//      pixel_dist_scale = 180/5.0 = 36.0
+//
+// This will create an SDF bitmap in which the character is about 22 pixels
+// high but the whole bitmap is about 22+5+5=32 pixels high. To produce a filled
+// shape, sample the SDF at each pixel and fill the pixel if the SDF value
+// is greater than or equal to 180/255. (You'll actually want to antialias,
+// which is beyond the scope of this example.) Additionally, you can compute
+// offset outlines (e.g. to stroke the character border inside & outside,
+// or only outside). For example, to fill outside the character up to 3 SDF
+// pixels, you would compare against (180-36.0*3)/255 = 72/255. The above
+// choice of variables maps a range from 5 pixels outside the shape to
+// 2 pixels inside the shape to 0..255; this is intended primarily for applying
+// outside effects only (the interior range is needed to allow proper
+// antialiasing of the font at *smaller* sizes).
+//
+// The function computes the SDF analytically at each SDF pixel, not by e.g.
+// building a higher-res bitmap and approximating it. In theory the quality
+// should be as high as possible for an SDF of this size & representation, but
+// it's unclear whether this holds in practice (perhaps building a higher-res
+// bitmap and computing from that can allow drop-out prevention).
+//
+// The algorithm has not been optimized at all, so expect it to be slow
+// if computing lots of characters or very large sizes.
+
+
+
+//////////////////////////////////////////////////////////////////////////////
+//
+// Finding the right font...
+//
+// You should really just solve this offline, keep your own tables
+// of what font is what, and don't try to get it out of the .ttf file.
+// That's because getting it out of the .ttf file is really hard, because
+// the names in the file can appear in many possible encodings, in many
+// possible languages, and e.g. if you need a case-insensitive comparison,
+// the details of that depend on the encoding & language in a complex way
+// (actually underspecified in truetype, but also gigantic).
+// +// But you can use the provided functions in two possible ways: +// stbtt_FindMatchingFont() will use *case-sensitive* comparisons on +// unicode-encoded names to try to find the font you want; +// you can run this before calling stbtt_InitFont() +// +// stbtt_GetFontNameString() lets you get any of the various strings +// from the file yourself and do your own comparisons on them. +// You have to have called stbtt_InitFont() first. + + +STBTT_DEF int stbtt_FindMatchingFont(const unsigned char *fontdata, const char *name, int flags); +// returns the offset (not index) of the font that matches, or -1 if none +// if you use STBTT_MACSTYLE_DONTCARE, use a font name like "Arial Bold". +// if you use any other flag, use a font name like "Arial"; this checks +// the 'macStyle' header field; i don't know if fonts set this consistently +#define STBTT_MACSTYLE_DONTCARE 0 +#define STBTT_MACSTYLE_BOLD 1 +#define STBTT_MACSTYLE_ITALIC 2 +#define STBTT_MACSTYLE_UNDERSCORE 4 +#define STBTT_MACSTYLE_NONE 8 // <= not same as 0, this makes us check the bitfield is 0 + +STBTT_DEF int stbtt_CompareUTF8toUTF16_bigendian(const char *s1, int len1, const char *s2, int len2); +// returns 1/0 whether the first string interpreted as utf8 is identical to +// the second string interpreted as big-endian utf16... useful for strings from next func + +STBTT_DEF const char *stbtt_GetFontNameString(const stbtt_fontinfo *font, int *length, int platformID, int encodingID, int languageID, int nameID); +// returns the string (which may be big-endian double byte, e.g. for unicode) +// and puts the length in bytes in *length. +// +// some of the values for the IDs are below; for more see the truetype spec: +// http://developer.apple.com/textfonts/TTRefMan/RM06/Chap6name.html +// http://www.microsoft.com/typography/otspec/name.htm + +enum { // platformID + STBTT_PLATFORM_ID_UNICODE =0, + STBTT_PLATFORM_ID_MAC =1, + STBTT_PLATFORM_ID_ISO =2, + STBTT_PLATFORM_ID_MICROSOFT =3 +}; + +enum { // encodingID for STBTT_PLATFORM_ID_UNICODE + STBTT_UNICODE_EID_UNICODE_1_0 =0, + STBTT_UNICODE_EID_UNICODE_1_1 =1, + STBTT_UNICODE_EID_ISO_10646 =2, + STBTT_UNICODE_EID_UNICODE_2_0_BMP=3, + STBTT_UNICODE_EID_UNICODE_2_0_FULL=4 +}; + +enum { // encodingID for STBTT_PLATFORM_ID_MICROSOFT + STBTT_MS_EID_SYMBOL =0, + STBTT_MS_EID_UNICODE_BMP =1, + STBTT_MS_EID_SHIFTJIS =2, + STBTT_MS_EID_UNICODE_FULL =10 +}; + +enum { // encodingID for STBTT_PLATFORM_ID_MAC; same as Script Manager codes + STBTT_MAC_EID_ROMAN =0, STBTT_MAC_EID_ARABIC =4, + STBTT_MAC_EID_JAPANESE =1, STBTT_MAC_EID_HEBREW =5, + STBTT_MAC_EID_CHINESE_TRAD =2, STBTT_MAC_EID_GREEK =6, + STBTT_MAC_EID_KOREAN =3, STBTT_MAC_EID_RUSSIAN =7 +}; + +enum { // languageID for STBTT_PLATFORM_ID_MICROSOFT; same as LCID... + // problematic because there are e.g. 
16 english LCIDs and 16 arabic LCIDs
+   STBTT_MS_LANG_ENGLISH     =0x0409,   STBTT_MS_LANG_ITALIAN     =0x0410,
+   STBTT_MS_LANG_CHINESE     =0x0804,   STBTT_MS_LANG_JAPANESE    =0x0411,
+   STBTT_MS_LANG_DUTCH       =0x0413,   STBTT_MS_LANG_KOREAN      =0x0412,
+   STBTT_MS_LANG_FRENCH      =0x040c,   STBTT_MS_LANG_RUSSIAN     =0x0419,
+   STBTT_MS_LANG_GERMAN      =0x0407,   STBTT_MS_LANG_SPANISH     =0x040a,
+   STBTT_MS_LANG_HEBREW      =0x040d,   STBTT_MS_LANG_SWEDISH     =0x041D
+};
+
+enum { // languageID for STBTT_PLATFORM_ID_MAC
+   STBTT_MAC_LANG_ENGLISH      =0 ,   STBTT_MAC_LANG_JAPANESE     =11,
+   STBTT_MAC_LANG_ARABIC       =12,   STBTT_MAC_LANG_KOREAN       =23,
+   STBTT_MAC_LANG_DUTCH        =4 ,   STBTT_MAC_LANG_RUSSIAN      =32,
+   STBTT_MAC_LANG_FRENCH       =1 ,   STBTT_MAC_LANG_SPANISH      =6 ,
+   STBTT_MAC_LANG_GERMAN       =2 ,   STBTT_MAC_LANG_SWEDISH      =5 ,
+   STBTT_MAC_LANG_HEBREW       =10,   STBTT_MAC_LANG_CHINESE_SIMPLIFIED =33,
+   STBTT_MAC_LANG_ITALIAN      =3 ,   STBTT_MAC_LANG_CHINESE_TRAD =19
+};
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // __STB_INCLUDE_STB_TRUETYPE_H__
+
+///////////////////////////////////////////////////////////////////////////////
+///////////////////////////////////////////////////////////////////////////////
+////
+////   IMPLEMENTATION
+////
+////
+
+#ifdef STB_TRUETYPE_IMPLEMENTATION
+
+#ifndef STBTT_MAX_OVERSAMPLE
+#define STBTT_MAX_OVERSAMPLE   8
+#endif
+
+#if STBTT_MAX_OVERSAMPLE > 255
+#error "STBTT_MAX_OVERSAMPLE cannot be > 255"
+#endif
+
+typedef int stbtt__test_oversample_pow2[(STBTT_MAX_OVERSAMPLE & (STBTT_MAX_OVERSAMPLE-1)) == 0 ? 1 : -1];
+
+#ifndef STBTT_RASTERIZER_VERSION
+#define STBTT_RASTERIZER_VERSION 2
+#endif
+
+#ifdef _MSC_VER
+#define STBTT__NOTUSED(v)  (void)(v)
+#else
+#define STBTT__NOTUSED(v)  (void)sizeof(v)
+#endif
+
+//////////////////////////////////////////////////////////////////////////
+//
+// stbtt__buf helpers to parse data from file
+//
+
+static stbtt_uint8 stbtt__buf_get8(stbtt__buf *b)
+{
+   if (b->cursor >= b->size)
+      return 0;
+   return b->data[b->cursor++];
+}
+
+static stbtt_uint8 stbtt__buf_peek8(stbtt__buf *b)
+{
+   if (b->cursor >= b->size)
+      return 0;
+   return b->data[b->cursor];
+}
+
+static void stbtt__buf_seek(stbtt__buf *b, int o)
+{
+   STBTT_assert(!(o > b->size || o < 0));
+   b->cursor = (o > b->size || o < 0) ?
b->size : o; +} + +static void stbtt__buf_skip(stbtt__buf *b, int o) +{ + stbtt__buf_seek(b, b->cursor + o); +} + +static stbtt_uint32 stbtt__buf_get(stbtt__buf *b, int n) +{ + stbtt_uint32 v = 0; + int i; + STBTT_assert(n >= 1 && n <= 4); + for (i = 0; i < n; i++) + v = (v << 8) | stbtt__buf_get8(b); + return v; +} + +static stbtt__buf stbtt__new_buf(const void *p, size_t size) +{ + stbtt__buf r; + STBTT_assert(size < 0x40000000); + r.data = (stbtt_uint8*) p; + r.size = (int) size; + r.cursor = 0; + return r; +} + +#define stbtt__buf_get16(b) stbtt__buf_get((b), 2) +#define stbtt__buf_get32(b) stbtt__buf_get((b), 4) + +static stbtt__buf stbtt__buf_range(const stbtt__buf *b, int o, int s) +{ + stbtt__buf r = stbtt__new_buf(NULL, 0); + if (o < 0 || s < 0 || o > b->size || s > b->size - o) return r; + r.data = b->data + o; + r.size = s; + return r; +} + +static stbtt__buf stbtt__cff_get_index(stbtt__buf *b) +{ + int count, start, offsize; + start = b->cursor; + count = stbtt__buf_get16(b); + if (count) { + offsize = stbtt__buf_get8(b); + STBTT_assert(offsize >= 1 && offsize <= 4); + stbtt__buf_skip(b, offsize * count); + stbtt__buf_skip(b, stbtt__buf_get(b, offsize) - 1); + } + return stbtt__buf_range(b, start, b->cursor - start); +} + +static stbtt_uint32 stbtt__cff_int(stbtt__buf *b) +{ + int b0 = stbtt__buf_get8(b); + if (b0 >= 32 && b0 <= 246) return b0 - 139; + else if (b0 >= 247 && b0 <= 250) return (b0 - 247)*256 + stbtt__buf_get8(b) + 108; + else if (b0 >= 251 && b0 <= 254) return -(b0 - 251)*256 - stbtt__buf_get8(b) - 108; + else if (b0 == 28) return stbtt__buf_get16(b); + else if (b0 == 29) return stbtt__buf_get32(b); + STBTT_assert(0); + return 0; +} + +static void stbtt__cff_skip_operand(stbtt__buf *b) { + int v, b0 = stbtt__buf_peek8(b); + STBTT_assert(b0 >= 28); + if (b0 == 30) { + stbtt__buf_skip(b, 1); + while (b->cursor < b->size) { + v = stbtt__buf_get8(b); + if ((v & 0xF) == 0xF || (v >> 4) == 0xF) + break; + } + } else { + stbtt__cff_int(b); + } +} + +static stbtt__buf stbtt__dict_get(stbtt__buf *b, int key) +{ + stbtt__buf_seek(b, 0); + while (b->cursor < b->size) { + int start = b->cursor, end, op; + while (stbtt__buf_peek8(b) >= 28) + stbtt__cff_skip_operand(b); + end = b->cursor; + op = stbtt__buf_get8(b); + if (op == 12) op = stbtt__buf_get8(b) | 0x100; + if (op == key) return stbtt__buf_range(b, start, end-start); + } + return stbtt__buf_range(b, 0, 0); +} + +static void stbtt__dict_get_ints(stbtt__buf *b, int key, int outcount, stbtt_uint32 *out) +{ + int i; + stbtt__buf operands = stbtt__dict_get(b, key); + for (i = 0; i < outcount && operands.cursor < operands.size; i++) + out[i] = stbtt__cff_int(&operands); +} + +static int stbtt__cff_index_count(stbtt__buf *b) +{ + stbtt__buf_seek(b, 0); + return stbtt__buf_get16(b); +} + +static stbtt__buf stbtt__cff_index_get(stbtt__buf b, int i) +{ + int count, offsize, start, end; + stbtt__buf_seek(&b, 0); + count = stbtt__buf_get16(&b); + offsize = stbtt__buf_get8(&b); + STBTT_assert(i >= 0 && i < count); + STBTT_assert(offsize >= 1 && offsize <= 4); + stbtt__buf_skip(&b, i*offsize); + start = stbtt__buf_get(&b, offsize); + end = stbtt__buf_get(&b, offsize); + return stbtt__buf_range(&b, 2+(count+1)*offsize+start, end - start); +} + +////////////////////////////////////////////////////////////////////////// +// +// accessors to parse data from file +// + +// on platforms that don't allow misaligned reads, if we want to allow +// truetype fonts that aren't padded to alignment, define ALLOW_UNALIGNED_TRUETYPE + +#define 
ttBYTE(p) (* (stbtt_uint8 *) (p)) +#define ttCHAR(p) (* (stbtt_int8 *) (p)) +#define ttFixed(p) ttLONG(p) + +static stbtt_uint16 ttUSHORT(stbtt_uint8 *p) { return p[0]*256 + p[1]; } +static stbtt_int16 ttSHORT(stbtt_uint8 *p) { return p[0]*256 + p[1]; } +static stbtt_uint32 ttULONG(stbtt_uint8 *p) { return (p[0]<<24) + (p[1]<<16) + (p[2]<<8) + p[3]; } +static stbtt_int32 ttLONG(stbtt_uint8 *p) { return (p[0]<<24) + (p[1]<<16) + (p[2]<<8) + p[3]; } + +#define stbtt_tag4(p,c0,c1,c2,c3) ((p)[0] == (c0) && (p)[1] == (c1) && (p)[2] == (c2) && (p)[3] == (c3)) +#define stbtt_tag(p,str) stbtt_tag4(p,str[0],str[1],str[2],str[3]) + +static int stbtt__isfont(stbtt_uint8 *font) +{ + // check the version number + if (stbtt_tag4(font, '1',0,0,0)) return 1; // TrueType 1 + if (stbtt_tag(font, "typ1")) return 1; // TrueType with type 1 font -- we don't support this! + if (stbtt_tag(font, "OTTO")) return 1; // OpenType with CFF + if (stbtt_tag4(font, 0,1,0,0)) return 1; // OpenType 1.0 + if (stbtt_tag(font, "true")) return 1; // Apple specification for TrueType fonts + return 0; +} + +// @OPTIMIZE: binary search +static stbtt_uint32 stbtt__find_table(stbtt_uint8 *data, stbtt_uint32 fontstart, const char *tag) +{ + stbtt_int32 num_tables = ttUSHORT(data+fontstart+4); + stbtt_uint32 tabledir = fontstart + 12; + stbtt_int32 i; + for (i=0; i < num_tables; ++i) { + stbtt_uint32 loc = tabledir + 16*i; + if (stbtt_tag(data+loc+0, tag)) + return ttULONG(data+loc+8); + } + return 0; +} + +static int stbtt_GetFontOffsetForIndex_internal(unsigned char *font_collection, int index) +{ + // if it's just a font, there's only one valid index + if (stbtt__isfont(font_collection)) + return index == 0 ? 0 : -1; + + // check if it's a TTC + if (stbtt_tag(font_collection, "ttcf")) { + // version 1? + if (ttULONG(font_collection+4) == 0x00010000 || ttULONG(font_collection+4) == 0x00020000) { + stbtt_int32 n = ttLONG(font_collection+8); + if (index >= n) + return -1; + return ttULONG(font_collection+12+index*4); + } + } + return -1; +} + +static int stbtt_GetNumberOfFonts_internal(unsigned char *font_collection) +{ + // if it's just a font, there's only one valid font + if (stbtt__isfont(font_collection)) + return 1; + + // check if it's a TTC + if (stbtt_tag(font_collection, "ttcf")) { + // version 1? 
+ if (ttULONG(font_collection+4) == 0x00010000 || ttULONG(font_collection+4) == 0x00020000) { + return ttLONG(font_collection+8); + } + } + return 0; +} + +static stbtt__buf stbtt__get_subrs(stbtt__buf cff, stbtt__buf fontdict) +{ + stbtt_uint32 subrsoff = 0, private_loc[2] = { 0, 0 }; + stbtt__buf pdict; + stbtt__dict_get_ints(&fontdict, 18, 2, private_loc); + if (!private_loc[1] || !private_loc[0]) return stbtt__new_buf(NULL, 0); + pdict = stbtt__buf_range(&cff, private_loc[1], private_loc[0]); + stbtt__dict_get_ints(&pdict, 19, 1, &subrsoff); + if (!subrsoff) return stbtt__new_buf(NULL, 0); + stbtt__buf_seek(&cff, private_loc[1]+subrsoff); + return stbtt__cff_get_index(&cff); +} + +// since most people won't use this, find this table the first time it's needed +static int stbtt__get_svg(stbtt_fontinfo *info) +{ + stbtt_uint32 t; + if (info->svg < 0) { + t = stbtt__find_table(info->data, info->fontstart, "SVG "); + if (t) { + stbtt_uint32 offset = ttULONG(info->data + t + 2); + info->svg = t + offset; + } else { + info->svg = 0; + } + } + return info->svg; +} + +static int stbtt_InitFont_internal(stbtt_fontinfo *info, unsigned char *data, int fontstart) +{ + stbtt_uint32 cmap, t; + stbtt_int32 i,numTables; + + info->data = data; + info->fontstart = fontstart; + info->cff = stbtt__new_buf(NULL, 0); + + cmap = stbtt__find_table(data, fontstart, "cmap"); // required + info->loca = stbtt__find_table(data, fontstart, "loca"); // required + info->head = stbtt__find_table(data, fontstart, "head"); // required + info->glyf = stbtt__find_table(data, fontstart, "glyf"); // required + info->hhea = stbtt__find_table(data, fontstart, "hhea"); // required + info->hmtx = stbtt__find_table(data, fontstart, "hmtx"); // required + info->kern = stbtt__find_table(data, fontstart, "kern"); // not required + info->gpos = stbtt__find_table(data, fontstart, "GPOS"); // not required + + if (!cmap || !info->head || !info->hhea || !info->hmtx) + return 0; + if (info->glyf) { + // required for truetype + if (!info->loca) return 0; + } else { + // initialization for CFF / Type2 fonts (OTF) + stbtt__buf b, topdict, topdictidx; + stbtt_uint32 cstype = 2, charstrings = 0, fdarrayoff = 0, fdselectoff = 0; + stbtt_uint32 cff; + + cff = stbtt__find_table(data, fontstart, "CFF "); + if (!cff) return 0; + + info->fontdicts = stbtt__new_buf(NULL, 0); + info->fdselect = stbtt__new_buf(NULL, 0); + + // @TODO this should use size from table (not 512MB) + info->cff = stbtt__new_buf(data+cff, 512*1024*1024); + b = info->cff; + + // read the header + stbtt__buf_skip(&b, 2); + stbtt__buf_seek(&b, stbtt__buf_get8(&b)); // hdrsize + + // @TODO the name INDEX could list multiple fonts, + // but we just use the first one. 
+ stbtt__cff_get_index(&b); // name INDEX + topdictidx = stbtt__cff_get_index(&b); + topdict = stbtt__cff_index_get(topdictidx, 0); + stbtt__cff_get_index(&b); // string INDEX + info->gsubrs = stbtt__cff_get_index(&b); + + stbtt__dict_get_ints(&topdict, 17, 1, &charstrings); + stbtt__dict_get_ints(&topdict, 0x100 | 6, 1, &cstype); + stbtt__dict_get_ints(&topdict, 0x100 | 36, 1, &fdarrayoff); + stbtt__dict_get_ints(&topdict, 0x100 | 37, 1, &fdselectoff); + info->subrs = stbtt__get_subrs(b, topdict); + + // we only support Type 2 charstrings + if (cstype != 2) return 0; + if (charstrings == 0) return 0; + + if (fdarrayoff) { + // looks like a CID font + if (!fdselectoff) return 0; + stbtt__buf_seek(&b, fdarrayoff); + info->fontdicts = stbtt__cff_get_index(&b); + info->fdselect = stbtt__buf_range(&b, fdselectoff, b.size-fdselectoff); + } + + stbtt__buf_seek(&b, charstrings); + info->charstrings = stbtt__cff_get_index(&b); + } + + t = stbtt__find_table(data, fontstart, "maxp"); + if (t) + info->numGlyphs = ttUSHORT(data+t+4); + else + info->numGlyphs = 0xffff; + + info->svg = -1; + + // find a cmap encoding table we understand *now* to avoid searching + // later. (todo: could make this installable) + // the same regardless of glyph. + numTables = ttUSHORT(data + cmap + 2); + info->index_map = 0; + for (i=0; i < numTables; ++i) { + stbtt_uint32 encoding_record = cmap + 4 + 8 * i; + // find an encoding we understand: + switch(ttUSHORT(data+encoding_record)) { + case STBTT_PLATFORM_ID_MICROSOFT: + switch (ttUSHORT(data+encoding_record+2)) { + case STBTT_MS_EID_UNICODE_BMP: + case STBTT_MS_EID_UNICODE_FULL: + // MS/Unicode + info->index_map = cmap + ttULONG(data+encoding_record+4); + break; + } + break; + case STBTT_PLATFORM_ID_UNICODE: + // Mac/iOS has these + // all the encodingIDs are unicode, so we don't bother to check it + info->index_map = cmap + ttULONG(data+encoding_record+4); + break; + } + } + if (info->index_map == 0) + return 0; + + info->indexToLocFormat = ttUSHORT(data+info->head + 50); + return 1; +} + +STBTT_DEF int stbtt_FindGlyphIndex(const stbtt_fontinfo *info, int unicode_codepoint) +{ + stbtt_uint8 *data = info->data; + stbtt_uint32 index_map = info->index_map; + + stbtt_uint16 format = ttUSHORT(data + index_map + 0); + if (format == 0) { // apple byte encoding + stbtt_int32 bytes = ttUSHORT(data + index_map + 2); + if (unicode_codepoint < bytes-6) + return ttBYTE(data + index_map + 6 + unicode_codepoint); + return 0; + } else if (format == 6) { + stbtt_uint32 first = ttUSHORT(data + index_map + 6); + stbtt_uint32 count = ttUSHORT(data + index_map + 8); + if ((stbtt_uint32) unicode_codepoint >= first && (stbtt_uint32) unicode_codepoint < first+count) + return ttUSHORT(data + index_map + 10 + (unicode_codepoint - first)*2); + return 0; + } else if (format == 2) { + STBTT_assert(0); // @TODO: high-byte mapping for japanese/chinese/korean + return 0; + } else if (format == 4) { // standard mapping for windows fonts: binary search collection of ranges + stbtt_uint16 segcount = ttUSHORT(data+index_map+6) >> 1; + stbtt_uint16 searchRange = ttUSHORT(data+index_map+8) >> 1; + stbtt_uint16 entrySelector = ttUSHORT(data+index_map+10); + stbtt_uint16 rangeShift = ttUSHORT(data+index_map+12) >> 1; + + // do a binary search of the segments + stbtt_uint32 endCount = index_map + 14; + stbtt_uint32 search = endCount; + + if (unicode_codepoint > 0xffff) + return 0; + + // they lie from endCount .. endCount + segCount + // but searchRange is the nearest power of two, so... 
+ if (unicode_codepoint >= ttUSHORT(data + search + rangeShift*2)) + search += rangeShift*2; + + // now decrement to bias correctly to find smallest + search -= 2; + while (entrySelector) { + stbtt_uint16 end; + searchRange >>= 1; + end = ttUSHORT(data + search + searchRange*2); + if (unicode_codepoint > end) + search += searchRange*2; + --entrySelector; + } + search += 2; + + { + stbtt_uint16 offset, start, last; + stbtt_uint16 item = (stbtt_uint16) ((search - endCount) >> 1); + + start = ttUSHORT(data + index_map + 14 + segcount*2 + 2 + 2*item); + last = ttUSHORT(data + endCount + 2*item); + if (unicode_codepoint < start || unicode_codepoint > last) + return 0; + + offset = ttUSHORT(data + index_map + 14 + segcount*6 + 2 + 2*item); + if (offset == 0) + return (stbtt_uint16) (unicode_codepoint + ttSHORT(data + index_map + 14 + segcount*4 + 2 + 2*item)); + + return ttUSHORT(data + offset + (unicode_codepoint-start)*2 + index_map + 14 + segcount*6 + 2 + 2*item); + } + } else if (format == 12 || format == 13) { + stbtt_uint32 ngroups = ttULONG(data+index_map+12); + stbtt_int32 low,high; + low = 0; high = (stbtt_int32)ngroups; + // Binary search the right group. + while (low < high) { + stbtt_int32 mid = low + ((high-low) >> 1); // rounds down, so low <= mid < high + stbtt_uint32 start_char = ttULONG(data+index_map+16+mid*12); + stbtt_uint32 end_char = ttULONG(data+index_map+16+mid*12+4); + if ((stbtt_uint32) unicode_codepoint < start_char) + high = mid; + else if ((stbtt_uint32) unicode_codepoint > end_char) + low = mid+1; + else { + stbtt_uint32 start_glyph = ttULONG(data+index_map+16+mid*12+8); + if (format == 12) + return start_glyph + unicode_codepoint-start_char; + else // format == 13 + return start_glyph; + } + } + return 0; // not found + } + // @TODO + STBTT_assert(0); + return 0; +} + +STBTT_DEF int stbtt_GetCodepointShape(const stbtt_fontinfo *info, int unicode_codepoint, stbtt_vertex **vertices) +{ + return stbtt_GetGlyphShape(info, stbtt_FindGlyphIndex(info, unicode_codepoint), vertices); +} + +static void stbtt_setvertex(stbtt_vertex *v, stbtt_uint8 type, stbtt_int32 x, stbtt_int32 y, stbtt_int32 cx, stbtt_int32 cy) +{ + v->type = type; + v->x = (stbtt_int16) x; + v->y = (stbtt_int16) y; + v->cx = (stbtt_int16) cx; + v->cy = (stbtt_int16) cy; +} + +static int stbtt__GetGlyfOffset(const stbtt_fontinfo *info, int glyph_index) +{ + int g1,g2; + + STBTT_assert(!info->cff.size); + + if (glyph_index >= info->numGlyphs) return -1; // glyph index out of range + if (info->indexToLocFormat >= 2) return -1; // unknown index->glyph map format + + if (info->indexToLocFormat == 0) { + g1 = info->glyf + ttUSHORT(info->data + info->loca + glyph_index * 2) * 2; + g2 = info->glyf + ttUSHORT(info->data + info->loca + glyph_index * 2 + 2) * 2; + } else { + g1 = info->glyf + ttULONG (info->data + info->loca + glyph_index * 4); + g2 = info->glyf + ttULONG (info->data + info->loca + glyph_index * 4 + 4); + } + + return g1==g2 ? 
-1 : g1; // if length is 0, return -1 +} + +static int stbtt__GetGlyphInfoT2(const stbtt_fontinfo *info, int glyph_index, int *x0, int *y0, int *x1, int *y1); + +STBTT_DEF int stbtt_GetGlyphBox(const stbtt_fontinfo *info, int glyph_index, int *x0, int *y0, int *x1, int *y1) +{ + if (info->cff.size) { + stbtt__GetGlyphInfoT2(info, glyph_index, x0, y0, x1, y1); + } else { + int g = stbtt__GetGlyfOffset(info, glyph_index); + if (g < 0) return 0; + + if (x0) *x0 = ttSHORT(info->data + g + 2); + if (y0) *y0 = ttSHORT(info->data + g + 4); + if (x1) *x1 = ttSHORT(info->data + g + 6); + if (y1) *y1 = ttSHORT(info->data + g + 8); + } + return 1; +} + +STBTT_DEF int stbtt_GetCodepointBox(const stbtt_fontinfo *info, int codepoint, int *x0, int *y0, int *x1, int *y1) +{ + return stbtt_GetGlyphBox(info, stbtt_FindGlyphIndex(info,codepoint), x0,y0,x1,y1); +} + +STBTT_DEF int stbtt_IsGlyphEmpty(const stbtt_fontinfo *info, int glyph_index) +{ + stbtt_int16 numberOfContours; + int g; + if (info->cff.size) + return stbtt__GetGlyphInfoT2(info, glyph_index, NULL, NULL, NULL, NULL) == 0; + g = stbtt__GetGlyfOffset(info, glyph_index); + if (g < 0) return 1; + numberOfContours = ttSHORT(info->data + g); + return numberOfContours == 0; +} + +static int stbtt__close_shape(stbtt_vertex *vertices, int num_vertices, int was_off, int start_off, + stbtt_int32 sx, stbtt_int32 sy, stbtt_int32 scx, stbtt_int32 scy, stbtt_int32 cx, stbtt_int32 cy) +{ + if (start_off) { + if (was_off) + stbtt_setvertex(&vertices[num_vertices++], STBTT_vcurve, (cx+scx)>>1, (cy+scy)>>1, cx,cy); + stbtt_setvertex(&vertices[num_vertices++], STBTT_vcurve, sx,sy,scx,scy); + } else { + if (was_off) + stbtt_setvertex(&vertices[num_vertices++], STBTT_vcurve,sx,sy,cx,cy); + else + stbtt_setvertex(&vertices[num_vertices++], STBTT_vline,sx,sy,0,0); + } + return num_vertices; +} + +static int stbtt__GetGlyphShapeTT(const stbtt_fontinfo *info, int glyph_index, stbtt_vertex **pvertices) +{ + stbtt_int16 numberOfContours; + stbtt_uint8 *endPtsOfContours; + stbtt_uint8 *data = info->data; + stbtt_vertex *vertices=0; + int num_vertices=0; + int g = stbtt__GetGlyfOffset(info, glyph_index); + + *pvertices = NULL; + + if (g < 0) return 0; + + numberOfContours = ttSHORT(data + g); + + if (numberOfContours > 0) { + stbtt_uint8 flags=0,flagcount; + stbtt_int32 ins, i,j=0,m,n, next_move, was_off=0, off, start_off=0; + stbtt_int32 x,y,cx,cy,sx,sy, scx,scy; + stbtt_uint8 *points; + endPtsOfContours = (data + g + 10); + ins = ttUSHORT(data + g + 10 + numberOfContours * 2); + points = data + g + 10 + numberOfContours * 2 + 2 + ins; + + n = 1+ttUSHORT(endPtsOfContours + numberOfContours*2-2); + + m = n + 2*numberOfContours; // a loose bound on how many vertices we might need + vertices = (stbtt_vertex *) STBTT_malloc(m * sizeof(vertices[0]), info->userdata); + if (vertices == 0) + return 0; + + next_move = 0; + flagcount=0; + + // in first pass, we load uninterpreted data into the allocated array + // above, shifted to the end of the array so we won't overwrite it when + // we create our final data starting from the front + + off = m - n; // starting offset for uninterpreted data, regardless of how m ends up being calculated + + // first load flags + + for (i=0; i < n; ++i) { + if (flagcount == 0) { + flags = *points++; + if (flags & 8) + flagcount = *points++; + } else + --flagcount; + vertices[off+i].type = flags; + } + + // now load x coordinates + x=0; + for (i=0; i < n; ++i) { + flags = vertices[off+i].type; + if (flags & 2) { + stbtt_int16 dx = *points++; + x += 
(flags & 16) ? dx : -dx; // ??? + } else { + if (!(flags & 16)) { + x = x + (stbtt_int16) (points[0]*256 + points[1]); + points += 2; + } + } + vertices[off+i].x = (stbtt_int16) x; + } + + // now load y coordinates + y=0; + for (i=0; i < n; ++i) { + flags = vertices[off+i].type; + if (flags & 4) { + stbtt_int16 dy = *points++; + y += (flags & 32) ? dy : -dy; // ??? + } else { + if (!(flags & 32)) { + y = y + (stbtt_int16) (points[0]*256 + points[1]); + points += 2; + } + } + vertices[off+i].y = (stbtt_int16) y; + } + + // now convert them to our format + num_vertices=0; + sx = sy = cx = cy = scx = scy = 0; + for (i=0; i < n; ++i) { + flags = vertices[off+i].type; + x = (stbtt_int16) vertices[off+i].x; + y = (stbtt_int16) vertices[off+i].y; + + if (next_move == i) { + if (i != 0) + num_vertices = stbtt__close_shape(vertices, num_vertices, was_off, start_off, sx,sy,scx,scy,cx,cy); + + // now start the new one + start_off = !(flags & 1); + if (start_off) { + // if we start off with an off-curve point, then when we need to find a point on the curve + // where we can start, and we need to save some state for when we wraparound. + scx = x; + scy = y; + if (!(vertices[off+i+1].type & 1)) { + // next point is also a curve point, so interpolate an on-point curve + sx = (x + (stbtt_int32) vertices[off+i+1].x) >> 1; + sy = (y + (stbtt_int32) vertices[off+i+1].y) >> 1; + } else { + // otherwise just use the next point as our start point + sx = (stbtt_int32) vertices[off+i+1].x; + sy = (stbtt_int32) vertices[off+i+1].y; + ++i; // we're using point i+1 as the starting point, so skip it + } + } else { + sx = x; + sy = y; + } + stbtt_setvertex(&vertices[num_vertices++], STBTT_vmove,sx,sy,0,0); + was_off = 0; + next_move = 1 + ttUSHORT(endPtsOfContours+j*2); + ++j; + } else { + if (!(flags & 1)) { // if it's a curve + if (was_off) // two off-curve control points in a row means interpolate an on-curve midpoint + stbtt_setvertex(&vertices[num_vertices++], STBTT_vcurve, (cx+x)>>1, (cy+y)>>1, cx, cy); + cx = x; + cy = y; + was_off = 1; + } else { + if (was_off) + stbtt_setvertex(&vertices[num_vertices++], STBTT_vcurve, x,y, cx, cy); + else + stbtt_setvertex(&vertices[num_vertices++], STBTT_vline, x,y,0,0); + was_off = 0; + } + } + } + num_vertices = stbtt__close_shape(vertices, num_vertices, was_off, start_off, sx,sy,scx,scy,cx,cy); + } else if (numberOfContours < 0) { + // Compound shapes. 
+ int more = 1; + stbtt_uint8 *comp = data + g + 10; + num_vertices = 0; + vertices = 0; + while (more) { + stbtt_uint16 flags, gidx; + int comp_num_verts = 0, i; + stbtt_vertex *comp_verts = 0, *tmp = 0; + float mtx[6] = {1,0,0,1,0,0}, m, n; + + flags = ttSHORT(comp); comp+=2; + gidx = ttSHORT(comp); comp+=2; + + if (flags & 2) { // XY values + if (flags & 1) { // shorts + mtx[4] = ttSHORT(comp); comp+=2; + mtx[5] = ttSHORT(comp); comp+=2; + } else { + mtx[4] = ttCHAR(comp); comp+=1; + mtx[5] = ttCHAR(comp); comp+=1; + } + } + else { + // @TODO handle matching point + STBTT_assert(0); + } + if (flags & (1<<3)) { // WE_HAVE_A_SCALE + mtx[0] = mtx[3] = ttSHORT(comp)/16384.0f; comp+=2; + mtx[1] = mtx[2] = 0; + } else if (flags & (1<<6)) { // WE_HAVE_AN_X_AND_YSCALE + mtx[0] = ttSHORT(comp)/16384.0f; comp+=2; + mtx[1] = mtx[2] = 0; + mtx[3] = ttSHORT(comp)/16384.0f; comp+=2; + } else if (flags & (1<<7)) { // WE_HAVE_A_TWO_BY_TWO + mtx[0] = ttSHORT(comp)/16384.0f; comp+=2; + mtx[1] = ttSHORT(comp)/16384.0f; comp+=2; + mtx[2] = ttSHORT(comp)/16384.0f; comp+=2; + mtx[3] = ttSHORT(comp)/16384.0f; comp+=2; + } + + // Find transformation scales. + m = (float) STBTT_sqrt(mtx[0]*mtx[0] + mtx[1]*mtx[1]); + n = (float) STBTT_sqrt(mtx[2]*mtx[2] + mtx[3]*mtx[3]); + + // Get indexed glyph. + comp_num_verts = stbtt_GetGlyphShape(info, gidx, &comp_verts); + if (comp_num_verts > 0) { + // Transform vertices. + for (i = 0; i < comp_num_verts; ++i) { + stbtt_vertex* v = &comp_verts[i]; + stbtt_vertex_type x,y; + x=v->x; y=v->y; + v->x = (stbtt_vertex_type)(m * (mtx[0]*x + mtx[2]*y + mtx[4])); + v->y = (stbtt_vertex_type)(n * (mtx[1]*x + mtx[3]*y + mtx[5])); + x=v->cx; y=v->cy; + v->cx = (stbtt_vertex_type)(m * (mtx[0]*x + mtx[2]*y + mtx[4])); + v->cy = (stbtt_vertex_type)(n * (mtx[1]*x + mtx[3]*y + mtx[5])); + } + // Append vertices. + tmp = (stbtt_vertex*)STBTT_malloc((num_vertices+comp_num_verts)*sizeof(stbtt_vertex), info->userdata); + if (!tmp) { + if (vertices) STBTT_free(vertices, info->userdata); + if (comp_verts) STBTT_free(comp_verts, info->userdata); + return 0; + } + if (num_vertices > 0 && vertices) STBTT_memcpy(tmp, vertices, num_vertices*sizeof(stbtt_vertex)); + STBTT_memcpy(tmp+num_vertices, comp_verts, comp_num_verts*sizeof(stbtt_vertex)); + if (vertices) STBTT_free(vertices, info->userdata); + vertices = tmp; + STBTT_free(comp_verts, info->userdata); + num_vertices += comp_num_verts; + } + // More components ? 
+         more = flags & (1<<5);
+      }
+   } else {
+      // numberOfContours == 0, do nothing
+   }
+
+   *pvertices = vertices;
+   return num_vertices;
+}
+
+typedef struct
+{
+   int bounds;
+   int started;
+   float first_x, first_y;
+   float x, y;
+   stbtt_int32 min_x, max_x, min_y, max_y;
+
+   stbtt_vertex *pvertices;
+   int num_vertices;
+} stbtt__csctx;
+
+#define STBTT__CSCTX_INIT(bounds) {bounds,0, 0,0, 0,0, 0,0,0,0, NULL, 0}
+
+static void stbtt__track_vertex(stbtt__csctx *c, stbtt_int32 x, stbtt_int32 y)
+{
+   if (x > c->max_x || !c->started) c->max_x = x;
+   if (y > c->max_y || !c->started) c->max_y = y;
+   if (x < c->min_x || !c->started) c->min_x = x;
+   if (y < c->min_y || !c->started) c->min_y = y;
+   c->started = 1;
+}
+
+static void stbtt__csctx_v(stbtt__csctx *c, stbtt_uint8 type, stbtt_int32 x, stbtt_int32 y, stbtt_int32 cx, stbtt_int32 cy, stbtt_int32 cx1, stbtt_int32 cy1)
+{
+   if (c->bounds) {
+      stbtt__track_vertex(c, x, y);
+      if (type == STBTT_vcubic) {
+         stbtt__track_vertex(c, cx, cy);
+         stbtt__track_vertex(c, cx1, cy1);
+      }
+   } else {
+      stbtt_setvertex(&c->pvertices[c->num_vertices], type, x, y, cx, cy);
+      c->pvertices[c->num_vertices].cx1 = (stbtt_int16) cx1;
+      c->pvertices[c->num_vertices].cy1 = (stbtt_int16) cy1;
+   }
+   c->num_vertices++;
+}
+
+static void stbtt__csctx_close_shape(stbtt__csctx *ctx)
+{
+   if (ctx->first_x != ctx->x || ctx->first_y != ctx->y)
+      stbtt__csctx_v(ctx, STBTT_vline, (int)ctx->first_x, (int)ctx->first_y, 0, 0, 0, 0);
+}
+
+static void stbtt__csctx_rmove_to(stbtt__csctx *ctx, float dx, float dy)
+{
+   stbtt__csctx_close_shape(ctx);
+   ctx->first_x = ctx->x = ctx->x + dx;
+   ctx->first_y = ctx->y = ctx->y + dy;
+   stbtt__csctx_v(ctx, STBTT_vmove, (int)ctx->x, (int)ctx->y, 0, 0, 0, 0);
+}
+
+static void stbtt__csctx_rline_to(stbtt__csctx *ctx, float dx, float dy)
+{
+   ctx->x += dx;
+   ctx->y += dy;
+   stbtt__csctx_v(ctx, STBTT_vline, (int)ctx->x, (int)ctx->y, 0, 0, 0, 0);
+}
+
+static void stbtt__csctx_rccurve_to(stbtt__csctx *ctx, float dx1, float dy1, float dx2, float dy2, float dx3, float dy3)
+{
+   float cx1 = ctx->x + dx1;
+   float cy1 = ctx->y + dy1;
+   float cx2 = cx1 + dx2;
+   float cy2 = cy1 + dy2;
+   ctx->x = cx2 + dx3;
+   ctx->y = cy2 + dy3;
+   stbtt__csctx_v(ctx, STBTT_vcubic, (int)ctx->x, (int)ctx->y, (int)cx1, (int)cy1, (int)cx2, (int)cy2);
+}
+
+static stbtt__buf stbtt__get_subr(stbtt__buf idx, int n)
+{
+   int count = stbtt__cff_index_count(&idx);
+   int bias = 107;
+   if (count >= 33900)
+      bias = 32768;
+   else if (count >= 1240)
+      bias = 1131;
+   n += bias;
+   if (n < 0 || n >= count)
+      return stbtt__new_buf(NULL, 0);
+   return stbtt__cff_index_get(idx, n);
+}
+
+static stbtt__buf stbtt__cid_get_glyph_subrs(const stbtt_fontinfo *info, int glyph_index)
+{
+   stbtt__buf fdselect = info->fdselect;
+   int nranges, start, end, v, fmt, fdselector = -1, i;
+
+   stbtt__buf_seek(&fdselect, 0);
+   fmt = stbtt__buf_get8(&fdselect);
+   if (fmt == 0) {
+      // untested
+      stbtt__buf_skip(&fdselect, glyph_index);
+      fdselector = stbtt__buf_get8(&fdselect);
+   } else if (fmt == 3) {
+      nranges = stbtt__buf_get16(&fdselect);
+      start = stbtt__buf_get16(&fdselect);
+      for (i = 0; i < nranges; i++) {
+         v = stbtt__buf_get8(&fdselect);
+         end = stbtt__buf_get16(&fdselect);
+         if (glyph_index >= start && glyph_index < end) {
+            fdselector = v;
+            break;
+         }
+         start = end;
+      }
+   }
+   if (fdselector == -1) return stbtt__new_buf(NULL, 0); // no match: return an empty buf rather than indexing with -1 below
+   return stbtt__get_subrs(info->cff, stbtt__cff_index_get(info->fontdicts, fdselector));
+}
+
+static int stbtt__run_charstring(const stbtt_fontinfo *info, int glyph_index, stbtt__csctx *c)
+{ + int in_header = 1, maskbits = 0, subr_stack_height = 0, sp = 0, v, i, b0; + int has_subrs = 0, clear_stack; + float s[48]; + stbtt__buf subr_stack[10], subrs = info->subrs, b; + float f; + +#define STBTT__CSERR(s) (0) + + // this currently ignores the initial width value, which isn't needed if we have hmtx + b = stbtt__cff_index_get(info->charstrings, glyph_index); + while (b.cursor < b.size) { + i = 0; + clear_stack = 1; + b0 = stbtt__buf_get8(&b); + switch (b0) { + // @TODO implement hinting + case 0x13: // hintmask + case 0x14: // cntrmask + if (in_header) + maskbits += (sp / 2); // implicit "vstem" + in_header = 0; + stbtt__buf_skip(&b, (maskbits + 7) / 8); + break; + + case 0x01: // hstem + case 0x03: // vstem + case 0x12: // hstemhm + case 0x17: // vstemhm + maskbits += (sp / 2); + break; + + case 0x15: // rmoveto + in_header = 0; + if (sp < 2) return STBTT__CSERR("rmoveto stack"); + stbtt__csctx_rmove_to(c, s[sp-2], s[sp-1]); + break; + case 0x04: // vmoveto + in_header = 0; + if (sp < 1) return STBTT__CSERR("vmoveto stack"); + stbtt__csctx_rmove_to(c, 0, s[sp-1]); + break; + case 0x16: // hmoveto + in_header = 0; + if (sp < 1) return STBTT__CSERR("hmoveto stack"); + stbtt__csctx_rmove_to(c, s[sp-1], 0); + break; + + case 0x05: // rlineto + if (sp < 2) return STBTT__CSERR("rlineto stack"); + for (; i + 1 < sp; i += 2) + stbtt__csctx_rline_to(c, s[i], s[i+1]); + break; + + // hlineto/vlineto and vhcurveto/hvcurveto alternate horizontal and vertical + // starting from a different place. + + case 0x07: // vlineto + if (sp < 1) return STBTT__CSERR("vlineto stack"); + goto vlineto; + case 0x06: // hlineto + if (sp < 1) return STBTT__CSERR("hlineto stack"); + for (;;) { + if (i >= sp) break; + stbtt__csctx_rline_to(c, s[i], 0); + i++; + vlineto: + if (i >= sp) break; + stbtt__csctx_rline_to(c, 0, s[i]); + i++; + } + break; + + case 0x1F: // hvcurveto + if (sp < 4) return STBTT__CSERR("hvcurveto stack"); + goto hvcurveto; + case 0x1E: // vhcurveto + if (sp < 4) return STBTT__CSERR("vhcurveto stack"); + for (;;) { + if (i + 3 >= sp) break; + stbtt__csctx_rccurve_to(c, 0, s[i], s[i+1], s[i+2], s[i+3], (sp - i == 5) ? s[i + 4] : 0.0f); + i += 4; + hvcurveto: + if (i + 3 >= sp) break; + stbtt__csctx_rccurve_to(c, s[i], 0, s[i+1], s[i+2], (sp - i == 5) ? 
s[i+4] : 0.0f, s[i+3]); + i += 4; + } + break; + + case 0x08: // rrcurveto + if (sp < 6) return STBTT__CSERR("rcurveline stack"); + for (; i + 5 < sp; i += 6) + stbtt__csctx_rccurve_to(c, s[i], s[i+1], s[i+2], s[i+3], s[i+4], s[i+5]); + break; + + case 0x18: // rcurveline + if (sp < 8) return STBTT__CSERR("rcurveline stack"); + for (; i + 5 < sp - 2; i += 6) + stbtt__csctx_rccurve_to(c, s[i], s[i+1], s[i+2], s[i+3], s[i+4], s[i+5]); + if (i + 1 >= sp) return STBTT__CSERR("rcurveline stack"); + stbtt__csctx_rline_to(c, s[i], s[i+1]); + break; + + case 0x19: // rlinecurve + if (sp < 8) return STBTT__CSERR("rlinecurve stack"); + for (; i + 1 < sp - 6; i += 2) + stbtt__csctx_rline_to(c, s[i], s[i+1]); + if (i + 5 >= sp) return STBTT__CSERR("rlinecurve stack"); + stbtt__csctx_rccurve_to(c, s[i], s[i+1], s[i+2], s[i+3], s[i+4], s[i+5]); + break; + + case 0x1A: // vvcurveto + case 0x1B: // hhcurveto + if (sp < 4) return STBTT__CSERR("(vv|hh)curveto stack"); + f = 0.0; + if (sp & 1) { f = s[i]; i++; } + for (; i + 3 < sp; i += 4) { + if (b0 == 0x1B) + stbtt__csctx_rccurve_to(c, s[i], f, s[i+1], s[i+2], s[i+3], 0.0); + else + stbtt__csctx_rccurve_to(c, f, s[i], s[i+1], s[i+2], 0.0, s[i+3]); + f = 0.0; + } + break; + + case 0x0A: // callsubr + if (!has_subrs) { + if (info->fdselect.size) + subrs = stbtt__cid_get_glyph_subrs(info, glyph_index); + has_subrs = 1; + } + // FALLTHROUGH + case 0x1D: // callgsubr + if (sp < 1) return STBTT__CSERR("call(g|)subr stack"); + v = (int) s[--sp]; + if (subr_stack_height >= 10) return STBTT__CSERR("recursion limit"); + subr_stack[subr_stack_height++] = b; + b = stbtt__get_subr(b0 == 0x0A ? subrs : info->gsubrs, v); + if (b.size == 0) return STBTT__CSERR("subr not found"); + b.cursor = 0; + clear_stack = 0; + break; + + case 0x0B: // return + if (subr_stack_height <= 0) return STBTT__CSERR("return outside subr"); + b = subr_stack[--subr_stack_height]; + clear_stack = 0; + break; + + case 0x0E: // endchar + stbtt__csctx_close_shape(c); + return 1; + + case 0x0C: { // two-byte escape + float dx1, dx2, dx3, dx4, dx5, dx6, dy1, dy2, dy3, dy4, dy5, dy6; + float dx, dy; + int b1 = stbtt__buf_get8(&b); + switch (b1) { + // @TODO These "flex" implementations ignore the flex-depth and resolution, + // and always draw beziers. 
+ case 0x22: // hflex + if (sp < 7) return STBTT__CSERR("hflex stack"); + dx1 = s[0]; + dx2 = s[1]; + dy2 = s[2]; + dx3 = s[3]; + dx4 = s[4]; + dx5 = s[5]; + dx6 = s[6]; + stbtt__csctx_rccurve_to(c, dx1, 0, dx2, dy2, dx3, 0); + stbtt__csctx_rccurve_to(c, dx4, 0, dx5, -dy2, dx6, 0); + break; + + case 0x23: // flex + if (sp < 13) return STBTT__CSERR("flex stack"); + dx1 = s[0]; + dy1 = s[1]; + dx2 = s[2]; + dy2 = s[3]; + dx3 = s[4]; + dy3 = s[5]; + dx4 = s[6]; + dy4 = s[7]; + dx5 = s[8]; + dy5 = s[9]; + dx6 = s[10]; + dy6 = s[11]; + //fd is s[12] + stbtt__csctx_rccurve_to(c, dx1, dy1, dx2, dy2, dx3, dy3); + stbtt__csctx_rccurve_to(c, dx4, dy4, dx5, dy5, dx6, dy6); + break; + + case 0x24: // hflex1 + if (sp < 9) return STBTT__CSERR("hflex1 stack"); + dx1 = s[0]; + dy1 = s[1]; + dx2 = s[2]; + dy2 = s[3]; + dx3 = s[4]; + dx4 = s[5]; + dx5 = s[6]; + dy5 = s[7]; + dx6 = s[8]; + stbtt__csctx_rccurve_to(c, dx1, dy1, dx2, dy2, dx3, 0); + stbtt__csctx_rccurve_to(c, dx4, 0, dx5, dy5, dx6, -(dy1+dy2+dy5)); + break; + + case 0x25: // flex1 + if (sp < 11) return STBTT__CSERR("flex1 stack"); + dx1 = s[0]; + dy1 = s[1]; + dx2 = s[2]; + dy2 = s[3]; + dx3 = s[4]; + dy3 = s[5]; + dx4 = s[6]; + dy4 = s[7]; + dx5 = s[8]; + dy5 = s[9]; + dx6 = dy6 = s[10]; + dx = dx1+dx2+dx3+dx4+dx5; + dy = dy1+dy2+dy3+dy4+dy5; + if (STBTT_fabs(dx) > STBTT_fabs(dy)) + dy6 = -dy; + else + dx6 = -dx; + stbtt__csctx_rccurve_to(c, dx1, dy1, dx2, dy2, dx3, dy3); + stbtt__csctx_rccurve_to(c, dx4, dy4, dx5, dy5, dx6, dy6); + break; + + default: + return STBTT__CSERR("unimplemented"); + } + } break; + + default: + if (b0 != 255 && b0 != 28 && b0 < 32) + return STBTT__CSERR("reserved operator"); + + // push immediate + if (b0 == 255) { + f = (float)(stbtt_int32)stbtt__buf_get32(&b) / 0x10000; + } else { + stbtt__buf_skip(&b, -1); + f = (float)(stbtt_int16)stbtt__cff_int(&b); + } + if (sp >= 48) return STBTT__CSERR("push stack overflow"); + s[sp++] = f; + clear_stack = 0; + break; + } + if (clear_stack) sp = 0; + } + return STBTT__CSERR("no endchar"); + +#undef STBTT__CSERR +} + +static int stbtt__GetGlyphShapeT2(const stbtt_fontinfo *info, int glyph_index, stbtt_vertex **pvertices) +{ + // runs the charstring twice, once to count and once to output (to avoid realloc) + stbtt__csctx count_ctx = STBTT__CSCTX_INIT(1); + stbtt__csctx output_ctx = STBTT__CSCTX_INIT(0); + if (stbtt__run_charstring(info, glyph_index, &count_ctx)) { + *pvertices = (stbtt_vertex*)STBTT_malloc(count_ctx.num_vertices*sizeof(stbtt_vertex), info->userdata); + output_ctx.pvertices = *pvertices; + if (stbtt__run_charstring(info, glyph_index, &output_ctx)) { + STBTT_assert(output_ctx.num_vertices == count_ctx.num_vertices); + return output_ctx.num_vertices; + } + } + *pvertices = NULL; + return 0; +} + +static int stbtt__GetGlyphInfoT2(const stbtt_fontinfo *info, int glyph_index, int *x0, int *y0, int *x1, int *y1) +{ + stbtt__csctx c = STBTT__CSCTX_INIT(1); + int r = stbtt__run_charstring(info, glyph_index, &c); + if (x0) *x0 = r ? c.min_x : 0; + if (y0) *y0 = r ? c.min_y : 0; + if (x1) *x1 = r ? c.max_x : 0; + if (y1) *y1 = r ? c.max_y : 0; + return r ? 
c.num_vertices : 0; +} + +STBTT_DEF int stbtt_GetGlyphShape(const stbtt_fontinfo *info, int glyph_index, stbtt_vertex **pvertices) +{ + if (!info->cff.size) + return stbtt__GetGlyphShapeTT(info, glyph_index, pvertices); + else + return stbtt__GetGlyphShapeT2(info, glyph_index, pvertices); +} + +STBTT_DEF void stbtt_GetGlyphHMetrics(const stbtt_fontinfo *info, int glyph_index, int *advanceWidth, int *leftSideBearing) +{ + stbtt_uint16 numOfLongHorMetrics = ttUSHORT(info->data+info->hhea + 34); + if (glyph_index < numOfLongHorMetrics) { + if (advanceWidth) *advanceWidth = ttSHORT(info->data + info->hmtx + 4*glyph_index); + if (leftSideBearing) *leftSideBearing = ttSHORT(info->data + info->hmtx + 4*glyph_index + 2); + } else { + if (advanceWidth) *advanceWidth = ttSHORT(info->data + info->hmtx + 4*(numOfLongHorMetrics-1)); + if (leftSideBearing) *leftSideBearing = ttSHORT(info->data + info->hmtx + 4*numOfLongHorMetrics + 2*(glyph_index - numOfLongHorMetrics)); + } +} + +STBTT_DEF int stbtt_GetKerningTableLength(const stbtt_fontinfo *info) +{ + stbtt_uint8 *data = info->data + info->kern; + + // we only look at the first table. it must be 'horizontal' and format 0. + if (!info->kern) + return 0; + if (ttUSHORT(data+2) < 1) // number of tables, need at least 1 + return 0; + if (ttUSHORT(data+8) != 1) // horizontal flag must be set in format + return 0; + + return ttUSHORT(data+10); +} + +STBTT_DEF int stbtt_GetKerningTable(const stbtt_fontinfo *info, stbtt_kerningentry* table, int table_length) +{ + stbtt_uint8 *data = info->data + info->kern; + int k, length; + + // we only look at the first table. it must be 'horizontal' and format 0. + if (!info->kern) + return 0; + if (ttUSHORT(data+2) < 1) // number of tables, need at least 1 + return 0; + if (ttUSHORT(data+8) != 1) // horizontal flag must be set in format + return 0; + + length = ttUSHORT(data+10); + if (table_length < length) + length = table_length; + + for (k = 0; k < length; k++) + { + table[k].glyph1 = ttUSHORT(data+18+(k*6)); + table[k].glyph2 = ttUSHORT(data+20+(k*6)); + table[k].advance = ttSHORT(data+22+(k*6)); + } + + return length; +} + +static int stbtt__GetGlyphKernInfoAdvance(const stbtt_fontinfo *info, int glyph1, int glyph2) +{ + stbtt_uint8 *data = info->data + info->kern; + stbtt_uint32 needle, straw; + int l, r, m; + + // we only look at the first table. it must be 'horizontal' and format 0. + if (!info->kern) + return 0; + if (ttUSHORT(data+2) < 1) // number of tables, need at least 1 + return 0; + if (ttUSHORT(data+8) != 1) // horizontal flag must be set in format + return 0; + + l = 0; + r = ttUSHORT(data+10) - 1; + needle = glyph1 << 16 | glyph2; + while (l <= r) { + m = (l + r) >> 1; + straw = ttULONG(data+18+(m*6)); // note: unaligned read + if (needle < straw) + r = m - 1; + else if (needle > straw) + l = m + 1; + else + return ttSHORT(data+22+(m*6)); + } + return 0; +} + +static stbtt_int32 stbtt__GetCoverageIndex(stbtt_uint8 *coverageTable, int glyph) +{ + stbtt_uint16 coverageFormat = ttUSHORT(coverageTable); + switch (coverageFormat) { + case 1: { + stbtt_uint16 glyphCount = ttUSHORT(coverageTable + 2); + + // Binary search. 
+      stbtt_int32 l=0, r=glyphCount-1, m;
+      int straw, needle=glyph;
+      while (l <= r) {
+         stbtt_uint8 *glyphArray = coverageTable + 4;
+         stbtt_uint16 glyphID;
+         m = (l + r) >> 1;
+         glyphID = ttUSHORT(glyphArray + 2 * m);
+         straw = glyphID;
+         if (needle < straw)
+            r = m - 1;
+         else if (needle > straw)
+            l = m + 1;
+         else {
+            return m;
+         }
+      }
+      break;
+   }
+
+   case 2: {
+      stbtt_uint16 rangeCount = ttUSHORT(coverageTable + 2);
+      stbtt_uint8 *rangeArray = coverageTable + 4;
+
+      // Binary search.
+      stbtt_int32 l=0, r=rangeCount-1, m;
+      int strawStart, strawEnd, needle=glyph;
+      while (l <= r) {
+         stbtt_uint8 *rangeRecord;
+         m = (l + r) >> 1;
+         rangeRecord = rangeArray + 6 * m;
+         strawStart = ttUSHORT(rangeRecord);
+         strawEnd = ttUSHORT(rangeRecord + 2);
+         if (needle < strawStart)
+            r = m - 1;
+         else if (needle > strawEnd)
+            l = m + 1;
+         else {
+            stbtt_uint16 startCoverageIndex = ttUSHORT(rangeRecord + 4);
+            return startCoverageIndex + glyph - strawStart;
+         }
+      }
+      break;
+   }
+
+   default: return -1; // unsupported
+   }
+
+   return -1;
+}
+
+static stbtt_int32 stbtt__GetGlyphClass(stbtt_uint8 *classDefTable, int glyph)
+{
+   stbtt_uint16 classDefFormat = ttUSHORT(classDefTable);
+   switch (classDefFormat)
+   {
+      case 1: {
+         stbtt_uint16 startGlyphID = ttUSHORT(classDefTable + 2);
+         stbtt_uint16 glyphCount = ttUSHORT(classDefTable + 4);
+         stbtt_uint8 *classDef1ValueArray = classDefTable + 6;
+
+         if (glyph >= startGlyphID && glyph < startGlyphID + glyphCount)
+            return (stbtt_int32)ttUSHORT(classDef1ValueArray + 2 * (glyph - startGlyphID));
+         break;
+      }
+
+      case 2: {
+         stbtt_uint16 classRangeCount = ttUSHORT(classDefTable + 2);
+         stbtt_uint8 *classRangeRecords = classDefTable + 4;
+
+         // Binary search.
+         stbtt_int32 l=0, r=classRangeCount-1, m;
+         int strawStart, strawEnd, needle=glyph;
+         while (l <= r) {
+            stbtt_uint8 *classRangeRecord;
+            m = (l + r) >> 1;
+            classRangeRecord = classRangeRecords + 6 * m;
+            strawStart = ttUSHORT(classRangeRecord);
+            strawEnd = ttUSHORT(classRangeRecord + 2);
+            if (needle < strawStart)
+               r = m - 1;
+            else if (needle > strawEnd)
+               l = m + 1;
+            else
+               return (stbtt_int32)ttUSHORT(classRangeRecord + 4);
+         }
+         break;
+      }
+
+      default:
+         return -1; // Unsupported definition type, return an error.
+   }
+
+   // "All glyphs not assigned to a class fall into class 0". (OpenType spec)
+   return 0;
+}
+
+// Define to STBTT_assert(x) if you want to break on unimplemented formats.
+#define STBTT_GPOS_TODO_assert(x)
+
+static stbtt_int32 stbtt__GetGlyphGPOSInfoAdvance(const stbtt_fontinfo *info, int glyph1, int glyph2)
+{
+   stbtt_uint16 lookupListOffset;
+   stbtt_uint8 *lookupList;
+   stbtt_uint16 lookupCount;
+   stbtt_uint8 *data;
+   stbtt_int32 i, sti;
+
+   if (!info->gpos) return 0;
+
+   data = info->data + info->gpos;
+
+   if (ttUSHORT(data+0) != 1) return 0; // Major version 1
+   if (ttUSHORT(data+2) != 0) return 0; // Minor version 0
+
+   lookupListOffset = ttUSHORT(data+8);
+   lookupList = data + lookupListOffset;
+   lookupCount = ttUSHORT(lookupList);
+
+   for (i=0; i<lookupCount; ++i) {
+      stbtt_uint16 lookupOffset = ttUSHORT(lookupList + 2 + 2 * i);
+      stbtt_uint8 *lookupTable = lookupList + lookupOffset;
+
+      stbtt_uint16 lookupType = ttUSHORT(lookupTable);
+      stbtt_uint16 subTableCount = ttUSHORT(lookupTable + 4);
+      stbtt_uint8 *subTableOffsets = lookupTable + 6;
+      if (lookupType != 2)   // Pair Adjustment Positioning Subtable
+         continue;
+
+      for (sti=0; sti<subTableCount; sti++) {
+         stbtt_uint16 subtableOffset = ttUSHORT(subTableOffsets + 2 * sti);
+         stbtt_uint8 *table = lookupTable + subtableOffset;
+         stbtt_uint16 posFormat = ttUSHORT(table);
+         stbtt_uint16 coverageOffset = ttUSHORT(table + 2);
+         stbtt_int32 coverageIndex = stbtt__GetCoverageIndex(table + coverageOffset, glyph1);
+         if (coverageIndex == -1) continue;
+
+         switch (posFormat) {
+            case 1: {
+               stbtt_int32 l, r, m;
+               int straw, needle;
+               stbtt_uint16 valueFormat1 = ttUSHORT(table + 4);
+               stbtt_uint16 valueFormat2 = ttUSHORT(table + 6);
+               if (valueFormat1 == 4 && valueFormat2 == 0) { // Support more formats?
+                  stbtt_int32 valueRecordPairSizeInBytes = 2;
+                  stbtt_uint16 pairSetCount = ttUSHORT(table + 8);
+                  stbtt_uint16 pairPosOffset = ttUSHORT(table + 10 + 2 * coverageIndex);
+                  stbtt_uint8 *pairValueTable = table + pairPosOffset;
+                  stbtt_uint16 pairValueCount = ttUSHORT(pairValueTable);
+                  stbtt_uint8 *pairValueArray = pairValueTable + 2;
+
+                  if (coverageIndex >= pairSetCount) return 0;
+
+                  needle=glyph2;
+                  r=pairValueCount-1;
+                  l=0;
+
+                  // Binary search.
+ while (l <= r) { + stbtt_uint16 secondGlyph; + stbtt_uint8 *pairValue; + m = (l + r) >> 1; + pairValue = pairValueArray + (2 + valueRecordPairSizeInBytes) * m; + secondGlyph = ttUSHORT(pairValue); + straw = secondGlyph; + if (needle < straw) + r = m - 1; + else if (needle > straw) + l = m + 1; + else { + stbtt_int16 xAdvance = ttSHORT(pairValue + 2); + return xAdvance; + } + } + } else + return 0; + break; + } + + case 2: { + stbtt_uint16 valueFormat1 = ttUSHORT(table + 4); + stbtt_uint16 valueFormat2 = ttUSHORT(table + 6); + if (valueFormat1 == 4 && valueFormat2 == 0) { // Support more formats? + stbtt_uint16 classDef1Offset = ttUSHORT(table + 8); + stbtt_uint16 classDef2Offset = ttUSHORT(table + 10); + int glyph1class = stbtt__GetGlyphClass(table + classDef1Offset, glyph1); + int glyph2class = stbtt__GetGlyphClass(table + classDef2Offset, glyph2); + + stbtt_uint16 class1Count = ttUSHORT(table + 12); + stbtt_uint16 class2Count = ttUSHORT(table + 14); + stbtt_uint8 *class1Records, *class2Records; + stbtt_int16 xAdvance; + + if (glyph1class < 0 || glyph1class >= class1Count) return 0; // malformed + if (glyph2class < 0 || glyph2class >= class2Count) return 0; // malformed + + class1Records = table + 16; + class2Records = class1Records + 2 * (glyph1class * class2Count); + xAdvance = ttSHORT(class2Records + 2 * glyph2class); + return xAdvance; + } else + return 0; + break; + } + + default: + return 0; // Unsupported position format + } + } + } + + return 0; +} + +STBTT_DEF int stbtt_GetGlyphKernAdvance(const stbtt_fontinfo *info, int g1, int g2) +{ + int xAdvance = 0; + + if (info->gpos) + xAdvance += stbtt__GetGlyphGPOSInfoAdvance(info, g1, g2); + else if (info->kern) + xAdvance += stbtt__GetGlyphKernInfoAdvance(info, g1, g2); + + return xAdvance; +} + +STBTT_DEF int stbtt_GetCodepointKernAdvance(const stbtt_fontinfo *info, int ch1, int ch2) +{ + if (!info->kern && !info->gpos) // if no kerning table, don't waste time looking up both codepoint->glyphs + return 0; + return stbtt_GetGlyphKernAdvance(info, stbtt_FindGlyphIndex(info,ch1), stbtt_FindGlyphIndex(info,ch2)); +} + +STBTT_DEF void stbtt_GetCodepointHMetrics(const stbtt_fontinfo *info, int codepoint, int *advanceWidth, int *leftSideBearing) +{ + stbtt_GetGlyphHMetrics(info, stbtt_FindGlyphIndex(info,codepoint), advanceWidth, leftSideBearing); +} + +STBTT_DEF void stbtt_GetFontVMetrics(const stbtt_fontinfo *info, int *ascent, int *descent, int *lineGap) +{ + if (ascent ) *ascent = ttSHORT(info->data+info->hhea + 4); + if (descent) *descent = ttSHORT(info->data+info->hhea + 6); + if (lineGap) *lineGap = ttSHORT(info->data+info->hhea + 8); +} + +STBTT_DEF int stbtt_GetFontVMetricsOS2(const stbtt_fontinfo *info, int *typoAscent, int *typoDescent, int *typoLineGap) +{ + int tab = stbtt__find_table(info->data, info->fontstart, "OS/2"); + if (!tab) + return 0; + if (typoAscent ) *typoAscent = ttSHORT(info->data+tab + 68); + if (typoDescent) *typoDescent = ttSHORT(info->data+tab + 70); + if (typoLineGap) *typoLineGap = ttSHORT(info->data+tab + 72); + return 1; +} + +STBTT_DEF void stbtt_GetFontBoundingBox(const stbtt_fontinfo *info, int *x0, int *y0, int *x1, int *y1) +{ + *x0 = ttSHORT(info->data + info->head + 36); + *y0 = ttSHORT(info->data + info->head + 38); + *x1 = ttSHORT(info->data + info->head + 40); + *y1 = ttSHORT(info->data + info->head + 42); +} + +STBTT_DEF float stbtt_ScaleForPixelHeight(const stbtt_fontinfo *info, float height) +{ + int fheight = ttSHORT(info->data + info->hhea + 4) - ttSHORT(info->data + info->hhea + 6); + 
return (float) height / fheight;
+}
+
+STBTT_DEF float stbtt_ScaleForMappingEmToPixels(const stbtt_fontinfo *info, float pixels)
+{
+   int unitsPerEm = ttUSHORT(info->data + info->head + 18);
+   return pixels / unitsPerEm;
+}
+
+STBTT_DEF void stbtt_FreeShape(const stbtt_fontinfo *info, stbtt_vertex *v)
+{
+   STBTT_free(v, info->userdata);
+}
+
+STBTT_DEF stbtt_uint8 *stbtt_FindSVGDoc(const stbtt_fontinfo *info, int gl)
+{
+   int i;
+   stbtt_uint8 *data = info->data;
+   stbtt_uint8 *svg_doc_list = data + stbtt__get_svg((stbtt_fontinfo *) info);
+
+   int numEntries = ttUSHORT(svg_doc_list);
+   stbtt_uint8 *svg_docs = svg_doc_list + 2;
+
+   for(i=0; i<numEntries; i++) {
+      stbtt_uint8 *svg_doc = svg_docs + (12 * i);
+      if ((gl >= ttUSHORT(svg_doc)) && (gl <= ttUSHORT(svg_doc + 2)))
+         return svg_doc;
+   }
+   return 0;
+}
+
+STBTT_DEF int stbtt_GetGlyphSVG(const stbtt_fontinfo *info, int gl, const char **svg)
+{
+   stbtt_uint8 *data = info->data;
+   stbtt_uint8 *svg_doc;
+
+   if (info->svg == 0)
+      return 0;
+
+   svg_doc = stbtt_FindSVGDoc(info, gl);
+   if (svg_doc != NULL) {
+      *svg = (char *) data + info->svg + ttULONG(svg_doc + 4);
+      return ttULONG(svg_doc + 8);
+   } else {
+      return 0;
+   }
+}
+
+STBTT_DEF int stbtt_GetCodepointSVG(const stbtt_fontinfo *info, int unicode_codepoint, const char **svg)
+{
+   return stbtt_GetGlyphSVG(info, stbtt_FindGlyphIndex(info, unicode_codepoint), svg);
+}
+
+//////////////////////////////////////////////////////////////////////////////
+//
+// antialiasing software rasterizer
+//
+
+STBTT_DEF void stbtt_GetGlyphBitmapBoxSubpixel(const stbtt_fontinfo *font, int glyph, float scale_x, float scale_y,float shift_x, float shift_y, int *ix0, int *iy0, int *ix1, int *iy1)
+{
+   int x0=0,y0=0,x1,y1; // =0 suppresses compiler warning
+   if (!stbtt_GetGlyphBox(font, glyph, &x0,&y0,&x1,&y1)) {
+      // e.g. space character
+      if (ix0) *ix0 = 0;
+      if (iy0) *iy0 = 0;
+      if (ix1) *ix1 = 0;
+      if (iy1) *iy1 = 0;
+   } else {
+      // move to integral bboxes (treating pixels as little squares, what pixels get touched)?
+ if (ix0) *ix0 = STBTT_ifloor( x0 * scale_x + shift_x); + if (iy0) *iy0 = STBTT_ifloor(-y1 * scale_y + shift_y); + if (ix1) *ix1 = STBTT_iceil ( x1 * scale_x + shift_x); + if (iy1) *iy1 = STBTT_iceil (-y0 * scale_y + shift_y); + } +} + +STBTT_DEF void stbtt_GetGlyphBitmapBox(const stbtt_fontinfo *font, int glyph, float scale_x, float scale_y, int *ix0, int *iy0, int *ix1, int *iy1) +{ + stbtt_GetGlyphBitmapBoxSubpixel(font, glyph, scale_x, scale_y,0.0f,0.0f, ix0, iy0, ix1, iy1); +} + +STBTT_DEF void stbtt_GetCodepointBitmapBoxSubpixel(const stbtt_fontinfo *font, int codepoint, float scale_x, float scale_y, float shift_x, float shift_y, int *ix0, int *iy0, int *ix1, int *iy1) +{ + stbtt_GetGlyphBitmapBoxSubpixel(font, stbtt_FindGlyphIndex(font,codepoint), scale_x, scale_y,shift_x,shift_y, ix0,iy0,ix1,iy1); +} + +STBTT_DEF void stbtt_GetCodepointBitmapBox(const stbtt_fontinfo *font, int codepoint, float scale_x, float scale_y, int *ix0, int *iy0, int *ix1, int *iy1) +{ + stbtt_GetCodepointBitmapBoxSubpixel(font, codepoint, scale_x, scale_y,0.0f,0.0f, ix0,iy0,ix1,iy1); +} + +////////////////////////////////////////////////////////////////////////////// +// +// Rasterizer + +typedef struct stbtt__hheap_chunk +{ + struct stbtt__hheap_chunk *next; +} stbtt__hheap_chunk; + +typedef struct stbtt__hheap +{ + struct stbtt__hheap_chunk *head; + void *first_free; + int num_remaining_in_head_chunk; +} stbtt__hheap; + +static void *stbtt__hheap_alloc(stbtt__hheap *hh, size_t size, void *userdata) +{ + if (hh->first_free) { + void *p = hh->first_free; + hh->first_free = * (void **) p; + return p; + } else { + if (hh->num_remaining_in_head_chunk == 0) { + int count = (size < 32 ? 2000 : size < 128 ? 800 : 100); + stbtt__hheap_chunk *c = (stbtt__hheap_chunk *) STBTT_malloc(sizeof(stbtt__hheap_chunk) + size * count, userdata); + if (c == NULL) + return NULL; + c->next = hh->head; + hh->head = c; + hh->num_remaining_in_head_chunk = count; + } + --hh->num_remaining_in_head_chunk; + return (char *) (hh->head) + sizeof(stbtt__hheap_chunk) + size * hh->num_remaining_in_head_chunk; + } +} + +static void stbtt__hheap_free(stbtt__hheap *hh, void *p) +{ + *(void **) p = hh->first_free; + hh->first_free = p; +} + +static void stbtt__hheap_cleanup(stbtt__hheap *hh, void *userdata) +{ + stbtt__hheap_chunk *c = hh->head; + while (c) { + stbtt__hheap_chunk *n = c->next; + STBTT_free(c, userdata); + c = n; + } +} + +typedef struct stbtt__edge { + float x0,y0, x1,y1; + int invert; +} stbtt__edge; + + +typedef struct stbtt__active_edge +{ + struct stbtt__active_edge *next; + #if STBTT_RASTERIZER_VERSION==1 + int x,dx; + float ey; + int direction; + #elif STBTT_RASTERIZER_VERSION==2 + float fx,fdx,fdy; + float direction; + float sy; + float ey; + #else + #error "Unrecognized value of STBTT_RASTERIZER_VERSION" + #endif +} stbtt__active_edge; + +#if STBTT_RASTERIZER_VERSION == 1 +#define STBTT_FIXSHIFT 10 +#define STBTT_FIX (1 << STBTT_FIXSHIFT) +#define STBTT_FIXMASK (STBTT_FIX-1) + +static stbtt__active_edge *stbtt__new_active(stbtt__hheap *hh, stbtt__edge *e, int off_x, float start_point, void *userdata) +{ + stbtt__active_edge *z = (stbtt__active_edge *) stbtt__hheap_alloc(hh, sizeof(*z), userdata); + float dxdy = (e->x1 - e->x0) / (e->y1 - e->y0); + STBTT_assert(z != NULL); + if (!z) return z; + + // round dx down to avoid overshooting + if (dxdy < 0) + z->dx = -STBTT_ifloor(STBTT_FIX * -dxdy); + else + z->dx = STBTT_ifloor(STBTT_FIX * dxdy); + + z->x = STBTT_ifloor(STBTT_FIX * e->x0 + z->dx * (start_point - e->y0)); // 
use z->dx so when we offset later it's by the same amount + z->x -= off_x * STBTT_FIX; + + z->ey = e->y1; + z->next = 0; + z->direction = e->invert ? 1 : -1; + return z; +} +#elif STBTT_RASTERIZER_VERSION == 2 +static stbtt__active_edge *stbtt__new_active(stbtt__hheap *hh, stbtt__edge *e, int off_x, float start_point, void *userdata) +{ + stbtt__active_edge *z = (stbtt__active_edge *) stbtt__hheap_alloc(hh, sizeof(*z), userdata); + float dxdy = (e->x1 - e->x0) / (e->y1 - e->y0); + STBTT_assert(z != NULL); + //STBTT_assert(e->y0 <= start_point); + if (!z) return z; + z->fdx = dxdy; + z->fdy = dxdy != 0.0f ? (1.0f/dxdy) : 0.0f; + z->fx = e->x0 + dxdy * (start_point - e->y0); + z->fx -= off_x; + z->direction = e->invert ? 1.0f : -1.0f; + z->sy = e->y0; + z->ey = e->y1; + z->next = 0; + return z; +} +#else +#error "Unrecognized value of STBTT_RASTERIZER_VERSION" +#endif + +#if STBTT_RASTERIZER_VERSION == 1 +// note: this routine clips fills that extend off the edges... ideally this +// wouldn't happen, but it could happen if the truetype glyph bounding boxes +// are wrong, or if the user supplies a too-small bitmap +static void stbtt__fill_active_edges(unsigned char *scanline, int len, stbtt__active_edge *e, int max_weight) +{ + // non-zero winding fill + int x0=0, w=0; + + while (e) { + if (w == 0) { + // if we're currently at zero, we need to record the edge start point + x0 = e->x; w += e->direction; + } else { + int x1 = e->x; w += e->direction; + // if we went to zero, we need to draw + if (w == 0) { + int i = x0 >> STBTT_FIXSHIFT; + int j = x1 >> STBTT_FIXSHIFT; + + if (i < len && j >= 0) { + if (i == j) { + // x0,x1 are the same pixel, so compute combined coverage + scanline[i] = scanline[i] + (stbtt_uint8) ((x1 - x0) * max_weight >> STBTT_FIXSHIFT); + } else { + if (i >= 0) // add antialiasing for x0 + scanline[i] = scanline[i] + (stbtt_uint8) (((STBTT_FIX - (x0 & STBTT_FIXMASK)) * max_weight) >> STBTT_FIXSHIFT); + else + i = -1; // clip + + if (j < len) // add antialiasing for x1 + scanline[j] = scanline[j] + (stbtt_uint8) (((x1 & STBTT_FIXMASK) * max_weight) >> STBTT_FIXSHIFT); + else + j = len; // clip + + for (++i; i < j; ++i) // fill pixels between x0 and x1 + scanline[i] = scanline[i] + (stbtt_uint8) max_weight; + } + } + } + } + + e = e->next; + } +} + +static void stbtt__rasterize_sorted_edges(stbtt__bitmap *result, stbtt__edge *e, int n, int vsubsample, int off_x, int off_y, void *userdata) +{ + stbtt__hheap hh = { 0, 0, 0 }; + stbtt__active_edge *active = NULL; + int y,j=0; + int max_weight = (255 / vsubsample); // weight per vertical scanline + int s; // vertical subsample index + unsigned char scanline_data[512], *scanline; + + if (result->w > 512) + scanline = (unsigned char *) STBTT_malloc(result->w, userdata); + else + scanline = scanline_data; + + y = off_y * vsubsample; + e[n].y0 = (off_y + result->h) * (float) vsubsample + 1; + + while (j < result->h) { + STBTT_memset(scanline, 0, result->w); + for (s=0; s < vsubsample; ++s) { + // find center of pixel for this scanline + float scan_y = y + 0.5f; + stbtt__active_edge **step = &active; + + // update all active edges; + // remove all active edges that terminate before the center of this scanline + while (*step) { + stbtt__active_edge * z = *step; + if (z->ey <= scan_y) { + *step = z->next; // delete from list + STBTT_assert(z->direction); + z->direction = 0; + stbtt__hheap_free(&hh, z); + } else { + z->x += z->dx; // advance to position for current scanline + step = &((*step)->next); // advance through list + } + } + + 
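+         // advancing each active edge's x by dx (above) can change the edges'
+         // relative order, so the bubble-sort passes below restore the list to
+         // ascending x before new edges are inserted and the spans are filled.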
// resort the list if needed + for(;;) { + int changed=0; + step = &active; + while (*step && (*step)->next) { + if ((*step)->x > (*step)->next->x) { + stbtt__active_edge *t = *step; + stbtt__active_edge *q = t->next; + + t->next = q->next; + q->next = t; + *step = q; + changed = 1; + } + step = &(*step)->next; + } + if (!changed) break; + } + + // insert all edges that start before the center of this scanline -- omit ones that also end on this scanline + while (e->y0 <= scan_y) { + if (e->y1 > scan_y) { + stbtt__active_edge *z = stbtt__new_active(&hh, e, off_x, scan_y, userdata); + if (z != NULL) { + // find insertion point + if (active == NULL) + active = z; + else if (z->x < active->x) { + // insert at front + z->next = active; + active = z; + } else { + // find thing to insert AFTER + stbtt__active_edge *p = active; + while (p->next && p->next->x < z->x) + p = p->next; + // at this point, p->next->x is NOT < z->x + z->next = p->next; + p->next = z; + } + } + } + ++e; + } + + // now process all active edges in XOR fashion + if (active) + stbtt__fill_active_edges(scanline, result->w, active, max_weight); + + ++y; + } + STBTT_memcpy(result->pixels + j * result->stride, scanline, result->w); + ++j; + } + + stbtt__hheap_cleanup(&hh, userdata); + + if (scanline != scanline_data) + STBTT_free(scanline, userdata); +} + +#elif STBTT_RASTERIZER_VERSION == 2 + +// the edge passed in here does not cross the vertical line at x or the vertical line at x+1 +// (i.e. it has already been clipped to those) +static void stbtt__handle_clipped_edge(float *scanline, int x, stbtt__active_edge *e, float x0, float y0, float x1, float y1) +{ + if (y0 == y1) return; + STBTT_assert(y0 < y1); + STBTT_assert(e->sy <= e->ey); + if (y0 > e->ey) return; + if (y1 < e->sy) return; + if (y0 < e->sy) { + x0 += (x1-x0) * (e->sy - y0) / (y1-y0); + y0 = e->sy; + } + if (y1 > e->ey) { + x1 += (x1-x0) * (e->ey - y1) / (y1-y0); + y1 = e->ey; + } + + if (x0 == x) + STBTT_assert(x1 <= x+1); + else if (x0 == x+1) + STBTT_assert(x1 >= x); + else if (x0 <= x) + STBTT_assert(x1 <= x); + else if (x0 >= x+1) + STBTT_assert(x1 >= x+1); + else + STBTT_assert(x1 >= x && x1 <= x+1); + + if (x0 <= x && x1 <= x) + scanline[x] += e->direction * (y1-y0); + else if (x0 >= x+1 && x1 >= x+1) + ; + else { + STBTT_assert(x0 >= x && x0 <= x+1 && x1 >= x && x1 <= x+1); + scanline[x] += e->direction * (y1-y0) * (1-((x0-x)+(x1-x))/2); // coverage = 1 - average x position + } +} + +static float stbtt__sized_trapezoid_area(float height, float top_width, float bottom_width) +{ + STBTT_assert(top_width >= 0); + STBTT_assert(bottom_width >= 0); + return (top_width + bottom_width) / 2.0f * height; +} + +static float stbtt__position_trapezoid_area(float height, float tx0, float tx1, float bx0, float bx1) +{ + return stbtt__sized_trapezoid_area(height, tx1 - tx0, bx1 - bx0); +} + +static float stbtt__sized_triangle_area(float height, float width) +{ + return height * width / 2; +} + +static void stbtt__fill_active_edges_new(float *scanline, float *scanline_fill, int len, stbtt__active_edge *e, float y_top) +{ + float y_bottom = y_top+1; + + while (e) { + // brute force every pixel + + // compute intersection points with top & bottom + STBTT_assert(e->ey >= y_top); + + if (e->fdx == 0) { + float x0 = e->fx; + if (x0 < len) { + if (x0 >= 0) { + stbtt__handle_clipped_edge(scanline,(int) x0,e, x0,y_top, x0,y_bottom); + stbtt__handle_clipped_edge(scanline_fill-1,(int) x0+1,e, x0,y_top, x0,y_bottom); + } else { + stbtt__handle_clipped_edge(scanline_fill-1,0,e, 
x0,y_top, x0,y_bottom); + } + } + } else { + float x0 = e->fx; + float dx = e->fdx; + float xb = x0 + dx; + float x_top, x_bottom; + float sy0,sy1; + float dy = e->fdy; + STBTT_assert(e->sy <= y_bottom && e->ey >= y_top); + + // compute endpoints of line segment clipped to this scanline (if the + // line segment starts on this scanline. x0 is the intersection of the + // line with y_top, but that may be off the line segment. + if (e->sy > y_top) { + x_top = x0 + dx * (e->sy - y_top); + sy0 = e->sy; + } else { + x_top = x0; + sy0 = y_top; + } + if (e->ey < y_bottom) { + x_bottom = x0 + dx * (e->ey - y_top); + sy1 = e->ey; + } else { + x_bottom = xb; + sy1 = y_bottom; + } + + if (x_top >= 0 && x_bottom >= 0 && x_top < len && x_bottom < len) { + // from here on, we don't have to range check x values + + if ((int) x_top == (int) x_bottom) { + float height; + // simple case, only spans one pixel + int x = (int) x_top; + height = (sy1 - sy0) * e->direction; + STBTT_assert(x >= 0 && x < len); + scanline[x] += stbtt__position_trapezoid_area(height, x_top, x+1.0f, x_bottom, x+1.0f); + scanline_fill[x] += height; // everything right of this pixel is filled + } else { + int x,x1,x2; + float y_crossing, y_final, step, sign, area; + // covers 2+ pixels + if (x_top > x_bottom) { + // flip scanline vertically; signed area is the same + float t; + sy0 = y_bottom - (sy0 - y_top); + sy1 = y_bottom - (sy1 - y_top); + t = sy0, sy0 = sy1, sy1 = t; + t = x_bottom, x_bottom = x_top, x_top = t; + dx = -dx; + dy = -dy; + t = x0, x0 = xb, xb = t; + } + STBTT_assert(dy >= 0); + STBTT_assert(dx >= 0); + + x1 = (int) x_top; + x2 = (int) x_bottom; + // compute intersection with y axis at x1+1 + y_crossing = y_top + dy * (x1+1 - x0); + + // compute intersection with y axis at x2 + y_final = y_top + dy * (x2 - x0); + + // x1 x_top x2 x_bottom + // y_top +------|-----+------------+------------+--------|---+------------+ + // | | | | | | + // | | | | | | + // sy0 | Txxxxx|............|............|............|............| + // y_crossing | *xxxxx.......|............|............|............| + // | | xxxxx..|............|............|............| + // | | /- xx*xxxx........|............|............| + // | | dy < | xxxxxx..|............|............| + // y_final | | \- | xx*xxx.........|............| + // sy1 | | | | xxxxxB...|............| + // | | | | | | + // | | | | | | + // y_bottom +------------+------------+------------+------------+------------+ + // + // goal is to measure the area covered by '.' in each pixel + + // if x2 is right at the right edge of x1, y_crossing can blow up, github #1057 + // @TODO: maybe test against sy1 rather than y_bottom? + if (y_crossing > y_bottom) + y_crossing = y_bottom; + + sign = e->direction; + + // area of the rectangle covered from sy0..y_crossing + area = sign * (y_crossing-sy0); + + // area of the triangle (x_top,sy0), (x1+1,sy0), (x1+1,y_crossing) + scanline[x1] += stbtt__sized_triangle_area(area, x1+1 - x_top); + + // check if final y_crossing is blown up; no test case for this + if (y_final > y_bottom) { + y_final = y_bottom; + dy = (y_final - y_crossing ) / (x2 - (x1+1)); // if denom=0, y_final = y_crossing, so y_final <= y_bottom + } + + // in second pixel, area covered by line segment found in first pixel + // is always a rectangle 1 wide * the height of that line segment; this + // is exactly what the variable 'area' stores. it also gets a contribution + // from the line segment within it. 
the THIRD pixel will get the first
+                  // pixel's rectangle contribution, the second pixel's rectangle contribution,
+                  // and its own contribution. the 'own contribution' is the same in every pixel except
+                  // the leftmost and rightmost, a trapezoid that slides down in each pixel.
+                  // the second pixel's contribution to the third pixel will be the
+                  // rectangle 1 wide times the height change in the second pixel, which is dy.
+
+                  step = sign * dy * 1; // dy is dy/dx, change in y for every 1 change in x,
+                  // which multiplied by 1-pixel-width is how much pixel area changes for each step in x
+                  // so the area advances by 'step' every time
+
+                  for (x = x1+1; x < x2; ++x) {
+                     scanline[x] += area + step/2; // area of trapezoid is 1*step/2
+                     area += step;
+                  }
+                  STBTT_assert(STBTT_fabs(area) <= 1.01f); // accumulated error from area += step unless we round step down
+                  STBTT_assert(sy1 > y_final-0.01f);
+
+                  // area covered in the last pixel is the rectangle from all the pixels to the left,
+                  // plus the trapezoid filled by the line segment in this pixel all the way to the right edge
+                  scanline[x2] += area + sign * stbtt__position_trapezoid_area(sy1-y_final, (float) x2, x2+1.0f, x_bottom, x2+1.0f);
+
+                  // the rest of the line is filled based on the total height of the line segment in this pixel
+                  scanline_fill[x2] += sign * (sy1-sy0);
+               }
+            } else {
+               // if edge goes outside of box we're drawing, we require
+               // clipping logic. since this does not match the intended use
+               // of this library, we use a different, very slow brute
+               // force implementation
+               // note though that this does happen some of the time because
+               // x_top and x_bottom can be extrapolated at the top & bottom of
+               // the shape and actually lie outside the bounding box
+               int x;
+               for (x=0; x < len; ++x) {
+                  // cases:
+                  //
+                  // there can be up to two intersections with the pixel. any intersection
+                  // with left or right edges can be handled by splitting into two (or three)
+                  // regions. intersections with top & bottom do not necessitate case-wise logic.
+                  //
+                  // the old way of doing this found the intersections with the left & right edges,
+                  // then used some simple logic to produce up to three segments in sorted order
+                  // from top-to-bottom. however, this had a problem: if an x edge was epsilon
+                  // across the x border, then the corresponding y position might not be distinct
+                  // from the other y segment, and it might be ignored as an empty segment. to avoid
+                  // that, we need to explicitly produce segments based on x positions.
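+                  //
+                  // concretely, y1/y2 below are where the edge crosses the vertical
+                  // lines x and x+1; comparing the segment endpoints x0/x3 against
+                  // those lines yields the seven cases handled below (three
+                  // sub-segments, two, or one, descending either right or left).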
+ + // rename variables to clearly-defined pairs + float y0 = y_top; + float x1 = (float) (x); + float x2 = (float) (x+1); + float x3 = xb; + float y3 = y_bottom; + + // x = e->x + e->dx * (y-y_top) + // (y-y_top) = (x - e->x) / e->dx + // y = (x - e->x) / e->dx + y_top + float y1 = (x - x0) / dx + y_top; + float y2 = (x+1 - x0) / dx + y_top; + + if (x0 < x1 && x3 > x2) { // three segments descending down-right + stbtt__handle_clipped_edge(scanline,x,e, x0,y0, x1,y1); + stbtt__handle_clipped_edge(scanline,x,e, x1,y1, x2,y2); + stbtt__handle_clipped_edge(scanline,x,e, x2,y2, x3,y3); + } else if (x3 < x1 && x0 > x2) { // three segments descending down-left + stbtt__handle_clipped_edge(scanline,x,e, x0,y0, x2,y2); + stbtt__handle_clipped_edge(scanline,x,e, x2,y2, x1,y1); + stbtt__handle_clipped_edge(scanline,x,e, x1,y1, x3,y3); + } else if (x0 < x1 && x3 > x1) { // two segments across x, down-right + stbtt__handle_clipped_edge(scanline,x,e, x0,y0, x1,y1); + stbtt__handle_clipped_edge(scanline,x,e, x1,y1, x3,y3); + } else if (x3 < x1 && x0 > x1) { // two segments across x, down-left + stbtt__handle_clipped_edge(scanline,x,e, x0,y0, x1,y1); + stbtt__handle_clipped_edge(scanline,x,e, x1,y1, x3,y3); + } else if (x0 < x2 && x3 > x2) { // two segments across x+1, down-right + stbtt__handle_clipped_edge(scanline,x,e, x0,y0, x2,y2); + stbtt__handle_clipped_edge(scanline,x,e, x2,y2, x3,y3); + } else if (x3 < x2 && x0 > x2) { // two segments across x+1, down-left + stbtt__handle_clipped_edge(scanline,x,e, x0,y0, x2,y2); + stbtt__handle_clipped_edge(scanline,x,e, x2,y2, x3,y3); + } else { // one segment + stbtt__handle_clipped_edge(scanline,x,e, x0,y0, x3,y3); + } + } + } + } + e = e->next; + } +} + +// directly AA rasterize edges w/o supersampling +static void stbtt__rasterize_sorted_edges(stbtt__bitmap *result, stbtt__edge *e, int n, int vsubsample, int off_x, int off_y, void *userdata) +{ + stbtt__hheap hh = { 0, 0, 0 }; + stbtt__active_edge *active = NULL; + int y,j=0, i; + float scanline_data[129], *scanline, *scanline2; + + STBTT__NOTUSED(vsubsample); + + if (result->w > 64) + scanline = (float *) STBTT_malloc((result->w*2+1) * sizeof(float), userdata); + else + scanline = scanline_data; + + scanline2 = scanline + result->w; + + y = off_y; + e[n].y0 = (float) (off_y + result->h) + 1; + + while (j < result->h) { + // find center of pixel for this scanline + float scan_y_top = y + 0.0f; + float scan_y_bottom = y + 1.0f; + stbtt__active_edge **step = &active; + + STBTT_memset(scanline , 0, result->w*sizeof(scanline[0])); + STBTT_memset(scanline2, 0, (result->w+1)*sizeof(scanline[0])); + + // update all active edges; + // remove all active edges that terminate before the top of this scanline + while (*step) { + stbtt__active_edge * z = *step; + if (z->ey <= scan_y_top) { + *step = z->next; // delete from list + STBTT_assert(z->direction); + z->direction = 0; + stbtt__hheap_free(&hh, z); + } else { + step = &((*step)->next); // advance through list + } + } + + // insert all edges that start before the bottom of this scanline + while (e->y0 <= scan_y_bottom) { + if (e->y0 != e->y1) { + stbtt__active_edge *z = stbtt__new_active(&hh, e, off_x, scan_y_top, userdata); + if (z != NULL) { + if (j == 0 && off_y != 0) { + if (z->ey < scan_y_top) { + // this can happen due to subpixel positioning and some kind of fp rounding error i think + z->ey = scan_y_top; + } + } + STBTT_assert(z->ey >= scan_y_top); // if we get really unlucky a tiny bit of an edge can be out of bounds + // insert at front + z->next = 
active;
+               active = z;
+            }
+         }
+         ++e;
+      }
+
+      // now process all active edges
+      if (active)
+         stbtt__fill_active_edges_new(scanline, scanline2+1, result->w, active, scan_y_top);
+
+      {
+         float sum = 0;
+         for (i=0; i < result->w; ++i) {
+            float k;
+            int m;
+            sum += scanline2[i];
+            k = scanline[i] + sum;
+            k = (float) STBTT_fabs(k)*255 + 0.5f;
+            m = (int) k;
+            if (m > 255) m = 255;
+            result->pixels[j*result->stride + i] = (unsigned char) m;
+         }
+      }
+      // advance all the edges
+      step = &active;
+      while (*step) {
+         stbtt__active_edge *z = *step;
+         z->fx += z->fdx; // advance to position for current scanline
+         step = &((*step)->next); // advance through list
+      }
+
+      ++y;
+      ++j;
+   }
+
+   stbtt__hheap_cleanup(&hh, userdata);
+
+   if (scanline != scanline_data)
+      STBTT_free(scanline, userdata);
+}
+#else
+#error "Unrecognized value of STBTT_RASTERIZER_VERSION"
+#endif
+
+#define STBTT__COMPARE(a,b)  ((a)->y0 < (b)->y0)
+
+static void stbtt__sort_edges_ins_sort(stbtt__edge *p, int n)
+{
+   int i,j;
+   for (i=1; i < n; ++i) {
+      stbtt__edge t = p[i], *a = &t;
+      j = i;
+      while (j > 0) {
+         stbtt__edge *b = &p[j-1];
+         int c = STBTT__COMPARE(a,b);
+         if (!c) break;
+         p[j] = p[j-1];
+         --j;
+      }
+      if (i != j)
+         p[j] = t;
+   }
+}
+
+static void stbtt__sort_edges_quicksort(stbtt__edge *p, int n)
+{
+   /* threshold for transitioning to insertion sort */
+   while (n > 12) {
+      stbtt__edge t;
+      int c01,c12,c,m,i,j;
+
+      /* compute median of three */
+      m = n >> 1;
+      c01 = STBTT__COMPARE(&p[0],&p[m]);
+      c12 = STBTT__COMPARE(&p[m],&p[n-1]);
+      /* if 0 >= mid >= end, or 0 < mid < end, then use mid */
+      if (c01 != c12) {
+         /* otherwise, we'll need to swap something else to middle */
+         int z;
+         c = STBTT__COMPARE(&p[0],&p[n-1]);
+         /* 0>mid && mid<n:  0>n => n; 0<n => 0 */
+         /* 0<mid && mid>n:  0>n => 0; 0<n => n */
+         z = (c == c12) ? 0 : n-1;
+         t = p[z];
+         p[z] = p[m];
+         p[m] = t;
+      }
+      /* now p[m] is the median-of-three */
+      /* swap it to the beginning so it won't move around */
+      t = p[0];
+      p[0] = p[m];
+      p[m] = t;
+
+      /* partition loop */
+      i=1;
+      j=n-1;
+      for(;;) {
+         /* handling of equality is crucial here */
+         /* for sentinels & efficiency with duplicates */
+         for (;;++i) {
+            if (!STBTT__COMPARE(&p[i], &p[0])) break;
+         }
+         for (;;--j) {
+            if (!STBTT__COMPARE(&p[0], &p[j])) break;
+         }
+         /* make sure we haven't crossed */
+         if (i >= j) break;
+         t = p[i];
+         p[i] = p[j];
+         p[j] = t;
+
+         ++i;
+         --j;
+      }
+      /* recurse on smaller side, iterate on larger */
+      if (j < (n-i)) {
+         stbtt__sort_edges_quicksort(p,j);
+         p = p+i;
+         n = n-i;
+      } else {
+         stbtt__sort_edges_quicksort(p+i, n-i);
+         n = j;
+      }
+   }
+}
+
+static void stbtt__sort_edges(stbtt__edge *p, int n)
+{
+   stbtt__sort_edges_quicksort(p, n);
+   stbtt__sort_edges_ins_sort(p, n);
+}
+
+typedef struct
+{
+   float x,y;
+} stbtt__point;
+
+static void stbtt__rasterize(stbtt__bitmap *result, stbtt__point *pts, int *wcount, int windings, float scale_x, float scale_y, float shift_x, float shift_y, int off_x, int off_y, int invert, void *userdata)
+{
+   float y_scale_inv = invert ? -scale_y : scale_y;
+   stbtt__edge *e;
+   int n,i,j,k,m;
+#if STBTT_RASTERIZER_VERSION == 1
+   int vsubsample = result->h < 8 ?
15 : 5; +#elif STBTT_RASTERIZER_VERSION == 2 + int vsubsample = 1; +#else + #error "Unrecognized value of STBTT_RASTERIZER_VERSION" +#endif + // vsubsample should divide 255 evenly; otherwise we won't reach full opacity + + // now we have to blow out the windings into explicit edge lists + n = 0; + for (i=0; i < windings; ++i) + n += wcount[i]; + + e = (stbtt__edge *) STBTT_malloc(sizeof(*e) * (n+1), userdata); // add an extra one as a sentinel + if (e == 0) return; + n = 0; + + m=0; + for (i=0; i < windings; ++i) { + stbtt__point *p = pts + m; + m += wcount[i]; + j = wcount[i]-1; + for (k=0; k < wcount[i]; j=k++) { + int a=k,b=j; + // skip the edge if horizontal + if (p[j].y == p[k].y) + continue; + // add edge from j to k to the list + e[n].invert = 0; + if (invert ? p[j].y > p[k].y : p[j].y < p[k].y) { + e[n].invert = 1; + a=j,b=k; + } + e[n].x0 = p[a].x * scale_x + shift_x; + e[n].y0 = (p[a].y * y_scale_inv + shift_y) * vsubsample; + e[n].x1 = p[b].x * scale_x + shift_x; + e[n].y1 = (p[b].y * y_scale_inv + shift_y) * vsubsample; + ++n; + } + } + + // now sort the edges by their highest point (should snap to integer, and then by x) + //STBTT_sort(e, n, sizeof(e[0]), stbtt__edge_compare); + stbtt__sort_edges(e, n); + + // now, traverse the scanlines and find the intersections on each scanline, use xor winding rule + stbtt__rasterize_sorted_edges(result, e, n, vsubsample, off_x, off_y, userdata); + + STBTT_free(e, userdata); +} + +static void stbtt__add_point(stbtt__point *points, int n, float x, float y) +{ + if (!points) return; // during first pass, it's unallocated + points[n].x = x; + points[n].y = y; +} + +// tessellate until threshold p is happy... @TODO warped to compensate for non-linear stretching +static int stbtt__tesselate_curve(stbtt__point *points, int *num_points, float x0, float y0, float x1, float y1, float x2, float y2, float objspace_flatness_squared, int n) +{ + // midpoint + float mx = (x0 + 2*x1 + x2)/4; + float my = (y0 + 2*y1 + y2)/4; + // versus directly drawn line + float dx = (x0+x2)/2 - mx; + float dy = (y0+y2)/2 - my; + if (n > 16) // 65536 segments on one curve better be enough! + return 1; + if (dx*dx+dy*dy > objspace_flatness_squared) { // half-pixel error allowed... need to be smaller if AA + stbtt__tesselate_curve(points, num_points, x0,y0, (x0+x1)/2.0f,(y0+y1)/2.0f, mx,my, objspace_flatness_squared,n+1); + stbtt__tesselate_curve(points, num_points, mx,my, (x1+x2)/2.0f,(y1+y2)/2.0f, x2,y2, objspace_flatness_squared,n+1); + } else { + stbtt__add_point(points, *num_points,x2,y2); + *num_points = *num_points+1; + } + return 1; +} + +static void stbtt__tesselate_cubic(stbtt__point *points, int *num_points, float x0, float y0, float x1, float y1, float x2, float y2, float x3, float y3, float objspace_flatness_squared, int n) +{ + // @TODO this "flatness" calculation is just made-up nonsense that seems to work well enough + float dx0 = x1-x0; + float dy0 = y1-y0; + float dx1 = x2-x1; + float dy1 = y2-y1; + float dx2 = x3-x2; + float dy2 = y3-y2; + float dx = x3-x0; + float dy = y3-y0; + float longlen = (float) (STBTT_sqrt(dx0*dx0+dy0*dy0)+STBTT_sqrt(dx1*dx1+dy1*dy1)+STBTT_sqrt(dx2*dx2+dy2*dy2)); + float shortlen = (float) STBTT_sqrt(dx*dx+dy*dy); + float flatness_squared = longlen*longlen-shortlen*shortlen; + + if (n > 16) // 65536 segments on one curve better be enough! 
+ return; + + if (flatness_squared > objspace_flatness_squared) { + float x01 = (x0+x1)/2; + float y01 = (y0+y1)/2; + float x12 = (x1+x2)/2; + float y12 = (y1+y2)/2; + float x23 = (x2+x3)/2; + float y23 = (y2+y3)/2; + + float xa = (x01+x12)/2; + float ya = (y01+y12)/2; + float xb = (x12+x23)/2; + float yb = (y12+y23)/2; + + float mx = (xa+xb)/2; + float my = (ya+yb)/2; + + stbtt__tesselate_cubic(points, num_points, x0,y0, x01,y01, xa,ya, mx,my, objspace_flatness_squared,n+1); + stbtt__tesselate_cubic(points, num_points, mx,my, xb,yb, x23,y23, x3,y3, objspace_flatness_squared,n+1); + } else { + stbtt__add_point(points, *num_points,x3,y3); + *num_points = *num_points+1; + } +} + +// returns number of contours +static stbtt__point *stbtt_FlattenCurves(stbtt_vertex *vertices, int num_verts, float objspace_flatness, int **contour_lengths, int *num_contours, void *userdata) +{ + stbtt__point *points=0; + int num_points=0; + + float objspace_flatness_squared = objspace_flatness * objspace_flatness; + int i,n=0,start=0, pass; + + // count how many "moves" there are to get the contour count + for (i=0; i < num_verts; ++i) + if (vertices[i].type == STBTT_vmove) + ++n; + + *num_contours = n; + if (n == 0) return 0; + + *contour_lengths = (int *) STBTT_malloc(sizeof(**contour_lengths) * n, userdata); + + if (*contour_lengths == 0) { + *num_contours = 0; + return 0; + } + + // make two passes through the points so we don't need to realloc + for (pass=0; pass < 2; ++pass) { + float x=0,y=0; + if (pass == 1) { + points = (stbtt__point *) STBTT_malloc(num_points * sizeof(points[0]), userdata); + if (points == NULL) goto error; + } + num_points = 0; + n= -1; + for (i=0; i < num_verts; ++i) { + switch (vertices[i].type) { + case STBTT_vmove: + // start the next contour + if (n >= 0) + (*contour_lengths)[n] = num_points - start; + ++n; + start = num_points; + + x = vertices[i].x, y = vertices[i].y; + stbtt__add_point(points, num_points++, x,y); + break; + case STBTT_vline: + x = vertices[i].x, y = vertices[i].y; + stbtt__add_point(points, num_points++, x, y); + break; + case STBTT_vcurve: + stbtt__tesselate_curve(points, &num_points, x,y, + vertices[i].cx, vertices[i].cy, + vertices[i].x, vertices[i].y, + objspace_flatness_squared, 0); + x = vertices[i].x, y = vertices[i].y; + break; + case STBTT_vcubic: + stbtt__tesselate_cubic(points, &num_points, x,y, + vertices[i].cx, vertices[i].cy, + vertices[i].cx1, vertices[i].cy1, + vertices[i].x, vertices[i].y, + objspace_flatness_squared, 0); + x = vertices[i].x, y = vertices[i].y; + break; + } + } + (*contour_lengths)[n] = num_points - start; + } + + return points; +error: + STBTT_free(points, userdata); + STBTT_free(*contour_lengths, userdata); + *contour_lengths = 0; + *num_contours = 0; + return NULL; +} + +STBTT_DEF void stbtt_Rasterize(stbtt__bitmap *result, float flatness_in_pixels, stbtt_vertex *vertices, int num_verts, float scale_x, float scale_y, float shift_x, float shift_y, int x_off, int y_off, int invert, void *userdata) +{ + float scale = scale_x > scale_y ? 
scale_y : scale_x; + int winding_count = 0; + int *winding_lengths = NULL; + stbtt__point *windings = stbtt_FlattenCurves(vertices, num_verts, flatness_in_pixels / scale, &winding_lengths, &winding_count, userdata); + if (windings) { + stbtt__rasterize(result, windings, winding_lengths, winding_count, scale_x, scale_y, shift_x, shift_y, x_off, y_off, invert, userdata); + STBTT_free(winding_lengths, userdata); + STBTT_free(windings, userdata); + } +} + +STBTT_DEF void stbtt_FreeBitmap(unsigned char *bitmap, void *userdata) +{ + STBTT_free(bitmap, userdata); +} + +STBTT_DEF unsigned char *stbtt_GetGlyphBitmapSubpixel(const stbtt_fontinfo *info, float scale_x, float scale_y, float shift_x, float shift_y, int glyph, int *width, int *height, int *xoff, int *yoff) +{ + int ix0,iy0,ix1,iy1; + stbtt__bitmap gbm; + stbtt_vertex *vertices; + int num_verts = stbtt_GetGlyphShape(info, glyph, &vertices); + + if (scale_x == 0) scale_x = scale_y; + if (scale_y == 0) { + if (scale_x == 0) { + STBTT_free(vertices, info->userdata); + return NULL; + } + scale_y = scale_x; + } + + stbtt_GetGlyphBitmapBoxSubpixel(info, glyph, scale_x, scale_y, shift_x, shift_y, &ix0,&iy0,&ix1,&iy1); + + // now we get the size + gbm.w = (ix1 - ix0); + gbm.h = (iy1 - iy0); + gbm.pixels = NULL; // in case we error + + if (width ) *width = gbm.w; + if (height) *height = gbm.h; + if (xoff ) *xoff = ix0; + if (yoff ) *yoff = iy0; + + if (gbm.w && gbm.h) { + gbm.pixels = (unsigned char *) STBTT_malloc(gbm.w * gbm.h, info->userdata); + if (gbm.pixels) { + gbm.stride = gbm.w; + + stbtt_Rasterize(&gbm, 0.35f, vertices, num_verts, scale_x, scale_y, shift_x, shift_y, ix0, iy0, 1, info->userdata); + } + } + STBTT_free(vertices, info->userdata); + return gbm.pixels; +} + +STBTT_DEF unsigned char *stbtt_GetGlyphBitmap(const stbtt_fontinfo *info, float scale_x, float scale_y, int glyph, int *width, int *height, int *xoff, int *yoff) +{ + return stbtt_GetGlyphBitmapSubpixel(info, scale_x, scale_y, 0.0f, 0.0f, glyph, width, height, xoff, yoff); +} + +STBTT_DEF void stbtt_MakeGlyphBitmapSubpixel(const stbtt_fontinfo *info, unsigned char *output, int out_w, int out_h, int out_stride, float scale_x, float scale_y, float shift_x, float shift_y, int glyph) +{ + int ix0,iy0; + stbtt_vertex *vertices; + int num_verts = stbtt_GetGlyphShape(info, glyph, &vertices); + stbtt__bitmap gbm; + + stbtt_GetGlyphBitmapBoxSubpixel(info, glyph, scale_x, scale_y, shift_x, shift_y, &ix0,&iy0,0,0); + gbm.pixels = output; + gbm.w = out_w; + gbm.h = out_h; + gbm.stride = out_stride; + + if (gbm.w && gbm.h) + stbtt_Rasterize(&gbm, 0.35f, vertices, num_verts, scale_x, scale_y, shift_x, shift_y, ix0,iy0, 1, info->userdata); + + STBTT_free(vertices, info->userdata); +} + +STBTT_DEF void stbtt_MakeGlyphBitmap(const stbtt_fontinfo *info, unsigned char *output, int out_w, int out_h, int out_stride, float scale_x, float scale_y, int glyph) +{ + stbtt_MakeGlyphBitmapSubpixel(info, output, out_w, out_h, out_stride, scale_x, scale_y, 0.0f,0.0f, glyph); +} + +STBTT_DEF unsigned char *stbtt_GetCodepointBitmapSubpixel(const stbtt_fontinfo *info, float scale_x, float scale_y, float shift_x, float shift_y, int codepoint, int *width, int *height, int *xoff, int *yoff) +{ + return stbtt_GetGlyphBitmapSubpixel(info, scale_x, scale_y,shift_x,shift_y, stbtt_FindGlyphIndex(info,codepoint), width,height,xoff,yoff); +} + +STBTT_DEF void stbtt_MakeCodepointBitmapSubpixelPrefilter(const stbtt_fontinfo *info, unsigned char *output, int out_w, int out_h, int out_stride, float scale_x, float 
scale_y, float shift_x, float shift_y, int oversample_x, int oversample_y, float *sub_x, float *sub_y, int codepoint) +{ + stbtt_MakeGlyphBitmapSubpixelPrefilter(info, output, out_w, out_h, out_stride, scale_x, scale_y, shift_x, shift_y, oversample_x, oversample_y, sub_x, sub_y, stbtt_FindGlyphIndex(info,codepoint)); +} + +STBTT_DEF void stbtt_MakeCodepointBitmapSubpixel(const stbtt_fontinfo *info, unsigned char *output, int out_w, int out_h, int out_stride, float scale_x, float scale_y, float shift_x, float shift_y, int codepoint) +{ + stbtt_MakeGlyphBitmapSubpixel(info, output, out_w, out_h, out_stride, scale_x, scale_y, shift_x, shift_y, stbtt_FindGlyphIndex(info,codepoint)); +} + +STBTT_DEF unsigned char *stbtt_GetCodepointBitmap(const stbtt_fontinfo *info, float scale_x, float scale_y, int codepoint, int *width, int *height, int *xoff, int *yoff) +{ + return stbtt_GetCodepointBitmapSubpixel(info, scale_x, scale_y, 0.0f,0.0f, codepoint, width,height,xoff,yoff); +} + +STBTT_DEF void stbtt_MakeCodepointBitmap(const stbtt_fontinfo *info, unsigned char *output, int out_w, int out_h, int out_stride, float scale_x, float scale_y, int codepoint) +{ + stbtt_MakeCodepointBitmapSubpixel(info, output, out_w, out_h, out_stride, scale_x, scale_y, 0.0f,0.0f, codepoint); +} + +////////////////////////////////////////////////////////////////////////////// +// +// bitmap baking +// +// This is SUPER-CRAPPY packing to keep source code small + +static int stbtt_BakeFontBitmap_internal(unsigned char *data, int offset, // font location (use offset=0 for plain .ttf) + float pixel_height, // height of font in pixels + unsigned char *pixels, int pw, int ph, // bitmap to be filled in + int first_char, int num_chars, // characters to bake + stbtt_bakedchar *chardata) +{ + float scale; + int x,y,bottom_y, i; + stbtt_fontinfo f; + f.userdata = NULL; + if (!stbtt_InitFont(&f, data, offset)) + return -1; + STBTT_memset(pixels, 0, pw*ph); // background of 0 around pixels + x=y=1; + bottom_y = 1; + + scale = stbtt_ScaleForPixelHeight(&f, pixel_height); + + for (i=0; i < num_chars; ++i) { + int advance, lsb, x0,y0,x1,y1,gw,gh; + int g = stbtt_FindGlyphIndex(&f, first_char + i); + stbtt_GetGlyphHMetrics(&f, g, &advance, &lsb); + stbtt_GetGlyphBitmapBox(&f, g, scale,scale, &x0,&y0,&x1,&y1); + gw = x1-x0; + gh = y1-y0; + if (x + gw + 1 >= pw) + y = bottom_y, x = 1; // advance to next row + if (y + gh + 1 >= ph) // check if it fits vertically AFTER potentially moving to next row + return -i; + STBTT_assert(x+gw < pw); + STBTT_assert(y+gh < ph); + stbtt_MakeGlyphBitmap(&f, pixels+x+y*pw, gw,gh,pw, scale,scale, g); + chardata[i].x0 = (stbtt_int16) x; + chardata[i].y0 = (stbtt_int16) y; + chardata[i].x1 = (stbtt_int16) (x + gw); + chardata[i].y1 = (stbtt_int16) (y + gh); + chardata[i].xadvance = scale * advance; + chardata[i].xoff = (float) x0; + chardata[i].yoff = (float) y0; + x = x + gw + 1; + if (y+gh+1 > bottom_y) + bottom_y = y+gh+1; + } + return bottom_y; +} + +STBTT_DEF void stbtt_GetBakedQuad(const stbtt_bakedchar *chardata, int pw, int ph, int char_index, float *xpos, float *ypos, stbtt_aligned_quad *q, int opengl_fillrule) +{ + float d3d_bias = opengl_fillrule ? 
0 : -0.5f; + float ipw = 1.0f / pw, iph = 1.0f / ph; + const stbtt_bakedchar *b = chardata + char_index; + int round_x = STBTT_ifloor((*xpos + b->xoff) + 0.5f); + int round_y = STBTT_ifloor((*ypos + b->yoff) + 0.5f); + + q->x0 = round_x + d3d_bias; + q->y0 = round_y + d3d_bias; + q->x1 = round_x + b->x1 - b->x0 + d3d_bias; + q->y1 = round_y + b->y1 - b->y0 + d3d_bias; + + q->s0 = b->x0 * ipw; + q->t0 = b->y0 * iph; + q->s1 = b->x1 * ipw; + q->t1 = b->y1 * iph; + + *xpos += b->xadvance; +} + +////////////////////////////////////////////////////////////////////////////// +// +// rectangle packing replacement routines if you don't have stb_rect_pack.h +// + +#ifndef STB_RECT_PACK_VERSION + +typedef int stbrp_coord; + +//////////////////////////////////////////////////////////////////////////////////// +// // +// // +// COMPILER WARNING ?!?!? // +// // +// // +// if you get a compile warning due to these symbols being defined more than // +// once, move #include "stb_rect_pack.h" before #include "stb_truetype.h" // +// // +//////////////////////////////////////////////////////////////////////////////////// + +typedef struct +{ + int width,height; + int x,y,bottom_y; +} stbrp_context; + +typedef struct +{ + unsigned char x; +} stbrp_node; + +struct stbrp_rect +{ + stbrp_coord x,y; + int id,w,h,was_packed; +}; + +static void stbrp_init_target(stbrp_context *con, int pw, int ph, stbrp_node *nodes, int num_nodes) +{ + con->width = pw; + con->height = ph; + con->x = 0; + con->y = 0; + con->bottom_y = 0; + STBTT__NOTUSED(nodes); + STBTT__NOTUSED(num_nodes); +} + +static void stbrp_pack_rects(stbrp_context *con, stbrp_rect *rects, int num_rects) +{ + int i; + for (i=0; i < num_rects; ++i) { + if (con->x + rects[i].w > con->width) { + con->x = 0; + con->y = con->bottom_y; + } + if (con->y + rects[i].h > con->height) + break; + rects[i].x = con->x; + rects[i].y = con->y; + rects[i].was_packed = 1; + con->x += rects[i].w; + if (con->y + rects[i].h > con->bottom_y) + con->bottom_y = con->y + rects[i].h; + } + for ( ; i < num_rects; ++i) + rects[i].was_packed = 0; +} +#endif + +////////////////////////////////////////////////////////////////////////////// +// +// bitmap baking +// +// This is SUPER-AWESOME (tm Ryan Gordon) packing using stb_rect_pack.h. If +// stb_rect_pack.h isn't available, it uses the BakeFontBitmap strategy. + +STBTT_DEF int stbtt_PackBegin(stbtt_pack_context *spc, unsigned char *pixels, int pw, int ph, int stride_in_bytes, int padding, void *alloc_context) +{ + stbrp_context *context = (stbrp_context *) STBTT_malloc(sizeof(*context) ,alloc_context); + int num_nodes = pw - padding; + stbrp_node *nodes = (stbrp_node *) STBTT_malloc(sizeof(*nodes ) * num_nodes,alloc_context); + + if (context == NULL || nodes == NULL) { + if (context != NULL) STBTT_free(context, alloc_context); + if (nodes != NULL) STBTT_free(nodes , alloc_context); + return 0; + } + + spc->user_allocator_context = alloc_context; + spc->width = pw; + spc->height = ph; + spc->pixels = pixels; + spc->pack_info = context; + spc->nodes = nodes; + spc->padding = padding; + spc->stride_in_bytes = stride_in_bytes != 0 ? 
stride_in_bytes : pw; + spc->h_oversample = 1; + spc->v_oversample = 1; + spc->skip_missing = 0; + + stbrp_init_target(context, pw-padding, ph-padding, nodes, num_nodes); + + if (pixels) + STBTT_memset(pixels, 0, pw*ph); // background of 0 around pixels + + return 1; +} + +STBTT_DEF void stbtt_PackEnd (stbtt_pack_context *spc) +{ + STBTT_free(spc->nodes , spc->user_allocator_context); + STBTT_free(spc->pack_info, spc->user_allocator_context); +} + +STBTT_DEF void stbtt_PackSetOversampling(stbtt_pack_context *spc, unsigned int h_oversample, unsigned int v_oversample) +{ + STBTT_assert(h_oversample <= STBTT_MAX_OVERSAMPLE); + STBTT_assert(v_oversample <= STBTT_MAX_OVERSAMPLE); + if (h_oversample <= STBTT_MAX_OVERSAMPLE) + spc->h_oversample = h_oversample; + if (v_oversample <= STBTT_MAX_OVERSAMPLE) + spc->v_oversample = v_oversample; +} + +STBTT_DEF void stbtt_PackSetSkipMissingCodepoints(stbtt_pack_context *spc, int skip) +{ + spc->skip_missing = skip; +} + +#define STBTT__OVER_MASK (STBTT_MAX_OVERSAMPLE-1) + +static void stbtt__h_prefilter(unsigned char *pixels, int w, int h, int stride_in_bytes, unsigned int kernel_width) +{ + unsigned char buffer[STBTT_MAX_OVERSAMPLE]; + int safe_w = w - kernel_width; + int j; + STBTT_memset(buffer, 0, STBTT_MAX_OVERSAMPLE); // suppress bogus warning from VS2013 -analyze + for (j=0; j < h; ++j) { + int i; + unsigned int total; + STBTT_memset(buffer, 0, kernel_width); + + total = 0; + + // make kernel_width a constant in common cases so compiler can optimize out the divide + switch (kernel_width) { + case 2: + for (i=0; i <= safe_w; ++i) { + total += pixels[i] - buffer[i & STBTT__OVER_MASK]; + buffer[(i+kernel_width) & STBTT__OVER_MASK] = pixels[i]; + pixels[i] = (unsigned char) (total / 2); + } + break; + case 3: + for (i=0; i <= safe_w; ++i) { + total += pixels[i] - buffer[i & STBTT__OVER_MASK]; + buffer[(i+kernel_width) & STBTT__OVER_MASK] = pixels[i]; + pixels[i] = (unsigned char) (total / 3); + } + break; + case 4: + for (i=0; i <= safe_w; ++i) { + total += pixels[i] - buffer[i & STBTT__OVER_MASK]; + buffer[(i+kernel_width) & STBTT__OVER_MASK] = pixels[i]; + pixels[i] = (unsigned char) (total / 4); + } + break; + case 5: + for (i=0; i <= safe_w; ++i) { + total += pixels[i] - buffer[i & STBTT__OVER_MASK]; + buffer[(i+kernel_width) & STBTT__OVER_MASK] = pixels[i]; + pixels[i] = (unsigned char) (total / 5); + } + break; + default: + for (i=0; i <= safe_w; ++i) { + total += pixels[i] - buffer[i & STBTT__OVER_MASK]; + buffer[(i+kernel_width) & STBTT__OVER_MASK] = pixels[i]; + pixels[i] = (unsigned char) (total / kernel_width); + } + break; + } + + for (; i < w; ++i) { + STBTT_assert(pixels[i] == 0); + total -= buffer[i & STBTT__OVER_MASK]; + pixels[i] = (unsigned char) (total / kernel_width); + } + + pixels += stride_in_bytes; + } +} + +static void stbtt__v_prefilter(unsigned char *pixels, int w, int h, int stride_in_bytes, unsigned int kernel_width) +{ + unsigned char buffer[STBTT_MAX_OVERSAMPLE]; + int safe_h = h - kernel_width; + int j; + STBTT_memset(buffer, 0, STBTT_MAX_OVERSAMPLE); // suppress bogus warning from VS2013 -analyze + for (j=0; j < w; ++j) { + int i; + unsigned int total; + STBTT_memset(buffer, 0, kernel_width); + + total = 0; + + // make kernel_width a constant in common cases so compiler can optimize out the divide + switch (kernel_width) { + case 2: + for (i=0; i <= safe_h; ++i) { + total += pixels[i*stride_in_bytes] - buffer[i & STBTT__OVER_MASK]; + buffer[(i+kernel_width) & STBTT__OVER_MASK] = pixels[i*stride_in_bytes]; + 
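+               // 'total' is a sliding-window sum of the last kernel_width samples,
+               // maintained through the small circular buffer, so the write below
+               // stores the unweighted box average (total divided by the window
+               // width, hardcoded per case so the compiler can avoid a generic divide).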
pixels[i*stride_in_bytes] = (unsigned char) (total / 2); + } + break; + case 3: + for (i=0; i <= safe_h; ++i) { + total += pixels[i*stride_in_bytes] - buffer[i & STBTT__OVER_MASK]; + buffer[(i+kernel_width) & STBTT__OVER_MASK] = pixels[i*stride_in_bytes]; + pixels[i*stride_in_bytes] = (unsigned char) (total / 3); + } + break; + case 4: + for (i=0; i <= safe_h; ++i) { + total += pixels[i*stride_in_bytes] - buffer[i & STBTT__OVER_MASK]; + buffer[(i+kernel_width) & STBTT__OVER_MASK] = pixels[i*stride_in_bytes]; + pixels[i*stride_in_bytes] = (unsigned char) (total / 4); + } + break; + case 5: + for (i=0; i <= safe_h; ++i) { + total += pixels[i*stride_in_bytes] - buffer[i & STBTT__OVER_MASK]; + buffer[(i+kernel_width) & STBTT__OVER_MASK] = pixels[i*stride_in_bytes]; + pixels[i*stride_in_bytes] = (unsigned char) (total / 5); + } + break; + default: + for (i=0; i <= safe_h; ++i) { + total += pixels[i*stride_in_bytes] - buffer[i & STBTT__OVER_MASK]; + buffer[(i+kernel_width) & STBTT__OVER_MASK] = pixels[i*stride_in_bytes]; + pixels[i*stride_in_bytes] = (unsigned char) (total / kernel_width); + } + break; + } + + for (; i < h; ++i) { + STBTT_assert(pixels[i*stride_in_bytes] == 0); + total -= buffer[i & STBTT__OVER_MASK]; + pixels[i*stride_in_bytes] = (unsigned char) (total / kernel_width); + } + + pixels += 1; + } +} + +static float stbtt__oversample_shift(int oversample) +{ + if (!oversample) + return 0.0f; + + // The prefilter is a box filter of width "oversample", + // which shifts phase by (oversample - 1)/2 pixels in + // oversampled space. We want to shift in the opposite + // direction to counter this. + return (float)-(oversample - 1) / (2.0f * (float)oversample); +} + +// rects array must be big enough to accommodate all characters in the given ranges +STBTT_DEF int stbtt_PackFontRangesGatherRects(stbtt_pack_context *spc, const stbtt_fontinfo *info, stbtt_pack_range *ranges, int num_ranges, stbrp_rect *rects) +{ + int i,j,k; + int missing_glyph_added = 0; + + k=0; + for (i=0; i < num_ranges; ++i) { + float fh = ranges[i].font_size; + float scale = fh > 0 ? stbtt_ScaleForPixelHeight(info, fh) : stbtt_ScaleForMappingEmToPixels(info, -fh); + ranges[i].h_oversample = (unsigned char) spc->h_oversample; + ranges[i].v_oversample = (unsigned char) spc->v_oversample; + for (j=0; j < ranges[i].num_chars; ++j) { + int x0,y0,x1,y1; + int codepoint = ranges[i].array_of_unicode_codepoints == NULL ? 
ranges[i].first_unicode_codepoint_in_range + j : ranges[i].array_of_unicode_codepoints[j]; + int glyph = stbtt_FindGlyphIndex(info, codepoint); + if (glyph == 0 && (spc->skip_missing || missing_glyph_added)) { + rects[k].w = rects[k].h = 0; + } else { + stbtt_GetGlyphBitmapBoxSubpixel(info,glyph, + scale * spc->h_oversample, + scale * spc->v_oversample, + 0,0, + &x0,&y0,&x1,&y1); + rects[k].w = (stbrp_coord) (x1-x0 + spc->padding + spc->h_oversample-1); + rects[k].h = (stbrp_coord) (y1-y0 + spc->padding + spc->v_oversample-1); + if (glyph == 0) + missing_glyph_added = 1; + } + ++k; + } + } + + return k; +} + +STBTT_DEF void stbtt_MakeGlyphBitmapSubpixelPrefilter(const stbtt_fontinfo *info, unsigned char *output, int out_w, int out_h, int out_stride, float scale_x, float scale_y, float shift_x, float shift_y, int prefilter_x, int prefilter_y, float *sub_x, float *sub_y, int glyph) +{ + stbtt_MakeGlyphBitmapSubpixel(info, + output, + out_w - (prefilter_x - 1), + out_h - (prefilter_y - 1), + out_stride, + scale_x, + scale_y, + shift_x, + shift_y, + glyph); + + if (prefilter_x > 1) + stbtt__h_prefilter(output, out_w, out_h, out_stride, prefilter_x); + + if (prefilter_y > 1) + stbtt__v_prefilter(output, out_w, out_h, out_stride, prefilter_y); + + *sub_x = stbtt__oversample_shift(prefilter_x); + *sub_y = stbtt__oversample_shift(prefilter_y); +} + +// rects array must be big enough to accommodate all characters in the given ranges +STBTT_DEF int stbtt_PackFontRangesRenderIntoRects(stbtt_pack_context *spc, const stbtt_fontinfo *info, stbtt_pack_range *ranges, int num_ranges, stbrp_rect *rects) +{ + int i,j,k, missing_glyph = -1, return_value = 1; + + // save current values + int old_h_over = spc->h_oversample; + int old_v_over = spc->v_oversample; + + k = 0; + for (i=0; i < num_ranges; ++i) { + float fh = ranges[i].font_size; + float scale = fh > 0 ? stbtt_ScaleForPixelHeight(info, fh) : stbtt_ScaleForMappingEmToPixels(info, -fh); + float recip_h,recip_v,sub_x,sub_y; + spc->h_oversample = ranges[i].h_oversample; + spc->v_oversample = ranges[i].v_oversample; + recip_h = 1.0f / spc->h_oversample; + recip_v = 1.0f / spc->v_oversample; + sub_x = stbtt__oversample_shift(spc->h_oversample); + sub_y = stbtt__oversample_shift(spc->v_oversample); + for (j=0; j < ranges[i].num_chars; ++j) { + stbrp_rect *r = &rects[k]; + if (r->was_packed && r->w != 0 && r->h != 0) { + stbtt_packedchar *bc = &ranges[i].chardata_for_range[j]; + int advance, lsb, x0,y0,x1,y1; + int codepoint = ranges[i].array_of_unicode_codepoints == NULL ? 
ranges[i].first_unicode_codepoint_in_range + j : ranges[i].array_of_unicode_codepoints[j]; + int glyph = stbtt_FindGlyphIndex(info, codepoint); + stbrp_coord pad = (stbrp_coord) spc->padding; + + // pad on left and top + r->x += pad; + r->y += pad; + r->w -= pad; + r->h -= pad; + stbtt_GetGlyphHMetrics(info, glyph, &advance, &lsb); + stbtt_GetGlyphBitmapBox(info, glyph, + scale * spc->h_oversample, + scale * spc->v_oversample, + &x0,&y0,&x1,&y1); + stbtt_MakeGlyphBitmapSubpixel(info, + spc->pixels + r->x + r->y*spc->stride_in_bytes, + r->w - spc->h_oversample+1, + r->h - spc->v_oversample+1, + spc->stride_in_bytes, + scale * spc->h_oversample, + scale * spc->v_oversample, + 0,0, + glyph); + + if (spc->h_oversample > 1) + stbtt__h_prefilter(spc->pixels + r->x + r->y*spc->stride_in_bytes, + r->w, r->h, spc->stride_in_bytes, + spc->h_oversample); + + if (spc->v_oversample > 1) + stbtt__v_prefilter(spc->pixels + r->x + r->y*spc->stride_in_bytes, + r->w, r->h, spc->stride_in_bytes, + spc->v_oversample); + + bc->x0 = (stbtt_int16) r->x; + bc->y0 = (stbtt_int16) r->y; + bc->x1 = (stbtt_int16) (r->x + r->w); + bc->y1 = (stbtt_int16) (r->y + r->h); + bc->xadvance = scale * advance; + bc->xoff = (float) x0 * recip_h + sub_x; + bc->yoff = (float) y0 * recip_v + sub_y; + bc->xoff2 = (x0 + r->w) * recip_h + sub_x; + bc->yoff2 = (y0 + r->h) * recip_v + sub_y; + + if (glyph == 0) + missing_glyph = j; + } else if (spc->skip_missing) { + return_value = 0; + } else if (r->was_packed && r->w == 0 && r->h == 0 && missing_glyph >= 0) { + ranges[i].chardata_for_range[j] = ranges[i].chardata_for_range[missing_glyph]; + } else { + return_value = 0; // if any fail, report failure + } + + ++k; + } + } + + // restore original values + spc->h_oversample = old_h_over; + spc->v_oversample = old_v_over; + + return return_value; +} + +STBTT_DEF void stbtt_PackFontRangesPackRects(stbtt_pack_context *spc, stbrp_rect *rects, int num_rects) +{ + stbrp_pack_rects((stbrp_context *) spc->pack_info, rects, num_rects); +} + +STBTT_DEF int stbtt_PackFontRanges(stbtt_pack_context *spc, const unsigned char *fontdata, int font_index, stbtt_pack_range *ranges, int num_ranges) +{ + stbtt_fontinfo info; + int i,j,n, return_value = 1; + //stbrp_context *context = (stbrp_context *) spc->pack_info; + stbrp_rect *rects; + + // flag all characters as NOT packed + for (i=0; i < num_ranges; ++i) + for (j=0; j < ranges[i].num_chars; ++j) + ranges[i].chardata_for_range[j].x0 = + ranges[i].chardata_for_range[j].y0 = + ranges[i].chardata_for_range[j].x1 = + ranges[i].chardata_for_range[j].y1 = 0; + + n = 0; + for (i=0; i < num_ranges; ++i) + n += ranges[i].num_chars; + + rects = (stbrp_rect *) STBTT_malloc(sizeof(*rects) * n, spc->user_allocator_context); + if (rects == NULL) + return 0; + + info.userdata = spc->user_allocator_context; + stbtt_InitFont(&info, fontdata, stbtt_GetFontOffsetForIndex(fontdata,font_index)); + + n = stbtt_PackFontRangesGatherRects(spc, &info, ranges, num_ranges, rects); + + stbtt_PackFontRangesPackRects(spc, rects, n); + + return_value = stbtt_PackFontRangesRenderIntoRects(spc, &info, ranges, num_ranges, rects); + + STBTT_free(rects, spc->user_allocator_context); + return return_value; +} + +STBTT_DEF int stbtt_PackFontRange(stbtt_pack_context *spc, const unsigned char *fontdata, int font_index, float font_size, + int first_unicode_codepoint_in_range, int num_chars_in_range, stbtt_packedchar *chardata_for_range) +{ + stbtt_pack_range range; + range.first_unicode_codepoint_in_range = first_unicode_codepoint_in_range; + 
range.array_of_unicode_codepoints = NULL; + range.num_chars = num_chars_in_range; + range.chardata_for_range = chardata_for_range; + range.font_size = font_size; + return stbtt_PackFontRanges(spc, fontdata, font_index, &range, 1); +} + +STBTT_DEF void stbtt_GetScaledFontVMetrics(const unsigned char *fontdata, int index, float size, float *ascent, float *descent, float *lineGap) +{ + int i_ascent, i_descent, i_lineGap; + float scale; + stbtt_fontinfo info; + stbtt_InitFont(&info, fontdata, stbtt_GetFontOffsetForIndex(fontdata, index)); + scale = size > 0 ? stbtt_ScaleForPixelHeight(&info, size) : stbtt_ScaleForMappingEmToPixels(&info, -size); + stbtt_GetFontVMetrics(&info, &i_ascent, &i_descent, &i_lineGap); + *ascent = (float) i_ascent * scale; + *descent = (float) i_descent * scale; + *lineGap = (float) i_lineGap * scale; +} + +STBTT_DEF void stbtt_GetPackedQuad(const stbtt_packedchar *chardata, int pw, int ph, int char_index, float *xpos, float *ypos, stbtt_aligned_quad *q, int align_to_integer) +{ + float ipw = 1.0f / pw, iph = 1.0f / ph; + const stbtt_packedchar *b = chardata + char_index; + + if (align_to_integer) { + float x = (float) STBTT_ifloor((*xpos + b->xoff) + 0.5f); + float y = (float) STBTT_ifloor((*ypos + b->yoff) + 0.5f); + q->x0 = x; + q->y0 = y; + q->x1 = x + b->xoff2 - b->xoff; + q->y1 = y + b->yoff2 - b->yoff; + } else { + q->x0 = *xpos + b->xoff; + q->y0 = *ypos + b->yoff; + q->x1 = *xpos + b->xoff2; + q->y1 = *ypos + b->yoff2; + } + + q->s0 = b->x0 * ipw; + q->t0 = b->y0 * iph; + q->s1 = b->x1 * ipw; + q->t1 = b->y1 * iph; + + *xpos += b->xadvance; +} + +////////////////////////////////////////////////////////////////////////////// +// +// sdf computation +// + +#define STBTT_min(a,b) ((a) < (b) ? (a) : (b)) +#define STBTT_max(a,b) ((a) < (b) ? 
(b) : (a)) + +static int stbtt__ray_intersect_bezier(float orig[2], float ray[2], float q0[2], float q1[2], float q2[2], float hits[2][2]) +{ + float q0perp = q0[1]*ray[0] - q0[0]*ray[1]; + float q1perp = q1[1]*ray[0] - q1[0]*ray[1]; + float q2perp = q2[1]*ray[0] - q2[0]*ray[1]; + float roperp = orig[1]*ray[0] - orig[0]*ray[1]; + + float a = q0perp - 2*q1perp + q2perp; + float b = q1perp - q0perp; + float c = q0perp - roperp; + + float s0 = 0., s1 = 0.; + int num_s = 0; + + if (a != 0.0) { + float discr = b*b - a*c; + if (discr > 0.0) { + float rcpna = -1 / a; + float d = (float) STBTT_sqrt(discr); + s0 = (b+d) * rcpna; + s1 = (b-d) * rcpna; + if (s0 >= 0.0 && s0 <= 1.0) + num_s = 1; + if (d > 0.0 && s1 >= 0.0 && s1 <= 1.0) { + if (num_s == 0) s0 = s1; + ++num_s; + } + } + } else { + // 2*b*s + c = 0 + // s = -c / (2*b) + s0 = c / (-2 * b); + if (s0 >= 0.0 && s0 <= 1.0) + num_s = 1; + } + + if (num_s == 0) + return 0; + else { + float rcp_len2 = 1 / (ray[0]*ray[0] + ray[1]*ray[1]); + float rayn_x = ray[0] * rcp_len2, rayn_y = ray[1] * rcp_len2; + + float q0d = q0[0]*rayn_x + q0[1]*rayn_y; + float q1d = q1[0]*rayn_x + q1[1]*rayn_y; + float q2d = q2[0]*rayn_x + q2[1]*rayn_y; + float rod = orig[0]*rayn_x + orig[1]*rayn_y; + + float q10d = q1d - q0d; + float q20d = q2d - q0d; + float q0rd = q0d - rod; + + hits[0][0] = q0rd + s0*(2.0f - 2.0f*s0)*q10d + s0*s0*q20d; + hits[0][1] = a*s0+b; + + if (num_s > 1) { + hits[1][0] = q0rd + s1*(2.0f - 2.0f*s1)*q10d + s1*s1*q20d; + hits[1][1] = a*s1+b; + return 2; + } else { + return 1; + } + } +} + +static int equal(float *a, float *b) +{ + return (a[0] == b[0] && a[1] == b[1]); +} + +static int stbtt__compute_crossings_x(float x, float y, int nverts, stbtt_vertex *verts) +{ + int i; + float orig[2], ray[2] = { 1, 0 }; + float y_frac; + int winding = 0; + + // make sure y never passes through a vertex of the shape + y_frac = (float) STBTT_fmod(y, 1.0f); + if (y_frac < 0.01f) + y += 0.01f; + else if (y_frac > 0.99f) + y -= 0.01f; + + orig[0] = x; + orig[1] = y; + + // test a ray from (-infinity,y) to (x,y) + for (i=0; i < nverts; ++i) { + if (verts[i].type == STBTT_vline) { + int x0 = (int) verts[i-1].x, y0 = (int) verts[i-1].y; + int x1 = (int) verts[i ].x, y1 = (int) verts[i ].y; + if (y > STBTT_min(y0,y1) && y < STBTT_max(y0,y1) && x > STBTT_min(x0,x1)) { + float x_inter = (y - y0) / (y1 - y0) * (x1-x0) + x0; + if (x_inter < x) + winding += (y0 < y1) ? 1 : -1; + } + } + if (verts[i].type == STBTT_vcurve) { + int x0 = (int) verts[i-1].x , y0 = (int) verts[i-1].y ; + int x1 = (int) verts[i ].cx, y1 = (int) verts[i ].cy; + int x2 = (int) verts[i ].x , y2 = (int) verts[i ].y ; + int ax = STBTT_min(x0,STBTT_min(x1,x2)), ay = STBTT_min(y0,STBTT_min(y1,y2)); + int by = STBTT_max(y0,STBTT_max(y1,y2)); + if (y > ay && y < by && x > ax) { + float q0[2],q1[2],q2[2]; + float hits[2][2]; + q0[0] = (float)x0; + q0[1] = (float)y0; + q1[0] = (float)x1; + q1[1] = (float)y1; + q2[0] = (float)x2; + q2[1] = (float)y2; + if (equal(q0,q1) || equal(q1,q2)) { + x0 = (int)verts[i-1].x; + y0 = (int)verts[i-1].y; + x1 = (int)verts[i ].x; + y1 = (int)verts[i ].y; + if (y > STBTT_min(y0,y1) && y < STBTT_max(y0,y1) && x > STBTT_min(x0,x1)) { + float x_inter = (y - y0) / (y1 - y0) * (x1-x0) + x0; + if (x_inter < x) + winding += (y0 < y1) ? 1 : -1; + } + } else { + int num_hits = stbtt__ray_intersect_bezier(orig, ray, q0, q1, q2, hits); + if (num_hits >= 1) + if (hits[0][0] < 0) + winding += (hits[0][1] < 0 ? 
-1 : 1); + if (num_hits >= 2) + if (hits[1][0] < 0) + winding += (hits[1][1] < 0 ? -1 : 1); + } + } + } + } + return winding; +} + +static float stbtt__cuberoot( float x ) +{ + if (x<0) + return -(float) STBTT_pow(-x,1.0f/3.0f); + else + return (float) STBTT_pow( x,1.0f/3.0f); +} + +// x^3 + a*x^2 + b*x + c = 0 +static int stbtt__solve_cubic(float a, float b, float c, float* r) +{ + float s = -a / 3; + float p = b - a*a / 3; + float q = a * (2*a*a - 9*b) / 27 + c; + float p3 = p*p*p; + float d = q*q + 4*p3 / 27; + if (d >= 0) { + float z = (float) STBTT_sqrt(d); + float u = (-q + z) / 2; + float v = (-q - z) / 2; + u = stbtt__cuberoot(u); + v = stbtt__cuberoot(v); + r[0] = s + u + v; + return 1; + } else { + float u = (float) STBTT_sqrt(-p/3); + float v = (float) STBTT_acos(-STBTT_sqrt(-27/p3) * q / 2) / 3; // p3 must be negative, since d is negative + float m = (float) STBTT_cos(v); + float n = (float) STBTT_cos(v-3.141592/2)*1.732050808f; + r[0] = s + u * 2 * m; + r[1] = s - u * (m + n); + r[2] = s - u * (m - n); + + //STBTT_assert( STBTT_fabs(((r[0]+a)*r[0]+b)*r[0]+c) < 0.05f); // these asserts may not be safe at all scales, though they're in bezier t parameter units so maybe? + //STBTT_assert( STBTT_fabs(((r[1]+a)*r[1]+b)*r[1]+c) < 0.05f); + //STBTT_assert( STBTT_fabs(((r[2]+a)*r[2]+b)*r[2]+c) < 0.05f); + return 3; + } +} + +STBTT_DEF unsigned char * stbtt_GetGlyphSDF(const stbtt_fontinfo *info, float scale, int glyph, int padding, unsigned char onedge_value, float pixel_dist_scale, int *width, int *height, int *xoff, int *yoff) +{ + float scale_x = scale, scale_y = scale; + int ix0,iy0,ix1,iy1; + int w,h; + unsigned char *data; + + if (scale == 0) return NULL; + + stbtt_GetGlyphBitmapBoxSubpixel(info, glyph, scale, scale, 0.0f,0.0f, &ix0,&iy0,&ix1,&iy1); + + // if empty, return NULL + if (ix0 == ix1 || iy0 == iy1) + return NULL; + + ix0 -= padding; + iy0 -= padding; + ix1 += padding; + iy1 += padding; + + w = (ix1 - ix0); + h = (iy1 - iy0); + + if (width ) *width = w; + if (height) *height = h; + if (xoff ) *xoff = ix0; + if (yoff ) *yoff = iy0; + + // invert for y-downwards bitmaps + scale_y = -scale_y; + + { + int x,y,i,j; + float *precompute; + stbtt_vertex *verts; + int num_verts = stbtt_GetGlyphShape(info, glyph, &verts); + data = (unsigned char *) STBTT_malloc(w * h, info->userdata); + precompute = (float *) STBTT_malloc(num_verts * sizeof(float), info->userdata); + + for (i=0,j=num_verts-1; i < num_verts; j=i++) { + if (verts[i].type == STBTT_vline) { + float x0 = verts[i].x*scale_x, y0 = verts[i].y*scale_y; + float x1 = verts[j].x*scale_x, y1 = verts[j].y*scale_y; + float dist = (float) STBTT_sqrt((x1-x0)*(x1-x0) + (y1-y0)*(y1-y0)); + precompute[i] = (dist == 0) ? 
0.0f : 1.0f / dist; + } else if (verts[i].type == STBTT_vcurve) { + float x2 = verts[j].x *scale_x, y2 = verts[j].y *scale_y; + float x1 = verts[i].cx*scale_x, y1 = verts[i].cy*scale_y; + float x0 = verts[i].x *scale_x, y0 = verts[i].y *scale_y; + float bx = x0 - 2*x1 + x2, by = y0 - 2*y1 + y2; + float len2 = bx*bx + by*by; + if (len2 != 0.0f) + precompute[i] = 1.0f / (bx*bx + by*by); + else + precompute[i] = 0.0f; + } else + precompute[i] = 0.0f; + } + + for (y=iy0; y < iy1; ++y) { + for (x=ix0; x < ix1; ++x) { + float val; + float min_dist = 999999.0f; + float sx = (float) x + 0.5f; + float sy = (float) y + 0.5f; + float x_gspace = (sx / scale_x); + float y_gspace = (sy / scale_y); + + int winding = stbtt__compute_crossings_x(x_gspace, y_gspace, num_verts, verts); // @OPTIMIZE: this could just be a rasterization, but needs to be line vs. non-tesselated curves so a new path + + for (i=0; i < num_verts; ++i) { + float x0 = verts[i].x*scale_x, y0 = verts[i].y*scale_y; + + if (verts[i].type == STBTT_vline && precompute[i] != 0.0f) { + float x1 = verts[i-1].x*scale_x, y1 = verts[i-1].y*scale_y; + + float dist,dist2 = (x0-sx)*(x0-sx) + (y0-sy)*(y0-sy); + if (dist2 < min_dist*min_dist) + min_dist = (float) STBTT_sqrt(dist2); + + // coarse culling against bbox + //if (sx > STBTT_min(x0,x1)-min_dist && sx < STBTT_max(x0,x1)+min_dist && + // sy > STBTT_min(y0,y1)-min_dist && sy < STBTT_max(y0,y1)+min_dist) + dist = (float) STBTT_fabs((x1-x0)*(y0-sy) - (y1-y0)*(x0-sx)) * precompute[i]; + STBTT_assert(i != 0); + if (dist < min_dist) { + // check position along line + // x' = x0 + t*(x1-x0), y' = y0 + t*(y1-y0) + // minimize (x'-sx)*(x'-sx)+(y'-sy)*(y'-sy) + float dx = x1-x0, dy = y1-y0; + float px = x0-sx, py = y0-sy; + // minimize (px+t*dx)^2 + (py+t*dy)^2 = px*px + 2*px*dx*t + t^2*dx*dx + py*py + 2*py*dy*t + t^2*dy*dy + // derivative: 2*px*dx + 2*py*dy + (2*dx*dx+2*dy*dy)*t, set to 0 and solve + float t = -(px*dx + py*dy) / (dx*dx + dy*dy); + if (t >= 0.0f && t <= 1.0f) + min_dist = dist; + } + } else if (verts[i].type == STBTT_vcurve) { + float x2 = verts[i-1].x *scale_x, y2 = verts[i-1].y *scale_y; + float x1 = verts[i ].cx*scale_x, y1 = verts[i ].cy*scale_y; + float box_x0 = STBTT_min(STBTT_min(x0,x1),x2); + float box_y0 = STBTT_min(STBTT_min(y0,y1),y2); + float box_x1 = STBTT_max(STBTT_max(x0,x1),x2); + float box_y1 = STBTT_max(STBTT_max(y0,y1),y2); + // coarse culling against bbox to avoid computing cubic unnecessarily + if (sx > box_x0-min_dist && sx < box_x1+min_dist && sy > box_y0-min_dist && sy < box_y1+min_dist) { + int num=0; + float ax = x1-x0, ay = y1-y0; + float bx = x0 - 2*x1 + x2, by = y0 - 2*y1 + y2; + float mx = x0 - sx, my = y0 - sy; + float res[3] = {0.f,0.f,0.f}; + float px,py,t,it,dist2; + float a_inv = precompute[i]; + if (a_inv == 0.0) { // if a_inv is 0, it's 2nd degree so use quadratic formula + float a = 3*(ax*bx + ay*by); + float b = 2*(ax*ax + ay*ay) + (mx*bx+my*by); + float c = mx*ax+my*ay; + if (a == 0.0) { // if a is 0, it's linear + if (b != 0.0) { + res[num++] = -c/b; + } + } else { + float discriminant = b*b - 4*a*c; + if (discriminant < 0) + num = 0; + else { + float root = (float) STBTT_sqrt(discriminant); + res[0] = (-b - root)/(2*a); + res[1] = (-b + root)/(2*a); + num = 2; // don't bother distinguishing 1-solution case, as code below will still work + } + } + } else { + float b = 3*(ax*bx + ay*by) * a_inv; // could precompute this as it doesn't depend on sample point + float c = (2*(ax*ax + ay*ay) + (mx*bx+my*by)) * a_inv; + float d = (mx*ax+my*ay) * a_inv; 
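+               // closest-point setup: d/dt |B(t) - (sx,sy)|^2 = 0 for the quadratic Bezier B(t)
+               // expands to a cubic whose leading coefficient is bx*bx + by*by, so scaling by
+               // a_inv (the precomputed 1/(bx*bx+by*by)) leaves a monic cubic t^3 + b*t^2 + c*t + d = 0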
+ num = stbtt__solve_cubic(b, c, d, res); + } + dist2 = (x0-sx)*(x0-sx) + (y0-sy)*(y0-sy); + if (dist2 < min_dist*min_dist) + min_dist = (float) STBTT_sqrt(dist2); + + if (num >= 1 && res[0] >= 0.0f && res[0] <= 1.0f) { + t = res[0], it = 1.0f - t; + px = it*it*x0 + 2*t*it*x1 + t*t*x2; + py = it*it*y0 + 2*t*it*y1 + t*t*y2; + dist2 = (px-sx)*(px-sx) + (py-sy)*(py-sy); + if (dist2 < min_dist * min_dist) + min_dist = (float) STBTT_sqrt(dist2); + } + if (num >= 2 && res[1] >= 0.0f && res[1] <= 1.0f) { + t = res[1], it = 1.0f - t; + px = it*it*x0 + 2*t*it*x1 + t*t*x2; + py = it*it*y0 + 2*t*it*y1 + t*t*y2; + dist2 = (px-sx)*(px-sx) + (py-sy)*(py-sy); + if (dist2 < min_dist * min_dist) + min_dist = (float) STBTT_sqrt(dist2); + } + if (num >= 3 && res[2] >= 0.0f && res[2] <= 1.0f) { + t = res[2], it = 1.0f - t; + px = it*it*x0 + 2*t*it*x1 + t*t*x2; + py = it*it*y0 + 2*t*it*y1 + t*t*y2; + dist2 = (px-sx)*(px-sx) + (py-sy)*(py-sy); + if (dist2 < min_dist * min_dist) + min_dist = (float) STBTT_sqrt(dist2); + } + } + } + } + if (winding == 0) + min_dist = -min_dist; // if outside the shape, value is negative + val = onedge_value + pixel_dist_scale * min_dist; + if (val < 0) + val = 0; + else if (val > 255) + val = 255; + data[(y-iy0)*w+(x-ix0)] = (unsigned char) val; + } + } + STBTT_free(precompute, info->userdata); + STBTT_free(verts, info->userdata); + } + return data; +} + +STBTT_DEF unsigned char * stbtt_GetCodepointSDF(const stbtt_fontinfo *info, float scale, int codepoint, int padding, unsigned char onedge_value, float pixel_dist_scale, int *width, int *height, int *xoff, int *yoff) +{ + return stbtt_GetGlyphSDF(info, scale, stbtt_FindGlyphIndex(info, codepoint), padding, onedge_value, pixel_dist_scale, width, height, xoff, yoff); +} + +STBTT_DEF void stbtt_FreeSDF(unsigned char *bitmap, void *userdata) +{ + STBTT_free(bitmap, userdata); +} + +////////////////////////////////////////////////////////////////////////////// +// +// font name matching -- recommended not to use this +// + +// check if a utf8 string contains a prefix which is the utf16 string; if so return length of matching utf8 string +static stbtt_int32 stbtt__CompareUTF8toUTF16_bigendian_prefix(stbtt_uint8 *s1, stbtt_int32 len1, stbtt_uint8 *s2, stbtt_int32 len2) +{ + stbtt_int32 i=0; + + // convert utf16 to utf8 and compare the results while converting + while (len2) { + stbtt_uint16 ch = s2[0]*256 + s2[1]; + if (ch < 0x80) { + if (i >= len1) return -1; + if (s1[i++] != ch) return -1; + } else if (ch < 0x800) { + if (i+1 >= len1) return -1; + if (s1[i++] != 0xc0 + (ch >> 6)) return -1; + if (s1[i++] != 0x80 + (ch & 0x3f)) return -1; + } else if (ch >= 0xd800 && ch < 0xdc00) { + stbtt_uint32 c; + stbtt_uint16 ch2 = s2[2]*256 + s2[3]; + if (i+3 >= len1) return -1; + c = ((ch - 0xd800) << 10) + (ch2 - 0xdc00) + 0x10000; + if (s1[i++] != 0xf0 + (c >> 18)) return -1; + if (s1[i++] != 0x80 + ((c >> 12) & 0x3f)) return -1; + if (s1[i++] != 0x80 + ((c >> 6) & 0x3f)) return -1; + if (s1[i++] != 0x80 + ((c ) & 0x3f)) return -1; + s2 += 2; // plus another 2 below + len2 -= 2; + } else if (ch >= 0xdc00 && ch < 0xe000) { + return -1; + } else { + if (i+2 >= len1) return -1; + if (s1[i++] != 0xe0 + (ch >> 12)) return -1; + if (s1[i++] != 0x80 + ((ch >> 6) & 0x3f)) return -1; + if (s1[i++] != 0x80 + ((ch ) & 0x3f)) return -1; + } + s2 += 2; + len2 -= 2; + } + return i; +} + +static int stbtt_CompareUTF8toUTF16_bigendian_internal(char *s1, int len1, char *s2, int len2) +{ + return len1 == stbtt__CompareUTF8toUTF16_bigendian_prefix((stbtt_uint8*) 
s1, len1, (stbtt_uint8*) s2, len2); +} + +// returns results in whatever encoding you request... but note that 2-byte encodings +// will be BIG-ENDIAN... use stbtt_CompareUTF8toUTF16_bigendian() to compare +STBTT_DEF const char *stbtt_GetFontNameString(const stbtt_fontinfo *font, int *length, int platformID, int encodingID, int languageID, int nameID) +{ + stbtt_int32 i,count,stringOffset; + stbtt_uint8 *fc = font->data; + stbtt_uint32 offset = font->fontstart; + stbtt_uint32 nm = stbtt__find_table(fc, offset, "name"); + if (!nm) return NULL; + + count = ttUSHORT(fc+nm+2); + stringOffset = nm + ttUSHORT(fc+nm+4); + for (i=0; i < count; ++i) { + stbtt_uint32 loc = nm + 6 + 12 * i; + if (platformID == ttUSHORT(fc+loc+0) && encodingID == ttUSHORT(fc+loc+2) + && languageID == ttUSHORT(fc+loc+4) && nameID == ttUSHORT(fc+loc+6)) { + *length = ttUSHORT(fc+loc+8); + return (const char *) (fc+stringOffset+ttUSHORT(fc+loc+10)); + } + } + return NULL; +} + +static int stbtt__matchpair(stbtt_uint8 *fc, stbtt_uint32 nm, stbtt_uint8 *name, stbtt_int32 nlen, stbtt_int32 target_id, stbtt_int32 next_id) +{ + stbtt_int32 i; + stbtt_int32 count = ttUSHORT(fc+nm+2); + stbtt_int32 stringOffset = nm + ttUSHORT(fc+nm+4); + + for (i=0; i < count; ++i) { + stbtt_uint32 loc = nm + 6 + 12 * i; + stbtt_int32 id = ttUSHORT(fc+loc+6); + if (id == target_id) { + // find the encoding + stbtt_int32 platform = ttUSHORT(fc+loc+0), encoding = ttUSHORT(fc+loc+2), language = ttUSHORT(fc+loc+4); + + // is this a Unicode encoding? + if (platform == 0 || (platform == 3 && encoding == 1) || (platform == 3 && encoding == 10)) { + stbtt_int32 slen = ttUSHORT(fc+loc+8); + stbtt_int32 off = ttUSHORT(fc+loc+10); + + // check if there's a prefix match + stbtt_int32 matchlen = stbtt__CompareUTF8toUTF16_bigendian_prefix(name, nlen, fc+stringOffset+off,slen); + if (matchlen >= 0) { + // check for target_id+1 immediately following, with same encoding & language + if (i+1 < count && ttUSHORT(fc+loc+12+6) == next_id && ttUSHORT(fc+loc+12) == platform && ttUSHORT(fc+loc+12+2) == encoding && ttUSHORT(fc+loc+12+4) == language) { + slen = ttUSHORT(fc+loc+12+8); + off = ttUSHORT(fc+loc+12+10); + if (slen == 0) { + if (matchlen == nlen) + return 1; + } else if (matchlen < nlen && name[matchlen] == ' ') { + ++matchlen; + if (stbtt_CompareUTF8toUTF16_bigendian_internal((char*) (name+matchlen), nlen-matchlen, (char*)(fc+stringOffset+off),slen)) + return 1; + } + } else { + // if nothing immediately following + if (matchlen == nlen) + return 1; + } + } + } + + // @TODO handle other encodings + } + } + return 0; +} + +static int stbtt__matches(stbtt_uint8 *fc, stbtt_uint32 offset, stbtt_uint8 *name, stbtt_int32 flags) +{ + stbtt_int32 nlen = (stbtt_int32) STBTT_strlen((char *) name); + stbtt_uint32 nm,hd; + if (!stbtt__isfont(fc+offset)) return 0; + + // check italics/bold/underline flags in macStyle... 
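+   // macStyle lives at offset 44 in the 'head' table: bit 0 = bold, bit 1 = italic,
+   // bit 2 = underline; only these low three bits are compared against the
+   // STBTT_MACSTYLE_* flags passed by the caller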
+ if (flags) { + hd = stbtt__find_table(fc, offset, "head"); + if ((ttUSHORT(fc+hd+44) & 7) != (flags & 7)) return 0; + } + + nm = stbtt__find_table(fc, offset, "name"); + if (!nm) return 0; + + if (flags) { + // if we checked the macStyle flags, then just check the family and ignore the subfamily + if (stbtt__matchpair(fc, nm, name, nlen, 16, -1)) return 1; + if (stbtt__matchpair(fc, nm, name, nlen, 1, -1)) return 1; + if (stbtt__matchpair(fc, nm, name, nlen, 3, -1)) return 1; + } else { + if (stbtt__matchpair(fc, nm, name, nlen, 16, 17)) return 1; + if (stbtt__matchpair(fc, nm, name, nlen, 1, 2)) return 1; + if (stbtt__matchpair(fc, nm, name, nlen, 3, -1)) return 1; + } + + return 0; +} + +static int stbtt_FindMatchingFont_internal(unsigned char *font_collection, char *name_utf8, stbtt_int32 flags) +{ + stbtt_int32 i; + for (i=0;;++i) { + stbtt_int32 off = stbtt_GetFontOffsetForIndex(font_collection, i); + if (off < 0) return off; + if (stbtt__matches((stbtt_uint8 *) font_collection, off, (stbtt_uint8*) name_utf8, flags)) + return off; + } +} + +#if defined(__GNUC__) || defined(__clang__) +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wcast-qual" +#endif + +STBTT_DEF int stbtt_BakeFontBitmap(const unsigned char *data, int offset, + float pixel_height, unsigned char *pixels, int pw, int ph, + int first_char, int num_chars, stbtt_bakedchar *chardata) +{ + return stbtt_BakeFontBitmap_internal((unsigned char *) data, offset, pixel_height, pixels, pw, ph, first_char, num_chars, chardata); +} + +STBTT_DEF int stbtt_GetFontOffsetForIndex(const unsigned char *data, int index) +{ + return stbtt_GetFontOffsetForIndex_internal((unsigned char *) data, index); +} + +STBTT_DEF int stbtt_GetNumberOfFonts(const unsigned char *data) +{ + return stbtt_GetNumberOfFonts_internal((unsigned char *) data); +} + +STBTT_DEF int stbtt_InitFont(stbtt_fontinfo *info, const unsigned char *data, int offset) +{ + return stbtt_InitFont_internal(info, (unsigned char *) data, offset); +} + +STBTT_DEF int stbtt_FindMatchingFont(const unsigned char *fontdata, const char *name, int flags) +{ + return stbtt_FindMatchingFont_internal((unsigned char *) fontdata, (char *) name, flags); +} + +STBTT_DEF int stbtt_CompareUTF8toUTF16_bigendian(const char *s1, int len1, const char *s2, int len2) +{ + return stbtt_CompareUTF8toUTF16_bigendian_internal((char *) s1, len1, (char *) s2, len2); +} + +#if defined(__GNUC__) || defined(__clang__) +#pragma GCC diagnostic pop +#endif + +#endif // STB_TRUETYPE_IMPLEMENTATION + + +// FULL VERSION HISTORY +// +// 1.25 (2021-07-11) many fixes +// 1.24 (2020-02-05) fix warning +// 1.23 (2020-02-02) query SVG data for glyphs; query whole kerning table (but only kern not GPOS) +// 1.22 (2019-08-11) minimize missing-glyph duplication; fix kerning if both 'GPOS' and 'kern' are defined +// 1.21 (2019-02-25) fix warning +// 1.20 (2019-02-07) PackFontRange skips missing codepoints; GetScaleFontVMetrics() +// 1.19 (2018-02-11) OpenType GPOS kerning (horizontal only), STBTT_fmod +// 1.18 (2018-01-29) add missing function +// 1.17 (2017-07-23) make more arguments const; doc fix +// 1.16 (2017-07-12) SDF support +// 1.15 (2017-03-03) make more arguments const +// 1.14 (2017-01-16) num-fonts-in-TTC function +// 1.13 (2017-01-02) support OpenType fonts, certain Apple fonts +// 1.12 (2016-10-25) suppress warnings about casting away const with -Wcast-qual +// 1.11 (2016-04-02) fix unused-variable warning +// 1.10 (2016-04-02) allow user-defined fabs() replacement +// fix memory leak if 
fontsize=0.0 +// fix warning from duplicate typedef +// 1.09 (2016-01-16) warning fix; avoid crash on outofmem; use alloc userdata for PackFontRanges +// 1.08 (2015-09-13) document stbtt_Rasterize(); fixes for vertical & horizontal edges +// 1.07 (2015-08-01) allow PackFontRanges to accept arrays of sparse codepoints; +// allow PackFontRanges to pack and render in separate phases; +// fix stbtt_GetFontOFfsetForIndex (never worked for non-0 input?); +// fixed an assert() bug in the new rasterizer +// replace assert() with STBTT_assert() in new rasterizer +// 1.06 (2015-07-14) performance improvements (~35% faster on x86 and x64 on test machine) +// also more precise AA rasterizer, except if shapes overlap +// remove need for STBTT_sort +// 1.05 (2015-04-15) fix misplaced definitions for STBTT_STATIC +// 1.04 (2015-04-15) typo in example +// 1.03 (2015-04-12) STBTT_STATIC, fix memory leak in new packing, various fixes +// 1.02 (2014-12-10) fix various warnings & compile issues w/ stb_rect_pack, C++ +// 1.01 (2014-12-08) fix subpixel position when oversampling to exactly match +// non-oversampled; STBTT_POINT_SIZE for packed case only +// 1.00 (2014-12-06) add new PackBegin etc. API, w/ support for oversampling +// 0.99 (2014-09-18) fix multiple bugs with subpixel rendering (ryg) +// 0.9 (2014-08-07) support certain mac/iOS fonts without an MS platformID +// 0.8b (2014-07-07) fix a warning +// 0.8 (2014-05-25) fix a few more warnings +// 0.7 (2013-09-25) bugfix: subpixel glyph bug fixed in 0.5 had come back +// 0.6c (2012-07-24) improve documentation +// 0.6b (2012-07-20) fix a few more warnings +// 0.6 (2012-07-17) fix warnings; added stbtt_ScaleForMappingEmToPixels, +// stbtt_GetFontBoundingBox, stbtt_IsGlyphEmpty +// 0.5 (2011-12-09) bugfixes: +// subpixel glyph renderer computed wrong bounding box +// first vertex of shape can be off-curve (FreeSans) +// 0.4b (2011-12-03) fixed an error in the font baking example +// 0.4 (2011-12-01) kerning, subpixel rendering (tor) +// bugfixes for: +// codepoint-to-glyph conversion using table fmt=12 +// codepoint-to-glyph conversion using table fmt=4 +// stbtt_GetBakedQuad with non-square texture (Zer) +// updated Hello World! sample to use kerning and subpixel +// fixed some warnings +// 0.3 (2009-06-24) cmap fmt=12, compound shapes (MM) +// userdata, malloc-from-userdata, non-zero fill (stb) +// 0.2 (2009-03-11) Fix unsigned/signed char warnings +// 0.1 (2009-03-09) First public release +// + +/* +------------------------------------------------------------------------------ +This software is available under 2 licenses -- choose whichever you prefer. +------------------------------------------------------------------------------ +ALTERNATIVE A - MIT License +Copyright (c) 2017 Sean Barrett +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +------------------------------------------------------------------------------ +ALTERNATIVE B - Public Domain (www.unlicense.org) +This is free and unencumbered software released into the public domain. +Anyone is free to copy, modify, publish, use, compile, sell, or distribute this +software, either in source code form or as a compiled binary, for any purpose, +commercial or non-commercial, and by any means. +In jurisdictions that recognize copyright laws, the author or authors of this +software dedicate any and all copyright interest in the software to the public +domain. We make this dedication for the benefit of the public at large and to +the detriment of our heirs and successors. We intend this dedication to be an +overt act of relinquishment in perpetuity of all present and future rights to +this software under copyright law. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +------------------------------------------------------------------------------ +*/ diff --git a/MacroLibX/third_party/vk_video/vulkan_video_codec_h264std.h b/MacroLibX/third_party/vk_video/vulkan_video_codec_h264std.h new file mode 100644 index 0000000..ef7eaf7 --- /dev/null +++ b/MacroLibX/third_party/vk_video/vulkan_video_codec_h264std.h @@ -0,0 +1,312 @@ +#ifndef VULKAN_VIDEO_CODEC_H264STD_H_ +#define VULKAN_VIDEO_CODEC_H264STD_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h264std is a preprocessor guard. Do not pass it to API calls. 
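+// The Std* types below mirror the H.264 (AVC) parameter-set syntax: field names follow
+// the ITU-T H.264 specification, and the structures are consumed by the VK_KHR_video_*
+// decode/encode extensions rather than being passed to the core Vulkan API directly.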
+#define vulkan_video_codec_h264std 1 +#include "vulkan_video_codecs_common.h" +#define STD_VIDEO_H264_CPB_CNT_LIST_SIZE 32 +#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS 6 +#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS 16 +#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS 6 +#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS 64 +#define STD_VIDEO_H264_MAX_NUM_LIST_REF 32 +#define STD_VIDEO_H264_MAX_CHROMA_PLANES 2 +#define STD_VIDEO_H264_NO_REFERENCE_PICTURE 0xFF + +typedef enum StdVideoH264ChromaFormatIdc { + STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME = 0, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_420 = 1, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_422 = 2, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_444 = 3, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264ChromaFormatIdc; + +typedef enum StdVideoH264ProfileIdc { + STD_VIDEO_H264_PROFILE_IDC_BASELINE = 66, + STD_VIDEO_H264_PROFILE_IDC_MAIN = 77, + STD_VIDEO_H264_PROFILE_IDC_HIGH = 100, + STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE = 244, + STD_VIDEO_H264_PROFILE_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264ProfileIdc; + +typedef enum StdVideoH264LevelIdc { + STD_VIDEO_H264_LEVEL_IDC_1_0 = 0, + STD_VIDEO_H264_LEVEL_IDC_1_1 = 1, + STD_VIDEO_H264_LEVEL_IDC_1_2 = 2, + STD_VIDEO_H264_LEVEL_IDC_1_3 = 3, + STD_VIDEO_H264_LEVEL_IDC_2_0 = 4, + STD_VIDEO_H264_LEVEL_IDC_2_1 = 5, + STD_VIDEO_H264_LEVEL_IDC_2_2 = 6, + STD_VIDEO_H264_LEVEL_IDC_3_0 = 7, + STD_VIDEO_H264_LEVEL_IDC_3_1 = 8, + STD_VIDEO_H264_LEVEL_IDC_3_2 = 9, + STD_VIDEO_H264_LEVEL_IDC_4_0 = 10, + STD_VIDEO_H264_LEVEL_IDC_4_1 = 11, + STD_VIDEO_H264_LEVEL_IDC_4_2 = 12, + STD_VIDEO_H264_LEVEL_IDC_5_0 = 13, + STD_VIDEO_H264_LEVEL_IDC_5_1 = 14, + STD_VIDEO_H264_LEVEL_IDC_5_2 = 15, + STD_VIDEO_H264_LEVEL_IDC_6_0 = 16, + STD_VIDEO_H264_LEVEL_IDC_6_1 = 17, + STD_VIDEO_H264_LEVEL_IDC_6_2 = 18, + STD_VIDEO_H264_LEVEL_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264LevelIdc; + +typedef enum StdVideoH264PocType { + STD_VIDEO_H264_POC_TYPE_0 = 0, + STD_VIDEO_H264_POC_TYPE_1 = 1, + STD_VIDEO_H264_POC_TYPE_2 = 2, + STD_VIDEO_H264_POC_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_POC_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264PocType; + +typedef enum StdVideoH264AspectRatioIdc { + STD_VIDEO_H264_ASPECT_RATIO_IDC_UNSPECIFIED = 0, + STD_VIDEO_H264_ASPECT_RATIO_IDC_SQUARE = 1, + STD_VIDEO_H264_ASPECT_RATIO_IDC_12_11 = 2, + STD_VIDEO_H264_ASPECT_RATIO_IDC_10_11 = 3, + STD_VIDEO_H264_ASPECT_RATIO_IDC_16_11 = 4, + STD_VIDEO_H264_ASPECT_RATIO_IDC_40_33 = 5, + STD_VIDEO_H264_ASPECT_RATIO_IDC_24_11 = 6, + STD_VIDEO_H264_ASPECT_RATIO_IDC_20_11 = 7, + STD_VIDEO_H264_ASPECT_RATIO_IDC_32_11 = 8, + STD_VIDEO_H264_ASPECT_RATIO_IDC_80_33 = 9, + STD_VIDEO_H264_ASPECT_RATIO_IDC_18_11 = 10, + STD_VIDEO_H264_ASPECT_RATIO_IDC_15_11 = 11, + STD_VIDEO_H264_ASPECT_RATIO_IDC_64_33 = 12, + STD_VIDEO_H264_ASPECT_RATIO_IDC_160_99 = 13, + STD_VIDEO_H264_ASPECT_RATIO_IDC_4_3 = 14, + STD_VIDEO_H264_ASPECT_RATIO_IDC_3_2 = 15, + STD_VIDEO_H264_ASPECT_RATIO_IDC_2_1 = 16, + STD_VIDEO_H264_ASPECT_RATIO_IDC_EXTENDED_SAR = 255, + STD_VIDEO_H264_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264AspectRatioIdc; + +typedef enum StdVideoH264WeightedBipredIdc { + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_DEFAULT = 0, + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_EXPLICIT = 1, + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_IMPLICIT = 2, + 
STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264WeightedBipredIdc; + +typedef enum StdVideoH264ModificationOfPicNumsIdc { + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_SUBTRACT = 0, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_ADD = 1, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_LONG_TERM = 2, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_END = 3, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264ModificationOfPicNumsIdc; + +typedef enum StdVideoH264MemMgmtControlOp { + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_END = 0, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_SHORT_TERM = 1, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_LONG_TERM = 2, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_LONG_TERM = 3, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_SET_MAX_LONG_TERM_INDEX = 4, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_ALL = 5, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_CURRENT_AS_LONG_TERM = 6, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264MemMgmtControlOp; + +typedef enum StdVideoH264CabacInitIdc { + STD_VIDEO_H264_CABAC_INIT_IDC_0 = 0, + STD_VIDEO_H264_CABAC_INIT_IDC_1 = 1, + STD_VIDEO_H264_CABAC_INIT_IDC_2 = 2, + STD_VIDEO_H264_CABAC_INIT_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_CABAC_INIT_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264CabacInitIdc; + +typedef enum StdVideoH264DisableDeblockingFilterIdc { + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_DISABLED = 0, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_ENABLED = 1, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_PARTIAL = 2, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264DisableDeblockingFilterIdc; + +typedef enum StdVideoH264SliceType { + STD_VIDEO_H264_SLICE_TYPE_P = 0, + STD_VIDEO_H264_SLICE_TYPE_B = 1, + STD_VIDEO_H264_SLICE_TYPE_I = 2, + STD_VIDEO_H264_SLICE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_SLICE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264SliceType; + +typedef enum StdVideoH264PictureType { + STD_VIDEO_H264_PICTURE_TYPE_P = 0, + STD_VIDEO_H264_PICTURE_TYPE_B = 1, + STD_VIDEO_H264_PICTURE_TYPE_I = 2, + STD_VIDEO_H264_PICTURE_TYPE_IDR = 5, + STD_VIDEO_H264_PICTURE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264PictureType; + +typedef enum StdVideoH264NonVclNaluType { + STD_VIDEO_H264_NON_VCL_NALU_TYPE_SPS = 0, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_PPS = 1, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_AUD = 2, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_PREFIX = 3, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_SEQUENCE = 4, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_STREAM = 5, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_PRECODED = 6, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264NonVclNaluType; +typedef struct StdVideoH264SpsVuiFlags { + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t color_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t timing_info_present_flag : 1; + uint32_t fixed_frame_rate_flag : 1; + uint32_t bitstream_restriction_flag : 1; 
+ uint32_t nal_hrd_parameters_present_flag : 1; + uint32_t vcl_hrd_parameters_present_flag : 1; +} StdVideoH264SpsVuiFlags; + +typedef struct StdVideoH264HrdParameters { + uint8_t cpb_cnt_minus1; + uint8_t bit_rate_scale; + uint8_t cpb_size_scale; + uint8_t reserved1; + uint32_t bit_rate_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE]; + uint32_t cpb_size_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE]; + uint8_t cbr_flag[STD_VIDEO_H264_CPB_CNT_LIST_SIZE]; + uint32_t initial_cpb_removal_delay_length_minus1; + uint32_t cpb_removal_delay_length_minus1; + uint32_t dpb_output_delay_length_minus1; + uint32_t time_offset_length; +} StdVideoH264HrdParameters; + +typedef struct StdVideoH264SequenceParameterSetVui { + StdVideoH264SpsVuiFlags flags; + StdVideoH264AspectRatioIdc aspect_ratio_idc; + uint16_t sar_width; + uint16_t sar_height; + uint8_t video_format; + uint8_t colour_primaries; + uint8_t transfer_characteristics; + uint8_t matrix_coefficients; + uint32_t num_units_in_tick; + uint32_t time_scale; + uint8_t max_num_reorder_frames; + uint8_t max_dec_frame_buffering; + uint8_t chroma_sample_loc_type_top_field; + uint8_t chroma_sample_loc_type_bottom_field; + uint32_t reserved1; + const StdVideoH264HrdParameters* pHrdParameters; +} StdVideoH264SequenceParameterSetVui; + +typedef struct StdVideoH264SpsFlags { + uint32_t constraint_set0_flag : 1; + uint32_t constraint_set1_flag : 1; + uint32_t constraint_set2_flag : 1; + uint32_t constraint_set3_flag : 1; + uint32_t constraint_set4_flag : 1; + uint32_t constraint_set5_flag : 1; + uint32_t direct_8x8_inference_flag : 1; + uint32_t mb_adaptive_frame_field_flag : 1; + uint32_t frame_mbs_only_flag : 1; + uint32_t delta_pic_order_always_zero_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t gaps_in_frame_num_value_allowed_flag : 1; + uint32_t qpprime_y_zero_transform_bypass_flag : 1; + uint32_t frame_cropping_flag : 1; + uint32_t seq_scaling_matrix_present_flag : 1; + uint32_t vui_parameters_present_flag : 1; +} StdVideoH264SpsFlags; + +typedef struct StdVideoH264ScalingLists { + uint16_t scaling_list_present_mask; + uint16_t use_default_scaling_matrix_mask; + uint8_t ScalingList4x4[STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS]; + uint8_t ScalingList8x8[STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS]; +} StdVideoH264ScalingLists; + +typedef struct StdVideoH264SequenceParameterSet { + StdVideoH264SpsFlags flags; + StdVideoH264ProfileIdc profile_idc; + StdVideoH264LevelIdc level_idc; + StdVideoH264ChromaFormatIdc chroma_format_idc; + uint8_t seq_parameter_set_id; + uint8_t bit_depth_luma_minus8; + uint8_t bit_depth_chroma_minus8; + uint8_t log2_max_frame_num_minus4; + StdVideoH264PocType pic_order_cnt_type; + int32_t offset_for_non_ref_pic; + int32_t offset_for_top_to_bottom_field; + uint8_t log2_max_pic_order_cnt_lsb_minus4; + uint8_t num_ref_frames_in_pic_order_cnt_cycle; + uint8_t max_num_ref_frames; + uint8_t reserved1; + uint32_t pic_width_in_mbs_minus1; + uint32_t pic_height_in_map_units_minus1; + uint32_t frame_crop_left_offset; + uint32_t frame_crop_right_offset; + uint32_t frame_crop_top_offset; + uint32_t frame_crop_bottom_offset; + uint32_t reserved2; + const int32_t* pOffsetForRefFrame; + const StdVideoH264ScalingLists* pScalingLists; + const StdVideoH264SequenceParameterSetVui* pSequenceParameterSetVui; +} StdVideoH264SequenceParameterSet; + +typedef struct StdVideoH264PpsFlags { + uint32_t transform_8x8_mode_flag : 1; + uint32_t 
redundant_pic_cnt_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t bottom_field_pic_order_in_frame_present_flag : 1; + uint32_t entropy_coding_mode_flag : 1; + uint32_t pic_scaling_matrix_present_flag : 1; +} StdVideoH264PpsFlags; + +typedef struct StdVideoH264PictureParameterSet { + StdVideoH264PpsFlags flags; + uint8_t seq_parameter_set_id; + uint8_t pic_parameter_set_id; + uint8_t num_ref_idx_l0_default_active_minus1; + uint8_t num_ref_idx_l1_default_active_minus1; + StdVideoH264WeightedBipredIdc weighted_bipred_idc; + int8_t pic_init_qp_minus26; + int8_t pic_init_qs_minus26; + int8_t chroma_qp_index_offset; + int8_t second_chroma_qp_index_offset; + const StdVideoH264ScalingLists* pScalingLists; +} StdVideoH264PictureParameterSet; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vk_video/vulkan_video_codec_h264std_decode.h b/MacroLibX/third_party/vk_video/vulkan_video_codec_h264std_decode.h new file mode 100644 index 0000000..dd24112 --- /dev/null +++ b/MacroLibX/third_party/vk_video/vulkan_video_codec_h264std_decode.h @@ -0,0 +1,77 @@ +#ifndef VULKAN_VIDEO_CODEC_H264STD_DECODE_H_ +#define VULKAN_VIDEO_CODEC_H264STD_DECODE_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h264std_decode is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_h264std_decode 1 +#include "vulkan_video_codec_h264std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_decode" +#define STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE 2 + +typedef enum StdVideoDecodeH264FieldOrderCount { + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP = 0, + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_BOTTOM = 1, + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_INVALID = 0x7FFFFFFF, + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_MAX_ENUM = 0x7FFFFFFF +} StdVideoDecodeH264FieldOrderCount; +typedef struct StdVideoDecodeH264PictureInfoFlags { + uint32_t field_pic_flag : 1; + uint32_t is_intra : 1; + uint32_t IdrPicFlag : 1; + uint32_t bottom_field_flag : 1; + uint32_t is_reference : 1; + uint32_t complementary_field_pair : 1; +} StdVideoDecodeH264PictureInfoFlags; + +typedef struct StdVideoDecodeH264PictureInfo { + StdVideoDecodeH264PictureInfoFlags flags; + uint8_t seq_parameter_set_id; + uint8_t pic_parameter_set_id; + uint8_t reserved1; + uint8_t reserved2; + uint16_t frame_num; + uint16_t idr_pic_id; + int32_t PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE]; +} StdVideoDecodeH264PictureInfo; + +typedef struct StdVideoDecodeH264ReferenceInfoFlags { + uint32_t top_field_flag : 1; + uint32_t bottom_field_flag : 1; + uint32_t used_for_long_term_reference : 1; + uint32_t is_non_existing : 1; +} StdVideoDecodeH264ReferenceInfoFlags; + +typedef struct StdVideoDecodeH264ReferenceInfo { + StdVideoDecodeH264ReferenceInfoFlags flags; + uint16_t FrameNum; + uint16_t reserved; + int32_t PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE]; +} StdVideoDecodeH264ReferenceInfo; + + +#ifdef 
__cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vk_video/vulkan_video_codec_h264std_encode.h b/MacroLibX/third_party/vk_video/vulkan_video_codec_h264std_encode.h new file mode 100644 index 0000000..366666a --- /dev/null +++ b/MacroLibX/third_party/vk_video/vulkan_video_codec_h264std_encode.h @@ -0,0 +1,147 @@ +#ifndef VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_ +#define VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h264std_encode is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_h264std_encode 1 +#include "vulkan_video_codec_h264std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_encode" +typedef struct StdVideoEncodeH264WeightTableFlags { + uint32_t luma_weight_l0_flag; + uint32_t chroma_weight_l0_flag; + uint32_t luma_weight_l1_flag; + uint32_t chroma_weight_l1_flag; +} StdVideoEncodeH264WeightTableFlags; + +typedef struct StdVideoEncodeH264WeightTable { + StdVideoEncodeH264WeightTableFlags flags; + uint8_t luma_log2_weight_denom; + uint8_t chroma_log2_weight_denom; + int8_t luma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t luma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t chroma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; + int8_t chroma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; + int8_t luma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t luma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t chroma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; + int8_t chroma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; +} StdVideoEncodeH264WeightTable; + +typedef struct StdVideoEncodeH264SliceHeaderFlags { + uint32_t direct_spatial_mv_pred_flag : 1; + uint32_t num_ref_idx_active_override_flag : 1; + uint32_t reserved : 30; +} StdVideoEncodeH264SliceHeaderFlags; + +typedef struct StdVideoEncodeH264PictureInfoFlags { + uint32_t IdrPicFlag : 1; + uint32_t is_reference : 1; + uint32_t no_output_of_prior_pics_flag : 1; + uint32_t long_term_reference_flag : 1; + uint32_t adaptive_ref_pic_marking_mode_flag : 1; + uint32_t reserved : 27; +} StdVideoEncodeH264PictureInfoFlags; + +typedef struct StdVideoEncodeH264ReferenceInfoFlags { + uint32_t used_for_long_term_reference : 1; + uint32_t reserved : 31; +} StdVideoEncodeH264ReferenceInfoFlags; + +typedef struct StdVideoEncodeH264ReferenceListsInfoFlags { + uint32_t ref_pic_list_modification_flag_l0 : 1; + uint32_t ref_pic_list_modification_flag_l1 : 1; + uint32_t reserved : 30; +} StdVideoEncodeH264ReferenceListsInfoFlags; + +typedef struct StdVideoEncodeH264RefListModEntry { + StdVideoH264ModificationOfPicNumsIdc modification_of_pic_nums_idc; + uint16_t abs_diff_pic_num_minus1; + uint16_t long_term_pic_num; +} StdVideoEncodeH264RefListModEntry; + +typedef struct StdVideoEncodeH264RefPicMarkingEntry { + StdVideoH264MemMgmtControlOp memory_management_control_operation; + uint16_t difference_of_pic_nums_minus1; + uint16_t long_term_pic_num; + 
uint16_t long_term_frame_idx; + uint16_t max_long_term_frame_idx_plus1; +} StdVideoEncodeH264RefPicMarkingEntry; + +typedef struct StdVideoEncodeH264ReferenceListsInfo { + StdVideoEncodeH264ReferenceListsInfoFlags flags; + uint8_t num_ref_idx_l0_active_minus1; + uint8_t num_ref_idx_l1_active_minus1; + uint8_t RefPicList0[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + uint8_t RefPicList1[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + uint8_t refList0ModOpCount; + uint8_t refList1ModOpCount; + uint8_t refPicMarkingOpCount; + uint8_t reserved1[7]; + const StdVideoEncodeH264RefListModEntry* pRefList0ModOperations; + const StdVideoEncodeH264RefListModEntry* pRefList1ModOperations; + const StdVideoEncodeH264RefPicMarkingEntry* pRefPicMarkingOperations; +} StdVideoEncodeH264ReferenceListsInfo; + +typedef struct StdVideoEncodeH264PictureInfo { + StdVideoEncodeH264PictureInfoFlags flags; + uint8_t seq_parameter_set_id; + uint8_t pic_parameter_set_id; + uint16_t idr_pic_id; + StdVideoH264PictureType primary_pic_type; + uint32_t frame_num; + int32_t PicOrderCnt; + uint8_t temporal_id; + uint8_t reserved1[3]; + const StdVideoEncodeH264ReferenceListsInfo* pRefLists; +} StdVideoEncodeH264PictureInfo; + +typedef struct StdVideoEncodeH264ReferenceInfo { + StdVideoEncodeH264ReferenceInfoFlags flags; + StdVideoH264PictureType primary_pic_type; + uint32_t FrameNum; + int32_t PicOrderCnt; + uint16_t long_term_pic_num; + uint16_t long_term_frame_idx; + uint8_t temporal_id; +} StdVideoEncodeH264ReferenceInfo; + +typedef struct StdVideoEncodeH264SliceHeader { + StdVideoEncodeH264SliceHeaderFlags flags; + uint32_t first_mb_in_slice; + StdVideoH264SliceType slice_type; + int8_t slice_alpha_c0_offset_div2; + int8_t slice_beta_offset_div2; + int8_t slice_qp_delta; + uint8_t reserved1; + StdVideoH264CabacInitIdc cabac_init_idc; + StdVideoH264DisableDeblockingFilterIdc disable_deblocking_filter_idc; + const StdVideoEncodeH264WeightTable* pWeightTable; +} StdVideoEncodeH264SliceHeader; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vk_video/vulkan_video_codec_h265std.h b/MacroLibX/third_party/vk_video/vulkan_video_codec_h265std.h new file mode 100644 index 0000000..ff5d0da --- /dev/null +++ b/MacroLibX/third_party/vk_video/vulkan_video_codec_h265std.h @@ -0,0 +1,446 @@ +#ifndef VULKAN_VIDEO_CODEC_H265STD_H_ +#define VULKAN_VIDEO_CODEC_H265STD_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h265std is a preprocessor guard. Do not pass it to API calls. 
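+// As in the H.264 header above, the Std* types below carry raw H.265 (HEVC) syntax
+// elements -- VPS, SPS and PPS fields plus their HRD and VUI sub-structures -- with
+// names taken from the ITU-T H.265 specification.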
+#define vulkan_video_codec_h265std 1 +#include "vulkan_video_codecs_common.h" +#define STD_VIDEO_H265_CPB_CNT_LIST_SIZE 32 +#define STD_VIDEO_H265_SUBLAYERS_LIST_SIZE 7 +#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS 6 +#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS 16 +#define STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS 6 +#define STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS 64 +#define STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS 6 +#define STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS 64 +#define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS 2 +#define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS 64 +#define STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE 6 +#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE 19 +#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE 21 +#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE 3 +#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE 128 +#define STD_VIDEO_H265_MAX_NUM_LIST_REF 15 +#define STD_VIDEO_H265_MAX_CHROMA_PLANES 2 +#define STD_VIDEO_H265_MAX_SHORT_TERM_REF_PIC_SETS 64 +#define STD_VIDEO_H265_MAX_DPB_SIZE 16 +#define STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS 32 +#define STD_VIDEO_H265_MAX_LONG_TERM_PICS 16 +#define STD_VIDEO_H265_MAX_DELTA_POC 48 +#define STD_VIDEO_H265_NO_REFERENCE_PICTURE 0xFF + +typedef enum StdVideoH265ChromaFormatIdc { + STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME = 0, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_420 = 1, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_422 = 2, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_444 = 3, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265ChromaFormatIdc; + +typedef enum StdVideoH265ProfileIdc { + STD_VIDEO_H265_PROFILE_IDC_MAIN = 1, + STD_VIDEO_H265_PROFILE_IDC_MAIN_10 = 2, + STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE = 3, + STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS = 4, + STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS = 9, + STD_VIDEO_H265_PROFILE_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265ProfileIdc; + +typedef enum StdVideoH265LevelIdc { + STD_VIDEO_H265_LEVEL_IDC_1_0 = 0, + STD_VIDEO_H265_LEVEL_IDC_2_0 = 1, + STD_VIDEO_H265_LEVEL_IDC_2_1 = 2, + STD_VIDEO_H265_LEVEL_IDC_3_0 = 3, + STD_VIDEO_H265_LEVEL_IDC_3_1 = 4, + STD_VIDEO_H265_LEVEL_IDC_4_0 = 5, + STD_VIDEO_H265_LEVEL_IDC_4_1 = 6, + STD_VIDEO_H265_LEVEL_IDC_5_0 = 7, + STD_VIDEO_H265_LEVEL_IDC_5_1 = 8, + STD_VIDEO_H265_LEVEL_IDC_5_2 = 9, + STD_VIDEO_H265_LEVEL_IDC_6_0 = 10, + STD_VIDEO_H265_LEVEL_IDC_6_1 = 11, + STD_VIDEO_H265_LEVEL_IDC_6_2 = 12, + STD_VIDEO_H265_LEVEL_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265LevelIdc; + +typedef enum StdVideoH265SliceType { + STD_VIDEO_H265_SLICE_TYPE_B = 0, + STD_VIDEO_H265_SLICE_TYPE_P = 1, + STD_VIDEO_H265_SLICE_TYPE_I = 2, + STD_VIDEO_H265_SLICE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_SLICE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265SliceType; + +typedef enum StdVideoH265PictureType { + STD_VIDEO_H265_PICTURE_TYPE_P = 0, + STD_VIDEO_H265_PICTURE_TYPE_B = 1, + STD_VIDEO_H265_PICTURE_TYPE_I = 2, + STD_VIDEO_H265_PICTURE_TYPE_IDR = 3, + STD_VIDEO_H265_PICTURE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265PictureType; + +typedef enum StdVideoH265AspectRatioIdc { + STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED = 0, + STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE = 1, + STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11 = 2, + STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11 = 3, + 
STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11 = 4, + STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33 = 5, + STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11 = 6, + STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11 = 7, + STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11 = 8, + STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33 = 9, + STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11 = 10, + STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11 = 11, + STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33 = 12, + STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99 = 13, + STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3 = 14, + STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2 = 15, + STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1 = 16, + STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR = 255, + STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265AspectRatioIdc; +typedef struct StdVideoH265DecPicBufMgr { + uint32_t max_latency_increase_plus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint8_t max_dec_pic_buffering_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint8_t max_num_reorder_pics[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; +} StdVideoH265DecPicBufMgr; + +typedef struct StdVideoH265SubLayerHrdParameters { + uint32_t bit_rate_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t cpb_size_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t cpb_size_du_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t bit_rate_du_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t cbr_flag; +} StdVideoH265SubLayerHrdParameters; + +typedef struct StdVideoH265HrdFlags { + uint32_t nal_hrd_parameters_present_flag : 1; + uint32_t vcl_hrd_parameters_present_flag : 1; + uint32_t sub_pic_hrd_params_present_flag : 1; + uint32_t sub_pic_cpb_params_in_pic_timing_sei_flag : 1; + uint32_t fixed_pic_rate_general_flag : 8; + uint32_t fixed_pic_rate_within_cvs_flag : 8; + uint32_t low_delay_hrd_flag : 8; +} StdVideoH265HrdFlags; + +typedef struct StdVideoH265HrdParameters { + StdVideoH265HrdFlags flags; + uint8_t tick_divisor_minus2; + uint8_t du_cpb_removal_delay_increment_length_minus1; + uint8_t dpb_output_delay_du_length_minus1; + uint8_t bit_rate_scale; + uint8_t cpb_size_scale; + uint8_t cpb_size_du_scale; + uint8_t initial_cpb_removal_delay_length_minus1; + uint8_t au_cpb_removal_delay_length_minus1; + uint8_t dpb_output_delay_length_minus1; + uint8_t cpb_cnt_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint16_t elemental_duration_in_tc_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint16_t reserved[3]; + const StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersNal; + const StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersVcl; +} StdVideoH265HrdParameters; + +typedef struct StdVideoH265VpsFlags { + uint32_t vps_temporal_id_nesting_flag : 1; + uint32_t vps_sub_layer_ordering_info_present_flag : 1; + uint32_t vps_timing_info_present_flag : 1; + uint32_t vps_poc_proportional_to_timing_flag : 1; +} StdVideoH265VpsFlags; + +typedef struct StdVideoH265ProfileTierLevelFlags { + uint32_t general_tier_flag : 1; + uint32_t general_progressive_source_flag : 1; + uint32_t general_interlaced_source_flag : 1; + uint32_t general_non_packed_constraint_flag : 1; + uint32_t general_frame_only_constraint_flag : 1; +} StdVideoH265ProfileTierLevelFlags; + +typedef struct StdVideoH265ProfileTierLevel { + StdVideoH265ProfileTierLevelFlags flags; + StdVideoH265ProfileIdc general_profile_idc; + StdVideoH265LevelIdc general_level_idc; +} StdVideoH265ProfileTierLevel; + +typedef struct StdVideoH265VideoParameterSet { + StdVideoH265VpsFlags flags; + uint8_t vps_video_parameter_set_id; + uint8_t 
vps_max_sub_layers_minus1; + uint8_t reserved1; + uint8_t reserved2; + uint32_t vps_num_units_in_tick; + uint32_t vps_time_scale; + uint32_t vps_num_ticks_poc_diff_one_minus1; + uint32_t reserved3; + const StdVideoH265DecPicBufMgr* pDecPicBufMgr; + const StdVideoH265HrdParameters* pHrdParameters; + const StdVideoH265ProfileTierLevel* pProfileTierLevel; +} StdVideoH265VideoParameterSet; + +typedef struct StdVideoH265ScalingLists { + uint8_t ScalingList4x4[STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS]; + uint8_t ScalingList8x8[STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS]; + uint8_t ScalingList16x16[STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS]; + uint8_t ScalingList32x32[STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS]; + uint8_t ScalingListDCCoef16x16[STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS]; + uint8_t ScalingListDCCoef32x32[STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS]; +} StdVideoH265ScalingLists; + +typedef struct StdVideoH265SpsVuiFlags { + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t colour_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t neutral_chroma_indication_flag : 1; + uint32_t field_seq_flag : 1; + uint32_t frame_field_info_present_flag : 1; + uint32_t default_display_window_flag : 1; + uint32_t vui_timing_info_present_flag : 1; + uint32_t vui_poc_proportional_to_timing_flag : 1; + uint32_t vui_hrd_parameters_present_flag : 1; + uint32_t bitstream_restriction_flag : 1; + uint32_t tiles_fixed_structure_flag : 1; + uint32_t motion_vectors_over_pic_boundaries_flag : 1; + uint32_t restricted_ref_pic_lists_flag : 1; +} StdVideoH265SpsVuiFlags; + +typedef struct StdVideoH265SequenceParameterSetVui { + StdVideoH265SpsVuiFlags flags; + StdVideoH265AspectRatioIdc aspect_ratio_idc; + uint16_t sar_width; + uint16_t sar_height; + uint8_t video_format; + uint8_t colour_primaries; + uint8_t transfer_characteristics; + uint8_t matrix_coeffs; + uint8_t chroma_sample_loc_type_top_field; + uint8_t chroma_sample_loc_type_bottom_field; + uint8_t reserved1; + uint8_t reserved2; + uint16_t def_disp_win_left_offset; + uint16_t def_disp_win_right_offset; + uint16_t def_disp_win_top_offset; + uint16_t def_disp_win_bottom_offset; + uint32_t vui_num_units_in_tick; + uint32_t vui_time_scale; + uint32_t vui_num_ticks_poc_diff_one_minus1; + uint16_t min_spatial_segmentation_idc; + uint16_t reserved3; + uint8_t max_bytes_per_pic_denom; + uint8_t max_bits_per_min_cu_denom; + uint8_t log2_max_mv_length_horizontal; + uint8_t log2_max_mv_length_vertical; + const StdVideoH265HrdParameters* pHrdParameters; +} StdVideoH265SequenceParameterSetVui; + +typedef struct StdVideoH265PredictorPaletteEntries { + uint16_t PredictorPaletteEntries[STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE][STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE]; +} StdVideoH265PredictorPaletteEntries; + +typedef struct StdVideoH265SpsFlags { + uint32_t sps_temporal_id_nesting_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t conformance_window_flag : 1; + uint32_t sps_sub_layer_ordering_info_present_flag : 1; + uint32_t scaling_list_enabled_flag : 1; + uint32_t sps_scaling_list_data_present_flag : 1; + uint32_t 
amp_enabled_flag : 1; + uint32_t sample_adaptive_offset_enabled_flag : 1; + uint32_t pcm_enabled_flag : 1; + uint32_t pcm_loop_filter_disabled_flag : 1; + uint32_t long_term_ref_pics_present_flag : 1; + uint32_t sps_temporal_mvp_enabled_flag : 1; + uint32_t strong_intra_smoothing_enabled_flag : 1; + uint32_t vui_parameters_present_flag : 1; + uint32_t sps_extension_present_flag : 1; + uint32_t sps_range_extension_flag : 1; + uint32_t transform_skip_rotation_enabled_flag : 1; + uint32_t transform_skip_context_enabled_flag : 1; + uint32_t implicit_rdpcm_enabled_flag : 1; + uint32_t explicit_rdpcm_enabled_flag : 1; + uint32_t extended_precision_processing_flag : 1; + uint32_t intra_smoothing_disabled_flag : 1; + uint32_t high_precision_offsets_enabled_flag : 1; + uint32_t persistent_rice_adaptation_enabled_flag : 1; + uint32_t cabac_bypass_alignment_enabled_flag : 1; + uint32_t sps_scc_extension_flag : 1; + uint32_t sps_curr_pic_ref_enabled_flag : 1; + uint32_t palette_mode_enabled_flag : 1; + uint32_t sps_palette_predictor_initializers_present_flag : 1; + uint32_t intra_boundary_filtering_disabled_flag : 1; +} StdVideoH265SpsFlags; + +typedef struct StdVideoH265ShortTermRefPicSetFlags { + uint32_t inter_ref_pic_set_prediction_flag : 1; + uint32_t delta_rps_sign : 1; +} StdVideoH265ShortTermRefPicSetFlags; + +typedef struct StdVideoH265ShortTermRefPicSet { + StdVideoH265ShortTermRefPicSetFlags flags; + uint32_t delta_idx_minus1; + uint16_t use_delta_flag; + uint16_t abs_delta_rps_minus1; + uint16_t used_by_curr_pic_flag; + uint16_t used_by_curr_pic_s0_flag; + uint16_t used_by_curr_pic_s1_flag; + uint16_t reserved1; + uint8_t reserved2; + uint8_t reserved3; + uint8_t num_negative_pics; + uint8_t num_positive_pics; + uint16_t delta_poc_s0_minus1[STD_VIDEO_H265_MAX_DPB_SIZE]; + uint16_t delta_poc_s1_minus1[STD_VIDEO_H265_MAX_DPB_SIZE]; +} StdVideoH265ShortTermRefPicSet; + +typedef struct StdVideoH265LongTermRefPicsSps { + uint32_t used_by_curr_pic_lt_sps_flag; + uint32_t lt_ref_pic_poc_lsb_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS]; +} StdVideoH265LongTermRefPicsSps; + +typedef struct StdVideoH265SequenceParameterSet { + StdVideoH265SpsFlags flags; + StdVideoH265ChromaFormatIdc chroma_format_idc; + uint32_t pic_width_in_luma_samples; + uint32_t pic_height_in_luma_samples; + uint8_t sps_video_parameter_set_id; + uint8_t sps_max_sub_layers_minus1; + uint8_t sps_seq_parameter_set_id; + uint8_t bit_depth_luma_minus8; + uint8_t bit_depth_chroma_minus8; + uint8_t log2_max_pic_order_cnt_lsb_minus4; + uint8_t log2_min_luma_coding_block_size_minus3; + uint8_t log2_diff_max_min_luma_coding_block_size; + uint8_t log2_min_luma_transform_block_size_minus2; + uint8_t log2_diff_max_min_luma_transform_block_size; + uint8_t max_transform_hierarchy_depth_inter; + uint8_t max_transform_hierarchy_depth_intra; + uint8_t num_short_term_ref_pic_sets; + uint8_t num_long_term_ref_pics_sps; + uint8_t pcm_sample_bit_depth_luma_minus1; + uint8_t pcm_sample_bit_depth_chroma_minus1; + uint8_t log2_min_pcm_luma_coding_block_size_minus3; + uint8_t log2_diff_max_min_pcm_luma_coding_block_size; + uint8_t reserved1; + uint8_t reserved2; + uint8_t palette_max_size; + uint8_t delta_palette_max_predictor_size; + uint8_t motion_vector_resolution_control_idc; + uint8_t sps_num_palette_predictor_initializers_minus1; + uint32_t conf_win_left_offset; + uint32_t conf_win_right_offset; + uint32_t conf_win_top_offset; + uint32_t conf_win_bottom_offset; + const StdVideoH265ProfileTierLevel* pProfileTierLevel; + const 
StdVideoH265DecPicBufMgr* pDecPicBufMgr; + const StdVideoH265ScalingLists* pScalingLists; + const StdVideoH265ShortTermRefPicSet* pShortTermRefPicSet; + const StdVideoH265LongTermRefPicsSps* pLongTermRefPicsSps; + const StdVideoH265SequenceParameterSetVui* pSequenceParameterSetVui; + const StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries; +} StdVideoH265SequenceParameterSet; + +typedef struct StdVideoH265PpsFlags { + uint32_t dependent_slice_segments_enabled_flag : 1; + uint32_t output_flag_present_flag : 1; + uint32_t sign_data_hiding_enabled_flag : 1; + uint32_t cabac_init_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t transform_skip_enabled_flag : 1; + uint32_t cu_qp_delta_enabled_flag : 1; + uint32_t pps_slice_chroma_qp_offsets_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t weighted_bipred_flag : 1; + uint32_t transquant_bypass_enabled_flag : 1; + uint32_t tiles_enabled_flag : 1; + uint32_t entropy_coding_sync_enabled_flag : 1; + uint32_t uniform_spacing_flag : 1; + uint32_t loop_filter_across_tiles_enabled_flag : 1; + uint32_t pps_loop_filter_across_slices_enabled_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t deblocking_filter_override_enabled_flag : 1; + uint32_t pps_deblocking_filter_disabled_flag : 1; + uint32_t pps_scaling_list_data_present_flag : 1; + uint32_t lists_modification_present_flag : 1; + uint32_t slice_segment_header_extension_present_flag : 1; + uint32_t pps_extension_present_flag : 1; + uint32_t cross_component_prediction_enabled_flag : 1; + uint32_t chroma_qp_offset_list_enabled_flag : 1; + uint32_t pps_curr_pic_ref_enabled_flag : 1; + uint32_t residual_adaptive_colour_transform_enabled_flag : 1; + uint32_t pps_slice_act_qp_offsets_present_flag : 1; + uint32_t pps_palette_predictor_initializers_present_flag : 1; + uint32_t monochrome_palette_flag : 1; + uint32_t pps_range_extension_flag : 1; +} StdVideoH265PpsFlags; + +typedef struct StdVideoH265PictureParameterSet { + StdVideoH265PpsFlags flags; + uint8_t pps_pic_parameter_set_id; + uint8_t pps_seq_parameter_set_id; + uint8_t sps_video_parameter_set_id; + uint8_t num_extra_slice_header_bits; + uint8_t num_ref_idx_l0_default_active_minus1; + uint8_t num_ref_idx_l1_default_active_minus1; + int8_t init_qp_minus26; + uint8_t diff_cu_qp_delta_depth; + int8_t pps_cb_qp_offset; + int8_t pps_cr_qp_offset; + int8_t pps_beta_offset_div2; + int8_t pps_tc_offset_div2; + uint8_t log2_parallel_merge_level_minus2; + uint8_t log2_max_transform_skip_block_size_minus2; + uint8_t diff_cu_chroma_qp_offset_depth; + uint8_t chroma_qp_offset_list_len_minus1; + int8_t cb_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE]; + int8_t cr_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE]; + uint8_t log2_sao_offset_scale_luma; + uint8_t log2_sao_offset_scale_chroma; + int8_t pps_act_y_qp_offset_plus5; + int8_t pps_act_cb_qp_offset_plus5; + int8_t pps_act_cr_qp_offset_plus3; + uint8_t pps_num_palette_predictor_initializers; + uint8_t luma_bit_depth_entry_minus8; + uint8_t chroma_bit_depth_entry_minus8; + uint8_t num_tile_columns_minus1; + uint8_t num_tile_rows_minus1; + uint8_t reserved1; + uint8_t reserved2; + uint16_t column_width_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE]; + uint16_t row_height_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE]; + uint32_t reserved3; + const StdVideoH265ScalingLists* pScalingLists; + const StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries; +} 
StdVideoH265PictureParameterSet; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vk_video/vulkan_video_codec_h265std_decode.h b/MacroLibX/third_party/vk_video/vulkan_video_codec_h265std_decode.h new file mode 100644 index 0000000..75cf4d0 --- /dev/null +++ b/MacroLibX/third_party/vk_video/vulkan_video_codec_h265std_decode.h @@ -0,0 +1,67 @@ +#ifndef VULKAN_VIDEO_CODEC_H265STD_DECODE_H_ +#define VULKAN_VIDEO_CODEC_H265STD_DECODE_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h265std_decode is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_h265std_decode 1 +#include "vulkan_video_codec_h265std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_decode" +#define STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE 8 +typedef struct StdVideoDecodeH265PictureInfoFlags { + uint32_t IrapPicFlag : 1; + uint32_t IdrPicFlag : 1; + uint32_t IsReference : 1; + uint32_t short_term_ref_pic_set_sps_flag : 1; +} StdVideoDecodeH265PictureInfoFlags; + +typedef struct StdVideoDecodeH265PictureInfo { + StdVideoDecodeH265PictureInfoFlags flags; + uint8_t sps_video_parameter_set_id; + uint8_t pps_seq_parameter_set_id; + uint8_t pps_pic_parameter_set_id; + uint8_t NumDeltaPocsOfRefRpsIdx; + int32_t PicOrderCntVal; + uint16_t NumBitsForSTRefPicSetInSlice; + uint16_t reserved; + uint8_t RefPicSetStCurrBefore[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE]; + uint8_t RefPicSetStCurrAfter[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE]; + uint8_t RefPicSetLtCurr[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE]; +} StdVideoDecodeH265PictureInfo; + +typedef struct StdVideoDecodeH265ReferenceInfoFlags { + uint32_t used_for_long_term_reference : 1; + uint32_t unused_for_reference : 1; +} StdVideoDecodeH265ReferenceInfoFlags; + +typedef struct StdVideoDecodeH265ReferenceInfo { + StdVideoDecodeH265ReferenceInfoFlags flags; + int32_t PicOrderCntVal; +} StdVideoDecodeH265ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vk_video/vulkan_video_codec_h265std_encode.h b/MacroLibX/third_party/vk_video/vulkan_video_codec_h265std_encode.h new file mode 100644 index 0000000..c36b7e7 --- /dev/null +++ b/MacroLibX/third_party/vk_video/vulkan_video_codec_h265std_encode.h @@ -0,0 +1,157 @@ +#ifndef VULKAN_VIDEO_CODEC_H265STD_ENCODE_H_ +#define VULKAN_VIDEO_CODEC_H265STD_ENCODE_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h265std_encode is a preprocessor guard. Do not pass it to API calls. 
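+// A minimal sketch, not from the generated headers, of how the
+// VK_MAKE_VIDEO_STD_VERSION macro behind the *_API_VERSION_1_0_0 constants
+// packs major/minor/patch into a single uint32_t:
+//
+//     uint32_t v = VK_MAKE_VIDEO_STD_VERSION(1, 0, 0); // (1u << 22) | (0u << 12) | 0u == 0x00400000
+//     uint32_t major = v >> 22;            // bits 22..31 -> 1
+//     uint32_t minor = (v >> 12) & 0x3FFu; // bits 12..21 -> 0
+//     uint32_t patch = v & 0xFFFu;         // bits  0..11 -> 0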
+#define vulkan_video_codec_h265std_encode 1 +#include "vulkan_video_codec_h265std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_encode" +typedef struct StdVideoEncodeH265WeightTableFlags { + uint16_t luma_weight_l0_flag; + uint16_t chroma_weight_l0_flag; + uint16_t luma_weight_l1_flag; + uint16_t chroma_weight_l1_flag; +} StdVideoEncodeH265WeightTableFlags; + +typedef struct StdVideoEncodeH265WeightTable { + StdVideoEncodeH265WeightTableFlags flags; + uint8_t luma_log2_weight_denom; + int8_t delta_chroma_log2_weight_denom; + int8_t delta_luma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t luma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t delta_chroma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; + int8_t delta_chroma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; + int8_t delta_luma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t luma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t delta_chroma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; + int8_t delta_chroma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; +} StdVideoEncodeH265WeightTable; + +typedef struct StdVideoEncodeH265SliceSegmentHeaderFlags { + uint32_t first_slice_segment_in_pic_flag : 1; + uint32_t dependent_slice_segment_flag : 1; + uint32_t slice_sao_luma_flag : 1; + uint32_t slice_sao_chroma_flag : 1; + uint32_t num_ref_idx_active_override_flag : 1; + uint32_t mvd_l1_zero_flag : 1; + uint32_t cabac_init_flag : 1; + uint32_t cu_chroma_qp_offset_enabled_flag : 1; + uint32_t deblocking_filter_override_flag : 1; + uint32_t slice_deblocking_filter_disabled_flag : 1; + uint32_t collocated_from_l0_flag : 1; + uint32_t slice_loop_filter_across_slices_enabled_flag : 1; + uint32_t reserved : 20; +} StdVideoEncodeH265SliceSegmentHeaderFlags; + +typedef struct StdVideoEncodeH265SliceSegmentHeader { + StdVideoEncodeH265SliceSegmentHeaderFlags flags; + StdVideoH265SliceType slice_type; + uint32_t slice_segment_address; + uint8_t collocated_ref_idx; + uint8_t MaxNumMergeCand; + int8_t slice_cb_qp_offset; + int8_t slice_cr_qp_offset; + int8_t slice_beta_offset_div2; + int8_t slice_tc_offset_div2; + int8_t slice_act_y_qp_offset; + int8_t slice_act_cb_qp_offset; + int8_t slice_act_cr_qp_offset; + int8_t slice_qp_delta; + uint16_t reserved1; + const StdVideoEncodeH265WeightTable* pWeightTable; +} StdVideoEncodeH265SliceSegmentHeader; + +typedef struct StdVideoEncodeH265ReferenceListsInfoFlags { + uint32_t ref_pic_list_modification_flag_l0 : 1; + uint32_t ref_pic_list_modification_flag_l1 : 1; + uint32_t reserved : 30; +} StdVideoEncodeH265ReferenceListsInfoFlags; + +typedef struct StdVideoEncodeH265ReferenceListsInfo { + StdVideoEncodeH265ReferenceListsInfoFlags flags; + uint8_t num_ref_idx_l0_active_minus1; + uint8_t num_ref_idx_l1_active_minus1; + uint8_t RefPicList0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + uint8_t RefPicList1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + uint8_t list_entry_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + uint8_t list_entry_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; +} StdVideoEncodeH265ReferenceListsInfo; + +typedef struct StdVideoEncodeH265PictureInfoFlags { + uint32_t is_reference : 1; + uint32_t IrapPicFlag : 1; + 
uint32_t used_for_long_term_reference : 1;
+    uint32_t discardable_flag : 1;
+    uint32_t cross_layer_bla_flag : 1;
+    uint32_t pic_output_flag : 1;
+    uint32_t no_output_of_prior_pics_flag : 1;
+    uint32_t short_term_ref_pic_set_sps_flag : 1;
+    uint32_t slice_temporal_mvp_enabled_flag : 1;
+    uint32_t reserved : 23;
+} StdVideoEncodeH265PictureInfoFlags;
+
+typedef struct StdVideoEncodeH265LongTermRefPics {
+    uint8_t num_long_term_sps;
+    uint8_t num_long_term_pics;
+    uint8_t lt_idx_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS];
+    uint8_t poc_lsb_lt[STD_VIDEO_H265_MAX_LONG_TERM_PICS];
+    uint16_t used_by_curr_pic_lt_flag;
+    uint8_t delta_poc_msb_present_flag[STD_VIDEO_H265_MAX_DELTA_POC];
+    uint8_t delta_poc_msb_cycle_lt[STD_VIDEO_H265_MAX_DELTA_POC];
+} StdVideoEncodeH265LongTermRefPics;
+
+typedef struct StdVideoEncodeH265PictureInfo {
+    StdVideoEncodeH265PictureInfoFlags flags;
+    StdVideoH265PictureType pic_type;
+    uint8_t sps_video_parameter_set_id;
+    uint8_t pps_seq_parameter_set_id;
+    uint8_t pps_pic_parameter_set_id;
+    uint8_t short_term_ref_pic_set_idx;
+    int32_t PicOrderCntVal;
+    uint8_t TemporalId;
+    uint8_t reserved1[7];
+    const StdVideoEncodeH265ReferenceListsInfo* pRefLists;
+    const StdVideoH265ShortTermRefPicSet* pShortTermRefPicSet;
+    const StdVideoEncodeH265LongTermRefPics* pLongTermRefPics;
+} StdVideoEncodeH265PictureInfo;
+
+typedef struct StdVideoEncodeH265ReferenceInfoFlags {
+    uint32_t used_for_long_term_reference : 1;
+    uint32_t unused_for_reference : 1;
+    uint32_t reserved : 30;
+} StdVideoEncodeH265ReferenceInfoFlags;
+
+typedef struct StdVideoEncodeH265ReferenceInfo {
+    StdVideoEncodeH265ReferenceInfoFlags flags;
+    StdVideoH265PictureType pic_type;
+    int32_t PicOrderCntVal;
+    uint8_t TemporalId;
+} StdVideoEncodeH265ReferenceInfo;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/MacroLibX/third_party/vk_video/vulkan_video_codecs_common.h b/MacroLibX/third_party/vk_video/vulkan_video_codecs_common.h
new file mode 100644
index 0000000..6568975
--- /dev/null
+++ b/MacroLibX/third_party/vk_video/vulkan_video_codecs_common.h
@@ -0,0 +1,36 @@
+#ifndef VULKAN_VIDEO_CODECS_COMMON_H_
+#define VULKAN_VIDEO_CODECS_COMMON_H_ 1
+
+/*
+** Copyright 2015-2023 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+// vulkan_video_codecs_common is a preprocessor guard. Do not pass it to API calls.
+#define vulkan_video_codecs_common 1
+#if !defined(VK_NO_STDINT_H)
+    #include <stdint.h>
+#endif
+
+#define VK_MAKE_VIDEO_STD_VERSION(major, minor, patch) \
+    ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch)))
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/MacroLibX/third_party/vma.h b/MacroLibX/third_party/vma.h
new file mode 100644
index 0000000..60f5720
--- /dev/null
+++ b/MacroLibX/third_party/vma.h
@@ -0,0 +1,19558 @@
+//
+// Copyright (c) 2017-2022 Advanced Micro Devices, Inc. All rights reserved.
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// + +#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H +#define AMD_VULKAN_MEMORY_ALLOCATOR_H + +/** \mainpage Vulkan Memory Allocator + +Version 3.0.1 (2022-05-26) + +Copyright (c) 2017-2022 Advanced Micro Devices, Inc. All rights reserved. \n +License: MIT + +API documentation divided into groups: [Modules](modules.html) + +\section main_table_of_contents Table of contents + +- User guide + - \subpage quick_start + - [Project setup](@ref quick_start_project_setup) + - [Initialization](@ref quick_start_initialization) + - [Resource allocation](@ref quick_start_resource_allocation) + - \subpage choosing_memory_type + - [Usage](@ref choosing_memory_type_usage) + - [Required and preferred flags](@ref choosing_memory_type_required_preferred_flags) + - [Explicit memory types](@ref choosing_memory_type_explicit_memory_types) + - [Custom memory pools](@ref choosing_memory_type_custom_memory_pools) + - [Dedicated allocations](@ref choosing_memory_type_dedicated_allocations) + - \subpage memory_mapping + - [Mapping functions](@ref memory_mapping_mapping_functions) + - [Persistently mapped memory](@ref memory_mapping_persistently_mapped_memory) + - [Cache flush and invalidate](@ref memory_mapping_cache_control) + - \subpage staying_within_budget + - [Querying for budget](@ref staying_within_budget_querying_for_budget) + - [Controlling memory usage](@ref staying_within_budget_controlling_memory_usage) + - \subpage resource_aliasing + - \subpage custom_memory_pools + - [Choosing memory type index](@ref custom_memory_pools_MemTypeIndex) + - [Linear allocation algorithm](@ref linear_algorithm) + - [Free-at-once](@ref linear_algorithm_free_at_once) + - [Stack](@ref linear_algorithm_stack) + - [Double stack](@ref linear_algorithm_double_stack) + - [Ring buffer](@ref linear_algorithm_ring_buffer) + - \subpage defragmentation + - \subpage statistics + - [Numeric statistics](@ref statistics_numeric_statistics) + - [JSON dump](@ref statistics_json_dump) + - \subpage allocation_annotation + - [Allocation user data](@ref allocation_user_data) + - [Allocation names](@ref allocation_names) + - \subpage virtual_allocator + - \subpage debugging_memory_usage + - [Memory initialization](@ref debugging_memory_usage_initialization) + - [Margins](@ref debugging_memory_usage_margins) + - [Corruption detection](@ref debugging_memory_usage_corruption_detection) + - \subpage opengl_interop +- \subpage usage_patterns + - [GPU-only resource](@ref 
usage_patterns_gpu_only)
+  - [Staging copy for upload](@ref usage_patterns_staging_copy_upload)
+  - [Readback](@ref usage_patterns_readback)
+  - [Advanced data uploading](@ref usage_patterns_advanced_data_uploading)
+  - [Other use cases](@ref usage_patterns_other_use_cases)
+- \subpage configuration
+  - [Pointers to Vulkan functions](@ref config_Vulkan_functions)
+  - [Custom host memory allocator](@ref custom_memory_allocator)
+  - [Device memory allocation callbacks](@ref allocation_callbacks)
+  - [Device heap memory limit](@ref heap_memory_limit)
+- Extension support
+  - \subpage vk_khr_dedicated_allocation
+  - \subpage enabling_buffer_device_address
+  - \subpage vk_ext_memory_priority
+  - \subpage vk_amd_device_coherent_memory
+- \subpage general_considerations
+  - [Thread safety](@ref general_considerations_thread_safety)
+  - [Versioning and compatibility](@ref general_considerations_versioning_and_compatibility)
+  - [Validation layer warnings](@ref general_considerations_validation_layer_warnings)
+  - [Allocation algorithm](@ref general_considerations_allocation_algorithm)
+  - [Features not supported](@ref general_considerations_features_not_supported)
+
+\section main_see_also See also
+
+- [**Product page on GPUOpen**](https://gpuopen.com/gaming-product/vulkan-memory-allocator/)
+- [**Source repository on GitHub**](https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator)
+
+\defgroup group_init Library initialization
+
+\brief API elements related to the initialization and management of the entire library, especially #VmaAllocator object.
+
+\defgroup group_alloc Memory allocation
+
+\brief API elements related to the allocation, deallocation, and management of Vulkan memory, buffers, images.
+Most basic ones being: vmaCreateBuffer(), vmaCreateImage().
+
+\defgroup group_virtual Virtual allocator
+
+\brief API elements related to the mechanism of \ref virtual_allocator - using the core allocation algorithm
+for user-defined purpose without allocating any real GPU memory.
+
+\defgroup group_stats Statistics
+
+\brief API elements that query current status of the allocator, from memory usage, budget, to full dump of the internal state in JSON format.
+See documentation chapter: \ref statistics.
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef VULKAN_H_
+    #include <vulkan/vulkan.h>
+#endif
+
+// Define this macro to declare maximum supported Vulkan version in format AAABBBCCC,
+// where AAA = major, BBB = minor, CCC = patch.
+// If you want to use version > 1.0, it still needs to be enabled via VmaAllocatorCreateInfo::vulkanApiVersion.
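+//
+// A minimal sketch of a typical allocator setup, assuming `instance`,
+// `physicalDevice` and `device` handles created elsewhere: whatever default the
+// detection block below picks, the version you actually target must still be
+// passed at allocator creation:
+//
+//     VmaVulkanFunctions fns = {};
+//     fns.vkGetInstanceProcAddr = vkGetInstanceProcAddr; // needed with VMA_DYNAMIC_VULKAN_FUNCTIONS
+//     fns.vkGetDeviceProcAddr = vkGetDeviceProcAddr;
+//
+//     VmaAllocatorCreateInfo ci = {};
+//     ci.instance = instance;
+//     ci.physicalDevice = physicalDevice;
+//     ci.device = device;
+//     ci.vulkanApiVersion = VK_API_VERSION_1_2; // <= apiVersion given to vkCreateInstance
+//     ci.pVulkanFunctions = &fns;
+//
+//     VmaAllocator allocator = VK_NULL_HANDLE;
+//     VkResult res = vmaCreateAllocator(&ci, &allocator);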
+#if !defined(VMA_VULKAN_VERSION) + #if defined(VK_VERSION_1_3) + #define VMA_VULKAN_VERSION 1003000 + #elif defined(VK_VERSION_1_2) + #define VMA_VULKAN_VERSION 1002000 + #elif defined(VK_VERSION_1_1) + #define VMA_VULKAN_VERSION 1001000 + #else + #define VMA_VULKAN_VERSION 1000000 + #endif +#endif + +#if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS + extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr; + extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr; + extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties; + extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties; + extern PFN_vkAllocateMemory vkAllocateMemory; + extern PFN_vkFreeMemory vkFreeMemory; + extern PFN_vkMapMemory vkMapMemory; + extern PFN_vkUnmapMemory vkUnmapMemory; + extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges; + extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges; + extern PFN_vkBindBufferMemory vkBindBufferMemory; + extern PFN_vkBindImageMemory vkBindImageMemory; + extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; + extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements; + extern PFN_vkCreateBuffer vkCreateBuffer; + extern PFN_vkDestroyBuffer vkDestroyBuffer; + extern PFN_vkCreateImage vkCreateImage; + extern PFN_vkDestroyImage vkDestroyImage; + extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer; + #if VMA_VULKAN_VERSION >= 1001000 + extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2; + extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2; + extern PFN_vkBindBufferMemory2 vkBindBufferMemory2; + extern PFN_vkBindImageMemory2 vkBindImageMemory2; + extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2; + #endif // #if VMA_VULKAN_VERSION >= 1001000 +#endif // #if defined(__ANDROID__) && VMA_STATIC_VULKAN_FUNCTIONS && VK_NO_PROTOTYPES + +#if !defined(VMA_DEDICATED_ALLOCATION) + #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation + #define VMA_DEDICATED_ALLOCATION 1 + #else + #define VMA_DEDICATED_ALLOCATION 0 + #endif +#endif + +#if !defined(VMA_BIND_MEMORY2) + #if VK_KHR_bind_memory2 + #define VMA_BIND_MEMORY2 1 + #else + #define VMA_BIND_MEMORY2 0 + #endif +#endif + +#if !defined(VMA_MEMORY_BUDGET) + #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000) + #define VMA_MEMORY_BUDGET 1 + #else + #define VMA_MEMORY_BUDGET 0 + #endif +#endif + +// Defined to 1 when VK_KHR_buffer_device_address device extension or equivalent core Vulkan 1.2 feature is defined in its headers. +#if !defined(VMA_BUFFER_DEVICE_ADDRESS) + #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000 + #define VMA_BUFFER_DEVICE_ADDRESS 1 + #else + #define VMA_BUFFER_DEVICE_ADDRESS 0 + #endif +#endif + +// Defined to 1 when VK_EXT_memory_priority device extension is defined in Vulkan headers. +#if !defined(VMA_MEMORY_PRIORITY) + #if VK_EXT_memory_priority + #define VMA_MEMORY_PRIORITY 1 + #else + #define VMA_MEMORY_PRIORITY 0 + #endif +#endif + +// Defined to 1 when VK_KHR_external_memory device extension is defined in Vulkan headers. +#if !defined(VMA_EXTERNAL_MEMORY) + #if VK_KHR_external_memory + #define VMA_EXTERNAL_MEMORY 1 + #else + #define VMA_EXTERNAL_MEMORY 0 + #endif +#endif + +// Define these macros to decorate all public functions with additional code, +// before and after returned type, appropriately. 
This may be useful for +// exporting the functions when compiling VMA as a separate library. Example: +// #define VMA_CALL_PRE __declspec(dllexport) +// #define VMA_CALL_POST __cdecl +#ifndef VMA_CALL_PRE + #define VMA_CALL_PRE +#endif +#ifndef VMA_CALL_POST + #define VMA_CALL_POST +#endif + +// Define this macro to decorate pointers with an attribute specifying the +// length of the array they point to if they are not null. +// +// The length may be one of +// - The name of another parameter in the argument list where the pointer is declared +// - The name of another member in the struct where the pointer is declared +// - The name of a member of a struct type, meaning the value of that member in +// the context of the call. For example +// VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount"), +// this means the number of memory heaps available in the device associated +// with the VmaAllocator being dealt with. +#ifndef VMA_LEN_IF_NOT_NULL + #define VMA_LEN_IF_NOT_NULL(len) +#endif + +// The VMA_NULLABLE macro is defined to be _Nullable when compiling with Clang. +// see: https://clang.llvm.org/docs/AttributeReference.html#nullable +#ifndef VMA_NULLABLE + #ifdef __clang__ + #define VMA_NULLABLE _Nullable + #else + #define VMA_NULLABLE + #endif +#endif + +// The VMA_NOT_NULL macro is defined to be _Nonnull when compiling with Clang. +// see: https://clang.llvm.org/docs/AttributeReference.html#nonnull +#ifndef VMA_NOT_NULL + #ifdef __clang__ + #define VMA_NOT_NULL _Nonnull + #else + #define VMA_NOT_NULL + #endif +#endif + +// If non-dispatchable handles are represented as pointers then we can give +// then nullability annotations +#ifndef VMA_NOT_NULL_NON_DISPATCHABLE + #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) + #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL + #else + #define VMA_NOT_NULL_NON_DISPATCHABLE + #endif +#endif + +#ifndef VMA_NULLABLE_NON_DISPATCHABLE + #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) + #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE + #else + #define VMA_NULLABLE_NON_DISPATCHABLE + #endif +#endif + +#ifndef VMA_STATS_STRING_ENABLED + #define VMA_STATS_STRING_ENABLED 1 +#endif + +//////////////////////////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// +// +// INTERFACE +// +//////////////////////////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// + +// Sections for managing code placement in file, only for development purposes e.g. for convenient folding inside an IDE. +#ifndef _VMA_ENUM_DECLARATIONS + +/** +\addtogroup group_init +@{ +*/ + +/// Flags for created #VmaAllocator. +typedef enum VmaAllocatorCreateFlagBits +{ + /** \brief Allocator and all objects created from it will not be synchronized internally, so you must guarantee they are used from only one thread at a time or synchronized externally by you. + + Using this flag may increase performance because internal mutexes are not used. + */ + VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001, + /** \brief Enables usage of VK_KHR_dedicated_allocation extension. 
+ + The flag works only if VmaAllocatorCreateInfo::vulkanApiVersion `== VK_API_VERSION_1_0`. + When it is `VK_API_VERSION_1_1`, the flag is ignored because the extension has been promoted to Vulkan 1.1. + + Using this extension will automatically allocate dedicated blocks of memory for + some buffers and images instead of suballocating place for them out of bigger + memory blocks (as if you explicitly used #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT + flag) when it is recommended by the driver. It may improve performance on some + GPUs. + + You may set this flag only if you found out that following device extensions are + supported, you enabled them while creating Vulkan device passed as + VmaAllocatorCreateInfo::device, and you want them to be used internally by this + library: + + - VK_KHR_get_memory_requirements2 (device extension) + - VK_KHR_dedicated_allocation (device extension) + + When this flag is set, you can experience following warnings reported by Vulkan + validation layer. You can ignore them. + + > vkBindBufferMemory(): Binding memory to buffer 0x2d but vkGetBufferMemoryRequirements() has not been called on that buffer. + */ + VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT = 0x00000002, + /** + Enables usage of VK_KHR_bind_memory2 extension. + + The flag works only if VmaAllocatorCreateInfo::vulkanApiVersion `== VK_API_VERSION_1_0`. + When it is `VK_API_VERSION_1_1`, the flag is ignored because the extension has been promoted to Vulkan 1.1. + + You may set this flag only if you found out that this device extension is supported, + you enabled it while creating Vulkan device passed as VmaAllocatorCreateInfo::device, + and you want it to be used internally by this library. + + The extension provides functions `vkBindBufferMemory2KHR` and `vkBindImageMemory2KHR`, + which allow to pass a chain of `pNext` structures while binding. + This flag is required if you use `pNext` parameter in vmaBindBufferMemory2() or vmaBindImageMemory2(). + */ + VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT = 0x00000004, + /** + Enables usage of VK_EXT_memory_budget extension. + + You may set this flag only if you found out that this device extension is supported, + you enabled it while creating Vulkan device passed as VmaAllocatorCreateInfo::device, + and you want it to be used internally by this library, along with another instance extension + VK_KHR_get_physical_device_properties2, which is required by it (or Vulkan 1.1, where this extension is promoted). + + The extension provides query for current memory usage and budget, which will probably + be more accurate than an estimation used by the library otherwise. + */ + VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT = 0x00000008, + /** + Enables usage of VK_AMD_device_coherent_memory extension. + + You may set this flag only if you: + + - found out that this device extension is supported and enabled it while creating Vulkan device passed as VmaAllocatorCreateInfo::device, + - checked that `VkPhysicalDeviceCoherentMemoryFeaturesAMD::deviceCoherentMemory` is true and set it while creating the Vulkan device, + - want it to be used internally by this library. + + The extension and accompanying device feature provide access to memory types with + `VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD` and `VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD` flags. + They are useful mostly for writing breadcrumb markers - a common method for debugging GPU crash/hang/TDR. + + When the extension is not enabled, such memory types are still enumerated, but their usage is illegal. 
+ To protect from this error, if you don't create the allocator with this flag, it will refuse to allocate any memory or create a custom pool in such memory type, + returning `VK_ERROR_FEATURE_NOT_PRESENT`. + */ + VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT = 0x00000010, + /** + Enables usage of "buffer device address" feature, which allows you to use function + `vkGetBufferDeviceAddress*` to get raw GPU pointer to a buffer and pass it for usage inside a shader. + + You may set this flag only if you: + + 1. (For Vulkan version < 1.2) Found as available and enabled device extension + VK_KHR_buffer_device_address. + This extension is promoted to core Vulkan 1.2. + 2. Found as available and enabled device feature `VkPhysicalDeviceBufferDeviceAddressFeatures::bufferDeviceAddress`. + + When this flag is set, you can create buffers with `VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT` using VMA. + The library automatically adds `VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT` to + allocated memory blocks wherever it might be needed. + + For more information, see documentation chapter \ref enabling_buffer_device_address. + */ + VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT = 0x00000020, + /** + Enables usage of VK_EXT_memory_priority extension in the library. + + You may set this flag only if you found available and enabled this device extension, + along with `VkPhysicalDeviceMemoryPriorityFeaturesEXT::memoryPriority == VK_TRUE`, + while creating Vulkan device passed as VmaAllocatorCreateInfo::device. + + When this flag is used, VmaAllocationCreateInfo::priority and VmaPoolCreateInfo::priority + are used to set priorities of allocated Vulkan memory. Without it, these variables are ignored. + + A priority must be a floating-point value between 0 and 1, indicating the priority of the allocation relative to other memory allocations. + Larger values are higher priority. The granularity of the priorities is implementation-dependent. + It is automatically passed to every call to `vkAllocateMemory` done by the library using structure `VkMemoryPriorityAllocateInfoEXT`. + The value to be used for default priority is 0.5. + For more details, see the documentation of the VK_EXT_memory_priority extension. + */ + VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT = 0x00000040, + + VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaAllocatorCreateFlagBits; +/// See #VmaAllocatorCreateFlagBits. +typedef VkFlags VmaAllocatorCreateFlags; + +/** @} */ + +/** +\addtogroup group_alloc +@{ +*/ + +/// \brief Intended usage of the allocated memory. +typedef enum VmaMemoryUsage +{ + /** No intended memory usage specified. + Use other members of VmaAllocationCreateInfo to specify your requirements. + */ + VMA_MEMORY_USAGE_UNKNOWN = 0, + /** + \deprecated Obsolete, preserved for backward compatibility. + Prefers `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`. + */ + VMA_MEMORY_USAGE_GPU_ONLY = 1, + /** + \deprecated Obsolete, preserved for backward compatibility. + Guarantees `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` and `VK_MEMORY_PROPERTY_HOST_COHERENT_BIT`. + */ + VMA_MEMORY_USAGE_CPU_ONLY = 2, + /** + \deprecated Obsolete, preserved for backward compatibility. + Guarantees `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`, prefers `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`. + */ + VMA_MEMORY_USAGE_CPU_TO_GPU = 3, + /** + \deprecated Obsolete, preserved for backward compatibility. + Guarantees `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`, prefers `VK_MEMORY_PROPERTY_HOST_CACHED_BIT`. 
+ */ + VMA_MEMORY_USAGE_GPU_TO_CPU = 4, + /** + \deprecated Obsolete, preserved for backward compatibility. + Prefers not `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`. + */ + VMA_MEMORY_USAGE_CPU_COPY = 5, + /** + Lazily allocated GPU memory having `VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT`. + Exists mostly on mobile platforms. Using it on desktop PC or other GPUs with no such memory type present will fail the allocation. + + Usage: Memory for transient attachment images (color attachments, depth attachments etc.), created with `VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT`. + + Allocations with this usage are always created as dedicated - it implies #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. + */ + VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED = 6, + /** + Selects best memory type automatically. + This flag is recommended for most common use cases. + + When using this flag, if you want to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT), + you must pass one of the flags: #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT + in VmaAllocationCreateInfo::flags. + + It can be used only with functions that let the library know `VkBufferCreateInfo` or `VkImageCreateInfo`, e.g. + vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo() + and not with generic memory allocation functions. + */ + VMA_MEMORY_USAGE_AUTO = 7, + /** + Selects best memory type automatically with preference for GPU (device) memory. + + When using this flag, if you want to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT), + you must pass one of the flags: #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT + in VmaAllocationCreateInfo::flags. + + It can be used only with functions that let the library know `VkBufferCreateInfo` or `VkImageCreateInfo`, e.g. + vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo() + and not with generic memory allocation functions. + */ + VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE = 8, + /** + Selects best memory type automatically with preference for CPU (host) memory. + + When using this flag, if you want to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT), + you must pass one of the flags: #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT + in VmaAllocationCreateInfo::flags. + + It can be used only with functions that let the library know `VkBufferCreateInfo` or `VkImageCreateInfo`, e.g. + vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo() + and not with generic memory allocation functions. + */ + VMA_MEMORY_USAGE_AUTO_PREFER_HOST = 9, + + VMA_MEMORY_USAGE_MAX_ENUM = 0x7FFFFFFF +} VmaMemoryUsage; + +/// Flags to be passed as VmaAllocationCreateInfo::flags. +typedef enum VmaAllocationCreateFlagBits +{ + /** \brief Set this flag if the allocation should have its own memory block. + + Use it for special, big resources, like fullscreen images used as attachments. + */ + VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT = 0x00000001, + + /** \brief Set this flag to only try to allocate from existing `VkDeviceMemory` blocks and never create new such block. + + If new allocation cannot be placed in any of the existing blocks, allocation + fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY` error. 
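+
+    A minimal sketch of the #VMA_MEMORY_USAGE_AUTO pattern documented above,
+    assuming an `allocator` created earlier:
+
+    \code
+    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+    bufCreateInfo.size = 65536;
+    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+    VmaAllocationCreateInfo allocCreateInfo = {};
+    allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+
+    VkBuffer buffer;
+    VmaAllocation allocation;
+    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
+        &buffer, &allocation, NULL);
+    // ... use the buffer ...
+    vmaDestroyBuffer(allocator, buffer, allocation);
+    \endcode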
+ + You should not use #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT and + #VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT at the same time. It makes no sense. + */ + VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT = 0x00000002, + /** \brief Set this flag to use a memory that will be persistently mapped and retrieve pointer to it. + + Pointer to mapped memory will be returned through VmaAllocationInfo::pMappedData. + + It is valid to use this flag for allocation made from memory type that is not + `HOST_VISIBLE`. This flag is then ignored and memory is not mapped. This is + useful if you need an allocation that is efficient to use on GPU + (`DEVICE_LOCAL`) and still want to map it directly if possible on platforms that + support it (e.g. Intel GPU). + */ + VMA_ALLOCATION_CREATE_MAPPED_BIT = 0x00000004, + /** \deprecated Preserved for backward compatibility. Consider using vmaSetAllocationName() instead. + + Set this flag to treat VmaAllocationCreateInfo::pUserData as pointer to a + null-terminated string. Instead of copying pointer value, a local copy of the + string is made and stored in allocation's `pName`. The string is automatically + freed together with the allocation. It is also used in vmaBuildStatsString(). + */ + VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT = 0x00000020, + /** Allocation will be created from upper stack in a double stack pool. + + This flag is only allowed for custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT flag. + */ + VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT = 0x00000040, + /** Create both buffer/image and allocation, but don't bind them together. + It is useful when you want to bind yourself to do some more advanced binding, e.g. using some extensions. + The flag is meaningful only with functions that bind by default: vmaCreateBuffer(), vmaCreateImage(). + Otherwise it is ignored. + + If you want to make sure the new buffer/image is not tied to the new memory allocation + through `VkMemoryDedicatedAllocateInfoKHR` structure in case the allocation ends up in its own memory block, + use also flag #VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT. + */ + VMA_ALLOCATION_CREATE_DONT_BIND_BIT = 0x00000080, + /** Create allocation only if additional device memory required for it, if any, won't exceed + memory budget. Otherwise return `VK_ERROR_OUT_OF_DEVICE_MEMORY`. + */ + VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT = 0x00000100, + /** \brief Set this flag if the allocated memory will have aliasing resources. + + Usage of this flag prevents supplying `VkMemoryDedicatedAllocateInfoKHR` when #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT is specified. + Otherwise created dedicated memory will not be suitable for aliasing resources, resulting in Vulkan Validation Layer errors. + */ + VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT = 0x00000200, + /** + Requests possibility to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT). + + - If you use #VMA_MEMORY_USAGE_AUTO or other `VMA_MEMORY_USAGE_AUTO*` value, + you must use this flag to be able to map the allocation. Otherwise, mapping is incorrect. + - If you use other value of #VmaMemoryUsage, this flag is ignored and mapping is always possible in memory types that are `HOST_VISIBLE`. + This includes allocations created in \ref custom_memory_pools. + + Declares that mapped memory will only be written sequentially, e.g. using `memcpy()` or a loop writing number-by-number, + never read or accessed randomly, so a memory type can be selected that is uncached and write-combined. 
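+
+    A minimal sketch of such a sequentially-written staging buffer, assuming an
+    `allocator` created earlier and a host-side `srcData` buffer:
+
+    \code
+    VkBufferCreateInfo stagingBufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+    stagingBufInfo.size = 65536;
+    stagingBufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+    VmaAllocationCreateInfo stagingAllocCreateInfo = {};
+    stagingAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+    stagingAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
+        VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+    VkBuffer stagingBuf;
+    VmaAllocation stagingAlloc;
+    VmaAllocationInfo stagingAllocInfo;
+    vmaCreateBuffer(allocator, &stagingBufInfo, &stagingAllocCreateInfo,
+        &stagingBuf, &stagingAlloc, &stagingAllocInfo);
+
+    memcpy(stagingAllocInfo.pMappedData, srcData, 65536); // write-only, sequential
+    \endcode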
+
+    \warning Violating this declaration may work correctly, but will likely be very slow.
+    Watch out for implicit reads introduced by doing e.g. `pMappedData[i] += x;`
+    Better prepare your data in a local variable and `memcpy()` it to the mapped pointer all at once.
+    */
+    VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT = 0x00000400,
+    /**
+    Requests possibility to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT).
+
+    - If you use #VMA_MEMORY_USAGE_AUTO or other `VMA_MEMORY_USAGE_AUTO*` value,
+      you must use this flag to be able to map the allocation. Otherwise, mapping is incorrect.
+    - If you use other value of #VmaMemoryUsage, this flag is ignored and mapping is always possible in memory types that are `HOST_VISIBLE`.
+      This includes allocations created in \ref custom_memory_pools.
+
+    Declares that mapped memory can be read, written, and accessed in random order,
+    so a `HOST_CACHED` memory type is required.
+    */
+    VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT = 0x00000800,
+    /**
+    Together with #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT,
+    it says that despite request for host access, a not-`HOST_VISIBLE` memory type can be selected
+    if it may improve performance.
+
+    By using this flag, you declare that you will check if the allocation ended up in a `HOST_VISIBLE` memory type
+    (e.g. using vmaGetAllocationMemoryProperties()) and if not, you will create some "staging" buffer and
+    issue an explicit transfer to write/read your data.
+    To prepare for this possibility, don't forget to add appropriate flags like
+    `VK_BUFFER_USAGE_TRANSFER_DST_BIT`, `VK_BUFFER_USAGE_TRANSFER_SRC_BIT` to the parameters of created buffer or image.
+    */
+    VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT = 0x00001000,
+    /** Allocation strategy that chooses smallest possible free range for the allocation
+    to minimize memory usage and fragmentation, possibly at the expense of allocation time.
+    */
+    VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT = 0x00010000,
+    /** Allocation strategy that chooses first suitable free range for the allocation -
+    not necessarily in terms of the smallest offset but the one that is easiest and fastest to find
+    to minimize allocation time, possibly at the expense of allocation quality.
+    */
+    VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT = 0x00020000,
+    /** Allocation strategy that chooses always the lowest offset in available space.
+    This is not the most efficient strategy but achieves highly packed data.
+    Used internally by defragmentation, not recommended in typical usage.
+    */
+    VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT = 0x00040000,
+    /** Alias to #VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT.
+    */
+    VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT,
+    /** Alias to #VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT.
+    */
+    VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT,
+    /** A bit mask to extract only `STRATEGY` bits from entire set of flags.
+    */
+    VMA_ALLOCATION_CREATE_STRATEGY_MASK =
+        VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT |
+        VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT |
+        VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT,
+
+    VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VmaAllocationCreateFlagBits;
+/// See #VmaAllocationCreateFlagBits.
+typedef VkFlags VmaAllocationCreateFlags;
+
+/// Flags to be passed as VmaPoolCreateInfo::flags.
+typedef enum VmaPoolCreateFlagBits
+{
+    /** \brief Use this flag if you always allocate only buffers and linear images or only optimal images out of this pool and so Buffer-Image Granularity can be ignored.
+
+    This is an optional optimization flag.
+
+    If you always allocate using vmaCreateBuffer(), vmaCreateImage(),
+    vmaAllocateMemoryForBuffer(), then you don't need to use it because allocator
+    knows exact type of your allocations so it can handle Buffer-Image Granularity
+    in the optimal way.
+
+    If you also allocate using vmaAllocateMemoryForImage() or vmaAllocateMemory(),
+    exact type of such allocations is not known, so allocator must be conservative
+    in handling Buffer-Image Granularity, which can lead to suboptimal allocation
+    (wasted memory). In that case, if you can make sure you always allocate only
+    buffers and linear images or only optimal images out of this pool, use this flag
+    to make allocator disregard Buffer-Image Granularity and so make allocations
+    faster and more optimal.
+    */
+    VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT = 0x00000002,
+
+    /** \brief Enables alternative, linear allocation algorithm in this pool.
+
+    Specify this flag to enable linear allocation algorithm, which always creates
+    new allocations after last one and doesn't reuse space from allocations freed in
+    between. It trades memory consumption for simplified algorithm and data
+    structure, which has better performance and uses less memory for metadata.
+
+    By using this flag, you can achieve behavior of free-at-once, stack,
+    ring buffer, and double stack.
+    For details, see documentation chapter \ref linear_algorithm.
+    */
+    VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT = 0x00000004,
+
+    /** Bit mask to extract only `ALGORITHM` bits from entire set of flags.
+    */
+    VMA_POOL_CREATE_ALGORITHM_MASK =
+        VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT,
+
+    VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VmaPoolCreateFlagBits;
+/// Flags to be passed as VmaPoolCreateInfo::flags. See #VmaPoolCreateFlagBits.
+typedef VkFlags VmaPoolCreateFlags;
+
+/// Flags to be passed as VmaDefragmentationInfo::flags.
+typedef enum VmaDefragmentationFlagBits
+{
+    /** \brief Use simple but fast algorithm for defragmentation.
+    May not achieve best results but will require least time to compute and least allocations to copy.
+    */
+    VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT = 0x1,
+    /** \brief Default defragmentation algorithm, applied also when no `ALGORITHM` flag is specified.
+    Offers a balance between defragmentation quality and the amount of allocations and bytes that need to be moved.
+    */
+    VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT = 0x2,
+    /** \brief Perform full defragmentation of memory.
+    Can result in notably more time to compute and allocations to copy, but will achieve best memory packing.
+    */
+    VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT = 0x4,
+    /** \brief Use the most robust algorithm at the cost of time to compute and number of copies to make.
+    Only available when bufferImageGranularity is greater than 1, since it aims to reduce
+    alignment issues between different types of resources.
+    Otherwise falls back to same behavior as #VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT.
+    */
+    VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT = 0x8,
+
+    /// A bit mask to extract only `ALGORITHM` bits from entire set of flags.
+ VMA_DEFRAGMENTATION_FLAG_ALGORITHM_MASK = + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT | + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT | + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT | + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT, + + VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaDefragmentationFlagBits; +/// See #VmaDefragmentationFlagBits. +typedef VkFlags VmaDefragmentationFlags; + +/// Operation performed on single defragmentation move. See structure #VmaDefragmentationMove. +typedef enum VmaDefragmentationMoveOperation +{ + /// Buffer/image has been recreated at `dstTmpAllocation`, data has been copied, old buffer/image has been destroyed. `srcAllocation` should be changed to point to the new place. This is the default value set by vmaBeginDefragmentationPass(). + VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY = 0, + /// Set this value if you cannot move the allocation. New place reserved at `dstTmpAllocation` will be freed. `srcAllocation` will remain unchanged. + VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE = 1, + /// Set this value if you decide to abandon the allocation and you destroyed the buffer/image. New place reserved at `dstTmpAllocation` will be freed, along with `srcAllocation`, which will be destroyed. + VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY = 2, +} VmaDefragmentationMoveOperation; + +/** @} */ + +/** +\addtogroup group_virtual +@{ +*/ + +/// Flags to be passed as VmaVirtualBlockCreateInfo::flags. +typedef enum VmaVirtualBlockCreateFlagBits +{ + /** \brief Enables alternative, linear allocation algorithm in this virtual block. + + Specify this flag to enable linear allocation algorithm, which always creates + new allocations after last one and doesn't reuse space from allocations freed in + between. It trades memory consumption for simplified algorithm and data + structure, which has better performance and uses less memory for metadata. + + By using this flag, you can achieve behavior of free-at-once, stack, + ring buffer, and double stack. + For details, see documentation chapter \ref linear_algorithm. + */ + VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT = 0x00000001, + + /** \brief Bit mask to extract only `ALGORITHM` bits from entire set of flags. + */ + VMA_VIRTUAL_BLOCK_CREATE_ALGORITHM_MASK = + VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT, + + VMA_VIRTUAL_BLOCK_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaVirtualBlockCreateFlagBits; +/// Flags to be passed as VmaVirtualBlockCreateInfo::flags. See #VmaVirtualBlockCreateFlagBits. +typedef VkFlags VmaVirtualBlockCreateFlags; + +/// Flags to be passed as VmaVirtualAllocationCreateInfo::flags. +typedef enum VmaVirtualAllocationCreateFlagBits +{ + /** \brief Allocation will be created from upper stack in a double stack pool. + + This flag is only allowed for virtual blocks created with #VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT flag. + */ + VMA_VIRTUAL_ALLOCATION_CREATE_UPPER_ADDRESS_BIT = VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT, + /** \brief Allocation strategy that tries to minimize memory usage. + */ + VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT, + /** \brief Allocation strategy that tries to minimize allocation time. + */ + VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT, + /** Allocation strategy that chooses always the lowest offset in available space. + This is not the most efficient strategy but achieves highly packed data. 
+ */ + VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, + /** \brief A bit mask to extract only `STRATEGY` bits from entire set of flags. + + These strategy flags are binary compatible with equivalent flags in #VmaAllocationCreateFlagBits. + */ + VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MASK = VMA_ALLOCATION_CREATE_STRATEGY_MASK, + + VMA_VIRTUAL_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaVirtualAllocationCreateFlagBits; +/// Flags to be passed as VmaVirtualAllocationCreateInfo::flags. See #VmaVirtualAllocationCreateFlagBits. +typedef VkFlags VmaVirtualAllocationCreateFlags; + +/** @} */ + +#endif // _VMA_ENUM_DECLARATIONS + +#ifndef _VMA_DATA_TYPES_DECLARATIONS + +/** +\addtogroup group_init +@{ */ + +/** \struct VmaAllocator +\brief Represents main object of this library initialized. + +Fill structure #VmaAllocatorCreateInfo and call function vmaCreateAllocator() to create it. +Call function vmaDestroyAllocator() to destroy it. + +It is recommended to create just one object of this type per `VkDevice` object, +right after Vulkan is initialized and keep it alive until before Vulkan device is destroyed. +*/ +VK_DEFINE_HANDLE(VmaAllocator) + +/** @} */ + +/** +\addtogroup group_alloc +@{ +*/ + +/** \struct VmaPool +\brief Represents custom memory pool + +Fill structure VmaPoolCreateInfo and call function vmaCreatePool() to create it. +Call function vmaDestroyPool() to destroy it. + +For more information see [Custom memory pools](@ref choosing_memory_type_custom_memory_pools). +*/ +VK_DEFINE_HANDLE(VmaPool) + +/** \struct VmaAllocation +\brief Represents single memory allocation. + +It may be either dedicated block of `VkDeviceMemory` or a specific region of a bigger block of this type +plus unique offset. + +There are multiple ways to create such object. +You need to fill structure VmaAllocationCreateInfo. +For more information see [Choosing memory type](@ref choosing_memory_type). + +Although the library provides convenience functions that create Vulkan buffer or image, +allocate memory for it and bind them together, +binding of the allocation to a buffer or an image is out of scope of the allocation itself. +Allocation object can exist without buffer/image bound, +binding can be done manually by the user, and destruction of it can be done +independently of destruction of the allocation. + +The object also remembers its size and some other information. +To retrieve this information, use function vmaGetAllocationInfo() and inspect +returned structure VmaAllocationInfo. +*/ +VK_DEFINE_HANDLE(VmaAllocation) + +/** \struct VmaDefragmentationContext +\brief An opaque object that represents started defragmentation process. + +Fill structure #VmaDefragmentationInfo and call function vmaBeginDefragmentation() to create it. +Call function vmaEndDefragmentation() to destroy it. +*/ +VK_DEFINE_HANDLE(VmaDefragmentationContext) + +/** @} */ + +/** +\addtogroup group_virtual +@{ +*/ + +/** \struct VmaVirtualAllocation +\brief Represents single memory allocation done inside VmaVirtualBlock. + +Use it as a unique identifier to virtual allocation within the single block. + +Use value `VK_NULL_HANDLE` to represent a null/invalid allocation. +*/ +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VmaVirtualAllocation); + +/** @} */ + +/** +\addtogroup group_virtual +@{ +*/ + +/** \struct VmaVirtualBlock +\brief Handle to a virtual block object that allows to use core allocation algorithm without allocating any real GPU memory. 
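+
+A minimal sketch of sub-allocating a user-managed region through a virtual
+block (the names are illustrative only):
+
+\code
+VmaVirtualBlockCreateInfo blockCreateInfo = {};
+blockCreateInfo.size = 1048576; // 1 MiB of "virtual" space to manage
+
+VmaVirtualBlock block;
+VkResult res = vmaCreateVirtualBlock(&blockCreateInfo, &block);
+
+VmaVirtualAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.size = 4096;
+
+VmaVirtualAllocation alloc;
+VkDeviceSize offset;
+res = vmaVirtualAllocate(block, &allocCreateInfo, &alloc, &offset);
+// bytes [offset, offset + 4096) of your own resource are now reserved...
+
+vmaVirtualFree(block, alloc);
+vmaDestroyVirtualBlock(block);
+\endcode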
+ +Fill in #VmaVirtualBlockCreateInfo structure and use vmaCreateVirtualBlock() to create it. Use vmaDestroyVirtualBlock() to destroy it. +For more information, see documentation chapter \ref virtual_allocator. + +This object is not thread-safe - should not be used from multiple threads simultaneously, must be synchronized externally. +*/ +VK_DEFINE_HANDLE(VmaVirtualBlock) + +/** @} */ + +/** +\addtogroup group_init +@{ +*/ + +/// Callback function called after successful vkAllocateMemory. +typedef void (VKAPI_PTR* PFN_vmaAllocateDeviceMemoryFunction)( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t memoryType, + VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory, + VkDeviceSize size, + void* VMA_NULLABLE pUserData); + +/// Callback function called before vkFreeMemory. +typedef void (VKAPI_PTR* PFN_vmaFreeDeviceMemoryFunction)( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t memoryType, + VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory, + VkDeviceSize size, + void* VMA_NULLABLE pUserData); + +/** \brief Set of callbacks that the library will call for `vkAllocateMemory` and `vkFreeMemory`. + +Provided for informative purpose, e.g. to gather statistics about number of +allocations or total amount of memory allocated in Vulkan. + +Used in VmaAllocatorCreateInfo::pDeviceMemoryCallbacks. +*/ +typedef struct VmaDeviceMemoryCallbacks +{ + /// Optional, can be null. + PFN_vmaAllocateDeviceMemoryFunction VMA_NULLABLE pfnAllocate; + /// Optional, can be null. + PFN_vmaFreeDeviceMemoryFunction VMA_NULLABLE pfnFree; + /// Optional, can be null. + void* VMA_NULLABLE pUserData; +} VmaDeviceMemoryCallbacks; + +/** \brief Pointers to some Vulkan functions - a subset used by the library. + +Used in VmaAllocatorCreateInfo::pVulkanFunctions. +*/ +typedef struct VmaVulkanFunctions +{ + /// Required when using VMA_DYNAMIC_VULKAN_FUNCTIONS. + PFN_vkGetInstanceProcAddr VMA_NULLABLE vkGetInstanceProcAddr; + /// Required when using VMA_DYNAMIC_VULKAN_FUNCTIONS. + PFN_vkGetDeviceProcAddr VMA_NULLABLE vkGetDeviceProcAddr; + PFN_vkGetPhysicalDeviceProperties VMA_NULLABLE vkGetPhysicalDeviceProperties; + PFN_vkGetPhysicalDeviceMemoryProperties VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties; + PFN_vkAllocateMemory VMA_NULLABLE vkAllocateMemory; + PFN_vkFreeMemory VMA_NULLABLE vkFreeMemory; + PFN_vkMapMemory VMA_NULLABLE vkMapMemory; + PFN_vkUnmapMemory VMA_NULLABLE vkUnmapMemory; + PFN_vkFlushMappedMemoryRanges VMA_NULLABLE vkFlushMappedMemoryRanges; + PFN_vkInvalidateMappedMemoryRanges VMA_NULLABLE vkInvalidateMappedMemoryRanges; + PFN_vkBindBufferMemory VMA_NULLABLE vkBindBufferMemory; + PFN_vkBindImageMemory VMA_NULLABLE vkBindImageMemory; + PFN_vkGetBufferMemoryRequirements VMA_NULLABLE vkGetBufferMemoryRequirements; + PFN_vkGetImageMemoryRequirements VMA_NULLABLE vkGetImageMemoryRequirements; + PFN_vkCreateBuffer VMA_NULLABLE vkCreateBuffer; + PFN_vkDestroyBuffer VMA_NULLABLE vkDestroyBuffer; + PFN_vkCreateImage VMA_NULLABLE vkCreateImage; + PFN_vkDestroyImage VMA_NULLABLE vkDestroyImage; + PFN_vkCmdCopyBuffer VMA_NULLABLE vkCmdCopyBuffer; +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + /// Fetch "vkGetBufferMemoryRequirements2" on Vulkan >= 1.1, fetch "vkGetBufferMemoryRequirements2KHR" when using VK_KHR_dedicated_allocation extension. + PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR; + /// Fetch "vkGetImageMemoryRequirements2" on Vulkan >= 1.1, fetch "vkGetImageMemoryRequirements2KHR" when using VK_KHR_dedicated_allocation extension. 
+    PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR;
+#endif
+#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
+    /// Fetch "vkBindBufferMemory2" on Vulkan >= 1.1, fetch "vkBindBufferMemory2KHR" when using VK_KHR_bind_memory2 extension.
+    PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR;
+    /// Fetch "vkBindImageMemory2" on Vulkan >= 1.1, fetch "vkBindImageMemory2KHR" when using VK_KHR_bind_memory2 extension.
+    PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR;
+#endif
+#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
+    PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR;
+#endif
+#if VMA_VULKAN_VERSION >= 1003000
+    /// Fetch from "vkGetDeviceBufferMemoryRequirements" on Vulkan >= 1.3, but you can also fetch it from "vkGetDeviceBufferMemoryRequirementsKHR" if you enabled extension VK_KHR_maintenance4.
+    PFN_vkGetDeviceBufferMemoryRequirements VMA_NULLABLE vkGetDeviceBufferMemoryRequirements;
+    /// Fetch from "vkGetDeviceImageMemoryRequirements" on Vulkan >= 1.3, but you can also fetch it from "vkGetDeviceImageMemoryRequirementsKHR" if you enabled extension VK_KHR_maintenance4.
+    PFN_vkGetDeviceImageMemoryRequirements VMA_NULLABLE vkGetDeviceImageMemoryRequirements;
+#endif
+} VmaVulkanFunctions;
+
+/// Description of an Allocator to be created.
+typedef struct VmaAllocatorCreateInfo
+{
+    /// Flags for created allocator. Use #VmaAllocatorCreateFlagBits enum.
+    VmaAllocatorCreateFlags flags;
+    /// Vulkan physical device.
+    /** It must be valid throughout whole lifetime of created allocator. */
+    VkPhysicalDevice VMA_NOT_NULL physicalDevice;
+    /// Vulkan device.
+    /** It must be valid throughout whole lifetime of created allocator. */
+    VkDevice VMA_NOT_NULL device;
+    /// Preferred size of a single `VkDeviceMemory` block to be allocated from large heaps > 1 GiB. Optional.
+    /** Set to 0 to use default, which is currently 256 MiB. */
+    VkDeviceSize preferredLargeHeapBlockSize;
+    /// Custom CPU memory allocation callbacks. Optional.
+    /** Optional, can be null. When specified, will also be used for all CPU-side memory allocations. */
+    const VkAllocationCallbacks* VMA_NULLABLE pAllocationCallbacks;
+    /// Informative callbacks for `vkAllocateMemory`, `vkFreeMemory`. Optional.
+    /** Optional, can be null. */
+    const VmaDeviceMemoryCallbacks* VMA_NULLABLE pDeviceMemoryCallbacks;
+    /** \brief Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out of particular Vulkan memory heap.
+
+    If not NULL, it must be a pointer to an array of
+    `VkPhysicalDeviceMemoryProperties::memoryHeapCount` elements, defining limit on
+    maximum number of bytes that can be allocated out of particular Vulkan memory
+    heap.
+
+    Any of the elements may be equal to `VK_WHOLE_SIZE`, which means no limit on that
+    heap. This is also the default in case of `pHeapSizeLimit` = NULL.
+
+    If there is a limit defined for a heap:
+
+    - If user tries to allocate more memory from that heap using this allocator,
+      the allocation fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
+    - If the limit is smaller than heap size reported in `VkMemoryHeap::size`, the
+      value of this limit will be reported instead when using vmaGetMemoryProperties().
+
+    Warning! Using this feature may not be equivalent to installing a GPU with
+    smaller amount of memory, because graphics driver doesn't necessarily fail new
+    allocations with `VK_ERROR_OUT_OF_DEVICE_MEMORY` result when memory capacity is
+    exceeded.
+It may return success and just silently migrate some device memory
+    blocks to system RAM. This driver behavior can also be controlled using
+    VK_AMD_memory_overallocation_behavior extension.
+    */
+    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pHeapSizeLimit;
+
+    /** \brief Pointers to Vulkan functions. Can be null.
+
+    For details see [Pointers to Vulkan functions](@ref config_Vulkan_functions).
+    */
+    const VmaVulkanFunctions* VMA_NULLABLE pVulkanFunctions;
+    /** \brief Handle to Vulkan instance object.
+
+    Starting from version 3.0.0, this member is no longer optional; it must be set!
+    */
+    VkInstance VMA_NOT_NULL instance;
+    /** \brief Optional. The highest version of Vulkan that the application is designed to use.
+
+    It must be a value in the format as created by macro `VK_MAKE_VERSION` or a constant like: `VK_API_VERSION_1_1`, `VK_API_VERSION_1_0`.
+    The patch version number specified is ignored. Only the major and minor versions are considered.
+    It must be less than or equal (preferably equal) to the value passed to `vkCreateInstance` as `VkApplicationInfo::apiVersion`.
+    Only versions 1.0, 1.1, 1.2, 1.3 are supported by the current implementation.
+    Leaving it initialized to zero is equivalent to `VK_API_VERSION_1_0`.
+    */
+    uint32_t vulkanApiVersion;
+#if VMA_EXTERNAL_MEMORY
+    /** \brief Either null or a pointer to an array of external memory handle types for each Vulkan memory type.
+
+    If not NULL, it must be a pointer to an array of `VkPhysicalDeviceMemoryProperties::memoryTypeCount`
+    elements, defining external memory handle types of particular Vulkan memory type,
+    to be passed using `VkExportMemoryAllocateInfoKHR`.
+
+    Any of the elements may be equal to 0, which means not to use `VkExportMemoryAllocateInfoKHR` on this memory type.
+    This is also the default in case of `pTypeExternalMemoryHandleTypes` = NULL.
+    */
+    const VkExternalMemoryHandleTypeFlagsKHR* VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryTypeCount") pTypeExternalMemoryHandleTypes;
+#endif // #if VMA_EXTERNAL_MEMORY
+} VmaAllocatorCreateInfo;
+
+/// Information about existing #VmaAllocator object.
+typedef struct VmaAllocatorInfo
+{
+    /** \brief Handle to Vulkan instance object.
+
+    This is the same value as has been passed through VmaAllocatorCreateInfo::instance.
+    */
+    VkInstance VMA_NOT_NULL instance;
+    /** \brief Handle to Vulkan physical device object.
+
+    This is the same value as has been passed through VmaAllocatorCreateInfo::physicalDevice.
+    */
+    VkPhysicalDevice VMA_NOT_NULL physicalDevice;
+    /** \brief Handle to Vulkan device object.
+
+    This is the same value as has been passed through VmaAllocatorCreateInfo::device.
+    */
+    VkDevice VMA_NOT_NULL device;
+} VmaAllocatorInfo;
+
+/** @} */
+
+/**
+\addtogroup group_stats
+@{
+*/
+
+/** \brief Calculated statistics of memory usage e.g. in a specific memory type, heap, custom pool, or total.
+
+These are fast to calculate.
+See functions: vmaGetHeapBudgets(), vmaGetPoolStatistics().
+*/
+typedef struct VmaStatistics
+{
+    /** \brief Number of `VkDeviceMemory` objects - Vulkan memory blocks allocated.
+    */
+    uint32_t blockCount;
+    /** \brief Number of #VmaAllocation objects allocated.
+
+    Dedicated allocations have their own blocks, so each one adds 1 to `allocationCount` as well as `blockCount`.
+    */
+    uint32_t allocationCount;
+    /** \brief Number of bytes allocated in `VkDeviceMemory` blocks.
+
+    \note To avoid confusion, please be aware that what Vulkan calls an "allocation" - a whole `VkDeviceMemory` object
+    (e.g. as in `VkPhysicalDeviceLimits::maxMemoryAllocationCount`) is called a "block" in VMA, while VMA calls
+    "allocation" a #VmaAllocation object that represents a memory region sub-allocated from such block, usually for a single buffer or image.
+    */
+    VkDeviceSize blockBytes;
+    /** \brief Total number of bytes occupied by all #VmaAllocation objects.
+
+    Always less than or equal to `blockBytes`.
+    Difference `(blockBytes - allocationBytes)` is the amount of memory allocated from Vulkan
+    but unused by any #VmaAllocation.
+    */
+    VkDeviceSize allocationBytes;
+} VmaStatistics;
+
+/** \brief More detailed statistics than #VmaStatistics.
+
+These are slower to calculate. Use for debugging purposes.
+See functions: vmaCalculateStatistics(), vmaCalculatePoolStatistics().
+
+The previous version of the statistics API provided averages, but they have been removed
+because they can be easily calculated as:
+
+\code
+VkDeviceSize allocationSizeAvg = detailedStats.statistics.allocationBytes / detailedStats.statistics.allocationCount;
+VkDeviceSize unusedBytes = detailedStats.statistics.blockBytes - detailedStats.statistics.allocationBytes;
+VkDeviceSize unusedRangeSizeAvg = unusedBytes / detailedStats.unusedRangeCount;
+\endcode
+*/
+typedef struct VmaDetailedStatistics
+{
+    /// Basic statistics.
+    VmaStatistics statistics;
+    /// Number of free ranges of memory between allocations.
+    uint32_t unusedRangeCount;
+    /// Smallest allocation size. `VK_WHOLE_SIZE` if there are 0 allocations.
+    VkDeviceSize allocationSizeMin;
+    /// Largest allocation size. 0 if there are 0 allocations.
+    VkDeviceSize allocationSizeMax;
+    /// Smallest empty range size. `VK_WHOLE_SIZE` if there are 0 empty ranges.
+    VkDeviceSize unusedRangeSizeMin;
+    /// Largest empty range size. 0 if there are 0 empty ranges.
+    VkDeviceSize unusedRangeSizeMax;
+} VmaDetailedStatistics;
+
+/** \brief General statistics from current state of the Allocator -
+total memory usage across all memory heaps and types.
+
+These are slower to calculate. Use for debugging purposes.
+See function vmaCalculateStatistics().
+*/
+typedef struct VmaTotalStatistics
+{
+    VmaDetailedStatistics memoryType[VK_MAX_MEMORY_TYPES];
+    VmaDetailedStatistics memoryHeap[VK_MAX_MEMORY_HEAPS];
+    VmaDetailedStatistics total;
+} VmaTotalStatistics;
+
+/** \brief Statistics of current memory usage and available budget for a specific memory heap.
+
+These are fast to calculate.
+See function vmaGetHeapBudgets().
+*/
+typedef struct VmaBudget
+{
+    /** \brief Statistics fetched from the library.
+    */
+    VmaStatistics statistics;
+    /** \brief Estimated current memory usage of the program, in bytes.
+
+    Fetched from system using VK_EXT_memory_budget extension if enabled.
+
+    It might be different than `statistics.blockBytes` (usually higher) due to additional implicit objects
+    also occupying the memory, like swapchain, pipelines, descriptor heaps, command buffers, or
+    `VkDeviceMemory` blocks allocated outside of this library, if any.
+    */
+    VkDeviceSize usage;
+    /** \brief Estimated amount of memory available to the program, in bytes.
+
+    Fetched from system using VK_EXT_memory_budget extension if enabled.
+
+    It might be different (most probably smaller) than `VkMemoryHeap::size[heapIndex]` due to factors
+    external to the program, decided by the operating system.
+    Difference `budget - usage` is the amount of additional memory that can probably
+    be allocated without problems. Exceeding the budget may result in various problems.
+    */
+    VkDeviceSize budget;
+} VmaBudget;
+
+/** @} */
+
+/**
+\addtogroup group_alloc
+@{
+*/
+
+/** \brief Parameters of new #VmaAllocation.
+
+To be used with functions like vmaCreateBuffer(), vmaCreateImage(), and many others.
+*/
+typedef struct VmaAllocationCreateInfo
+{
+    /// Use #VmaAllocationCreateFlagBits enum.
+    VmaAllocationCreateFlags flags;
+    /** \brief Intended usage of memory.
+
+    You can leave #VMA_MEMORY_USAGE_UNKNOWN if you specify memory requirements in other way. \n
+    If `pool` is not null, this member is ignored.
+    */
+    VmaMemoryUsage usage;
+    /** \brief Flags that must be set in a Memory Type chosen for an allocation.
+
+    Leave 0 if you specify memory requirements in other way. \n
+    If `pool` is not null, this member is ignored.*/
+    VkMemoryPropertyFlags requiredFlags;
+    /** \brief Flags that preferably should be set in a memory type chosen for an allocation.
+
+    Set to 0 if no additional flags are preferred. \n
+    If `pool` is not null, this member is ignored. */
+    VkMemoryPropertyFlags preferredFlags;
+    /** \brief Bitmask containing one bit set for every memory type acceptable for this allocation.
+
+    Value 0 is equivalent to `UINT32_MAX` - it means any memory type is accepted if
+    it meets other requirements specified by this structure, with no further
+    restrictions on memory type index. \n
+    If `pool` is not null, this member is ignored.
+    */
+    uint32_t memoryTypeBits;
+    /** \brief Pool that this allocation should be created in.
+
+    Leave `VK_NULL_HANDLE` to allocate from default pool. If not null, members:
+    `usage`, `requiredFlags`, `preferredFlags`, `memoryTypeBits` are ignored.
+    */
+    VmaPool VMA_NULLABLE pool;
+    /** \brief Custom general-purpose pointer that will be stored in #VmaAllocation, can be read as VmaAllocationInfo::pUserData and changed using vmaSetAllocationUserData().
+
+    If #VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT is used, it must be either
+    null or a pointer to a null-terminated string. The string will then be copied to
+    an internal buffer, so it doesn't need to be valid after the allocation call.
+    */
+    void* VMA_NULLABLE pUserData;
+    /** \brief A floating-point value between 0 and 1, indicating the priority of the allocation relative to other memory allocations.
+
+    It is used only when #VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT flag was used during creation of the #VmaAllocator object
+    and this allocation ends up as dedicated or is explicitly forced as dedicated using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
+    Otherwise, it has the priority of a memory block where it is placed and this variable is ignored.
+    */
+    float priority;
+} VmaAllocationCreateInfo;
+
+/// Describes parameters of a created #VmaPool.
+typedef struct VmaPoolCreateInfo
+{
+    /** \brief Vulkan memory type index to allocate this pool from.
+    */
+    uint32_t memoryTypeIndex;
+    /** \brief Use combination of #VmaPoolCreateFlagBits.
+    */
+    VmaPoolCreateFlags flags;
+    /** \brief Size of a single `VkDeviceMemory` block to be allocated as part of this pool, in bytes. Optional.
+
+    Specify nonzero to set explicit, constant size of memory blocks used by this
+    pool.
+
+    Leave 0 to use default and let the library manage block sizes automatically.
+    Sizes of particular blocks may vary.
+    In this case, the pool will also support dedicated allocations.
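+
+    For example, a pool with a fixed block size of 64 MiB could be set up like this
+    (a minimal sketch; `allocator` and `memTypeIndex` are assumed to exist already):
+
+    \code
+    VmaPoolCreateInfo poolCreateInfo = {};
+    poolCreateInfo.memoryTypeIndex = memTypeIndex; // e.g. found with vmaFindMemoryTypeIndexForBufferInfo()
+    poolCreateInfo.blockSize = 64ull * 1024 * 1024; // every block is exactly 64 MiB
+
+    VmaPool pool;
+    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
+    \endcode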
+    */
+    VkDeviceSize blockSize;
+    /** \brief Minimum number of blocks to be always allocated in this pool, even if they stay empty.
+
+    Set to 0 to have no preallocated blocks and allow the pool to be completely empty.
+    */
+    size_t minBlockCount;
+    /** \brief Maximum number of blocks that can be allocated in this pool. Optional.
+
+    Set to 0 to use default, which is `SIZE_MAX`, which means no limit.
+
+    Set to the same value as VmaPoolCreateInfo::minBlockCount to have a fixed amount of memory allocated
+    throughout the whole lifetime of this pool.
+    */
+    size_t maxBlockCount;
+    /** \brief A floating-point value between 0 and 1, indicating the priority of the allocations in this pool relative to other memory allocations.
+
+    It is used only when #VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT flag was used during creation of the #VmaAllocator object.
+    Otherwise, this variable is ignored.
+    */
+    float priority;
+    /** \brief Additional minimum alignment to be used for all allocations created from this pool. Can be 0.
+
+    Leave 0 (default) not to impose any additional alignment. If not 0, it must be a power of two.
+    It can be useful in cases where the alignment returned by Vulkan by functions like `vkGetBufferMemoryRequirements` is not enough,
+    e.g. when doing interop with OpenGL.
+    */
+    VkDeviceSize minAllocationAlignment;
+    /** \brief Additional `pNext` chain to be attached to `VkMemoryAllocateInfo` used for every allocation made by this pool. Optional.
+
+    Optional, can be null. If not null, it must point to a `pNext` chain of structures that can be attached to `VkMemoryAllocateInfo`.
+    It can be useful for special needs such as adding `VkExportMemoryAllocateInfoKHR`.
+    Structures pointed by this member must remain alive and unchanged for the whole lifetime of the custom pool.
+
+    Please note that some structures, e.g. `VkMemoryPriorityAllocateInfoEXT`, `VkMemoryDedicatedAllocateInfoKHR`,
+    can be attached automatically by this library when using other, more convenient features of this library.
+    */
+    void* VMA_NULLABLE pMemoryAllocateNext;
+} VmaPoolCreateInfo;
+
+/** @} */
+
+/**
+\addtogroup group_alloc
+@{
+*/
+
+/// Parameters of #VmaAllocation objects that can be retrieved using function vmaGetAllocationInfo().
+typedef struct VmaAllocationInfo
+{
+    /** \brief Memory type index that this allocation was allocated from.
+
+    It never changes.
+    */
+    uint32_t memoryType;
+    /** \brief Handle to Vulkan memory object.
+
+    The same memory object can be shared by multiple allocations.
+
+    It can change after the allocation is moved during \ref defragmentation.
+    */
+    VkDeviceMemory VMA_NULLABLE_NON_DISPATCHABLE deviceMemory;
+    /** \brief Offset in `VkDeviceMemory` object to the beginning of this allocation, in bytes. `(deviceMemory, offset)` pair is unique to this allocation.
+
+    You usually don't need to use this offset. If you create a buffer or an image together with the allocation using e.g. function
+    vmaCreateBuffer(), vmaCreateImage(), functions that operate on these resources refer to the beginning of the buffer or image,
+    not entire device memory block. Functions like vmaMapMemory(), vmaBindBufferMemory() also refer to the beginning of the allocation
+    and apply this offset automatically.
+
+    It can change after the allocation is moved during \ref defragmentation.
+    */
+    VkDeviceSize offset;
+    /** \brief Size of this allocation, in bytes.
+
+    It never changes.
+
+    \note Allocation size returned in this variable may be greater than the size
+    requested for the resource e.g. as `VkBufferCreateInfo::size`.
The whole size of the
+    allocation is accessible for operations on memory e.g. using a pointer after
+    mapping with vmaMapMemory(), but operations on the resource e.g. using
+    `vkCmdCopyBuffer` must be limited to the size of the resource.
+    */
+    VkDeviceSize size;
+    /** \brief Pointer to the beginning of this allocation as mapped data.
+
+    If the allocation hasn't been mapped using vmaMapMemory() and hasn't been
+    created with #VMA_ALLOCATION_CREATE_MAPPED_BIT flag, this value is null.
+
+    It can change after call to vmaMapMemory(), vmaUnmapMemory().
+    It can also change after the allocation is moved during \ref defragmentation.
+    */
+    void* VMA_NULLABLE pMappedData;
+    /** \brief Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vmaSetAllocationUserData().
+
+    It can change after call to vmaSetAllocationUserData() for this allocation.
+    */
+    void* VMA_NULLABLE pUserData;
+    /** \brief Custom allocation name that was set with vmaSetAllocationName().
+
+    It can change after call to vmaSetAllocationName() for this allocation.
+
+    Another way to set a custom name is to pass it in VmaAllocationCreateInfo::pUserData with
+    additional flag #VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT set [DEPRECATED].
+    */
+    const char* VMA_NULLABLE pName;
+} VmaAllocationInfo;
+
+/** \brief Parameters for defragmentation.
+
+To be used with function vmaBeginDefragmentation().
+*/
+typedef struct VmaDefragmentationInfo
+{
+    /// \brief Use combination of #VmaDefragmentationFlagBits.
+    VmaDefragmentationFlags flags;
+    /** \brief Custom pool to be defragmented.
+
+    If null, then default pools will undergo the defragmentation process.
+    */
+    VmaPool VMA_NULLABLE pool;
+    /** \brief Maximum number of bytes that can be copied during a single pass, while moving allocations to different places.
+
+    `0` means no limit.
+    */
+    VkDeviceSize maxBytesPerPass;
+    /** \brief Maximum number of allocations that can be moved during a single pass to a different place.
+
+    `0` means no limit.
+    */
+    uint32_t maxAllocationsPerPass;
+} VmaDefragmentationInfo;
+
+/// Single move of an allocation to be done for defragmentation.
+typedef struct VmaDefragmentationMove
+{
+    /// Operation to be performed on the allocation by vmaEndDefragmentationPass(). Default value is #VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY. You can modify it.
+    VmaDefragmentationMoveOperation operation;
+    /// Allocation that should be moved.
+    VmaAllocation VMA_NOT_NULL srcAllocation;
+    /** \brief Temporary allocation pointing to destination memory that will replace `srcAllocation`.
+
+    \warning Do not store this allocation in your data structures! It exists only temporarily, for the duration of the defragmentation pass,
+    to be used for binding new buffer/image to the destination memory using e.g. vmaBindBufferMemory().
+    vmaEndDefragmentationPass() will destroy it and make `srcAllocation` point to this memory.
+    */
+    VmaAllocation VMA_NOT_NULL dstTmpAllocation;
+} VmaDefragmentationMove;
+
+/** \brief Parameters for incremental defragmentation steps.
+
+To be used with function vmaBeginDefragmentationPass().
+*/
+typedef struct VmaDefragmentationPassMoveInfo
+{
+    /// Number of elements in the `pMoves` array.
+    uint32_t moveCount;
+    /** \brief Array of moves to be performed by the user in the current defragmentation pass.
+
+    Pointer to an array of `moveCount` elements, owned by VMA, created in vmaBeginDefragmentationPass(), destroyed in vmaEndDefragmentationPass().
+
+    For each element, you should:
+
+    1. Create a new buffer/image in the place pointed to by VmaDefragmentationMove::dstTmpAllocation
+       (use vmaGetAllocationInfo() to retrieve its `VkDeviceMemory` handle and offset).
+    2. Copy data from the VmaDefragmentationMove::srcAllocation e.g. using `vkCmdCopyBuffer`, `vkCmdCopyImage`.
+    3. Make sure these commands have finished executing on the GPU.
+    4. Destroy the old buffer/image.
+
+    Only then can you finish the defragmentation pass by calling vmaEndDefragmentationPass().
+    After this call, the allocation will point to the new place in memory.
+
+    Alternatively, if you cannot move a specific allocation, you can set VmaDefragmentationMove::operation to #VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE.
+
+    Alternatively, if you decide you want to completely remove the allocation:
+
+    1. Destroy its buffer/image.
+    2. Set VmaDefragmentationMove::operation to #VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY.
+
+    Then, after vmaEndDefragmentationPass() the allocation will be freed.
+    */
+    VmaDefragmentationMove* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(moveCount) pMoves;
+} VmaDefragmentationPassMoveInfo;
+
+/// Statistics returned for defragmentation process in function vmaEndDefragmentation().
+typedef struct VmaDefragmentationStats
+{
+    /// Total number of bytes that have been copied while moving allocations to different places.
+    VkDeviceSize bytesMoved;
+    /// Total number of bytes that have been released to the system by freeing empty `VkDeviceMemory` objects.
+    VkDeviceSize bytesFreed;
+    /// Number of allocations that have been moved to different places.
+    uint32_t allocationsMoved;
+    /// Number of empty `VkDeviceMemory` objects that have been released to the system.
+    uint32_t deviceMemoryBlocksFreed;
+} VmaDefragmentationStats;
+
+/** @} */
+
+/**
+\addtogroup group_virtual
+@{
+*/
+
+/// Parameters of created #VmaVirtualBlock object to be passed to vmaCreateVirtualBlock().
+typedef struct VmaVirtualBlockCreateInfo
+{
+    /** \brief Total size of the virtual block.
+
+    Sizes can be expressed in bytes or any units you want as long as you are consistent in using them.
+    For example, if you allocate from some array of structures, 1 can mean a single instance of an entire structure.
+    */
+    VkDeviceSize size;
+
+    /** \brief Use combination of #VmaVirtualBlockCreateFlagBits.
+    */
+    VmaVirtualBlockCreateFlags flags;
+
+    /** \brief Custom CPU memory allocation callbacks. Optional.
+
+    Optional, can be null. When specified, they will be used for all CPU-side memory allocations.
+    */
+    const VkAllocationCallbacks* VMA_NULLABLE pAllocationCallbacks;
+} VmaVirtualBlockCreateInfo;
+
+/// Parameters of created virtual allocation to be passed to vmaVirtualAllocate().
+typedef struct VmaVirtualAllocationCreateInfo
+{
+    /** \brief Size of the allocation.
+
+    Cannot be zero.
+    */
+    VkDeviceSize size;
+    /** \brief Required alignment of the allocation. Optional.
+
+    Must be a power of two. The special value 0 has the same meaning as 1 - no special alignment is required, so the allocation can start at any offset.
+    */
+    VkDeviceSize alignment;
+    /** \brief Use combination of #VmaVirtualAllocationCreateFlagBits.
+    */
+    VmaVirtualAllocationCreateFlags flags;
+    /** \brief Custom pointer to be associated with the allocation. Optional.
+
+    It can be any value and can be used for user-defined purposes. It can be fetched or changed later.
+    */
+    void* VMA_NULLABLE pUserData;
+} VmaVirtualAllocationCreateInfo;
+
+/// Parameters of an existing virtual allocation, returned by vmaGetVirtualAllocationInfo().
+typedef struct VmaVirtualAllocationInfo
+{
+    /** \brief Offset of the allocation.
+ + Offset at which the allocation was made. + */ + VkDeviceSize offset; + /** \brief Size of the allocation. + + Same value as passed in VmaVirtualAllocationCreateInfo::size. + */ + VkDeviceSize size; + /** \brief Custom pointer associated with the allocation. + + Same value as passed in VmaVirtualAllocationCreateInfo::pUserData or to vmaSetVirtualAllocationUserData(). + */ + void* VMA_NULLABLE pUserData; +} VmaVirtualAllocationInfo; + +/** @} */ + +#endif // _VMA_DATA_TYPES_DECLARATIONS + +#ifndef _VMA_FUNCTION_HEADERS + +/** +\addtogroup group_init +@{ +*/ + +/// Creates #VmaAllocator object. +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator( + const VmaAllocatorCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaAllocator VMA_NULLABLE* VMA_NOT_NULL pAllocator); + +/// Destroys allocator object. +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator( + VmaAllocator VMA_NULLABLE allocator); + +/** \brief Returns information about existing #VmaAllocator object - handle to Vulkan device etc. + +It might be useful if you want to keep just the #VmaAllocator handle and fetch other required handles to +`VkPhysicalDevice`, `VkDevice` etc. every time using this function. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocatorInfo* VMA_NOT_NULL pAllocatorInfo); + +/** +PhysicalDeviceProperties are fetched from physicalDevice by the allocator. +You can access it here, without fetching it again on your own. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties( + VmaAllocator VMA_NOT_NULL allocator, + const VkPhysicalDeviceProperties* VMA_NULLABLE* VMA_NOT_NULL ppPhysicalDeviceProperties); + +/** +PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator. +You can access it here, without fetching it again on your own. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties( + VmaAllocator VMA_NOT_NULL allocator, + const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE* VMA_NOT_NULL ppPhysicalDeviceMemoryProperties); + +/** +\brief Given Memory Type Index, returns Property Flags of this memory type. + +This is just a convenience function. Same information can be obtained using +vmaGetMemoryProperties(). +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t memoryTypeIndex, + VkMemoryPropertyFlags* VMA_NOT_NULL pFlags); + +/** \brief Sets index of the current frame. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t frameIndex); + +/** @} */ + +/** +\addtogroup group_stats +@{ +*/ + +/** \brief Retrieves statistics from current state of the Allocator. + +This function is called "calculate" not "get" because it has to traverse all +internal data structures, so it may be quite slow. Use it for debugging purposes. +For faster but more brief statistics suitable to be called every frame or every allocation, +use vmaGetHeapBudgets(). + +Note that when using allocator from multiple threads, returned information may immediately +become outdated. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStatistics( + VmaAllocator VMA_NOT_NULL allocator, + VmaTotalStatistics* VMA_NOT_NULL pStats); + +/** \brief Retrieves information about current memory usage and budget for all memory heaps. + +\param allocator +\param[out] pBudgets Must point to array with number of elements at least equal to number of memory heaps in physical device used. 
+ +This function is called "get" not "calculate" because it is very fast, suitable to be called +every frame or every allocation. For more detailed statistics use vmaCalculateStatistics(). + +Note that when using allocator from multiple threads, returned information may immediately +become outdated. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetHeapBudgets( + VmaAllocator VMA_NOT_NULL allocator, + VmaBudget* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pBudgets); + +/** @} */ + +/** +\addtogroup group_alloc +@{ +*/ + +/** +\brief Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo. + +This algorithm tries to find a memory type that: + +- Is allowed by memoryTypeBits. +- Contains all the flags from pAllocationCreateInfo->requiredFlags. +- Matches intended usage. +- Has as many flags from pAllocationCreateInfo->preferredFlags as possible. + +\return Returns VK_ERROR_FEATURE_NOT_PRESENT if not found. Receiving such result +from this function or any other allocating function probably means that your +device doesn't support any memory type with requested features for the specific +type of resource you want to use it for. Please check parameters of your +resource, like image layout (OPTIMAL versus LINEAR) or mip level count. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t memoryTypeBits, + const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, + uint32_t* VMA_NOT_NULL pMemoryTypeIndex); + +/** +\brief Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo. + +It can be useful e.g. to determine value to be used as VmaPoolCreateInfo::memoryTypeIndex. +It internally creates a temporary, dummy buffer that never has memory bound. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo( + VmaAllocator VMA_NOT_NULL allocator, + const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, + const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, + uint32_t* VMA_NOT_NULL pMemoryTypeIndex); + +/** +\brief Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo. + +It can be useful e.g. to determine value to be used as VmaPoolCreateInfo::memoryTypeIndex. +It internally creates a temporary, dummy image that never has memory bound. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo( + VmaAllocator VMA_NOT_NULL allocator, + const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, + const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, + uint32_t* VMA_NOT_NULL pMemoryTypeIndex); + +/** \brief Allocates Vulkan device memory and creates #VmaPool object. + +\param allocator Allocator object. +\param pCreateInfo Parameters of pool to create. +\param[out] pPool Handle to created pool. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool( + VmaAllocator VMA_NOT_NULL allocator, + const VmaPoolCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaPool VMA_NULLABLE* VMA_NOT_NULL pPool); + +/** \brief Destroys #VmaPool object and frees Vulkan device memory. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NULLABLE pool); + +/** @} */ + +/** +\addtogroup group_stats +@{ +*/ + +/** \brief Retrieves statistics of existing #VmaPool object. + +\param allocator Allocator object. +\param pool Pool object. +\param[out] pPoolStats Statistics of specified pool. 
+*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStatistics( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NOT_NULL pool, + VmaStatistics* VMA_NOT_NULL pPoolStats); + +/** \brief Retrieves detailed statistics of existing #VmaPool object. + +\param allocator Allocator object. +\param pool Pool object. +\param[out] pPoolStats Statistics of specified pool. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaCalculatePoolStatistics( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NOT_NULL pool, + VmaDetailedStatistics* VMA_NOT_NULL pPoolStats); + +/** @} */ + +/** +\addtogroup group_alloc +@{ +*/ + +/** \brief Checks magic number in margins around all allocations in given memory pool in search for corruptions. + +Corruption detection is enabled only when `VMA_DEBUG_DETECT_CORRUPTION` macro is defined to nonzero, +`VMA_DEBUG_MARGIN` is defined to nonzero and the pool is created in memory type that is +`HOST_VISIBLE` and `HOST_COHERENT`. For more information, see [Corruption detection](@ref debugging_memory_usage_corruption_detection). + +Possible return values: + +- `VK_ERROR_FEATURE_NOT_PRESENT` - corruption detection is not enabled for specified pool. +- `VK_SUCCESS` - corruption detection has been performed and succeeded. +- `VK_ERROR_UNKNOWN` - corruption detection has been performed and found memory corruptions around one of the allocations. + `VMA_ASSERT` is also fired in that case. +- Other value: Error returned by Vulkan, e.g. memory mapping failure. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NOT_NULL pool); + +/** \brief Retrieves name of a custom pool. + +After the call `ppName` is either null or points to an internally-owned null-terminated string +containing name of the pool that was previously set. The pointer becomes invalid when the pool is +destroyed or its name is changed using vmaSetPoolName(). +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NOT_NULL pool, + const char* VMA_NULLABLE* VMA_NOT_NULL ppName); + +/** \brief Sets name of a custom pool. + +`pName` can be either null or pointer to a null-terminated string with new name for the pool. +Function makes internal copy of the string, so it can be changed or freed immediately after this call. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NOT_NULL pool, + const char* VMA_NULLABLE pName); + +/** \brief General purpose memory allocation. + +\param allocator +\param pVkMemoryRequirements +\param pCreateInfo +\param[out] pAllocation Handle to allocated memory. +\param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo(). + +You should free the memory using vmaFreeMemory() or vmaFreeMemoryPages(). + +It is recommended to use vmaAllocateMemoryForBuffer(), vmaAllocateMemoryForImage(), +vmaCreateBuffer(), vmaCreateImage() instead whenever possible. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory( + VmaAllocator VMA_NOT_NULL allocator, + const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements, + const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, + VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); + +/** \brief General purpose memory allocation for multiple allocation objects at once. + +\param allocator Allocator object. +\param pVkMemoryRequirements Memory requirements for each allocation. 
+\param pCreateInfo Creation parameters for each allocation.
+\param allocationCount Number of allocations to make.
+\param[out] pAllocations Pointer to array that will be filled with handles to created allocations.
+\param[out] pAllocationInfo Optional. Pointer to array that will be filled with parameters of created allocations.
+
+You should free the memory using vmaFreeMemory() or vmaFreeMemoryPages().
+
+Word "pages" is just a suggestion to use this function to allocate pieces of memory needed for sparse binding.
+It is just a general purpose allocation function able to make multiple allocations at once.
+It may be internally optimized to be more efficient than calling vmaAllocateMemory() `allocationCount` times.
+
+All allocations are made using the same parameters. All of them are created out of the same memory pool and type.
+If any allocation fails, all allocations already made within this function call are also freed, so that when
+the returned result is not `VK_SUCCESS`, the `pAllocations` array is always entirely filled with `VK_NULL_HANDLE`.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
+    VmaAllocator VMA_NOT_NULL allocator,
+    const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements,
+    const VmaAllocationCreateInfo* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pCreateInfo,
+    size_t allocationCount,
+    VmaAllocation VMA_NULLABLE* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations,
+    VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo);
+
+/** \brief Allocates memory suitable for given `VkBuffer`.
+
+\param allocator
+\param buffer
+\param pCreateInfo
+\param[out] pAllocation Handle to allocated memory.
+\param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo().
+
+It only creates #VmaAllocation. To bind the memory to the buffer, use vmaBindBufferMemory().
+
+This is a special-purpose function. In most cases you should use vmaCreateBuffer().
+
+You must free the allocation using vmaFreeMemory() when no longer needed.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
+    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
+    VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation,
+    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);
+
+/** \brief Allocates memory suitable for given `VkImage`.
+
+\param allocator
+\param image
+\param pCreateInfo
+\param[out] pAllocation Handle to allocated memory.
+\param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo().
+
+It only creates #VmaAllocation. To bind the memory to the image, use vmaBindImageMemory().
+
+This is a special-purpose function. In most cases you should use vmaCreateImage().
+
+You must free the allocation using vmaFreeMemory() when no longer needed.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VkImage VMA_NOT_NULL_NON_DISPATCHABLE image,
+    const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
+    VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation,
+    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);
+
+/** \brief Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
+
+Passing `VK_NULL_HANDLE` as `allocation` is valid.
Such a function call is just skipped.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
+    VmaAllocator VMA_NOT_NULL allocator,
+    const VmaAllocation VMA_NULLABLE allocation);
+
+/** \brief Frees memory and destroys multiple allocations.
+
+Word "pages" is just a suggestion to use this function to free pieces of memory used for sparse binding.
+It is just a general purpose function to free memory and destroy allocations made using e.g. vmaAllocateMemory(),
+vmaAllocateMemoryPages() and other functions.
+It may be internally optimized to be more efficient than calling vmaFreeMemory() `allocationCount` times.
+
+Allocations in `pAllocations` array can come from any memory pools and types.
+Passing `VK_NULL_HANDLE` as elements of `pAllocations` array is valid. Such entries are just skipped.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
+    VmaAllocator VMA_NOT_NULL allocator,
+    size_t allocationCount,
+    const VmaAllocation VMA_NULLABLE* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations);
+
+/** \brief Returns current information about specified allocation.
+
+Current parameters of the given allocation are returned in `pAllocationInfo`.
+
+This function doesn't lock any mutex, so it should be quite efficient; still,
+you should avoid calling it too often.
+You can retrieve the same VmaAllocationInfo structure while creating your resource, from function
+vmaCreateBuffer(), vmaCreateImage(). You can remember it if you are sure parameters don't change
+(e.g. due to defragmentation).
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation,
+    VmaAllocationInfo* VMA_NOT_NULL pAllocationInfo);
+
+/** \brief Sets pUserData in given allocation to new value.
+
+The value of pointer `pUserData` is copied to allocation's `pUserData`.
+It is opaque, so you can use it however you want - e.g.
+as a pointer, ordinal number or some handle to your own data.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation,
+    void* VMA_NULLABLE pUserData);
+
+/** \brief Sets pName in given allocation to new value.
+
+`pName` must be either null, or pointer to a null-terminated string. The function
+makes a local copy of the string and sets it as allocation's `pName`. The string
+passed as pName doesn't need to be valid for the whole lifetime of the allocation -
+you can free it after this call. The string previously pointed to by allocation's
+`pName` is freed from memory.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationName(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation,
+    const char* VMA_NULLABLE pName);
+
+/**
+\brief Given an allocation, returns Property Flags of its memory type.
+
+This is just a convenience function. Same information can be obtained using
+vmaGetAllocationInfo() + vmaGetMemoryProperties().
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationMemoryProperties(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation,
+    VkMemoryPropertyFlags* VMA_NOT_NULL pFlags);
+
+/** \brief Maps memory represented by given allocation and returns pointer to it.
+
+Maps memory represented by given allocation to make it accessible to CPU code.
+When succeeded, `*ppData` contains pointer to first byte of this memory.
+
+\warning
+If the allocation is part of a bigger `VkDeviceMemory` block, the returned pointer is
+correctly offset to the beginning of the region assigned to this particular allocation.
+Unlike the result of `vkMapMemory`, it points to the allocation, not to the beginning of the whole block.
+You should not add VmaAllocationInfo::offset to it!
+
+Mapping is internally reference-counted and synchronized, so although the raw Vulkan
+function `vkMapMemory()` cannot be used to map the same block of `VkDeviceMemory`
+multiple times simultaneously, it is safe to call this function on allocations
+assigned to the same memory block. Actual Vulkan memory will be mapped on first
+mapping and unmapped on last unmapping.
+
+If the function succeeded, you must call vmaUnmapMemory() to unmap the
+allocation when mapping is no longer needed or before freeing the allocation, at
+the latest.
+
+It is also safe to call this function multiple times on the same allocation. You
+must call vmaUnmapMemory() the same number of times as you called vmaMapMemory().
+
+It is also safe to call this function on an allocation created with
+#VMA_ALLOCATION_CREATE_MAPPED_BIT flag. Its memory stays mapped all the time.
+You must still call vmaUnmapMemory() the same number of times as you called
+vmaMapMemory(). You must not call vmaUnmapMemory() an additional time to free the
+"0-th" mapping made automatically due to #VMA_ALLOCATION_CREATE_MAPPED_BIT flag.
+
+This function fails when used on an allocation made in a memory type that is not
+`HOST_VISIBLE`.
+
+This function doesn't automatically flush or invalidate caches.
+If the allocation is made from a memory type that is not `HOST_COHERENT`,
+you also need to use vmaInvalidateAllocation() / vmaFlushAllocation(), as required by Vulkan specification.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation,
+    void* VMA_NULLABLE* VMA_NOT_NULL ppData);
+
+/** \brief Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
+
+For details, see description of vmaMapMemory().
+
+This function doesn't automatically flush or invalidate caches.
+If the allocation is made from a memory type that is not `HOST_COHERENT`,
+you also need to use vmaInvalidateAllocation() / vmaFlushAllocation(), as required by Vulkan specification.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation);
+
+/** \brief Flushes memory of given allocation.
+
+Calls `vkFlushMappedMemoryRanges()` for memory associated with given range of given allocation.
+It needs to be called after writing to a mapped memory for memory types that are not `HOST_COHERENT`.
+Unmap operation doesn't do that automatically.
+
+- `offset` must be relative to the beginning of allocation.
+- `size` can be `VK_WHOLE_SIZE`. It means all memory from `offset` to the end of given allocation.
+- `offset` and `size` don't have to be aligned.
+  They are internally rounded down/up to a multiple of `nonCoherentAtomSize`.
+- If `size` is 0, this call is ignored.
+- If memory type that the `allocation` belongs to is not `HOST_VISIBLE` or it is `HOST_COHERENT`,
+  this call is ignored.
+
+Warning! `offset` and `size` are relative to the contents of given `allocation`.
+If you mean whole allocation, you can pass 0 and `VK_WHOLE_SIZE`, respectively.
+Do not pass allocation's offset as `offset`!!!
+
+This function returns the `VkResult` from `vkFlushMappedMemoryRanges` if it is
+called, otherwise `VK_SUCCESS`.
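+
+A typical use (a minimal sketch; `allocator`, `allocation`, `data` and `dataSize` are assumed)
+is to flush after writing to mapped memory of a non-`HOST_COHERENT` memory type:
+
+\code
+void* mapped;
+if(vmaMapMemory(allocator, allocation, &mapped) == VK_SUCCESS)
+{
+    memcpy(mapped, data, dataSize);
+    // Make the written range visible to the device.
+    vmaFlushAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);
+    vmaUnmapMemory(allocator, allocation);
+}
+\endcode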
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation,
+    VkDeviceSize offset,
+    VkDeviceSize size);
+
+/** \brief Invalidates memory of given allocation.
+
+Calls `vkInvalidateMappedMemoryRanges()` for memory associated with given range of given allocation.
+It needs to be called before reading from a mapped memory for memory types that are not `HOST_COHERENT`.
+Map operation doesn't do that automatically.
+
+- `offset` must be relative to the beginning of allocation.
+- `size` can be `VK_WHOLE_SIZE`. It means all memory from `offset` to the end of given allocation.
+- `offset` and `size` don't have to be aligned.
+  They are internally rounded down/up to a multiple of `nonCoherentAtomSize`.
+- If `size` is 0, this call is ignored.
+- If memory type that the `allocation` belongs to is not `HOST_VISIBLE` or it is `HOST_COHERENT`,
+  this call is ignored.
+
+Warning! `offset` and `size` are relative to the contents of given `allocation`.
+If you mean whole allocation, you can pass 0 and `VK_WHOLE_SIZE`, respectively.
+Do not pass allocation's offset as `offset`!!!
+
+This function returns the `VkResult` from `vkInvalidateMappedMemoryRanges` if
+it is called, otherwise `VK_SUCCESS`.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation,
+    VkDeviceSize offset,
+    VkDeviceSize size);
+
+/** \brief Flushes memory of given set of allocations.
+
+Calls `vkFlushMappedMemoryRanges()` for memory associated with given ranges of given allocations.
+For more information, see documentation of vmaFlushAllocation().
+
+\param allocator
+\param allocationCount
+\param allocations
+\param offsets If not null, it must point to an array of offsets of regions to flush, relative to the beginning of respective allocations. Null means all offsets are zero.
+\param sizes If not null, it must point to an array of sizes of regions to flush in respective allocations. Null means `VK_WHOLE_SIZE` for all allocations.
+
+This function returns the `VkResult` from `vkFlushMappedMemoryRanges` if it is
+called, otherwise `VK_SUCCESS`.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(
+    VmaAllocator VMA_NOT_NULL allocator,
+    uint32_t allocationCount,
+    const VmaAllocation VMA_NOT_NULL* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
+    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
+    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
+
+/** \brief Invalidates memory of given set of allocations.
+
+Calls `vkInvalidateMappedMemoryRanges()` for memory associated with given ranges of given allocations.
+For more information, see documentation of vmaInvalidateAllocation().
+
+\param allocator
+\param allocationCount
+\param allocations
+\param offsets If not null, it must point to an array of offsets of regions to invalidate, relative to the beginning of respective allocations. Null means all offsets are zero.
+\param sizes If not null, it must point to an array of sizes of regions to invalidate in respective allocations. Null means `VK_WHOLE_SIZE` for all allocations.
+
+This function returns the `VkResult` from `vkInvalidateMappedMemoryRanges` if it is
+called, otherwise `VK_SUCCESS`.
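+
+For example, to invalidate the whole contents of several allocations at once before reading them
+(a minimal sketch; `allocator`, `allocA` and `allocB` are assumed):
+
+\code
+VmaAllocation allocs[2] = { allocA, allocB };
+// Null offsets and sizes mean offset 0 and VK_WHOLE_SIZE for every allocation.
+vmaInvalidateAllocations(allocator, 2, allocs, NULL, NULL);
+\endcode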
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations(
+    VmaAllocator VMA_NOT_NULL allocator,
+    uint32_t allocationCount,
+    const VmaAllocation VMA_NOT_NULL* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
+    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
+    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
+
+/** \brief Checks magic number in margins around all allocations in given memory types (in both default and custom pools) in search for corruptions.
+
+\param allocator
+\param memoryTypeBits Bit mask, where each bit set means that a memory type with that index should be checked.
+
+Corruption detection is enabled only when `VMA_DEBUG_DETECT_CORRUPTION` macro is defined to nonzero,
+`VMA_DEBUG_MARGIN` is defined to nonzero and only for memory types that are
+`HOST_VISIBLE` and `HOST_COHERENT`. For more information, see [Corruption detection](@ref debugging_memory_usage_corruption_detection).
+
+Possible return values:
+
+- `VK_ERROR_FEATURE_NOT_PRESENT` - corruption detection is not enabled for any of specified memory types.
+- `VK_SUCCESS` - corruption detection has been performed and succeeded.
+- `VK_ERROR_UNKNOWN` - corruption detection has been performed and found memory corruptions around one of the allocations.
+  `VMA_ASSERT` is also fired in that case.
+- Other value: Error returned by Vulkan, e.g. memory mapping failure.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(
+    VmaAllocator VMA_NOT_NULL allocator,
+    uint32_t memoryTypeBits);
+
+/** \brief Begins defragmentation process.
+
+\param allocator Allocator object.
+\param pInfo Structure filled with parameters of defragmentation.
+\param[out] pContext Context object that must be passed to vmaEndDefragmentation() to finish defragmentation.
+\returns
+- `VK_SUCCESS` if defragmentation can begin.
+- `VK_ERROR_FEATURE_NOT_PRESENT` if defragmentation is not supported.
+
+For more information about defragmentation, see documentation chapter:
+[Defragmentation](@ref defragmentation).
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentation(
+    VmaAllocator VMA_NOT_NULL allocator,
+    const VmaDefragmentationInfo* VMA_NOT_NULL pInfo,
+    VmaDefragmentationContext VMA_NULLABLE* VMA_NOT_NULL pContext);
+
+/** \brief Ends defragmentation process.
+
+\param allocator Allocator object.
+\param context Context object that has been created by vmaBeginDefragmentation().
+\param[out] pStats Optional stats for the defragmentation. Can be null.
+
+Use this function to finish defragmentation started by vmaBeginDefragmentation().
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaEndDefragmentation(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaDefragmentationContext VMA_NOT_NULL context,
+    VmaDefragmentationStats* VMA_NULLABLE pStats);
+
+/** \brief Starts single defragmentation pass.
+
+\param allocator Allocator object.
+\param context Context object that has been created by vmaBeginDefragmentation().
+\param[out] pPassInfo Computed information for the current pass.
+\returns
+- `VK_SUCCESS` if no more moves are possible. Then you can omit call to vmaEndDefragmentationPass() and simply end whole defragmentation.
+- `VK_INCOMPLETE` if there are pending moves returned in `pPassInfo`. You need to perform them, call vmaEndDefragmentationPass(),
+  and then preferably try another pass with vmaBeginDefragmentationPass().
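+
+A typical pass loop could look like this (a minimal sketch; recreation of buffers/images
+and the GPU copy between them are application-specific and only indicated by comments):
+
+\code
+VmaDefragmentationInfo defragInfo = {};
+VmaDefragmentationContext defragCtx;
+vmaBeginDefragmentation(allocator, &defragInfo, &defragCtx);
+
+VmaDefragmentationPassMoveInfo pass;
+while(vmaBeginDefragmentationPass(allocator, defragCtx, &pass) == VK_INCOMPLETE)
+{
+    // For each pass.pMoves[i]: create a new buffer/image at dstTmpAllocation,
+    // copy the data, wait for the GPU, destroy the old resource
+    // (or set the move operation to IGNORE or DESTROY).
+    if(vmaEndDefragmentationPass(allocator, defragCtx, &pass) == VK_SUCCESS)
+        break;
+}
+vmaEndDefragmentation(allocator, defragCtx, NULL);
+\endcode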
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaDefragmentationContext VMA_NOT_NULL context,
+    VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo);
+
+/** \brief Ends single defragmentation pass.
+
+\param allocator Allocator object.
+\param context Context object that has been created by vmaBeginDefragmentation().
+\param pPassInfo Computed information for the current pass, filled by vmaBeginDefragmentationPass() and possibly modified by you.
+
+Returns `VK_SUCCESS` if no more moves are possible or `VK_INCOMPLETE` if more defragmentations are possible.
+
+Ends incremental defragmentation pass and commits all defragmentation moves from `pPassInfo`.
+After this call:
+
+- Allocations at `pPassInfo[i].srcAllocation` that had `pPassInfo[i].operation ==` #VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY
+  (which is the default) will be pointing to the new destination place.
+- Allocation at `pPassInfo[i].srcAllocation` that had `pPassInfo[i].operation ==` #VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY
+  will be freed.
+
+If no more moves are possible, you can end the whole defragmentation.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaDefragmentationContext VMA_NOT_NULL context,
+    VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo);
+
+/** \brief Binds buffer to allocation.
+
+Binds specified buffer to region of memory represented by specified allocation.
+Gets `VkDeviceMemory` handle and offset from the allocation.
+If you want to create a buffer, allocate memory for it and bind them together separately,
+you should use this function for binding instead of standard `vkBindBufferMemory()`,
+because it ensures proper synchronization so that when a `VkDeviceMemory` object is used by multiple
+allocations, calls to `vkBind*Memory()` or `vkMapMemory()` won't happen from multiple threads simultaneously
+(which is illegal in Vulkan).
+
+It is recommended to use function vmaCreateBuffer() instead of this one.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation,
+    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer);
+
+/** \brief Binds buffer to allocation with additional parameters.
+
+\param allocator
+\param allocation
+\param allocationLocalOffset Additional offset to be added while binding, relative to the beginning of the `allocation`. Normally it should be 0.
+\param buffer
+\param pNext A chain of structures to be attached to `VkBindBufferMemoryInfoKHR` structure used internally. Normally it should be null.
+
+This function is similar to vmaBindBufferMemory(), but it provides additional parameters.
+
+If `pNext` is not null, #VmaAllocator object must have been created with #VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT flag
+or with VmaAllocatorCreateInfo::vulkanApiVersion `>= VK_API_VERSION_1_1`. Otherwise the call fails.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation,
+    VkDeviceSize allocationLocalOffset,
+    VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer,
+    const void* VMA_NULLABLE pNext);
+
+/** \brief Binds image to allocation.
+
+Binds specified image to region of memory represented by specified allocation.
+Gets `VkDeviceMemory` handle and offset from the allocation.
+If you want to create an image, allocate memory for it and bind them together separately, +you should use this function for binding instead of standard `vkBindImageMemory()`, +because it ensures proper synchronization so that when a `VkDeviceMemory` object is used by multiple +allocations, calls to `vkBind*Memory()` or `vkMapMemory()` won't happen from multiple threads simultaneously +(which is illegal in Vulkan). + +It is recommended to use function vmaCreateImage() instead of this one. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkImage VMA_NOT_NULL_NON_DISPATCHABLE image); + +/** \brief Binds image to allocation with additional parameters. + +\param allocator +\param allocation +\param allocationLocalOffset Additional offset to be added while binding, relative to the beginning of the `allocation`. Normally it should be 0. +\param image +\param pNext A chain of structures to be attached to `VkBindImageMemoryInfoKHR` structure used internally. Normally it should be null. + +This function is similar to vmaBindImageMemory(), but it provides additional parameters. + +If `pNext` is not null, #VmaAllocator object must have been created with #VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT flag +or with VmaAllocatorCreateInfo::vulkanApiVersion `>= VK_API_VERSION_1_1`. Otherwise the call fails. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkDeviceSize allocationLocalOffset, + VkImage VMA_NOT_NULL_NON_DISPATCHABLE image, + const void* VMA_NULLABLE pNext); + +/** \brief Creates a new `VkBuffer`, allocates and binds memory for it. + +\param allocator +\param pBufferCreateInfo +\param pAllocationCreateInfo +\param[out] pBuffer Buffer that was created. +\param[out] pAllocation Allocation that was created. +\param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo(). + +This function automatically: + +-# Creates buffer. +-# Allocates appropriate memory for it. +-# Binds the buffer with the memory. + +If any of these operations fail, buffer and allocation are not created, +returned value is negative error code, `*pBuffer` and `*pAllocation` are null. + +If the function succeeded, you must destroy both buffer and allocation when you +no longer need them using either convenience function vmaDestroyBuffer() or +separately, using `vkDestroyBuffer()` and vmaFreeMemory(). + +If #VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT flag was used, +VK_KHR_dedicated_allocation extension is used internally to query driver whether +it requires or prefers the new buffer to have dedicated allocation. If yes, +and if dedicated allocation is possible +(#VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT is not used), it creates dedicated +allocation for this buffer, just like when using +#VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. + +\note This function creates a new `VkBuffer`. Sub-allocation of parts of one large buffer, +although recommended as a good practice, is out of scope of this library and could be implemented +by the user as a higher-level logic on top of VMA. 
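+
+A minimal sketch of typical usage (an existing `allocator` is assumed):
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, NULL);
+// ... use the buffer, then later:
+// vmaDestroyBuffer(allocator, buf, alloc);
+\endcode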
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
+    VmaAllocator VMA_NOT_NULL allocator,
+    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
+    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
+    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer,
+    VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation,
+    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);
+
+/** \brief Creates a buffer with additional minimum alignment.
+
+Similar to vmaCreateBuffer() but provides additional parameter `minAlignment` which allows specifying a custom
+minimum alignment to be used when placing the buffer inside a larger memory block, which may be needed e.g.
+for interop with OpenGL.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBufferWithAlignment(
+    VmaAllocator VMA_NOT_NULL allocator,
+    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
+    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
+    VkDeviceSize minAlignment,
+    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer,
+    VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation,
+    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);
+
+/** \brief Creates a new `VkBuffer`, binds already created memory to it.
+
+\param allocator
+\param allocation Allocation that provides memory to be used for binding new buffer to it.
+\param pBufferCreateInfo
+\param[out] pBuffer Buffer that was created.
+
+This function automatically:
+
+-# Creates buffer.
+-# Binds the buffer with the supplied memory.
+
+If any of these operations fail, buffer is not created,
+returned value is negative error code and `*pBuffer` is null.
+
+If the function succeeded, you must destroy the buffer when you
+no longer need it using `vkDestroyBuffer()`. If you want to also destroy the corresponding
+allocation, you can use the convenience function vmaDestroyBuffer().
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingBuffer(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation,
+    const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo,
+    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer);
+
+/** \brief Destroys Vulkan buffer and frees allocated memory.
+
+This is just a convenience function equivalent to:
+
+\code
+vkDestroyBuffer(device, buffer, allocationCallbacks);
+vmaFreeMemory(allocator, allocation);
+\endcode
+
+It is safe to pass null as buffer and/or allocation.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer,
+    VmaAllocation VMA_NULLABLE allocation);
+
+/// Function similar to vmaCreateBuffer().
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
+    VmaAllocator VMA_NOT_NULL allocator,
+    const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
+    const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo,
+    VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage,
+    VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation,
+    VmaAllocationInfo* VMA_NULLABLE pAllocationInfo);
+
+/// Function similar to vmaCreateAliasingBuffer().
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingImage(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VmaAllocation VMA_NOT_NULL allocation,
+    const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo,
+    VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage);
+
+/** \brief Destroys Vulkan image and frees allocated memory.
+
+This is just a convenience function equivalent to:
+
+\code
+vkDestroyImage(device, image, allocationCallbacks);
+vmaFreeMemory(allocator, allocation);
+\endcode
+
+It is safe to pass null as image and/or allocation.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
+    VmaAllocator VMA_NOT_NULL allocator,
+    VkImage VMA_NULLABLE_NON_DISPATCHABLE image,
+    VmaAllocation VMA_NULLABLE allocation);
+
+/** @} */
+
+/**
+\addtogroup group_virtual
+@{
+*/
+
+/** \brief Creates new #VmaVirtualBlock object.
+
+\param pCreateInfo Parameters for creation.
+\param[out] pVirtualBlock Returned virtual block object or `VMA_NULL` if creation failed.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateVirtualBlock(
+    const VmaVirtualBlockCreateInfo* VMA_NOT_NULL pCreateInfo,
+    VmaVirtualBlock VMA_NULLABLE* VMA_NOT_NULL pVirtualBlock);
+
+/** \brief Destroys #VmaVirtualBlock object.
+
+Please note that you should consciously handle virtual allocations that could remain unfreed in the block.
+You should either free them individually using vmaVirtualFree() or call vmaClearVirtualBlock()
+if you are sure this is what you want. If you do neither, an assert is called.
+
+If you keep pointers to some additional metadata associated with your virtual allocations in their `pUserData`,
+don't forget to free them.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaDestroyVirtualBlock(
+    VmaVirtualBlock VMA_NULLABLE virtualBlock);
+
+/** \brief Returns true if the #VmaVirtualBlock is empty - contains 0 virtual allocations and has all its space available for new allocations.
+*/
+VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaIsVirtualBlockEmpty(
+    VmaVirtualBlock VMA_NOT_NULL virtualBlock);
+
+/** \brief Returns information about a specific virtual allocation within a virtual block, like its size and `pUserData` pointer.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualAllocationInfo(
+    VmaVirtualBlock VMA_NOT_NULL virtualBlock,
+    VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, VmaVirtualAllocationInfo* VMA_NOT_NULL pVirtualAllocInfo);
+
+/** \brief Allocates new virtual allocation inside given #VmaVirtualBlock.
+
+If the allocation fails due to not enough free space available, `VK_ERROR_OUT_OF_DEVICE_MEMORY` is returned
+(even though the function never allocates actual GPU memory).
+`pAllocation` is then set to `VK_NULL_HANDLE` and `pOffset`, if not null, is set to `UINT64_MAX`.
+
+\param virtualBlock Virtual block
+\param pCreateInfo Parameters for the allocation
+\param[out] pAllocation Returned handle of the new allocation
+\param[out] pOffset Returned offset of the new allocation. Optional, can be null.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaVirtualAllocate(
+    VmaVirtualBlock VMA_NOT_NULL virtualBlock,
+    const VmaVirtualAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
+    VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pAllocation,
+    VkDeviceSize* VMA_NULLABLE pOffset);
+
+/** \brief Frees virtual allocation inside given #VmaVirtualBlock.
+
+It is correct to call this function with `allocation == VK_NULL_HANDLE` - it does nothing.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaVirtualFree(
+    VmaVirtualBlock VMA_NOT_NULL virtualBlock,
+    VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE allocation);
+
+/** \brief Frees all virtual allocations inside given #VmaVirtualBlock.
+
+You must either call this function or free each virtual allocation individually with vmaVirtualFree()
+before destroying a virtual block. Otherwise, an assert is called.
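+
+A minimal teardown sketch (assuming `block` is a valid #VmaVirtualBlock whose
+remaining allocations you intentionally want to discard):
+
+\code
+// Frees all remaining virtual allocations at once...
+vmaClearVirtualBlock(block);
+// ...so the block can be destroyed without triggering the assert.
+vmaDestroyVirtualBlock(block);
+\endcode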
+
+If you keep a pointer to some additional metadata associated with your virtual allocation in its `pUserData`,
+don't forget to free it as well.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaClearVirtualBlock(
+    VmaVirtualBlock VMA_NOT_NULL virtualBlock);
+
+/** \brief Changes custom pointer associated with given virtual allocation.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaSetVirtualAllocationUserData(
+    VmaVirtualBlock VMA_NOT_NULL virtualBlock,
+    VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation,
+    void* VMA_NULLABLE pUserData);
+
+/** \brief Calculates and returns statistics about virtual allocations and memory usage in given #VmaVirtualBlock.
+
+This function is fast to call. For more detailed statistics, see vmaCalculateVirtualBlockStatistics().
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualBlockStatistics(
+    VmaVirtualBlock VMA_NOT_NULL virtualBlock,
+    VmaStatistics* VMA_NOT_NULL pStats);
+
+/** \brief Calculates and returns detailed statistics about virtual allocations and memory usage in given #VmaVirtualBlock.
+
+This function is slow to call. Use for debugging purposes.
+For less detailed statistics, see vmaGetVirtualBlockStatistics().
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaCalculateVirtualBlockStatistics(
+    VmaVirtualBlock VMA_NOT_NULL virtualBlock,
+    VmaDetailedStatistics* VMA_NOT_NULL pStats);
+
+/** @} */
+
+#if VMA_STATS_STRING_ENABLED
+/**
+\addtogroup group_stats
+@{
+*/
+
+/** \brief Builds and returns a null-terminated string in JSON format with information about given #VmaVirtualBlock.
+\param virtualBlock Virtual block.
+\param[out] ppStatsString Returned string.
+\param detailedMap Pass `VK_FALSE` to only obtain statistics as returned by vmaCalculateVirtualBlockStatistics(). Pass `VK_TRUE` to also obtain full list of allocations and free spaces.
+
+Returned string must be freed using vmaFreeVirtualBlockStatsString().
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaBuildVirtualBlockStatsString(
+    VmaVirtualBlock VMA_NOT_NULL virtualBlock,
+    char* VMA_NULLABLE* VMA_NOT_NULL ppStatsString,
+    VkBool32 detailedMap);
+
+/// Frees a string returned by vmaBuildVirtualBlockStatsString().
+VMA_CALL_PRE void VMA_CALL_POST vmaFreeVirtualBlockStatsString(
+    VmaVirtualBlock VMA_NOT_NULL virtualBlock,
+    char* VMA_NULLABLE pStatsString);
+
+/** \brief Builds and returns statistics as a null-terminated string in JSON format.
+\param allocator
+\param[out] ppStatsString Must be freed using vmaFreeStatsString() function.
+\param detailedMap
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
+    VmaAllocator VMA_NOT_NULL allocator,
+    char* VMA_NULLABLE* VMA_NOT_NULL ppStatsString,
+    VkBool32 detailedMap);
+
+VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
+    VmaAllocator VMA_NOT_NULL allocator,
+    char* VMA_NULLABLE pStatsString);
+
+/** @} */
+
+#endif // VMA_STATS_STRING_ENABLED
+
+#endif // _VMA_FUNCTION_HEADERS
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
+
+////////////////////////////////////////////////////////////////////////////////
+////////////////////////////////////////////////////////////////////////////////
+//
+// IMPLEMENTATION
+//
+////////////////////////////////////////////////////////////////////////////////
+////////////////////////////////////////////////////////////////////////////////
+
+// For Visual Studio IntelliSense.
+#if defined(__cplusplus) && defined(__INTELLISENSE__)
+#define VMA_IMPLEMENTATION
+#endif
+
+#ifdef VMA_IMPLEMENTATION
+#undef VMA_IMPLEMENTATION
+
+#include <cstdint>
+#include <cstdlib>
+#include <cstring>
+#include <utility>
+#include <type_traits>
+
+#ifdef _MSC_VER
+    #include <intrin.h> // For functions like __popcnt, _BitScanForward etc.
+#endif
+#if __cplusplus >= 202002L || _MSVC_LANG >= 202002L // C++20
+    #include <bit> // For std::popcount
+#endif
+
+/*******************************************************************************
+CONFIGURATION SECTION
+
+Define some of these macros before each #include of this header or change them
+here if you need other than default behavior depending on your environment.
+*/
+#ifndef _VMA_CONFIGURATION
+
+/*
+Define this macro to 1 to make the library fetch pointers to Vulkan functions
+internally, like:
+
+    vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
+*/
+#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
+    #define VMA_STATIC_VULKAN_FUNCTIONS 1
+#endif
+
+/*
+Define this macro to 1 to make the library fetch pointers to Vulkan functions
+internally, like:
+
+    vulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkGetDeviceProcAddr(device, "vkAllocateMemory");
+
+To use this feature in new versions of VMA you now have to pass
+VmaVulkanFunctions::vkGetInstanceProcAddr and vkGetDeviceProcAddr as
+VmaAllocatorCreateInfo::pVulkanFunctions. Other members can be null.
+*/
+#if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
+    #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
+#endif
+
+#ifndef VMA_USE_STL_SHARED_MUTEX
+    // Compiler conforms to C++17.
+    #if __cplusplus >= 201703L
+        #define VMA_USE_STL_SHARED_MUTEX 1
+    // Visual studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus
+    // Otherwise it is always 199711L, even though shared_mutex has worked since Visual Studio 2015 Update 2.
+    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
+        #define VMA_USE_STL_SHARED_MUTEX 1
+    #else
+        #define VMA_USE_STL_SHARED_MUTEX 0
+    #endif
+#endif
+
+/*
+Define this macro to include custom header files without having to edit this file directly, e.g.:
+
+    // Inside of "my_vma_configuration_user_includes.h":
+
+    #include "my_custom_assert.h" // for MY_CUSTOM_ASSERT
+    #include "my_custom_min.h" // for my_custom_min
+    #include <algorithm>
+    #include <mutex>
+
+    // Inside a different file, which includes "vk_mem_alloc.h":
+
+    #define VMA_CONFIGURATION_USER_INCLUDES_H "my_vma_configuration_user_includes.h"
+    #define VMA_ASSERT(expr) MY_CUSTOM_ASSERT(expr)
+    #define VMA_MIN(v1, v2) (my_custom_min(v1, v2))
+    #include "vk_mem_alloc.h"
+    ...
+
+The following headers are used in this CONFIGURATION section only, so feel free to
+remove them if not needed.
+*/
+#if !defined(VMA_CONFIGURATION_USER_INCLUDES_H)
+    #include <cassert> // for assert
+    #include <algorithm> // for min, max
+    #include <mutex>
+#else
+    #include VMA_CONFIGURATION_USER_INCLUDES_H
+#endif
+
+#ifndef VMA_NULL
+    // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
+    #define VMA_NULL nullptr
+#endif
+
+#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
+#include <malloc.h> // for memalign
+static void* vma_aligned_alloc(size_t alignment, size_t size)
+{
+    // alignment must be >= sizeof(void*)
+    if(alignment < sizeof(void*))
+    {
+        alignment = sizeof(void*);
+    }
+
+    return memalign(alignment, size);
+}
+#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
+#include <stdlib.h> // for posix_memalign
+
+#if defined(__APPLE__)
+#include <AvailabilityMacros.h>
+#endif
+
+static void* vma_aligned_alloc(size_t alignment, size_t size)
+{
+    // Unfortunately, aligned_alloc causes VMA to crash due to it returning null pointers. (At least under 11.4)
+    // Therefore, for now disable this specific exception until a proper solution is found.
+    //#if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0))
+    //#if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0
+    //    // For C++14, usr/include/malloc/_malloc.h declares aligned_alloc() only
+    //    // with the MacOSX11.0 SDK in Xcode 12 (which is what adds
+    //    // MAC_OS_X_VERSION_10_16), even though the function is marked
+    //    // available for 10.15. That is why the preprocessor checks for 10.16 but
+    //    // the __builtin_available checks for 10.15.
+    //    // People who use C++17 could call aligned_alloc with the 10.15 SDK already.
+    //    if (__builtin_available(macOS 10.15, iOS 13, *))
+    //        return aligned_alloc(alignment, size);
+    //#endif
+    //#endif
+
+    // alignment must be >= sizeof(void*)
+    if(alignment < sizeof(void*))
+    {
+        alignment = sizeof(void*);
+    }
+
+    void *pointer;
+    if(posix_memalign(&pointer, alignment, size) == 0)
+        return pointer;
+    return VMA_NULL;
+}
+#elif defined(_WIN32)
+static void* vma_aligned_alloc(size_t alignment, size_t size)
+{
+    return _aligned_malloc(size, alignment);
+}
+#else
+static void* vma_aligned_alloc(size_t alignment, size_t size)
+{
+    return aligned_alloc(alignment, size);
+}
+#endif
+
+#if defined(_WIN32)
+static void vma_aligned_free(void* ptr)
+{
+    _aligned_free(ptr);
+}
+#else
+static void vma_aligned_free(void* VMA_NULLABLE ptr)
+{
+    free(ptr);
+}
+#endif
+
+// If your compiler is not compatible with C++11 and definition of
+// aligned_alloc() function is missing, uncommenting following line may help:
+
+//#include <malloc.h>
+
+// Normal assert to check for programmer's errors, especially in Debug configuration.
+#ifndef VMA_ASSERT
+    #ifdef NDEBUG
+        #define VMA_ASSERT(expr)
+    #else
+        #define VMA_ASSERT(expr) assert(expr)
+    #endif
+#endif
+
+// Assert that will be called very often, like inside data structures e.g. operator[].
+// Making it non-empty can make program slow.
+#ifndef VMA_HEAVY_ASSERT
+    #ifdef NDEBUG
+        #define VMA_HEAVY_ASSERT(expr)
+    #else
+        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
+    #endif
+#endif
+
+#ifndef VMA_ALIGN_OF
+    #define VMA_ALIGN_OF(type) (__alignof(type))
+#endif
+
+#ifndef VMA_SYSTEM_ALIGNED_MALLOC
+    #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size))
+#endif
+
+#ifndef VMA_SYSTEM_ALIGNED_FREE
+    // VMA_SYSTEM_FREE is the old name, but might have been defined by the user
+    #if defined(VMA_SYSTEM_FREE)
+        #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr)
+    #else
+        #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr)
+    #endif
+#endif
+
+#ifndef VMA_COUNT_BITS_SET
+    // Returns number of bits set to 1 in (v)
+    #define VMA_COUNT_BITS_SET(v) VmaCountBitsSet(v)
+#endif
+
+#ifndef VMA_BITSCAN_LSB
+    // Scans integer for index of first nonzero value from the Least Significant Bit (LSB). If mask is 0 then returns UINT8_MAX
+    #define VMA_BITSCAN_LSB(mask) VmaBitScanLSB(mask)
+#endif
+
+#ifndef VMA_BITSCAN_MSB
+    // Scans integer for index of first nonzero value from the Most Significant Bit (MSB). If mask is 0 then returns UINT8_MAX
+    #define VMA_BITSCAN_MSB(mask) VmaBitScanMSB(mask)
+#endif
+
+#ifndef VMA_MIN
+    #define VMA_MIN(v1, v2) ((std::min)((v1), (v2)))
+#endif
+
+#ifndef VMA_MAX
+    #define VMA_MAX(v1, v2) ((std::max)((v1), (v2)))
+#endif
+
+#ifndef VMA_SWAP
+    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
+#endif
+
+#ifndef VMA_SORT
+    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
+#endif
+
+#ifndef VMA_DEBUG_LOG
+    #define VMA_DEBUG_LOG(format, ...)
+    /*
+    #define VMA_DEBUG_LOG(format, ...) do { \
+        printf(format, __VA_ARGS__); \
+        printf("\n"); \
+    } while(false)
+    */
+#endif
+
+// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
+#if VMA_STATS_STRING_ENABLED
+    static inline void VmaUint32ToStr(char* VMA_NOT_NULL outStr, size_t strLen, uint32_t num)
+    {
+        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
+    }
+    static inline void VmaUint64ToStr(char* VMA_NOT_NULL outStr, size_t strLen, uint64_t num)
+    {
+        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
+    }
+    static inline void VmaPtrToStr(char* VMA_NOT_NULL outStr, size_t strLen, const void* ptr)
+    {
+        snprintf(outStr, strLen, "%p", ptr);
+    }
+#endif
+
+#ifndef VMA_MUTEX
+    class VmaMutex
+    {
+    public:
+        void Lock() { m_Mutex.lock(); }
+        void Unlock() { m_Mutex.unlock(); }
+        bool TryLock() { return m_Mutex.try_lock(); }
+    private:
+        std::mutex m_Mutex;
+    };
+    #define VMA_MUTEX VmaMutex
+#endif
+
+// Read-write mutex, where "read" is shared access, "write" is exclusive access.
+#ifndef VMA_RW_MUTEX
+    #if VMA_USE_STL_SHARED_MUTEX
+        // Use std::shared_mutex from C++17.
+        #include <shared_mutex>
+        class VmaRWMutex
+        {
+        public:
+            void LockRead() { m_Mutex.lock_shared(); }
+            void UnlockRead() { m_Mutex.unlock_shared(); }
+            bool TryLockRead() { return m_Mutex.try_lock_shared(); }
+            void LockWrite() { m_Mutex.lock(); }
+            void UnlockWrite() { m_Mutex.unlock(); }
+            bool TryLockWrite() { return m_Mutex.try_lock(); }
+        private:
+            std::shared_mutex m_Mutex;
+        };
+        #define VMA_RW_MUTEX VmaRWMutex
+    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
+        // Use SRWLOCK from WinAPI.
+        // Minimum supported client = Windows Vista, server = Windows Server 2008.
+        class VmaRWMutex
+        {
+        public:
+            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
+            void LockRead() { AcquireSRWLockShared(&m_Lock); }
+            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
+            bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
+            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
+            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
+            bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
+        private:
+            SRWLOCK m_Lock;
+        };
+        #define VMA_RW_MUTEX VmaRWMutex
+    #else
+        // Less efficient fallback: Use normal mutex.
+        class VmaRWMutex
+        {
+        public:
+            void LockRead() { m_Mutex.Lock(); }
+            void UnlockRead() { m_Mutex.Unlock(); }
+            bool TryLockRead() { return m_Mutex.TryLock(); }
+            void LockWrite() { m_Mutex.Lock(); }
+            void UnlockWrite() { m_Mutex.Unlock(); }
+            bool TryLockWrite() { return m_Mutex.TryLock(); }
+        private:
+            VMA_MUTEX m_Mutex;
+        };
+        #define VMA_RW_MUTEX VmaRWMutex
+    #endif // #if VMA_USE_STL_SHARED_MUTEX
+#endif // #ifndef VMA_RW_MUTEX
+
+/*
+If providing your own implementation, you need to implement a subset of std::atomic.
+*/
+#ifndef VMA_ATOMIC_UINT32
+    #include <atomic>
+    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
+#endif
+
+#ifndef VMA_ATOMIC_UINT64
+    #include <atomic>
+    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
+#endif
+
+#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
+    /**
+    Every allocation will have its own memory block.
+    Define to 1 for debugging purposes only.
+    */
+    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
+#endif
+
+#ifndef VMA_MIN_ALIGNMENT
+    /**
+    Minimum alignment of all allocations, in bytes.
+    Set to more than 1 for debugging purposes. Must be power of two.
+    */
+    #ifdef VMA_DEBUG_ALIGNMENT // Old name
+        #define VMA_MIN_ALIGNMENT VMA_DEBUG_ALIGNMENT
+    #else
+        #define VMA_MIN_ALIGNMENT (1)
+    #endif
+#endif
+
+#ifndef VMA_DEBUG_MARGIN
+    /**
+    Minimum margin after every allocation, in bytes.
+    Set nonzero for debugging purposes only.
+    */
+    #define VMA_DEBUG_MARGIN (0)
+#endif
+
+#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
+    /**
+    Define this macro to 1 to automatically fill new allocations and destroyed
+    allocations with some bit pattern.
+    */
+    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
+#endif
+
+#ifndef VMA_DEBUG_DETECT_CORRUPTION
+    /**
+    Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN to
+    enable writing magic value to the margin after every allocation and
+    validating it, so that memory corruptions (out-of-bounds writes) are detected.
+    */
+    #define VMA_DEBUG_DETECT_CORRUPTION (0)
+#endif
+
+#ifndef VMA_DEBUG_GLOBAL_MUTEX
+    /**
+    Set this to 1 for debugging purposes only, to enable single mutex protecting all
+    entry calls to the library. Can be useful for debugging multithreading issues.
+    */
+    #define VMA_DEBUG_GLOBAL_MUTEX (0)
+#endif
+
+#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
+    /**
+    Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
+    Set to more than 1 for debugging purposes only. Must be power of two.
+    */
+    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
+#endif
+
+#ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
+    /*
+    Set this to 1 to make VMA never exceed VkPhysicalDeviceLimits::maxMemoryAllocationCount
+    and return error instead of leaving up to Vulkan implementation what to do in such cases.
+    */
+    #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (0)
+#endif
+
+#ifndef VMA_SMALL_HEAP_MAX_SIZE
+    /// Maximum size of a memory heap in Vulkan to consider it "small".
+    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
+#endif
+
+#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
+    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
+    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
+#endif
+
+/*
+Mapping hysteresis is a logic that launches when vmaMapMemory/vmaUnmapMemory is called
+or a persistently mapped allocation is created and destroyed several times in a row.
+It keeps an additional +1 mapping of a device memory block to prevent calling actual
+vkMapMemory/vkUnmapMemory too many times, which may improve performance and help
+tools like RenderDoc.
+*/
+#ifndef VMA_MAPPING_HYSTERESIS_ENABLED
+    #define VMA_MAPPING_HYSTERESIS_ENABLED 1
+#endif
+
+#ifndef VMA_CLASS_NO_COPY
+    #define VMA_CLASS_NO_COPY(className) \
+        private: \
+            className(const className&) = delete; \
+            className& operator=(const className&) = delete;
+#endif
+
+#define VMA_VALIDATE(cond) do { if(!(cond)) { \
+        VMA_ASSERT(0 && "Validation failed: " #cond); \
+        return false; \
+    } } while(false)
+
+/*******************************************************************************
+END OF CONFIGURATION
+*/
+#endif // _VMA_CONFIGURATION
+
+
+static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
+static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
+// Decimal 2139416166, float NaN, little-endian binary 66 E6 84 7F.
+static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
+
+// Copy of some Vulkan definitions so we don't need to check their existence just to handle few constants.
+static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
+static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
+static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;
+static const uint32_t VK_IMAGE_CREATE_DISJOINT_BIT_COPY = 0x00000200;
+static const int32_t VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT_COPY = 1000158000;
+static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
+static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
+static const uint32_t VMA_VENDOR_ID_AMD = 4098;
+
+// This one is tricky. Vulkan specification defines this code as available since
+// Vulkan 1.0, but doesn't actually define it in Vulkan SDK earlier than 1.2.131.
+// See pull request #207.
+#define VK_ERROR_UNKNOWN_COPY ((VkResult)-13)
+
+
+#if VMA_STATS_STRING_ENABLED
+// Correspond to values of enum VmaSuballocationType.
+static const char* VMA_SUBALLOCATION_TYPE_NAMES[] =
+{
+    "FREE",
+    "UNKNOWN",
+    "BUFFER",
+    "IMAGE_UNKNOWN",
+    "IMAGE_LINEAR",
+    "IMAGE_OPTIMAL",
+};
+#endif
+
+static VkAllocationCallbacks VmaEmptyAllocationCallbacks =
+    { VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
+
+
+#ifndef _VMA_ENUM_DECLARATIONS
+
+enum VmaSuballocationType
+{
+    VMA_SUBALLOCATION_TYPE_FREE = 0,
+    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
+    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
+    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
+    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
+    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
+    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
+};
+
+enum VMA_CACHE_OPERATION
+{
+    VMA_CACHE_FLUSH,
+    VMA_CACHE_INVALIDATE
+};
+
+enum class VmaAllocationRequestType
+{
+    Normal,
+    TLSF,
+    // Used by "Linear" algorithm.
+    UpperAddress,
+    EndOf1st,
+    EndOf2nd,
+};
+
+#endif // _VMA_ENUM_DECLARATIONS
+
+#ifndef _VMA_FORWARD_DECLARATIONS
+// Opaque handle used by allocation algorithms to identify single allocation in any conforming way.
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VmaAllocHandle);
+
+struct VmaMutexLock;
+struct VmaMutexLockRead;
+struct VmaMutexLockWrite;
+
+template<typename T>
+struct AtomicTransactionalIncrement;
+
+template<typename T>
+struct VmaStlAllocator;
+
+template<typename T, typename AllocatorT>
+class VmaVector;
+
+template<typename T, typename AllocatorT, size_t N>
+class VmaSmallVector;
+
+template<typename T>
+class VmaPoolAllocator;
+
+template<typename T>
+struct VmaListItem;
+
+template<typename T>
+class VmaRawList;
+
+template<typename T, typename AllocatorT>
+class VmaList;
+
+template<typename ItemTypeTraits>
+class VmaIntrusiveLinkedList;
+
+// Unused in this version
+#if 0
+template<typename T1, typename T2>
+struct VmaPair;
+template<typename FirstT, typename SecondT>
+struct VmaPairFirstLess;
+
+template<typename KeyT, typename ValueT>
+class VmaMap;
+#endif
+
+#if VMA_STATS_STRING_ENABLED
+class VmaStringBuilder;
+class VmaJsonWriter;
+#endif
+
+class VmaDeviceMemoryBlock;
+
+struct VmaDedicatedAllocationListItemTraits;
+class VmaDedicatedAllocationList;
+
+struct VmaSuballocation;
+struct VmaSuballocationOffsetLess;
+struct VmaSuballocationOffsetGreater;
+struct VmaSuballocationItemSizeLess;
+
+typedef VmaList<VmaSuballocation, VmaStlAllocator<VmaSuballocation>> VmaSuballocationList;
+
+struct VmaAllocationRequest;
+
+class VmaBlockMetadata;
+class VmaBlockMetadata_Linear;
+class VmaBlockMetadata_TLSF;
+
+class VmaBlockVector;
+
+struct VmaPoolListItemTraits;
+
+struct VmaCurrentBudgetData;
+
+class VmaAllocationObjectAllocator;
+
+#endif // _VMA_FORWARD_DECLARATIONS
+
+
+#ifndef _VMA_FUNCTIONS
+
+/*
+Returns number of bits set to 1 in (v).
+
+On specific platforms and compilers you can use intrinsics like:
+
+Visual Studio:
+    return __popcnt(v);
+GCC, Clang:
+    return static_cast<uint32_t>(__builtin_popcount(v));
+
+Define macro VMA_COUNT_BITS_SET to provide your optimized implementation.
+But you need to check in runtime whether user's CPU supports these, as some old processors don't.
+*/
+static inline uint32_t VmaCountBitsSet(uint32_t v)
+{
+#if __cplusplus >= 202002L || _MSVC_LANG >= 202002L // C++20
+    return std::popcount(v);
+#else
+    uint32_t c = v - ((v >> 1) & 0x55555555);
+    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
+    c = ((c >> 4) + c) & 0x0F0F0F0F;
+    c = ((c >> 8) + c) & 0x00FF00FF;
+    c = ((c >> 16) + c) & 0x0000FFFF;
+    return c;
+#endif
+}
+
+static inline uint8_t VmaBitScanLSB(uint64_t mask)
+{
+#if defined(_MSC_VER) && defined(_WIN64)
+    unsigned long pos;
+    if (_BitScanForward64(&pos, mask))
+        return static_cast<uint8_t>(pos);
+    return UINT8_MAX;
+#elif defined __GNUC__ || defined __clang__
+    return static_cast<uint8_t>(__builtin_ffsll(mask)) - 1U;
+#else
+    uint8_t pos = 0;
+    uint64_t bit = 1;
+    do
+    {
+        if (mask & bit)
+            return pos;
+        bit <<= 1;
+    } while (pos++ < 63);
+    return UINT8_MAX;
+#endif
+}
+
+static inline uint8_t VmaBitScanLSB(uint32_t mask)
+{
+#ifdef _MSC_VER
+    unsigned long pos;
+    if (_BitScanForward(&pos, mask))
+        return static_cast<uint8_t>(pos);
+    return UINT8_MAX;
+#elif defined __GNUC__ || defined __clang__
+    return static_cast<uint8_t>(__builtin_ffs(mask)) - 1U;
+#else
+    uint8_t pos = 0;
+    uint32_t bit = 1;
+    do
+    {
+        if (mask & bit)
+            return pos;
+        bit <<= 1;
+    } while (pos++ < 31);
+    return UINT8_MAX;
+#endif
+}
+
+static inline uint8_t VmaBitScanMSB(uint64_t mask)
+{
+#if defined(_MSC_VER) && defined(_WIN64)
+    unsigned long pos;
+    if (_BitScanReverse64(&pos, mask))
+        return static_cast<uint8_t>(pos);
+#elif defined __GNUC__ || defined __clang__
+    if (mask)
+        return 63 - static_cast<uint8_t>(__builtin_clzll(mask));
+#else
+    uint8_t pos = 63;
+    uint64_t bit = 1ULL << 63;
+    do
+    {
+        if (mask & bit)
+            return pos;
+        bit >>= 1;
+    } while (pos-- > 0);
+#endif
+    return UINT8_MAX;
+}
+
+static inline uint8_t VmaBitScanMSB(uint32_t mask)
+{
+#ifdef _MSC_VER
+    unsigned long pos;
+    if (_BitScanReverse(&pos, mask))
+        return static_cast<uint8_t>(pos);
+#elif defined __GNUC__ || defined __clang__
+    if (mask)
+        return 31 - static_cast<uint8_t>(__builtin_clz(mask));
+#else
+    uint8_t pos = 31;
+    uint32_t bit = 1UL << 31;
+    do
+    {
+        if (mask & bit)
+            return pos;
+        bit >>= 1;
+    } while (pos-- > 0);
+#endif
+    return UINT8_MAX;
+}
+
+/*
+Returns true if given number is a power of two.
+T must be unsigned integer number or signed integer but always nonnegative.
+For 0 returns true.
+*/
+template <typename T>
+inline bool VmaIsPow2(T x)
+{
+    return (x & (x - 1)) == 0;
+}
+
+// Aligns given value up to nearest multiple of align value. For example: VmaAlignUp(11, 8) = 16.
+// Use types like uint32_t, uint64_t as T.
+template <typename T>
+static inline T VmaAlignUp(T val, T alignment)
+{
+    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
+    return (val + alignment - 1) & ~(alignment - 1);
+}
+
+// Aligns given value down to nearest multiple of align value. For example: VmaAlignDown(11, 8) = 8.
+// Use types like uint32_t, uint64_t as T.
+template <typename T>
+static inline T VmaAlignDown(T val, T alignment)
+{
+    VMA_HEAVY_ASSERT(VmaIsPow2(alignment));
+    return val & ~(alignment - 1);
+}
+
+// Division with mathematical rounding to nearest number.
+template <typename T>
+static inline T VmaRoundDiv(T x, T y)
+{
+    return (x + (y / (T)2)) / y;
+}
+
+// Divide by 'y' and round up to nearest integer.
+template <typename T>
+static inline T VmaDivideRoundingUp(T x, T y)
+{
+    return (x + y - (T)1) / y;
+}
+
+// Returns smallest power of 2 greater or equal to v.
+static inline uint32_t VmaNextPow2(uint32_t v)
+{
+    v--;
+    v |= v >> 1;
+    v |= v >> 2;
+    v |= v >> 4;
+    v |= v >> 8;
+    v |= v >> 16;
+    v++;
+    return v;
+}
+
+static inline uint64_t VmaNextPow2(uint64_t v)
+{
+    v--;
+    v |= v >> 1;
+    v |= v >> 2;
+    v |= v >> 4;
+    v |= v >> 8;
+    v |= v >> 16;
+    v |= v >> 32;
+    v++;
+    return v;
+}
+
+// Returns largest power of 2 less or equal to v.
+static inline uint32_t VmaPrevPow2(uint32_t v)
+{
+    v |= v >> 1;
+    v |= v >> 2;
+    v |= v >> 4;
+    v |= v >> 8;
+    v |= v >> 16;
+    v = v ^ (v >> 1);
+    return v;
+}
+
+static inline uint64_t VmaPrevPow2(uint64_t v)
+{
+    v |= v >> 1;
+    v |= v >> 2;
+    v |= v >> 4;
+    v |= v >> 8;
+    v |= v >> 16;
+    v |= v >> 32;
+    v = v ^ (v >> 1);
+    return v;
+}
+
+static inline bool VmaStrIsEmpty(const char* pStr)
+{
+    return pStr == VMA_NULL || *pStr == '\0';
+}
+
+/*
+Returns true if two memory blocks occupy overlapping pages.
+ResourceA must be in less memory offset than ResourceB.
+
+Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
+chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
+*/
+static inline bool VmaBlocksOnSamePage(
+    VkDeviceSize resourceAOffset,
+    VkDeviceSize resourceASize,
+    VkDeviceSize resourceBOffset,
+    VkDeviceSize pageSize)
+{
+    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
+    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
+    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
+    VkDeviceSize resourceBStart = resourceBOffset;
+    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
+    return resourceAEndPage == resourceBStartPage;
+}
+
+/*
+Returns true if given suballocation types could conflict and must respect
+VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
+or linear image and another one is optimal image. If type is unknown, behave
+conservatively.
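+For example, a `VkBuffer` suballocated next to an optimal-tiling `VkImage`
+conflicts and the two must be separated by bufferImageGranularity, while two
+buffers, or a buffer and a linear image, never conflict.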
+*/
+static inline bool VmaIsBufferImageGranularityConflict(
+    VmaSuballocationType suballocType1,
+    VmaSuballocationType suballocType2)
+{
+    if (suballocType1 > suballocType2)
+    {
+        VMA_SWAP(suballocType1, suballocType2);
+    }
+
+    switch (suballocType1)
+    {
+    case VMA_SUBALLOCATION_TYPE_FREE:
+        return false;
+    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
+        return true;
+    case VMA_SUBALLOCATION_TYPE_BUFFER:
+        return
+            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
+            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
+    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
+        return
+            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
+            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
+            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
+    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
+        return
+            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
+    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
+        return false;
+    default:
+        VMA_ASSERT(0);
+        return true;
+    }
+}
+
+static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
+{
+#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
+    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
+    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
+    for (size_t i = 0; i < numberCount; ++i, ++pDst)
+    {
+        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
+    }
+#else
+    // no-op
+#endif
+}
+
+static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
+{
+#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
+    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
+    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
+    for (size_t i = 0; i < numberCount; ++i, ++pSrc)
+    {
+        if (*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
+        {
+            return false;
+        }
+    }
+#endif
+    return true;
+}
+
+/*
+Fills structure with parameters of an example buffer to be used for transfers
+during GPU memory defragmentation.
+*/
+static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
+{
+    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
+    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE; // Example size.
+}
+
+
+/*
+Performs binary search and returns iterator to first element that is greater or
+equal to (key), according to comparison (cmp).
+
+Cmp should return true if first argument is less than second argument.
+
+Returned value is the found element, if present in the collection or place where
+new element with value (key) should be inserted.
+*/
+template <typename CmpLess, typename IterT, typename KeyT>
+static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT& key, const CmpLess& cmp)
+{
+    size_t down = 0, up = (end - beg);
+    while (down < up)
+    {
+        const size_t mid = down + (up - down) / 2; // Overflow-safe midpoint calculation
+        if (cmp(*(beg + mid), key))
+        {
+            down = mid + 1;
+        }
+        else
+        {
+            up = mid;
+        }
+    }
+    return beg + down;
+}
+
+template<typename CmpLess, typename IterT, typename KeyT>
+IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
+{
+    IterT it = VmaBinaryFindFirstNotLess(
+        beg, end, value, cmp);
+    if (it == end ||
+        (!cmp(*it, value) && !cmp(value, *it)))
+    {
+        return it;
+    }
+    return end;
+}
+
+/*
+Returns true if all pointers in the array are not-null and unique.
+Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT.
+T must be pointer type, e.g. VmaAllocation, VmaPool.
+*/
+template<typename T>
+static bool VmaValidatePointerArray(uint32_t count, const T* arr)
+{
+    for (uint32_t i = 0; i < count; ++i)
+    {
+        const T iPtr = arr[i];
+        if (iPtr == VMA_NULL)
+        {
+            return false;
+        }
+        for (uint32_t j = i + 1; j < count; ++j)
+        {
+            if (iPtr == arr[j])
+            {
+                return false;
+            }
+        }
+    }
+    return true;
+}
+
+template<typename MainT, typename NewT>
+static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
+{
+    newStruct->pNext = mainStruct->pNext;
+    mainStruct->pNext = newStruct;
+}
+
+// This is the main algorithm that guides the selection of a memory type best for an allocation -
+// converts usage to required/preferred/not preferred flags.
+static bool FindMemoryPreferences(
+    bool isIntegratedGPU,
+    const VmaAllocationCreateInfo& allocCreateInfo,
+    VkFlags bufImgUsage, // VkBufferCreateInfo::usage or VkImageCreateInfo::usage. UINT32_MAX if unknown.
+    VkMemoryPropertyFlags& outRequiredFlags,
+    VkMemoryPropertyFlags& outPreferredFlags,
+    VkMemoryPropertyFlags& outNotPreferredFlags)
+{
+    outRequiredFlags = allocCreateInfo.requiredFlags;
+    outPreferredFlags = allocCreateInfo.preferredFlags;
+    outNotPreferredFlags = 0;
+
+    switch(allocCreateInfo.usage)
+    {
+    case VMA_MEMORY_USAGE_UNKNOWN:
+        break;
+    case VMA_MEMORY_USAGE_GPU_ONLY:
+        if(!isIntegratedGPU || (outPreferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
+        {
+            outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+        }
+        break;
+    case VMA_MEMORY_USAGE_CPU_ONLY:
+        outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+        break;
+    case VMA_MEMORY_USAGE_CPU_TO_GPU:
+        outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+        if(!isIntegratedGPU || (outPreferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
+        {
+            outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+        }
+        break;
+    case VMA_MEMORY_USAGE_GPU_TO_CPU:
+        outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+        outPreferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
+        break;
+    case VMA_MEMORY_USAGE_CPU_COPY:
+        outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+        break;
+    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
+        outRequiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
+        break;
+    case VMA_MEMORY_USAGE_AUTO:
+    case VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE:
+    case VMA_MEMORY_USAGE_AUTO_PREFER_HOST:
+    {
+        if(bufImgUsage == UINT32_MAX)
+        {
+            VMA_ASSERT(0 && "VMA_MEMORY_USAGE_AUTO* values can only be used with functions like vmaCreateBuffer, vmaCreateImage so that the details of the created resource are known.");
+            return false;
+        }
+        // This relies on values of VK_IMAGE_USAGE_TRANSFER* being the same as VK_BUFFER_USAGE_TRANSFER*.
+        const bool deviceAccess = (bufImgUsage & ~(VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT)) != 0;
+        const bool hostAccessSequentialWrite = (allocCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT) != 0;
+        const bool hostAccessRandom = (allocCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT) != 0;
+        const bool hostAccessAllowTransferInstead = (allocCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT) != 0;
+        const bool preferDevice = allocCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE;
+        const bool preferHost = allocCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_HOST;
+
+        // CPU random access - e.g. a buffer written to or transferred from GPU to read back on CPU.
+        if(hostAccessRandom)
+        {
+            if(!isIntegratedGPU && deviceAccess && hostAccessAllowTransferInstead && !preferHost)
+            {
+                // Nice if it will end up in HOST_VISIBLE, but more importantly prefer DEVICE_LOCAL.
+                // Omitting HOST_VISIBLE here is intentional.
+                // In case there is DEVICE_LOCAL | HOST_VISIBLE | HOST_CACHED, it will pick that one.
+                // Otherwise, this will give same weight to DEVICE_LOCAL as HOST_VISIBLE | HOST_CACHED and select the former if it occurs first on the list.
+                outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
+            }
+            else
+            {
+                // Always CPU memory, cached.
+                outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
+            }
+        }
+        // CPU sequential write - may be CPU or host-visible GPU memory, uncached and write-combined.
+        else if(hostAccessSequentialWrite)
+        {
+            // Want uncached and write-combined.
+            outNotPreferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
+
+            if(!isIntegratedGPU && deviceAccess && hostAccessAllowTransferInstead && !preferHost)
+            {
+                outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+            }
+            else
+            {
+                outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+                // Direct GPU access, CPU sequential write (e.g. a dynamic uniform buffer updated every frame)
+                if(deviceAccess)
+                {
+                    // Could go to CPU memory or GPU BAR/unified. Up to the user to decide. If no preference, choose GPU memory.
+                    if(preferHost)
+                        outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+                    else
+                        outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+                }
+                // GPU no direct access, CPU sequential write (e.g. an upload buffer to be transferred to the GPU)
+                else
+                {
+                    // Could go to CPU memory or GPU BAR/unified. Up to the user to decide. If no preference, choose CPU memory.
+                    if(preferDevice)
+                        outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+                    else
+                        outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+                }
+            }
+        }
+        // No CPU access
+        else
+        {
+            // GPU access, no CPU access (e.g. a color attachment image) - prefer GPU memory
+            if(deviceAccess)
+            {
+                // ...unless there is a clear preference from the user not to do so.
+                if(preferHost)
+                    outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+                else
+                    outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+            }
+            // No direct GPU access, no CPU access, just transfers.
+            // It may be staging copy intended for e.g. preserving image for next frame (then better GPU memory) or
+            // a "swap file" copy to free some GPU memory (then better CPU memory).
+            // Up to the user to decide. If no preference, assume the former and choose GPU memory.
+            if(preferHost)
+                outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+            else
+                outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+        }
+        break;
+    }
+    default:
+        VMA_ASSERT(0);
+    }
+
+    // Avoid DEVICE_COHERENT unless explicitly requested.
+    if(((allocCreateInfo.requiredFlags | allocCreateInfo.preferredFlags) &
+        (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
+    {
+        outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY;
+    }
+
+    return true;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Memory allocation
+
+static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
+{
+    void* result = VMA_NULL;
+    if ((pAllocationCallbacks != VMA_NULL) &&
+        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
+    {
+        result = (*pAllocationCallbacks->pfnAllocation)(
+            pAllocationCallbacks->pUserData,
+            size,
+            alignment,
+            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+    }
+    else
+    {
+        result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
+    }
+    VMA_ASSERT(result != VMA_NULL && "CPU memory allocation failed.");
+    return result;
+}
+
+static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
+{
+    if ((pAllocationCallbacks != VMA_NULL) &&
+        (pAllocationCallbacks->pfnFree != VMA_NULL))
+    {
+        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
+    }
+    else
+    {
+        VMA_SYSTEM_ALIGNED_FREE(ptr);
+    }
+}
+
+template<typename T>
+static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
+{
+    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
+}
+
+template<typename T>
+static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
+{
+    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
+}
+
+#define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
+
+#define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
+
+template<typename T>
+static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
+{
+    ptr->~T();
+    VmaFree(pAllocationCallbacks, ptr);
+}
+
+template<typename T>
+static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
+{
+    if (ptr != VMA_NULL)
+    {
+        for (size_t i = count; i--; )
+        {
+            ptr[i].~T();
+        }
+        VmaFree(pAllocationCallbacks, ptr);
+    }
+}
+
+static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
+{
+    if (srcStr != VMA_NULL)
+    {
+        const size_t len = strlen(srcStr);
+        char* const result = vma_new_array(allocs, char, len + 1);
+        memcpy(result, srcStr, len + 1);
+        return result;
+    }
+    return VMA_NULL;
+}
+
+#if VMA_STATS_STRING_ENABLED
+static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr, size_t strLen)
+{
+    if (srcStr != VMA_NULL)
+    {
+        char* const result = vma_new_array(allocs, char, strLen + 1);
+        memcpy(result, srcStr, strLen);
+        result[strLen] = '\0';
+        return result;
+    }
+    return VMA_NULL;
+}
+#endif // VMA_STATS_STRING_ENABLED
+
+static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
+{
+    if (str != VMA_NULL)
+    {
+        const size_t len = strlen(str);
+        vma_delete_array(allocs, str, len + 1);
+    }
+}
+
+template<typename CmpLess, typename VectorT>
+size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
+{
+    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
+        vector.data(),
+        vector.data() + vector.size(),
+        value,
+        CmpLess()) - vector.data();
+    VmaVectorInsert(vector, indexToInsert, value);
+    return indexToInsert;
+}
+
+template<typename CmpLess, typename VectorT>
+bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
+{
+    CmpLess comparator;
+    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
+        vector.begin(),
+        vector.end(),
+        value,
+        comparator);
+    if ((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
+    {
+        size_t indexToRemove = it - vector.begin();
+        VmaVectorRemove(vector, indexToRemove);
+        return true;
+    }
+    return false;
+}
+#endif // _VMA_FUNCTIONS
+
+#ifndef _VMA_STATISTICS_FUNCTIONS
+
+static void VmaClearStatistics(VmaStatistics& outStats)
+{
+    outStats.blockCount = 0;
+    outStats.allocationCount = 0;
+    outStats.blockBytes = 0;
+    outStats.allocationBytes = 0;
+}
+
+static void VmaAddStatistics(VmaStatistics& inoutStats, const VmaStatistics& src)
+{
+    inoutStats.blockCount += src.blockCount;
+    inoutStats.allocationCount += src.allocationCount;
+    inoutStats.blockBytes += src.blockBytes;
+    inoutStats.allocationBytes += src.allocationBytes;
+}
+
+static void VmaClearDetailedStatistics(VmaDetailedStatistics& outStats)
+{
+    VmaClearStatistics(outStats.statistics);
+    outStats.unusedRangeCount = 0;
+    outStats.allocationSizeMin = VK_WHOLE_SIZE;
+    outStats.allocationSizeMax = 0;
+    outStats.unusedRangeSizeMin = VK_WHOLE_SIZE;
+    outStats.unusedRangeSizeMax = 0;
+}
+
+static void VmaAddDetailedStatisticsAllocation(VmaDetailedStatistics& inoutStats, VkDeviceSize size)
+{
+    inoutStats.statistics.allocationCount++;
+    inoutStats.statistics.allocationBytes += size;
+    inoutStats.allocationSizeMin = VMA_MIN(inoutStats.allocationSizeMin, size);
+    inoutStats.allocationSizeMax = VMA_MAX(inoutStats.allocationSizeMax, size);
+}
+
+static void VmaAddDetailedStatisticsUnusedRange(VmaDetailedStatistics& inoutStats, VkDeviceSize size)
+{
+    inoutStats.unusedRangeCount++;
+    inoutStats.unusedRangeSizeMin = VMA_MIN(inoutStats.unusedRangeSizeMin, size);
+    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, size);
+}
+
+static void VmaAddDetailedStatistics(VmaDetailedStatistics& inoutStats, const VmaDetailedStatistics& src)
+{
+    VmaAddStatistics(inoutStats.statistics, src.statistics);
+    inoutStats.unusedRangeCount += src.unusedRangeCount;
+    inoutStats.allocationSizeMin = VMA_MIN(inoutStats.allocationSizeMin, src.allocationSizeMin);
+    inoutStats.allocationSizeMax = VMA_MAX(inoutStats.allocationSizeMax, src.allocationSizeMax);
+    inoutStats.unusedRangeSizeMin = VMA_MIN(inoutStats.unusedRangeSizeMin, src.unusedRangeSizeMin);
+    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, src.unusedRangeSizeMax);
+}
+
+#endif // _VMA_STATISTICS_FUNCTIONS
+
+#ifndef _VMA_MUTEX_LOCK
+// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
+struct VmaMutexLock
+{
+    VMA_CLASS_NO_COPY(VmaMutexLock)
+public:
+    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
+        m_pMutex(useMutex ? &mutex : VMA_NULL)
+    {
+        if (m_pMutex) { m_pMutex->Lock(); }
+    }
+    ~VmaMutexLock() { if (m_pMutex) { m_pMutex->Unlock(); } }
+
+private:
+    VMA_MUTEX* m_pMutex;
+};
+
+// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
+struct VmaMutexLockRead
+{
+    VMA_CLASS_NO_COPY(VmaMutexLockRead)
+public:
+    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
+        m_pMutex(useMutex ? &mutex : VMA_NULL)
+    {
+        if (m_pMutex) { m_pMutex->LockRead(); }
+    }
+    ~VmaMutexLockRead() { if (m_pMutex) { m_pMutex->UnlockRead(); } }
+
+private:
+    VMA_RW_MUTEX* m_pMutex;
+};
+
+// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
+struct VmaMutexLockWrite
+{
+    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
+public:
+    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex)
+        : m_pMutex(useMutex ? &mutex : VMA_NULL)
+    {
+        if (m_pMutex) { m_pMutex->LockWrite(); }
+    }
+    ~VmaMutexLockWrite() { if (m_pMutex) { m_pMutex->UnlockWrite(); } }
+
+private:
+    VMA_RW_MUTEX* m_pMutex;
+};
+
+#if VMA_DEBUG_GLOBAL_MUTEX
+    static VMA_MUTEX gDebugGlobalMutex;
+    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
+#else
+    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
+#endif
+#endif // _VMA_MUTEX_LOCK
+
+#ifndef _VMA_ATOMIC_TRANSACTIONAL_INCREMENT
+// An object that increments given atomic but decrements it back in the destructor unless Commit() is called.
+template<typename T>
+struct AtomicTransactionalIncrement
+{
+public:
+    typedef std::atomic<T> AtomicT;
+
+    ~AtomicTransactionalIncrement()
+    {
+        if(m_Atomic)
+            --(*m_Atomic);
+    }
+
+    void Commit() { m_Atomic = nullptr; }
+    T Increment(AtomicT* atomic)
+    {
+        m_Atomic = atomic;
+        return m_Atomic->fetch_add(1);
+    }
+
+private:
+    AtomicT* m_Atomic = nullptr;
+};
+#endif // _VMA_ATOMIC_TRANSACTIONAL_INCREMENT
+
+#ifndef _VMA_STL_ALLOCATOR
+// STL-compatible allocator.
+template<typename T>
+struct VmaStlAllocator
+{
+    const VkAllocationCallbacks* const m_pCallbacks;
+    typedef T value_type;
+
+    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) {}
+    template<typename U>
+    VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) {}
+    VmaStlAllocator(const VmaStlAllocator&) = default;
+    VmaStlAllocator& operator=(const VmaStlAllocator&) = delete;
+
+    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
+    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }
+
+    template<typename U>
+    bool operator==(const VmaStlAllocator<U>& rhs) const
+    {
+        return m_pCallbacks == rhs.m_pCallbacks;
+    }
+    template<typename U>
+    bool operator!=(const VmaStlAllocator<U>& rhs) const
+    {
+        return m_pCallbacks != rhs.m_pCallbacks;
+    }
+};
+#endif // _VMA_STL_ALLOCATOR
+
+#ifndef _VMA_VECTOR
+/* Class with interface compatible with subset of std::vector.
+T must be POD because constructors and destructors are not called and memcpy is
+used for these objects. */
+template<typename T, typename AllocatorT>
+class VmaVector
+{
+public:
+    typedef T value_type;
+    typedef T* iterator;
+    typedef const T* const_iterator;
+
+    VmaVector(const AllocatorT& allocator);
+    VmaVector(size_t count, const AllocatorT& allocator);
+    // This version of the constructor is here for compatibility with pre-C++14 std::vector.
+    // value is unused.
+    VmaVector(size_t count, const T& value, const AllocatorT& allocator) : VmaVector(count, allocator) {}
+    VmaVector(const VmaVector& src);
+    VmaVector& operator=(const VmaVector& rhs);
+    ~VmaVector() { VmaFree(m_Allocator.m_pCallbacks, m_pArray); }
+
+    bool empty() const { return m_Count == 0; }
+    size_t size() const { return m_Count; }
+    T* data() { return m_pArray; }
+    T& front() { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[0]; }
+    T& back() { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[m_Count - 1]; }
+    const T* data() const { return m_pArray; }
+    const T& front() const { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[0]; }
+    const T& back() const { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[m_Count - 1]; }
+
+    iterator begin() { return m_pArray; }
+    iterator end() { return m_pArray + m_Count; }
+    const_iterator cbegin() const { return m_pArray; }
+    const_iterator cend() const { return m_pArray + m_Count; }
+    const_iterator begin() const { return cbegin(); }
+    const_iterator end() const { return cend(); }
+
+    void pop_front() { VMA_HEAVY_ASSERT(m_Count > 0); remove(0); }
+    void pop_back() { VMA_HEAVY_ASSERT(m_Count > 0); resize(size() - 1); }
+    void push_front(const T& src) { insert(0, src); }
+
+    void push_back(const T& src);
+    void reserve(size_t newCapacity, bool freeMemory = false);
+    void resize(size_t newCount);
+    void clear() { resize(0); }
+    void shrink_to_fit();
+    void insert(size_t index, const T& src);
+    void remove(size_t index);
+
+    T& operator[](size_t index) { VMA_HEAVY_ASSERT(index < m_Count); return m_pArray[index]; }
+    const T& operator[](size_t index) const { VMA_HEAVY_ASSERT(index < m_Count); return m_pArray[index]; }
+
+private:
+    AllocatorT m_Allocator;
+    T* m_pArray;
+    size_t m_Count;
+    size_t m_Capacity;
+};
+
+#ifndef _VMA_VECTOR_FUNCTIONS
+template<typename T, typename AllocatorT>
+VmaVector<T, AllocatorT>::VmaVector(const AllocatorT& allocator)
+    : m_Allocator(allocator),
+    m_pArray(VMA_NULL),
+    m_Count(0),
+    m_Capacity(0) {}
+
+template<typename T, typename AllocatorT>
+VmaVector<T, AllocatorT>::VmaVector(size_t count, const AllocatorT& allocator)
+    : m_Allocator(allocator),
+    m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
+    m_Count(count),
+    m_Capacity(count) {}
+
+template<typename T, typename AllocatorT>
+VmaVector<T, AllocatorT>::VmaVector(const VmaVector& src)
+    : m_Allocator(src.m_Allocator),
+    m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
+    m_Count(src.m_Count),
+    m_Capacity(src.m_Count)
+{
+    if (m_Count != 0)
+    {
+        memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
+    }
+}
+
+template<typename T, typename AllocatorT>
+VmaVector<T, AllocatorT>& VmaVector<T, AllocatorT>::operator=(const VmaVector& rhs)
+{
+    if (&rhs != this)
+    {
+        resize(rhs.m_Count);
+        if (m_Count != 0)
+        {
+            memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
+        }
+    }
+    return *this;
+}
+
+template<typename T, typename AllocatorT>
+void VmaVector<T, AllocatorT>::push_back(const T& src)
+{
+    const size_t newIndex = size();
+    resize(newIndex + 1);
+    m_pArray[newIndex] = src;
+}
+
+template<typename T, typename AllocatorT>
+void VmaVector<T, AllocatorT>::reserve(size_t newCapacity, bool freeMemory)
+{
+    newCapacity = VMA_MAX(newCapacity, m_Count);
+
+    if ((newCapacity < m_Capacity) && !freeMemory)
+    {
+        newCapacity = m_Capacity;
+    }
+
+    if (newCapacity != m_Capacity)
+    {
+        T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
+        if (m_Count != 0)
+        {
+            memcpy(newArray, m_pArray, m_Count * sizeof(T));
+        }
+        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
+        m_Capacity = newCapacity;
+        m_pArray = newArray;
+    }
+}
+
+template<typename T, typename AllocatorT>
+void VmaVector<T, AllocatorT>::resize(size_t newCount)
+{
+    size_t newCapacity = m_Capacity;
+    if (newCount > m_Capacity)
+    {
+        newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
+    }
+
+    if (newCapacity != m_Capacity)
+    {
+        T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
+        const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
+        if (elementsToCopy != 0)
+        {
+            memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
+        }
+        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
+        m_Capacity = newCapacity;
+        m_pArray = newArray;
+    }
+
+    m_Count = newCount;
+}
+
+template<typename T, typename AllocatorT>
+void VmaVector<T, AllocatorT>::shrink_to_fit()
+{
+    if (m_Capacity > m_Count)
+    {
+        T* newArray = VMA_NULL;
+        if (m_Count > 0)
+        {
+            newArray = VmaAllocateArray<T>(m_Allocator.m_pCallbacks, m_Count);
+            memcpy(newArray, m_pArray, m_Count * sizeof(T));
+        }
+        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
+        m_Capacity = m_Count;
+        m_pArray = newArray;
+    }
+}
+
+template<typename T, typename AllocatorT>
+void VmaVector<T, AllocatorT>::insert(size_t index, const T& src)
+{
+    VMA_HEAVY_ASSERT(index <= m_Count);
+    const size_t oldCount = size();
+    resize(oldCount + 1);
+    if (index < oldCount)
+    {
+        memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
+    }
+    m_pArray[index] = src;
+}
+
+template<typename T, typename AllocatorT>
+void VmaVector<T, AllocatorT>::remove(size_t index)
+{
+    VMA_HEAVY_ASSERT(index < m_Count);
+    const size_t oldCount = size();
+    if (index < oldCount - 1)
+    {
+        memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
+    }
+    resize(oldCount - 1);
+}
+#endif // _VMA_VECTOR_FUNCTIONS
+
+template<typename T, typename AllocatorT>
+static void VmaVectorInsert(VmaVector<T, AllocatorT>& vec, size_t index, const T& item)
+{
+    vec.insert(index, item);
+}
+
+template<typename T, typename AllocatorT>
+static void VmaVectorRemove(VmaVector<T, AllocatorT>& vec, size_t index)
+{
+    vec.remove(index);
+}
+#endif // _VMA_VECTOR
+
+#ifndef _VMA_SMALL_VECTOR
+/*
+This is a vector (a variable-sized array), optimized for the case when the array is small.
+
+It contains some number of elements in-place, which allows it to avoid heap allocation
+when the actual number of elements is below that threshold. This allows normal "small"
+cases to be fast without losing generality for large inputs.
+*/
+template<typename T, typename AllocatorT, size_t N>
+class VmaSmallVector
+{
+public:
+    typedef T value_type;
+    typedef T* iterator;
+
+    VmaSmallVector(const AllocatorT& allocator);
+    VmaSmallVector(size_t count, const AllocatorT& allocator);
+    template<typename SrcT, typename SrcAllocatorT, size_t SrcN>
+    VmaSmallVector(const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>&) = delete;
+    template<typename SrcT, typename SrcAllocatorT, size_t SrcN>
+    VmaSmallVector& operator=(const VmaSmallVector<SrcT, SrcAllocatorT, SrcN>&) = delete;
+    ~VmaSmallVector() = default;
+
+    bool empty() const { return m_Count == 0; }
+    size_t size() const { return m_Count; }
+    T* data() { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
+    T& front() { VMA_HEAVY_ASSERT(m_Count > 0); return data()[0]; }
+    T& back() { VMA_HEAVY_ASSERT(m_Count > 0); return data()[m_Count - 1]; }
+    const T* data() const { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; }
+    const T& front() const { VMA_HEAVY_ASSERT(m_Count > 0); return data()[0]; }
+    const T& back() const { VMA_HEAVY_ASSERT(m_Count > 0); return data()[m_Count - 1]; }
+
+    iterator begin() { return data(); }
+    iterator end() { return data() + m_Count; }
+
+    void pop_front() { VMA_HEAVY_ASSERT(m_Count > 0); remove(0); }
+    void pop_back() { VMA_HEAVY_ASSERT(m_Count > 0); resize(size() - 1); }
+    void push_front(const T& src) { insert(0, src); }
+
+    void push_back(const T& src);
+    void resize(size_t newCount, bool freeMemory = false);
+    void clear(bool freeMemory = false);
+    void insert(size_t index, const T& src);
+    void remove(size_t index);
+
+    T& operator[](size_t index) { VMA_HEAVY_ASSERT(index < m_Count); return data()[index]; }
+    const T& operator[](size_t index) const { VMA_HEAVY_ASSERT(index < m_Count); return data()[index]; }
+
+private:
+    size_t m_Count;
+    T m_StaticArray[N]; // Used when m_Size <= N
+    VmaVector<T, AllocatorT> m_DynamicArray; // Used when m_Size > N
+};
+
+#ifndef _VMA_SMALL_VECTOR_FUNCTIONS
+template<typename T, typename AllocatorT, size_t N>
+VmaSmallVector<T, AllocatorT, N>::VmaSmallVector(const AllocatorT& allocator)
+    : m_Count(0),
+    m_DynamicArray(allocator) {}
+
+template<typename T, typename AllocatorT, size_t N>
+VmaSmallVector<T, AllocatorT, N>::VmaSmallVector(size_t count, const AllocatorT& allocator)
+    : m_Count(count),
+    m_DynamicArray(count > N ? count : 0, allocator) {}
+
+template<typename T, typename AllocatorT, size_t N>
+void VmaSmallVector<T, AllocatorT, N>::push_back(const T& src)
+{
+    const size_t newIndex = size();
+    resize(newIndex + 1);
+    data()[newIndex] = src;
+}
+
+template<typename T, typename AllocatorT, size_t N>
+void VmaSmallVector<T, AllocatorT, N>::resize(size_t newCount, bool freeMemory)
+{
+    if (newCount > N && m_Count > N)
+    {
+        // Any direction, staying in m_DynamicArray
+        m_DynamicArray.resize(newCount);
+        if (freeMemory)
+        {
+            m_DynamicArray.shrink_to_fit();
+        }
+    }
+    else if (newCount > N && m_Count <= N)
+    {
+        // Growing, moving from m_StaticArray to m_DynamicArray
+        m_DynamicArray.resize(newCount);
+        if (m_Count > 0)
+        {
+            memcpy(m_DynamicArray.data(), m_StaticArray, m_Count * sizeof(T));
+        }
+    }
+    else if (newCount <= N && m_Count > N)
+    {
+        // Shrinking, moving from m_DynamicArray to m_StaticArray
+        if (newCount > 0)
+        {
+            memcpy(m_StaticArray, m_DynamicArray.data(), newCount * sizeof(T));
+        }
+        m_DynamicArray.resize(0);
+        if (freeMemory)
+        {
+            m_DynamicArray.shrink_to_fit();
+        }
+    }
+    else
+    {
+        // Any direction, staying in m_StaticArray - nothing to do here
+    }
+    m_Count = newCount;
+}
+
+template<typename T, typename AllocatorT, size_t N>
+void VmaSmallVector<T, AllocatorT, N>::clear(bool freeMemory)
+{
+    m_DynamicArray.clear();
+    if (freeMemory)
+    {
+        m_DynamicArray.shrink_to_fit();
+    }
+    m_Count = 0;
+}
+
+template<typename T, typename AllocatorT, size_t N>
+void VmaSmallVector<T, AllocatorT, N>::insert(size_t index, const T& src)
+{
+    VMA_HEAVY_ASSERT(index <= m_Count);
+    const size_t oldCount = size();
+    resize(oldCount + 1);
+    T* const dataPtr = data();
+    if (index < oldCount)
+    {
+        // I know, this could be more optimal for case where memmove can be memcpy directly from m_StaticArray to m_DynamicArray.
+        memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) * sizeof(T));
+    }
+    dataPtr[index] = src;
+}
+
+template<typename T, typename AllocatorT, size_t N>
+void VmaSmallVector<T, AllocatorT, N>::remove(size_t index)
+{
+    VMA_HEAVY_ASSERT(index < m_Count);
+    const size_t oldCount = size();
+    if (index < oldCount - 1)
+    {
+        // I know, this could be more optimal for case where memmove can be memcpy directly from m_DynamicArray to m_StaticArray.
+ T* const dataPtr = data(); + memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) * sizeof(T)); + } + resize(oldCount - 1); +} +#endif // _VMA_SMALL_VECTOR_FUNCTIONS +#endif // _VMA_SMALL_VECTOR + +#ifndef _VMA_POOL_ALLOCATOR +/* +Allocator for objects of type T using a list of arrays (pools) to speed up +allocation. Number of elements that can be allocated is not bounded because +allocator can create multiple blocks. +*/ +template +class VmaPoolAllocator +{ + VMA_CLASS_NO_COPY(VmaPoolAllocator) +public: + VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity); + ~VmaPoolAllocator(); + template T* Alloc(Types&&... args); + void Free(T* ptr); + +private: + union Item + { + uint32_t NextFreeIndex; + alignas(T) char Value[sizeof(T)]; + }; + struct ItemBlock + { + Item* pItems; + uint32_t Capacity; + uint32_t FirstFreeIndex; + }; + + const VkAllocationCallbacks* m_pAllocationCallbacks; + const uint32_t m_FirstBlockCapacity; + VmaVector> m_ItemBlocks; + + ItemBlock& CreateNewBlock(); +}; + +#ifndef _VMA_POOL_ALLOCATOR_FUNCTIONS +template +VmaPoolAllocator::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) + : m_pAllocationCallbacks(pAllocationCallbacks), + m_FirstBlockCapacity(firstBlockCapacity), + m_ItemBlocks(VmaStlAllocator(pAllocationCallbacks)) +{ + VMA_ASSERT(m_FirstBlockCapacity > 1); +} + +template +VmaPoolAllocator::~VmaPoolAllocator() +{ + for (size_t i = m_ItemBlocks.size(); i--;) + vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity); + m_ItemBlocks.clear(); +} + +template +template T* VmaPoolAllocator::Alloc(Types&&... args) +{ + for (size_t i = m_ItemBlocks.size(); i--; ) + { + ItemBlock& block = m_ItemBlocks[i]; + // This block has some free items: Use first one. + if (block.FirstFreeIndex != UINT32_MAX) + { + Item* const pItem = &block.pItems[block.FirstFreeIndex]; + block.FirstFreeIndex = pItem->NextFreeIndex; + T* result = (T*)&pItem->Value; + new(result)T(std::forward(args)...); // Explicit constructor call. + return result; + } + } + + // No block has free item: Create new one and use it. + ItemBlock& newBlock = CreateNewBlock(); + Item* const pItem = &newBlock.pItems[0]; + newBlock.FirstFreeIndex = pItem->NextFreeIndex; + T* result = (T*)&pItem->Value; + new(result) T(std::forward(args)...); // Explicit constructor call. + return result; +} + +template +void VmaPoolAllocator::Free(T* ptr) +{ + // Search all memory blocks to find ptr. + for (size_t i = m_ItemBlocks.size(); i--; ) + { + ItemBlock& block = m_ItemBlocks[i]; + + // Casting to union. + Item* pItemPtr; + memcpy(&pItemPtr, &ptr, sizeof(pItemPtr)); + + // Check if pItemPtr is in address range of this block. + if ((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity)) + { + ptr->~T(); // Explicit destructor call. + const uint32_t index = static_cast(pItemPtr - block.pItems); + pItemPtr->NextFreeIndex = block.FirstFreeIndex; + block.FirstFreeIndex = index; + return; + } + } + VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool."); +} + +template +typename VmaPoolAllocator::ItemBlock& VmaPoolAllocator::CreateNewBlock() +{ + const uint32_t newBlockCapacity = m_ItemBlocks.empty() ? 
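+        // Each new block's capacity is 3/2 of the previous one (integer math).
+        // E.g. with firstBlockCapacity == 32 the blocks hold 32, 48, 72, 108,
+        // 162, ... items, so the number of blocks stays logarithmic in the total
+        // item count.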
+ m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2; + + const ItemBlock newBlock = + { + vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity), + newBlockCapacity, + 0 + }; + + m_ItemBlocks.push_back(newBlock); + + // Setup singly-linked list of all free items in this block. + for (uint32_t i = 0; i < newBlockCapacity - 1; ++i) + newBlock.pItems[i].NextFreeIndex = i + 1; + newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX; + return m_ItemBlocks.back(); +} +#endif // _VMA_POOL_ALLOCATOR_FUNCTIONS +#endif // _VMA_POOL_ALLOCATOR + +#ifndef _VMA_RAW_LIST +template +struct VmaListItem +{ + VmaListItem* pPrev; + VmaListItem* pNext; + T Value; +}; + +// Doubly linked list. +template +class VmaRawList +{ + VMA_CLASS_NO_COPY(VmaRawList) +public: + typedef VmaListItem ItemType; + + VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks); + // Intentionally not calling Clear, because that would be unnecessary + // computations to return all items to m_ItemAllocator as free. + ~VmaRawList() = default; + + size_t GetCount() const { return m_Count; } + bool IsEmpty() const { return m_Count == 0; } + + ItemType* Front() { return m_pFront; } + ItemType* Back() { return m_pBack; } + const ItemType* Front() const { return m_pFront; } + const ItemType* Back() const { return m_pBack; } + + ItemType* PushFront(); + ItemType* PushBack(); + ItemType* PushFront(const T& value); + ItemType* PushBack(const T& value); + void PopFront(); + void PopBack(); + + // Item can be null - it means PushBack. + ItemType* InsertBefore(ItemType* pItem); + // Item can be null - it means PushFront. + ItemType* InsertAfter(ItemType* pItem); + ItemType* InsertBefore(ItemType* pItem, const T& value); + ItemType* InsertAfter(ItemType* pItem, const T& value); + + void Clear(); + void Remove(ItemType* pItem); + +private: + const VkAllocationCallbacks* const m_pAllocationCallbacks; + VmaPoolAllocator m_ItemAllocator; + ItemType* m_pFront; + ItemType* m_pBack; + size_t m_Count; +}; + +#ifndef _VMA_RAW_LIST_FUNCTIONS +template +VmaRawList::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) + : m_pAllocationCallbacks(pAllocationCallbacks), + m_ItemAllocator(pAllocationCallbacks, 128), + m_pFront(VMA_NULL), + m_pBack(VMA_NULL), + m_Count(0) {} + +template +VmaListItem* VmaRawList::PushFront() +{ + ItemType* const pNewItem = m_ItemAllocator.Alloc(); + pNewItem->pPrev = VMA_NULL; + if (IsEmpty()) + { + pNewItem->pNext = VMA_NULL; + m_pFront = pNewItem; + m_pBack = pNewItem; + m_Count = 1; + } + else + { + pNewItem->pNext = m_pFront; + m_pFront->pPrev = pNewItem; + m_pFront = pNewItem; + ++m_Count; + } + return pNewItem; +} + +template +VmaListItem* VmaRawList::PushBack() +{ + ItemType* const pNewItem = m_ItemAllocator.Alloc(); + pNewItem->pNext = VMA_NULL; + if(IsEmpty()) + { + pNewItem->pPrev = VMA_NULL; + m_pFront = pNewItem; + m_pBack = pNewItem; + m_Count = 1; + } + else + { + pNewItem->pPrev = m_pBack; + m_pBack->pNext = pNewItem; + m_pBack = pNewItem; + ++m_Count; + } + return pNewItem; +} + +template +VmaListItem* VmaRawList::PushFront(const T& value) +{ + ItemType* const pNewItem = PushFront(); + pNewItem->Value = value; + return pNewItem; +} + +template +VmaListItem* VmaRawList::PushBack(const T& value) +{ + ItemType* const pNewItem = PushBack(); + pNewItem->Value = value; + return pNewItem; +} + +template +void VmaRawList::PopFront() +{ + VMA_HEAVY_ASSERT(m_Count > 0); + ItemType* const pFrontItem = m_pFront; + ItemType* const pNextItem = pFrontItem->pNext; + if (pNextItem != 
VMA_NULL) + { + pNextItem->pPrev = VMA_NULL; + } + m_pFront = pNextItem; + m_ItemAllocator.Free(pFrontItem); + --m_Count; +} + +template +void VmaRawList::PopBack() +{ + VMA_HEAVY_ASSERT(m_Count > 0); + ItemType* const pBackItem = m_pBack; + ItemType* const pPrevItem = pBackItem->pPrev; + if(pPrevItem != VMA_NULL) + { + pPrevItem->pNext = VMA_NULL; + } + m_pBack = pPrevItem; + m_ItemAllocator.Free(pBackItem); + --m_Count; +} + +template +void VmaRawList::Clear() +{ + if (IsEmpty() == false) + { + ItemType* pItem = m_pBack; + while (pItem != VMA_NULL) + { + ItemType* const pPrevItem = pItem->pPrev; + m_ItemAllocator.Free(pItem); + pItem = pPrevItem; + } + m_pFront = VMA_NULL; + m_pBack = VMA_NULL; + m_Count = 0; + } +} + +template +void VmaRawList::Remove(ItemType* pItem) +{ + VMA_HEAVY_ASSERT(pItem != VMA_NULL); + VMA_HEAVY_ASSERT(m_Count > 0); + + if(pItem->pPrev != VMA_NULL) + { + pItem->pPrev->pNext = pItem->pNext; + } + else + { + VMA_HEAVY_ASSERT(m_pFront == pItem); + m_pFront = pItem->pNext; + } + + if(pItem->pNext != VMA_NULL) + { + pItem->pNext->pPrev = pItem->pPrev; + } + else + { + VMA_HEAVY_ASSERT(m_pBack == pItem); + m_pBack = pItem->pPrev; + } + + m_ItemAllocator.Free(pItem); + --m_Count; +} + +template +VmaListItem* VmaRawList::InsertBefore(ItemType* pItem) +{ + if(pItem != VMA_NULL) + { + ItemType* const prevItem = pItem->pPrev; + ItemType* const newItem = m_ItemAllocator.Alloc(); + newItem->pPrev = prevItem; + newItem->pNext = pItem; + pItem->pPrev = newItem; + if(prevItem != VMA_NULL) + { + prevItem->pNext = newItem; + } + else + { + VMA_HEAVY_ASSERT(m_pFront == pItem); + m_pFront = newItem; + } + ++m_Count; + return newItem; + } + else + return PushBack(); +} + +template +VmaListItem* VmaRawList::InsertAfter(ItemType* pItem) +{ + if(pItem != VMA_NULL) + { + ItemType* const nextItem = pItem->pNext; + ItemType* const newItem = m_ItemAllocator.Alloc(); + newItem->pNext = nextItem; + newItem->pPrev = pItem; + pItem->pNext = newItem; + if(nextItem != VMA_NULL) + { + nextItem->pPrev = newItem; + } + else + { + VMA_HEAVY_ASSERT(m_pBack == pItem); + m_pBack = newItem; + } + ++m_Count; + return newItem; + } + else + return PushFront(); +} + +template +VmaListItem* VmaRawList::InsertBefore(ItemType* pItem, const T& value) +{ + ItemType* const newItem = InsertBefore(pItem); + newItem->Value = value; + return newItem; +} + +template +VmaListItem* VmaRawList::InsertAfter(ItemType* pItem, const T& value) +{ + ItemType* const newItem = InsertAfter(pItem); + newItem->Value = value; + return newItem; +} +#endif // _VMA_RAW_LIST_FUNCTIONS +#endif // _VMA_RAW_LIST + +#ifndef _VMA_LIST +template +class VmaList +{ + VMA_CLASS_NO_COPY(VmaList) +public: + class reverse_iterator; + class const_iterator; + class const_reverse_iterator; + + class iterator + { + friend class const_iterator; + friend class VmaList; + public: + iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} + iterator(const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + + T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } + T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } + + bool operator==(const iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; } + bool operator!=(const iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; } + + iterator operator++(int) { iterator result = *this; ++*this; return result; } + iterator operator--(int) { 
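+        // Illustrative usage sketch, not from the original source (assumes a
+        // valid VkAllocationCallbacks* `cbs`): VmaList behaves like a pared-down
+        // std::list backed by the pool allocator.
+        //
+        //   VmaStlAllocator<int> alloc(cbs);
+        //   VmaList<int, VmaStlAllocator<int>> list(alloc);
+        //   list.push_back(1);
+        //   list.push_back(2);
+        //   for (auto it = list.begin(); it != list.end(); ++it)
+        //       Process(*it); // Process is a hypothetical consumer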
iterator result = *this; --*this; return result; } + + iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext; return *this; } + iterator& operator--(); + + private: + VmaRawList* m_pList; + VmaListItem* m_pItem; + + iterator(VmaRawList* pList, VmaListItem* pItem) : m_pList(pList), m_pItem(pItem) {} + }; + class reverse_iterator + { + friend class const_reverse_iterator; + friend class VmaList; + public: + reverse_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} + reverse_iterator(const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + + T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } + T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } + + bool operator==(const reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; } + bool operator!=(const reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; } + + reverse_iterator operator++(int) { reverse_iterator result = *this; ++* this; return result; } + reverse_iterator operator--(int) { reverse_iterator result = *this; --* this; return result; } + + reverse_iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pPrev; return *this; } + reverse_iterator& operator--(); + + private: + VmaRawList* m_pList; + VmaListItem* m_pItem; + + reverse_iterator(VmaRawList* pList, VmaListItem* pItem) : m_pList(pList), m_pItem(pItem) {} + }; + class const_iterator + { + friend class VmaList; + public: + const_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} + const_iterator(const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + const_iterator(const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + + iterator drop_const() { return { const_cast*>(m_pList), const_cast*>(m_pItem) }; } + + const T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } + const T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } + + bool operator==(const const_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; } + bool operator!=(const const_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; } + + const_iterator operator++(int) { const_iterator result = *this; ++* this; return result; } + const_iterator operator--(int) { const_iterator result = *this; --* this; return result; } + + const_iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext; return *this; } + const_iterator& operator--(); + + private: + const VmaRawList* m_pList; + const VmaListItem* m_pItem; + + const_iterator(const VmaRawList* pList, const VmaListItem* pItem) : m_pList(pList), m_pItem(pItem) {} + }; + class const_reverse_iterator + { + friend class VmaList; + public: + const_reverse_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} + const_reverse_iterator(const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + const_reverse_iterator(const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + + reverse_iterator drop_const() { return { const_cast*>(m_pList), const_cast*>(m_pItem) }; } + + const T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } + const T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } + + bool operator==(const 
const_reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; }
+        bool operator!=(const const_reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; }
+
+        const_reverse_iterator operator++(int) { const_reverse_iterator result = *this; ++*this; return result; }
+        const_reverse_iterator operator--(int) { const_reverse_iterator result = *this; --*this; return result; }
+
+        const_reverse_iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pPrev; return *this; }
+        const_reverse_iterator& operator--();
+
+    private:
+        const VmaRawList<T>* m_pList;
+        const VmaListItem<T>* m_pItem;
+
+        const_reverse_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) : m_pList(pList), m_pItem(pItem) {}
+    };
+
+    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) {}
+
+    bool empty() const { return m_RawList.IsEmpty(); }
+    size_t size() const { return m_RawList.GetCount(); }
+
+    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
+    iterator end() { return iterator(&m_RawList, VMA_NULL); }
+
+    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
+    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }
+
+    const_iterator begin() const { return cbegin(); }
+    const_iterator end() const { return cend(); }
+
+    reverse_iterator rbegin() { return reverse_iterator(&m_RawList, m_RawList.Back()); }
+    reverse_iterator rend() { return reverse_iterator(&m_RawList, VMA_NULL); }
+
+    const_reverse_iterator crbegin() const { return const_reverse_iterator(&m_RawList, m_RawList.Back()); }
+    const_reverse_iterator crend() const { return const_reverse_iterator(&m_RawList, VMA_NULL); }
+
+    const_reverse_iterator rbegin() const { return crbegin(); }
+    const_reverse_iterator rend() const { return crend(); }
+
+    void push_back(const T& value) { m_RawList.PushBack(value); }
+    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
+
+    void clear() { m_RawList.Clear(); }
+    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
+
+private:
+    VmaRawList<T> m_RawList;
+};
+
+#ifndef _VMA_LIST_FUNCTIONS
+template<typename T, typename AllocatorT>
+typename VmaList<T, AllocatorT>::iterator& VmaList<T, AllocatorT>::iterator::operator--()
+{
+    if (m_pItem != VMA_NULL)
+    {
+        m_pItem = m_pItem->pPrev;
+    }
+    else
+    {
+        VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
+        m_pItem = m_pList->Back();
+    }
+    return *this;
+}
+
+template<typename T, typename AllocatorT>
+typename VmaList<T, AllocatorT>::reverse_iterator& VmaList<T, AllocatorT>::reverse_iterator::operator--()
+{
+    if (m_pItem != VMA_NULL)
+    {
+        m_pItem = m_pItem->pNext;
+    }
+    else
+    {
+        VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
+        m_pItem = m_pList->Front();
+    }
+    return *this;
+}
+
+template<typename T, typename AllocatorT>
+typename VmaList<T, AllocatorT>::const_iterator& VmaList<T, AllocatorT>::const_iterator::operator--()
+{
+    if (m_pItem != VMA_NULL)
+    {
+        m_pItem = m_pItem->pPrev;
+    }
+    else
+    {
+        VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
+        m_pItem = m_pList->Back();
+    }
+    return *this;
+}
+
+template<typename T, typename AllocatorT>
+typename VmaList<T, AllocatorT>::const_reverse_iterator& VmaList<T, AllocatorT>::const_reverse_iterator::operator--()
+{
+    if (m_pItem != VMA_NULL)
+    {
+        m_pItem = m_pItem->pNext;
+    }
+    else
+    {
+        VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
+        // Decrementing crend() lands on the front item, mirroring
+        // reverse_iterator::operator--() above.
+        m_pItem = m_pList->Front();
+    }
+    return *this;
+}
+#endif // _VMA_LIST_FUNCTIONS
+#endif // _VMA_LIST
+
+#ifndef _VMA_INTRUSIVE_LINKED_LIST
+/*
+Expected interface of ItemTypeTraits:
+struct MyItemTypeTraits
+{
+    typedef MyItem ItemType;
+    static ItemType* GetPrev(const ItemType* item) { return
item->myPrevPtr; } + static ItemType* GetNext(const ItemType* item) { return item->myNextPtr; } + static ItemType*& AccessPrev(ItemType* item) { return item->myPrevPtr; } + static ItemType*& AccessNext(ItemType* item) { return item->myNextPtr; } +}; +*/ +template +class VmaIntrusiveLinkedList +{ +public: + typedef typename ItemTypeTraits::ItemType ItemType; + static ItemType* GetPrev(const ItemType* item) { return ItemTypeTraits::GetPrev(item); } + static ItemType* GetNext(const ItemType* item) { return ItemTypeTraits::GetNext(item); } + + // Movable, not copyable. + VmaIntrusiveLinkedList() = default; + VmaIntrusiveLinkedList(VmaIntrusiveLinkedList && src); + VmaIntrusiveLinkedList(const VmaIntrusiveLinkedList&) = delete; + VmaIntrusiveLinkedList& operator=(VmaIntrusiveLinkedList&& src); + VmaIntrusiveLinkedList& operator=(const VmaIntrusiveLinkedList&) = delete; + ~VmaIntrusiveLinkedList() { VMA_HEAVY_ASSERT(IsEmpty()); } + + size_t GetCount() const { return m_Count; } + bool IsEmpty() const { return m_Count == 0; } + ItemType* Front() { return m_Front; } + ItemType* Back() { return m_Back; } + const ItemType* Front() const { return m_Front; } + const ItemType* Back() const { return m_Back; } + + void PushBack(ItemType* item); + void PushFront(ItemType* item); + ItemType* PopBack(); + ItemType* PopFront(); + + // MyItem can be null - it means PushBack. + void InsertBefore(ItemType* existingItem, ItemType* newItem); + // MyItem can be null - it means PushFront. + void InsertAfter(ItemType* existingItem, ItemType* newItem); + void Remove(ItemType* item); + void RemoveAll(); + +private: + ItemType* m_Front = VMA_NULL; + ItemType* m_Back = VMA_NULL; + size_t m_Count = 0; +}; + +#ifndef _VMA_INTRUSIVE_LINKED_LIST_FUNCTIONS +template +VmaIntrusiveLinkedList::VmaIntrusiveLinkedList(VmaIntrusiveLinkedList&& src) + : m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count) +{ + src.m_Front = src.m_Back = VMA_NULL; + src.m_Count = 0; +} + +template +VmaIntrusiveLinkedList& VmaIntrusiveLinkedList::operator=(VmaIntrusiveLinkedList&& src) +{ + if (&src != this) + { + VMA_HEAVY_ASSERT(IsEmpty()); + m_Front = src.m_Front; + m_Back = src.m_Back; + m_Count = src.m_Count; + src.m_Front = src.m_Back = VMA_NULL; + src.m_Count = 0; + } + return *this; +} + +template +void VmaIntrusiveLinkedList::PushBack(ItemType* item) +{ + VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL); + if (IsEmpty()) + { + m_Front = item; + m_Back = item; + m_Count = 1; + } + else + { + ItemTypeTraits::AccessPrev(item) = m_Back; + ItemTypeTraits::AccessNext(m_Back) = item; + m_Back = item; + ++m_Count; + } +} + +template +void VmaIntrusiveLinkedList::PushFront(ItemType* item) +{ + VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL); + if (IsEmpty()) + { + m_Front = item; + m_Back = item; + m_Count = 1; + } + else + { + ItemTypeTraits::AccessNext(item) = m_Front; + ItemTypeTraits::AccessPrev(m_Front) = item; + m_Front = item; + ++m_Count; + } +} + +template +typename VmaIntrusiveLinkedList::ItemType* VmaIntrusiveLinkedList::PopBack() +{ + VMA_HEAVY_ASSERT(m_Count > 0); + ItemType* const backItem = m_Back; + ItemType* const prevItem = ItemTypeTraits::GetPrev(backItem); + if (prevItem != VMA_NULL) + { + ItemTypeTraits::AccessNext(prevItem) = VMA_NULL; + } + m_Back = prevItem; + --m_Count; + ItemTypeTraits::AccessPrev(backItem) = VMA_NULL; + ItemTypeTraits::AccessNext(backItem) = VMA_NULL; + return backItem; +} + +template 
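+// Usage sketch for the traits interface documented above (illustrative, not from
+// the original source; MyItem/MyItemTypeTraits are the hypothetical types named
+// in that comment). The list links items it does not own - it never allocates or
+// frees them:
+//
+//   VmaIntrusiveLinkedList<MyItemTypeTraits> list;
+//   MyItem a{}, b{};
+//   list.PushBack(&a);
+//   list.InsertBefore(&a, &b); // b now precedes a
+//   list.RemoveAll();          // unlinks both; destroying a non-empty list asserts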
+typename VmaIntrusiveLinkedList::ItemType* VmaIntrusiveLinkedList::PopFront() +{ + VMA_HEAVY_ASSERT(m_Count > 0); + ItemType* const frontItem = m_Front; + ItemType* const nextItem = ItemTypeTraits::GetNext(frontItem); + if (nextItem != VMA_NULL) + { + ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL; + } + m_Front = nextItem; + --m_Count; + ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL; + ItemTypeTraits::AccessNext(frontItem) = VMA_NULL; + return frontItem; +} + +template +void VmaIntrusiveLinkedList::InsertBefore(ItemType* existingItem, ItemType* newItem) +{ + VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL); + if (existingItem != VMA_NULL) + { + ItemType* const prevItem = ItemTypeTraits::GetPrev(existingItem); + ItemTypeTraits::AccessPrev(newItem) = prevItem; + ItemTypeTraits::AccessNext(newItem) = existingItem; + ItemTypeTraits::AccessPrev(existingItem) = newItem; + if (prevItem != VMA_NULL) + { + ItemTypeTraits::AccessNext(prevItem) = newItem; + } + else + { + VMA_HEAVY_ASSERT(m_Front == existingItem); + m_Front = newItem; + } + ++m_Count; + } + else + PushBack(newItem); +} + +template +void VmaIntrusiveLinkedList::InsertAfter(ItemType* existingItem, ItemType* newItem) +{ + VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL); + if (existingItem != VMA_NULL) + { + ItemType* const nextItem = ItemTypeTraits::GetNext(existingItem); + ItemTypeTraits::AccessNext(newItem) = nextItem; + ItemTypeTraits::AccessPrev(newItem) = existingItem; + ItemTypeTraits::AccessNext(existingItem) = newItem; + if (nextItem != VMA_NULL) + { + ItemTypeTraits::AccessPrev(nextItem) = newItem; + } + else + { + VMA_HEAVY_ASSERT(m_Back == existingItem); + m_Back = newItem; + } + ++m_Count; + } + else + return PushFront(newItem); +} + +template +void VmaIntrusiveLinkedList::Remove(ItemType* item) +{ + VMA_HEAVY_ASSERT(item != VMA_NULL && m_Count > 0); + if (ItemTypeTraits::GetPrev(item) != VMA_NULL) + { + ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item); + } + else + { + VMA_HEAVY_ASSERT(m_Front == item); + m_Front = ItemTypeTraits::GetNext(item); + } + + if (ItemTypeTraits::GetNext(item) != VMA_NULL) + { + ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item); + } + else + { + VMA_HEAVY_ASSERT(m_Back == item); + m_Back = ItemTypeTraits::GetPrev(item); + } + ItemTypeTraits::AccessPrev(item) = VMA_NULL; + ItemTypeTraits::AccessNext(item) = VMA_NULL; + --m_Count; +} + +template +void VmaIntrusiveLinkedList::RemoveAll() +{ + if (!IsEmpty()) + { + ItemType* item = m_Back; + while (item != VMA_NULL) + { + ItemType* const prevItem = ItemTypeTraits::AccessPrev(item); + ItemTypeTraits::AccessPrev(item) = VMA_NULL; + ItemTypeTraits::AccessNext(item) = VMA_NULL; + item = prevItem; + } + m_Front = VMA_NULL; + m_Back = VMA_NULL; + m_Count = 0; + } +} +#endif // _VMA_INTRUSIVE_LINKED_LIST_FUNCTIONS +#endif // _VMA_INTRUSIVE_LINKED_LIST + +// Unused in this version. 
+#if 0 + +#ifndef _VMA_PAIR +template +struct VmaPair +{ + T1 first; + T2 second; + + VmaPair() : first(), second() {} + VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) {} +}; + +template +struct VmaPairFirstLess +{ + bool operator()(const VmaPair& lhs, const VmaPair& rhs) const + { + return lhs.first < rhs.first; + } + bool operator()(const VmaPair& lhs, const FirstT& rhsFirst) const + { + return lhs.first < rhsFirst; + } +}; +#endif // _VMA_PAIR + +#ifndef _VMA_MAP +/* Class compatible with subset of interface of std::unordered_map. +KeyT, ValueT must be POD because they will be stored in VmaVector. +*/ +template +class VmaMap +{ +public: + typedef VmaPair PairType; + typedef PairType* iterator; + + VmaMap(const VmaStlAllocator& allocator) : m_Vector(allocator) {} + + iterator begin() { return m_Vector.begin(); } + iterator end() { return m_Vector.end(); } + size_t size() { return m_Vector.size(); } + + void insert(const PairType& pair); + iterator find(const KeyT& key); + void erase(iterator it); + +private: + VmaVector< PairType, VmaStlAllocator> m_Vector; +}; + +#ifndef _VMA_MAP_FUNCTIONS +template +void VmaMap::insert(const PairType& pair) +{ + const size_t indexToInsert = VmaBinaryFindFirstNotLess( + m_Vector.data(), + m_Vector.data() + m_Vector.size(), + pair, + VmaPairFirstLess()) - m_Vector.data(); + VmaVectorInsert(m_Vector, indexToInsert, pair); +} + +template +VmaPair* VmaMap::find(const KeyT& key) +{ + PairType* it = VmaBinaryFindFirstNotLess( + m_Vector.data(), + m_Vector.data() + m_Vector.size(), + key, + VmaPairFirstLess()); + if ((it != m_Vector.end()) && (it->first == key)) + { + return it; + } + else + { + return m_Vector.end(); + } +} + +template +void VmaMap::erase(iterator it) +{ + VmaVectorRemove(m_Vector, it - m_Vector.begin()); +} +#endif // _VMA_MAP_FUNCTIONS +#endif // _VMA_MAP + +#endif // #if 0 + +#if !defined(_VMA_STRING_BUILDER) && VMA_STATS_STRING_ENABLED +class VmaStringBuilder +{ +public: + VmaStringBuilder(const VkAllocationCallbacks* allocationCallbacks) : m_Data(VmaStlAllocator(allocationCallbacks)) {} + ~VmaStringBuilder() = default; + + size_t GetLength() const { return m_Data.size(); } + const char* GetData() const { return m_Data.data(); } + void AddNewLine() { Add('\n'); } + void Add(char ch) { m_Data.push_back(ch); } + + void Add(const char* pStr); + void AddNumber(uint32_t num); + void AddNumber(uint64_t num); + void AddPointer(const void* ptr); + +private: + VmaVector> m_Data; +}; + +#ifndef _VMA_STRING_BUILDER_FUNCTIONS +void VmaStringBuilder::Add(const char* pStr) +{ + const size_t strLen = strlen(pStr); + if (strLen > 0) + { + const size_t oldCount = m_Data.size(); + m_Data.resize(oldCount + strLen); + memcpy(m_Data.data() + oldCount, pStr, strLen); + } +} + +void VmaStringBuilder::AddNumber(uint32_t num) +{ + char buf[11]; + buf[10] = '\0'; + char* p = &buf[10]; + do + { + *--p = '0' + (num % 10); + num /= 10; + } while (num); + Add(p); +} + +void VmaStringBuilder::AddNumber(uint64_t num) +{ + char buf[21]; + buf[20] = '\0'; + char* p = &buf[20]; + do + { + *--p = '0' + (num % 10); + num /= 10; + } while (num); + Add(p); +} + +void VmaStringBuilder::AddPointer(const void* ptr) +{ + char buf[21]; + VmaPtrToStr(buf, sizeof(buf), ptr); + Add(buf); +} +#endif //_VMA_STRING_BUILDER_FUNCTIONS +#endif // _VMA_STRING_BUILDER + +#if !defined(_VMA_JSON_WRITER) && VMA_STATS_STRING_ENABLED +/* +Allows to conveniently build a correct JSON document to be written to the +VmaStringBuilder passed to the constructor. 
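+
+Example (illustrative, not from the original source; assumes a valid
+VkAllocationCallbacks* named allocationCallbacks):
+
+    VmaStringBuilder sb(allocationCallbacks);
+    {
+        VmaJsonWriter json(allocationCallbacks, sb);
+        json.BeginObject(true);
+        json.WriteString("Type");   // key
+        json.WriteString("Buffer"); // value
+        json.WriteString("Size");
+        json.WriteNumber(65536u);
+        json.EndObject();
+    } // destructor asserts that every object/array was closed
+    // sb.GetData() now holds: {"Type": "Buffer", "Size": 65536}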
+*/
+class VmaJsonWriter
+{
+    VMA_CLASS_NO_COPY(VmaJsonWriter)
+public:
+    // sb - string builder to write the document to. Must remain alive for the whole lifetime of this object.
+    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
+    ~VmaJsonWriter();
+
+    // Begins object by writing "{".
+    // Inside an object, you must call pairs of WriteString and a value, e.g.:
+    // j.BeginObject(true); j.WriteString("A"); j.WriteNumber(1); j.WriteString("B"); j.WriteNumber(2); j.EndObject();
+    // Will write: { "A": 1, "B": 2 }
+    void BeginObject(bool singleLine = false);
+    // Ends object by writing "}".
+    void EndObject();
+
+    // Begins array by writing "[".
+    // Inside an array, you can write a sequence of any values.
+    void BeginArray(bool singleLine = false);
+    // Ends array by writing "]".
+    void EndArray();
+
+    // Writes a string value inside "".
+    // pStr can contain any ANSI characters, including '"', new line etc. - they will be properly escaped.
+    void WriteString(const char* pStr);
+
+    // Begins writing a string value.
+    // Call BeginString, ContinueString, ContinueString, ..., EndString instead of
+    // WriteString to conveniently build the string content incrementally, made of
+    // parts including numbers.
+    void BeginString(const char* pStr = VMA_NULL);
+    // Posts next part of an open string.
+    void ContinueString(const char* pStr);
+    // Posts next part of an open string. The number is converted to decimal characters.
+    void ContinueString(uint32_t n);
+    void ContinueString(uint64_t n);
+    void ContinueString_Size(size_t n);
+    // Posts next part of an open string. Pointer value is converted to characters
+    // using "%p" formatting - shown as hexadecimal number, e.g.: 000000081276Ad00
+    void ContinueString_Pointer(const void* ptr);
+    // Ends writing a string value by writing '"'.
+    void EndString(const char* pStr = VMA_NULL);
+
+    // Writes a number value.
+    void WriteNumber(uint32_t n);
+    void WriteNumber(uint64_t n);
+    void WriteSize(size_t n);
+    // Writes a boolean value - false or true.
+    void WriteBool(bool b);
+    // Writes a null value.
+ void WriteNull(); + +private: + enum COLLECTION_TYPE + { + COLLECTION_TYPE_OBJECT, + COLLECTION_TYPE_ARRAY, + }; + struct StackItem + { + COLLECTION_TYPE type; + uint32_t valueCount; + bool singleLineMode; + }; + + static const char* const INDENT; + + VmaStringBuilder& m_SB; + VmaVector< StackItem, VmaStlAllocator > m_Stack; + bool m_InsideString; + + // Write size_t for less than 64bits + void WriteSize(size_t n, std::integral_constant) { m_SB.AddNumber(static_cast(n)); } + // Write size_t for 64bits + void WriteSize(size_t n, std::integral_constant) { m_SB.AddNumber(static_cast(n)); } + + void BeginValue(bool isString); + void WriteIndent(bool oneLess = false); +}; +const char* const VmaJsonWriter::INDENT = " "; + +#ifndef _VMA_JSON_WRITER_FUNCTIONS +VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) + : m_SB(sb), + m_Stack(VmaStlAllocator(pAllocationCallbacks)), + m_InsideString(false) {} + +VmaJsonWriter::~VmaJsonWriter() +{ + VMA_ASSERT(!m_InsideString); + VMA_ASSERT(m_Stack.empty()); +} + +void VmaJsonWriter::BeginObject(bool singleLine) +{ + VMA_ASSERT(!m_InsideString); + + BeginValue(false); + m_SB.Add('{'); + + StackItem item; + item.type = COLLECTION_TYPE_OBJECT; + item.valueCount = 0; + item.singleLineMode = singleLine; + m_Stack.push_back(item); +} + +void VmaJsonWriter::EndObject() +{ + VMA_ASSERT(!m_InsideString); + + WriteIndent(true); + m_SB.Add('}'); + + VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT); + m_Stack.pop_back(); +} + +void VmaJsonWriter::BeginArray(bool singleLine) +{ + VMA_ASSERT(!m_InsideString); + + BeginValue(false); + m_SB.Add('['); + + StackItem item; + item.type = COLLECTION_TYPE_ARRAY; + item.valueCount = 0; + item.singleLineMode = singleLine; + m_Stack.push_back(item); +} + +void VmaJsonWriter::EndArray() +{ + VMA_ASSERT(!m_InsideString); + + WriteIndent(true); + m_SB.Add(']'); + + VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY); + m_Stack.pop_back(); +} + +void VmaJsonWriter::WriteString(const char* pStr) +{ + BeginString(pStr); + EndString(); +} + +void VmaJsonWriter::BeginString(const char* pStr) +{ + VMA_ASSERT(!m_InsideString); + + BeginValue(true); + m_SB.Add('"'); + m_InsideString = true; + if (pStr != VMA_NULL && pStr[0] != '\0') + { + ContinueString(pStr); + } +} + +void VmaJsonWriter::ContinueString(const char* pStr) +{ + VMA_ASSERT(m_InsideString); + + const size_t strLen = strlen(pStr); + for (size_t i = 0; i < strLen; ++i) + { + char ch = pStr[i]; + if (ch == '\\') + { + m_SB.Add("\\\\"); + } + else if (ch == '"') + { + m_SB.Add("\\\""); + } + else if (ch >= 32) + { + m_SB.Add(ch); + } + else switch (ch) + { + case '\b': + m_SB.Add("\\b"); + break; + case '\f': + m_SB.Add("\\f"); + break; + case '\n': + m_SB.Add("\\n"); + break; + case '\r': + m_SB.Add("\\r"); + break; + case '\t': + m_SB.Add("\\t"); + break; + default: + VMA_ASSERT(0 && "Character not currently supported."); + break; + } + } +} + +void VmaJsonWriter::ContinueString(uint32_t n) +{ + VMA_ASSERT(m_InsideString); + m_SB.AddNumber(n); +} + +void VmaJsonWriter::ContinueString(uint64_t n) +{ + VMA_ASSERT(m_InsideString); + m_SB.AddNumber(n); +} + +void VmaJsonWriter::ContinueString_Size(size_t n) +{ + VMA_ASSERT(m_InsideString); + // Fix for AppleClang incorrect type casting + // TODO: Change to if constexpr when C++17 used as minimal standard + WriteSize(n, std::is_same{}); +} + +void VmaJsonWriter::ContinueString_Pointer(const void* ptr) +{ + 
VMA_ASSERT(m_InsideString); + m_SB.AddPointer(ptr); +} + +void VmaJsonWriter::EndString(const char* pStr) +{ + VMA_ASSERT(m_InsideString); + if (pStr != VMA_NULL && pStr[0] != '\0') + { + ContinueString(pStr); + } + m_SB.Add('"'); + m_InsideString = false; +} + +void VmaJsonWriter::WriteNumber(uint32_t n) +{ + VMA_ASSERT(!m_InsideString); + BeginValue(false); + m_SB.AddNumber(n); +} + +void VmaJsonWriter::WriteNumber(uint64_t n) +{ + VMA_ASSERT(!m_InsideString); + BeginValue(false); + m_SB.AddNumber(n); +} + +void VmaJsonWriter::WriteSize(size_t n) +{ + VMA_ASSERT(!m_InsideString); + BeginValue(false); + // Fix for AppleClang incorrect type casting + // TODO: Change to if constexpr when C++17 used as minimal standard + WriteSize(n, std::is_same{}); +} + +void VmaJsonWriter::WriteBool(bool b) +{ + VMA_ASSERT(!m_InsideString); + BeginValue(false); + m_SB.Add(b ? "true" : "false"); +} + +void VmaJsonWriter::WriteNull() +{ + VMA_ASSERT(!m_InsideString); + BeginValue(false); + m_SB.Add("null"); +} + +void VmaJsonWriter::BeginValue(bool isString) +{ + if (!m_Stack.empty()) + { + StackItem& currItem = m_Stack.back(); + if (currItem.type == COLLECTION_TYPE_OBJECT && + currItem.valueCount % 2 == 0) + { + VMA_ASSERT(isString); + } + + if (currItem.type == COLLECTION_TYPE_OBJECT && + currItem.valueCount % 2 != 0) + { + m_SB.Add(": "); + } + else if (currItem.valueCount > 0) + { + m_SB.Add(", "); + WriteIndent(); + } + else + { + WriteIndent(); + } + ++currItem.valueCount; + } +} + +void VmaJsonWriter::WriteIndent(bool oneLess) +{ + if (!m_Stack.empty() && !m_Stack.back().singleLineMode) + { + m_SB.AddNewLine(); + + size_t count = m_Stack.size(); + if (count > 0 && oneLess) + { + --count; + } + for (size_t i = 0; i < count; ++i) + { + m_SB.Add(INDENT); + } + } +} +#endif // _VMA_JSON_WRITER_FUNCTIONS + +static void VmaPrintDetailedStatistics(VmaJsonWriter& json, const VmaDetailedStatistics& stat) +{ + json.BeginObject(); + + json.WriteString("BlockCount"); + json.WriteNumber(stat.statistics.blockCount); + json.WriteString("BlockBytes"); + json.WriteNumber(stat.statistics.blockBytes); + json.WriteString("AllocationCount"); + json.WriteNumber(stat.statistics.allocationCount); + json.WriteString("AllocationBytes"); + json.WriteNumber(stat.statistics.allocationBytes); + json.WriteString("UnusedRangeCount"); + json.WriteNumber(stat.unusedRangeCount); + + if (stat.statistics.allocationCount > 1) + { + json.WriteString("AllocationSizeMin"); + json.WriteNumber(stat.allocationSizeMin); + json.WriteString("AllocationSizeMax"); + json.WriteNumber(stat.allocationSizeMax); + } + if (stat.unusedRangeCount > 1) + { + json.WriteString("UnusedRangeSizeMin"); + json.WriteNumber(stat.unusedRangeSizeMin); + json.WriteString("UnusedRangeSizeMax"); + json.WriteNumber(stat.unusedRangeSizeMax); + } + json.EndObject(); +} +#endif // _VMA_JSON_WRITER + +#ifndef _VMA_MAPPING_HYSTERESIS + +class VmaMappingHysteresis +{ + VMA_CLASS_NO_COPY(VmaMappingHysteresis) +public: + VmaMappingHysteresis() = default; + + uint32_t GetExtraMapping() const { return m_ExtraMapping; } + + // Call when Map was called. + // Returns true if switched to extra +1 mapping reference count. 
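+    // Worked example of the scheme below: with COUNTER_MIN_EXTRA_MAPPING == 7,
+    // seven PostMap() calls while m_ExtraMapping is still 0 drive m_MajorCounter
+    // to 7, which flips m_ExtraMapping to 1; the intent is that the block then
+    // keeps one extra persistent mapping reference, so frequent Map/Unmap pairs
+    // no longer need real vkMapMemory/vkUnmapMemory calls.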
+ bool PostMap() + { +#if VMA_MAPPING_HYSTERESIS_ENABLED + if(m_ExtraMapping == 0) + { + ++m_MajorCounter; + if(m_MajorCounter >= COUNTER_MIN_EXTRA_MAPPING) + { + m_ExtraMapping = 1; + m_MajorCounter = 0; + m_MinorCounter = 0; + return true; + } + } + else // m_ExtraMapping == 1 + PostMinorCounter(); +#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED + return false; + } + + // Call when Unmap was called. + void PostUnmap() + { +#if VMA_MAPPING_HYSTERESIS_ENABLED + if(m_ExtraMapping == 0) + ++m_MajorCounter; + else // m_ExtraMapping == 1 + PostMinorCounter(); +#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED + } + + // Call when allocation was made from the memory block. + void PostAlloc() + { +#if VMA_MAPPING_HYSTERESIS_ENABLED + if(m_ExtraMapping == 1) + ++m_MajorCounter; + else // m_ExtraMapping == 0 + PostMinorCounter(); +#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED + } + + // Call when allocation was freed from the memory block. + // Returns true if switched to extra -1 mapping reference count. + bool PostFree() + { +#if VMA_MAPPING_HYSTERESIS_ENABLED + if(m_ExtraMapping == 1) + { + ++m_MajorCounter; + if(m_MajorCounter >= COUNTER_MIN_EXTRA_MAPPING && + m_MajorCounter > m_MinorCounter + 1) + { + m_ExtraMapping = 0; + m_MajorCounter = 0; + m_MinorCounter = 0; + return true; + } + } + else // m_ExtraMapping == 0 + PostMinorCounter(); +#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED + return false; + } + +private: + static const int32_t COUNTER_MIN_EXTRA_MAPPING = 7; + + uint32_t m_MinorCounter = 0; + uint32_t m_MajorCounter = 0; + uint32_t m_ExtraMapping = 0; // 0 or 1. + + void PostMinorCounter() + { + if(m_MinorCounter < m_MajorCounter) + { + ++m_MinorCounter; + } + else if(m_MajorCounter > 0) + { + --m_MajorCounter; + --m_MinorCounter; + } + } +}; + +#endif // _VMA_MAPPING_HYSTERESIS + +#ifndef _VMA_DEVICE_MEMORY_BLOCK +/* +Represents a single block of device memory (`VkDeviceMemory`) with all the +data about its regions (aka suballocations, #VmaAllocation), assigned and free. + +Thread-safety: +- Access to m_pMetadata must be externally synchronized. +- Map, Unmap, Bind* are synchronized internally. +*/ +class VmaDeviceMemoryBlock +{ + VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock) +public: + VmaBlockMetadata* m_pMetadata; + + VmaDeviceMemoryBlock(VmaAllocator hAllocator); + ~VmaDeviceMemoryBlock(); + + // Always call after construction. + void Init( + VmaAllocator hAllocator, + VmaPool hParentPool, + uint32_t newMemoryTypeIndex, + VkDeviceMemory newMemory, + VkDeviceSize newSize, + uint32_t id, + uint32_t algorithm, + VkDeviceSize bufferImageGranularity); + // Always call before destruction. + void Destroy(VmaAllocator allocator); + + VmaPool GetParentPool() const { return m_hParentPool; } + VkDeviceMemory GetDeviceMemory() const { return m_hMemory; } + uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; } + uint32_t GetId() const { return m_Id; } + void* GetMappedData() const { return m_pMappedData; } + uint32_t GetMapRefCount() const { return m_MapCount; } + + // Call when allocation/free was made from m_pMetadata. + // Used for m_MappingHysteresis. + void PostAlloc() { m_MappingHysteresis.PostAlloc(); } + void PostFree(VmaAllocator hAllocator); + + // Validates all data structures inside this object. If not valid, returns false. + bool Validate() const; + VkResult CheckCorruption(VmaAllocator hAllocator); + + // ppData can be null. 
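+    // Map()/Unmap() below are reference-counted: `count` adds or removes that
+    // many references on m_MapCount, and by design the memory is only actually
+    // mapped/unmapped on the 0 <-> 1 transitions, all under m_MapAndBindMutex.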
+ VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData); + void Unmap(VmaAllocator hAllocator, uint32_t count); + + VkResult WriteMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize); + VkResult ValidateMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize); + + VkResult BindBufferMemory( + const VmaAllocator hAllocator, + const VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkBuffer hBuffer, + const void* pNext); + VkResult BindImageMemory( + const VmaAllocator hAllocator, + const VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkImage hImage, + const void* pNext); + +private: + VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool. + uint32_t m_MemoryTypeIndex; + uint32_t m_Id; + VkDeviceMemory m_hMemory; + + /* + Protects access to m_hMemory so it is not used by multiple threads simultaneously, e.g. vkMapMemory, vkBindBufferMemory. + Also protects m_MapCount, m_pMappedData. + Allocations, deallocations, any change in m_pMetadata is protected by parent's VmaBlockVector::m_Mutex. + */ + VMA_MUTEX m_MapAndBindMutex; + VmaMappingHysteresis m_MappingHysteresis; + uint32_t m_MapCount; + void* m_pMappedData; +}; +#endif // _VMA_DEVICE_MEMORY_BLOCK + +#ifndef _VMA_ALLOCATION_T +struct VmaAllocation_T +{ + friend struct VmaDedicatedAllocationListItemTraits; + + enum FLAGS + { + FLAG_PERSISTENT_MAP = 0x01, + FLAG_MAPPING_ALLOWED = 0x02, + }; + +public: + enum ALLOCATION_TYPE + { + ALLOCATION_TYPE_NONE, + ALLOCATION_TYPE_BLOCK, + ALLOCATION_TYPE_DEDICATED, + }; + + // This struct is allocated using VmaPoolAllocator. + VmaAllocation_T(bool mappingAllowed); + ~VmaAllocation_T(); + + void InitBlockAllocation( + VmaDeviceMemoryBlock* block, + VmaAllocHandle allocHandle, + VkDeviceSize alignment, + VkDeviceSize size, + uint32_t memoryTypeIndex, + VmaSuballocationType suballocationType, + bool mapped); + // pMappedData not null means allocation is created with MAPPED flag. 
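+    // An allocation is meant to be initialized once after construction: either
+    // with InitBlockAllocation() above (a suballocation of a VmaDeviceMemoryBlock,
+    // addressed by block + VmaAllocHandle) or with InitDedicatedAllocation()
+    // below (owning a whole VkDeviceMemory); the two variants share storage via
+    // the BlockAllocation/DedicatedAllocation union further down.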
+ void InitDedicatedAllocation( + VmaPool hParentPool, + uint32_t memoryTypeIndex, + VkDeviceMemory hMemory, + VmaSuballocationType suballocationType, + void* pMappedData, + VkDeviceSize size); + + ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; } + VkDeviceSize GetAlignment() const { return m_Alignment; } + VkDeviceSize GetSize() const { return m_Size; } + void* GetUserData() const { return m_pUserData; } + const char* GetName() const { return m_pName; } + VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; } + + VmaDeviceMemoryBlock* GetBlock() const { VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK); return m_BlockAllocation.m_Block; } + uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; } + bool IsPersistentMap() const { return (m_Flags & FLAG_PERSISTENT_MAP) != 0; } + bool IsMappingAllowed() const { return (m_Flags & FLAG_MAPPING_ALLOWED) != 0; } + + void SetUserData(VmaAllocator hAllocator, void* pUserData) { m_pUserData = pUserData; } + void SetName(VmaAllocator hAllocator, const char* pName); + void FreeName(VmaAllocator hAllocator); + uint8_t SwapBlockAllocation(VmaAllocator hAllocator, VmaAllocation allocation); + VmaAllocHandle GetAllocHandle() const; + VkDeviceSize GetOffset() const; + VmaPool GetParentPool() const; + VkDeviceMemory GetMemory() const; + void* GetMappedData() const; + + void BlockAllocMap(); + void BlockAllocUnmap(); + VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData); + void DedicatedAllocUnmap(VmaAllocator hAllocator); + +#if VMA_STATS_STRING_ENABLED + uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; } + + void InitBufferImageUsage(uint32_t bufferImageUsage); + void PrintParameters(class VmaJsonWriter& json) const; +#endif + +private: + // Allocation out of VmaDeviceMemoryBlock. + struct BlockAllocation + { + VmaDeviceMemoryBlock* m_Block; + VmaAllocHandle m_AllocHandle; + }; + // Allocation for an object that has its own private VkDeviceMemory. + struct DedicatedAllocation + { + VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool. + VkDeviceMemory m_hMemory; + void* m_pMappedData; // Not null means memory is mapped. + VmaAllocation_T* m_Prev; + VmaAllocation_T* m_Next; + }; + union + { + // Allocation out of VmaDeviceMemoryBlock. + BlockAllocation m_BlockAllocation; + // Allocation for an object that has its own private VkDeviceMemory. + DedicatedAllocation m_DedicatedAllocation; + }; + + VkDeviceSize m_Alignment; + VkDeviceSize m_Size; + void* m_pUserData; + char* m_pName; + uint32_t m_MemoryTypeIndex; + uint8_t m_Type; // ALLOCATION_TYPE + uint8_t m_SuballocationType; // VmaSuballocationType + // Reference counter for vmaMapMemory()/vmaUnmapMemory(). + uint8_t m_MapCount; + uint8_t m_Flags; // enum FLAGS +#if VMA_STATS_STRING_ENABLED + uint32_t m_BufferImageUsage; // 0 if unknown. 
+#endif +}; +#endif // _VMA_ALLOCATION_T + +#ifndef _VMA_DEDICATED_ALLOCATION_LIST_ITEM_TRAITS +struct VmaDedicatedAllocationListItemTraits +{ + typedef VmaAllocation_T ItemType; + + static ItemType* GetPrev(const ItemType* item) + { + VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + return item->m_DedicatedAllocation.m_Prev; + } + static ItemType* GetNext(const ItemType* item) + { + VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + return item->m_DedicatedAllocation.m_Next; + } + static ItemType*& AccessPrev(ItemType* item) + { + VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + return item->m_DedicatedAllocation.m_Prev; + } + static ItemType*& AccessNext(ItemType* item) + { + VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + return item->m_DedicatedAllocation.m_Next; + } +}; +#endif // _VMA_DEDICATED_ALLOCATION_LIST_ITEM_TRAITS + +#ifndef _VMA_DEDICATED_ALLOCATION_LIST +/* +Stores linked list of VmaAllocation_T objects. +Thread-safe, synchronized internally. +*/ +class VmaDedicatedAllocationList +{ +public: + VmaDedicatedAllocationList() {} + ~VmaDedicatedAllocationList(); + + void Init(bool useMutex) { m_UseMutex = useMutex; } + bool Validate(); + + void AddDetailedStatistics(VmaDetailedStatistics& inoutStats); + void AddStatistics(VmaStatistics& inoutStats); +#if VMA_STATS_STRING_ENABLED + // Writes JSON array with the list of allocations. + void BuildStatsString(VmaJsonWriter& json); +#endif + + bool IsEmpty(); + void Register(VmaAllocation alloc); + void Unregister(VmaAllocation alloc); + +private: + typedef VmaIntrusiveLinkedList DedicatedAllocationLinkedList; + + bool m_UseMutex = true; + VMA_RW_MUTEX m_Mutex; + DedicatedAllocationLinkedList m_AllocationList; +}; + +#ifndef _VMA_DEDICATED_ALLOCATION_LIST_FUNCTIONS + +VmaDedicatedAllocationList::~VmaDedicatedAllocationList() +{ + VMA_HEAVY_ASSERT(Validate()); + + if (!m_AllocationList.IsEmpty()) + { + VMA_ASSERT(false && "Unfreed dedicated allocations found!"); + } +} + +bool VmaDedicatedAllocationList::Validate() +{ + const size_t declaredCount = m_AllocationList.GetCount(); + size_t actualCount = 0; + VmaMutexLockRead lock(m_Mutex, m_UseMutex); + for (VmaAllocation alloc = m_AllocationList.Front(); + alloc != VMA_NULL; alloc = m_AllocationList.GetNext(alloc)) + { + ++actualCount; + } + VMA_VALIDATE(actualCount == declaredCount); + + return true; +} + +void VmaDedicatedAllocationList::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) +{ + for(auto* item = m_AllocationList.Front(); item != nullptr; item = DedicatedAllocationLinkedList::GetNext(item)) + { + const VkDeviceSize size = item->GetSize(); + inoutStats.statistics.blockCount++; + inoutStats.statistics.blockBytes += size; + VmaAddDetailedStatisticsAllocation(inoutStats, item->GetSize()); + } +} + +void VmaDedicatedAllocationList::AddStatistics(VmaStatistics& inoutStats) +{ + VmaMutexLockRead lock(m_Mutex, m_UseMutex); + + const uint32_t allocCount = (uint32_t)m_AllocationList.GetCount(); + inoutStats.blockCount += allocCount; + inoutStats.allocationCount += allocCount; + + for(auto* item = m_AllocationList.Front(); item != nullptr; item = DedicatedAllocationLinkedList::GetNext(item)) + { + const VkDeviceSize size = item->GetSize(); + inoutStats.blockBytes += size; + inoutStats.allocationBytes += size; + } +} + +#if VMA_STATS_STRING_ENABLED +void VmaDedicatedAllocationList::BuildStatsString(VmaJsonWriter& json) +{ + VmaMutexLockRead 
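+    // Lock discipline, as in the surrounding methods: read-only traversals such
+    // as this one take a shared VmaMutexLockRead, Register()/Unregister() take an
+    // exclusive VmaMutexLockWrite, and m_UseMutex == false (single-threaded use)
+    // skips locking entirely.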
lock(m_Mutex, m_UseMutex); + json.BeginArray(); + for (VmaAllocation alloc = m_AllocationList.Front(); + alloc != VMA_NULL; alloc = m_AllocationList.GetNext(alloc)) + { + json.BeginObject(true); + alloc->PrintParameters(json); + json.EndObject(); + } + json.EndArray(); +} +#endif // VMA_STATS_STRING_ENABLED + +bool VmaDedicatedAllocationList::IsEmpty() +{ + VmaMutexLockRead lock(m_Mutex, m_UseMutex); + return m_AllocationList.IsEmpty(); +} + +void VmaDedicatedAllocationList::Register(VmaAllocation alloc) +{ + VmaMutexLockWrite lock(m_Mutex, m_UseMutex); + m_AllocationList.PushBack(alloc); +} + +void VmaDedicatedAllocationList::Unregister(VmaAllocation alloc) +{ + VmaMutexLockWrite lock(m_Mutex, m_UseMutex); + m_AllocationList.Remove(alloc); +} +#endif // _VMA_DEDICATED_ALLOCATION_LIST_FUNCTIONS +#endif // _VMA_DEDICATED_ALLOCATION_LIST + +#ifndef _VMA_SUBALLOCATION +/* +Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as +allocated memory block or free. +*/ +struct VmaSuballocation +{ + VkDeviceSize offset; + VkDeviceSize size; + void* userData; + VmaSuballocationType type; +}; + +// Comparator for offsets. +struct VmaSuballocationOffsetLess +{ + bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const + { + return lhs.offset < rhs.offset; + } +}; + +struct VmaSuballocationOffsetGreater +{ + bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const + { + return lhs.offset > rhs.offset; + } +}; + +struct VmaSuballocationItemSizeLess +{ + bool operator()(const VmaSuballocationList::iterator lhs, + const VmaSuballocationList::iterator rhs) const + { + return lhs->size < rhs->size; + } + + bool operator()(const VmaSuballocationList::iterator lhs, + VkDeviceSize rhsSize) const + { + return lhs->size < rhsSize; + } +}; +#endif // _VMA_SUBALLOCATION + +#ifndef _VMA_ALLOCATION_REQUEST +/* +Parameters of planned allocation inside a VmaDeviceMemoryBlock. +item points to a FREE suballocation. +*/ +struct VmaAllocationRequest +{ + VmaAllocHandle allocHandle; + VkDeviceSize size; + VmaSuballocationList::iterator item; + void* customData; + uint64_t algorithmData; + VmaAllocationRequestType type; +}; +#endif // _VMA_ALLOCATION_REQUEST + +#ifndef _VMA_BLOCK_METADATA +/* +Data structure used for bookkeeping of allocations and unused ranges of memory +in a single VkDeviceMemory block. +*/ +class VmaBlockMetadata +{ +public: + // pAllocationCallbacks, if not null, must be owned externally - alive and unchanged for the whole lifetime of this object. + VmaBlockMetadata(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual); + virtual ~VmaBlockMetadata() = default; + + virtual void Init(VkDeviceSize size) { m_Size = size; } + bool IsVirtual() const { return m_IsVirtual; } + VkDeviceSize GetSize() const { return m_Size; } + + // Validates all data structures inside this object. If not valid, returns false. + virtual bool Validate() const = 0; + virtual size_t GetAllocationCount() const = 0; + virtual size_t GetFreeRegionsCount() const = 0; + virtual VkDeviceSize GetSumFreeSize() const = 0; + // Returns true if this block is empty - contains only single free suballocation. 
+ virtual bool IsEmpty() const = 0; + virtual void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) = 0; + virtual VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const = 0; + virtual void* GetAllocationUserData(VmaAllocHandle allocHandle) const = 0; + + virtual VmaAllocHandle GetAllocationListBegin() const = 0; + virtual VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const = 0; + virtual VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc) const = 0; + + // Shouldn't modify blockCount. + virtual void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const = 0; + virtual void AddStatistics(VmaStatistics& inoutStats) const = 0; + +#if VMA_STATS_STRING_ENABLED + virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0; +#endif + + // Tries to find a place for suballocation with given parameters inside this block. + // If succeeded, fills pAllocationRequest and returns true. + // If failed, returns false. + virtual bool CreateAllocationRequest( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + bool upperAddress, + VmaSuballocationType allocType, + // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags. + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) = 0; + + virtual VkResult CheckCorruption(const void* pBlockData) = 0; + + // Makes actual allocation based on request. Request must already be checked and valid. + virtual void Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) = 0; + + // Frees suballocation assigned to given memory region. + virtual void Free(VmaAllocHandle allocHandle) = 0; + + // Frees all allocations. + // Careful! Don't call it if there are VmaAllocation objects owned by userData of cleared allocations! + virtual void Clear() = 0; + + virtual void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) = 0; + virtual void DebugLogAllAllocations() const = 0; + +protected: + const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; } + VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; } + VkDeviceSize GetDebugMargin() const { return IsVirtual() ? 0 : VMA_DEBUG_MARGIN; } + + void DebugLogAllocation(VkDeviceSize offset, VkDeviceSize size, void* userData) const; +#if VMA_STATS_STRING_ENABLED + // mapRefCount == UINT32_MAX means unspecified. 
+ void PrintDetailedMap_Begin(class VmaJsonWriter& json, + VkDeviceSize unusedBytes, + size_t allocationCount, + size_t unusedRangeCount) const; + void PrintDetailedMap_Allocation(class VmaJsonWriter& json, + VkDeviceSize offset, VkDeviceSize size, void* userData) const; + void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json, + VkDeviceSize offset, + VkDeviceSize size) const; + void PrintDetailedMap_End(class VmaJsonWriter& json) const; +#endif + +private: + VkDeviceSize m_Size; + const VkAllocationCallbacks* m_pAllocationCallbacks; + const VkDeviceSize m_BufferImageGranularity; + const bool m_IsVirtual; +}; + +#ifndef _VMA_BLOCK_METADATA_FUNCTIONS +VmaBlockMetadata::VmaBlockMetadata(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual) + : m_Size(0), + m_pAllocationCallbacks(pAllocationCallbacks), + m_BufferImageGranularity(bufferImageGranularity), + m_IsVirtual(isVirtual) {} + +void VmaBlockMetadata::DebugLogAllocation(VkDeviceSize offset, VkDeviceSize size, void* userData) const +{ + if (IsVirtual()) + { + VMA_DEBUG_LOG("UNFREED VIRTUAL ALLOCATION; Offset: %llu; Size: %llu; UserData: %p", offset, size, userData); + } + else + { + VMA_ASSERT(userData != VMA_NULL); + VmaAllocation allocation = reinterpret_cast(userData); + + userData = allocation->GetUserData(); + const char* name = allocation->GetName(); + +#if VMA_STATS_STRING_ENABLED + VMA_DEBUG_LOG("UNFREED ALLOCATION; Offset: %llu; Size: %llu; UserData: %p; Name: %s; Type: %s; Usage: %u", + offset, size, userData, name ? name : "vma_empty", + VMA_SUBALLOCATION_TYPE_NAMES[allocation->GetSuballocationType()], + allocation->GetBufferImageUsage()); +#else + VMA_DEBUG_LOG("UNFREED ALLOCATION; Offset: %llu; Size: %llu; UserData: %p; Name: %s; Type: %u", + offset, size, userData, name ? 
name : "vma_empty", + (uint32_t)allocation->GetSuballocationType()); +#endif // VMA_STATS_STRING_ENABLED + } + +} + +#if VMA_STATS_STRING_ENABLED +void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json, + VkDeviceSize unusedBytes, size_t allocationCount, size_t unusedRangeCount) const +{ + json.WriteString("TotalBytes"); + json.WriteNumber(GetSize()); + + json.WriteString("UnusedBytes"); + json.WriteSize(unusedBytes); + + json.WriteString("Allocations"); + json.WriteSize(allocationCount); + + json.WriteString("UnusedRanges"); + json.WriteSize(unusedRangeCount); + + json.WriteString("Suballocations"); + json.BeginArray(); +} + +void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json, + VkDeviceSize offset, VkDeviceSize size, void* userData) const +{ + json.BeginObject(true); + + json.WriteString("Offset"); + json.WriteNumber(offset); + + if (IsVirtual()) + { + json.WriteString("Size"); + json.WriteNumber(size); + if (userData) + { + json.WriteString("CustomData"); + json.BeginString(); + json.ContinueString_Pointer(userData); + json.EndString(); + } + } + else + { + ((VmaAllocation)userData)->PrintParameters(json); + } + + json.EndObject(); +} + +void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json, + VkDeviceSize offset, VkDeviceSize size) const +{ + json.BeginObject(true); + + json.WriteString("Offset"); + json.WriteNumber(offset); + + json.WriteString("Type"); + json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]); + + json.WriteString("Size"); + json.WriteNumber(size); + + json.EndObject(); +} + +void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const +{ + json.EndArray(); +} +#endif // VMA_STATS_STRING_ENABLED +#endif // _VMA_BLOCK_METADATA_FUNCTIONS +#endif // _VMA_BLOCK_METADATA + +#ifndef _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY +// Before deleting object of this class remember to call 'Destroy()' +class VmaBlockBufferImageGranularity final +{ +public: + struct ValidationContext + { + const VkAllocationCallbacks* allocCallbacks; + uint16_t* pageAllocs; + }; + + VmaBlockBufferImageGranularity(VkDeviceSize bufferImageGranularity); + ~VmaBlockBufferImageGranularity(); + + bool IsEnabled() const { return m_BufferImageGranularity > MAX_LOW_BUFFER_IMAGE_GRANULARITY; } + + void Init(const VkAllocationCallbacks* pAllocationCallbacks, VkDeviceSize size); + // Before destroying object you must call free it's memory + void Destroy(const VkAllocationCallbacks* pAllocationCallbacks); + + void RoundupAllocRequest(VmaSuballocationType allocType, + VkDeviceSize& inOutAllocSize, + VkDeviceSize& inOutAllocAlignment) const; + + bool CheckConflictAndAlignUp(VkDeviceSize& inOutAllocOffset, + VkDeviceSize allocSize, + VkDeviceSize blockOffset, + VkDeviceSize blockSize, + VmaSuballocationType allocType) const; + + void AllocPages(uint8_t allocType, VkDeviceSize offset, VkDeviceSize size); + void FreePages(VkDeviceSize offset, VkDeviceSize size); + void Clear(); + + ValidationContext StartValidation(const VkAllocationCallbacks* pAllocationCallbacks, + bool isVirutal) const; + bool Validate(ValidationContext& ctx, VkDeviceSize offset, VkDeviceSize size) const; + bool FinishValidation(ValidationContext& ctx) const; + +private: + static const uint16_t MAX_LOW_BUFFER_IMAGE_GRANULARITY = 256; + + struct RegionInfo + { + uint8_t allocType; + uint16_t allocCount; + }; + + VkDeviceSize m_BufferImageGranularity; + uint32_t m_RegionCount; + RegionInfo* m_RegionInfo; + + uint32_t GetStartPage(VkDeviceSize offset) 
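+    // Worked example for the page math below (assuming OffsetToPageIndex divides
+    // by the granularity): with m_BufferImageGranularity == 1024, an allocation
+    // at offset 1500 with size 2000 occupies bytes [1500, 3500), so
+    // GetStartPage(1500) masks the offset down to 1024 -> page 1, and
+    // GetEndPage(1500, 2000) masks 3499 down to 3072 -> page 3; only the first
+    // and last touched pages are tracked for conflicts in AllocPages().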
const { return OffsetToPageIndex(offset & ~(m_BufferImageGranularity - 1)); } + uint32_t GetEndPage(VkDeviceSize offset, VkDeviceSize size) const { return OffsetToPageIndex((offset + size - 1) & ~(m_BufferImageGranularity - 1)); } + + uint32_t OffsetToPageIndex(VkDeviceSize offset) const; + void AllocPage(RegionInfo& page, uint8_t allocType); +}; + +#ifndef _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY_FUNCTIONS +VmaBlockBufferImageGranularity::VmaBlockBufferImageGranularity(VkDeviceSize bufferImageGranularity) + : m_BufferImageGranularity(bufferImageGranularity), + m_RegionCount(0), + m_RegionInfo(VMA_NULL) {} + +VmaBlockBufferImageGranularity::~VmaBlockBufferImageGranularity() +{ + VMA_ASSERT(m_RegionInfo == VMA_NULL && "Free not called before destroying object!"); +} + +void VmaBlockBufferImageGranularity::Init(const VkAllocationCallbacks* pAllocationCallbacks, VkDeviceSize size) +{ + if (IsEnabled()) + { + m_RegionCount = static_cast<uint32_t>(VmaDivideRoundingUp(size, m_BufferImageGranularity)); + m_RegionInfo = vma_new_array(pAllocationCallbacks, RegionInfo, m_RegionCount); + memset(m_RegionInfo, 0, m_RegionCount * sizeof(RegionInfo)); + } +} + +void VmaBlockBufferImageGranularity::Destroy(const VkAllocationCallbacks* pAllocationCallbacks) +{ + if (m_RegionInfo) + { + vma_delete_array(pAllocationCallbacks, m_RegionInfo, m_RegionCount); + m_RegionInfo = VMA_NULL; + } +} + +void VmaBlockBufferImageGranularity::RoundupAllocRequest(VmaSuballocationType allocType, + VkDeviceSize& inOutAllocSize, + VkDeviceSize& inOutAllocAlignment) const +{ + if (m_BufferImageGranularity > 1 && + m_BufferImageGranularity <= MAX_LOW_BUFFER_IMAGE_GRANULARITY) + { + if (allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN || + allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN || + allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL) + { + inOutAllocAlignment = VMA_MAX(inOutAllocAlignment, m_BufferImageGranularity); + inOutAllocSize = VmaAlignUp(inOutAllocSize, m_BufferImageGranularity); + } + } +} + +bool VmaBlockBufferImageGranularity::CheckConflictAndAlignUp(VkDeviceSize& inOutAllocOffset, + VkDeviceSize allocSize, + VkDeviceSize blockOffset, + VkDeviceSize blockSize, + VmaSuballocationType allocType) const +{ + if (IsEnabled()) + { + uint32_t startPage = GetStartPage(inOutAllocOffset); + if (m_RegionInfo[startPage].allocCount > 0 && + VmaIsBufferImageGranularityConflict(static_cast<VmaSuballocationType>(m_RegionInfo[startPage].allocType), allocType)) + { + inOutAllocOffset = VmaAlignUp(inOutAllocOffset, m_BufferImageGranularity); + if (blockSize < allocSize + inOutAllocOffset - blockOffset) + return true; + ++startPage; + } + uint32_t endPage = GetEndPage(inOutAllocOffset, allocSize); + if (endPage != startPage && + m_RegionInfo[endPage].allocCount > 0 && + VmaIsBufferImageGranularityConflict(static_cast<VmaSuballocationType>(m_RegionInfo[endPage].allocType), allocType)) + { + return true; + } + } + return false; +} + +void VmaBlockBufferImageGranularity::AllocPages(uint8_t allocType, VkDeviceSize offset, VkDeviceSize size) +{ + if (IsEnabled()) + { + uint32_t startPage = GetStartPage(offset); + AllocPage(m_RegionInfo[startPage], allocType); + + uint32_t endPage = GetEndPage(offset, size); + if (startPage != endPage) + AllocPage(m_RegionInfo[endPage], allocType); + } +} + +void VmaBlockBufferImageGranularity::FreePages(VkDeviceSize offset, VkDeviceSize size) +{ + if (IsEnabled()) + { + uint32_t startPage = GetStartPage(offset); + --m_RegionInfo[startPage].allocCount; + if (m_RegionInfo[startPage].allocCount == 0) + m_RegionInfo[startPage].allocType = VMA_SUBALLOCATION_TYPE_FREE;
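+ // Note: only the first and last granularity pages of the range are tracked, mirroring AllocPages() above; interior pages can belong to no other allocation, so no counters are kept for them. + // Illustrative example (hypothetical values, assuming a 4 KiB bufferImageGranularity): freeing offset 0x1800, size 0x2000 decrements page 1 (covering 0x1000-0x1FFF) here and page 3 (covering 0x3000-0x3FFF) below, resetting a page to VMA_SUBALLOCATION_TYPE_FREE once its allocCount reaches zero.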
+ uint32_t endPage = GetEndPage(offset, size); + if (startPage != endPage) + { + --m_RegionInfo[endPage].allocCount; + if (m_RegionInfo[endPage].allocCount == 0) + m_RegionInfo[endPage].allocType = VMA_SUBALLOCATION_TYPE_FREE; + } + } +} + +void VmaBlockBufferImageGranularity::Clear() +{ + if (m_RegionInfo) + memset(m_RegionInfo, 0, m_RegionCount * sizeof(RegionInfo)); +} + +VmaBlockBufferImageGranularity::ValidationContext VmaBlockBufferImageGranularity::StartValidation( + const VkAllocationCallbacks* pAllocationCallbacks, bool isVirtual) const +{ + ValidationContext ctx{ pAllocationCallbacks, VMA_NULL }; + if (!isVirtual && IsEnabled()) + { + ctx.pageAllocs = vma_new_array(pAllocationCallbacks, uint16_t, m_RegionCount); + memset(ctx.pageAllocs, 0, m_RegionCount * sizeof(uint16_t)); + } + return ctx; +} + +bool VmaBlockBufferImageGranularity::Validate(ValidationContext& ctx, + VkDeviceSize offset, VkDeviceSize size) const +{ + if (IsEnabled()) + { + uint32_t start = GetStartPage(offset); + ++ctx.pageAllocs[start]; + VMA_VALIDATE(m_RegionInfo[start].allocCount > 0); + + uint32_t end = GetEndPage(offset, size); + if (start != end) + { + ++ctx.pageAllocs[end]; + VMA_VALIDATE(m_RegionInfo[end].allocCount > 0); + } + } + return true; +} + +bool VmaBlockBufferImageGranularity::FinishValidation(ValidationContext& ctx) const +{ + // Check proper page structure + if (IsEnabled()) + { + VMA_ASSERT(ctx.pageAllocs != VMA_NULL && "Validation context not initialized!"); + + for (uint32_t page = 0; page < m_RegionCount; ++page) + { + VMA_VALIDATE(ctx.pageAllocs[page] == m_RegionInfo[page].allocCount); + } + vma_delete_array(ctx.allocCallbacks, ctx.pageAllocs, m_RegionCount); + ctx.pageAllocs = VMA_NULL; + } + return true; +} + +uint32_t VmaBlockBufferImageGranularity::OffsetToPageIndex(VkDeviceSize offset) const +{ + return static_cast<uint32_t>(offset >> VMA_BITSCAN_MSB(m_BufferImageGranularity)); +} + +void VmaBlockBufferImageGranularity::AllocPage(RegionInfo& page, uint8_t allocType) +{ + // When the current alloc type is free it can be overridden by the new type + if (page.allocCount == 0 || (page.allocCount > 0 && page.allocType == VMA_SUBALLOCATION_TYPE_FREE)) + page.allocType = allocType; + + ++page.allocCount; +} +#endif // _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY_FUNCTIONS +#endif // _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY + +#if 0 +#ifndef _VMA_BLOCK_METADATA_GENERIC +class VmaBlockMetadata_Generic : public VmaBlockMetadata +{ + friend class VmaDefragmentationAlgorithm_Generic; + friend class VmaDefragmentationAlgorithm_Fast; + VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic) +public: + VmaBlockMetadata_Generic(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual); + virtual ~VmaBlockMetadata_Generic() = default; + + size_t GetAllocationCount() const override { return m_Suballocations.size() - m_FreeCount; } + VkDeviceSize GetSumFreeSize() const override { return m_SumFreeSize; } + bool IsEmpty() const override { return (m_Suballocations.size() == 1) && (m_FreeCount == 1); } + void Free(VmaAllocHandle allocHandle) override { FreeSuballocation(FindAtOffset((VkDeviceSize)allocHandle - 1)); } + VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const override { return (VkDeviceSize)allocHandle - 1; }; + + void Init(VkDeviceSize size) override; + bool Validate() const override; + + void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const override; + void AddStatistics(VmaStatistics& inoutStats) const override; + +#if VMA_STATS_STRING_ENABLED + void
PrintDetailedMap(class VmaJsonWriter& json, uint32_t mapRefCount) const override; +#endif + + bool CreateAllocationRequest( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + bool upperAddress, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) override; + + VkResult CheckCorruption(const void* pBlockData) override; + + void Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) override; + + void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) override; + void* GetAllocationUserData(VmaAllocHandle allocHandle) const override; + VmaAllocHandle GetAllocationListBegin() const override; + VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const override; + void Clear() override; + void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) override; + void DebugLogAllAllocations() const override; + +private: + uint32_t m_FreeCount; + VkDeviceSize m_SumFreeSize; + VmaSuballocationList m_Suballocations; + // Suballocations that are free. Sorted by size, ascending. + VmaVector<VmaSuballocationList::iterator, VmaStlAllocator<VmaSuballocationList::iterator>> m_FreeSuballocationsBySize; + + VkDeviceSize AlignAllocationSize(VkDeviceSize size) const { return IsVirtual() ? size : VmaAlignUp(size, (VkDeviceSize)16); } + + VmaSuballocationList::iterator FindAtOffset(VkDeviceSize offset) const; + bool ValidateFreeSuballocationList() const; + + // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem. + // If yes, fills pOffset and returns true. If no, returns false. + bool CheckAllocation( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + VmaSuballocationList::const_iterator suballocItem, + VmaAllocHandle* pAllocHandle) const; + + // Given free suballocation, it merges it with following one, which must also be free. + void MergeFreeWithNext(VmaSuballocationList::iterator item); + // Releases given suballocation, making it free. + // Merges it with adjacent free suballocations if applicable. + // Returns iterator to new free suballocation at this place. + VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem); + // Given free suballocation, it inserts it into sorted list of + // m_FreeSuballocationsBySize if it is suitable. + void RegisterFreeSuballocation(VmaSuballocationList::iterator item); + // Given free suballocation, it removes it from sorted list of + // m_FreeSuballocationsBySize if it is suitable.
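+ // Together these two helpers keep m_FreeSuballocationsBySize sorted by size, ascending, so lookups can use binary search. + // Illustrative example (hypothetical sizes): with registered free sizes {64, 256, 1024}, freeing a 128-byte range inserts its iterator between 64 and 256, and a later 200-byte request finds the 256 entry via VmaBinaryFindFirstNotLess.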
+ void UnregisterFreeSuballocation(VmaSuballocationList::iterator item); +}; + +#ifndef _VMA_BLOCK_METADATA_GENERIC_FUNCTIONS +VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual) + : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual), + m_FreeCount(0), + m_SumFreeSize(0), + m_Suballocations(VmaStlAllocator<VmaSuballocation>(pAllocationCallbacks)), + m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(pAllocationCallbacks)) {} + +void VmaBlockMetadata_Generic::Init(VkDeviceSize size) +{ + VmaBlockMetadata::Init(size); + + m_FreeCount = 1; + m_SumFreeSize = size; + + VmaSuballocation suballoc = {}; + suballoc.offset = 0; + suballoc.size = size; + suballoc.type = VMA_SUBALLOCATION_TYPE_FREE; + + m_Suballocations.push_back(suballoc); + m_FreeSuballocationsBySize.push_back(m_Suballocations.begin()); +} + +bool VmaBlockMetadata_Generic::Validate() const +{ + VMA_VALIDATE(!m_Suballocations.empty()); + + // Expected offset of new suballocation as calculated from previous ones. + VkDeviceSize calculatedOffset = 0; + // Expected number of free suballocations as calculated from traversing their list. + uint32_t calculatedFreeCount = 0; + // Expected sum size of free suballocations as calculated from traversing their list. + VkDeviceSize calculatedSumFreeSize = 0; + // Expected number of free suballocations that should be registered in + // m_FreeSuballocationsBySize calculated from traversing their list. + size_t freeSuballocationsToRegister = 0; + // True if previously visited suballocation was free. + bool prevFree = false; + + const VkDeviceSize debugMargin = GetDebugMargin(); + + for (const auto& subAlloc : m_Suballocations) + { + // Actual offset of this suballocation doesn't match expected one. + VMA_VALIDATE(subAlloc.offset == calculatedOffset); + + const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE); + // Two adjacent free suballocations are invalid. They should be merged. + VMA_VALIDATE(!prevFree || !currFree); + + VmaAllocation alloc = (VmaAllocation)subAlloc.userData; + if (!IsVirtual()) + { + VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE)); + } + + if (currFree) + { + calculatedSumFreeSize += subAlloc.size; + ++calculatedFreeCount; + ++freeSuballocationsToRegister; + + // Margin required between allocations - every free space must be at least that large. + VMA_VALIDATE(subAlloc.size >= debugMargin); + } + else + { + if (!IsVirtual()) + { + VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == subAlloc.offset + 1); + VMA_VALIDATE(alloc->GetSize() == subAlloc.size); + } + + // Margin required between allocations - previous allocation must be free. + VMA_VALIDATE(debugMargin == 0 || prevFree); + } + + calculatedOffset += subAlloc.size; + prevFree = currFree; + } + + // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't + // match expected one. + VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister); + + VkDeviceSize lastSize = 0; + for (size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i) + { + VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i]; + + // Only free suballocations can be registered in m_FreeSuballocationsBySize. + VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE); + // They must be sorted by size ascending. + VMA_VALIDATE(suballocItem->size >= lastSize); + + lastSize = suballocItem->size; + } + + // Check if totals match calculated values.
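+ // The walk above rebuilt three totals from scratch; they must now match the cached members exactly. + // Illustrative example (hypothetical layout): a 256-byte block holding one 64-byte allocation at offset 0 is the list {used 0..63, free 64..255}, so calculatedOffset == 256 == GetSize(), calculatedFreeCount == 1 == m_FreeCount, and calculatedSumFreeSize == 192 == m_SumFreeSize.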
+ VMA_VALIDATE(ValidateFreeSuballocationList()); + VMA_VALIDATE(calculatedOffset == GetSize()); + VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize); + VMA_VALIDATE(calculatedFreeCount == m_FreeCount); + + return true; +} + +void VmaBlockMetadata_Generic::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const +{ + const uint32_t rangeCount = (uint32_t)m_Suballocations.size(); + inoutStats.statistics.blockCount++; + inoutStats.statistics.blockBytes += GetSize(); + + for (const auto& suballoc : m_Suballocations) + { + if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) + VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size); + else + VmaAddDetailedStatisticsUnusedRange(inoutStats, suballoc.size); + } +} + +void VmaBlockMetadata_Generic::AddStatistics(VmaStatistics& inoutStats) const +{ + inoutStats.blockCount++; + inoutStats.allocationCount += (uint32_t)m_Suballocations.size() - m_FreeCount; + inoutStats.blockBytes += GetSize(); + inoutStats.allocationBytes += GetSize() - m_SumFreeSize; +} + +#if VMA_STATS_STRING_ENABLED +void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json, uint32_t mapRefCount) const +{ + PrintDetailedMap_Begin(json, + m_SumFreeSize, // unusedBytes + m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount + m_FreeCount, // unusedRangeCount + mapRefCount); + + for (const auto& suballoc : m_Suballocations) + { + if (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE) + { + PrintDetailedMap_UnusedRange(json, suballoc.offset, suballoc.size); + } + else + { + PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData); + } + } + + PrintDetailedMap_End(json); +} +#endif // VMA_STATS_STRING_ENABLED + +bool VmaBlockMetadata_Generic::CreateAllocationRequest( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + bool upperAddress, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) +{ + VMA_ASSERT(allocSize > 0); + VMA_ASSERT(!upperAddress); + VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE); + VMA_ASSERT(pAllocationRequest != VMA_NULL); + VMA_HEAVY_ASSERT(Validate()); + + allocSize = AlignAllocationSize(allocSize); + + pAllocationRequest->type = VmaAllocationRequestType::Normal; + pAllocationRequest->size = allocSize; + + const VkDeviceSize debugMargin = GetDebugMargin(); + + // There is not enough total free space in this block to fulfill the request: Early return. + if (m_SumFreeSize < allocSize + debugMargin) + { + return false; + } + + // New algorithm, efficiently searching freeSuballocationsBySize. + const size_t freeSuballocCount = m_FreeSuballocationsBySize.size(); + if (freeSuballocCount > 0) + { + if (strategy == 0 || + strategy == VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT) + { + // Find first free suballocation with size not less than allocSize + debugMargin. 
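+ // Sketch of this best-fit path: since m_FreeSuballocationsBySize is sorted ascending, the binary search below returns the smallest free range able to hold allocSize + debugMargin, and the loop after it only advances to larger ranges when alignment or buffer-image-granularity checks reject a candidate.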
+ VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess( + m_FreeSuballocationsBySize.data(), + m_FreeSuballocationsBySize.data() + freeSuballocCount, + allocSize + debugMargin, + VmaSuballocationItemSizeLess()); + size_t index = it - m_FreeSuballocationsBySize.data(); + for (; index < freeSuballocCount; ++index) + { + if (CheckAllocation( + allocSize, + allocAlignment, + allocType, + m_FreeSuballocationsBySize[index], + &pAllocationRequest->allocHandle)) + { + pAllocationRequest->item = m_FreeSuballocationsBySize[index]; + return true; + } + } + } + else if (strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET) + { + for (VmaSuballocationList::iterator it = m_Suballocations.begin(); + it != m_Suballocations.end(); + ++it) + { + if (it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation( + allocSize, + allocAlignment, + allocType, + it, + &pAllocationRequest->allocHandle)) + { + pAllocationRequest->item = it; + return true; + } + } + } + else + { + VMA_ASSERT(strategy & (VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT | VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT)); + // Search starting from biggest suballocations. + for (size_t index = freeSuballocCount; index--; ) + { + if (CheckAllocation( + allocSize, + allocAlignment, + allocType, + m_FreeSuballocationsBySize[index], + &pAllocationRequest->allocHandle)) + { + pAllocationRequest->item = m_FreeSuballocationsBySize[index]; + return true; + } + } + } + } + + return false; +} + +VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData) +{ + for (auto& suballoc : m_Suballocations) + { + if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) + { + if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size)) + { + VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!"); + return VK_ERROR_UNKNOWN_COPY; + } + } + } + + return VK_SUCCESS; +} + +void VmaBlockMetadata_Generic::Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) +{ + VMA_ASSERT(request.type == VmaAllocationRequestType::Normal); + VMA_ASSERT(request.item != m_Suballocations.end()); + VmaSuballocation& suballoc = *request.item; + // Given suballocation is a free block. + VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); + + // Given offset is inside this suballocation. + VMA_ASSERT((VkDeviceSize)request.allocHandle - 1 >= suballoc.offset); + const VkDeviceSize paddingBegin = (VkDeviceSize)request.allocHandle - suballoc.offset - 1; + VMA_ASSERT(suballoc.size >= paddingBegin + request.size); + const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - request.size; + + // Unregister this free suballocation from m_FreeSuballocationsBySize and update + // it to become used. + UnregisterFreeSuballocation(request.item); + + suballoc.offset = (VkDeviceSize)request.allocHandle - 1; + suballoc.size = request.size; + suballoc.type = type; + suballoc.userData = userData; + + // If there are any free bytes remaining at the end, insert new free suballocation after current one.
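+ // Sketch of the split performed below (hypothetical numbers): placing a 96-byte allocation at offset 32 inside the free range [0..159] yields [free 0..31][used 32..127][free 128..159], i.e. paddingBegin == 32 and paddingEnd == 32; each non-zero padding becomes its own free suballocation, re-registered by size.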
+ if (paddingEnd) + { + VmaSuballocation paddingSuballoc = {}; + paddingSuballoc.offset = suballoc.offset + suballoc.size; + paddingSuballoc.size = paddingEnd; + paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE; + VmaSuballocationList::iterator next = request.item; + ++next; + const VmaSuballocationList::iterator paddingEndItem = + m_Suballocations.insert(next, paddingSuballoc); + RegisterFreeSuballocation(paddingEndItem); + } + + // If there are any free bytes remaining at the beginning, insert new free suballocation before current one. + if (paddingBegin) + { + VmaSuballocation paddingSuballoc = {}; + paddingSuballoc.offset = suballoc.offset - paddingBegin; + paddingSuballoc.size = paddingBegin; + paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE; + const VmaSuballocationList::iterator paddingBeginItem = + m_Suballocations.insert(request.item, paddingSuballoc); + RegisterFreeSuballocation(paddingBeginItem); + } + + // Update totals. + m_FreeCount = m_FreeCount - 1; + if (paddingBegin > 0) + { + ++m_FreeCount; + } + if (paddingEnd > 0) + { + ++m_FreeCount; + } + m_SumFreeSize -= request.size; +} + +void VmaBlockMetadata_Generic::GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) +{ + outInfo.offset = (VkDeviceSize)allocHandle - 1; + const VmaSuballocation& suballoc = *FindAtOffset(outInfo.offset); + outInfo.size = suballoc.size; + outInfo.pUserData = suballoc.userData; +} + +void* VmaBlockMetadata_Generic::GetAllocationUserData(VmaAllocHandle allocHandle) const +{ + return FindAtOffset((VkDeviceSize)allocHandle - 1)->userData; +} + +VmaAllocHandle VmaBlockMetadata_Generic::GetAllocationListBegin() const +{ + if (IsEmpty()) + return VK_NULL_HANDLE; + + for (const auto& suballoc : m_Suballocations) + { + if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) + return (VmaAllocHandle)(suballoc.offset + 1); + } + VMA_ASSERT(false && "Should contain at least 1 allocation!"); + return VK_NULL_HANDLE; +} + +VmaAllocHandle VmaBlockMetadata_Generic::GetNextAllocation(VmaAllocHandle prevAlloc) const +{ + VmaSuballocationList::const_iterator prev = FindAtOffset((VkDeviceSize)prevAlloc - 1); + + for (VmaSuballocationList::const_iterator it = ++prev; it != m_Suballocations.end(); ++it) + { + if (it->type != VMA_SUBALLOCATION_TYPE_FREE) + return (VmaAllocHandle)(it->offset + 1); + } + return VK_NULL_HANDLE; +} + +void VmaBlockMetadata_Generic::Clear() +{ + const VkDeviceSize size = GetSize(); + + VMA_ASSERT(IsVirtual()); + m_FreeCount = 1; + m_SumFreeSize = size; + m_Suballocations.clear(); + m_FreeSuballocationsBySize.clear(); + + VmaSuballocation suballoc = {}; + suballoc.offset = 0; + suballoc.size = size; + suballoc.type = VMA_SUBALLOCATION_TYPE_FREE; + m_Suballocations.push_back(suballoc); + + m_FreeSuballocationsBySize.push_back(m_Suballocations.begin()); +} + +void VmaBlockMetadata_Generic::SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) +{ + VmaSuballocation& suballoc = *FindAtOffset((VkDeviceSize)allocHandle - 1); + suballoc.userData = userData; +} + +void VmaBlockMetadata_Generic::DebugLogAllAllocations() const +{ + for (const auto& suballoc : m_Suballocations) + { + if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) + DebugLogAllocation(suballoc.offset, suballoc.size, suballoc.userData); + } +} + +VmaSuballocationList::iterator VmaBlockMetadata_Generic::FindAtOffset(VkDeviceSize offset) const +{ + VMA_HEAVY_ASSERT(!m_Suballocations.empty()); + const VkDeviceSize last = m_Suballocations.rbegin()->offset; + if (last == offset) + return 
m_Suballocations.rbegin().drop_const(); + const VkDeviceSize first = m_Suballocations.begin()->offset; + if (first == offset) + return m_Suballocations.begin().drop_const(); + + const size_t suballocCount = m_Suballocations.size(); + const VkDeviceSize step = (last - first + m_Suballocations.begin()->size) / suballocCount; + auto findSuballocation = [&](auto begin, auto end) -> VmaSuballocationList::iterator + { + for (auto suballocItem = begin; + suballocItem != end; + ++suballocItem) + { + if (suballocItem->offset == offset) + return suballocItem.drop_const(); + } + VMA_ASSERT(false && "Not found!"); + return m_Suballocations.end().drop_const(); + }; + // If requested offset is closer to the end of range, search from the end + if (offset - first > suballocCount * step / 2) + { + return findSuballocation(m_Suballocations.rbegin(), m_Suballocations.rend()); + } + return findSuballocation(m_Suballocations.begin(), m_Suballocations.end()); +} + +bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const +{ + VkDeviceSize lastSize = 0; + for (size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i) + { + const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i]; + + VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE); + VMA_VALIDATE(it->size >= lastSize); + lastSize = it->size; + } + return true; +} + +bool VmaBlockMetadata_Generic::CheckAllocation( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + VmaSuballocationList::const_iterator suballocItem, + VmaAllocHandle* pAllocHandle) const +{ + VMA_ASSERT(allocSize > 0); + VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE); + VMA_ASSERT(suballocItem != m_Suballocations.cend()); + VMA_ASSERT(pAllocHandle != VMA_NULL); + + const VkDeviceSize debugMargin = GetDebugMargin(); + const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity(); + + const VmaSuballocation& suballoc = *suballocItem; + VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); + + // Size of this suballocation is too small for this request: Early return. + if (suballoc.size < allocSize) + { + return false; + } + + // Start from offset equal to beginning of this suballocation. + VkDeviceSize offset = suballoc.offset + (suballocItem == m_Suballocations.cbegin() ? 0 : GetDebugMargin()); + + // Apply debugMargin from the end of previous alloc. + if (debugMargin > 0) + { + offset += debugMargin; + } + + // Apply alignment. + offset = VmaAlignUp(offset, allocAlignment); + + // Check previous suballocations for BufferImageGranularity conflicts. + // Make bigger alignment if necessary. + if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment) + { + bool bufferImageGranularityConflict = false; + VmaSuballocationList::const_iterator prevSuballocItem = suballocItem; + while (prevSuballocItem != m_Suballocations.cbegin()) + { + --prevSuballocItem; + const VmaSuballocation& prevSuballoc = *prevSuballocItem; + if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, offset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) + { + bufferImageGranularityConflict = true; + break; + } + } + else + // Already on previous page. + break; + } + if (bufferImageGranularityConflict) + { + offset = VmaAlignUp(offset, bufferImageGranularity); + } + } + + // Calculate padding at the beginning based on current offset. 
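+ // Worked example (hypothetical numbers): for a free range starting at offset 100 with allocAlignment == 64 and debugMargin == 0, offset is aligned up to 128, so paddingBegin computed below is 28 and the request fits only if 28 + allocSize still fits in suballoc.size.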
+ const VkDeviceSize paddingBegin = offset - suballoc.offset; + + // Fail if requested size plus margin after is bigger than size of this suballocation. + if (paddingBegin + allocSize + debugMargin > suballoc.size) + { + return false; + } + + // Check next suballocations for BufferImageGranularity conflicts. + // If conflict exists, allocation cannot be made here. + if (allocSize % bufferImageGranularity || offset % bufferImageGranularity) + { + VmaSuballocationList::const_iterator nextSuballocItem = suballocItem; + ++nextSuballocItem; + while (nextSuballocItem != m_Suballocations.cend()) + { + const VmaSuballocation& nextSuballoc = *nextSuballocItem; + if (VmaBlocksOnSamePage(offset, allocSize, nextSuballoc.offset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) + { + return false; + } + } + else + { + // Already on next page. + break; + } + ++nextSuballocItem; + } + } + + *pAllocHandle = (VmaAllocHandle)(offset + 1); + // All tests passed: Success. pAllocHandle is already filled. + return true; +} + +void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item) +{ + VMA_ASSERT(item != m_Suballocations.end()); + VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE); + + VmaSuballocationList::iterator nextItem = item; + ++nextItem; + VMA_ASSERT(nextItem != m_Suballocations.end()); + VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE); + + item->size += nextItem->size; + --m_FreeCount; + m_Suballocations.erase(nextItem); +} + +VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem) +{ + // Change this suballocation to be marked as free. + VmaSuballocation& suballoc = *suballocItem; + suballoc.type = VMA_SUBALLOCATION_TYPE_FREE; + suballoc.userData = VMA_NULL; + + // Update totals. + ++m_FreeCount; + m_SumFreeSize += suballoc.size; + + // Merge with previous and/or next suballocation if it's also free. + bool mergeWithNext = false; + bool mergeWithPrev = false; + + VmaSuballocationList::iterator nextItem = suballocItem; + ++nextItem; + if ((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)) + { + mergeWithNext = true; + } + + VmaSuballocationList::iterator prevItem = suballocItem; + if (suballocItem != m_Suballocations.begin()) + { + --prevItem; + if (prevItem->type == VMA_SUBALLOCATION_TYPE_FREE) + { + mergeWithPrev = true; + } + } + + if (mergeWithNext) + { + UnregisterFreeSuballocation(nextItem); + MergeFreeWithNext(suballocItem); + } + + if (mergeWithPrev) + { + UnregisterFreeSuballocation(prevItem); + MergeFreeWithNext(prevItem); + RegisterFreeSuballocation(prevItem); + return prevItem; + } + else + { + RegisterFreeSuballocation(suballocItem); + return suballocItem; + } +} + +void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item) +{ + VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE); + VMA_ASSERT(item->size > 0); + + // You may want to enable this validation at the beginning or at the end of + // this function, depending on what you want to check.
+ VMA_HEAVY_ASSERT(ValidateFreeSuballocationList()); + + if (m_FreeSuballocationsBySize.empty()) + { + m_FreeSuballocationsBySize.push_back(item); + } + else + { + VmaVectorInsertSorted(m_FreeSuballocationsBySize, item); + } + + //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList()); +} + +void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item) +{ + VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE); + VMA_ASSERT(item->size > 0); + + // You may want to enable this validation at the beginning or at the end of + // this function, depending on what you want to check. + VMA_HEAVY_ASSERT(ValidateFreeSuballocationList()); + + VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess( + m_FreeSuballocationsBySize.data(), + m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(), + item, + VmaSuballocationItemSizeLess()); + for (size_t index = it - m_FreeSuballocationsBySize.data(); + index < m_FreeSuballocationsBySize.size(); + ++index) + { + if (m_FreeSuballocationsBySize[index] == item) + { + VmaVectorRemove(m_FreeSuballocationsBySize, index); + return; + } + VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found."); + } + VMA_ASSERT(0 && "Not found."); + + //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList()); +} +#endif // _VMA_BLOCK_METADATA_GENERIC_FUNCTIONS +#endif // _VMA_BLOCK_METADATA_GENERIC +#endif // #if 0 + +#ifndef _VMA_BLOCK_METADATA_LINEAR +/* +Allocations and their references in internal data structure look like this: + +if(m_2ndVectorMode == SECOND_VECTOR_EMPTY): + + 0 +-------+ + | | + | | + | | + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount] + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount + 1] + +-------+ + | ... | + +-------+ + | Alloc | 1st[1st.size() - 1] + +-------+ + | | + | | + | | +GetSize() +-------+ + +if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER): + + 0 +-------+ + | Alloc | 2nd[0] + +-------+ + | Alloc | 2nd[1] + +-------+ + | ... | + +-------+ + | Alloc | 2nd[2nd.size() - 1] + +-------+ + | | + | | + | | + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount] + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount + 1] + +-------+ + | ... | + +-------+ + | Alloc | 1st[1st.size() - 1] + +-------+ + | | +GetSize() +-------+ + +if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK): + + 0 +-------+ + | | + | | + | | + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount] + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount + 1] + +-------+ + | ... | + +-------+ + | Alloc | 1st[1st.size() - 1] + +-------+ + | | + | | + | | + +-------+ + | Alloc | 2nd[2nd.size() - 1] + +-------+ + | ...
| + +-------+ + | Alloc | 2nd[1] + +-------+ + | Alloc | 2nd[0] +GetSize() +-------+ + +*/ +class VmaBlockMetadata_Linear : public VmaBlockMetadata +{ + VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear) +public: + VmaBlockMetadata_Linear(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual); + virtual ~VmaBlockMetadata_Linear() = default; + + VkDeviceSize GetSumFreeSize() const override { return m_SumFreeSize; } + bool IsEmpty() const override { return GetAllocationCount() == 0; } + VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const override { return (VkDeviceSize)allocHandle - 1; }; + + void Init(VkDeviceSize size) override; + bool Validate() const override; + size_t GetAllocationCount() const override; + size_t GetFreeRegionsCount() const override; + + void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const override; + void AddStatistics(VmaStatistics& inoutStats) const override; + +#if VMA_STATS_STRING_ENABLED + void PrintDetailedMap(class VmaJsonWriter& json) const override; +#endif + + bool CreateAllocationRequest( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + bool upperAddress, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) override; + + VkResult CheckCorruption(const void* pBlockData) override; + + void Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) override; + + void Free(VmaAllocHandle allocHandle) override; + void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) override; + void* GetAllocationUserData(VmaAllocHandle allocHandle) const override; + VmaAllocHandle GetAllocationListBegin() const override; + VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const override; + VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc) const override; + void Clear() override; + void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) override; + void DebugLogAllAllocations() const override; + +private: + /* + There are two suballocation vectors, used in ping-pong way. + The one with index m_1stVectorIndex is called 1st. + The one with index (m_1stVectorIndex ^ 1) is called 2nd. + 2nd can be non-empty only when 1st is not empty. + When 2nd is not empty, m_2ndVectorMode indicates its mode of operation. + */ + typedef VmaVector<VmaSuballocation, VmaStlAllocator<VmaSuballocation>> SuballocationVectorType; + + enum SECOND_VECTOR_MODE + { + SECOND_VECTOR_EMPTY, + /* + Suballocations in 2nd vector are created later than the ones in 1st, but they + all have smaller offset. + */ + SECOND_VECTOR_RING_BUFFER, + /* + Suballocations in 2nd vector are upper side of double stack. + They all have offsets higher than those in 1st vector. + Top of this stack means smaller offsets, but higher indices in this vector. + */ + SECOND_VECTOR_DOUBLE_STACK, + }; + + VkDeviceSize m_SumFreeSize; + SuballocationVectorType m_Suballocations0, m_Suballocations1; + uint32_t m_1stVectorIndex; + SECOND_VECTOR_MODE m_2ndVectorMode; + // Number of items in 1st vector with hAllocation = null at the beginning. + size_t m_1stNullItemsBeginCount; + // Number of other items in 1st vector with hAllocation = null somewhere in the middle. + size_t m_1stNullItemsMiddleCount; + // Number of items in 2nd vector with hAllocation = null. + size_t m_2ndNullItemsCount; + + SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ?
m_Suballocations1 : m_Suballocations0; } + SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; } + const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; } + const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; } + + VmaSuballocation& FindSuballocation(VkDeviceSize offset) const; + bool ShouldCompact1st() const; + void CleanupAfterFree(); + + bool CreateAllocationRequest_LowerAddress( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest); + bool CreateAllocationRequest_UpperAddress( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest); +}; + +#ifndef _VMA_BLOCK_METADATA_LINEAR_FUNCTIONS +VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual) + : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual), + m_SumFreeSize(0), + m_Suballocations0(VmaStlAllocator<VmaSuballocation>(pAllocationCallbacks)), + m_Suballocations1(VmaStlAllocator<VmaSuballocation>(pAllocationCallbacks)), + m_1stVectorIndex(0), + m_2ndVectorMode(SECOND_VECTOR_EMPTY), + m_1stNullItemsBeginCount(0), + m_1stNullItemsMiddleCount(0), + m_2ndNullItemsCount(0) {} + +void VmaBlockMetadata_Linear::Init(VkDeviceSize size) +{ + VmaBlockMetadata::Init(size); + m_SumFreeSize = size; +} + +bool VmaBlockMetadata_Linear::Validate() const +{ + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + + VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY)); + VMA_VALIDATE(!suballocations1st.empty() || + suballocations2nd.empty() || + m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER); + + if (!suballocations1st.empty()) + { + // Null item at the beginning should be accounted into m_1stNullItemsBeginCount. + VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].type != VMA_SUBALLOCATION_TYPE_FREE); + // Null item at the end should be just pop_back(). + VMA_VALIDATE(suballocations1st.back().type != VMA_SUBALLOCATION_TYPE_FREE); + } + if (!suballocations2nd.empty()) + { + // Null item at the end should be just pop_back().
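+ // Background for these checks (sketch): freed entries become "null items" that stay in place inside the vectors; only the run at the front of 1st is tracked by m_1stNullItemsBeginCount and middle holes by m_1stNullItemsMiddleCount, while a trailing null item is expected to have been popped immediately, hence back() may never be free.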
+ VMA_VALIDATE(suballocations2nd.back().type != VMA_SUBALLOCATION_TYPE_FREE); + } + + VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size()); + VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size()); + + VkDeviceSize sumUsedSize = 0; + const size_t suballoc1stCount = suballocations1st.size(); + const VkDeviceSize debugMargin = GetDebugMargin(); + VkDeviceSize offset = 0; + + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + const size_t suballoc2ndCount = suballocations2nd.size(); + size_t nullItem2ndCount = 0; + for (size_t i = 0; i < suballoc2ndCount; ++i) + { + const VmaSuballocation& suballoc = suballocations2nd[i]; + const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); + + VmaAllocation const alloc = (VmaAllocation)suballoc.userData; + if (!IsVirtual()) + { + VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE)); + } + VMA_VALIDATE(suballoc.offset >= offset); + + if (!currFree) + { + if (!IsVirtual()) + { + VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1); + VMA_VALIDATE(alloc->GetSize() == suballoc.size); + } + sumUsedSize += suballoc.size; + } + else + { + ++nullItem2ndCount; + } + + offset = suballoc.offset + suballoc.size + debugMargin; + } + + VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount); + } + + for (size_t i = 0; i < m_1stNullItemsBeginCount; ++i) + { + const VmaSuballocation& suballoc = suballocations1st[i]; + VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE && + suballoc.userData == VMA_NULL); + } + + size_t nullItem1stCount = m_1stNullItemsBeginCount; + + for (size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i) + { + const VmaSuballocation& suballoc = suballocations1st[i]; + const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); + + VmaAllocation const alloc = (VmaAllocation)suballoc.userData; + if (!IsVirtual()) + { + VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE)); + } + VMA_VALIDATE(suballoc.offset >= offset); + VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree); + + if (!currFree) + { + if (!IsVirtual()) + { + VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1); + VMA_VALIDATE(alloc->GetSize() == suballoc.size); + } + sumUsedSize += suballoc.size; + } + else + { + ++nullItem1stCount; + } + + offset = suballoc.offset + suballoc.size + debugMargin; + } + VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount); + + if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + const size_t suballoc2ndCount = suballocations2nd.size(); + size_t nullItem2ndCount = 0; + for (size_t i = suballoc2ndCount; i--; ) + { + const VmaSuballocation& suballoc = suballocations2nd[i]; + const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); + + VmaAllocation const alloc = (VmaAllocation)suballoc.userData; + if (!IsVirtual()) + { + VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE)); + } + VMA_VALIDATE(suballoc.offset >= offset); + + if (!currFree) + { + if (!IsVirtual()) + { + VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1); + VMA_VALIDATE(alloc->GetSize() == suballoc.size); + } + sumUsedSize += suballoc.size; + } + else + { + ++nullItem2ndCount; + } + + offset = suballoc.offset + suballoc.size + debugMargin; + } + + VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount); + } + + VMA_VALIDATE(offset <= GetSize()); + VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize); + + return true; +} + +size_t VmaBlockMetadata_Linear::GetAllocationCount() const +{ + 
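+ // Live allocations = all stored suballocations minus the tracked null items. Illustrative example (hypothetical contents): 1st = {null, null, A, B}, 2nd = {C}, with m_1stNullItemsBeginCount == 2 and no other null items, gives 4 - 2 - 0 + 1 - 0 == 3.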
return AccessSuballocations1st().size() - m_1stNullItemsBeginCount - m_1stNullItemsMiddleCount + + AccessSuballocations2nd().size() - m_2ndNullItemsCount; +} + +size_t VmaBlockMetadata_Linear::GetFreeRegionsCount() const +{ + // Function only used for defragmentation, which is disabled for this algorithm + VMA_ASSERT(0); + return SIZE_MAX; +} + +void VmaBlockMetadata_Linear::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const +{ + const VkDeviceSize size = GetSize(); + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + const size_t suballoc1stCount = suballocations1st.size(); + const size_t suballoc2ndCount = suballocations2nd.size(); + + inoutStats.statistics.blockCount++; + inoutStats.statistics.blockBytes += size; + + VkDeviceSize lastOffset = 0; + + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; + size_t nextAlloc2ndIndex = 0; + while (lastOffset < freeSpace2ndTo1stEnd) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc2ndIndex < suballoc2ndCount && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + ++nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex < suballoc2ndCount) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size); + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc2ndIndex; + } + // We are at the end. + else + { + // There is free space from lastOffset to freeSpace2ndTo1stEnd. + if (lastOffset < freeSpace2ndTo1stEnd) + { + const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // End of loop. + lastOffset = freeSpace2ndTo1stEnd; + } + } + } + + size_t nextAlloc1stIndex = m_1stNullItemsBeginCount; + const VkDeviceSize freeSpace1stTo2ndEnd = + m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size; + while (lastOffset < freeSpace1stTo2ndEnd) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc1stIndex < suballoc1stCount && + suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) + { + ++nextAlloc1stIndex; + } + + // Found non-null allocation. + if (nextAlloc1stIndex < suballoc1stCount) + { + const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size); + + // 3. Prepare for next iteration. 
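+ // Scan pattern of this loop (hypothetical contents): with allocations A at 0..99 and B at 150..199 in 1st, it accounts A, then the 100..149 gap as an unused range, then B, and finally the tail gap up to freeSpace1stTo2ndEnd; lastOffset always advances to the end of the region just processed.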
+ lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc1stIndex; + } + // We are at the end. + else + { + // There is free space from lastOffset to freeSpace1stTo2ndEnd. + if (lastOffset < freeSpace1stTo2ndEnd) + { + const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // End of loop. + lastOffset = freeSpace1stTo2ndEnd; + } + } + + if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; + while (lastOffset < size) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc2ndIndex != SIZE_MAX && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + --nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex != SIZE_MAX) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size); + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + --nextAlloc2ndIndex; + } + // We are at the end. + else + { + // There is free space from lastOffset to size. + if (lastOffset < size) + { + const VkDeviceSize unusedRangeSize = size - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // End of loop. + lastOffset = size; + } + } + } +} + +void VmaBlockMetadata_Linear::AddStatistics(VmaStatistics& inoutStats) const +{ + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + const VkDeviceSize size = GetSize(); + const size_t suballoc1stCount = suballocations1st.size(); + const size_t suballoc2ndCount = suballocations2nd.size(); + + inoutStats.blockCount++; + inoutStats.blockBytes += size; + inoutStats.allocationBytes += size - m_SumFreeSize; + + VkDeviceSize lastOffset = 0; + + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; + size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount; + while (lastOffset < freeSpace2ndTo1stEnd) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex < suballoc2ndCount && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + ++nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex < suballoc2ndCount) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++inoutStats.allocationCount; + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc2ndIndex; + } + // We are at the end. 
+ else + { + if (lastOffset < freeSpace2ndTo1stEnd) + { + // There is free space from lastOffset to freeSpace2ndTo1stEnd. + const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset; + } + + // End of loop. + lastOffset = freeSpace2ndTo1stEnd; + } + } + } + + size_t nextAlloc1stIndex = m_1stNullItemsBeginCount; + const VkDeviceSize freeSpace1stTo2ndEnd = + m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size; + while (lastOffset < freeSpace1stTo2ndEnd) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc1stIndex < suballoc1stCount && + suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) + { + ++nextAlloc1stIndex; + } + + // Found non-null allocation. + if (nextAlloc1stIndex < suballoc1stCount) + { + const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++inoutStats.allocationCount; + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc1stIndex; + } + // We are at the end. + else + { + if (lastOffset < freeSpace1stTo2ndEnd) + { + // There is free space from lastOffset to freeSpace1stTo2ndEnd. + const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset; + } + + // End of loop. + lastOffset = freeSpace1stTo2ndEnd; + } + } + + if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; + while (lastOffset < size) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex != SIZE_MAX && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + --nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex != SIZE_MAX) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++inoutStats.allocationCount; + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + --nextAlloc2ndIndex; + } + // We are at the end. + else + { + if (lastOffset < size) + { + // There is free space from lastOffset to size. + const VkDeviceSize unusedRangeSize = size - lastOffset; + } + + // End of loop. 
+ lastOffset = size; + } + } + } +} + +#if VMA_STATS_STRING_ENABLED +void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const +{ + const VkDeviceSize size = GetSize(); + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + const size_t suballoc1stCount = suballocations1st.size(); + const size_t suballoc2ndCount = suballocations2nd.size(); + + // FIRST PASS + + size_t unusedRangeCount = 0; + VkDeviceSize usedBytes = 0; + + VkDeviceSize lastOffset = 0; + + size_t alloc2ndCount = 0; + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; + size_t nextAlloc2ndIndex = 0; + while (lastOffset < freeSpace2ndTo1stEnd) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex < suballoc2ndCount && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + ++nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex < suballoc2ndCount) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + ++unusedRangeCount; + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++alloc2ndCount; + usedBytes += suballoc.size; + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc2ndIndex; + } + // We are at the end. + else + { + if (lastOffset < freeSpace2ndTo1stEnd) + { + // There is free space from lastOffset to freeSpace2ndTo1stEnd. + ++unusedRangeCount; + } + + // End of loop. + lastOffset = freeSpace2ndTo1stEnd; + } + } + } + + size_t nextAlloc1stIndex = m_1stNullItemsBeginCount; + size_t alloc1stCount = 0; + const VkDeviceSize freeSpace1stTo2ndEnd = + m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size; + while (lastOffset < freeSpace1stTo2ndEnd) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc1stIndex < suballoc1stCount && + suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) + { + ++nextAlloc1stIndex; + } + + // Found non-null allocation. + if (nextAlloc1stIndex < suballoc1stCount) + { + const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + ++unusedRangeCount; + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++alloc1stCount; + usedBytes += suballoc.size; + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc1stIndex; + } + // We are at the end. + else + { + if (lastOffset < size) + { + // There is free space from lastOffset to freeSpace1stTo2ndEnd. + ++unusedRangeCount; + } + + // End of loop. + lastOffset = freeSpace1stTo2ndEnd; + } + } + + if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; + while (lastOffset < size) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. 
+ while (nextAlloc2ndIndex != SIZE_MAX && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + --nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex != SIZE_MAX) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + ++unusedRangeCount; + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++alloc2ndCount; + usedBytes += suballoc.size; + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + --nextAlloc2ndIndex; + } + // We are at the end. + else + { + if (lastOffset < size) + { + // There is free space from lastOffset to size. + ++unusedRangeCount; + } + + // End of loop. + lastOffset = size; + } + } + } + + const VkDeviceSize unusedBytes = size - usedBytes; + PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount); + + // SECOND PASS + lastOffset = 0; + + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; + size_t nextAlloc2ndIndex = 0; + while (lastOffset < freeSpace2ndTo1stEnd) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex < suballoc2ndCount && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + ++nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex < suballoc2ndCount) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData); + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc2ndIndex; + } + // We are at the end. + else + { + if (lastOffset < freeSpace2ndTo1stEnd) + { + // There is free space from lastOffset to freeSpace2ndTo1stEnd. + const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // End of loop. + lastOffset = freeSpace2ndTo1stEnd; + } + } + } + + nextAlloc1stIndex = m_1stNullItemsBeginCount; + while (lastOffset < freeSpace1stTo2ndEnd) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc1stIndex < suballoc1stCount && + suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) + { + ++nextAlloc1stIndex; + } + + // Found non-null allocation. + if (nextAlloc1stIndex < suballoc1stCount) + { + const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. 
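+ // Design note (sketch): the FIRST PASS above only counted allocations, used bytes, and unused ranges so that PrintDetailedMap_Begin could emit correct totals; this SECOND PASS revisits the same regions in address order and emits one JSON entry per allocation or unused range.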
+ PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData); + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc1stIndex; + } + // We are at the end. + else + { + if (lastOffset < freeSpace1stTo2ndEnd) + { + // There is free space from lastOffset to freeSpace1stTo2ndEnd. + const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // End of loop. + lastOffset = freeSpace1stTo2ndEnd; + } + } + + if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; + while (lastOffset < size) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex != SIZE_MAX && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + --nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex != SIZE_MAX) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData); + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + --nextAlloc2ndIndex; + } + // We are at the end. + else + { + if (lastOffset < size) + { + // There is free space from lastOffset to size. + const VkDeviceSize unusedRangeSize = size - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // End of loop. + lastOffset = size; + } + } + } + + PrintDetailedMap_End(json); +} +#endif // VMA_STATS_STRING_ENABLED + +bool VmaBlockMetadata_Linear::CreateAllocationRequest( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + bool upperAddress, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) +{ + VMA_ASSERT(allocSize > 0); + VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE); + VMA_ASSERT(pAllocationRequest != VMA_NULL); + VMA_HEAVY_ASSERT(Validate()); + pAllocationRequest->size = allocSize; + return upperAddress ? 
+ CreateAllocationRequest_UpperAddress( + allocSize, allocAlignment, allocType, strategy, pAllocationRequest) : + CreateAllocationRequest_LowerAddress( + allocSize, allocAlignment, allocType, strategy, pAllocationRequest); +} + +VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData) +{ + VMA_ASSERT(!IsVirtual()); + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + for (size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i) + { + const VmaSuballocation& suballoc = suballocations1st[i]; + if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) + { + if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size)) + { + VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!"); + return VK_ERROR_UNKNOWN_COPY; + } + } + } + + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + for (size_t i = 0, count = suballocations2nd.size(); i < count; ++i) + { + const VmaSuballocation& suballoc = suballocations2nd[i]; + if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) + { + if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size)) + { + VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!"); + return VK_ERROR_UNKNOWN_COPY; + } + } + } + + return VK_SUCCESS; +} + +void VmaBlockMetadata_Linear::Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) +{ + const VkDeviceSize offset = (VkDeviceSize)request.allocHandle - 1; + const VmaSuballocation newSuballoc = { offset, request.size, userData, type }; + + switch (request.type) + { + case VmaAllocationRequestType::UpperAddress: + { + VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER && + "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer."); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + suballocations2nd.push_back(newSuballoc); + m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK; + } + break; + case VmaAllocationRequestType::EndOf1st: + { + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + + VMA_ASSERT(suballocations1st.empty() || + offset >= suballocations1st.back().offset + suballocations1st.back().size); + // Check if it fits before the end of the block. + VMA_ASSERT(offset + request.size <= GetSize()); + + suballocations1st.push_back(newSuballoc); + } + break; + case VmaAllocationRequestType::EndOf2nd: + { + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector. + VMA_ASSERT(!suballocations1st.empty() && + offset + request.size <= suballocations1st[m_1stNullItemsBeginCount].offset); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + + switch (m_2ndVectorMode) + { + case SECOND_VECTOR_EMPTY: + // First allocation from second part ring buffer. + VMA_ASSERT(suballocations2nd.empty()); + m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER; + break; + case SECOND_VECTOR_RING_BUFFER: + // 2-part ring buffer is already started. 
+ VMA_ASSERT(!suballocations2nd.empty()); + break; + case SECOND_VECTOR_DOUBLE_STACK: + VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack."); + break; + default: + VMA_ASSERT(0); + } + + suballocations2nd.push_back(newSuballoc); + } + break; + default: + VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR."); + } + + m_SumFreeSize -= newSuballoc.size; +} + +void VmaBlockMetadata_Linear::Free(VmaAllocHandle allocHandle) +{ + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + VkDeviceSize offset = (VkDeviceSize)allocHandle - 1; + + if (!suballocations1st.empty()) + { + // First allocation: Mark it as next empty at the beginning. + VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount]; + if (firstSuballoc.offset == offset) + { + firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE; + firstSuballoc.userData = VMA_NULL; + m_SumFreeSize += firstSuballoc.size; + ++m_1stNullItemsBeginCount; + CleanupAfterFree(); + return; + } + } + + // Last allocation in 2-part ring buffer or top of upper stack (same logic). + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER || + m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + VmaSuballocation& lastSuballoc = suballocations2nd.back(); + if (lastSuballoc.offset == offset) + { + m_SumFreeSize += lastSuballoc.size; + suballocations2nd.pop_back(); + CleanupAfterFree(); + return; + } + } + // Last allocation in 1st vector. + else if (m_2ndVectorMode == SECOND_VECTOR_EMPTY) + { + VmaSuballocation& lastSuballoc = suballocations1st.back(); + if (lastSuballoc.offset == offset) + { + m_SumFreeSize += lastSuballoc.size; + suballocations1st.pop_back(); + CleanupAfterFree(); + return; + } + } + + VmaSuballocation refSuballoc; + refSuballoc.offset = offset; + // Rest of members stays uninitialized intentionally for better performance. + + // Item from the middle of 1st vector. + { + const SuballocationVectorType::iterator it = VmaBinaryFindSorted( + suballocations1st.begin() + m_1stNullItemsBeginCount, + suballocations1st.end(), + refSuballoc, + VmaSuballocationOffsetLess()); + if (it != suballocations1st.end()) + { + it->type = VMA_SUBALLOCATION_TYPE_FREE; + it->userData = VMA_NULL; + ++m_1stNullItemsMiddleCount; + m_SumFreeSize += it->size; + CleanupAfterFree(); + return; + } + } + + if (m_2ndVectorMode != SECOND_VECTOR_EMPTY) + { + // Item from the middle of 2nd vector. + const SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ? 
+ VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) : + VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater()); + if (it != suballocations2nd.end()) + { + it->type = VMA_SUBALLOCATION_TYPE_FREE; + it->userData = VMA_NULL; + ++m_2ndNullItemsCount; + m_SumFreeSize += it->size; + CleanupAfterFree(); + return; + } + } + + VMA_ASSERT(0 && "Allocation to free not found in linear allocator!"); +} + +void VmaBlockMetadata_Linear::GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) +{ + outInfo.offset = (VkDeviceSize)allocHandle - 1; + VmaSuballocation& suballoc = FindSuballocation(outInfo.offset); + outInfo.size = suballoc.size; + outInfo.pUserData = suballoc.userData; +} + +void* VmaBlockMetadata_Linear::GetAllocationUserData(VmaAllocHandle allocHandle) const +{ + return FindSuballocation((VkDeviceSize)allocHandle - 1).userData; +} + +VmaAllocHandle VmaBlockMetadata_Linear::GetAllocationListBegin() const +{ + // Function only used for defragmentation, which is disabled for this algorithm + VMA_ASSERT(0); + return VK_NULL_HANDLE; +} + +VmaAllocHandle VmaBlockMetadata_Linear::GetNextAllocation(VmaAllocHandle prevAlloc) const +{ + // Function only used for defragmentation, which is disabled for this algorithm + VMA_ASSERT(0); + return VK_NULL_HANDLE; +} + +VkDeviceSize VmaBlockMetadata_Linear::GetNextFreeRegionSize(VmaAllocHandle alloc) const +{ + // Function only used for defragmentation, which is disabled for this algorithm + VMA_ASSERT(0); + return 0; +} + +void VmaBlockMetadata_Linear::Clear() +{ + m_SumFreeSize = GetSize(); + m_Suballocations0.clear(); + m_Suballocations1.clear(); + // Leaving m_1stVectorIndex unchanged - it doesn't matter. + m_2ndVectorMode = SECOND_VECTOR_EMPTY; + m_1stNullItemsBeginCount = 0; + m_1stNullItemsMiddleCount = 0; + m_2ndNullItemsCount = 0; +} + +void VmaBlockMetadata_Linear::SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) +{ + VmaSuballocation& suballoc = FindSuballocation((VkDeviceSize)allocHandle - 1); + suballoc.userData = userData; +} + +void VmaBlockMetadata_Linear::DebugLogAllAllocations() const +{ + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + for (auto it = suballocations1st.begin() + m_1stNullItemsBeginCount; it != suballocations1st.end(); ++it) + if (it->type != VMA_SUBALLOCATION_TYPE_FREE) + DebugLogAllocation(it->offset, it->size, it->userData); + + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + for (auto it = suballocations2nd.begin(); it != suballocations2nd.end(); ++it) + if (it->type != VMA_SUBALLOCATION_TYPE_FREE) + DebugLogAllocation(it->offset, it->size, it->userData); +} + +VmaSuballocation& VmaBlockMetadata_Linear::FindSuballocation(VkDeviceSize offset) const +{ + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + + VmaSuballocation refSuballoc; + refSuballoc.offset = offset; + // Rest of members stays uninitialized intentionally for better performance. + + // Item from the 1st vector. 
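+    // (The 1st vector is sorted by offset ascending; the 2nd vector is ascending
+    // in ring-buffer mode but descending in double-stack mode, which is why the
+    // search below picks between the Less and Greater comparators.)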
+    {
+        SuballocationVectorType::const_iterator it = VmaBinaryFindSorted(
+            suballocations1st.begin() + m_1stNullItemsBeginCount,
+            suballocations1st.end(),
+            refSuballoc,
+            VmaSuballocationOffsetLess());
+        if (it != suballocations1st.end())
+        {
+            return const_cast<VmaSuballocation&>(*it);
+        }
+    }
+
+    if (m_2ndVectorMode != SECOND_VECTOR_EMPTY)
+    {
+        // Item from the 2nd vector.
+        SuballocationVectorType::const_iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
+            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
+            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
+        if (it != suballocations2nd.end())
+        {
+            return const_cast<VmaSuballocation&>(*it);
+        }
+    }
+
+    VMA_ASSERT(0 && "Allocation not found in linear allocator!");
+    return const_cast<VmaSuballocation&>(suballocations1st.back()); // Should never occur.
+}
+
+bool VmaBlockMetadata_Linear::ShouldCompact1st() const
+{
+    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
+    const size_t suballocCount = AccessSuballocations1st().size();
+    // Compact when the vector is non-trivial (> 32 items) and null items make up
+    // at least 60% of it (nullItemCount >= 1.5 * nonNullItemCount).
+    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
+}
+
+void VmaBlockMetadata_Linear::CleanupAfterFree()
+{
+    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
+    if (IsEmpty())
+    {
+        suballocations1st.clear();
+        suballocations2nd.clear();
+        m_1stNullItemsBeginCount = 0;
+        m_1stNullItemsMiddleCount = 0;
+        m_2ndNullItemsCount = 0;
+        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
+    }
+    else
+    {
+        const size_t suballoc1stCount = suballocations1st.size();
+        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
+        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
+
+        // Find more null items at the beginning of 1st vector.
+        while (m_1stNullItemsBeginCount < suballoc1stCount &&
+            suballocations1st[m_1stNullItemsBeginCount].type == VMA_SUBALLOCATION_TYPE_FREE)
+        {
+            ++m_1stNullItemsBeginCount;
+            --m_1stNullItemsMiddleCount;
+        }
+
+        // Find more null items at the end of 1st vector.
+        while (m_1stNullItemsMiddleCount > 0 &&
+            suballocations1st.back().type == VMA_SUBALLOCATION_TYPE_FREE)
+        {
+            --m_1stNullItemsMiddleCount;
+            suballocations1st.pop_back();
+        }
+
+        // Find more null items at the end of 2nd vector.
+        while (m_2ndNullItemsCount > 0 &&
+            suballocations2nd.back().type == VMA_SUBALLOCATION_TYPE_FREE)
+        {
+            --m_2ndNullItemsCount;
+            suballocations2nd.pop_back();
+        }
+
+        // Find more null items at the beginning of 2nd vector.
+        while (m_2ndNullItemsCount > 0 &&
+            suballocations2nd[0].type == VMA_SUBALLOCATION_TYPE_FREE)
+        {
+            --m_2ndNullItemsCount;
+            VmaVectorRemove(suballocations2nd, 0);
+        }
+
+        if (ShouldCompact1st())
+        {
+            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
+            size_t srcIndex = m_1stNullItemsBeginCount;
+            for (size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
+            {
+                while (suballocations1st[srcIndex].type == VMA_SUBALLOCATION_TYPE_FREE)
+                {
+                    ++srcIndex;
+                }
+                if (dstIndex != srcIndex)
+                {
+                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
+                }
+                ++srcIndex;
+            }
+            suballocations1st.resize(nonNullItemCount);
+            m_1stNullItemsBeginCount = 0;
+            m_1stNullItemsMiddleCount = 0;
+        }
+
+        // 2nd vector became empty.
+        if (suballocations2nd.empty())
+        {
+            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
+        }
+
+        // 1st vector became empty.
+ if (suballocations1st.size() - m_1stNullItemsBeginCount == 0) + { + suballocations1st.clear(); + m_1stNullItemsBeginCount = 0; + + if (!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + // Swap 1st with 2nd. Now 2nd is empty. + m_2ndVectorMode = SECOND_VECTOR_EMPTY; + m_1stNullItemsMiddleCount = m_2ndNullItemsCount; + while (m_1stNullItemsBeginCount < suballocations2nd.size() && + suballocations2nd[m_1stNullItemsBeginCount].type == VMA_SUBALLOCATION_TYPE_FREE) + { + ++m_1stNullItemsBeginCount; + --m_1stNullItemsMiddleCount; + } + m_2ndNullItemsCount = 0; + m_1stVectorIndex ^= 1; + } + } + } + + VMA_HEAVY_ASSERT(Validate()); +} + +bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) +{ + const VkDeviceSize blockSize = GetSize(); + const VkDeviceSize debugMargin = GetDebugMargin(); + const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity(); + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + + if (m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + // Try to allocate at the end of 1st vector. + + VkDeviceSize resultBaseOffset = 0; + if (!suballocations1st.empty()) + { + const VmaSuballocation& lastSuballoc = suballocations1st.back(); + resultBaseOffset = lastSuballoc.offset + lastSuballoc.size + debugMargin; + } + + // Start from offset equal to beginning of free space. + VkDeviceSize resultOffset = resultBaseOffset; + + // Apply alignment. + resultOffset = VmaAlignUp(resultOffset, allocAlignment); + + // Check previous suballocations for BufferImageGranularity conflicts. + // Make bigger alignment if necessary. + if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations1st.empty()) + { + bool bufferImageGranularityConflict = false; + for (size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; ) + { + const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex]; + if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) + { + bufferImageGranularityConflict = true; + break; + } + } + else + // Already on previous page. + break; + } + if (bufferImageGranularityConflict) + { + resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity); + } + } + + const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? + suballocations2nd.back().offset : blockSize; + + // There is enough free space at the end after alignment. + if (resultOffset + allocSize + debugMargin <= freeSpaceEnd) + { + // Check next suballocations for BufferImageGranularity conflicts. + // If conflict exists, allocation cannot be made here. 
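+            // (If both resultOffset and allocSize are multiples of the granularity,
+            // the allocation occupies whole granularity pages only, so it cannot
+            // share a page with a following allocation and the scan is skipped.)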
+ if ((allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + for (size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; ) + { + const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex]; + if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) + { + return false; + } + } + else + { + // Already on previous page. + break; + } + } + } + + // All tests passed: Success. + pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1); + // pAllocationRequest->item, customData unused. + pAllocationRequest->type = VmaAllocationRequestType::EndOf1st; + return true; + } + } + + // Wrap-around to end of 2nd vector. Try to allocate there, watching for the + // beginning of 1st vector as the end of free space. + if (m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + VMA_ASSERT(!suballocations1st.empty()); + + VkDeviceSize resultBaseOffset = 0; + if (!suballocations2nd.empty()) + { + const VmaSuballocation& lastSuballoc = suballocations2nd.back(); + resultBaseOffset = lastSuballoc.offset + lastSuballoc.size + debugMargin; + } + + // Start from offset equal to beginning of free space. + VkDeviceSize resultOffset = resultBaseOffset; + + // Apply alignment. + resultOffset = VmaAlignUp(resultOffset, allocAlignment); + + // Check previous suballocations for BufferImageGranularity conflicts. + // Make bigger alignment if necessary. + if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty()) + { + bool bufferImageGranularityConflict = false; + for (size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; ) + { + const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex]; + if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) + { + bufferImageGranularityConflict = true; + break; + } + } + else + // Already on previous page. + break; + } + if (bufferImageGranularityConflict) + { + resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity); + } + } + + size_t index1st = m_1stNullItemsBeginCount; + + // There is enough free space at the end after alignment. + if ((index1st == suballocations1st.size() && resultOffset + allocSize + debugMargin <= blockSize) || + (index1st < suballocations1st.size() && resultOffset + allocSize + debugMargin <= suballocations1st[index1st].offset)) + { + // Check next suballocations for BufferImageGranularity conflicts. + // If conflict exists, allocation cannot be made here. + if (allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) + { + for (size_t nextSuballocIndex = index1st; + nextSuballocIndex < suballocations1st.size(); + nextSuballocIndex++) + { + const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex]; + if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) + { + return false; + } + } + else + { + // Already on next page. + break; + } + } + } + + // All tests passed: Success. 
+ pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1); + pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd; + // pAllocationRequest->item, customData unused. + return true; + } + } + + return false; +} + +bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) +{ + const VkDeviceSize blockSize = GetSize(); + const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity(); + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer."); + return false; + } + + // Try to allocate before 2nd.back(), or end of block if 2nd.empty(). + if (allocSize > blockSize) + { + return false; + } + VkDeviceSize resultBaseOffset = blockSize - allocSize; + if (!suballocations2nd.empty()) + { + const VmaSuballocation& lastSuballoc = suballocations2nd.back(); + resultBaseOffset = lastSuballoc.offset - allocSize; + if (allocSize > lastSuballoc.offset) + { + return false; + } + } + + // Start from offset equal to end of free space. + VkDeviceSize resultOffset = resultBaseOffset; + + const VkDeviceSize debugMargin = GetDebugMargin(); + + // Apply debugMargin at the end. + if (debugMargin > 0) + { + if (resultOffset < debugMargin) + { + return false; + } + resultOffset -= debugMargin; + } + + // Apply alignment. + resultOffset = VmaAlignDown(resultOffset, allocAlignment); + + // Check next suballocations from 2nd for BufferImageGranularity conflicts. + // Make bigger alignment if necessary. + if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty()) + { + bool bufferImageGranularityConflict = false; + for (size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; ) + { + const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex]; + if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType)) + { + bufferImageGranularityConflict = true; + break; + } + } + else + // Already on previous page. + break; + } + if (bufferImageGranularityConflict) + { + resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity); + } + } + + // There is enough free space. + const VkDeviceSize endOf1st = !suballocations1st.empty() ? + suballocations1st.back().offset + suballocations1st.back().size : + 0; + if (endOf1st + debugMargin <= resultOffset) + { + // Check previous suballocations for BufferImageGranularity conflicts. + // If conflict exists, allocation cannot be made here. + if (bufferImageGranularity > 1) + { + for (size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; ) + { + const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex]; + if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type)) + { + return false; + } + } + else + { + // Already on next page. + break; + } + } + } + + // All tests passed: Success. 
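+        // (The handle stores offset + 1 so that an allocation at offset 0 still
+        // yields a non-null handle; GetAllocationOffset() subtracts the 1 again.)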
+ pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1); + // pAllocationRequest->item unused. + pAllocationRequest->type = VmaAllocationRequestType::UpperAddress; + return true; + } + + return false; +} +#endif // _VMA_BLOCK_METADATA_LINEAR_FUNCTIONS +#endif // _VMA_BLOCK_METADATA_LINEAR + +#if 0 +#ifndef _VMA_BLOCK_METADATA_BUDDY +/* +- GetSize() is the original size of allocated memory block. +- m_UsableSize is this size aligned down to a power of two. + All allocations and calculations happen relative to m_UsableSize. +- GetUnusableSize() is the difference between them. + It is reported as separate, unused range, not available for allocations. + +Node at level 0 has size = m_UsableSize. +Each next level contains nodes with size 2 times smaller than current level. +m_LevelCount is the maximum number of levels to use in the current object. +*/ +class VmaBlockMetadata_Buddy : public VmaBlockMetadata +{ + VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy) +public: + VmaBlockMetadata_Buddy(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual); + virtual ~VmaBlockMetadata_Buddy(); + + size_t GetAllocationCount() const override { return m_AllocationCount; } + VkDeviceSize GetSumFreeSize() const override { return m_SumFreeSize + GetUnusableSize(); } + bool IsEmpty() const override { return m_Root->type == Node::TYPE_FREE; } + VkResult CheckCorruption(const void* pBlockData) override { return VK_ERROR_FEATURE_NOT_PRESENT; } + VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const override { return (VkDeviceSize)allocHandle - 1; }; + void DebugLogAllAllocations() const override { DebugLogAllAllocationNode(m_Root, 0); } + + void Init(VkDeviceSize size) override; + bool Validate() const override; + + void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const override; + void AddStatistics(VmaStatistics& inoutStats) const override; + +#if VMA_STATS_STRING_ENABLED + void PrintDetailedMap(class VmaJsonWriter& json, uint32_t mapRefCount) const override; +#endif + + bool CreateAllocationRequest( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + bool upperAddress, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) override; + + void Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) override; + + void Free(VmaAllocHandle allocHandle) override; + void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) override; + void* GetAllocationUserData(VmaAllocHandle allocHandle) const override; + VmaAllocHandle GetAllocationListBegin() const override; + VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const override; + void Clear() override; + void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) override; + +private: + static const size_t MAX_LEVELS = 48; + + struct ValidationContext + { + size_t calculatedAllocationCount = 0; + size_t calculatedFreeCount = 0; + VkDeviceSize calculatedSumFreeSize = 0; + }; + struct Node + { + VkDeviceSize offset; + enum TYPE + { + TYPE_FREE, + TYPE_ALLOCATION, + TYPE_SPLIT, + TYPE_COUNT + } type; + Node* parent; + Node* buddy; + + union + { + struct + { + Node* prev; + Node* next; + } free; + struct + { + void* userData; + } allocation; + struct + { + Node* leftChild; + } split; + }; + }; + + // Size of the memory block aligned down to a power of two. 
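+    // E.g. for a 12 MiB block, m_UsableSize is 8 MiB and the remaining 4 MiB
+    // is GetUnusableSize(), reported as one unused range.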
+    VkDeviceSize m_UsableSize;
+    uint32_t m_LevelCount;
+    VmaPoolAllocator<Node> m_NodeAllocator;
+    Node* m_Root;
+    struct
+    {
+        Node* front;
+        Node* back;
+    } m_FreeList[MAX_LEVELS];
+
+    // Number of nodes in the tree with type == TYPE_ALLOCATION.
+    size_t m_AllocationCount;
+    // Number of nodes in the tree with type == TYPE_FREE.
+    size_t m_FreeCount;
+    // Doesn't include space wasted due to internal fragmentation - allocation sizes are just aligned up to node sizes.
+    // Doesn't include unusable size.
+    VkDeviceSize m_SumFreeSize;
+
+    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
+    VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
+
+    VkDeviceSize AlignAllocationSize(VkDeviceSize size) const
+    {
+        if (!IsVirtual())
+        {
+            size = VmaAlignUp(size, (VkDeviceSize)16);
+        }
+        return VmaNextPow2(size);
+    }
+    Node* FindAllocationNode(VkDeviceSize offset, uint32_t& outLevel) const;
+    void DeleteNodeChildren(Node* node);
+    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
+    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
+    void AddNodeToDetailedStatistics(VmaDetailedStatistics& inoutStats, const Node* node, VkDeviceSize levelNodeSize) const;
+    // Adds node to the front of FreeList at given level.
+    // node->type must be FREE.
+    // node->free.prev, next can be undefined.
+    void AddToFreeListFront(uint32_t level, Node* node);
+    // Removes node from FreeList at given level.
+    // node->type must be FREE.
+    // node->free.prev, next stay untouched.
+    void RemoveFromFreeList(uint32_t level, Node* node);
+    void DebugLogAllAllocationNode(Node* node, uint32_t level) const;
+
+#if VMA_STATS_STRING_ENABLED
+    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
+#endif
+};
+
+#ifndef _VMA_BLOCK_METADATA_BUDDY_FUNCTIONS
+VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(const VkAllocationCallbacks* pAllocationCallbacks,
+    VkDeviceSize bufferImageGranularity, bool isVirtual)
+    : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual),
+    m_NodeAllocator(pAllocationCallbacks, 32), // firstBlockCapacity
+    m_Root(VMA_NULL),
+    m_AllocationCount(0),
+    m_FreeCount(1),
+    m_SumFreeSize(0)
+{
+    memset(m_FreeList, 0, sizeof(m_FreeList));
+}
+
+VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
+{
+    DeleteNodeChildren(m_Root);
+    m_NodeAllocator.Free(m_Root);
+}
+
+void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
+{
+    VmaBlockMetadata::Init(size);
+
+    m_UsableSize = VmaPrevPow2(size);
+    m_SumFreeSize = m_UsableSize;
+
+    // Calculate m_LevelCount.
+    const VkDeviceSize minNodeSize = IsVirtual() ? 1 : 16;
+    m_LevelCount = 1;
+    while (m_LevelCount < MAX_LEVELS &&
+        LevelToNodeSize(m_LevelCount) >= minNodeSize)
+    {
+        ++m_LevelCount;
+    }
+
+    Node* rootNode = m_NodeAllocator.Alloc();
+    rootNode->offset = 0;
+    rootNode->type = Node::TYPE_FREE;
+    rootNode->parent = VMA_NULL;
+    rootNode->buddy = VMA_NULL;
+
+    m_Root = rootNode;
+    AddToFreeListFront(0, rootNode);
+}
+
+bool VmaBlockMetadata_Buddy::Validate() const
+{
+    // Validate tree.
+    ValidationContext ctx;
+    if (!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
+    {
+        VMA_VALIDATE(false && "ValidateNode failed.");
+    }
+    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
+    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
+
+    // Validate free node lists.
+    for (uint32_t level = 0; level < m_LevelCount; ++level)
+    {
+        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
+            m_FreeList[level].front->free.prev == VMA_NULL);
+
+        for (Node* node = m_FreeList[level].front;
+            node != VMA_NULL;
+            node = node->free.next)
+        {
+            VMA_VALIDATE(node->type == Node::TYPE_FREE);
+
+            if (node->free.next == VMA_NULL)
+            {
+                VMA_VALIDATE(m_FreeList[level].back == node);
+            }
+            else
+            {
+                VMA_VALIDATE(node->free.next->free.prev == node);
+            }
+        }
+    }
+
+    // Validate that free lists at higher levels are empty.
+    for (uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
+    {
+        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
+    }
+
+    return true;
+}
+
+void VmaBlockMetadata_Buddy::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const
+{
+    inoutStats.statistics.blockCount++;
+    inoutStats.statistics.blockBytes += GetSize();
+
+    AddNodeToDetailedStatistics(inoutStats, m_Root, LevelToNodeSize(0));
+
+    const VkDeviceSize unusableSize = GetUnusableSize();
+    if (unusableSize > 0)
+        VmaAddDetailedStatisticsUnusedRange(inoutStats, unusableSize);
+}
+
+void VmaBlockMetadata_Buddy::AddStatistics(VmaStatistics& inoutStats) const
+{
+    inoutStats.blockCount++;
+    inoutStats.allocationCount += (uint32_t)m_AllocationCount;
+    inoutStats.blockBytes += GetSize();
+    inoutStats.allocationBytes += GetSize() - m_SumFreeSize;
+}
+
+#if VMA_STATS_STRING_ENABLED
+void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json, uint32_t mapRefCount) const
+{
+    VmaDetailedStatistics stats;
+    VmaClearDetailedStatistics(stats);
+    AddDetailedStatistics(stats);
+
+    PrintDetailedMap_Begin(
+        json,
+        stats.statistics.blockBytes - stats.statistics.allocationBytes,
+        stats.statistics.allocationCount,
+        stats.unusedRangeCount,
+        mapRefCount);
+
+    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
+
+    const VkDeviceSize unusableSize = GetUnusableSize();
+    if (unusableSize > 0)
+    {
+        PrintDetailedMap_UnusedRange(json,
+            m_UsableSize, // offset
+            unusableSize); // size
+    }
+
+    PrintDetailedMap_End(json);
+}
+#endif // VMA_STATS_STRING_ENABLED
+
+bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
+    VkDeviceSize allocSize,
+    VkDeviceSize allocAlignment,
+    bool upperAddress,
+    VmaSuballocationType allocType,
+    uint32_t strategy,
+    VmaAllocationRequest* pAllocationRequest)
+{
+    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
+
+    allocSize = AlignAllocationSize(allocSize);
+
+    // Simple way to respect bufferImageGranularity. May be optimized some day.
+    // Whenever it might be an OPTIMAL image...
+    if (allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
+        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
+        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
+    {
+        allocAlignment = VMA_MAX(allocAlignment, GetBufferImageGranularity());
+        allocSize = VmaAlignUp(allocSize, GetBufferImageGranularity());
+    }
+
+    if (allocSize > m_UsableSize)
+    {
+        return false;
+    }
+
+    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
+    // Search from the exact-fit level down to level 0 (larger nodes that can be split).
+    for (uint32_t level = targetLevel + 1; level--; )
+    {
+        for (Node* freeNode = m_FreeList[level].front;
+            freeNode != VMA_NULL;
+            freeNode = freeNode->free.next)
+        {
+            if (freeNode->offset % allocAlignment == 0)
+            {
+                pAllocationRequest->type = VmaAllocationRequestType::Normal;
+                pAllocationRequest->allocHandle = (VmaAllocHandle)(freeNode->offset + 1);
+                pAllocationRequest->size = allocSize;
+                pAllocationRequest->customData = (void*)(uintptr_t)level;
+                return true;
+            }
+        }
+    }
+
+    return false;
+}
+
+void VmaBlockMetadata_Buddy::Alloc(
+    const VmaAllocationRequest& request,
+    VmaSuballocationType type,
+    void* userData)
+{
+    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
+
+    const uint32_t targetLevel = AllocSizeToLevel(request.size);
+    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
+
+    Node* currNode = m_FreeList[currLevel].front;
+    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
+    const VkDeviceSize offset = (VkDeviceSize)request.allocHandle - 1;
+    while (currNode->offset != offset)
+    {
+        currNode = currNode->free.next;
+        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
+    }
+
+    // Go down, splitting free nodes.
+    while (currLevel < targetLevel)
+    {
+        // currNode is already first free node at currLevel.
+        // Remove it from list of free nodes at this currLevel.
+        RemoveFromFreeList(currLevel, currNode);
+
+        const uint32_t childrenLevel = currLevel + 1;
+
+        // Create two free sub-nodes.
+        Node* leftChild = m_NodeAllocator.Alloc();
+        Node* rightChild = m_NodeAllocator.Alloc();
+
+        leftChild->offset = currNode->offset;
+        leftChild->type = Node::TYPE_FREE;
+        leftChild->parent = currNode;
+        leftChild->buddy = rightChild;
+
+        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
+        rightChild->type = Node::TYPE_FREE;
+        rightChild->parent = currNode;
+        rightChild->buddy = leftChild;
+
+        // Convert current currNode to split type.
+        currNode->type = Node::TYPE_SPLIT;
+        currNode->split.leftChild = leftChild;
+
+        // Add child nodes to free list. Order is important!
+        AddToFreeListFront(childrenLevel, rightChild);
+        AddToFreeListFront(childrenLevel, leftChild);
+
+        ++m_FreeCount;
+        ++currLevel;
+        currNode = m_FreeList[currLevel].front;
+
+        /*
+        We can be sure that currNode, as left child of node previously split,
+        also fulfills the alignment requirement.
+        */
+    }
+
+    // Remove from free list.
+    VMA_ASSERT(currLevel == targetLevel &&
+        currNode != VMA_NULL &&
+        currNode->type == Node::TYPE_FREE);
+    RemoveFromFreeList(currLevel, currNode);
+
+    // Convert to allocation node.
+ currNode->type = Node::TYPE_ALLOCATION; + currNode->allocation.userData = userData; + + ++m_AllocationCount; + --m_FreeCount; + m_SumFreeSize -= request.size; +} + +void VmaBlockMetadata_Buddy::GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) +{ + uint32_t level = 0; + outInfo.offset = (VkDeviceSize)allocHandle - 1; + const Node* const node = FindAllocationNode(outInfo.offset, level); + outInfo.size = LevelToNodeSize(level); + outInfo.pUserData = node->allocation.userData; +} + +void* VmaBlockMetadata_Buddy::GetAllocationUserData(VmaAllocHandle allocHandle) const +{ + uint32_t level = 0; + const Node* const node = FindAllocationNode((VkDeviceSize)allocHandle - 1, level); + return node->allocation.userData; +} + +VmaAllocHandle VmaBlockMetadata_Buddy::GetAllocationListBegin() const +{ + // Function only used for defragmentation, which is disabled for this algorithm + return VK_NULL_HANDLE; +} + +VmaAllocHandle VmaBlockMetadata_Buddy::GetNextAllocation(VmaAllocHandle prevAlloc) const +{ + // Function only used for defragmentation, which is disabled for this algorithm + return VK_NULL_HANDLE; +} + +void VmaBlockMetadata_Buddy::DeleteNodeChildren(Node* node) +{ + if (node->type == Node::TYPE_SPLIT) + { + DeleteNodeChildren(node->split.leftChild->buddy); + DeleteNodeChildren(node->split.leftChild); + const VkAllocationCallbacks* allocationCallbacks = GetAllocationCallbacks(); + m_NodeAllocator.Free(node->split.leftChild->buddy); + m_NodeAllocator.Free(node->split.leftChild); + } +} + +void VmaBlockMetadata_Buddy::Clear() +{ + DeleteNodeChildren(m_Root); + m_Root->type = Node::TYPE_FREE; + m_AllocationCount = 0; + m_FreeCount = 1; + m_SumFreeSize = m_UsableSize; +} + +void VmaBlockMetadata_Buddy::SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) +{ + uint32_t level = 0; + Node* const node = FindAllocationNode((VkDeviceSize)allocHandle - 1, level); + node->allocation.userData = userData; +} + +VmaBlockMetadata_Buddy::Node* VmaBlockMetadata_Buddy::FindAllocationNode(VkDeviceSize offset, uint32_t& outLevel) const +{ + Node* node = m_Root; + VkDeviceSize nodeOffset = 0; + outLevel = 0; + VkDeviceSize levelNodeSize = LevelToNodeSize(0); + while (node->type == Node::TYPE_SPLIT) + { + const VkDeviceSize nextLevelNodeSize = levelNodeSize >> 1; + if (offset < nodeOffset + nextLevelNodeSize) + { + node = node->split.leftChild; + } + else + { + node = node->split.leftChild->buddy; + nodeOffset += nextLevelNodeSize; + } + ++outLevel; + levelNodeSize = nextLevelNodeSize; + } + + VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION); + return node; +} + +bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const +{ + VMA_VALIDATE(level < m_LevelCount); + VMA_VALIDATE(curr->parent == parent); + VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL)); + VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr); + switch (curr->type) + { + case Node::TYPE_FREE: + // curr->free.prev, next are validated separately. 
+ ctx.calculatedSumFreeSize += levelNodeSize; + ++ctx.calculatedFreeCount; + break; + case Node::TYPE_ALLOCATION: + ++ctx.calculatedAllocationCount; + if (!IsVirtual()) + { + VMA_VALIDATE(curr->allocation.userData != VMA_NULL); + } + break; + case Node::TYPE_SPLIT: + { + const uint32_t childrenLevel = level + 1; + const VkDeviceSize childrenLevelNodeSize = levelNodeSize >> 1; + const Node* const leftChild = curr->split.leftChild; + VMA_VALIDATE(leftChild != VMA_NULL); + VMA_VALIDATE(leftChild->offset == curr->offset); + if (!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize)) + { + VMA_VALIDATE(false && "ValidateNode for left child failed."); + } + const Node* const rightChild = leftChild->buddy; + VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize); + if (!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize)) + { + VMA_VALIDATE(false && "ValidateNode for right child failed."); + } + } + break; + default: + return false; + } + + return true; +} + +uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const +{ + // I know this could be optimized somehow e.g. by using std::log2p1 from C++20. + uint32_t level = 0; + VkDeviceSize currLevelNodeSize = m_UsableSize; + VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1; + while (allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount) + { + ++level; + currLevelNodeSize >>= 1; + nextLevelNodeSize >>= 1; + } + return level; +} + +void VmaBlockMetadata_Buddy::Free(VmaAllocHandle allocHandle) +{ + uint32_t level = 0; + Node* node = FindAllocationNode((VkDeviceSize)allocHandle - 1, level); + + ++m_FreeCount; + --m_AllocationCount; + m_SumFreeSize += LevelToNodeSize(level); + + node->type = Node::TYPE_FREE; + + // Join free nodes if possible. + while (level > 0 && node->buddy->type == Node::TYPE_FREE) + { + RemoveFromFreeList(level, node->buddy); + Node* const parent = node->parent; + + m_NodeAllocator.Free(node->buddy); + m_NodeAllocator.Free(node); + parent->type = Node::TYPE_FREE; + + node = parent; + --level; + --m_FreeCount; + } + + AddToFreeListFront(level, node); +} + +void VmaBlockMetadata_Buddy::AddNodeToDetailedStatistics(VmaDetailedStatistics& inoutStats, const Node* node, VkDeviceSize levelNodeSize) const +{ + switch (node->type) + { + case Node::TYPE_FREE: + VmaAddDetailedStatisticsUnusedRange(inoutStats, levelNodeSize); + break; + case Node::TYPE_ALLOCATION: + VmaAddDetailedStatisticsAllocation(inoutStats, levelNodeSize); + break; + case Node::TYPE_SPLIT: + { + const VkDeviceSize childrenNodeSize = levelNodeSize / 2; + const Node* const leftChild = node->split.leftChild; + AddNodeToDetailedStatistics(inoutStats, leftChild, childrenNodeSize); + const Node* const rightChild = leftChild->buddy; + AddNodeToDetailedStatistics(inoutStats, rightChild, childrenNodeSize); + } + break; + default: + VMA_ASSERT(0); + } +} + +void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node) +{ + VMA_ASSERT(node->type == Node::TYPE_FREE); + + // List is empty. 
+    Node* const frontNode = m_FreeList[level].front;
+    if (frontNode == VMA_NULL)
+    {
+        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
+        node->free.prev = node->free.next = VMA_NULL;
+        m_FreeList[level].front = m_FreeList[level].back = node;
+    }
+    else
+    {
+        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
+        node->free.prev = VMA_NULL;
+        node->free.next = frontNode;
+        frontNode->free.prev = node;
+        m_FreeList[level].front = node;
+    }
+}
+
+void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
+{
+    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
+
+    // It is at the front.
+    if (node->free.prev == VMA_NULL)
+    {
+        VMA_ASSERT(m_FreeList[level].front == node);
+        m_FreeList[level].front = node->free.next;
+    }
+    else
+    {
+        Node* const prevFreeNode = node->free.prev;
+        VMA_ASSERT(prevFreeNode->free.next == node);
+        prevFreeNode->free.next = node->free.next;
+    }
+
+    // It is at the back.
+    if (node->free.next == VMA_NULL)
+    {
+        VMA_ASSERT(m_FreeList[level].back == node);
+        m_FreeList[level].back = node->free.prev;
+    }
+    else
+    {
+        Node* const nextFreeNode = node->free.next;
+        VMA_ASSERT(nextFreeNode->free.prev == node);
+        nextFreeNode->free.prev = node->free.prev;
+    }
+}
+
+void VmaBlockMetadata_Buddy::DebugLogAllAllocationNode(Node* node, uint32_t level) const
+{
+    switch (node->type)
+    {
+    case Node::TYPE_FREE:
+        break;
+    case Node::TYPE_ALLOCATION:
+        DebugLogAllocation(node->offset, LevelToNodeSize(level), node->allocation.userData);
+        break;
+    case Node::TYPE_SPLIT:
+    {
+        ++level;
+        DebugLogAllAllocationNode(node->split.leftChild, level);
+        DebugLogAllAllocationNode(node->split.leftChild->buddy, level);
+    }
+    break;
+    default:
+        VMA_ASSERT(0);
+    }
+}
+
+#if VMA_STATS_STRING_ENABLED
+void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
+{
+    switch (node->type)
+    {
+    case Node::TYPE_FREE:
+        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
+        break;
+    case Node::TYPE_ALLOCATION:
+        PrintDetailedMap_Allocation(json, node->offset, levelNodeSize, node->allocation.userData);
+        break;
+    case Node::TYPE_SPLIT:
+    {
+        const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
+        const Node* const leftChild = node->split.leftChild;
+        PrintDetailedMapNode(json, leftChild, childrenNodeSize);
+        const Node* const rightChild = leftChild->buddy;
+        PrintDetailedMapNode(json, rightChild, childrenNodeSize);
+    }
+    break;
+    default:
+        VMA_ASSERT(0);
+    }
+}
+#endif // VMA_STATS_STRING_ENABLED
+#endif // _VMA_BLOCK_METADATA_BUDDY_FUNCTIONS
+#endif // _VMA_BLOCK_METADATA_BUDDY
+#endif // #if 0
+
+#ifndef _VMA_BLOCK_METADATA_TLSF
+// Use VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT as the strategy in
+// CreateAllocationRequest() to skip searching the current, larger region when the
+// first allocation attempt fails and fall through to a smaller range instead.
+// When fragmentation and reuse of previous blocks don't matter, use
+// VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT for the fastest possible allocation time.
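+//
+// Illustrative caller-side sketch (not part of this header; assumes the public
+// VMA API and a previously created VmaAllocator and VkBufferCreateInfo): these
+// strategy bits reach CreateAllocationRequest() via VmaAllocationCreateInfo::flags:
+//
+//     VmaAllocationCreateInfo allocCreateInfo = {};
+//     allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+//     allocCreateInfo.flags = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;
+//     VkBuffer buffer;
+//     VmaAllocation allocation;
+//     vmaCreateBuffer(allocator, &bufferCreateInfo, &allocCreateInfo,
+//         &buffer, &allocation, VMA_NULL);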
+class VmaBlockMetadata_TLSF : public VmaBlockMetadata
+{
+    VMA_CLASS_NO_COPY(VmaBlockMetadata_TLSF)
+public:
+    VmaBlockMetadata_TLSF(const VkAllocationCallbacks* pAllocationCallbacks,
+        VkDeviceSize bufferImageGranularity, bool isVirtual);
+    virtual ~VmaBlockMetadata_TLSF();
+
+    size_t GetAllocationCount() const override { return m_AllocCount; }
+    size_t GetFreeRegionsCount() const override { return m_BlocksFreeCount + 1; }
+    VkDeviceSize GetSumFreeSize() const override { return m_BlocksFreeSize + m_NullBlock->size; }
+    bool IsEmpty() const override { return m_NullBlock->offset == 0; }
+    VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const override { return ((Block*)allocHandle)->offset; }
+
+    void Init(VkDeviceSize size) override;
+    bool Validate() const override;
+
+    void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const override;
+    void AddStatistics(VmaStatistics& inoutStats) const override;
+
+#if VMA_STATS_STRING_ENABLED
+    void PrintDetailedMap(class VmaJsonWriter& json) const override;
+#endif
+
+    bool CreateAllocationRequest(
+        VkDeviceSize allocSize,
+        VkDeviceSize allocAlignment,
+        bool upperAddress,
+        VmaSuballocationType allocType,
+        uint32_t strategy,
+        VmaAllocationRequest* pAllocationRequest) override;
+
+    VkResult CheckCorruption(const void* pBlockData) override;
+    void Alloc(
+        const VmaAllocationRequest& request,
+        VmaSuballocationType type,
+        void* userData) override;
+
+    void Free(VmaAllocHandle allocHandle) override;
+    void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) override;
+    void* GetAllocationUserData(VmaAllocHandle allocHandle) const override;
+    VmaAllocHandle GetAllocationListBegin() const override;
+    VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const override;
+    VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc) const override;
+    void Clear() override;
+    void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) override;
+    void DebugLogAllAllocations() const override;
+
+private:
+    // According to the original paper, a value of 4 or 5 is preferable:
+    // M. Masmano, I. Ripoll, A. Crespo, and J. Real, "TLSF: a New Dynamic Memory Allocator for Real-Time Systems",
Real "TLSF: a New Dynamic Memory Allocator for Real-Time Systems" + // http://www.gii.upv.es/tlsf/files/ecrts04_tlsf.pdf + static const uint8_t SECOND_LEVEL_INDEX = 5; + static const uint16_t SMALL_BUFFER_SIZE = 256; + static const uint32_t INITIAL_BLOCK_ALLOC_COUNT = 16; + static const uint8_t MEMORY_CLASS_SHIFT = 7; + static const uint8_t MAX_MEMORY_CLASSES = 65 - MEMORY_CLASS_SHIFT; + + class Block + { + public: + VkDeviceSize offset; + VkDeviceSize size; + Block* prevPhysical; + Block* nextPhysical; + + void MarkFree() { prevFree = VMA_NULL; } + void MarkTaken() { prevFree = this; } + bool IsFree() const { return prevFree != this; } + void*& UserData() { VMA_HEAVY_ASSERT(!IsFree()); return userData; } + Block*& PrevFree() { return prevFree; } + Block*& NextFree() { VMA_HEAVY_ASSERT(IsFree()); return nextFree; } + + private: + Block* prevFree; // Address of the same block here indicates that block is taken + union + { + Block* nextFree; + void* userData; + }; + }; + + size_t m_AllocCount; + // Total number of free blocks besides null block + size_t m_BlocksFreeCount; + // Total size of free blocks excluding null block + VkDeviceSize m_BlocksFreeSize; + uint32_t m_IsFreeBitmap; + uint8_t m_MemoryClasses; + uint32_t m_InnerIsFreeBitmap[MAX_MEMORY_CLASSES]; + uint32_t m_ListsCount; + /* + * 0: 0-3 lists for small buffers + * 1+: 0-(2^SLI-1) lists for normal buffers + */ + Block** m_FreeList; + VmaPoolAllocator m_BlockAllocator; + Block* m_NullBlock; + VmaBlockBufferImageGranularity m_GranularityHandler; + + uint8_t SizeToMemoryClass(VkDeviceSize size) const; + uint16_t SizeToSecondIndex(VkDeviceSize size, uint8_t memoryClass) const; + uint32_t GetListIndex(uint8_t memoryClass, uint16_t secondIndex) const; + uint32_t GetListIndex(VkDeviceSize size) const; + + void RemoveFreeBlock(Block* block); + void InsertFreeBlock(Block* block); + void MergeBlock(Block* block, Block* prev); + + Block* FindFreeBlock(VkDeviceSize size, uint32_t& listIndex) const; + bool CheckBlock( + Block& block, + uint32_t listIndex, + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + VmaAllocationRequest* pAllocationRequest); +}; + +#ifndef _VMA_BLOCK_METADATA_TLSF_FUNCTIONS +VmaBlockMetadata_TLSF::VmaBlockMetadata_TLSF(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual) + : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual), + m_AllocCount(0), + m_BlocksFreeCount(0), + m_BlocksFreeSize(0), + m_IsFreeBitmap(0), + m_MemoryClasses(0), + m_ListsCount(0), + m_FreeList(VMA_NULL), + m_BlockAllocator(pAllocationCallbacks, INITIAL_BLOCK_ALLOC_COUNT), + m_NullBlock(VMA_NULL), + m_GranularityHandler(bufferImageGranularity) {} + +VmaBlockMetadata_TLSF::~VmaBlockMetadata_TLSF() +{ + if (m_FreeList) + vma_delete_array(GetAllocationCallbacks(), m_FreeList, m_ListsCount); + m_GranularityHandler.Destroy(GetAllocationCallbacks()); +} + +void VmaBlockMetadata_TLSF::Init(VkDeviceSize size) +{ + VmaBlockMetadata::Init(size); + + if (!IsVirtual()) + m_GranularityHandler.Init(GetAllocationCallbacks(), size); + + m_NullBlock = m_BlockAllocator.Alloc(); + m_NullBlock->size = size; + m_NullBlock->offset = 0; + m_NullBlock->prevPhysical = VMA_NULL; + m_NullBlock->nextPhysical = VMA_NULL; + m_NullBlock->MarkFree(); + m_NullBlock->NextFree() = VMA_NULL; + m_NullBlock->PrevFree() = VMA_NULL; + uint8_t memoryClass = SizeToMemoryClass(size); + uint16_t sli = SizeToSecondIndex(size, memoryClass); + m_ListsCount = (memoryClass == 0 ? 
0 : (memoryClass - 1) * (1UL << SECOND_LEVEL_INDEX) + sli) + 1; + if (IsVirtual()) + m_ListsCount += 1UL << SECOND_LEVEL_INDEX; + else + m_ListsCount += 4; + + m_MemoryClasses = memoryClass + 2; + memset(m_InnerIsFreeBitmap, 0, MAX_MEMORY_CLASSES * sizeof(uint32_t)); + + m_FreeList = vma_new_array(GetAllocationCallbacks(), Block*, m_ListsCount); + memset(m_FreeList, 0, m_ListsCount * sizeof(Block*)); +} + +bool VmaBlockMetadata_TLSF::Validate() const +{ + VMA_VALIDATE(GetSumFreeSize() <= GetSize()); + + VkDeviceSize calculatedSize = m_NullBlock->size; + VkDeviceSize calculatedFreeSize = m_NullBlock->size; + size_t allocCount = 0; + size_t freeCount = 0; + + // Check integrity of free lists + for (uint32_t list = 0; list < m_ListsCount; ++list) + { + Block* block = m_FreeList[list]; + if (block != VMA_NULL) + { + VMA_VALIDATE(block->IsFree()); + VMA_VALIDATE(block->PrevFree() == VMA_NULL); + while (block->NextFree()) + { + VMA_VALIDATE(block->NextFree()->IsFree()); + VMA_VALIDATE(block->NextFree()->PrevFree() == block); + block = block->NextFree(); + } + } + } + + VkDeviceSize nextOffset = m_NullBlock->offset; + auto validateCtx = m_GranularityHandler.StartValidation(GetAllocationCallbacks(), IsVirtual()); + + VMA_VALIDATE(m_NullBlock->nextPhysical == VMA_NULL); + if (m_NullBlock->prevPhysical) + { + VMA_VALIDATE(m_NullBlock->prevPhysical->nextPhysical == m_NullBlock); + } + // Check all blocks + for (Block* prev = m_NullBlock->prevPhysical; prev != VMA_NULL; prev = prev->prevPhysical) + { + VMA_VALIDATE(prev->offset + prev->size == nextOffset); + nextOffset = prev->offset; + calculatedSize += prev->size; + + uint32_t listIndex = GetListIndex(prev->size); + if (prev->IsFree()) + { + ++freeCount; + // Check if free block belongs to free list + Block* freeBlock = m_FreeList[listIndex]; + VMA_VALIDATE(freeBlock != VMA_NULL); + + bool found = false; + do + { + if (freeBlock == prev) + found = true; + + freeBlock = freeBlock->NextFree(); + } while (!found && freeBlock != VMA_NULL); + + VMA_VALIDATE(found); + calculatedFreeSize += prev->size; + } + else + { + ++allocCount; + // Check if taken block is not on a free list + Block* freeBlock = m_FreeList[listIndex]; + while (freeBlock) + { + VMA_VALIDATE(freeBlock != prev); + freeBlock = freeBlock->NextFree(); + } + + if (!IsVirtual()) + { + VMA_VALIDATE(m_GranularityHandler.Validate(validateCtx, prev->offset, prev->size)); + } + } + + if (prev->prevPhysical) + { + VMA_VALIDATE(prev->prevPhysical->nextPhysical == prev); + } + } + + if (!IsVirtual()) + { + VMA_VALIDATE(m_GranularityHandler.FinishValidation(validateCtx)); + } + + VMA_VALIDATE(nextOffset == 0); + VMA_VALIDATE(calculatedSize == GetSize()); + VMA_VALIDATE(calculatedFreeSize == GetSumFreeSize()); + VMA_VALIDATE(allocCount == m_AllocCount); + VMA_VALIDATE(freeCount == m_BlocksFreeCount); + + return true; +} + +void VmaBlockMetadata_TLSF::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const +{ + inoutStats.statistics.blockCount++; + inoutStats.statistics.blockBytes += GetSize(); + if (m_NullBlock->size > 0) + VmaAddDetailedStatisticsUnusedRange(inoutStats, m_NullBlock->size); + + for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) + { + if (block->IsFree()) + VmaAddDetailedStatisticsUnusedRange(inoutStats, block->size); + else + VmaAddDetailedStatisticsAllocation(inoutStats, block->size); + } +} + +void VmaBlockMetadata_TLSF::AddStatistics(VmaStatistics& inoutStats) const +{ + inoutStats.blockCount++; + inoutStats.allocationCount += 
        (uint32_t)m_AllocCount;
+    inoutStats.blockBytes += GetSize();
+    inoutStats.allocationBytes += GetSize() - GetSumFreeSize();
+}
+
+#if VMA_STATS_STRING_ENABLED
+void VmaBlockMetadata_TLSF::PrintDetailedMap(class VmaJsonWriter& json) const
+{
+    size_t blockCount = m_AllocCount + m_BlocksFreeCount;
+    VmaStlAllocator<Block*> allocator(GetAllocationCallbacks());
+    VmaVector<Block*, VmaStlAllocator<Block*>> blockList(blockCount, allocator);
+
+    size_t i = blockCount;
+    for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical)
+    {
+        blockList[--i] = block;
+    }
+    VMA_ASSERT(i == 0);
+
+    VmaDetailedStatistics stats;
+    VmaClearDetailedStatistics(stats);
+    AddDetailedStatistics(stats);
+
+    PrintDetailedMap_Begin(json,
+        stats.statistics.blockBytes - stats.statistics.allocationBytes,
+        stats.statistics.allocationCount,
+        stats.unusedRangeCount);
+
+    for (; i < blockCount; ++i)
+    {
+        Block* block = blockList[i];
+        if (block->IsFree())
+            PrintDetailedMap_UnusedRange(json, block->offset, block->size);
+        else
+            PrintDetailedMap_Allocation(json, block->offset, block->size, block->UserData());
+    }
+    if (m_NullBlock->size > 0)
+        PrintDetailedMap_UnusedRange(json, m_NullBlock->offset, m_NullBlock->size);
+
+    PrintDetailedMap_End(json);
+}
+#endif
+
+bool VmaBlockMetadata_TLSF::CreateAllocationRequest(
+    VkDeviceSize allocSize,
+    VkDeviceSize allocAlignment,
+    bool upperAddress,
+    VmaSuballocationType allocType,
+    uint32_t strategy,
+    VmaAllocationRequest* pAllocationRequest)
+{
+    VMA_ASSERT(allocSize > 0 && "Cannot allocate empty block!");
+    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
+
+    // For small granularity round up
+    if (!IsVirtual())
+        m_GranularityHandler.RoundupAllocRequest(allocType, allocSize, allocAlignment);
+
+    allocSize += GetDebugMargin();
+    // Quick check for too small pool
+    if (allocSize > GetSumFreeSize())
+        return false;
+
+    // If no free blocks in pool then check only null block
+    if (m_BlocksFreeCount == 0)
+        return CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest);
+
+    // Round up to the next block
+    VkDeviceSize sizeForNextList = allocSize;
+    VkDeviceSize smallSizeStep = SMALL_BUFFER_SIZE / (IsVirtual() ? 1 << SECOND_LEVEL_INDEX : 4);
1 << SECOND_LEVEL_INDEX : 4);
+ if (allocSize > SMALL_BUFFER_SIZE)
+ {
+ sizeForNextList += (1ULL << (VMA_BITSCAN_MSB(allocSize) - SECOND_LEVEL_INDEX));
+ }
+ else if (allocSize > SMALL_BUFFER_SIZE - smallSizeStep)
+ sizeForNextList = SMALL_BUFFER_SIZE + 1;
+ else
+ sizeForNextList += smallSizeStep;
+
+ uint32_t nextListIndex = 0;
+ uint32_t prevListIndex = 0;
+ Block* nextListBlock = VMA_NULL;
+ Block* prevListBlock = VMA_NULL;
+
+ // Check blocks according to strategies
+ if (strategy & VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT)
+ {
+ // Quick check for larger block first
+ nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex);
+ if (nextListBlock != VMA_NULL && CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
+ return true;
+
+ // If not fitted then null block
+ if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest))
+ return true;
+
+ // Null block failed, search larger bucket
+ while (nextListBlock)
+ {
+ if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
+ return true;
+ nextListBlock = nextListBlock->NextFree();
+ }
+
+ // Failed again, check best fit bucket
+ prevListBlock = FindFreeBlock(allocSize, prevListIndex);
+ while (prevListBlock)
+ {
+ if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
+ return true;
+ prevListBlock = prevListBlock->NextFree();
+ }
+ }
+ else if (strategy & VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT)
+ {
+ // Check best fit bucket
+ prevListBlock = FindFreeBlock(allocSize, prevListIndex);
+ while (prevListBlock)
+ {
+ if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
+ return true;
+ prevListBlock = prevListBlock->NextFree();
+ }
+
+ // If failed check null block
+ if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest))
+ return true;
+
+ // Check larger bucket
+ nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex);
+ while (nextListBlock)
+ {
+ if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
+ return true;
+ nextListBlock = nextListBlock->NextFree();
+ }
+ }
+ else if (strategy & VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT)
+ {
+ // Perform search from the start
+ VmaStlAllocator<Block*> allocator(GetAllocationCallbacks());
+ VmaVector<Block*, VmaStlAllocator<Block*>> blockList(m_BlocksFreeCount, allocator);
+
+ size_t i = m_BlocksFreeCount;
+ for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical)
+ {
+ if (block->IsFree() && block->size >= allocSize)
+ blockList[--i] = block;
+ }
+
+ for (; i < m_BlocksFreeCount; ++i)
+ {
+ Block& block = *blockList[i];
+ if (CheckBlock(block, GetListIndex(block.size), allocSize, allocAlignment, allocType, pAllocationRequest))
+ return true;
+ }
+
+ // If failed check null block
+ if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest))
+ return true;
+
+ // Whole range searched, no more memory
+ return false;
+ }
+ else
+ {
+ // Check larger bucket
+ nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex);
+ while (nextListBlock)
+ {
+ if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest))
+ return true;
+ nextListBlock = nextListBlock->NextFree();
+ }
+
+ // If failed check null block
+ if (CheckBlock(*m_NullBlock, m_ListsCount,
allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + + // Check best fit bucket + prevListBlock = FindFreeBlock(allocSize, prevListIndex); + while (prevListBlock) + { + if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + prevListBlock = prevListBlock->NextFree(); + } + } + + // Worst case, full search has to be done + while (++nextListIndex < m_ListsCount) + { + nextListBlock = m_FreeList[nextListIndex]; + while (nextListBlock) + { + if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + nextListBlock = nextListBlock->NextFree(); + } + } + + // No more memory sadly + return false; +} + +VkResult VmaBlockMetadata_TLSF::CheckCorruption(const void* pBlockData) +{ + for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) + { + if (!block->IsFree()) + { + if (!VmaValidateMagicValue(pBlockData, block->offset + block->size)) + { + VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!"); + return VK_ERROR_UNKNOWN_COPY; + } + } + } + + return VK_SUCCESS; +} + +void VmaBlockMetadata_TLSF::Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) +{ + VMA_ASSERT(request.type == VmaAllocationRequestType::TLSF); + + // Get block and pop it from the free list + Block* currentBlock = (Block*)request.allocHandle; + VkDeviceSize offset = request.algorithmData; + VMA_ASSERT(currentBlock != VMA_NULL); + VMA_ASSERT(currentBlock->offset <= offset); + + if (currentBlock != m_NullBlock) + RemoveFreeBlock(currentBlock); + + VkDeviceSize debugMargin = GetDebugMargin(); + VkDeviceSize misssingAlignment = offset - currentBlock->offset; + + // Append missing alignment to prev block or create new one + if (misssingAlignment) + { + Block* prevBlock = currentBlock->prevPhysical; + VMA_ASSERT(prevBlock != VMA_NULL && "There should be no missing alignment at offset 0!"); + + if (prevBlock->IsFree() && prevBlock->size != debugMargin) + { + uint32_t oldList = GetListIndex(prevBlock->size); + prevBlock->size += misssingAlignment; + // Check if new size crosses list bucket + if (oldList != GetListIndex(prevBlock->size)) + { + prevBlock->size -= misssingAlignment; + RemoveFreeBlock(prevBlock); + prevBlock->size += misssingAlignment; + InsertFreeBlock(prevBlock); + } + else + m_BlocksFreeSize += misssingAlignment; + } + else + { + Block* newBlock = m_BlockAllocator.Alloc(); + currentBlock->prevPhysical = newBlock; + prevBlock->nextPhysical = newBlock; + newBlock->prevPhysical = prevBlock; + newBlock->nextPhysical = currentBlock; + newBlock->size = misssingAlignment; + newBlock->offset = currentBlock->offset; + newBlock->MarkTaken(); + + InsertFreeBlock(newBlock); + } + + currentBlock->size -= misssingAlignment; + currentBlock->offset += misssingAlignment; + } + + VkDeviceSize size = request.size + debugMargin; + if (currentBlock->size == size) + { + if (currentBlock == m_NullBlock) + { + // Setup new null block + m_NullBlock = m_BlockAllocator.Alloc(); + m_NullBlock->size = 0; + m_NullBlock->offset = currentBlock->offset + size; + m_NullBlock->prevPhysical = currentBlock; + m_NullBlock->nextPhysical = VMA_NULL; + m_NullBlock->MarkFree(); + m_NullBlock->PrevFree() = VMA_NULL; + m_NullBlock->NextFree() = VMA_NULL; + currentBlock->nextPhysical = m_NullBlock; + currentBlock->MarkTaken(); + } + } + else + { + VMA_ASSERT(currentBlock->size > size && "Proper block already found, 
shouldn't find smaller one!"); + + // Create new free block + Block* newBlock = m_BlockAllocator.Alloc(); + newBlock->size = currentBlock->size - size; + newBlock->offset = currentBlock->offset + size; + newBlock->prevPhysical = currentBlock; + newBlock->nextPhysical = currentBlock->nextPhysical; + currentBlock->nextPhysical = newBlock; + currentBlock->size = size; + + if (currentBlock == m_NullBlock) + { + m_NullBlock = newBlock; + m_NullBlock->MarkFree(); + m_NullBlock->NextFree() = VMA_NULL; + m_NullBlock->PrevFree() = VMA_NULL; + currentBlock->MarkTaken(); + } + else + { + newBlock->nextPhysical->prevPhysical = newBlock; + newBlock->MarkTaken(); + InsertFreeBlock(newBlock); + } + } + currentBlock->UserData() = userData; + + if (debugMargin > 0) + { + currentBlock->size -= debugMargin; + Block* newBlock = m_BlockAllocator.Alloc(); + newBlock->size = debugMargin; + newBlock->offset = currentBlock->offset + currentBlock->size; + newBlock->prevPhysical = currentBlock; + newBlock->nextPhysical = currentBlock->nextPhysical; + newBlock->MarkTaken(); + currentBlock->nextPhysical->prevPhysical = newBlock; + currentBlock->nextPhysical = newBlock; + InsertFreeBlock(newBlock); + } + + if (!IsVirtual()) + m_GranularityHandler.AllocPages((uint8_t)(uintptr_t)request.customData, + currentBlock->offset, currentBlock->size); + ++m_AllocCount; +} + +void VmaBlockMetadata_TLSF::Free(VmaAllocHandle allocHandle) +{ + Block* block = (Block*)allocHandle; + Block* next = block->nextPhysical; + VMA_ASSERT(!block->IsFree() && "Block is already free!"); + + if (!IsVirtual()) + m_GranularityHandler.FreePages(block->offset, block->size); + --m_AllocCount; + + VkDeviceSize debugMargin = GetDebugMargin(); + if (debugMargin > 0) + { + RemoveFreeBlock(next); + MergeBlock(next, block); + block = next; + next = next->nextPhysical; + } + + // Try merging + Block* prev = block->prevPhysical; + if (prev != VMA_NULL && prev->IsFree() && prev->size != debugMargin) + { + RemoveFreeBlock(prev); + MergeBlock(block, prev); + } + + if (!next->IsFree()) + InsertFreeBlock(block); + else if (next == m_NullBlock) + MergeBlock(m_NullBlock, block); + else + { + RemoveFreeBlock(next); + MergeBlock(next, block); + InsertFreeBlock(next); + } +} + +void VmaBlockMetadata_TLSF::GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) +{ + Block* block = (Block*)allocHandle; + VMA_ASSERT(!block->IsFree() && "Cannot get allocation info for free block!"); + outInfo.offset = block->offset; + outInfo.size = block->size; + outInfo.pUserData = block->UserData(); +} + +void* VmaBlockMetadata_TLSF::GetAllocationUserData(VmaAllocHandle allocHandle) const +{ + Block* block = (Block*)allocHandle; + VMA_ASSERT(!block->IsFree() && "Cannot get user data for free block!"); + return block->UserData(); +} + +VmaAllocHandle VmaBlockMetadata_TLSF::GetAllocationListBegin() const +{ + if (m_AllocCount == 0) + return VK_NULL_HANDLE; + + for (Block* block = m_NullBlock->prevPhysical; block; block = block->prevPhysical) + { + if (!block->IsFree()) + return (VmaAllocHandle)block; + } + VMA_ASSERT(false && "If m_AllocCount > 0 then should find any allocation!"); + return VK_NULL_HANDLE; +} + +VmaAllocHandle VmaBlockMetadata_TLSF::GetNextAllocation(VmaAllocHandle prevAlloc) const +{ + Block* startBlock = (Block*)prevAlloc; + VMA_ASSERT(!startBlock->IsFree() && "Incorrect block!"); + + for (Block* block = startBlock->prevPhysical; block; block = block->prevPhysical) + { + if (!block->IsFree()) + return (VmaAllocHandle)block; + } + return 
VK_NULL_HANDLE;
+}
+
+VkDeviceSize VmaBlockMetadata_TLSF::GetNextFreeRegionSize(VmaAllocHandle alloc) const
+{
+ Block* block = (Block*)alloc;
+ VMA_ASSERT(!block->IsFree() && "Incorrect block!");
+
+ if (block->prevPhysical)
+ return block->prevPhysical->IsFree() ? block->prevPhysical->size : 0;
+ return 0;
+}
+
+void VmaBlockMetadata_TLSF::Clear()
+{
+ m_AllocCount = 0;
+ m_BlocksFreeCount = 0;
+ m_BlocksFreeSize = 0;
+ m_IsFreeBitmap = 0;
+ m_NullBlock->offset = 0;
+ m_NullBlock->size = GetSize();
+ Block* block = m_NullBlock->prevPhysical;
+ m_NullBlock->prevPhysical = VMA_NULL;
+ while (block)
+ {
+ Block* prev = block->prevPhysical;
+ m_BlockAllocator.Free(block);
+ block = prev;
+ }
+ memset(m_FreeList, 0, m_ListsCount * sizeof(Block*));
+ memset(m_InnerIsFreeBitmap, 0, m_MemoryClasses * sizeof(uint32_t));
+ m_GranularityHandler.Clear();
+}
+
+void VmaBlockMetadata_TLSF::SetAllocationUserData(VmaAllocHandle allocHandle, void* userData)
+{
+ Block* block = (Block*)allocHandle;
+ VMA_ASSERT(!block->IsFree() && "Trying to set user data for not allocated block!");
+ block->UserData() = userData;
+}
+
+void VmaBlockMetadata_TLSF::DebugLogAllAllocations() const
+{
+ for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical)
+ if (!block->IsFree())
+ DebugLogAllocation(block->offset, block->size, block->UserData());
+}
+
+// TLSF indexing: memory class 0 covers sizes up to SMALL_BUFFER_SIZE and is indexed linearly;
+// higher classes are selected by the MSB of the size, each subdivided into 2^SECOND_LEVEL_INDEX buckets.
+uint8_t VmaBlockMetadata_TLSF::SizeToMemoryClass(VkDeviceSize size) const
+{
+ if (size > SMALL_BUFFER_SIZE)
+ return VMA_BITSCAN_MSB(size) - MEMORY_CLASS_SHIFT;
+ return 0;
+}
+
+uint16_t VmaBlockMetadata_TLSF::SizeToSecondIndex(VkDeviceSize size, uint8_t memoryClass) const
+{
+ if (memoryClass == 0)
+ {
+ if (IsVirtual())
+ return static_cast<uint16_t>((size - 1) / 8);
+ else
+ return static_cast<uint16_t>((size - 1) / 64);
+ }
+ return static_cast<uint16_t>((size >> (memoryClass + MEMORY_CLASS_SHIFT - SECOND_LEVEL_INDEX)) ^ (1U << SECOND_LEVEL_INDEX));
+}
+
+uint32_t VmaBlockMetadata_TLSF::GetListIndex(uint8_t memoryClass, uint16_t secondIndex) const
+{
+ if (memoryClass == 0)
+ return secondIndex;
+
+ const uint32_t index = static_cast<uint32_t>(memoryClass - 1) * (1 << SECOND_LEVEL_INDEX) + secondIndex;
+ if (IsVirtual())
+ return index + (1 << SECOND_LEVEL_INDEX);
+ else
+ return index + 4;
+}
+
+uint32_t VmaBlockMetadata_TLSF::GetListIndex(VkDeviceSize size) const
+{
+ uint8_t memoryClass = SizeToMemoryClass(size);
+ return GetListIndex(memoryClass, SizeToSecondIndex(size, memoryClass));
+}
+
+void VmaBlockMetadata_TLSF::RemoveFreeBlock(Block* block)
+{
+ VMA_ASSERT(block != m_NullBlock);
+ VMA_ASSERT(block->IsFree());
+
+ if (block->NextFree() != VMA_NULL)
+ block->NextFree()->PrevFree() = block->PrevFree();
+ if (block->PrevFree() != VMA_NULL)
+ block->PrevFree()->NextFree() = block->NextFree();
+ else
+ {
+ uint8_t memClass = SizeToMemoryClass(block->size);
+ uint16_t secondIndex = SizeToSecondIndex(block->size, memClass);
+ uint32_t index = GetListIndex(memClass, secondIndex);
+ VMA_ASSERT(m_FreeList[index] == block);
+ m_FreeList[index] = block->NextFree();
+ if (block->NextFree() == VMA_NULL)
+ {
+ m_InnerIsFreeBitmap[memClass] &= ~(1U << secondIndex);
+ if (m_InnerIsFreeBitmap[memClass] == 0)
+ m_IsFreeBitmap &= ~(1UL << memClass);
+ }
+ }
+ block->MarkTaken();
+ block->UserData() = VMA_NULL;
+ --m_BlocksFreeCount;
+ m_BlocksFreeSize -= block->size;
+}
+
+void VmaBlockMetadata_TLSF::InsertFreeBlock(Block* block)
+{
+ VMA_ASSERT(block != m_NullBlock);
+ VMA_ASSERT(!block->IsFree() && "Cannot insert block twice!");
+
+ uint8_t memClass =
SizeToMemoryClass(block->size);
+ uint16_t secondIndex = SizeToSecondIndex(block->size, memClass);
+ uint32_t index = GetListIndex(memClass, secondIndex);
+ VMA_ASSERT(index < m_ListsCount);
+ block->PrevFree() = VMA_NULL;
+ block->NextFree() = m_FreeList[index];
+ m_FreeList[index] = block;
+ if (block->NextFree() != VMA_NULL)
+ block->NextFree()->PrevFree() = block;
+ else
+ {
+ m_InnerIsFreeBitmap[memClass] |= 1U << secondIndex;
+ m_IsFreeBitmap |= 1UL << memClass;
+ }
+ ++m_BlocksFreeCount;
+ m_BlocksFreeSize += block->size;
+}
+
+void VmaBlockMetadata_TLSF::MergeBlock(Block* block, Block* prev)
+{
+ VMA_ASSERT(block->prevPhysical == prev && "Cannot merge separate physical regions!");
+ VMA_ASSERT(!prev->IsFree() && "Cannot merge block that belongs to free list!");
+
+ block->offset = prev->offset;
+ block->size += prev->size;
+ block->prevPhysical = prev->prevPhysical;
+ if (block->prevPhysical)
+ block->prevPhysical->nextPhysical = block;
+ m_BlockAllocator.Free(prev);
+}
+
+VmaBlockMetadata_TLSF::Block* VmaBlockMetadata_TLSF::FindFreeBlock(VkDeviceSize size, uint32_t& listIndex) const
+{
+ uint8_t memoryClass = SizeToMemoryClass(size);
+ uint32_t innerFreeMap = m_InnerIsFreeBitmap[memoryClass] & (~0U << SizeToSecondIndex(size, memoryClass));
+ if (!innerFreeMap)
+ {
+ // Check higher levels for available blocks
+ uint32_t freeMap = m_IsFreeBitmap & (~0UL << (memoryClass + 1));
+ if (!freeMap)
+ return VMA_NULL; // No more memory available
+
+ // Find lowest free region
+ memoryClass = VMA_BITSCAN_LSB(freeMap);
+ innerFreeMap = m_InnerIsFreeBitmap[memoryClass];
+ VMA_ASSERT(innerFreeMap != 0);
+ }
+ // Find lowest free subregion
+ listIndex = GetListIndex(memoryClass, VMA_BITSCAN_LSB(innerFreeMap));
+ VMA_ASSERT(m_FreeList[listIndex]);
+ return m_FreeList[listIndex];
+}
+
+bool VmaBlockMetadata_TLSF::CheckBlock(
+ Block& block,
+ uint32_t listIndex,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ VmaSuballocationType allocType,
+ VmaAllocationRequest* pAllocationRequest)
+{
+ VMA_ASSERT(block.IsFree() && "Block is already taken!");
+
+ VkDeviceSize alignedOffset = VmaAlignUp(block.offset, allocAlignment);
+ if (block.size < allocSize + alignedOffset - block.offset)
+ return false;
+
+ // Check for granularity conflicts
+ if (!IsVirtual() &&
+ m_GranularityHandler.CheckConflictAndAlignUp(alignedOffset, allocSize, block.offset, block.size, allocType))
+ return false;
+
+ // Alloc successful
+ pAllocationRequest->type = VmaAllocationRequestType::TLSF;
+ pAllocationRequest->allocHandle = (VmaAllocHandle)&block;
+ pAllocationRequest->size = allocSize - GetDebugMargin();
+ pAllocationRequest->customData = (void*)allocType;
+ pAllocationRequest->algorithmData = alignedOffset;
+
+ // Place block at the start of list if it's normal block
+ if (listIndex != m_ListsCount && block.PrevFree())
+ {
+ block.PrevFree()->NextFree() = block.NextFree();
+ if (block.NextFree())
+ block.NextFree()->PrevFree() = block.PrevFree();
+ block.PrevFree() = VMA_NULL;
+ block.NextFree() = m_FreeList[listIndex];
+ m_FreeList[listIndex] = &block;
+ if (block.NextFree())
+ block.NextFree()->PrevFree() = &block;
+ }
+
+ return true;
+}
+#endif // _VMA_BLOCK_METADATA_TLSF_FUNCTIONS
+#endif // _VMA_BLOCK_METADATA_TLSF
+
+#ifndef _VMA_BLOCK_VECTOR
+/*
+Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
+Vulkan memory type.
+
+Synchronized internally with a mutex.
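+Read-only queries such as AddStatistics() take m_Mutex in shared mode
+(VmaMutexLockRead), while Allocate() takes it exclusively (VmaMutexLockWrite),
+as can be seen in the function definitions further below.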
+*/
+class VmaBlockVector
+{
+ friend struct VmaDefragmentationContext_T;
+ VMA_CLASS_NO_COPY(VmaBlockVector)
+public:
+ VmaBlockVector(
+ VmaAllocator hAllocator,
+ VmaPool hParentPool,
+ uint32_t memoryTypeIndex,
+ VkDeviceSize preferredBlockSize,
+ size_t minBlockCount,
+ size_t maxBlockCount,
+ VkDeviceSize bufferImageGranularity,
+ bool explicitBlockSize,
+ uint32_t algorithm,
+ float priority,
+ VkDeviceSize minAllocationAlignment,
+ void* pMemoryAllocateNext);
+ ~VmaBlockVector();
+
+ VmaAllocator GetAllocator() const { return m_hAllocator; }
+ VmaPool GetParentPool() const { return m_hParentPool; }
+ bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
+ uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
+ VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
+ VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
+ uint32_t GetAlgorithm() const { return m_Algorithm; }
+ bool HasExplicitBlockSize() const { return m_ExplicitBlockSize; }
+ float GetPriority() const { return m_Priority; }
+ const void* GetAllocationNextPtr() const { return m_pMemoryAllocateNext; }
+ // To be used only while the m_Mutex is locked. Used during defragmentation.
+ size_t GetBlockCount() const { return m_Blocks.size(); }
+ // To be used only while the m_Mutex is locked. Used during defragmentation.
+ VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
+ VMA_RW_MUTEX &GetMutex() { return m_Mutex; }
+
+ VkResult CreateMinBlocks();
+ void AddStatistics(VmaStatistics& inoutStats);
+ void AddDetailedStatistics(VmaDetailedStatistics& inoutStats);
+ bool IsEmpty();
+ bool IsCorruptionDetectionEnabled() const;
+
+ VkResult Allocate(
+ VkDeviceSize size,
+ VkDeviceSize alignment,
+ const VmaAllocationCreateInfo& createInfo,
+ VmaSuballocationType suballocType,
+ size_t allocationCount,
+ VmaAllocation* pAllocations);
+
+ void Free(const VmaAllocation hAllocation);
+
+#if VMA_STATS_STRING_ENABLED
+ void PrintDetailedMap(class VmaJsonWriter& json);
+#endif
+
+ VkResult CheckCorruption();
+
+private:
+ const VmaAllocator m_hAllocator;
+ const VmaPool m_hParentPool;
+ const uint32_t m_MemoryTypeIndex;
+ const VkDeviceSize m_PreferredBlockSize;
+ const size_t m_MinBlockCount;
+ const size_t m_MaxBlockCount;
+ const VkDeviceSize m_BufferImageGranularity;
+ const bool m_ExplicitBlockSize;
+ const uint32_t m_Algorithm;
+ const float m_Priority;
+ const VkDeviceSize m_MinAllocationAlignment;
+
+ void* const m_pMemoryAllocateNext;
+ VMA_RW_MUTEX m_Mutex;
+ // Incrementally sorted by sumFreeSize, ascending.
+ VmaVector<VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*>> m_Blocks;
+ uint32_t m_NextBlockId;
+ bool m_IncrementalSort = true;
+
+ void SetIncrementalSort(bool val) { m_IncrementalSort = val; }
+
+ VkDeviceSize CalcMaxBlockSize() const;
+ // Finds and removes given block from vector.
+ void Remove(VmaDeviceMemoryBlock* pBlock);
+ // Performs single step in sorting m_Blocks. They may not be fully sorted
+ // after this call.
+ void IncrementallySortBlocks();
+ void SortByFreeSize();
+
+ VkResult AllocatePage(
+ VkDeviceSize size,
+ VkDeviceSize alignment,
+ const VmaAllocationCreateInfo& createInfo,
+ VmaSuballocationType suballocType,
+ VmaAllocation* pAllocation);
+
+ VkResult AllocateFromBlock(
+ VmaDeviceMemoryBlock* pBlock,
+ VkDeviceSize size,
+ VkDeviceSize alignment,
+ VmaAllocationCreateFlags allocFlags,
+ void* pUserData,
+ VmaSuballocationType suballocType,
+ uint32_t strategy,
+ VmaAllocation* pAllocation);
+
+ VkResult CommitAllocationRequest(
+ VmaAllocationRequest& allocRequest,
+ VmaDeviceMemoryBlock* pBlock,
+ VkDeviceSize alignment,
+ VmaAllocationCreateFlags allocFlags,
+ void* pUserData,
+ VmaSuballocationType suballocType,
+ VmaAllocation* pAllocation);
+
+ VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
+ bool HasEmptyBlock();
+};
+#endif // _VMA_BLOCK_VECTOR
+
+#ifndef _VMA_DEFRAGMENTATION_CONTEXT
+struct VmaDefragmentationContext_T
+{
+ VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
+public:
+ VmaDefragmentationContext_T(
+ VmaAllocator hAllocator,
+ const VmaDefragmentationInfo& info);
+ ~VmaDefragmentationContext_T();
+
+ void GetStats(VmaDefragmentationStats& outStats) { outStats = m_GlobalStats; }
+
+ VkResult DefragmentPassBegin(VmaDefragmentationPassMoveInfo& moveInfo);
+ VkResult DefragmentPassEnd(VmaDefragmentationPassMoveInfo& moveInfo);
+
+private:
+ // Max number of allocations to ignore due to size constraints before ending single pass
+ static const uint8_t MAX_ALLOCS_TO_IGNORE = 16;
+ enum class CounterStatus { Pass, Ignore, End };
+
+ struct FragmentedBlock
+ {
+ uint32_t data;
+ VmaDeviceMemoryBlock* block;
+ };
+ struct StateBalanced
+ {
+ VkDeviceSize avgFreeSize = 0;
+ VkDeviceSize avgAllocSize = UINT64_MAX;
+ };
+ struct StateExtensive
+ {
+ enum class Operation : uint8_t
+ {
+ FindFreeBlockBuffer, FindFreeBlockTexture, FindFreeBlockAll,
+ MoveBuffers, MoveTextures, MoveAll,
+ Cleanup, Done
+ };
+
+ Operation operation = Operation::FindFreeBlockTexture;
+ size_t firstFreeBlock = SIZE_MAX;
+ };
+ struct MoveAllocationData
+ {
+ VkDeviceSize size;
+ VkDeviceSize alignment;
+ VmaSuballocationType type;
+ VmaAllocationCreateFlags flags;
+ VmaDefragmentationMove move = {};
+ };
+
+ const VkDeviceSize m_MaxPassBytes;
+ const uint32_t m_MaxPassAllocations;
+
+ VmaStlAllocator<VmaDefragmentationMove> m_MoveAllocator;
+ VmaVector<VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove>> m_Moves;
+
+ uint8_t m_IgnoredAllocs = 0;
+ uint32_t m_Algorithm;
+ uint32_t m_BlockVectorCount;
+ VmaBlockVector* m_PoolBlockVector;
+ VmaBlockVector** m_pBlockVectors;
+ size_t m_ImmovableBlockCount = 0;
+ VmaDefragmentationStats m_GlobalStats = { 0 };
+ VmaDefragmentationStats m_PassStats = { 0 };
+ void* m_AlgorithmState = VMA_NULL;
+
+ static MoveAllocationData GetMoveData(VmaAllocHandle handle, VmaBlockMetadata* metadata);
+ CounterStatus CheckCounters(VkDeviceSize bytes);
+ bool IncrementCounters(VkDeviceSize bytes);
+ bool ReallocWithinBlock(VmaBlockVector& vector, VmaDeviceMemoryBlock* block);
+ bool AllocInOtherBlock(size_t start, size_t end, MoveAllocationData& data, VmaBlockVector& vector);
+
+ bool ComputeDefragmentation(VmaBlockVector& vector, size_t index);
+ bool ComputeDefragmentation_Fast(VmaBlockVector& vector);
+ bool ComputeDefragmentation_Balanced(VmaBlockVector& vector, size_t index, bool update);
+ bool ComputeDefragmentation_Full(VmaBlockVector& vector);
+ bool ComputeDefragmentation_Extensive(VmaBlockVector& vector, size_t index);
+
+ void UpdateVectorStatistics(VmaBlockVector& vector, StateBalanced& state);
+ bool
MoveDataToFreeBlocks(VmaSuballocationType currentType, + VmaBlockVector& vector, size_t firstFreeBlock, + bool& texturePresent, bool& bufferPresent, bool& otherPresent); +}; +#endif // _VMA_DEFRAGMENTATION_CONTEXT + +#ifndef _VMA_POOL_T +struct VmaPool_T +{ + friend struct VmaPoolListItemTraits; + VMA_CLASS_NO_COPY(VmaPool_T) +public: + VmaBlockVector m_BlockVector; + VmaDedicatedAllocationList m_DedicatedAllocations; + + VmaPool_T( + VmaAllocator hAllocator, + const VmaPoolCreateInfo& createInfo, + VkDeviceSize preferredBlockSize); + ~VmaPool_T(); + + uint32_t GetId() const { return m_Id; } + void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; } + + const char* GetName() const { return m_Name; } + void SetName(const char* pName); + +#if VMA_STATS_STRING_ENABLED + //void PrintDetailedMap(class VmaStringBuilder& sb); +#endif + +private: + uint32_t m_Id; + char* m_Name; + VmaPool_T* m_PrevPool = VMA_NULL; + VmaPool_T* m_NextPool = VMA_NULL; +}; + +struct VmaPoolListItemTraits +{ + typedef VmaPool_T ItemType; + + static ItemType* GetPrev(const ItemType* item) { return item->m_PrevPool; } + static ItemType* GetNext(const ItemType* item) { return item->m_NextPool; } + static ItemType*& AccessPrev(ItemType* item) { return item->m_PrevPool; } + static ItemType*& AccessNext(ItemType* item) { return item->m_NextPool; } +}; +#endif // _VMA_POOL_T + +#ifndef _VMA_CURRENT_BUDGET_DATA +struct VmaCurrentBudgetData +{ + VMA_ATOMIC_UINT32 m_BlockCount[VK_MAX_MEMORY_HEAPS]; + VMA_ATOMIC_UINT32 m_AllocationCount[VK_MAX_MEMORY_HEAPS]; + VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS]; + VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS]; + +#if VMA_MEMORY_BUDGET + VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch; + VMA_RW_MUTEX m_BudgetMutex; + uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS]; + uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS]; + uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS]; +#endif // VMA_MEMORY_BUDGET + + VmaCurrentBudgetData(); + + void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize); + void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize); +}; + +#ifndef _VMA_CURRENT_BUDGET_DATA_FUNCTIONS +VmaCurrentBudgetData::VmaCurrentBudgetData() +{ + for (uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex) + { + m_BlockCount[heapIndex] = 0; + m_AllocationCount[heapIndex] = 0; + m_BlockBytes[heapIndex] = 0; + m_AllocationBytes[heapIndex] = 0; +#if VMA_MEMORY_BUDGET + m_VulkanUsage[heapIndex] = 0; + m_VulkanBudget[heapIndex] = 0; + m_BlockBytesAtBudgetFetch[heapIndex] = 0; +#endif + } + +#if VMA_MEMORY_BUDGET + m_OperationsSinceBudgetFetch = 0; +#endif +} + +void VmaCurrentBudgetData::AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize) +{ + m_AllocationBytes[heapIndex] += allocationSize; + ++m_AllocationCount[heapIndex]; +#if VMA_MEMORY_BUDGET + ++m_OperationsSinceBudgetFetch; +#endif +} + +void VmaCurrentBudgetData::RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize) +{ + VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize); + m_AllocationBytes[heapIndex] -= allocationSize; + VMA_ASSERT(m_AllocationCount[heapIndex] > 0); + --m_AllocationCount[heapIndex]; +#if VMA_MEMORY_BUDGET + ++m_OperationsSinceBudgetFetch; +#endif +} +#endif // _VMA_CURRENT_BUDGET_DATA_FUNCTIONS +#endif // _VMA_CURRENT_BUDGET_DATA + +#ifndef _VMA_ALLOCATION_OBJECT_ALLOCATOR +/* +Thread-safe wrapper over VmaPoolAllocator free list, for allocation of VmaAllocation_T objects. 
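+Every Allocate()/Free() serializes on a single VMA_MUTEX, so VmaAllocation_T
+objects can be created and destroyed from multiple threads. A rough usage
+sketch (arguments are forwarded to the VmaAllocation_T constructor):
+
+ VmaAllocation alloc = m_AllocationObjectAllocator.Allocate(mappingAllowed);
+ // ... use the allocation ...
+ m_AllocationObjectAllocator.Free(alloc);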
+*/
+class VmaAllocationObjectAllocator
+{
+ VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
+public:
+ VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks)
+ : m_Allocator(pAllocationCallbacks, 1024) {}
+
+ template<typename... Types> VmaAllocation Allocate(Types&&... args);
+ void Free(VmaAllocation hAlloc);
+
+private:
+ VMA_MUTEX m_Mutex;
+ VmaPoolAllocator<VmaAllocation_T> m_Allocator;
+};
+
+template<typename... Types>
+VmaAllocation VmaAllocationObjectAllocator::Allocate(Types&&... args)
+{
+ VmaMutexLock mutexLock(m_Mutex);
+ return m_Allocator.Alloc(std::forward<Types>(args)...);
+}
+
+void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
+{
+ VmaMutexLock mutexLock(m_Mutex);
+ m_Allocator.Free(hAlloc);
+}
+#endif // _VMA_ALLOCATION_OBJECT_ALLOCATOR
+
+#ifndef _VMA_VIRTUAL_BLOCK_T
+struct VmaVirtualBlock_T
+{
+ VMA_CLASS_NO_COPY(VmaVirtualBlock_T)
+public:
+ const bool m_AllocationCallbacksSpecified;
+ const VkAllocationCallbacks m_AllocationCallbacks;
+
+ VmaVirtualBlock_T(const VmaVirtualBlockCreateInfo& createInfo);
+ ~VmaVirtualBlock_T();
+
+ VkResult Init() { return VK_SUCCESS; }
+ bool IsEmpty() const { return m_Metadata->IsEmpty(); }
+ void Free(VmaVirtualAllocation allocation) { m_Metadata->Free((VmaAllocHandle)allocation); }
+ void SetAllocationUserData(VmaVirtualAllocation allocation, void* userData) { m_Metadata->SetAllocationUserData((VmaAllocHandle)allocation, userData); }
+ void Clear() { m_Metadata->Clear(); }
+
+ const VkAllocationCallbacks* GetAllocationCallbacks() const;
+ void GetAllocationInfo(VmaVirtualAllocation allocation, VmaVirtualAllocationInfo& outInfo);
+ VkResult Allocate(const VmaVirtualAllocationCreateInfo& createInfo, VmaVirtualAllocation& outAllocation,
+ VkDeviceSize* outOffset);
+ void GetStatistics(VmaStatistics& outStats) const;
+ void CalculateDetailedStatistics(VmaDetailedStatistics& outStats) const;
+#if VMA_STATS_STRING_ENABLED
+ void BuildStatsString(bool detailedMap, VmaStringBuilder& sb) const;
+#endif
+
+private:
+ VmaBlockMetadata* m_Metadata;
+};
+
+#ifndef _VMA_VIRTUAL_BLOCK_T_FUNCTIONS
+VmaVirtualBlock_T::VmaVirtualBlock_T(const VmaVirtualBlockCreateInfo& createInfo)
+ : m_AllocationCallbacksSpecified(createInfo.pAllocationCallbacks != VMA_NULL),
+ m_AllocationCallbacks(createInfo.pAllocationCallbacks != VMA_NULL ? *createInfo.pAllocationCallbacks : VmaEmptyAllocationCallbacks)
+{
+ const uint32_t algorithm = createInfo.flags & VMA_VIRTUAL_BLOCK_CREATE_ALGORITHM_MASK;
+ switch (algorithm)
+ {
+ default:
+ VMA_ASSERT(0);
+ case 0:
+ m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_TLSF)(VK_NULL_HANDLE, 1, true);
+ break;
+ case VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT:
+ m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_Linear)(VK_NULL_HANDLE, 1, true);
+ break;
+ }
+
+ m_Metadata->Init(createInfo.size);
+}
+
+VmaVirtualBlock_T::~VmaVirtualBlock_T()
+{
+ // Define macro VMA_DEBUG_LOG to receive the list of the unfreed allocations
+ if (!m_Metadata->IsEmpty())
+ m_Metadata->DebugLogAllAllocations();
+ // This is the most important assert in the entire library.
+ // Hitting it means you have some memory leak - unreleased virtual allocations.
+ VMA_ASSERT(m_Metadata->IsEmpty() && "Some virtual allocations were not freed before destruction of this virtual block!");
+
+ vma_delete(GetAllocationCallbacks(), m_Metadata);
+}
+
+const VkAllocationCallbacks* VmaVirtualBlock_T::GetAllocationCallbacks() const
+{
+ return m_AllocationCallbacksSpecified ?
&m_AllocationCallbacks : VMA_NULL; +} + +void VmaVirtualBlock_T::GetAllocationInfo(VmaVirtualAllocation allocation, VmaVirtualAllocationInfo& outInfo) +{ + m_Metadata->GetAllocationInfo((VmaAllocHandle)allocation, outInfo); +} + +VkResult VmaVirtualBlock_T::Allocate(const VmaVirtualAllocationCreateInfo& createInfo, VmaVirtualAllocation& outAllocation, + VkDeviceSize* outOffset) +{ + VmaAllocationRequest request = {}; + if (m_Metadata->CreateAllocationRequest( + createInfo.size, // allocSize + VMA_MAX(createInfo.alignment, (VkDeviceSize)1), // allocAlignment + (createInfo.flags & VMA_VIRTUAL_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0, // upperAddress + VMA_SUBALLOCATION_TYPE_UNKNOWN, // allocType - unimportant + createInfo.flags & VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MASK, // strategy + &request)) + { + m_Metadata->Alloc(request, + VMA_SUBALLOCATION_TYPE_UNKNOWN, // type - unimportant + createInfo.pUserData); + outAllocation = (VmaVirtualAllocation)request.allocHandle; + if(outOffset) + *outOffset = m_Metadata->GetAllocationOffset(request.allocHandle); + return VK_SUCCESS; + } + outAllocation = (VmaVirtualAllocation)VK_NULL_HANDLE; + if (outOffset) + *outOffset = UINT64_MAX; + return VK_ERROR_OUT_OF_DEVICE_MEMORY; +} + +void VmaVirtualBlock_T::GetStatistics(VmaStatistics& outStats) const +{ + VmaClearStatistics(outStats); + m_Metadata->AddStatistics(outStats); +} + +void VmaVirtualBlock_T::CalculateDetailedStatistics(VmaDetailedStatistics& outStats) const +{ + VmaClearDetailedStatistics(outStats); + m_Metadata->AddDetailedStatistics(outStats); +} + +#if VMA_STATS_STRING_ENABLED +void VmaVirtualBlock_T::BuildStatsString(bool detailedMap, VmaStringBuilder& sb) const +{ + VmaJsonWriter json(GetAllocationCallbacks(), sb); + json.BeginObject(); + + VmaDetailedStatistics stats; + CalculateDetailedStatistics(stats); + + json.WriteString("Stats"); + VmaPrintDetailedStatistics(json, stats); + + if (detailedMap) + { + json.WriteString("Details"); + json.BeginObject(); + m_Metadata->PrintDetailedMap(json); + json.EndObject(); + } + + json.EndObject(); +} +#endif // VMA_STATS_STRING_ENABLED +#endif // _VMA_VIRTUAL_BLOCK_T_FUNCTIONS +#endif // _VMA_VIRTUAL_BLOCK_T + + +// Main allocator object. +struct VmaAllocator_T +{ + VMA_CLASS_NO_COPY(VmaAllocator_T) +public: + bool m_UseMutex; + uint32_t m_VulkanApiVersion; + bool m_UseKhrDedicatedAllocation; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0). + bool m_UseKhrBindMemory2; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0). + bool m_UseExtMemoryBudget; + bool m_UseAmdDeviceCoherentMemory; + bool m_UseKhrBufferDeviceAddress; + bool m_UseExtMemoryPriority; + VkDevice m_hDevice; + VkInstance m_hInstance; + bool m_AllocationCallbacksSpecified; + VkAllocationCallbacks m_AllocationCallbacks; + VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks; + VmaAllocationObjectAllocator m_AllocationObjectAllocator; + + // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap, so cannot allocate more than the heap size. + uint32_t m_HeapSizeLimitMask; + + VkPhysicalDeviceProperties m_PhysicalDeviceProperties; + VkPhysicalDeviceMemoryProperties m_MemProps; + + // Default pools. + VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES]; + VmaDedicatedAllocationList m_DedicatedAllocations[VK_MAX_MEMORY_TYPES]; + + VmaCurrentBudgetData m_Budget; + VMA_ATOMIC_UINT32 m_DeviceMemoryCount; // Total number of VkDeviceMemory objects. 
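+ // Maintained by AllocateVulkanMemory()/FreeVulkanMemory() declared below.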
+ + VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo); + VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo); + ~VmaAllocator_T(); + + const VkAllocationCallbacks* GetAllocationCallbacks() const + { + return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : VMA_NULL; + } + const VmaVulkanFunctions& GetVulkanFunctions() const + { + return m_VulkanFunctions; + } + + VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; } + + VkDeviceSize GetBufferImageGranularity() const + { + return VMA_MAX( + static_cast(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY), + m_PhysicalDeviceProperties.limits.bufferImageGranularity); + } + + uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; } + uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; } + + uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const + { + VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount); + return m_MemProps.memoryTypes[memTypeIndex].heapIndex; + } + // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT. + bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const + { + return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) == + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; + } + // Minimum alignment for all allocations in specific memory type. + VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const + { + return IsMemoryTypeNonCoherent(memTypeIndex) ? + VMA_MAX((VkDeviceSize)VMA_MIN_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) : + (VkDeviceSize)VMA_MIN_ALIGNMENT; + } + + bool IsIntegratedGpu() const + { + return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU; + } + + uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; } + + void GetBufferMemoryRequirements( + VkBuffer hBuffer, + VkMemoryRequirements& memReq, + bool& requiresDedicatedAllocation, + bool& prefersDedicatedAllocation) const; + void GetImageMemoryRequirements( + VkImage hImage, + VkMemoryRequirements& memReq, + bool& requiresDedicatedAllocation, + bool& prefersDedicatedAllocation) const; + VkResult FindMemoryTypeIndex( + uint32_t memoryTypeBits, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + VkFlags bufImgUsage, // VkBufferCreateInfo::usage or VkImageCreateInfo::usage. UINT32_MAX if unknown. + uint32_t* pMemoryTypeIndex) const; + + // Main allocation function. + VkResult AllocateMemory( + const VkMemoryRequirements& vkMemReq, + bool requiresDedicatedAllocation, + bool prefersDedicatedAllocation, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VkFlags dedicatedBufferImageUsage, // UINT32_MAX if unknown. + const VmaAllocationCreateInfo& createInfo, + VmaSuballocationType suballocType, + size_t allocationCount, + VmaAllocation* pAllocations); + + // Main deallocation function. 
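+ // Counterpart of AllocateMemory() above; accepts the same array of allocations it filled in.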
+ void FreeMemory(
+ size_t allocationCount,
+ const VmaAllocation* pAllocations);
+
+ void CalculateStatistics(VmaTotalStatistics* pStats);
+
+ void GetHeapBudgets(
+ VmaBudget* outBudgets, uint32_t firstHeap, uint32_t heapCount);
+
+#if VMA_STATS_STRING_ENABLED
+ void PrintDetailedMap(class VmaJsonWriter& json);
+#endif
+
+ void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
+
+ VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
+ void DestroyPool(VmaPool pool);
+ void GetPoolStatistics(VmaPool pool, VmaStatistics* pPoolStats);
+ void CalculatePoolStatistics(VmaPool pool, VmaDetailedStatistics* pPoolStats);
+
+ void SetCurrentFrameIndex(uint32_t frameIndex);
+ uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }
+
+ VkResult CheckPoolCorruption(VmaPool hPool);
+ VkResult CheckCorruption(uint32_t memoryTypeBits);
+
+ // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
+ VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
+ // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
+ void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
+ // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR.
+ VkResult BindVulkanBuffer(
+ VkDeviceMemory memory,
+ VkDeviceSize memoryOffset,
+ VkBuffer buffer,
+ const void* pNext);
+ // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR.
+ VkResult BindVulkanImage(
+ VkDeviceMemory memory,
+ VkDeviceSize memoryOffset,
+ VkImage image,
+ const void* pNext);
+
+ VkResult Map(VmaAllocation hAllocation, void** ppData);
+ void Unmap(VmaAllocation hAllocation);
+
+ VkResult BindBufferMemory(
+ VmaAllocation hAllocation,
+ VkDeviceSize allocationLocalOffset,
+ VkBuffer hBuffer,
+ const void* pNext);
+ VkResult BindImageMemory(
+ VmaAllocation hAllocation,
+ VkDeviceSize allocationLocalOffset,
+ VkImage hImage,
+ const void* pNext);
+
+ VkResult FlushOrInvalidateAllocation(
+ VmaAllocation hAllocation,
+ VkDeviceSize offset, VkDeviceSize size,
+ VMA_CACHE_OPERATION op);
+ VkResult FlushOrInvalidateAllocations(
+ uint32_t allocationCount,
+ const VmaAllocation* allocations,
+ const VkDeviceSize* offsets, const VkDeviceSize* sizes,
+ VMA_CACHE_OPERATION op);
+
+ void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);
+
+ /*
+ Returns bit mask of memory types that can support defragmentation on GPU as
+ they support creation of required buffer for copy operations.
+ */
+ uint32_t GetGpuDefragmentationMemoryTypeBits();
+
+#if VMA_EXTERNAL_MEMORY
+ VkExternalMemoryHandleTypeFlagsKHR GetExternalMemoryHandleTypeFlags(uint32_t memTypeIndex) const
+ {
+ return m_TypeExternalMemoryHandleTypes[memTypeIndex];
+ }
+#endif // #if VMA_EXTERNAL_MEMORY
+
+private:
+ VkDeviceSize m_PreferredLargeHeapBlockSize;
+
+ VkPhysicalDevice m_PhysicalDevice;
+ VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
+ VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.
+#if VMA_EXTERNAL_MEMORY
+ VkExternalMemoryHandleTypeFlagsKHR m_TypeExternalMemoryHandleTypes[VK_MAX_MEMORY_TYPES];
+#endif // #if VMA_EXTERNAL_MEMORY
+
+ VMA_RW_MUTEX m_PoolsMutex;
+ typedef VmaIntrusiveLinkedList<VmaPoolListItemTraits> PoolList;
+ // Protected by m_PoolsMutex.
+ PoolList m_Pools;
+ uint32_t m_NextPoolId;
+
+ VmaVulkanFunctions m_VulkanFunctions;
+
+ // Global bit mask AND-ed with any memoryTypeBits to disallow certain memory types.
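+ // Computed once by CalculateGlobalMemoryTypeBits() (declared further below).
+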
+ uint32_t m_GlobalMemoryTypeBits; + + void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions); + +#if VMA_STATIC_VULKAN_FUNCTIONS == 1 + void ImportVulkanFunctions_Static(); +#endif + + void ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions); + +#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 + void ImportVulkanFunctions_Dynamic(); +#endif + + void ValidateVulkanFunctions(); + + VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex); + + VkResult AllocateMemoryOfType( + VmaPool pool, + VkDeviceSize size, + VkDeviceSize alignment, + bool dedicatedPreferred, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VkFlags dedicatedBufferImageUsage, + const VmaAllocationCreateInfo& createInfo, + uint32_t memTypeIndex, + VmaSuballocationType suballocType, + VmaDedicatedAllocationList& dedicatedAllocations, + VmaBlockVector& blockVector, + size_t allocationCount, + VmaAllocation* pAllocations); + + // Helper function only to be used inside AllocateDedicatedMemory. + VkResult AllocateDedicatedMemoryPage( + VmaPool pool, + VkDeviceSize size, + VmaSuballocationType suballocType, + uint32_t memTypeIndex, + const VkMemoryAllocateInfo& allocInfo, + bool map, + bool isUserDataString, + bool isMappingAllowed, + void* pUserData, + VmaAllocation* pAllocation); + + // Allocates and registers new VkDeviceMemory specifically for dedicated allocations. + VkResult AllocateDedicatedMemory( + VmaPool pool, + VkDeviceSize size, + VmaSuballocationType suballocType, + VmaDedicatedAllocationList& dedicatedAllocations, + uint32_t memTypeIndex, + bool map, + bool isUserDataString, + bool isMappingAllowed, + bool canAliasMemory, + void* pUserData, + float priority, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VkFlags dedicatedBufferImageUsage, + size_t allocationCount, + VmaAllocation* pAllocations, + const void* pNextChain = nullptr); + + void FreeDedicatedMemory(const VmaAllocation allocation); + + VkResult CalcMemTypeParams( + VmaAllocationCreateInfo& outCreateInfo, + uint32_t memTypeIndex, + VkDeviceSize size, + size_t allocationCount); + VkResult CalcAllocationParams( + VmaAllocationCreateInfo& outCreateInfo, + bool dedicatedRequired, + bool dedicatedPreferred); + + /* + Calculates and returns bit mask of memory types that can support defragmentation + on GPU as they support creation of required buffer for copy operations. 
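+ The result is cached in m_GpuDefragmentationMemoryTypeBits above
+ (UINT32_MAX meaning "not computed yet").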
+ */
+ uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;
+ uint32_t CalculateGlobalMemoryTypeBits() const;
+
+ bool GetFlushOrInvalidateRange(
+ VmaAllocation allocation,
+ VkDeviceSize offset, VkDeviceSize size,
+ VkMappedMemoryRange& outRange) const;
+
+#if VMA_MEMORY_BUDGET
+ void UpdateVulkanBudget();
+#endif // #if VMA_MEMORY_BUDGET
+};
+
+
+#ifndef _VMA_MEMORY_FUNCTIONS
+static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
+{
+ return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
+}
+
+static void VmaFree(VmaAllocator hAllocator, void* ptr)
+{
+ VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
+}
+
+template<typename T>
+static T* VmaAllocate(VmaAllocator hAllocator)
+{
+ return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
+}
+
+template<typename T>
+static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
+{
+ return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
+}
+
+template<typename T>
+static void vma_delete(VmaAllocator hAllocator, T* ptr)
+{
+ if(ptr != VMA_NULL)
+ {
+ ptr->~T();
+ VmaFree(hAllocator, ptr);
+ }
+}
+
+template<typename T>
+static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
+{
+ if(ptr != VMA_NULL)
+ {
+ for(size_t i = count; i--; )
+ ptr[i].~T();
+ VmaFree(hAllocator, ptr);
+ }
+}
+#endif // _VMA_MEMORY_FUNCTIONS
+
+#ifndef _VMA_DEVICE_MEMORY_BLOCK_FUNCTIONS
+VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator)
+ : m_pMetadata(VMA_NULL),
+ m_MemoryTypeIndex(UINT32_MAX),
+ m_Id(0),
+ m_hMemory(VK_NULL_HANDLE),
+ m_MapCount(0),
+ m_pMappedData(VMA_NULL) {}
+
+VmaDeviceMemoryBlock::~VmaDeviceMemoryBlock()
+{
+ VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
+ VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
+}
+
+void VmaDeviceMemoryBlock::Init(
+ VmaAllocator hAllocator,
+ VmaPool hParentPool,
+ uint32_t newMemoryTypeIndex,
+ VkDeviceMemory newMemory,
+ VkDeviceSize newSize,
+ uint32_t id,
+ uint32_t algorithm,
+ VkDeviceSize bufferImageGranularity)
+{
+ VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
+
+ m_hParentPool = hParentPool;
+ m_MemoryTypeIndex = newMemoryTypeIndex;
+ m_Id = id;
+ m_hMemory = newMemory;
+
+ switch (algorithm)
+ {
+ case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
+ m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator->GetAllocationCallbacks(),
+ bufferImageGranularity, false); // isVirtual
+ break;
+ default:
+ VMA_ASSERT(0);
+ // Fall-through.
+ case 0:
+ m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_TLSF)(hAllocator->GetAllocationCallbacks(),
+ bufferImageGranularity, false); // isVirtual
+ }
+ m_pMetadata->Init(newSize);
+}
+
+void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
+{
+ // Define macro VMA_DEBUG_LOG to receive the list of the unfreed allocations
+ if (!m_pMetadata->IsEmpty())
+ m_pMetadata->DebugLogAllAllocations();
+ // This is the most important assert in the entire library.
+ // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
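+ // With VMA_DEBUG_LOG defined, the DebugLogAllAllocations() call above prints
+ // the offset, size and user data of every leaked allocation.
+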
+ VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!"); + + VMA_ASSERT(m_hMemory != VK_NULL_HANDLE); + allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory); + m_hMemory = VK_NULL_HANDLE; + + vma_delete(allocator, m_pMetadata); + m_pMetadata = VMA_NULL; +} + +void VmaDeviceMemoryBlock::PostFree(VmaAllocator hAllocator) +{ + if(m_MappingHysteresis.PostFree()) + { + VMA_ASSERT(m_MappingHysteresis.GetExtraMapping() == 0); + if (m_MapCount == 0) + { + m_pMappedData = VMA_NULL; + (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory); + } + } +} + +bool VmaDeviceMemoryBlock::Validate() const +{ + VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) && + (m_pMetadata->GetSize() != 0)); + + return m_pMetadata->Validate(); +} + +VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator) +{ + void* pData = nullptr; + VkResult res = Map(hAllocator, 1, &pData); + if (res != VK_SUCCESS) + { + return res; + } + + res = m_pMetadata->CheckCorruption(pData); + + Unmap(hAllocator, 1); + + return res; +} + +VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData) +{ + if (count == 0) + { + return VK_SUCCESS; + } + + VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + const uint32_t oldTotalMapCount = m_MapCount + m_MappingHysteresis.GetExtraMapping(); + m_MappingHysteresis.PostMap(); + if (oldTotalMapCount != 0) + { + m_MapCount += count; + VMA_ASSERT(m_pMappedData != VMA_NULL); + if (ppData != VMA_NULL) + { + *ppData = m_pMappedData; + } + return VK_SUCCESS; + } + else + { + VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)( + hAllocator->m_hDevice, + m_hMemory, + 0, // offset + VK_WHOLE_SIZE, + 0, // flags + &m_pMappedData); + if (result == VK_SUCCESS) + { + if (ppData != VMA_NULL) + { + *ppData = m_pMappedData; + } + m_MapCount = count; + } + return result; + } +} + +void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count) +{ + if (count == 0) + { + return; + } + + VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + if (m_MapCount >= count) + { + m_MapCount -= count; + const uint32_t totalMapCount = m_MapCount + m_MappingHysteresis.GetExtraMapping(); + if (totalMapCount == 0) + { + m_pMappedData = VMA_NULL; + (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory); + } + m_MappingHysteresis.PostUnmap(); + } + else + { + VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped."); + } +} + +VkResult VmaDeviceMemoryBlock::WriteMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize) +{ + VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION); + + void* pData; + VkResult res = Map(hAllocator, 1, &pData); + if (res != VK_SUCCESS) + { + return res; + } + + VmaWriteMagicValue(pData, allocOffset + allocSize); + + Unmap(hAllocator, 1); + return VK_SUCCESS; +} + +VkResult VmaDeviceMemoryBlock::ValidateMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize) +{ + VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION); + + void* pData; + VkResult res = Map(hAllocator, 1, &pData); + if (res != VK_SUCCESS) + { + return res; + } + + if (!VmaValidateMagicValue(pData, allocOffset + allocSize)) + { + VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!"); + } + + 
Unmap(hAllocator, 1); + return VK_SUCCESS; +} + +VkResult VmaDeviceMemoryBlock::BindBufferMemory( + const VmaAllocator hAllocator, + const VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkBuffer hBuffer, + const void* pNext) +{ + VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK && + hAllocation->GetBlock() == this); + VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() && + "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?"); + const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset; + // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads. + VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext); +} + +VkResult VmaDeviceMemoryBlock::BindImageMemory( + const VmaAllocator hAllocator, + const VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkImage hImage, + const void* pNext) +{ + VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK && + hAllocation->GetBlock() == this); + VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() && + "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?"); + const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset; + // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads. + VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext); +} +#endif // _VMA_DEVICE_MEMORY_BLOCK_FUNCTIONS + +#ifndef _VMA_ALLOCATION_T_FUNCTIONS +VmaAllocation_T::VmaAllocation_T(bool mappingAllowed) + : m_Alignment{ 1 }, + m_Size{ 0 }, + m_pUserData{ VMA_NULL }, + m_pName{ VMA_NULL }, + m_MemoryTypeIndex{ 0 }, + m_Type{ (uint8_t)ALLOCATION_TYPE_NONE }, + m_SuballocationType{ (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN }, + m_MapCount{ 0 }, + m_Flags{ 0 } +{ + if(mappingAllowed) + m_Flags |= (uint8_t)FLAG_MAPPING_ALLOWED; + +#if VMA_STATS_STRING_ENABLED + m_BufferImageUsage = 0; +#endif +} + +VmaAllocation_T::~VmaAllocation_T() +{ + VMA_ASSERT(m_MapCount == 0 && "Allocation was not unmapped before destruction."); + + // Check if owned string was freed. + VMA_ASSERT(m_pName == VMA_NULL); +} + +void VmaAllocation_T::InitBlockAllocation( + VmaDeviceMemoryBlock* block, + VmaAllocHandle allocHandle, + VkDeviceSize alignment, + VkDeviceSize size, + uint32_t memoryTypeIndex, + VmaSuballocationType suballocationType, + bool mapped) +{ + VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE); + VMA_ASSERT(block != VMA_NULL); + m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK; + m_Alignment = alignment; + m_Size = size; + m_MemoryTypeIndex = memoryTypeIndex; + if(mapped) + { + VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! 
Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); + m_Flags |= (uint8_t)FLAG_PERSISTENT_MAP; + } + m_SuballocationType = (uint8_t)suballocationType; + m_BlockAllocation.m_Block = block; + m_BlockAllocation.m_AllocHandle = allocHandle; +} + +void VmaAllocation_T::InitDedicatedAllocation( + VmaPool hParentPool, + uint32_t memoryTypeIndex, + VkDeviceMemory hMemory, + VmaSuballocationType suballocationType, + void* pMappedData, + VkDeviceSize size) +{ + VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE); + VMA_ASSERT(hMemory != VK_NULL_HANDLE); + m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED; + m_Alignment = 0; + m_Size = size; + m_MemoryTypeIndex = memoryTypeIndex; + m_SuballocationType = (uint8_t)suballocationType; + if(pMappedData != VMA_NULL) + { + VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); + m_Flags |= (uint8_t)FLAG_PERSISTENT_MAP; + } + m_DedicatedAllocation.m_hParentPool = hParentPool; + m_DedicatedAllocation.m_hMemory = hMemory; + m_DedicatedAllocation.m_pMappedData = pMappedData; + m_DedicatedAllocation.m_Prev = VMA_NULL; + m_DedicatedAllocation.m_Next = VMA_NULL; +} + +void VmaAllocation_T::SetName(VmaAllocator hAllocator, const char* pName) +{ + VMA_ASSERT(pName == VMA_NULL || pName != m_pName); + + FreeName(hAllocator); + + if (pName != VMA_NULL) + m_pName = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), pName); +} + +uint8_t VmaAllocation_T::SwapBlockAllocation(VmaAllocator hAllocator, VmaAllocation allocation) +{ + VMA_ASSERT(allocation != VMA_NULL); + VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK); + VMA_ASSERT(allocation->m_Type == ALLOCATION_TYPE_BLOCK); + + if (m_MapCount != 0) + m_BlockAllocation.m_Block->Unmap(hAllocator, m_MapCount); + + m_BlockAllocation.m_Block->m_pMetadata->SetAllocationUserData(m_BlockAllocation.m_AllocHandle, allocation); + VMA_SWAP(m_BlockAllocation, allocation->m_BlockAllocation); + m_BlockAllocation.m_Block->m_pMetadata->SetAllocationUserData(m_BlockAllocation.m_AllocHandle, this); + +#if VMA_STATS_STRING_ENABLED + VMA_SWAP(m_BufferImageUsage, allocation->m_BufferImageUsage); +#endif + return m_MapCount; +} + +VmaAllocHandle VmaAllocation_T::GetAllocHandle() const +{ + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + return m_BlockAllocation.m_AllocHandle; + case ALLOCATION_TYPE_DEDICATED: + return VK_NULL_HANDLE; + default: + VMA_ASSERT(0); + return VK_NULL_HANDLE; + } +} + +VkDeviceSize VmaAllocation_T::GetOffset() const +{ + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + return m_BlockAllocation.m_Block->m_pMetadata->GetAllocationOffset(m_BlockAllocation.m_AllocHandle); + case ALLOCATION_TYPE_DEDICATED: + return 0; + default: + VMA_ASSERT(0); + return 0; + } +} + +VmaPool VmaAllocation_T::GetParentPool() const +{ + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + return m_BlockAllocation.m_Block->GetParentPool(); + case ALLOCATION_TYPE_DEDICATED: + return m_DedicatedAllocation.m_hParentPool; + default: + VMA_ASSERT(0); + return VK_NULL_HANDLE; + } +} + +VkDeviceMemory VmaAllocation_T::GetMemory() const +{ + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + return m_BlockAllocation.m_Block->GetDeviceMemory(); + case ALLOCATION_TYPE_DEDICATED: + return m_DedicatedAllocation.m_hMemory; + default: + VMA_ASSERT(0); + return VK_NULL_HANDLE; + } +} + +void* VmaAllocation_T::GetMappedData() const +{ + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + if (m_MapCount != 0 || 
IsPersistentMap()) + { + void* pBlockData = m_BlockAllocation.m_Block->GetMappedData(); + VMA_ASSERT(pBlockData != VMA_NULL); + return (char*)pBlockData + GetOffset(); + } + else + { + return VMA_NULL; + } + break; + case ALLOCATION_TYPE_DEDICATED: + VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0 || IsPersistentMap())); + return m_DedicatedAllocation.m_pMappedData; + default: + VMA_ASSERT(0); + return VMA_NULL; + } +} + +void VmaAllocation_T::BlockAllocMap() +{ + VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK); + VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); + + if (m_MapCount < 0xFF) + { + ++m_MapCount; + } + else + { + VMA_ASSERT(0 && "Allocation mapped too many times simultaneously."); + } +} + +void VmaAllocation_T::BlockAllocUnmap() +{ + VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK); + + if (m_MapCount > 0) + { + --m_MapCount; + } + else + { + VMA_ASSERT(0 && "Unmapping allocation not previously mapped."); + } +} + +VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData) +{ + VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED); + VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); + + if (m_MapCount != 0 || IsPersistentMap()) + { + if (m_MapCount < 0xFF) + { + VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL); + *ppData = m_DedicatedAllocation.m_pMappedData; + ++m_MapCount; + return VK_SUCCESS; + } + else + { + VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously."); + return VK_ERROR_MEMORY_MAP_FAILED; + } + } + else + { + VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)( + hAllocator->m_hDevice, + m_DedicatedAllocation.m_hMemory, + 0, // offset + VK_WHOLE_SIZE, + 0, // flags + ppData); + if (result == VK_SUCCESS) + { + m_DedicatedAllocation.m_pMappedData = *ppData; + m_MapCount = 1; + } + return result; + } +} + +void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator) +{ + VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED); + + if (m_MapCount > 0) + { + --m_MapCount; + if (m_MapCount == 0 && !IsPersistentMap()) + { + m_DedicatedAllocation.m_pMappedData = VMA_NULL; + (*hAllocator->GetVulkanFunctions().vkUnmapMemory)( + hAllocator->m_hDevice, + m_DedicatedAllocation.m_hMemory); + } + } + else + { + VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped."); + } +} + +#if VMA_STATS_STRING_ENABLED +void VmaAllocation_T::InitBufferImageUsage(uint32_t bufferImageUsage) +{ + VMA_ASSERT(m_BufferImageUsage == 0); + m_BufferImageUsage = bufferImageUsage; +} + +void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const +{ + json.WriteString("Type"); + json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]); + + json.WriteString("Size"); + json.WriteNumber(m_Size); + json.WriteString("Usage"); + json.WriteNumber(m_BufferImageUsage); + + if (m_pUserData != VMA_NULL) + { + json.WriteString("CustomData"); + json.BeginString(); + json.ContinueString_Pointer(m_pUserData); + json.EndString(); + } + if (m_pName != VMA_NULL) + { + json.WriteString("Name"); + json.WriteString(m_pName); + } +} +#endif // VMA_STATS_STRING_ENABLED + +void VmaAllocation_T::FreeName(VmaAllocator hAllocator) +{ + if(m_pName) + { + VmaFreeString(hAllocator->GetAllocationCallbacks(), m_pName); + m_pName = VMA_NULL; + } +} +#endif // 
_VMA_ALLOCATION_T_FUNCTIONS
+
+#ifndef _VMA_BLOCK_VECTOR_FUNCTIONS
+VmaBlockVector::VmaBlockVector(
+    VmaAllocator hAllocator,
+    VmaPool hParentPool,
+    uint32_t memoryTypeIndex,
+    VkDeviceSize preferredBlockSize,
+    size_t minBlockCount,
+    size_t maxBlockCount,
+    VkDeviceSize bufferImageGranularity,
+    bool explicitBlockSize,
+    uint32_t algorithm,
+    float priority,
+    VkDeviceSize minAllocationAlignment,
+    void* pMemoryAllocateNext)
+    : m_hAllocator(hAllocator),
+    m_hParentPool(hParentPool),
+    m_MemoryTypeIndex(memoryTypeIndex),
+    m_PreferredBlockSize(preferredBlockSize),
+    m_MinBlockCount(minBlockCount),
+    m_MaxBlockCount(maxBlockCount),
+    m_BufferImageGranularity(bufferImageGranularity),
+    m_ExplicitBlockSize(explicitBlockSize),
+    m_Algorithm(algorithm),
+    m_Priority(priority),
+    m_MinAllocationAlignment(minAllocationAlignment),
+    m_pMemoryAllocateNext(pMemoryAllocateNext),
+    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
+    m_NextBlockId(0) {}
+
+VmaBlockVector::~VmaBlockVector()
+{
+    for (size_t i = m_Blocks.size(); i--; )
+    {
+        m_Blocks[i]->Destroy(m_hAllocator);
+        vma_delete(m_hAllocator, m_Blocks[i]);
+    }
+}
+
+VkResult VmaBlockVector::CreateMinBlocks()
+{
+    for (size_t i = 0; i < m_MinBlockCount; ++i)
+    {
+        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
+        if (res != VK_SUCCESS)
+        {
+            return res;
+        }
+    }
+    return VK_SUCCESS;
+}
+
+void VmaBlockVector::AddStatistics(VmaStatistics& inoutStats)
+{
+    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+
+    const size_t blockCount = m_Blocks.size();
+    for (uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+    {
+        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
+        VMA_ASSERT(pBlock);
+        VMA_HEAVY_ASSERT(pBlock->Validate());
+        pBlock->m_pMetadata->AddStatistics(inoutStats);
+    }
+}
+
+void VmaBlockVector::AddDetailedStatistics(VmaDetailedStatistics& inoutStats)
+{
+    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+
+    const size_t blockCount = m_Blocks.size();
+    for (uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+    {
+        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
+        VMA_ASSERT(pBlock);
+        VMA_HEAVY_ASSERT(pBlock->Validate());
+        pBlock->m_pMetadata->AddDetailedStatistics(inoutStats);
+    }
+}
+
+bool VmaBlockVector::IsEmpty()
+{
+    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+    return m_Blocks.empty();
+}
+
+bool VmaBlockVector::IsCorruptionDetectionEnabled() const
+{
+    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
+        (VMA_DEBUG_MARGIN > 0) &&
+        (m_Algorithm == 0 || m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) &&
+        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
+}
+
+VkResult VmaBlockVector::Allocate(
+    VkDeviceSize size,
+    VkDeviceSize alignment,
+    const VmaAllocationCreateInfo& createInfo,
+    VmaSuballocationType suballocType,
+    size_t allocationCount,
+    VmaAllocation* pAllocations)
+{
+    size_t allocIndex;
+    VkResult res = VK_SUCCESS;
+
+    alignment = VMA_MAX(alignment, m_MinAllocationAlignment);
+
+    if (IsCorruptionDetectionEnabled())
+    {
+        size = VmaAlignUp(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
+        alignment = VmaAlignUp(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
+    }
+
+    {
+        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
+        for (allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+        {
+            res = 
AllocatePage(
+                size,
+                alignment,
+                createInfo,
+                suballocType,
+                pAllocations + allocIndex);
+            if (res != VK_SUCCESS)
+            {
+                break;
+            }
+        }
+    }
+
+    if (res != VK_SUCCESS)
+    {
+        // Free all already created allocations.
+        while (allocIndex--)
+            Free(pAllocations[allocIndex]);
+        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
+    }
+
+    return res;
+}
+
+VkResult VmaBlockVector::AllocatePage(
+    VkDeviceSize size,
+    VkDeviceSize alignment,
+    const VmaAllocationCreateInfo& createInfo,
+    VmaSuballocationType suballocType,
+    VmaAllocation* pAllocation)
+{
+    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
+
+    VkDeviceSize freeMemory;
+    {
+        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
+        VmaBudget heapBudget = {};
+        m_hAllocator->GetHeapBudgets(&heapBudget, heapIndex, 1);
+        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
+    }
+
+    const bool canFallbackToDedicated = !HasExplicitBlockSize() &&
+        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0;
+    const bool canCreateNewBlock =
+        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
+        (m_Blocks.size() < m_MaxBlockCount) &&
+        (freeMemory >= size || !canFallbackToDedicated);
+    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;
+
+    // Upper address can only be used with linear allocator and within single memory block.
+    if (isUpperAddress &&
+        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
+    {
+        return VK_ERROR_FEATURE_NOT_PRESENT;
+    }
+
+    // Early reject: requested allocation size is larger than maximum block size for this block vector.
+    if (size + VMA_DEBUG_MARGIN > m_PreferredBlockSize)
+    {
+        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+    }
+
+    // 1. Search existing allocations. Try to allocate.
+    if (m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
+    {
+        // Use only last block.
+        if (!m_Blocks.empty())
+        {
+            VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
+            VMA_ASSERT(pCurrBlock);
+            VkResult res = AllocateFromBlock(
+                pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation);
+            if (res == VK_SUCCESS)
+            {
+                VMA_DEBUG_LOG("  Returned from last block #%u", pCurrBlock->GetId());
+                IncrementallySortBlocks();
+                return VK_SUCCESS;
+            }
+        }
+    }
+    else
+    {
+        if (strategy != VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT) // MIN_MEMORY or default
+        {
+            const bool isHostVisible =
+                (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
+            if(isHostVisible)
+            {
+                const bool isMappingAllowed = (createInfo.flags &
+                    (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0;
+                /*
+                For non-mappable allocations, check blocks that are not mapped first.
+                For mappable allocations, check blocks that are already mapped first.
+                This way, having many blocks, we will separate mappable and non-mappable allocations,
+                hopefully limiting the number of blocks that are mapped, which will help tools like RenderDoc.
+                */
+                for(size_t mappingI = 0; mappingI < 2; ++mappingI)
+                {
+                    // Forward order in m_Blocks - prefer blocks with smallest amount of free space. 
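+                    /* Illustrative sketch (not part of VMA itself): this forward best-fit walk,
+                       and the backward walk used for MIN_TIME below, are selected by the caller
+                       through the strategy flags on VmaAllocationCreateInfo. A minimal usage
+                       sketch, assuming `allocator` and `bufferInfo` already exist:
+
+                           VmaAllocationCreateInfo allocCreateInfo = {};
+                           allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+                           // Pack tightly into the fullest blocks (this forward walk):
+                           allocCreateInfo.flags = VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT;
+                           // ...or allocate quickly from the emptiest blocks (the backward walk):
+                           // allocCreateInfo.flags = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;
+                           VkBuffer buffer;
+                           VmaAllocation allocation;
+                           vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo, &buffer, &allocation, VMA_NULL);
+                    */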
+ for (size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) + { + VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pCurrBlock); + const bool isBlockMapped = pCurrBlock->GetMappedData() != VMA_NULL; + if((mappingI == 0) == (isMappingAllowed == isBlockMapped)) + { + VkResult res = AllocateFromBlock( + pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); + if (res == VK_SUCCESS) + { + VMA_DEBUG_LOG(" Returned from existing block #%u", pCurrBlock->GetId()); + IncrementallySortBlocks(); + return VK_SUCCESS; + } + } + } + } + } + else + { + // Forward order in m_Blocks - prefer blocks with smallest amount of free space. + for (size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) + { + VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pCurrBlock); + VkResult res = AllocateFromBlock( + pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); + if (res == VK_SUCCESS) + { + VMA_DEBUG_LOG(" Returned from existing block #%u", pCurrBlock->GetId()); + IncrementallySortBlocks(); + return VK_SUCCESS; + } + } + } + } + else // VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT + { + // Backward order in m_Blocks - prefer blocks with largest amount of free space. + for (size_t blockIndex = m_Blocks.size(); blockIndex--; ) + { + VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pCurrBlock); + VkResult res = AllocateFromBlock(pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); + if (res == VK_SUCCESS) + { + VMA_DEBUG_LOG(" Returned from existing block #%u", pCurrBlock->GetId()); + IncrementallySortBlocks(); + return VK_SUCCESS; + } + } + } + } + + // 2. Try to create new block. + if (canCreateNewBlock) + { + // Calculate optimal size for new block. + VkDeviceSize newBlockSize = m_PreferredBlockSize; + uint32_t newBlockSizeShift = 0; + const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3; + + if (!m_ExplicitBlockSize) + { + // Allocate 1/8, 1/4, 1/2 as first blocks. + const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize(); + for (uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i) + { + const VkDeviceSize smallerNewBlockSize = newBlockSize / 2; + if (smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2) + { + newBlockSize = smallerNewBlockSize; + ++newBlockSizeShift; + } + else + { + break; + } + } + } + + size_t newBlockIndex = 0; + VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ? + CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY; + // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize. + if (!m_ExplicitBlockSize) + { + while (res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX) + { + const VkDeviceSize smallerNewBlockSize = newBlockSize / 2; + if (smallerNewBlockSize >= size) + { + newBlockSize = smallerNewBlockSize; + ++newBlockSizeShift; + res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ? 
+ CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY; + } + else + { + break; + } + } + } + + if (res == VK_SUCCESS) + { + VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex]; + VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size); + + res = AllocateFromBlock( + pBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); + if (res == VK_SUCCESS) + { + VMA_DEBUG_LOG(" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize); + IncrementallySortBlocks(); + return VK_SUCCESS; + } + else + { + // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment. + return VK_ERROR_OUT_OF_DEVICE_MEMORY; + } + } + } + + return VK_ERROR_OUT_OF_DEVICE_MEMORY; +} + +void VmaBlockVector::Free(const VmaAllocation hAllocation) +{ + VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL; + + bool budgetExceeded = false; + { + const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex); + VmaBudget heapBudget = {}; + m_hAllocator->GetHeapBudgets(&heapBudget, heapIndex, 1); + budgetExceeded = heapBudget.usage >= heapBudget.budget; + } + + // Scope for lock. + { + VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex); + + VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock(); + + if (IsCorruptionDetectionEnabled()) + { + VkResult res = pBlock->ValidateMagicValueAfterAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize()); + VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value."); + } + + if (hAllocation->IsPersistentMap()) + { + pBlock->Unmap(m_hAllocator, 1); + } + + const bool hadEmptyBlockBeforeFree = HasEmptyBlock(); + pBlock->m_pMetadata->Free(hAllocation->GetAllocHandle()); + pBlock->PostFree(m_hAllocator); + VMA_HEAVY_ASSERT(pBlock->Validate()); + + VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex); + + const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount; + // pBlock became empty after this deallocation. + if (pBlock->m_pMetadata->IsEmpty()) + { + // Already had empty block. We don't want to have two, so delete this one. + if ((hadEmptyBlockBeforeFree || budgetExceeded) && canDeleteBlock) + { + pBlockToDelete = pBlock; + Remove(pBlock); + } + // else: We now have one empty block - leave it. A hysteresis to avoid allocating whole block back and forth. + } + // pBlock didn't become empty, but we have another empty block - find and free that one. + // (This is optional, heuristics.) + else if (hadEmptyBlockBeforeFree && canDeleteBlock) + { + VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back(); + if (pLastBlock->m_pMetadata->IsEmpty()) + { + pBlockToDelete = pLastBlock; + m_Blocks.pop_back(); + } + } + + IncrementallySortBlocks(); + } + + // Destruction of a free block. Deferred until this point, outside of mutex + // lock, for performance reason. 
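+    /* Illustrative sketch (not part of VMA itself): deferring the expensive destruction until
+       after the mutex is released keeps the critical section short. The same pattern in
+       miniature, with hypothetical names:
+
+           Block* pToDelete = nullptr;
+           {
+               std::lock_guard<std::mutex> lock(m_Mutex); // critical section: only unlink
+               pToDelete = UnlinkEmptyBlock();
+           }
+           delete pToDelete; // slow work (vkFreeMemory etc.) happens outside the lock
+    */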
+ if (pBlockToDelete != VMA_NULL) + { + VMA_DEBUG_LOG(" Deleted empty block #%u", pBlockToDelete->GetId()); + pBlockToDelete->Destroy(m_hAllocator); + vma_delete(m_hAllocator, pBlockToDelete); + } + + m_hAllocator->m_Budget.RemoveAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), hAllocation->GetSize()); + m_hAllocator->m_AllocationObjectAllocator.Free(hAllocation); +} + +VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const +{ + VkDeviceSize result = 0; + for (size_t i = m_Blocks.size(); i--; ) + { + result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize()); + if (result >= m_PreferredBlockSize) + { + break; + } + } + return result; +} + +void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock) +{ + for (uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) + { + if (m_Blocks[blockIndex] == pBlock) + { + VmaVectorRemove(m_Blocks, blockIndex); + return; + } + } + VMA_ASSERT(0); +} + +void VmaBlockVector::IncrementallySortBlocks() +{ + if (!m_IncrementalSort) + return; + if (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) + { + // Bubble sort only until first swap. + for (size_t i = 1; i < m_Blocks.size(); ++i) + { + if (m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize()) + { + VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]); + return; + } + } + } +} + +void VmaBlockVector::SortByFreeSize() +{ + VMA_SORT(m_Blocks.begin(), m_Blocks.end(), + [](VmaDeviceMemoryBlock* b1, VmaDeviceMemoryBlock* b2) -> bool + { + return b1->m_pMetadata->GetSumFreeSize() < b2->m_pMetadata->GetSumFreeSize(); + }); +} + +VkResult VmaBlockVector::AllocateFromBlock( + VmaDeviceMemoryBlock* pBlock, + VkDeviceSize size, + VkDeviceSize alignment, + VmaAllocationCreateFlags allocFlags, + void* pUserData, + VmaSuballocationType suballocType, + uint32_t strategy, + VmaAllocation* pAllocation) +{ + const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0; + + VmaAllocationRequest currRequest = {}; + if (pBlock->m_pMetadata->CreateAllocationRequest( + size, + alignment, + isUpperAddress, + suballocType, + strategy, + &currRequest)) + { + return CommitAllocationRequest(currRequest, pBlock, alignment, allocFlags, pUserData, suballocType, pAllocation); + } + return VK_ERROR_OUT_OF_DEVICE_MEMORY; +} + +VkResult VmaBlockVector::CommitAllocationRequest( + VmaAllocationRequest& allocRequest, + VmaDeviceMemoryBlock* pBlock, + VkDeviceSize alignment, + VmaAllocationCreateFlags allocFlags, + void* pUserData, + VmaSuballocationType suballocType, + VmaAllocation* pAllocation) +{ + const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0; + const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0; + const bool isMappingAllowed = (allocFlags & + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0; + + pBlock->PostAlloc(); + // Allocate from pCurrBlock. + if (mapped) + { + VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL); + if (res != VK_SUCCESS) + { + return res; + } + } + + *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(isMappingAllowed); + pBlock->m_pMetadata->Alloc(allocRequest, suballocType, *pAllocation); + (*pAllocation)->InitBlockAllocation( + pBlock, + allocRequest.allocHandle, + alignment, + allocRequest.size, // Not size, as actual allocation size may be larger than requested! 
+ m_MemoryTypeIndex, + suballocType, + mapped); + VMA_HEAVY_ASSERT(pBlock->Validate()); + if (isUserDataString) + (*pAllocation)->SetName(m_hAllocator, (const char*)pUserData); + else + (*pAllocation)->SetUserData(m_hAllocator, pUserData); + m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), allocRequest.size); + if (VMA_DEBUG_INITIALIZE_ALLOCATIONS) + { + m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED); + } + if (IsCorruptionDetectionEnabled()) + { + VkResult res = pBlock->WriteMagicValueAfterAllocation(m_hAllocator, (*pAllocation)->GetOffset(), allocRequest.size); + VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value."); + } + return VK_SUCCESS; +} + +VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex) +{ + VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO }; + allocInfo.pNext = m_pMemoryAllocateNext; + allocInfo.memoryTypeIndex = m_MemoryTypeIndex; + allocInfo.allocationSize = blockSize; + +#if VMA_BUFFER_DEVICE_ADDRESS + // Every standalone block can potentially contain a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT - always enable the feature. + VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR }; + if (m_hAllocator->m_UseKhrBufferDeviceAddress) + { + allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR; + VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo); + } +#endif // VMA_BUFFER_DEVICE_ADDRESS + +#if VMA_MEMORY_PRIORITY + VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT }; + if (m_hAllocator->m_UseExtMemoryPriority) + { + VMA_ASSERT(m_Priority >= 0.f && m_Priority <= 1.f); + priorityInfo.priority = m_Priority; + VmaPnextChainPushFront(&allocInfo, &priorityInfo); + } +#endif // VMA_MEMORY_PRIORITY + +#if VMA_EXTERNAL_MEMORY + // Attach VkExportMemoryAllocateInfoKHR if necessary. + VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR }; + exportMemoryAllocInfo.handleTypes = m_hAllocator->GetExternalMemoryHandleTypeFlags(m_MemoryTypeIndex); + if (exportMemoryAllocInfo.handleTypes != 0) + { + VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo); + } +#endif // VMA_EXTERNAL_MEMORY + + VkDeviceMemory mem = VK_NULL_HANDLE; + VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem); + if (res < 0) + { + return res; + } + + // New VkDeviceMemory successfully created. + + // Create new Allocation for it. 
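+    /* Illustrative sketch (not part of VMA itself): the #if blocks above all extend
+       VkMemoryAllocateInfo the same way - by pushing an extension struct onto the front of its
+       pNext chain, which is what VmaPnextChainPushFront amounts to:
+
+           VkMemoryAllocateFlagsInfoKHR flagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
+           flagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
+           flagsInfo.pNext = allocInfo.pNext; // new struct points at the old chain head...
+           allocInfo.pNext = &flagsInfo;      // ...and becomes the new head itself
+    */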
+ VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator); + pBlock->Init( + m_hAllocator, + m_hParentPool, + m_MemoryTypeIndex, + mem, + allocInfo.allocationSize, + m_NextBlockId++, + m_Algorithm, + m_BufferImageGranularity); + + m_Blocks.push_back(pBlock); + if (pNewBlockIndex != VMA_NULL) + { + *pNewBlockIndex = m_Blocks.size() - 1; + } + + return VK_SUCCESS; +} + +bool VmaBlockVector::HasEmptyBlock() +{ + for (size_t index = 0, count = m_Blocks.size(); index < count; ++index) + { + VmaDeviceMemoryBlock* const pBlock = m_Blocks[index]; + if (pBlock->m_pMetadata->IsEmpty()) + { + return true; + } + } + return false; +} + +#if VMA_STATS_STRING_ENABLED +void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json) +{ + VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); + + + json.BeginObject(); + for (size_t i = 0; i < m_Blocks.size(); ++i) + { + json.BeginString(); + json.ContinueString(m_Blocks[i]->GetId()); + json.EndString(); + + json.BeginObject(); + json.WriteString("MapRefCount"); + json.WriteNumber(m_Blocks[i]->GetMapRefCount()); + + m_Blocks[i]->m_pMetadata->PrintDetailedMap(json); + json.EndObject(); + } + json.EndObject(); +} +#endif // VMA_STATS_STRING_ENABLED + +VkResult VmaBlockVector::CheckCorruption() +{ + if (!IsCorruptionDetectionEnabled()) + { + return VK_ERROR_FEATURE_NOT_PRESENT; + } + + VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); + for (uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) + { + VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pBlock); + VkResult res = pBlock->CheckCorruption(m_hAllocator); + if (res != VK_SUCCESS) + { + return res; + } + } + return VK_SUCCESS; +} + +#endif // _VMA_BLOCK_VECTOR_FUNCTIONS + +#ifndef _VMA_DEFRAGMENTATION_CONTEXT_FUNCTIONS +VmaDefragmentationContext_T::VmaDefragmentationContext_T( + VmaAllocator hAllocator, + const VmaDefragmentationInfo& info) + : m_MaxPassBytes(info.maxBytesPerPass == 0 ? VK_WHOLE_SIZE : info.maxBytesPerPass), + m_MaxPassAllocations(info.maxAllocationsPerPass == 0 ? 
UINT32_MAX : info.maxAllocationsPerPass),
+    m_MoveAllocator(hAllocator->GetAllocationCallbacks()),
+    m_Moves(m_MoveAllocator)
+{
+    m_Algorithm = info.flags & VMA_DEFRAGMENTATION_FLAG_ALGORITHM_MASK;
+
+    if (info.pool != VMA_NULL)
+    {
+        m_BlockVectorCount = 1;
+        m_PoolBlockVector = &info.pool->m_BlockVector;
+        m_pBlockVectors = &m_PoolBlockVector;
+        m_PoolBlockVector->SetIncrementalSort(false);
+        m_PoolBlockVector->SortByFreeSize();
+    }
+    else
+    {
+        m_BlockVectorCount = hAllocator->GetMemoryTypeCount();
+        m_PoolBlockVector = VMA_NULL;
+        m_pBlockVectors = hAllocator->m_pBlockVectors;
+        for (uint32_t i = 0; i < m_BlockVectorCount; ++i)
+        {
+            VmaBlockVector* vector = m_pBlockVectors[i];
+            if (vector != VMA_NULL)
+            {
+                vector->SetIncrementalSort(false);
+                vector->SortByFreeSize();
+            }
+        }
+    }
+
+    switch (m_Algorithm)
+    {
+    case 0: // Default algorithm
+        m_Algorithm = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT;
+        // Intentional fallthrough.
+    case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT:
+    {
+        m_AlgorithmState = vma_new_array(hAllocator, StateBalanced, m_BlockVectorCount);
+        break;
+    }
+    case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT:
+    {
+        if (hAllocator->GetBufferImageGranularity() > 1)
+        {
+            m_AlgorithmState = vma_new_array(hAllocator, StateExtensive, m_BlockVectorCount);
+        }
+        break;
+    }
+    }
+}
+
+VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
+{
+    if (m_PoolBlockVector != VMA_NULL)
+    {
+        m_PoolBlockVector->SetIncrementalSort(true);
+    }
+    else
+    {
+        for (uint32_t i = 0; i < m_BlockVectorCount; ++i)
+        {
+            VmaBlockVector* vector = m_pBlockVectors[i];
+            if (vector != VMA_NULL)
+                vector->SetIncrementalSort(true);
+        }
+    }
+
+    if (m_AlgorithmState)
+    {
+        switch (m_Algorithm)
+        {
+        case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT:
+            vma_delete_array(m_MoveAllocator.m_pCallbacks, reinterpret_cast<StateBalanced*>(m_AlgorithmState), m_BlockVectorCount);
+            break;
+        case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT:
+            vma_delete_array(m_MoveAllocator.m_pCallbacks, reinterpret_cast<StateExtensive*>(m_AlgorithmState), m_BlockVectorCount);
+            break;
+        default:
+            VMA_ASSERT(0);
+        }
+    }
+}
+
+VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassMoveInfo& moveInfo)
+{
+    if (m_PoolBlockVector != VMA_NULL)
+    {
+        VmaMutexLockWrite lock(m_PoolBlockVector->GetMutex(), m_PoolBlockVector->GetAllocator()->m_UseMutex);
+
+        if (m_PoolBlockVector->GetBlockCount() > 1)
+            ComputeDefragmentation(*m_PoolBlockVector, 0);
+        else if (m_PoolBlockVector->GetBlockCount() == 1)
+            ReallocWithinBlock(*m_PoolBlockVector, m_PoolBlockVector->GetBlock(0));
+    }
+    else
+    {
+        for (uint32_t i = 0; i < m_BlockVectorCount; ++i)
+        {
+            if (m_pBlockVectors[i] != VMA_NULL)
+            {
+                VmaMutexLockWrite lock(m_pBlockVectors[i]->GetMutex(), m_pBlockVectors[i]->GetAllocator()->m_UseMutex);
+
+                if (m_pBlockVectors[i]->GetBlockCount() > 1)
+                {
+                    if (ComputeDefragmentation(*m_pBlockVectors[i], i))
+                        break;
+                }
+                else if (m_pBlockVectors[i]->GetBlockCount() == 1)
+                {
+                    if (ReallocWithinBlock(*m_pBlockVectors[i], m_pBlockVectors[i]->GetBlock(0)))
+                        break;
+                }
+            }
+        }
+    }
+
+    moveInfo.moveCount = static_cast<uint32_t>(m_Moves.size());
+    if (moveInfo.moveCount > 0)
+    {
+        moveInfo.pMoves = m_Moves.data();
+        return VK_INCOMPLETE;
+    }
+
+    moveInfo.pMoves = VMA_NULL;
+    return VK_SUCCESS;
+}
+
+VkResult VmaDefragmentationContext_T::DefragmentPassEnd(VmaDefragmentationPassMoveInfo& moveInfo)
+{
+    VMA_ASSERT(moveInfo.moveCount > 0 ? 
moveInfo.pMoves != VMA_NULL : true);
+
+    VkResult result = VK_SUCCESS;
+    VmaStlAllocator<FragmentedBlock> blockAllocator(m_MoveAllocator.m_pCallbacks);
+    VmaVector<FragmentedBlock, VmaStlAllocator<FragmentedBlock>> immovableBlocks(blockAllocator);
+    VmaVector<FragmentedBlock, VmaStlAllocator<FragmentedBlock>> mappedBlocks(blockAllocator);
+
+    VmaAllocator allocator = VMA_NULL;
+    for (uint32_t i = 0; i < moveInfo.moveCount; ++i)
+    {
+        VmaDefragmentationMove& move = moveInfo.pMoves[i];
+        size_t prevCount = 0, currentCount = 0;
+        VkDeviceSize freedBlockSize = 0;
+
+        uint32_t vectorIndex;
+        VmaBlockVector* vector;
+        if (m_PoolBlockVector != VMA_NULL)
+        {
+            vectorIndex = 0;
+            vector = m_PoolBlockVector;
+        }
+        else
+        {
+            vectorIndex = move.srcAllocation->GetMemoryTypeIndex();
+            vector = m_pBlockVectors[vectorIndex];
+            VMA_ASSERT(vector != VMA_NULL);
+        }
+
+        switch (move.operation)
+        {
+        case VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY:
+        {
+            uint8_t mapCount = move.srcAllocation->SwapBlockAllocation(vector->m_hAllocator, move.dstTmpAllocation);
+            if (mapCount > 0)
+            {
+                allocator = vector->m_hAllocator;
+                VmaDeviceMemoryBlock* newMapBlock = move.srcAllocation->GetBlock();
+                bool notPresent = true;
+                for (FragmentedBlock& block : mappedBlocks)
+                {
+                    if (block.block == newMapBlock)
+                    {
+                        notPresent = false;
+                        block.data += mapCount;
+                        break;
+                    }
+                }
+                if (notPresent)
+                    mappedBlocks.push_back({ mapCount, newMapBlock });
+            }
+
+            // Scope for locks; Free has its own lock.
+            {
+                VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
+                prevCount = vector->GetBlockCount();
+                freedBlockSize = move.dstTmpAllocation->GetBlock()->m_pMetadata->GetSize();
+            }
+            vector->Free(move.dstTmpAllocation);
+            {
+                VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
+                currentCount = vector->GetBlockCount();
+            }
+
+            result = VK_INCOMPLETE;
+            break;
+        }
+        case VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE:
+        {
+            m_PassStats.bytesMoved -= move.srcAllocation->GetSize();
+            --m_PassStats.allocationsMoved;
+            vector->Free(move.dstTmpAllocation);
+
+            VmaDeviceMemoryBlock* newBlock = move.srcAllocation->GetBlock();
+            bool notPresent = true;
+            for (const FragmentedBlock& block : immovableBlocks)
+            {
+                if (block.block == newBlock)
+                {
+                    notPresent = false;
+                    break;
+                }
+            }
+            if (notPresent)
+                immovableBlocks.push_back({ vectorIndex, newBlock });
+            break;
+        }
+        case VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY:
+        {
+            m_PassStats.bytesMoved -= move.srcAllocation->GetSize();
+            --m_PassStats.allocationsMoved;
+            // Scope for locks; Free has its own lock.
+            {
+                VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
+                prevCount = vector->GetBlockCount();
+                freedBlockSize = move.srcAllocation->GetBlock()->m_pMetadata->GetSize();
+            }
+            vector->Free(move.srcAllocation);
+            {
+                VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
+                currentCount = vector->GetBlockCount();
+            }
+            freedBlockSize *= prevCount - currentCount;
+
+            VkDeviceSize dstBlockSize;
+            {
+                VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
+                dstBlockSize = move.dstTmpAllocation->GetBlock()->m_pMetadata->GetSize();
+            }
+            vector->Free(move.dstTmpAllocation);
+            {
+                VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
+                freedBlockSize += dstBlockSize * (currentCount - vector->GetBlockCount());
+                currentCount = vector->GetBlockCount();
+            }
+
+            result = VK_INCOMPLETE;
+            break;
+        }
+        default:
+            VMA_ASSERT(0);
+        }
+
+        if (prevCount > currentCount)
+        {
+            size_t freedBlocks = prevCount - currentCount;
+            m_PassStats.deviceMemoryBlocksFreed += 
static_cast<uint32_t>(freedBlocks);
+            m_PassStats.bytesFreed += freedBlockSize;
+        }
+
+        switch (m_Algorithm)
+        {
+        case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT:
+        {
+            if (m_AlgorithmState != VMA_NULL)
+            {
+                // Avoid unnecessary tries to allocate when new free block is available
+                StateExtensive& state = reinterpret_cast<StateExtensive*>(m_AlgorithmState)[vectorIndex];
+                if (state.firstFreeBlock != SIZE_MAX)
+                {
+                    const size_t diff = prevCount - currentCount;
+                    if (state.firstFreeBlock >= diff)
+                    {
+                        state.firstFreeBlock -= diff;
+                        if (state.firstFreeBlock != 0)
+                            state.firstFreeBlock -= vector->GetBlock(state.firstFreeBlock - 1)->m_pMetadata->IsEmpty();
+                    }
+                    else
+                        state.firstFreeBlock = 0;
+                }
+            }
+        }
+        }
+    }
+    moveInfo.moveCount = 0;
+    moveInfo.pMoves = VMA_NULL;
+    m_Moves.clear();
+
+    // Update stats
+    m_GlobalStats.allocationsMoved += m_PassStats.allocationsMoved;
+    m_GlobalStats.bytesFreed += m_PassStats.bytesFreed;
+    m_GlobalStats.bytesMoved += m_PassStats.bytesMoved;
+    m_GlobalStats.deviceMemoryBlocksFreed += m_PassStats.deviceMemoryBlocksFreed;
+    m_PassStats = { 0 };
+
+    // Move blocks with immovable allocations according to algorithm
+    if (immovableBlocks.size() > 0)
+    {
+        switch (m_Algorithm)
+        {
+        case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT:
+        {
+            if (m_AlgorithmState != VMA_NULL)
+            {
+                bool swapped = false;
+                // Move to the start of free blocks range
+                for (const FragmentedBlock& block : immovableBlocks)
+                {
+                    StateExtensive& state = reinterpret_cast<StateExtensive*>(m_AlgorithmState)[block.data];
+                    if (state.operation != StateExtensive::Operation::Cleanup)
+                    {
+                        VmaBlockVector* vector = m_pBlockVectors[block.data];
+                        VmaMutexLockWrite lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
+
+                        for (size_t i = 0, count = vector->GetBlockCount() - m_ImmovableBlockCount; i < count; ++i)
+                        {
+                            if (vector->GetBlock(i) == block.block)
+                            {
+                                VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[vector->GetBlockCount() - ++m_ImmovableBlockCount]);
+                                if (state.firstFreeBlock != SIZE_MAX)
+                                {
+                                    if (i + 1 < state.firstFreeBlock)
+                                    {
+                                        if (state.firstFreeBlock > 1)
+                                            VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[--state.firstFreeBlock]);
+                                        else
+                                            --state.firstFreeBlock;
+                                    }
+                                }
+                                swapped = true;
+                                break;
+                            }
+                        }
+                    }
+                }
+                if (swapped)
+                    result = VK_INCOMPLETE;
+                break;
+            }
+        }
+        default:
+        {
+            // Move to the beginning
+            for (const FragmentedBlock& block : immovableBlocks)
+            {
+                VmaBlockVector* vector = m_pBlockVectors[block.data];
+                VmaMutexLockWrite lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex);
+
+                for (size_t i = m_ImmovableBlockCount; i < vector->GetBlockCount(); ++i)
+                {
+                    if (vector->GetBlock(i) == block.block)
+                    {
+                        VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[m_ImmovableBlockCount++]);
+                        break;
+                    }
+                }
+            }
+            break;
+        }
+        }
+    }
+
+    // Bulk-map destination blocks
+    for (const FragmentedBlock& block : mappedBlocks)
+    {
+        VkResult res = block.block->Map(allocator, block.data, VMA_NULL);
+        VMA_ASSERT(res == VK_SUCCESS);
+    }
+    return result;
+}
+
+bool VmaDefragmentationContext_T::ComputeDefragmentation(VmaBlockVector& vector, size_t index)
+{
+    switch (m_Algorithm)
+    {
+    case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT:
+        return ComputeDefragmentation_Fast(vector);
+    default:
+        VMA_ASSERT(0);
+    case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT:
+        return ComputeDefragmentation_Balanced(vector, index, true);
+    case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT:
+        return ComputeDefragmentation_Full(vector);
+    case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT:
+        return 
ComputeDefragmentation_Extensive(vector, index); + } +} + +VmaDefragmentationContext_T::MoveAllocationData VmaDefragmentationContext_T::GetMoveData( + VmaAllocHandle handle, VmaBlockMetadata* metadata) +{ + MoveAllocationData moveData; + moveData.move.srcAllocation = (VmaAllocation)metadata->GetAllocationUserData(handle); + moveData.size = moveData.move.srcAllocation->GetSize(); + moveData.alignment = moveData.move.srcAllocation->GetAlignment(); + moveData.type = moveData.move.srcAllocation->GetSuballocationType(); + moveData.flags = 0; + + if (moveData.move.srcAllocation->IsPersistentMap()) + moveData.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT; + if (moveData.move.srcAllocation->IsMappingAllowed()) + moveData.flags |= VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT; + + return moveData; +} + +VmaDefragmentationContext_T::CounterStatus VmaDefragmentationContext_T::CheckCounters(VkDeviceSize bytes) +{ + // Ignore allocation if will exceed max size for copy + if (m_PassStats.bytesMoved + bytes > m_MaxPassBytes) + { + if (++m_IgnoredAllocs < MAX_ALLOCS_TO_IGNORE) + return CounterStatus::Ignore; + else + return CounterStatus::End; + } + return CounterStatus::Pass; +} + +bool VmaDefragmentationContext_T::IncrementCounters(VkDeviceSize bytes) +{ + m_PassStats.bytesMoved += bytes; + // Early return when max found + if (++m_PassStats.allocationsMoved >= m_MaxPassAllocations || m_PassStats.bytesMoved >= m_MaxPassBytes) + { + VMA_ASSERT(m_PassStats.allocationsMoved == m_MaxPassAllocations || + m_PassStats.bytesMoved == m_MaxPassBytes && "Exceeded maximal pass threshold!"); + return true; + } + return false; +} + +bool VmaDefragmentationContext_T::ReallocWithinBlock(VmaBlockVector& vector, VmaDeviceMemoryBlock* block) +{ + VmaBlockMetadata* metadata = block->m_pMetadata; + + for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); + handle != VK_NULL_HANDLE; + handle = metadata->GetNextAllocation(handle)) + { + MoveAllocationData moveData = GetMoveData(handle, metadata); + // Ignore newly created allocations by defragmentation algorithm + if (moveData.move.srcAllocation->GetUserData() == this) + continue; + switch (CheckCounters(moveData.move.srcAllocation->GetSize())) + { + case CounterStatus::Ignore: + continue; + case CounterStatus::End: + return true; + default: + VMA_ASSERT(0); + case CounterStatus::Pass: + break; + } + + VkDeviceSize offset = moveData.move.srcAllocation->GetOffset(); + if (offset != 0 && metadata->GetSumFreeSize() >= moveData.size) + { + VmaAllocationRequest request = {}; + if (metadata->CreateAllocationRequest( + moveData.size, + moveData.alignment, + false, + moveData.type, + VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, + &request)) + { + if (metadata->GetAllocationOffset(request.allocHandle) < offset) + { + if (vector.CommitAllocationRequest( + request, + block, + moveData.alignment, + moveData.flags, + this, + moveData.type, + &moveData.move.dstTmpAllocation) == VK_SUCCESS) + { + m_Moves.push_back(moveData.move); + if (IncrementCounters(moveData.size)) + return true; + } + } + } + } + } + return false; +} + +bool VmaDefragmentationContext_T::AllocInOtherBlock(size_t start, size_t end, MoveAllocationData& data, VmaBlockVector& vector) +{ + for (; start < end; ++start) + { + VmaDeviceMemoryBlock* dstBlock = vector.GetBlock(start); + if (dstBlock->m_pMetadata->GetSumFreeSize() >= data.size) + { + if (vector.AllocateFromBlock(dstBlock, + data.size, + data.alignment, + data.flags, + this, + data.type, + 0, + 
&data.move.dstTmpAllocation) == VK_SUCCESS)
+            {
+                m_Moves.push_back(data.move);
+                if (IncrementCounters(data.size))
+                    return true;
+                break;
+            }
+        }
+    }
+    return false;
+}
+
+bool VmaDefragmentationContext_T::ComputeDefragmentation_Fast(VmaBlockVector& vector)
+{
+    // Move only between blocks
+
+    // Go through allocations in last blocks and try to fit them inside first ones
+    for (size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i)
+    {
+        VmaBlockMetadata* metadata = vector.GetBlock(i)->m_pMetadata;
+
+        for (VmaAllocHandle handle = metadata->GetAllocationListBegin();
+            handle != VK_NULL_HANDLE;
+            handle = metadata->GetNextAllocation(handle))
+        {
+            MoveAllocationData moveData = GetMoveData(handle, metadata);
+            // Ignore newly created allocations by defragmentation algorithm
+            if (moveData.move.srcAllocation->GetUserData() == this)
+                continue;
+            switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
+            {
+            case CounterStatus::Ignore:
+                continue;
+            case CounterStatus::End:
+                return true;
+            default:
+                VMA_ASSERT(0);
+            case CounterStatus::Pass:
+                break;
+            }
+
+            // Check all previous blocks for free space
+            if (AllocInOtherBlock(0, i, moveData, vector))
+                return true;
+        }
+    }
+    return false;
+}
+
+bool VmaDefragmentationContext_T::ComputeDefragmentation_Balanced(VmaBlockVector& vector, size_t index, bool update)
+{
+    // Go over every allocation and try to fit it in previous blocks at lowest offsets,
+    // if not possible: realloc within single block to minimize offset (exclude offset == 0),
+    // but only if there are noticeable gaps between them (some heuristic, ex. average size of allocation in block)
+    VMA_ASSERT(m_AlgorithmState != VMA_NULL);
+
+    StateBalanced& vectorState = reinterpret_cast<StateBalanced*>(m_AlgorithmState)[index];
+    if (update && vectorState.avgAllocSize == UINT64_MAX)
+        UpdateVectorStatistics(vector, vectorState);
+
+    const size_t startMoveCount = m_Moves.size();
+    VkDeviceSize minimalFreeRegion = vectorState.avgFreeSize / 2;
+    for (size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i)
+    {
+        VmaDeviceMemoryBlock* block = vector.GetBlock(i);
+        VmaBlockMetadata* metadata = block->m_pMetadata;
+        VkDeviceSize prevFreeRegionSize = 0;
+
+        for (VmaAllocHandle handle = metadata->GetAllocationListBegin();
+            handle != VK_NULL_HANDLE;
+            handle = metadata->GetNextAllocation(handle))
+        {
+            MoveAllocationData moveData = GetMoveData(handle, metadata);
+            // Ignore newly created allocations by defragmentation algorithm
+            if (moveData.move.srcAllocation->GetUserData() == this)
+                continue;
+            switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
+            {
+            case CounterStatus::Ignore:
+                continue;
+            case CounterStatus::End:
+                return true;
+            default:
+                VMA_ASSERT(0);
+            case CounterStatus::Pass:
+                break;
+            }
+
+            // Check all previous blocks for free space
+            const size_t prevMoveCount = m_Moves.size();
+            if (AllocInOtherBlock(0, i, moveData, vector))
+                return true;
+
+            VkDeviceSize nextFreeRegionSize = metadata->GetNextFreeRegionSize(handle);
+            // If no room found then realloc within block for lower offset
+            VkDeviceSize offset = moveData.move.srcAllocation->GetOffset();
+            if (prevMoveCount == m_Moves.size() && offset != 0 && metadata->GetSumFreeSize() >= moveData.size)
+            {
+                // Check if realloc will make sense
+                if (prevFreeRegionSize >= minimalFreeRegion ||
+                    nextFreeRegionSize >= minimalFreeRegion ||
+                    moveData.size <= vectorState.avgFreeSize ||
+                    moveData.size <= vectorState.avgAllocSize)
+                {
+                    VmaAllocationRequest request = {};
+                    if 
(metadata->CreateAllocationRequest(
+                        moveData.size,
+                        moveData.alignment,
+                        false,
+                        moveData.type,
+                        VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT,
+                        &request))
+                    {
+                        if (metadata->GetAllocationOffset(request.allocHandle) < offset)
+                        {
+                            if (vector.CommitAllocationRequest(
+                                request,
+                                block,
+                                moveData.alignment,
+                                moveData.flags,
+                                this,
+                                moveData.type,
+                                &moveData.move.dstTmpAllocation) == VK_SUCCESS)
+                            {
+                                m_Moves.push_back(moveData.move);
+                                if (IncrementCounters(moveData.size))
+                                    return true;
+                            }
+                        }
+                    }
+                }
+            }
+            prevFreeRegionSize = nextFreeRegionSize;
+        }
+    }
+
+    // No moves performed, update statistics to current vector state
+    if (startMoveCount == m_Moves.size() && !update)
+    {
+        vectorState.avgAllocSize = UINT64_MAX;
+        return ComputeDefragmentation_Balanced(vector, index, false);
+    }
+    return false;
+}
+
+bool VmaDefragmentationContext_T::ComputeDefragmentation_Full(VmaBlockVector& vector)
+{
+    // Go over every allocation and try to fit it in previous blocks at lowest offsets,
+    // if not possible: realloc within single block to minimize offset (exclude offset == 0)
+
+    for (size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i)
+    {
+        VmaDeviceMemoryBlock* block = vector.GetBlock(i);
+        VmaBlockMetadata* metadata = block->m_pMetadata;
+
+        for (VmaAllocHandle handle = metadata->GetAllocationListBegin();
+            handle != VK_NULL_HANDLE;
+            handle = metadata->GetNextAllocation(handle))
+        {
+            MoveAllocationData moveData = GetMoveData(handle, metadata);
+            // Ignore newly created allocations by defragmentation algorithm
+            if (moveData.move.srcAllocation->GetUserData() == this)
+                continue;
+            switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
+            {
+            case CounterStatus::Ignore:
+                continue;
+            case CounterStatus::End:
+                return true;
+            default:
+                VMA_ASSERT(0);
+            case CounterStatus::Pass:
+                break;
+            }
+
+            // Check all previous blocks for free space
+            const size_t prevMoveCount = m_Moves.size();
+            if (AllocInOtherBlock(0, i, moveData, vector))
+                return true;
+
+            // If no room found then realloc within block for lower offset
+            VkDeviceSize offset = moveData.move.srcAllocation->GetOffset();
+            if (prevMoveCount == m_Moves.size() && offset != 0 && metadata->GetSumFreeSize() >= moveData.size)
+            {
+                VmaAllocationRequest request = {};
+                if (metadata->CreateAllocationRequest(
+                    moveData.size,
+                    moveData.alignment,
+                    false,
+                    moveData.type,
+                    VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT,
+                    &request))
+                {
+                    if (metadata->GetAllocationOffset(request.allocHandle) < offset)
+                    {
+                        if (vector.CommitAllocationRequest(
+                            request,
+                            block,
+                            moveData.alignment,
+                            moveData.flags,
+                            this,
+                            moveData.type,
+                            &moveData.move.dstTmpAllocation) == VK_SUCCESS)
+                        {
+                            m_Moves.push_back(moveData.move);
+                            if (IncrementCounters(moveData.size))
+                                return true;
+                        }
+                    }
+                }
+            }
+        }
+    }
+    return false;
+}
+
+bool VmaDefragmentationContext_T::ComputeDefragmentation_Extensive(VmaBlockVector& vector, size_t index)
+{
+    // First free single block, then populate it to the brim, then free another block, and so on
+
+    // Fallback to previous algorithm since without granularity conflicts it can achieve max packing
+    if (vector.m_BufferImageGranularity == 1)
+        return ComputeDefragmentation_Full(vector);
+
+    VMA_ASSERT(m_AlgorithmState != VMA_NULL);
+
+    StateExtensive& vectorState = reinterpret_cast<StateExtensive*>(m_AlgorithmState)[index];
+
+    bool texturePresent = false, bufferPresent = false, otherPresent = false;
+    switch (vectorState.operation)
+    {
+    case StateExtensive::Operation::Done: // Vector defragmented
+        return 
false;
+    case StateExtensive::Operation::FindFreeBlockBuffer:
+    case StateExtensive::Operation::FindFreeBlockTexture:
+    case StateExtensive::Operation::FindFreeBlockAll:
+    {
+        // No more blocks to free, just perform fast realloc and move to cleanup
+        if (vectorState.firstFreeBlock == 0)
+        {
+            vectorState.operation = StateExtensive::Operation::Cleanup;
+            return ComputeDefragmentation_Fast(vector);
+        }
+
+        // No free blocks, have to clear last one
+        size_t last = (vectorState.firstFreeBlock == SIZE_MAX ? vector.GetBlockCount() : vectorState.firstFreeBlock) - 1;
+        VmaBlockMetadata* freeMetadata = vector.GetBlock(last)->m_pMetadata;
+
+        const size_t prevMoveCount = m_Moves.size();
+        for (VmaAllocHandle handle = freeMetadata->GetAllocationListBegin();
+            handle != VK_NULL_HANDLE;
+            handle = freeMetadata->GetNextAllocation(handle))
+        {
+            MoveAllocationData moveData = GetMoveData(handle, freeMetadata);
+            switch (CheckCounters(moveData.move.srcAllocation->GetSize()))
+            {
+            case CounterStatus::Ignore:
+                continue;
+            case CounterStatus::End:
+                return true;
+            default:
+                VMA_ASSERT(0);
+            case CounterStatus::Pass:
+                break;
+            }
+
+            // Check all previous blocks for free space
+            if (AllocInOtherBlock(0, last, moveData, vector))
+            {
+                // Full clear performed already
+                if (prevMoveCount != m_Moves.size() && freeMetadata->GetNextAllocation(handle) == VK_NULL_HANDLE)
+                    reinterpret_cast<size_t*>(m_AlgorithmState)[index] = last;
+                return true;
+            }
+        }
+
+        if (prevMoveCount == m_Moves.size())
+        {
+            // Cannot perform full clear, have to move data in other blocks around
+            if (last != 0)
+            {
+                for (size_t i = last - 1; i; --i)
+                {
+                    if (ReallocWithinBlock(vector, vector.GetBlock(i)))
+                        return true;
+                }
+            }
+
+            if (prevMoveCount == m_Moves.size())
+            {
+                // No possible reallocs within blocks, try to move them around fast
+                return ComputeDefragmentation_Fast(vector);
+            }
+        }
+        else
+        {
+            switch (vectorState.operation)
+            {
+            case StateExtensive::Operation::FindFreeBlockBuffer:
+                vectorState.operation = StateExtensive::Operation::MoveBuffers;
+                break;
+            default:
+                VMA_ASSERT(0);
+            case StateExtensive::Operation::FindFreeBlockTexture:
+                vectorState.operation = StateExtensive::Operation::MoveTextures;
+                break;
+            case StateExtensive::Operation::FindFreeBlockAll:
+                vectorState.operation = StateExtensive::Operation::MoveAll;
+                break;
+            }
+            vectorState.firstFreeBlock = last;
+            // Nothing done, block found without reallocations, can perform more reallocs in the same pass
+            return ComputeDefragmentation_Extensive(vector, index);
+        }
+        break;
+    }
+    case StateExtensive::Operation::MoveTextures:
+    {
+        if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL, vector,
+            vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent))
+        {
+            if (texturePresent)
+            {
+                vectorState.operation = StateExtensive::Operation::FindFreeBlockTexture;
+                return ComputeDefragmentation_Extensive(vector, index);
+            }
+
+            if (!bufferPresent && !otherPresent)
+            {
+                vectorState.operation = StateExtensive::Operation::Cleanup;
+                break;
+            }
+
+            // No more textures to move, check buffers
+            vectorState.operation = StateExtensive::Operation::MoveBuffers;
+            bufferPresent = false;
+            otherPresent = false;
+        }
+        else
+            break;
+    }
+    case StateExtensive::Operation::MoveBuffers:
+    {
+        if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_BUFFER, vector,
+            vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent))
+        {
+            if (bufferPresent)
+            {
+                vectorState.operation = StateExtensive::Operation::FindFreeBlockBuffer;
+                return 
ComputeDefragmentation_Extensive(vector, index); + } + + if (!otherPresent) + { + vectorState.operation = StateExtensive::Operation::Cleanup; + break; + } + + // No more buffers to move, check all others + vectorState.operation = StateExtensive::Operation::MoveAll; + otherPresent = false; + } + else + break; + } + case StateExtensive::Operation::MoveAll: + { + if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_FREE, vector, + vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent)) + { + if (otherPresent) + { + vectorState.operation = StateExtensive::Operation::FindFreeBlockBuffer; + return ComputeDefragmentation_Extensive(vector, index); + } + // Everything moved + vectorState.operation = StateExtensive::Operation::Cleanup; + } + break; + } + case StateExtensive::Operation::Cleanup: + // Cleanup is handled below so that other operations may reuse the cleanup code. This case is here to prevent the unhandled enum value warning (C4062). + break; + } + + if (vectorState.operation == StateExtensive::Operation::Cleanup) + { + // All other work done, pack data in blocks even tighter if possible + const size_t prevMoveCount = m_Moves.size(); + for (size_t i = 0; i < vector.GetBlockCount(); ++i) + { + if (ReallocWithinBlock(vector, vector.GetBlock(i))) + return true; + } + + if (prevMoveCount == m_Moves.size()) + vectorState.operation = StateExtensive::Operation::Done; + } + return false; +} + +void VmaDefragmentationContext_T::UpdateVectorStatistics(VmaBlockVector& vector, StateBalanced& state) +{ + size_t allocCount = 0; + size_t freeCount = 0; + state.avgFreeSize = 0; + state.avgAllocSize = 0; + + for (size_t i = 0; i < vector.GetBlockCount(); ++i) + { + VmaBlockMetadata* metadata = vector.GetBlock(i)->m_pMetadata; + + allocCount += metadata->GetAllocationCount(); + freeCount += metadata->GetFreeRegionsCount(); + state.avgFreeSize += metadata->GetSumFreeSize(); + state.avgAllocSize += metadata->GetSize(); + } + + state.avgAllocSize = (state.avgAllocSize - state.avgFreeSize) / allocCount; + state.avgFreeSize /= freeCount; +} + +bool VmaDefragmentationContext_T::MoveDataToFreeBlocks(VmaSuballocationType currentType, + VmaBlockVector& vector, size_t firstFreeBlock, + bool& texturePresent, bool& bufferPresent, bool& otherPresent) +{ + const size_t prevMoveCount = m_Moves.size(); + for (size_t i = firstFreeBlock ; i;) + { + VmaDeviceMemoryBlock* block = vector.GetBlock(--i); + VmaBlockMetadata* metadata = block->m_pMetadata; + + for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); + handle != VK_NULL_HANDLE; + handle = metadata->GetNextAllocation(handle)) + { + MoveAllocationData moveData = GetMoveData(handle, metadata); + // Ignore newly created allocations by defragmentation algorithm + if (moveData.move.srcAllocation->GetUserData() == this) + continue; + switch (CheckCounters(moveData.move.srcAllocation->GetSize())) + { + case CounterStatus::Ignore: + continue; + case CounterStatus::End: + return true; + default: + VMA_ASSERT(0); + case CounterStatus::Pass: + break; + } + + // Move only single type of resources at once + if (!VmaIsBufferImageGranularityConflict(moveData.type, currentType)) + { + // Try to fit allocation into free blocks + if (AllocInOtherBlock(firstFreeBlock, vector.GetBlockCount(), moveData, vector)) + return false; + } + + if (!VmaIsBufferImageGranularityConflict(moveData.type, VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)) + texturePresent = true; + else if (!VmaIsBufferImageGranularityConflict(moveData.type, VMA_SUBALLOCATION_TYPE_BUFFER)) + bufferPresent = 
true;
+            else
+                otherPresent = true;
+        }
+    }
+    return prevMoveCount == m_Moves.size();
+}
+#endif // _VMA_DEFRAGMENTATION_CONTEXT_FUNCTIONS
+
+#ifndef _VMA_POOL_T_FUNCTIONS
+VmaPool_T::VmaPool_T(
+    VmaAllocator hAllocator,
+    const VmaPoolCreateInfo& createInfo,
+    VkDeviceSize preferredBlockSize)
+    : m_BlockVector(
+        hAllocator,
+        this, // hParentPool
+        createInfo.memoryTypeIndex,
+        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
+        createInfo.minBlockCount,
+        createInfo.maxBlockCount,
+        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
+        createInfo.blockSize != 0, // explicitBlockSize
+        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK, // algorithm
+        createInfo.priority,
+        VMA_MAX(hAllocator->GetMemoryTypeMinAlignment(createInfo.memoryTypeIndex), createInfo.minAllocationAlignment),
+        createInfo.pMemoryAllocateNext),
+    m_Id(0),
+    m_Name(VMA_NULL) {}
+
+VmaPool_T::~VmaPool_T()
+{
+    VMA_ASSERT(m_PrevPool == VMA_NULL && m_NextPool == VMA_NULL);
+}
+
+void VmaPool_T::SetName(const char* pName)
+{
+    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
+    VmaFreeString(allocs, m_Name);
+
+    if (pName != VMA_NULL)
+    {
+        m_Name = VmaCreateStringCopy(allocs, pName);
+    }
+    else
+    {
+        m_Name = VMA_NULL;
+    }
+}
+#endif // _VMA_POOL_T_FUNCTIONS
+
+#ifndef _VMA_ALLOCATOR_T_FUNCTIONS
+VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
+    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
+    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
+    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
+    m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
+    m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
+    m_UseAmdDeviceCoherentMemory((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT) != 0),
+    m_UseKhrBufferDeviceAddress((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT) != 0),
+    m_UseExtMemoryPriority((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT) != 0),
+    m_hDevice(pCreateInfo->device),
+    m_hInstance(pCreateInfo->instance),
+    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
+    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
+        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
+    m_AllocationObjectAllocator(&m_AllocationCallbacks),
+    m_HeapSizeLimitMask(0),
+    m_DeviceMemoryCount(0),
+    m_PreferredLargeHeapBlockSize(0),
+    m_PhysicalDevice(pCreateInfo->physicalDevice),
+    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
+    m_NextPoolId(0),
+    m_GlobalMemoryTypeBits(UINT32_MAX)
+{
+    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+    {
+        m_UseKhrDedicatedAllocation = false;
+        m_UseKhrBindMemory2 = false;
+    }
+
+    if(VMA_DEBUG_DETECT_CORRUPTION)
+    {
+        // Needs to be a multiple of uint32_t size because we are going to write VMA_CORRUPTION_DETECTION_MAGIC_VALUE to it. 
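+        /* Illustrative sketch (not part of VMA itself): corruption detection reserves
+           VMA_DEBUG_MARGIN bytes after each allocation, fills them with the magic value, and
+           verifies them on free or on demand. A typical opt-in before including the
+           implementation (the values shown are the user's choice):
+
+               #define VMA_DEBUG_MARGIN 16           // guard bytes after every allocation
+               #define VMA_DEBUG_DETECT_CORRUPTION 1 // write and verify magic values
+               #define VMA_IMPLEMENTATION
+               #include "vk_mem_alloc.h"
+
+               // Later, validate all host-visible, host-coherent memory explicitly:
+               VkResult res = vmaCheckCorruption(allocator, UINT32_MAX);
+        */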
+ VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0); + } + + VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device && pCreateInfo->instance); + + if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0)) + { +#if !(VMA_DEDICATED_ALLOCATION) + if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros."); + } +#endif +#if !(VMA_BIND_MEMORY2) + if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros."); + } +#endif + } +#if !(VMA_MEMORY_BUDGET) + if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros."); + } +#endif +#if !(VMA_BUFFER_DEVICE_ADDRESS) + if(m_UseKhrBufferDeviceAddress) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); + } +#endif +#if VMA_VULKAN_VERSION < 1002000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0)) + { + VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros."); + } +#endif +#if VMA_VULKAN_VERSION < 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros."); + } +#endif +#if !(VMA_MEMORY_PRIORITY) + if(m_UseExtMemoryPriority) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); + } +#endif + + memset(&m_DeviceMemoryCallbacks, 0 ,sizeof(m_DeviceMemoryCallbacks)); + memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties)); + memset(&m_MemProps, 0, sizeof(m_MemProps)); + + memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors)); + memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions)); + +#if VMA_EXTERNAL_MEMORY + memset(&m_TypeExternalMemoryHandleTypes, 0, sizeof(m_TypeExternalMemoryHandleTypes)); +#endif // #if VMA_EXTERNAL_MEMORY + + if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL) + { + m_DeviceMemoryCallbacks.pUserData = pCreateInfo->pDeviceMemoryCallbacks->pUserData; + m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate; + m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree; + } + + ImportVulkanFunctions(pCreateInfo->pVulkanFunctions); + + (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties); + (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps); + + VMA_ASSERT(VmaIsPow2(VMA_MIN_ALIGNMENT)); + VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY)); + VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity)); + VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize)); + + m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ? 
+        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
+
+    m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
+
+#if VMA_EXTERNAL_MEMORY
+    if(pCreateInfo->pTypeExternalMemoryHandleTypes != VMA_NULL)
+    {
+        memcpy(m_TypeExternalMemoryHandleTypes, pCreateInfo->pTypeExternalMemoryHandleTypes,
+            sizeof(VkExternalMemoryHandleTypeFlagsKHR) * GetMemoryTypeCount());
+    }
+#endif // #if VMA_EXTERNAL_MEMORY
+
+    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
+    {
+        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
+        {
+            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
+            if(limit != VK_WHOLE_SIZE)
+            {
+                m_HeapSizeLimitMask |= 1u << heapIndex;
+                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
+                {
+                    m_MemProps.memoryHeaps[heapIndex].size = limit;
+                }
+            }
+        }
+    }
+
+    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+    {
+        // Create only supported types
+        if((m_GlobalMemoryTypeBits & (1u << memTypeIndex)) != 0)
+        {
+            const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
+            m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
+                this,
+                VK_NULL_HANDLE, // hParentPool
+                memTypeIndex,
+                preferredBlockSize,
+                0,
+                SIZE_MAX,
+                GetBufferImageGranularity(),
+                false, // explicitBlockSize
+                0, // algorithm
+                0.5f, // priority (0.5 is the default per Vulkan spec)
+                GetMemoryTypeMinAlignment(memTypeIndex), // minAllocationAlignment
+                VMA_NULL); // pMemoryAllocateNext
+            // No need to call m_pBlockVectors[memTypeIndex][blockVectorTypeIndex]->CreateMinBlocks here,
+            // because minBlockCount is 0.
+        }
+    }
+}
+
+VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
+{
+    VkResult res = VK_SUCCESS;
+
+#if VMA_MEMORY_BUDGET
+    if(m_UseExtMemoryBudget)
+    {
+        UpdateVulkanBudget();
+    }
+#endif // #if VMA_MEMORY_BUDGET
+
+    return res;
+}
+
+VmaAllocator_T::~VmaAllocator_T()
+{
+    VMA_ASSERT(m_Pools.IsEmpty());
+
+    for(size_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
+    {
+        vma_delete(this, m_pBlockVectors[memTypeIndex]);
+    }
+}
+
+void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
+{
+#if VMA_STATIC_VULKAN_FUNCTIONS == 1
+    ImportVulkanFunctions_Static();
+#endif
+
+    if(pVulkanFunctions != VMA_NULL)
+    {
+        ImportVulkanFunctions_Custom(pVulkanFunctions);
+    }
+
+#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
+    ImportVulkanFunctions_Dynamic();
+#endif
+
+    ValidateVulkanFunctions();
+}
+
+#if VMA_STATIC_VULKAN_FUNCTIONS == 1
+
+void VmaAllocator_T::ImportVulkanFunctions_Static()
+{
+    // Vulkan 1.0
+    m_VulkanFunctions.vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)vkGetInstanceProcAddr;
+    m_VulkanFunctions.vkGetDeviceProcAddr = (PFN_vkGetDeviceProcAddr)vkGetDeviceProcAddr;
+    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
+    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
+    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
+    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
+    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
+    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
+    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
+    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
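+    /* Illustrative sketch (not part of VMA itself): static linking is only one of the import
+       paths handled by ImportVulkanFunctions. With VMA_DYNAMIC_VULKAN_FUNCTIONS the caller
+       supplies just the two loader entry points and VMA fetches everything else itself:
+
+           VmaVulkanFunctions vulkanFunctions = {};
+           vulkanFunctions.vkGetInstanceProcAddr = vkGetInstanceProcAddr;
+           vulkanFunctions.vkGetDeviceProcAddr = vkGetDeviceProcAddr;
+
+           VmaAllocatorCreateInfo allocatorCreateInfo = {};
+           allocatorCreateInfo.instance = instance;
+           allocatorCreateInfo.physicalDevice = physicalDevice;
+           allocatorCreateInfo.device = device;
+           allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions;
+
+           VmaAllocator allocator;
+           vmaCreateAllocator(&allocatorCreateInfo, &allocator);
+    */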
m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory; + m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory; + m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements; + m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements; + m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer; + m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer; + m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage; + m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage; + m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer; + + // Vulkan 1.1 +#if VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2; + m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2; + m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2; + m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2; + m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2; + } +#endif + +#if VMA_VULKAN_VERSION >= 1003000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0)) + { + m_VulkanFunctions.vkGetDeviceBufferMemoryRequirements = (PFN_vkGetDeviceBufferMemoryRequirements)vkGetDeviceBufferMemoryRequirements; + m_VulkanFunctions.vkGetDeviceImageMemoryRequirements = (PFN_vkGetDeviceImageMemoryRequirements)vkGetDeviceImageMemoryRequirements; + } +#endif +} + +#endif // VMA_STATIC_VULKAN_FUNCTIONS == 1 + +void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions) +{ + VMA_ASSERT(pVulkanFunctions != VMA_NULL); + +#define VMA_COPY_IF_NOT_NULL(funcName) \ + if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; + + VMA_COPY_IF_NOT_NULL(vkGetInstanceProcAddr); + VMA_COPY_IF_NOT_NULL(vkGetDeviceProcAddr); + VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties); + VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties); + VMA_COPY_IF_NOT_NULL(vkAllocateMemory); + VMA_COPY_IF_NOT_NULL(vkFreeMemory); + VMA_COPY_IF_NOT_NULL(vkMapMemory); + VMA_COPY_IF_NOT_NULL(vkUnmapMemory); + VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges); + VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges); + VMA_COPY_IF_NOT_NULL(vkBindBufferMemory); + VMA_COPY_IF_NOT_NULL(vkBindImageMemory); + VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements); + VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements); + VMA_COPY_IF_NOT_NULL(vkCreateBuffer); + VMA_COPY_IF_NOT_NULL(vkDestroyBuffer); + VMA_COPY_IF_NOT_NULL(vkCreateImage); + VMA_COPY_IF_NOT_NULL(vkDestroyImage); + VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer); + +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR); + VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR); +#endif + +#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 + VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR); + VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR); +#endif + +#if VMA_MEMORY_BUDGET + VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR); +#endif + +#if VMA_VULKAN_VERSION >= 1003000 + 
VMA_COPY_IF_NOT_NULL(vkGetDeviceBufferMemoryRequirements); + VMA_COPY_IF_NOT_NULL(vkGetDeviceImageMemoryRequirements); +#endif + +#undef VMA_COPY_IF_NOT_NULL +} + +#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 + +void VmaAllocator_T::ImportVulkanFunctions_Dynamic() +{ + VMA_ASSERT(m_VulkanFunctions.vkGetInstanceProcAddr && m_VulkanFunctions.vkGetDeviceProcAddr && + "To use VMA_DYNAMIC_VULKAN_FUNCTIONS in new versions of VMA you now have to pass " + "VmaVulkanFunctions::vkGetInstanceProcAddr and vkGetDeviceProcAddr as VmaAllocatorCreateInfo::pVulkanFunctions. " + "Other members can be null."); + +#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \ + if(m_VulkanFunctions.memberName == VMA_NULL) \ + m_VulkanFunctions.memberName = \ + (functionPointerType)m_VulkanFunctions.vkGetInstanceProcAddr(m_hInstance, functionNameString); +#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \ + if(m_VulkanFunctions.memberName == VMA_NULL) \ + m_VulkanFunctions.memberName = \ + (functionPointerType)m_VulkanFunctions.vkGetDeviceProcAddr(m_hDevice, functionNameString); + + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties"); + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties"); + VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory"); + VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory"); + VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory"); + VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory"); + VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges"); + VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges"); + VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory"); + VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory"); + VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements"); + VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements"); + VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer"); + VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer"); + VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage"); + VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage"); + VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer"); + +#if VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2, "vkGetBufferMemoryRequirements2"); + VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2, "vkGetImageMemoryRequirements2"); + VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2, "vkBindBufferMemory2"); + VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2, "vkBindImageMemory2"); + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2, "vkGetPhysicalDeviceMemoryProperties2"); + } +#endif + +#if VMA_DEDICATED_ALLOCATION + if(m_UseKhrDedicatedAllocation) + { + 
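// Note: on a Vulkan 1.0 device that enables VK_KHR_dedicated_allocation, the
+ // core-1.1 fetches above left these slots null, so the same slots are
+ // re-fetched here under the "...2KHR" names exported by the extension.
+ 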
VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR"); + VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR"); + } +#endif + +#if VMA_BIND_MEMORY2 + if(m_UseKhrBindMemory2) + { + VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR"); + VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR"); + } +#endif // #if VMA_BIND_MEMORY2 + +#if VMA_MEMORY_BUDGET + if(m_UseExtMemoryBudget) + { + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR"); + } +#endif // #if VMA_MEMORY_BUDGET + +#if VMA_VULKAN_VERSION >= 1003000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0)) + { + VMA_FETCH_DEVICE_FUNC(vkGetDeviceBufferMemoryRequirements, PFN_vkGetDeviceBufferMemoryRequirements, "vkGetDeviceBufferMemoryRequirements"); + VMA_FETCH_DEVICE_FUNC(vkGetDeviceImageMemoryRequirements, PFN_vkGetDeviceImageMemoryRequirements, "vkGetDeviceImageMemoryRequirements"); + } +#endif + +#undef VMA_FETCH_DEVICE_FUNC +#undef VMA_FETCH_INSTANCE_FUNC +} + +#endif // VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 + +void VmaAllocator_T::ValidateVulkanFunctions() +{ + VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL); + +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation) + { + VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL); + } +#endif + +#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2) + { + VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL); + } +#endif + +#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 + if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL); + } +#endif + +#if VMA_VULKAN_VERSION >= 1003000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0)) + { + VMA_ASSERT(m_VulkanFunctions.vkGetDeviceBufferMemoryRequirements != VMA_NULL); + 
VMA_ASSERT(m_VulkanFunctions.vkGetDeviceImageMemoryRequirements != VMA_NULL);
+ }
+#endif
+}
+
+VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
+{
+ const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
+ const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
+ const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
+ return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
+}
+
+VkResult VmaAllocator_T::AllocateMemoryOfType(
+ VmaPool pool,
+ VkDeviceSize size,
+ VkDeviceSize alignment,
+ bool dedicatedPreferred,
+ VkBuffer dedicatedBuffer,
+ VkImage dedicatedImage,
+ VkFlags dedicatedBufferImageUsage,
+ const VmaAllocationCreateInfo& createInfo,
+ uint32_t memTypeIndex,
+ VmaSuballocationType suballocType,
+ VmaDedicatedAllocationList& dedicatedAllocations,
+ VmaBlockVector& blockVector,
+ size_t allocationCount,
+ VmaAllocation* pAllocations)
+{
+ VMA_ASSERT(pAllocations != VMA_NULL);
+ VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
+
+ VmaAllocationCreateInfo finalCreateInfo = createInfo;
+ VkResult res = CalcMemTypeParams(
+ finalCreateInfo,
+ memTypeIndex,
+ size,
+ allocationCount);
+ if(res != VK_SUCCESS)
+ return res;
+
+ if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
+ {
+ return AllocateDedicatedMemory(
+ pool,
+ size,
+ suballocType,
+ dedicatedAllocations,
+ memTypeIndex,
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
+ (finalCreateInfo.flags &
+ (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0,
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT) != 0,
+ finalCreateInfo.pUserData,
+ finalCreateInfo.priority,
+ dedicatedBuffer,
+ dedicatedImage,
+ dedicatedBufferImageUsage,
+ allocationCount,
+ pAllocations,
+ blockVector.GetAllocationNextPtr());
+ }
+ else
+ {
+ const bool canAllocateDedicated =
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
+ (pool == VK_NULL_HANDLE || !blockVector.HasExplicitBlockSize());
+
+ if(canAllocateDedicated)
+ {
+ // Heuristics: Allocate dedicated memory if requested size is greater than half of preferred block size.
+ if(size > blockVector.GetPreferredBlockSize() / 2)
+ {
+ dedicatedPreferred = true;
+ }
+ // Protection against creating each allocation as dedicated when we reach or exceed heap size/budget,
+ // which can quickly deplete maxMemoryAllocationCount: Don't prefer dedicated allocations when above
+ // 3/4 of the maximum allocation count.
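+ // Illustrative numbers for the two rules above (not normative): with the
+ // default 256 MiB preferred block size, a 160 MiB request flips
+ // dedicatedPreferred to true; if the process already holds, say, 3100 of a
+ // 4096 maxMemoryAllocationCount limit, the check below drops that preference.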
+ if(m_DeviceMemoryCount.load() > m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount * 3 / 4)
+ {
+ dedicatedPreferred = false;
+ }
+
+ if(dedicatedPreferred)
+ {
+ res = AllocateDedicatedMemory(
+ pool,
+ size,
+ suballocType,
+ dedicatedAllocations,
+ memTypeIndex,
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
+ (finalCreateInfo.flags &
+ (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0,
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT) != 0,
+ finalCreateInfo.pUserData,
+ finalCreateInfo.priority,
+ dedicatedBuffer,
+ dedicatedImage,
+ dedicatedBufferImageUsage,
+ allocationCount,
+ pAllocations,
+ blockVector.GetAllocationNextPtr());
+ if(res == VK_SUCCESS)
+ {
+ // Succeeded: AllocateDedicatedMemory function already filled pMemory, nothing more to do here.
+ VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
+ return VK_SUCCESS;
+ }
+ }
+ }
+
+ res = blockVector.Allocate(
+ size,
+ alignment,
+ finalCreateInfo,
+ suballocType,
+ allocationCount,
+ pAllocations);
+ if(res == VK_SUCCESS)
+ return VK_SUCCESS;
+
+ // Try dedicated memory.
+ if(canAllocateDedicated && !dedicatedPreferred)
+ {
+ res = AllocateDedicatedMemory(
+ pool,
+ size,
+ suballocType,
+ dedicatedAllocations,
+ memTypeIndex,
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
+ (finalCreateInfo.flags &
+ (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0,
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT) != 0,
+ finalCreateInfo.pUserData,
+ finalCreateInfo.priority,
+ dedicatedBuffer,
+ dedicatedImage,
+ dedicatedBufferImageUsage,
+ allocationCount,
+ pAllocations,
+ blockVector.GetAllocationNextPtr());
+ if(res == VK_SUCCESS)
+ {
+ // Succeeded: AllocateDedicatedMemory function already filled pMemory, nothing more to do here.
+ VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
+ return VK_SUCCESS;
+ }
+ }
+ // Everything failed: Return error code.
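+ // (Recap of the fallback order tried above: dedicated memory first when
+ // preferred, then sub-allocation from the block vector, then dedicated as a
+ // last resort when it was not preferred; only after all of these fail is the
+ // error code propagated to the caller.)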
+ VMA_DEBUG_LOG(" vkAllocateMemory FAILED"); + return res; + } +} + +VkResult VmaAllocator_T::AllocateDedicatedMemory( + VmaPool pool, + VkDeviceSize size, + VmaSuballocationType suballocType, + VmaDedicatedAllocationList& dedicatedAllocations, + uint32_t memTypeIndex, + bool map, + bool isUserDataString, + bool isMappingAllowed, + bool canAliasMemory, + void* pUserData, + float priority, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VkFlags dedicatedBufferImageUsage, + size_t allocationCount, + VmaAllocation* pAllocations, + const void* pNextChain) +{ + VMA_ASSERT(allocationCount > 0 && pAllocations); + + VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO }; + allocInfo.memoryTypeIndex = memTypeIndex; + allocInfo.allocationSize = size; + allocInfo.pNext = pNextChain; + +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR }; + if(!canAliasMemory) + { + if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + if(dedicatedBuffer != VK_NULL_HANDLE) + { + VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE); + dedicatedAllocInfo.buffer = dedicatedBuffer; + VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo); + } + else if(dedicatedImage != VK_NULL_HANDLE) + { + dedicatedAllocInfo.image = dedicatedImage; + VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo); + } + } + } +#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + +#if VMA_BUFFER_DEVICE_ADDRESS + VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR }; + if(m_UseKhrBufferDeviceAddress) + { + bool canContainBufferWithDeviceAddress = true; + if(dedicatedBuffer != VK_NULL_HANDLE) + { + canContainBufferWithDeviceAddress = dedicatedBufferImageUsage == UINT32_MAX || // Usage flags unknown + (dedicatedBufferImageUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0; + } + else if(dedicatedImage != VK_NULL_HANDLE) + { + canContainBufferWithDeviceAddress = false; + } + if(canContainBufferWithDeviceAddress) + { + allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR; + VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo); + } + } +#endif // #if VMA_BUFFER_DEVICE_ADDRESS + +#if VMA_MEMORY_PRIORITY + VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT }; + if(m_UseExtMemoryPriority) + { + VMA_ASSERT(priority >= 0.f && priority <= 1.f); + priorityInfo.priority = priority; + VmaPnextChainPushFront(&allocInfo, &priorityInfo); + } +#endif // #if VMA_MEMORY_PRIORITY + +#if VMA_EXTERNAL_MEMORY + // Attach VkExportMemoryAllocateInfoKHR if necessary. 
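+ // All optional structs above (dedicated info, allocate flags, priority) were
+ // added with VmaPnextChainPushFront, which links each one in as the new head
+ // of allocInfo.pNext; the resulting chain order is the reverse of the push
+ // order, which is valid since Vulkan imposes no ordering on pNext structs.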
+ VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR }; + exportMemoryAllocInfo.handleTypes = GetExternalMemoryHandleTypeFlags(memTypeIndex); + if(exportMemoryAllocInfo.handleTypes != 0) + { + VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo); + } +#endif // #if VMA_EXTERNAL_MEMORY + + size_t allocIndex; + VkResult res = VK_SUCCESS; + for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex) + { + res = AllocateDedicatedMemoryPage( + pool, + size, + suballocType, + memTypeIndex, + allocInfo, + map, + isUserDataString, + isMappingAllowed, + pUserData, + pAllocations + allocIndex); + if(res != VK_SUCCESS) + { + break; + } + } + + if(res == VK_SUCCESS) + { + for (allocIndex = 0; allocIndex < allocationCount; ++allocIndex) + { + dedicatedAllocations.Register(pAllocations[allocIndex]); + } + VMA_DEBUG_LOG(" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex); + } + else + { + // Free all already created allocations. + while(allocIndex--) + { + VmaAllocation currAlloc = pAllocations[allocIndex]; + VkDeviceMemory hMemory = currAlloc->GetMemory(); + + /* + There is no need to call this, because Vulkan spec allows to skip vkUnmapMemory + before vkFreeMemory. + + if(currAlloc->GetMappedData() != VMA_NULL) + { + (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory); + } + */ + + FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory); + m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize()); + m_AllocationObjectAllocator.Free(currAlloc); + } + + memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount); + } + + return res; +} + +VkResult VmaAllocator_T::AllocateDedicatedMemoryPage( + VmaPool pool, + VkDeviceSize size, + VmaSuballocationType suballocType, + uint32_t memTypeIndex, + const VkMemoryAllocateInfo& allocInfo, + bool map, + bool isUserDataString, + bool isMappingAllowed, + void* pUserData, + VmaAllocation* pAllocation) +{ + VkDeviceMemory hMemory = VK_NULL_HANDLE; + VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory); + if(res < 0) + { + VMA_DEBUG_LOG(" vkAllocateMemory FAILED"); + return res; + } + + void* pMappedData = VMA_NULL; + if(map) + { + res = (*m_VulkanFunctions.vkMapMemory)( + m_hDevice, + hMemory, + 0, + VK_WHOLE_SIZE, + 0, + &pMappedData); + if(res < 0) + { + VMA_DEBUG_LOG(" vkMapMemory FAILED"); + FreeVulkanMemory(memTypeIndex, size, hMemory); + return res; + } + } + + *pAllocation = m_AllocationObjectAllocator.Allocate(isMappingAllowed); + (*pAllocation)->InitDedicatedAllocation(pool, memTypeIndex, hMemory, suballocType, pMappedData, size); + if (isUserDataString) + (*pAllocation)->SetName(this, (const char*)pUserData); + else + (*pAllocation)->SetUserData(this, pUserData); + m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size); + if(VMA_DEBUG_INITIALIZE_ALLOCATIONS) + { + FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED); + } + + return VK_SUCCESS; +} + +void VmaAllocator_T::GetBufferMemoryRequirements( + VkBuffer hBuffer, + VkMemoryRequirements& memReq, + bool& requiresDedicatedAllocation, + bool& prefersDedicatedAllocation) const +{ +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR }; + memReqInfo.buffer = hBuffer; + + VkMemoryDedicatedRequirementsKHR memDedicatedReq = { 
VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR }; + + VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR }; + VmaPnextChainPushFront(&memReq2, &memDedicatedReq); + + (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2); + + memReq = memReq2.memoryRequirements; + requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE); + prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE); + } + else +#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + { + (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq); + requiresDedicatedAllocation = false; + prefersDedicatedAllocation = false; + } +} + +void VmaAllocator_T::GetImageMemoryRequirements( + VkImage hImage, + VkMemoryRequirements& memReq, + bool& requiresDedicatedAllocation, + bool& prefersDedicatedAllocation) const +{ +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR }; + memReqInfo.image = hImage; + + VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR }; + + VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR }; + VmaPnextChainPushFront(&memReq2, &memDedicatedReq); + + (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2); + + memReq = memReq2.memoryRequirements; + requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE); + prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE); + } + else +#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + { + (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq); + requiresDedicatedAllocation = false; + prefersDedicatedAllocation = false; + } +} + +VkResult VmaAllocator_T::FindMemoryTypeIndex( + uint32_t memoryTypeBits, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + VkFlags bufImgUsage, + uint32_t* pMemoryTypeIndex) const +{ + memoryTypeBits &= GetGlobalMemoryTypeBits(); + + if(pAllocationCreateInfo->memoryTypeBits != 0) + { + memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits; + } + + VkMemoryPropertyFlags requiredFlags = 0, preferredFlags = 0, notPreferredFlags = 0; + if(!FindMemoryPreferences( + IsIntegratedGpu(), + *pAllocationCreateInfo, + bufImgUsage, + requiredFlags, preferredFlags, notPreferredFlags)) + { + return VK_ERROR_FEATURE_NOT_PRESENT; + } + + *pMemoryTypeIndex = UINT32_MAX; + uint32_t minCost = UINT32_MAX; + for(uint32_t memTypeIndex = 0, memTypeBit = 1; + memTypeIndex < GetMemoryTypeCount(); + ++memTypeIndex, memTypeBit <<= 1) + { + // This memory type is acceptable according to memoryTypeBits bitmask. + if((memTypeBit & memoryTypeBits) != 0) + { + const VkMemoryPropertyFlags currFlags = + m_MemProps.memoryTypes[memTypeIndex].propertyFlags; + // This memory type contains requiredFlags. + if((requiredFlags & ~currFlags) == 0) + { + // Calculate cost as number of bits from preferredFlags not present in this memory type. + uint32_t currCost = VMA_COUNT_BITS_SET(preferredFlags & ~currFlags) + + VMA_COUNT_BITS_SET(currFlags & notPreferredFlags); + // Remember memory type with lowest cost. 
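+ // Worked example (illustrative values): with preferredFlags = HOST_COHERENT
+ // and notPreferredFlags = DEVICE_LOCAL, a HOST_VISIBLE|HOST_COHERENT type
+ // costs 0, while a DEVICE_LOCAL|HOST_VISIBLE type costs 1 (missing preferred)
+ // + 1 (not preferred) = 2, so the first type wins; cost 0 ends the scan early.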
+ if(currCost < minCost) + { + *pMemoryTypeIndex = memTypeIndex; + if(currCost == 0) + { + return VK_SUCCESS; + } + minCost = currCost; + } + } + } + } + return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT; +} + +VkResult VmaAllocator_T::CalcMemTypeParams( + VmaAllocationCreateInfo& inoutCreateInfo, + uint32_t memTypeIndex, + VkDeviceSize size, + size_t allocationCount) +{ + // If memory type is not HOST_VISIBLE, disable MAPPED. + if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 && + (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0) + { + inoutCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT; + } + + if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 && + (inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0) + { + const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex); + VmaBudget heapBudget = {}; + GetHeapBudgets(&heapBudget, heapIndex, 1); + if(heapBudget.usage + size * allocationCount > heapBudget.budget) + { + return VK_ERROR_OUT_OF_DEVICE_MEMORY; + } + } + return VK_SUCCESS; +} + +VkResult VmaAllocator_T::CalcAllocationParams( + VmaAllocationCreateInfo& inoutCreateInfo, + bool dedicatedRequired, + bool dedicatedPreferred) +{ + VMA_ASSERT((inoutCreateInfo.flags & + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT) && + "Specifying both flags VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT and VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT is incorrect."); + VMA_ASSERT((((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT) == 0 || + (inoutCreateInfo.flags & (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0)) && + "Specifying VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT requires also VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT."); + if(inoutCreateInfo.usage == VMA_MEMORY_USAGE_AUTO || inoutCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE || inoutCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_HOST) + { + if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0) + { + VMA_ASSERT((inoutCreateInfo.flags & (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0 && + "When using VMA_ALLOCATION_CREATE_MAPPED_BIT and usage = VMA_MEMORY_USAGE_AUTO*, you must also specify VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT."); + } + } + + // If memory is lazily allocated, it should be always dedicated. 
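+ // (Lazily allocated memory typically backs transient attachments that may
+ // never be written out to physical memory, so sub-allocating it from a
+ // shared block would defeat its purpose.)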
+ if(dedicatedRequired || + inoutCreateInfo.usage == VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED) + { + inoutCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; + } + + if(inoutCreateInfo.pool != VK_NULL_HANDLE) + { + if(inoutCreateInfo.pool->m_BlockVector.HasExplicitBlockSize() && + (inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0) + { + VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT while current custom pool doesn't support dedicated allocations."); + return VK_ERROR_FEATURE_NOT_PRESENT; + } + inoutCreateInfo.priority = inoutCreateInfo.pool->m_BlockVector.GetPriority(); + } + + if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 && + (inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0) + { + VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense."); + return VK_ERROR_FEATURE_NOT_PRESENT; + } + + if(VMA_DEBUG_ALWAYS_DEDICATED_MEMORY && + (inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0) + { + inoutCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; + } + + // Non-auto USAGE values imply HOST_ACCESS flags. + // And so does VMA_MEMORY_USAGE_UNKNOWN because it is used with custom pools. + // Which specific flag is used doesn't matter. They change things only when used with VMA_MEMORY_USAGE_AUTO*. + // Otherwise they just protect from assert on mapping. + if(inoutCreateInfo.usage != VMA_MEMORY_USAGE_AUTO && + inoutCreateInfo.usage != VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE && + inoutCreateInfo.usage != VMA_MEMORY_USAGE_AUTO_PREFER_HOST) + { + if((inoutCreateInfo.flags & (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) == 0) + { + inoutCreateInfo.flags |= VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT; + } + } + + return VK_SUCCESS; +} + +VkResult VmaAllocator_T::AllocateMemory( + const VkMemoryRequirements& vkMemReq, + bool requiresDedicatedAllocation, + bool prefersDedicatedAllocation, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VkFlags dedicatedBufferImageUsage, + const VmaAllocationCreateInfo& createInfo, + VmaSuballocationType suballocType, + size_t allocationCount, + VmaAllocation* pAllocations) +{ + memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount); + + VMA_ASSERT(VmaIsPow2(vkMemReq.alignment)); + + if(vkMemReq.size == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + + VmaAllocationCreateInfo createInfoFinal = createInfo; + VkResult res = CalcAllocationParams(createInfoFinal, requiresDedicatedAllocation, prefersDedicatedAllocation); + if(res != VK_SUCCESS) + return res; + + if(createInfoFinal.pool != VK_NULL_HANDLE) + { + VmaBlockVector& blockVector = createInfoFinal.pool->m_BlockVector; + return AllocateMemoryOfType( + createInfoFinal.pool, + vkMemReq.size, + vkMemReq.alignment, + prefersDedicatedAllocation, + dedicatedBuffer, + dedicatedImage, + dedicatedBufferImageUsage, + createInfoFinal, + blockVector.GetMemoryTypeIndex(), + suballocType, + createInfoFinal.pool->m_DedicatedAllocations, + blockVector, + allocationCount, + pAllocations); + } + else + { + // Bit mask of memory Vulkan types acceptable for this allocation. + uint32_t memoryTypeBits = vkMemReq.memoryTypeBits; + uint32_t memTypeIndex = UINT32_MAX; + res = FindMemoryTypeIndex(memoryTypeBits, &createInfoFinal, dedicatedBufferImageUsage, &memTypeIndex); + // Can't find any single memory type matching requirements. 
res is VK_ERROR_FEATURE_NOT_PRESENT. + if(res != VK_SUCCESS) + return res; + do + { + VmaBlockVector* blockVector = m_pBlockVectors[memTypeIndex]; + VMA_ASSERT(blockVector && "Trying to use unsupported memory type!"); + res = AllocateMemoryOfType( + VK_NULL_HANDLE, + vkMemReq.size, + vkMemReq.alignment, + requiresDedicatedAllocation || prefersDedicatedAllocation, + dedicatedBuffer, + dedicatedImage, + dedicatedBufferImageUsage, + createInfoFinal, + memTypeIndex, + suballocType, + m_DedicatedAllocations[memTypeIndex], + *blockVector, + allocationCount, + pAllocations); + // Allocation succeeded + if(res == VK_SUCCESS) + return VK_SUCCESS; + + // Remove old memTypeIndex from list of possibilities. + memoryTypeBits &= ~(1u << memTypeIndex); + // Find alternative memTypeIndex. + res = FindMemoryTypeIndex(memoryTypeBits, &createInfoFinal, dedicatedBufferImageUsage, &memTypeIndex); + } while(res == VK_SUCCESS); + + // No other matching memory type index could be found. + // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once. + return VK_ERROR_OUT_OF_DEVICE_MEMORY; + } +} + +void VmaAllocator_T::FreeMemory( + size_t allocationCount, + const VmaAllocation* pAllocations) +{ + VMA_ASSERT(pAllocations); + + for(size_t allocIndex = allocationCount; allocIndex--; ) + { + VmaAllocation allocation = pAllocations[allocIndex]; + + if(allocation != VK_NULL_HANDLE) + { + if(VMA_DEBUG_INITIALIZE_ALLOCATIONS) + { + FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED); + } + + allocation->FreeName(this); + + switch(allocation->GetType()) + { + case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: + { + VmaBlockVector* pBlockVector = VMA_NULL; + VmaPool hPool = allocation->GetParentPool(); + if(hPool != VK_NULL_HANDLE) + { + pBlockVector = &hPool->m_BlockVector; + } + else + { + const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); + pBlockVector = m_pBlockVectors[memTypeIndex]; + VMA_ASSERT(pBlockVector && "Trying to free memory of unsupported type!"); + } + pBlockVector->Free(allocation); + } + break; + case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: + FreeDedicatedMemory(allocation); + break; + default: + VMA_ASSERT(0); + } + } + } +} + +void VmaAllocator_T::CalculateStatistics(VmaTotalStatistics* pStats) +{ + // Initialize. + VmaClearDetailedStatistics(pStats->total); + for(uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) + VmaClearDetailedStatistics(pStats->memoryType[i]); + for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) + VmaClearDetailedStatistics(pStats->memoryHeap[i]); + + // Process default pools. + for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex]; + if (pBlockVector != VMA_NULL) + pBlockVector->AddDetailedStatistics(pStats->memoryType[memTypeIndex]); + } + + // Process custom pools. + { + VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex); + for(VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool)) + { + VmaBlockVector& blockVector = pool->m_BlockVector; + const uint32_t memTypeIndex = blockVector.GetMemoryTypeIndex(); + blockVector.AddDetailedStatistics(pStats->memoryType[memTypeIndex]); + pool->m_DedicatedAllocations.AddDetailedStatistics(pStats->memoryType[memTypeIndex]); + } + } + + // Process dedicated allocations. 
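+ // Caller-side sketch of this path through the public API (hypothetical use):
+ //   VmaTotalStatistics stats;
+ //   vmaCalculateStatistics(allocator, &stats);
+ //   // stats.memoryType[i] feeds stats.memoryHeap[h] feeds stats.total below.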
+ for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + m_DedicatedAllocations[memTypeIndex].AddDetailedStatistics(pStats->memoryType[memTypeIndex]); + } + + // Sum from memory types to memory heaps. + for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + const uint32_t memHeapIndex = m_MemProps.memoryTypes[memTypeIndex].heapIndex; + VmaAddDetailedStatistics(pStats->memoryHeap[memHeapIndex], pStats->memoryType[memTypeIndex]); + } + + // Sum from memory heaps to total. + for(uint32_t memHeapIndex = 0; memHeapIndex < GetMemoryHeapCount(); ++memHeapIndex) + VmaAddDetailedStatistics(pStats->total, pStats->memoryHeap[memHeapIndex]); + + VMA_ASSERT(pStats->total.statistics.allocationCount == 0 || + pStats->total.allocationSizeMax >= pStats->total.allocationSizeMin); + VMA_ASSERT(pStats->total.unusedRangeCount == 0 || + pStats->total.unusedRangeSizeMax >= pStats->total.unusedRangeSizeMin); +} + +void VmaAllocator_T::GetHeapBudgets(VmaBudget* outBudgets, uint32_t firstHeap, uint32_t heapCount) +{ +#if VMA_MEMORY_BUDGET + if(m_UseExtMemoryBudget) + { + if(m_Budget.m_OperationsSinceBudgetFetch < 30) + { + VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex); + for(uint32_t i = 0; i < heapCount; ++i, ++outBudgets) + { + const uint32_t heapIndex = firstHeap + i; + + outBudgets->statistics.blockCount = m_Budget.m_BlockCount[heapIndex]; + outBudgets->statistics.allocationCount = m_Budget.m_AllocationCount[heapIndex]; + outBudgets->statistics.blockBytes = m_Budget.m_BlockBytes[heapIndex]; + outBudgets->statistics.allocationBytes = m_Budget.m_AllocationBytes[heapIndex]; + + if(m_Budget.m_VulkanUsage[heapIndex] + outBudgets->statistics.blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex]) + { + outBudgets->usage = m_Budget.m_VulkanUsage[heapIndex] + + outBudgets->statistics.blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex]; + } + else + { + outBudgets->usage = 0; + } + + // Have to take MIN with heap size because explicit HeapSizeLimit is included in it. + outBudgets->budget = VMA_MIN( + m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size); + } + } + else + { + UpdateVulkanBudget(); // Outside of mutex lock + GetHeapBudgets(outBudgets, firstHeap, heapCount); // Recursion + } + } + else +#endif + { + for(uint32_t i = 0; i < heapCount; ++i, ++outBudgets) + { + const uint32_t heapIndex = firstHeap + i; + + outBudgets->statistics.blockCount = m_Budget.m_BlockCount[heapIndex]; + outBudgets->statistics.allocationCount = m_Budget.m_AllocationCount[heapIndex]; + outBudgets->statistics.blockBytes = m_Budget.m_BlockBytes[heapIndex]; + outBudgets->statistics.allocationBytes = m_Budget.m_AllocationBytes[heapIndex]; + + outBudgets->usage = outBudgets->statistics.blockBytes; + outBudgets->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics. 
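+ // e.g. without VK_EXT_memory_budget, an 8 GiB heap reports a fixed 6.4 GiB
+ // budget estimate, and usage counts only the blocks VMA itself has allocated.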
+ } + } +} + +void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo) +{ + pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex(); + pAllocationInfo->deviceMemory = hAllocation->GetMemory(); + pAllocationInfo->offset = hAllocation->GetOffset(); + pAllocationInfo->size = hAllocation->GetSize(); + pAllocationInfo->pMappedData = hAllocation->GetMappedData(); + pAllocationInfo->pUserData = hAllocation->GetUserData(); + pAllocationInfo->pName = hAllocation->GetName(); +} + +VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool) +{ + VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags); + + VmaPoolCreateInfo newCreateInfo = *pCreateInfo; + + // Protection against uninitialized new structure member. If garbage data are left there, this pointer dereference would crash. + if(pCreateInfo->pMemoryAllocateNext) + { + VMA_ASSERT(((const VkBaseInStructure*)pCreateInfo->pMemoryAllocateNext)->sType != 0); + } + + if(newCreateInfo.maxBlockCount == 0) + { + newCreateInfo.maxBlockCount = SIZE_MAX; + } + if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + // Memory type index out of range or forbidden. + if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() || + ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0) + { + return VK_ERROR_FEATURE_NOT_PRESENT; + } + if(newCreateInfo.minAllocationAlignment > 0) + { + VMA_ASSERT(VmaIsPow2(newCreateInfo.minAllocationAlignment)); + } + + const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex); + + *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize); + + VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks(); + if(res != VK_SUCCESS) + { + vma_delete(this, *pPool); + *pPool = VMA_NULL; + return res; + } + + // Add to m_Pools. + { + VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex); + (*pPool)->SetId(m_NextPoolId++); + m_Pools.PushBack(*pPool); + } + + return VK_SUCCESS; +} + +void VmaAllocator_T::DestroyPool(VmaPool pool) +{ + // Remove from m_Pools. + { + VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex); + m_Pools.Remove(pool); + } + + vma_delete(this, pool); +} + +void VmaAllocator_T::GetPoolStatistics(VmaPool pool, VmaStatistics* pPoolStats) +{ + VmaClearStatistics(*pPoolStats); + pool->m_BlockVector.AddStatistics(*pPoolStats); + pool->m_DedicatedAllocations.AddStatistics(*pPoolStats); +} + +void VmaAllocator_T::CalculatePoolStatistics(VmaPool pool, VmaDetailedStatistics* pPoolStats) +{ + VmaClearDetailedStatistics(*pPoolStats); + pool->m_BlockVector.AddDetailedStatistics(*pPoolStats); + pool->m_DedicatedAllocations.AddDetailedStatistics(*pPoolStats); +} + +void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex) +{ + m_CurrentFrameIndex.store(frameIndex); + +#if VMA_MEMORY_BUDGET + if(m_UseExtMemoryBudget) + { + UpdateVulkanBudget(); + } +#endif // #if VMA_MEMORY_BUDGET +} + +VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool) +{ + return hPool->m_BlockVector.CheckCorruption(); +} + +VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits) +{ + VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT; + + // Process default pools. 
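+ // Result semantics: a final VK_ERROR_FEATURE_NOT_PRESENT means no scanned
+ // block vector had corruption detection enabled (it requires
+ // VMA_DEBUG_DETECT_CORRUPTION and a nonzero VMA_DEBUG_MARGIN); any other
+ // error reports actual corruption and aborts the scan immediately.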
+ for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+ {
+ VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
+ if(pBlockVector != VMA_NULL)
+ {
+ VkResult localRes = pBlockVector->CheckCorruption();
+ switch(localRes)
+ {
+ case VK_ERROR_FEATURE_NOT_PRESENT:
+ break;
+ case VK_SUCCESS:
+ finalRes = VK_SUCCESS;
+ break;
+ default:
+ return localRes;
+ }
+ }
+ }
+
+ // Process custom pools.
+ {
+ VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
+ for(VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool))
+ {
+ if(((1u << pool->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
+ {
+ VkResult localRes = pool->m_BlockVector.CheckCorruption();
+ switch(localRes)
+ {
+ case VK_ERROR_FEATURE_NOT_PRESENT:
+ break;
+ case VK_SUCCESS:
+ finalRes = VK_SUCCESS;
+ break;
+ default:
+ return localRes;
+ }
+ }
+ }
+ }
+
+ return finalRes;
+}
+
+VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
+{
+ AtomicTransactionalIncrement<VMA_ATOMIC_UINT32> deviceMemoryCountIncrement;
+ const uint64_t prevDeviceMemoryCount = deviceMemoryCountIncrement.Increment(&m_DeviceMemoryCount);
+#if VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT
+ if(prevDeviceMemoryCount >= m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount)
+ {
+ return VK_ERROR_TOO_MANY_OBJECTS;
+ }
+#endif
+
+ const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
+
+ // HeapSizeLimit is in effect for this heap.
+ if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
+ {
+ const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
+ VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
+ for(;;)
+ {
+ const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
+ if(blockBytesAfterAllocation > heapSize)
+ {
+ return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ }
+ if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
+ {
+ break;
+ }
+ }
+ }
+ else
+ {
+ m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
+ }
+ ++m_Budget.m_BlockCount[heapIndex];
+
+ // VULKAN CALL vkAllocateMemory.
+ VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
+
+ if(res == VK_SUCCESS)
+ {
+#if VMA_MEMORY_BUDGET
+ ++m_Budget.m_OperationsSinceBudgetFetch;
+#endif
+
+ // Informative callback.
+ if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
+ {
+ (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData);
+ }
+
+ deviceMemoryCountIncrement.Commit();
+ }
+ else
+ {
+ --m_Budget.m_BlockCount[heapIndex];
+ m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
+ }
+
+ return res;
+}
+
+void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
+{
+ // Informative callback.
+ if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
+ {
+ (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData);
+ }
+
+ // VULKAN CALL vkFreeMemory.
+ (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks()); + + const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType); + --m_Budget.m_BlockCount[heapIndex]; + m_Budget.m_BlockBytes[heapIndex] -= size; + + --m_DeviceMemoryCount; +} + +VkResult VmaAllocator_T::BindVulkanBuffer( + VkDeviceMemory memory, + VkDeviceSize memoryOffset, + VkBuffer buffer, + const void* pNext) +{ + if(pNext != VMA_NULL) + { +#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 + if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) && + m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL) + { + VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR }; + bindBufferMemoryInfo.pNext = pNext; + bindBufferMemoryInfo.buffer = buffer; + bindBufferMemoryInfo.memory = memory; + bindBufferMemoryInfo.memoryOffset = memoryOffset; + return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo); + } + else +#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 + { + return VK_ERROR_EXTENSION_NOT_PRESENT; + } + } + else + { + return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset); + } +} + +VkResult VmaAllocator_T::BindVulkanImage( + VkDeviceMemory memory, + VkDeviceSize memoryOffset, + VkImage image, + const void* pNext) +{ + if(pNext != VMA_NULL) + { +#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 + if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) && + m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL) + { + VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR }; + bindBufferMemoryInfo.pNext = pNext; + bindBufferMemoryInfo.image = image; + bindBufferMemoryInfo.memory = memory; + bindBufferMemoryInfo.memoryOffset = memoryOffset; + return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo); + } + else +#endif // #if VMA_BIND_MEMORY2 + { + return VK_ERROR_EXTENSION_NOT_PRESENT; + } + } + else + { + return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset); + } +} + +VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData) +{ + switch(hAllocation->GetType()) + { + case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: + { + VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock(); + char *pBytes = VMA_NULL; + VkResult res = pBlock->Map(this, 1, (void**)&pBytes); + if(res == VK_SUCCESS) + { + *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset(); + hAllocation->BlockAllocMap(); + } + return res; + } + case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: + return hAllocation->DedicatedAllocMap(this, ppData); + default: + VMA_ASSERT(0); + return VK_ERROR_MEMORY_MAP_FAILED; + } +} + +void VmaAllocator_T::Unmap(VmaAllocation hAllocation) +{ + switch(hAllocation->GetType()) + { + case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: + { + VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock(); + hAllocation->BlockAllocUnmap(); + pBlock->Unmap(this, 1); + } + break; + case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: + hAllocation->DedicatedAllocUnmap(this); + break; + default: + VMA_ASSERT(0); + } +} + +VkResult VmaAllocator_T::BindBufferMemory( + VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkBuffer hBuffer, + const void* pNext) +{ + VkResult res = VK_SUCCESS; + switch(hAllocation->GetType()) + { + case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: + res = BindVulkanBuffer(hAllocation->GetMemory(), 
allocationLocalOffset, hBuffer, pNext);
+ break;
+ case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+ {
+ VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
+ VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block.");
+ res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
+ break;
+ }
+ default:
+ VMA_ASSERT(0);
+ }
+ return res;
+}
+
+VkResult VmaAllocator_T::BindImageMemory(
+ VmaAllocation hAllocation,
+ VkDeviceSize allocationLocalOffset,
+ VkImage hImage,
+ const void* pNext)
+{
+ VkResult res = VK_SUCCESS;
+ switch(hAllocation->GetType())
+ {
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
+ break;
+ case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+ {
+ VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
+ VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block.");
+ res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
+ break;
+ }
+ default:
+ VMA_ASSERT(0);
+ }
+ return res;
+}
+
+VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
+ VmaAllocation hAllocation,
+ VkDeviceSize offset, VkDeviceSize size,
+ VMA_CACHE_OPERATION op)
+{
+ VkResult res = VK_SUCCESS;
+
+ VkMappedMemoryRange memRange = {};
+ if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
+ {
+ switch(op)
+ {
+ case VMA_CACHE_FLUSH:
+ res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
+ break;
+ case VMA_CACHE_INVALIDATE:
+ res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
+ break;
+ default:
+ VMA_ASSERT(0);
+ }
+ }
+ // else: Just ignore this call.
+ return res;
+}
+
+VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
+ uint32_t allocationCount,
+ const VmaAllocation* allocations,
+ const VkDeviceSize* offsets, const VkDeviceSize* sizes,
+ VMA_CACHE_OPERATION op)
+{
+ typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
+ typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
+ RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));
+
+ for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+ {
+ const VmaAllocation alloc = allocations[allocIndex];
+ const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
+ const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
+ VkMappedMemoryRange newRange;
+ if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
+ {
+ ranges.push_back(newRange);
+ }
+ }
+
+ VkResult res = VK_SUCCESS;
+ if(!ranges.empty())
+ {
+ switch(op)
+ {
+ case VMA_CACHE_FLUSH:
+ res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
+ break;
+ case VMA_CACHE_INVALIDATE:
+ res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
+ break;
+ default:
+ VMA_ASSERT(0);
+ }
+ }
+ // else: Just ignore this call.
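+ // (An empty range list means GetFlushOrInvalidateRange rejected every entry,
+ // i.e. each allocation lives in HOST_COHERENT memory or has zero size, so no
+ // vkFlush/vkInvalidate call is needed.)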
+ return res; +} + +void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation) +{ + VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + + const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); + VmaPool parentPool = allocation->GetParentPool(); + if(parentPool == VK_NULL_HANDLE) + { + // Default pool + m_DedicatedAllocations[memTypeIndex].Unregister(allocation); + } + else + { + // Custom pool + parentPool->m_DedicatedAllocations.Unregister(allocation); + } + + VkDeviceMemory hMemory = allocation->GetMemory(); + + /* + There is no need to call this, because Vulkan spec allows to skip vkUnmapMemory + before vkFreeMemory. + + if(allocation->GetMappedData() != VMA_NULL) + { + (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory); + } + */ + + FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory); + + m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize()); + m_AllocationObjectAllocator.Free(allocation); + + VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex); +} + +uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const +{ + VkBufferCreateInfo dummyBufCreateInfo; + VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo); + + uint32_t memoryTypeBits = 0; + + // Create buffer. + VkBuffer buf = VK_NULL_HANDLE; + VkResult res = (*GetVulkanFunctions().vkCreateBuffer)( + m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf); + if(res == VK_SUCCESS) + { + // Query for supported memory types. + VkMemoryRequirements memReq; + (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq); + memoryTypeBits = memReq.memoryTypeBits; + + // Destroy buffer. + (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks()); + } + + return memoryTypeBits; +} + +uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const +{ + // Make sure memory information is already fetched. + VMA_ASSERT(GetMemoryTypeCount() > 0); + + uint32_t memoryTypeBits = UINT32_MAX; + + if(!m_UseAmdDeviceCoherentMemory) + { + // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD. 
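+ // (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY is VMA's local copy of the
+ // extension's flag value, kept so this file still compiles against Vulkan
+ // headers that predate VK_AMD_device_coherent_memory.)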
+ for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0) + { + memoryTypeBits &= ~(1u << memTypeIndex); + } + } + } + + return memoryTypeBits; +} + +bool VmaAllocator_T::GetFlushOrInvalidateRange( + VmaAllocation allocation, + VkDeviceSize offset, VkDeviceSize size, + VkMappedMemoryRange& outRange) const +{ + const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); + if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex)) + { + const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize; + const VkDeviceSize allocationSize = allocation->GetSize(); + VMA_ASSERT(offset <= allocationSize); + + outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE; + outRange.pNext = VMA_NULL; + outRange.memory = allocation->GetMemory(); + + switch(allocation->GetType()) + { + case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: + outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize); + if(size == VK_WHOLE_SIZE) + { + outRange.size = allocationSize - outRange.offset; + } + else + { + VMA_ASSERT(offset + size <= allocationSize); + outRange.size = VMA_MIN( + VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize), + allocationSize - outRange.offset); + } + break; + case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: + { + // 1. Still within this allocation. + outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize); + if(size == VK_WHOLE_SIZE) + { + size = allocationSize - offset; + } + else + { + VMA_ASSERT(offset + size <= allocationSize); + } + outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize); + + // 2. Adjust to whole block. + const VkDeviceSize allocationOffset = allocation->GetOffset(); + VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0); + const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize(); + outRange.offset += allocationOffset; + outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset); + + break; + } + default: + VMA_ASSERT(0); + } + return true; + } + return false; +} + +#if VMA_MEMORY_BUDGET +void VmaAllocator_T::UpdateVulkanBudget() +{ + VMA_ASSERT(m_UseExtMemoryBudget); + + VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR }; + + VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT }; + VmaPnextChainPushFront(&memProps, &budgetProps); + + GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps); + + { + VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex); + + for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex) + { + m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex]; + m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex]; + m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load(); + + // Some bugged drivers return the budget incorrectly, e.g. 0 or much bigger than heap size. + if(m_Budget.m_VulkanBudget[heapIndex] == 0) + { + m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics. 
+ } + else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size) + { + m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size; + } + if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0) + { + m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex]; + } + } + m_Budget.m_OperationsSinceBudgetFetch = 0; + } +} +#endif // VMA_MEMORY_BUDGET + +void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern) +{ + if(VMA_DEBUG_INITIALIZE_ALLOCATIONS && + hAllocation->IsMappingAllowed() && + (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) + { + void* pData = VMA_NULL; + VkResult res = Map(hAllocation, &pData); + if(res == VK_SUCCESS) + { + memset(pData, (int)pattern, (size_t)hAllocation->GetSize()); + FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH); + Unmap(hAllocation); + } + else + { + VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation."); + } + } +} + +uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits() +{ + uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load(); + if(memoryTypeBits == UINT32_MAX) + { + memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits(); + m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits); + } + return memoryTypeBits; +} + +#if VMA_STATS_STRING_ENABLED +void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json) +{ + json.WriteString("DefaultPools"); + json.BeginObject(); + { + for (uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex]; + VmaDedicatedAllocationList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex]; + if (pBlockVector != VMA_NULL) + { + json.BeginString("Type "); + json.ContinueString(memTypeIndex); + json.EndString(); + json.BeginObject(); + { + json.WriteString("PreferredBlockSize"); + json.WriteNumber(pBlockVector->GetPreferredBlockSize()); + + json.WriteString("Blocks"); + pBlockVector->PrintDetailedMap(json); + + json.WriteString("DedicatedAllocations"); + dedicatedAllocList.BuildStatsString(json); + } + json.EndObject(); + } + } + } + json.EndObject(); + + json.WriteString("CustomPools"); + json.BeginObject(); + { + VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex); + if (!m_Pools.IsEmpty()) + { + for (uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + bool displayType = true; + size_t index = 0; + for (VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool)) + { + VmaBlockVector& blockVector = pool->m_BlockVector; + if (blockVector.GetMemoryTypeIndex() == memTypeIndex) + { + if (displayType) + { + json.BeginString("Type "); + json.ContinueString(memTypeIndex); + json.EndString(); + json.BeginArray(); + displayType = false; + } + + json.BeginObject(); + { + json.WriteString("Name"); + json.BeginString(); + json.ContinueString_Size(index++); + if (pool->GetName()) + { + json.ContinueString(" - "); + json.ContinueString(pool->GetName()); + } + json.EndString(); + + json.WriteString("PreferredBlockSize"); + json.WriteNumber(blockVector.GetPreferredBlockSize()); + + json.WriteString("Blocks"); + blockVector.PrintDetailedMap(json); + + json.WriteString("DedicatedAllocations"); + pool->m_DedicatedAllocations.BuildStatsString(json); + } + json.EndObject(); + } + } + + if (!displayType) 
+ json.EndArray(); + } + } + } + json.EndObject(); +} +#endif // VMA_STATS_STRING_ENABLED +#endif // _VMA_ALLOCATOR_T_FUNCTIONS + + +#ifndef _VMA_PUBLIC_INTERFACE +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator( + const VmaAllocatorCreateInfo* pCreateInfo, + VmaAllocator* pAllocator) +{ + VMA_ASSERT(pCreateInfo && pAllocator); + VMA_ASSERT(pCreateInfo->vulkanApiVersion == 0 || + (VK_VERSION_MAJOR(pCreateInfo->vulkanApiVersion) == 1 && VK_VERSION_MINOR(pCreateInfo->vulkanApiVersion) <= 3)); + VMA_DEBUG_LOG("vmaCreateAllocator"); + *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo); + VkResult result = (*pAllocator)->Init(pCreateInfo); + if(result < 0) + { + vma_delete(pCreateInfo->pAllocationCallbacks, *pAllocator); + *pAllocator = VK_NULL_HANDLE; + } + return result; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator( + VmaAllocator allocator) +{ + if(allocator != VK_NULL_HANDLE) + { + VMA_DEBUG_LOG("vmaDestroyAllocator"); + VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks; // Have to copy the callbacks when destroying. + vma_delete(&allocationCallbacks, allocator); + } +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo) +{ + VMA_ASSERT(allocator && pAllocatorInfo); + pAllocatorInfo->instance = allocator->m_hInstance; + pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice(); + pAllocatorInfo->device = allocator->m_hDevice; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties( + VmaAllocator allocator, + const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties) +{ + VMA_ASSERT(allocator && ppPhysicalDeviceProperties); + *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties( + VmaAllocator allocator, + const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties) +{ + VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties); + *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties( + VmaAllocator allocator, + uint32_t memoryTypeIndex, + VkMemoryPropertyFlags* pFlags) +{ + VMA_ASSERT(allocator && pFlags); + VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount()); + *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex( + VmaAllocator allocator, + uint32_t frameIndex) +{ + VMA_ASSERT(allocator); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->SetCurrentFrameIndex(frameIndex); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStatistics( + VmaAllocator allocator, + VmaTotalStatistics* pStats) +{ + VMA_ASSERT(allocator && pStats); + VMA_DEBUG_GLOBAL_MUTEX_LOCK + allocator->CalculateStatistics(pStats); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetHeapBudgets( + VmaAllocator allocator, + VmaBudget* pBudgets) +{ + VMA_ASSERT(allocator && pBudgets); + VMA_DEBUG_GLOBAL_MUTEX_LOCK + allocator->GetHeapBudgets(pBudgets, 0, allocator->GetMemoryHeapCount()); +} + +#if VMA_STATS_STRING_ENABLED + +VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString( + VmaAllocator allocator, + char** ppStatsString, + VkBool32 detailedMap) +{ + VMA_ASSERT(allocator && ppStatsString); + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VmaStringBuilder sb(allocator->GetAllocationCallbacks()); + { + VmaBudget budgets[VK_MAX_MEMORY_HEAPS]; + allocator->GetHeapBudgets(budgets, 0, allocator->GetMemoryHeapCount()); + + 
VmaTotalStatistics stats;
+        allocator->CalculateStatistics(&stats);
+
+        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
+        json.BeginObject();
+        {
+            json.WriteString("General");
+            json.BeginObject();
+            {
+                const VkPhysicalDeviceProperties& deviceProperties = allocator->m_PhysicalDeviceProperties;
+                const VkPhysicalDeviceMemoryProperties& memoryProperties = allocator->m_MemProps;
+
+                json.WriteString("API");
+                json.WriteString("Vulkan");
+
+                json.WriteString("apiVersion");
+                json.BeginString();
+                json.ContinueString(VK_API_VERSION_MAJOR(deviceProperties.apiVersion));
+                json.ContinueString(".");
+                json.ContinueString(VK_API_VERSION_MINOR(deviceProperties.apiVersion));
+                json.ContinueString(".");
+                json.ContinueString(VK_API_VERSION_PATCH(deviceProperties.apiVersion));
+                json.EndString();
+
+                json.WriteString("GPU");
+                json.WriteString(deviceProperties.deviceName);
+                json.WriteString("deviceType");
+                json.WriteNumber(static_cast<uint32_t>(deviceProperties.deviceType));
+
+                json.WriteString("maxMemoryAllocationCount");
+                json.WriteNumber(deviceProperties.limits.maxMemoryAllocationCount);
+                json.WriteString("bufferImageGranularity");
+                json.WriteNumber(deviceProperties.limits.bufferImageGranularity);
+                json.WriteString("nonCoherentAtomSize");
+                json.WriteNumber(deviceProperties.limits.nonCoherentAtomSize);
+
+                json.WriteString("memoryHeapCount");
+                json.WriteNumber(memoryProperties.memoryHeapCount);
+                json.WriteString("memoryTypeCount");
+                json.WriteNumber(memoryProperties.memoryTypeCount);
+            }
+            json.EndObject();
+        }
+        {
+            json.WriteString("Total");
+            VmaPrintDetailedStatistics(json, stats.total);
+        }
+        {
+            json.WriteString("MemoryInfo");
+            json.BeginObject();
+            {
+                for (uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
+                {
+                    json.BeginString("Heap ");
+                    json.ContinueString(heapIndex);
+                    json.EndString();
+                    json.BeginObject();
+                    {
+                        const VkMemoryHeap& heapInfo = allocator->m_MemProps.memoryHeaps[heapIndex];
+                        json.WriteString("Flags");
+                        json.BeginArray(true);
+                        {
+                            if (heapInfo.flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT)
+                                json.WriteString("DEVICE_LOCAL");
+    #if VMA_VULKAN_VERSION >= 1001000
+                            if (heapInfo.flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT)
+                                json.WriteString("MULTI_INSTANCE");
+    #endif
+
+                            VkMemoryHeapFlags flags = heapInfo.flags &
+                                ~(VK_MEMORY_HEAP_DEVICE_LOCAL_BIT
+    #if VMA_VULKAN_VERSION >= 1001000
+                                    | VK_MEMORY_HEAP_MULTI_INSTANCE_BIT
+    #endif
+                                );
+                            if (flags != 0)
+                                json.WriteNumber(flags);
+                        }
+                        json.EndArray();
+
+                        json.WriteString("Size");
+                        json.WriteNumber(heapInfo.size);
+
+                        json.WriteString("Budget");
+                        json.BeginObject();
+                        {
+                            json.WriteString("BudgetBytes");
+                            json.WriteNumber(budgets[heapIndex].budget);
+                            json.WriteString("UsageBytes");
+                            json.WriteNumber(budgets[heapIndex].usage);
+                        }
+                        json.EndObject();
+
+                        json.WriteString("Stats");
+                        VmaPrintDetailedStatistics(json, stats.memoryHeap[heapIndex]);
+
+                        json.WriteString("MemoryPools");
+                        json.BeginObject();
+                        {
+                            for (uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
+                            {
+                                if (allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
+                                {
+                                    json.BeginString("Type ");
+                                    json.ContinueString(typeIndex);
+                                    json.EndString();
+                                    json.BeginObject();
+                                    {
+                                        json.WriteString("Flags");
+                                        json.BeginArray(true);
+                                        {
+                                            VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
+                                            if (flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)
+                                                json.WriteString("DEVICE_LOCAL");
+                                            if (flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
+                                                json.WriteString("HOST_VISIBLE");
+                                            if (flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
+                                                json.WriteString("HOST_COHERENT");
+                                            if (flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT)
+                                                json.WriteString("HOST_CACHED");
+                                            if (flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT)
+                                                json.WriteString("LAZILY_ALLOCATED");
+    #if VMA_VULKAN_VERSION >= 1001000
+                                            if (flags & VK_MEMORY_PROPERTY_PROTECTED_BIT)
+                                                json.WriteString("PROTECTED");
+    #endif
+    #if VK_AMD_device_coherent_memory
+                                            if (flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY)
+                                                json.WriteString("DEVICE_COHERENT_AMD");
+                                            if (flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)
+                                                json.WriteString("DEVICE_UNCACHED_AMD");
+    #endif
+
+                                            flags &= ~(VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT
+                                                | VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT
+    #if VMA_VULKAN_VERSION >= 1001000
+                                                | VK_MEMORY_PROPERTY_PROTECTED_BIT
+    #endif
+    #if VK_AMD_device_coherent_memory
+                                                | VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY
+                                                | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY
+    #endif
+                                                | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT
+                                                | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
+                                                | VK_MEMORY_PROPERTY_HOST_CACHED_BIT);
+                                            if (flags != 0)
+                                                json.WriteNumber(flags);
+                                        }
+                                        json.EndArray();
+
+                                        json.WriteString("Stats");
+                                        VmaPrintDetailedStatistics(json, stats.memoryType[typeIndex]);
+                                    }
+                                    json.EndObject();
+                                }
+                            }
+
+                        }
+                        json.EndObject();
+                    }
+                    json.EndObject();
+                }
+            }
+            json.EndObject();
+        }
+
+        if (detailedMap == VK_TRUE)
+            allocator->PrintDetailedMap(json);
+
+        json.EndObject();
+    }
+
+    *ppStatsString = VmaCreateStringCopy(allocator->GetAllocationCallbacks(), sb.GetData(), sb.GetLength());
+}
+
+VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
+    VmaAllocator allocator,
+    char* pStatsString)
+{
+    if(pStatsString != VMA_NULL)
+    {
+        VMA_ASSERT(allocator);
+        VmaFreeString(allocator->GetAllocationCallbacks(), pStatsString);
+    }
+}
+
+#endif // VMA_STATS_STRING_ENABLED
+
+/*
+This function is not protected by any mutex because it just reads immutable data.
+*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex( + VmaAllocator allocator, + uint32_t memoryTypeBits, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + uint32_t* pMemoryTypeIndex) +{ + VMA_ASSERT(allocator != VK_NULL_HANDLE); + VMA_ASSERT(pAllocationCreateInfo != VMA_NULL); + VMA_ASSERT(pMemoryTypeIndex != VMA_NULL); + + return allocator->FindMemoryTypeIndex(memoryTypeBits, pAllocationCreateInfo, UINT32_MAX, pMemoryTypeIndex); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo( + VmaAllocator allocator, + const VkBufferCreateInfo* pBufferCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + uint32_t* pMemoryTypeIndex) +{ + VMA_ASSERT(allocator != VK_NULL_HANDLE); + VMA_ASSERT(pBufferCreateInfo != VMA_NULL); + VMA_ASSERT(pAllocationCreateInfo != VMA_NULL); + VMA_ASSERT(pMemoryTypeIndex != VMA_NULL); + + const VkDevice hDev = allocator->m_hDevice; + const VmaVulkanFunctions* funcs = &allocator->GetVulkanFunctions(); + VkResult res; + +#if VMA_VULKAN_VERSION >= 1003000 + if(funcs->vkGetDeviceBufferMemoryRequirements) + { + // Can query straight from VkBufferCreateInfo :) + VkDeviceBufferMemoryRequirements devBufMemReq = {VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS}; + devBufMemReq.pCreateInfo = pBufferCreateInfo; + + VkMemoryRequirements2 memReq = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2}; + (*funcs->vkGetDeviceBufferMemoryRequirements)(hDev, &devBufMemReq, &memReq); + + res = allocator->FindMemoryTypeIndex( + memReq.memoryRequirements.memoryTypeBits, pAllocationCreateInfo, pBufferCreateInfo->usage, pMemoryTypeIndex); + } + else +#endif // #if VMA_VULKAN_VERSION >= 1003000 + { + // Must create a dummy buffer to query :( + VkBuffer hBuffer = VK_NULL_HANDLE; + res = funcs->vkCreateBuffer( + hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer); + if(res == VK_SUCCESS) + { + VkMemoryRequirements memReq = {}; + funcs->vkGetBufferMemoryRequirements(hDev, hBuffer, &memReq); + + res = allocator->FindMemoryTypeIndex( + memReq.memoryTypeBits, pAllocationCreateInfo, pBufferCreateInfo->usage, pMemoryTypeIndex); + + funcs->vkDestroyBuffer( + hDev, hBuffer, allocator->GetAllocationCallbacks()); + } + } + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo( + VmaAllocator allocator, + const VkImageCreateInfo* pImageCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + uint32_t* pMemoryTypeIndex) +{ + VMA_ASSERT(allocator != VK_NULL_HANDLE); + VMA_ASSERT(pImageCreateInfo != VMA_NULL); + VMA_ASSERT(pAllocationCreateInfo != VMA_NULL); + VMA_ASSERT(pMemoryTypeIndex != VMA_NULL); + + const VkDevice hDev = allocator->m_hDevice; + const VmaVulkanFunctions* funcs = &allocator->GetVulkanFunctions(); + VkResult res; + +#if VMA_VULKAN_VERSION >= 1003000 + if(funcs->vkGetDeviceImageMemoryRequirements) + { + // Can query straight from VkImageCreateInfo :) + VkDeviceImageMemoryRequirements devImgMemReq = {VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS}; + devImgMemReq.pCreateInfo = pImageCreateInfo; + VMA_ASSERT(pImageCreateInfo->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT_COPY && (pImageCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT_COPY) == 0 && + "Cannot use this VkImageCreateInfo with vmaFindMemoryTypeIndexForImageInfo as I don't know what to pass as VkDeviceImageMemoryRequirements::planeAspect."); + + VkMemoryRequirements2 memReq = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2}; + (*funcs->vkGetDeviceImageMemoryRequirements)(hDev, &devImgMemReq, &memReq); + + res = 
allocator->FindMemoryTypeIndex( + memReq.memoryRequirements.memoryTypeBits, pAllocationCreateInfo, pImageCreateInfo->usage, pMemoryTypeIndex); + } + else +#endif // #if VMA_VULKAN_VERSION >= 1003000 + { + // Must create a dummy image to query :( + VkImage hImage = VK_NULL_HANDLE; + res = funcs->vkCreateImage( + hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage); + if(res == VK_SUCCESS) + { + VkMemoryRequirements memReq = {}; + funcs->vkGetImageMemoryRequirements(hDev, hImage, &memReq); + + res = allocator->FindMemoryTypeIndex( + memReq.memoryTypeBits, pAllocationCreateInfo, pImageCreateInfo->usage, pMemoryTypeIndex); + + funcs->vkDestroyImage( + hDev, hImage, allocator->GetAllocationCallbacks()); + } + } + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool( + VmaAllocator allocator, + const VmaPoolCreateInfo* pCreateInfo, + VmaPool* pPool) +{ + VMA_ASSERT(allocator && pCreateInfo && pPool); + + VMA_DEBUG_LOG("vmaCreatePool"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->CreatePool(pCreateInfo, pPool); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool( + VmaAllocator allocator, + VmaPool pool) +{ + VMA_ASSERT(allocator); + + if(pool == VK_NULL_HANDLE) + { + return; + } + + VMA_DEBUG_LOG("vmaDestroyPool"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->DestroyPool(pool); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStatistics( + VmaAllocator allocator, + VmaPool pool, + VmaStatistics* pPoolStats) +{ + VMA_ASSERT(allocator && pool && pPoolStats); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->GetPoolStatistics(pool, pPoolStats); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaCalculatePoolStatistics( + VmaAllocator allocator, + VmaPool pool, + VmaDetailedStatistics* pPoolStats) +{ + VMA_ASSERT(allocator && pool && pPoolStats); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->CalculatePoolStatistics(pool, pPoolStats); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool) +{ + VMA_ASSERT(allocator && pool); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VMA_DEBUG_LOG("vmaCheckPoolCorruption"); + + return allocator->CheckPoolCorruption(pool); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName( + VmaAllocator allocator, + VmaPool pool, + const char** ppName) +{ + VMA_ASSERT(allocator && pool && ppName); + + VMA_DEBUG_LOG("vmaGetPoolName"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + *ppName = pool->GetName(); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName( + VmaAllocator allocator, + VmaPool pool, + const char* pName) +{ + VMA_ASSERT(allocator && pool); + + VMA_DEBUG_LOG("vmaSetPoolName"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + pool->SetName(pName); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory( + VmaAllocator allocator, + const VkMemoryRequirements* pVkMemoryRequirements, + const VmaAllocationCreateInfo* pCreateInfo, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation); + + VMA_DEBUG_LOG("vmaAllocateMemory"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VkResult result = allocator->AllocateMemory( + *pVkMemoryRequirements, + false, // requiresDedicatedAllocation + false, // prefersDedicatedAllocation + VK_NULL_HANDLE, // dedicatedBuffer + VK_NULL_HANDLE, // dedicatedImage + UINT32_MAX, // dedicatedBufferImageUsage + *pCreateInfo, + VMA_SUBALLOCATION_TYPE_UNKNOWN, + 1, // allocationCount + pAllocation); + + if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS) + { + allocator->GetAllocationInfo(*pAllocation, 
pAllocationInfo); + } + + return result; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages( + VmaAllocator allocator, + const VkMemoryRequirements* pVkMemoryRequirements, + const VmaAllocationCreateInfo* pCreateInfo, + size_t allocationCount, + VmaAllocation* pAllocations, + VmaAllocationInfo* pAllocationInfo) +{ + if(allocationCount == 0) + { + return VK_SUCCESS; + } + + VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations); + + VMA_DEBUG_LOG("vmaAllocateMemoryPages"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VkResult result = allocator->AllocateMemory( + *pVkMemoryRequirements, + false, // requiresDedicatedAllocation + false, // prefersDedicatedAllocation + VK_NULL_HANDLE, // dedicatedBuffer + VK_NULL_HANDLE, // dedicatedImage + UINT32_MAX, // dedicatedBufferImageUsage + *pCreateInfo, + VMA_SUBALLOCATION_TYPE_UNKNOWN, + allocationCount, + pAllocations); + + if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS) + { + for(size_t i = 0; i < allocationCount; ++i) + { + allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i); + } + } + + return result; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer( + VmaAllocator allocator, + VkBuffer buffer, + const VmaAllocationCreateInfo* pCreateInfo, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation); + + VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VkMemoryRequirements vkMemReq = {}; + bool requiresDedicatedAllocation = false; + bool prefersDedicatedAllocation = false; + allocator->GetBufferMemoryRequirements(buffer, vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation); + + VkResult result = allocator->AllocateMemory( + vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation, + buffer, // dedicatedBuffer + VK_NULL_HANDLE, // dedicatedImage + UINT32_MAX, // dedicatedBufferImageUsage + *pCreateInfo, + VMA_SUBALLOCATION_TYPE_BUFFER, + 1, // allocationCount + pAllocation); + + if(pAllocationInfo && result == VK_SUCCESS) + { + allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); + } + + return result; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage( + VmaAllocator allocator, + VkImage image, + const VmaAllocationCreateInfo* pCreateInfo, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation); + + VMA_DEBUG_LOG("vmaAllocateMemoryForImage"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VkMemoryRequirements vkMemReq = {}; + bool requiresDedicatedAllocation = false; + bool prefersDedicatedAllocation = false; + allocator->GetImageMemoryRequirements(image, vkMemReq, + requiresDedicatedAllocation, prefersDedicatedAllocation); + + VkResult result = allocator->AllocateMemory( + vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation, + VK_NULL_HANDLE, // dedicatedBuffer + image, // dedicatedImage + UINT32_MAX, // dedicatedBufferImageUsage + *pCreateInfo, + VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN, + 1, // allocationCount + pAllocation); + + if(pAllocationInfo && result == VK_SUCCESS) + { + allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); + } + + return result; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory( + VmaAllocator allocator, + VmaAllocation allocation) +{ + VMA_ASSERT(allocator); + + if(allocation == VK_NULL_HANDLE) + { + return; + } + + VMA_DEBUG_LOG("vmaFreeMemory"); + + 
VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->FreeMemory( + 1, // allocationCount + &allocation); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages( + VmaAllocator allocator, + size_t allocationCount, + const VmaAllocation* pAllocations) +{ + if(allocationCount == 0) + { + return; + } + + VMA_ASSERT(allocator); + + VMA_DEBUG_LOG("vmaFreeMemoryPages"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->FreeMemory(allocationCount, pAllocations); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo( + VmaAllocator allocator, + VmaAllocation allocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && allocation && pAllocationInfo); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->GetAllocationInfo(allocation, pAllocationInfo); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData( + VmaAllocator allocator, + VmaAllocation allocation, + void* pUserData) +{ + VMA_ASSERT(allocator && allocation); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocation->SetUserData(allocator, pUserData); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationName( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + const char* VMA_NULLABLE pName) +{ + allocation->SetName(allocator, pName); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationMemoryProperties( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkMemoryPropertyFlags* VMA_NOT_NULL pFlags) +{ + VMA_ASSERT(allocator && allocation && pFlags); + const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); + *pFlags = allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory( + VmaAllocator allocator, + VmaAllocation allocation, + void** ppData) +{ + VMA_ASSERT(allocator && allocation && ppData); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->Map(allocation, ppData); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory( + VmaAllocator allocator, + VmaAllocation allocation) +{ + VMA_ASSERT(allocator && allocation); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->Unmap(allocation); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation( + VmaAllocator allocator, + VmaAllocation allocation, + VkDeviceSize offset, + VkDeviceSize size) +{ + VMA_ASSERT(allocator && allocation); + + VMA_DEBUG_LOG("vmaFlushAllocation"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH); + + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation( + VmaAllocator allocator, + VmaAllocation allocation, + VkDeviceSize offset, + VkDeviceSize size) +{ + VMA_ASSERT(allocator && allocation); + + VMA_DEBUG_LOG("vmaInvalidateAllocation"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE); + + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations( + VmaAllocator allocator, + uint32_t allocationCount, + const VmaAllocation* allocations, + const VkDeviceSize* offsets, + const VkDeviceSize* sizes) +{ + VMA_ASSERT(allocator); + + if(allocationCount == 0) + { + return VK_SUCCESS; + } + + VMA_ASSERT(allocations); + + VMA_DEBUG_LOG("vmaFlushAllocations"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH); + + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations( + VmaAllocator allocator, 
+ uint32_t allocationCount, + const VmaAllocation* allocations, + const VkDeviceSize* offsets, + const VkDeviceSize* sizes) +{ + VMA_ASSERT(allocator); + + if(allocationCount == 0) + { + return VK_SUCCESS; + } + + VMA_ASSERT(allocations); + + VMA_DEBUG_LOG("vmaInvalidateAllocations"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE); + + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption( + VmaAllocator allocator, + uint32_t memoryTypeBits) +{ + VMA_ASSERT(allocator); + + VMA_DEBUG_LOG("vmaCheckCorruption"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->CheckCorruption(memoryTypeBits); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentation( + VmaAllocator allocator, + const VmaDefragmentationInfo* pInfo, + VmaDefragmentationContext* pContext) +{ + VMA_ASSERT(allocator && pInfo && pContext); + + VMA_DEBUG_LOG("vmaBeginDefragmentation"); + + if (pInfo->pool != VMA_NULL) + { + // Check if run on supported algorithms + if (pInfo->pool->m_BlockVector.GetAlgorithm() & VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) + return VK_ERROR_FEATURE_NOT_PRESENT; + } + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + *pContext = vma_new(allocator, VmaDefragmentationContext_T)(allocator, *pInfo); + return VK_SUCCESS; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaEndDefragmentation( + VmaAllocator allocator, + VmaDefragmentationContext context, + VmaDefragmentationStats* pStats) +{ + VMA_ASSERT(allocator && context); + + VMA_DEBUG_LOG("vmaEndDefragmentation"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + if (pStats) + context->GetStats(*pStats); + vma_delete(allocator, context); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass( + VmaAllocator VMA_NOT_NULL allocator, + VmaDefragmentationContext VMA_NOT_NULL context, + VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo) +{ + VMA_ASSERT(context && pPassInfo); + + VMA_DEBUG_LOG("vmaBeginDefragmentationPass"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return context->DefragmentPassBegin(*pPassInfo); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass( + VmaAllocator VMA_NOT_NULL allocator, + VmaDefragmentationContext VMA_NOT_NULL context, + VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo) +{ + VMA_ASSERT(context && pPassInfo); + + VMA_DEBUG_LOG("vmaEndDefragmentationPass"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return context->DefragmentPassEnd(*pPassInfo); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory( + VmaAllocator allocator, + VmaAllocation allocation, + VkBuffer buffer) +{ + VMA_ASSERT(allocator && allocation && buffer); + + VMA_DEBUG_LOG("vmaBindBufferMemory"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2( + VmaAllocator allocator, + VmaAllocation allocation, + VkDeviceSize allocationLocalOffset, + VkBuffer buffer, + const void* pNext) +{ + VMA_ASSERT(allocator && allocation && buffer); + + VMA_DEBUG_LOG("vmaBindBufferMemory2"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory( + VmaAllocator allocator, + VmaAllocation allocation, + VkImage image) +{ + VMA_ASSERT(allocator && allocation && image); + + VMA_DEBUG_LOG("vmaBindImageMemory"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->BindImageMemory(allocation, 0, image, 
VMA_NULL); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2( + VmaAllocator allocator, + VmaAllocation allocation, + VkDeviceSize allocationLocalOffset, + VkImage image, + const void* pNext) +{ + VMA_ASSERT(allocator && allocation && image); + + VMA_DEBUG_LOG("vmaBindImageMemory2"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer( + VmaAllocator allocator, + const VkBufferCreateInfo* pBufferCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + VkBuffer* pBuffer, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation); + + if(pBufferCreateInfo->size == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 && + !allocator->m_UseKhrBufferDeviceAddress) + { + VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used."); + return VK_ERROR_INITIALIZATION_FAILED; + } + + VMA_DEBUG_LOG("vmaCreateBuffer"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + *pBuffer = VK_NULL_HANDLE; + *pAllocation = VK_NULL_HANDLE; + + // 1. Create VkBuffer. + VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)( + allocator->m_hDevice, + pBufferCreateInfo, + allocator->GetAllocationCallbacks(), + pBuffer); + if(res >= 0) + { + // 2. vkGetBufferMemoryRequirements. + VkMemoryRequirements vkMemReq = {}; + bool requiresDedicatedAllocation = false; + bool prefersDedicatedAllocation = false; + allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq, + requiresDedicatedAllocation, prefersDedicatedAllocation); + + // 3. Allocate memory using allocator. + res = allocator->AllocateMemory( + vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation, + *pBuffer, // dedicatedBuffer + VK_NULL_HANDLE, // dedicatedImage + pBufferCreateInfo->usage, // dedicatedBufferImageUsage + *pAllocationCreateInfo, + VMA_SUBALLOCATION_TYPE_BUFFER, + 1, // allocationCount + pAllocation); + + if(res >= 0) + { + // 3. Bind buffer with memory. + if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0) + { + res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL); + } + if(res >= 0) + { + // All steps succeeded. 
+ #if VMA_STATS_STRING_ENABLED + (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage); + #endif + if(pAllocationInfo != VMA_NULL) + { + allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); + } + + return VK_SUCCESS; + } + allocator->FreeMemory( + 1, // allocationCount + pAllocation); + *pAllocation = VK_NULL_HANDLE; + (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); + *pBuffer = VK_NULL_HANDLE; + return res; + } + (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); + *pBuffer = VK_NULL_HANDLE; + return res; + } + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBufferWithAlignment( + VmaAllocator allocator, + const VkBufferCreateInfo* pBufferCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + VkDeviceSize minAlignment, + VkBuffer* pBuffer, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && VmaIsPow2(minAlignment) && pBuffer && pAllocation); + + if(pBufferCreateInfo->size == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 && + !allocator->m_UseKhrBufferDeviceAddress) + { + VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used."); + return VK_ERROR_INITIALIZATION_FAILED; + } + + VMA_DEBUG_LOG("vmaCreateBufferWithAlignment"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + *pBuffer = VK_NULL_HANDLE; + *pAllocation = VK_NULL_HANDLE; + + // 1. Create VkBuffer. + VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)( + allocator->m_hDevice, + pBufferCreateInfo, + allocator->GetAllocationCallbacks(), + pBuffer); + if(res >= 0) + { + // 2. vkGetBufferMemoryRequirements. + VkMemoryRequirements vkMemReq = {}; + bool requiresDedicatedAllocation = false; + bool prefersDedicatedAllocation = false; + allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq, + requiresDedicatedAllocation, prefersDedicatedAllocation); + + // 2a. Include minAlignment + vkMemReq.alignment = VMA_MAX(vkMemReq.alignment, minAlignment); + + // 3. Allocate memory using allocator. + res = allocator->AllocateMemory( + vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation, + *pBuffer, // dedicatedBuffer + VK_NULL_HANDLE, // dedicatedImage + pBufferCreateInfo->usage, // dedicatedBufferImageUsage + *pAllocationCreateInfo, + VMA_SUBALLOCATION_TYPE_BUFFER, + 1, // allocationCount + pAllocation); + + if(res >= 0) + { + // 3. Bind buffer with memory. + if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0) + { + res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL); + } + if(res >= 0) + { + // All steps succeeded. 
+ #if VMA_STATS_STRING_ENABLED + (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage); + #endif + if(pAllocationInfo != VMA_NULL) + { + allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); + } + + return VK_SUCCESS; + } + allocator->FreeMemory( + 1, // allocationCount + pAllocation); + *pAllocation = VK_NULL_HANDLE; + (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); + *pBuffer = VK_NULL_HANDLE; + return res; + } + (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); + *pBuffer = VK_NULL_HANDLE; + return res; + } + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingBuffer( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, + VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer) +{ + VMA_ASSERT(allocator && pBufferCreateInfo && pBuffer && allocation); + + VMA_DEBUG_LOG("vmaCreateAliasingBuffer"); + + *pBuffer = VK_NULL_HANDLE; + + if (pBufferCreateInfo->size == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + if ((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 && + !allocator->m_UseKhrBufferDeviceAddress) + { + VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used."); + return VK_ERROR_INITIALIZATION_FAILED; + } + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + // 1. Create VkBuffer. + VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)( + allocator->m_hDevice, + pBufferCreateInfo, + allocator->GetAllocationCallbacks(), + pBuffer); + if (res >= 0) + { + // 2. Bind buffer with memory. + res = allocator->BindBufferMemory(allocation, 0, *pBuffer, VMA_NULL); + if (res >= 0) + { + return VK_SUCCESS; + } + (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); + } + return res; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer( + VmaAllocator allocator, + VkBuffer buffer, + VmaAllocation allocation) +{ + VMA_ASSERT(allocator); + + if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE) + { + return; + } + + VMA_DEBUG_LOG("vmaDestroyBuffer"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + if(buffer != VK_NULL_HANDLE) + { + (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks()); + } + + if(allocation != VK_NULL_HANDLE) + { + allocator->FreeMemory( + 1, // allocationCount + &allocation); + } +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage( + VmaAllocator allocator, + const VkImageCreateInfo* pImageCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + VkImage* pImage, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation); + + if(pImageCreateInfo->extent.width == 0 || + pImageCreateInfo->extent.height == 0 || + pImageCreateInfo->extent.depth == 0 || + pImageCreateInfo->mipLevels == 0 || + pImageCreateInfo->arrayLayers == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + + VMA_DEBUG_LOG("vmaCreateImage"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + *pImage = VK_NULL_HANDLE; + *pAllocation = VK_NULL_HANDLE; + + // 1. Create VkImage. 
+ VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)( + allocator->m_hDevice, + pImageCreateInfo, + allocator->GetAllocationCallbacks(), + pImage); + if(res >= 0) + { + VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ? + VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL : + VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR; + + // 2. Allocate memory using allocator. + VkMemoryRequirements vkMemReq = {}; + bool requiresDedicatedAllocation = false; + bool prefersDedicatedAllocation = false; + allocator->GetImageMemoryRequirements(*pImage, vkMemReq, + requiresDedicatedAllocation, prefersDedicatedAllocation); + + res = allocator->AllocateMemory( + vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation, + VK_NULL_HANDLE, // dedicatedBuffer + *pImage, // dedicatedImage + pImageCreateInfo->usage, // dedicatedBufferImageUsage + *pAllocationCreateInfo, + suballocType, + 1, // allocationCount + pAllocation); + + if(res >= 0) + { + // 3. Bind image with memory. + if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0) + { + res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL); + } + if(res >= 0) + { + // All steps succeeded. + #if VMA_STATS_STRING_ENABLED + (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage); + #endif + if(pAllocationInfo != VMA_NULL) + { + allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); + } + + return VK_SUCCESS; + } + allocator->FreeMemory( + 1, // allocationCount + pAllocation); + *pAllocation = VK_NULL_HANDLE; + (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks()); + *pImage = VK_NULL_HANDLE; + return res; + } + (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks()); + *pImage = VK_NULL_HANDLE; + return res; + } + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingImage( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, + VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage) +{ + VMA_ASSERT(allocator && pImageCreateInfo && pImage && allocation); + + *pImage = VK_NULL_HANDLE; + + VMA_DEBUG_LOG("vmaCreateImage"); + + if (pImageCreateInfo->extent.width == 0 || + pImageCreateInfo->extent.height == 0 || + pImageCreateInfo->extent.depth == 0 || + pImageCreateInfo->mipLevels == 0 || + pImageCreateInfo->arrayLayers == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + // 1. Create VkImage. + VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)( + allocator->m_hDevice, + pImageCreateInfo, + allocator->GetAllocationCallbacks(), + pImage); + if (res >= 0) + { + // 2. Bind image with memory. 
+ res = allocator->BindImageMemory(allocation, 0, *pImage, VMA_NULL); + if (res >= 0) + { + return VK_SUCCESS; + } + (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks()); + } + return res; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage( + VmaAllocator VMA_NOT_NULL allocator, + VkImage VMA_NULLABLE_NON_DISPATCHABLE image, + VmaAllocation VMA_NULLABLE allocation) +{ + VMA_ASSERT(allocator); + + if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE) + { + return; + } + + VMA_DEBUG_LOG("vmaDestroyImage"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + if(image != VK_NULL_HANDLE) + { + (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks()); + } + if(allocation != VK_NULL_HANDLE) + { + allocator->FreeMemory( + 1, // allocationCount + &allocation); + } +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateVirtualBlock( + const VmaVirtualBlockCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaVirtualBlock VMA_NULLABLE * VMA_NOT_NULL pVirtualBlock) +{ + VMA_ASSERT(pCreateInfo && pVirtualBlock); + VMA_ASSERT(pCreateInfo->size > 0); + VMA_DEBUG_LOG("vmaCreateVirtualBlock"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + *pVirtualBlock = vma_new(pCreateInfo->pAllocationCallbacks, VmaVirtualBlock_T)(*pCreateInfo); + VkResult res = (*pVirtualBlock)->Init(); + if(res < 0) + { + vma_delete(pCreateInfo->pAllocationCallbacks, *pVirtualBlock); + *pVirtualBlock = VK_NULL_HANDLE; + } + return res; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyVirtualBlock(VmaVirtualBlock VMA_NULLABLE virtualBlock) +{ + if(virtualBlock != VK_NULL_HANDLE) + { + VMA_DEBUG_LOG("vmaDestroyVirtualBlock"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + VkAllocationCallbacks allocationCallbacks = virtualBlock->m_AllocationCallbacks; // Have to copy the callbacks when destroying. + vma_delete(&allocationCallbacks, virtualBlock); + } +} + +VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaIsVirtualBlockEmpty(VmaVirtualBlock VMA_NOT_NULL virtualBlock) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); + VMA_DEBUG_LOG("vmaIsVirtualBlockEmpty"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + return virtualBlock->IsEmpty() ? 
VK_TRUE : VK_FALSE; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualAllocationInfo(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, VmaVirtualAllocationInfo* VMA_NOT_NULL pVirtualAllocInfo) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pVirtualAllocInfo != VMA_NULL); + VMA_DEBUG_LOG("vmaGetVirtualAllocationInfo"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->GetAllocationInfo(allocation, *pVirtualAllocInfo); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaVirtualAllocate(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + const VmaVirtualAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pAllocation, + VkDeviceSize* VMA_NULLABLE pOffset) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pCreateInfo != VMA_NULL && pAllocation != VMA_NULL); + VMA_DEBUG_LOG("vmaVirtualAllocate"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + return virtualBlock->Allocate(*pCreateInfo, *pAllocation, pOffset); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaVirtualFree(VmaVirtualBlock VMA_NOT_NULL virtualBlock, VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE allocation) +{ + if(allocation != VK_NULL_HANDLE) + { + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); + VMA_DEBUG_LOG("vmaVirtualFree"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->Free(allocation); + } +} + +VMA_CALL_PRE void VMA_CALL_POST vmaClearVirtualBlock(VmaVirtualBlock VMA_NOT_NULL virtualBlock) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); + VMA_DEBUG_LOG("vmaClearVirtualBlock"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->Clear(); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaSetVirtualAllocationUserData(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, void* VMA_NULLABLE pUserData) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); + VMA_DEBUG_LOG("vmaSetVirtualAllocationUserData"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->SetAllocationUserData(allocation, pUserData); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualBlockStatistics(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaStatistics* VMA_NOT_NULL pStats) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pStats != VMA_NULL); + VMA_DEBUG_LOG("vmaGetVirtualBlockStatistics"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->GetStatistics(*pStats); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaCalculateVirtualBlockStatistics(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaDetailedStatistics* VMA_NOT_NULL pStats) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pStats != VMA_NULL); + VMA_DEBUG_LOG("vmaCalculateVirtualBlockStatistics"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->CalculateDetailedStatistics(*pStats); +} + +#if VMA_STATS_STRING_ENABLED + +VMA_CALL_PRE void VMA_CALL_POST vmaBuildVirtualBlockStatsString(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString, VkBool32 detailedMap) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && ppStatsString != VMA_NULL); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + const VkAllocationCallbacks* allocationCallbacks = virtualBlock->GetAllocationCallbacks(); + VmaStringBuilder sb(allocationCallbacks); + virtualBlock->BuildStatsString(detailedMap != VK_FALSE, sb); + *ppStatsString = VmaCreateStringCopy(allocationCallbacks, sb.GetData(), sb.GetLength()); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaFreeVirtualBlockStatsString(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + char* VMA_NULLABLE pStatsString) +{ + if(pStatsString != VMA_NULL) + { + VMA_ASSERT(virtualBlock != 
VK_NULL_HANDLE);
+        VMA_DEBUG_GLOBAL_MUTEX_LOCK;
+        VmaFreeString(virtualBlock->GetAllocationCallbacks(), pStatsString);
+    }
+}
+#endif // VMA_STATS_STRING_ENABLED
+#endif // _VMA_PUBLIC_INTERFACE
+#endif // VMA_IMPLEMENTATION
+
+/**
+\page quick_start Quick start
+
+\section quick_start_project_setup Project setup
+
+Vulkan Memory Allocator comes in the form of an "stb-style" single header file.
+You don't need to build it as a separate library project.
+You can add this file directly to your project and commit it to your code repository next to your other source files.
+
+"Single header" doesn't mean that everything is contained in C/C++ declarations,
+as tends to be the case with inline functions or C++ templates.
+It means that the implementation is bundled with the interface in a single file and needs to be extracted using a preprocessor macro.
+If you don't do it properly, you will get linker errors.
+
+To do it properly:
+
+-# Include the "vk_mem_alloc.h" file in each CPP file where you want to use the library.
+   This includes declarations of all members of the library.
+-# In exactly one CPP file define the following macro before this include.
+   It also enables the internal definitions.
+
+\code
+#define VMA_IMPLEMENTATION
+#include "vk_mem_alloc.h"
+\endcode
+
+It may be a good idea to create a dedicated CPP file just for this purpose.
+
+This library includes the header `<vulkan/vulkan.h>`, which in turn
+includes `<windows.h>` on Windows. If you need some specific macros defined
+before including these headers (like `WIN32_LEAN_AND_MEAN` or
+`WINVER` for Windows, `VK_USE_PLATFORM_WIN32_KHR` for Vulkan), you must define
+them before every `#include` of this library.
+
+This library is written in C++, but has a C-compatible interface.
+Thus you can include and use vk_mem_alloc.h in C or C++ code, but the full
+implementation with the `VMA_IMPLEMENTATION` macro must be compiled as C++, NOT as C.
+Some features of C++14 are used. STL containers, RTTI, and C++ exceptions are not used.
+
+
+\section quick_start_initialization Initialization
+
+At program startup:
+
+-# Initialize Vulkan to have `VkPhysicalDevice`, `VkDevice` and `VkInstance` objects.
+-# Fill the VmaAllocatorCreateInfo structure and create a #VmaAllocator object by
+   calling vmaCreateAllocator().
+
+Only the members `physicalDevice`, `device` and `instance` are required.
+However, you should inform the library which Vulkan version you use by setting
+VmaAllocatorCreateInfo::vulkanApiVersion and which extensions you enabled
+by setting VmaAllocatorCreateInfo::flags (like #VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT for VK_KHR_buffer_device_address).
+Otherwise, VMA will use only core Vulkan 1.0 features with no extensions.
+
+You may need to configure how Vulkan functions are imported. There are 3 ways to do this:
+
+-# **If you link with the Vulkan static library** (e.g. "vulkan-1.lib" on Windows):
+   - You don't need to do anything.
+   - VMA will use these directly, as the macro `VMA_STATIC_VULKAN_FUNCTIONS` is defined to 1 by default.
+-# **If you want VMA to fetch pointers to Vulkan functions dynamically** using `vkGetInstanceProcAddr` and
+   `vkGetDeviceProcAddr` (this is the option presented in the example below):
+   - Define `VMA_STATIC_VULKAN_FUNCTIONS` to 0 and `VMA_DYNAMIC_VULKAN_FUNCTIONS` to 1.
+   - Provide pointers to these two functions via VmaVulkanFunctions::vkGetInstanceProcAddr and
+     VmaVulkanFunctions::vkGetDeviceProcAddr.
+   - The library will fetch pointers to all other functions it needs internally.
+-# **If you fetch pointers to all Vulkan functions in a custom way**, e.g.
+   using some loader like [Volk](https://github.com/zeux/volk):
+   - Define `VMA_STATIC_VULKAN_FUNCTIONS` and `VMA_DYNAMIC_VULKAN_FUNCTIONS` to 0.
+   - Pass these pointers via the structure #VmaVulkanFunctions.
+
+\code
+VmaVulkanFunctions vulkanFunctions = {};
+vulkanFunctions.vkGetInstanceProcAddr = &vkGetInstanceProcAddr;
+vulkanFunctions.vkGetDeviceProcAddr = &vkGetDeviceProcAddr;
+
+VmaAllocatorCreateInfo allocatorCreateInfo = {};
+allocatorCreateInfo.vulkanApiVersion = VK_API_VERSION_1_2;
+allocatorCreateInfo.physicalDevice = physicalDevice;
+allocatorCreateInfo.device = device;
+allocatorCreateInfo.instance = instance;
+allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions;
+
+VmaAllocator allocator;
+vmaCreateAllocator(&allocatorCreateInfo, &allocator);
+\endcode
+
+
+\section quick_start_resource_allocation Resource allocation
+
+When you want to create a buffer or image:
+
+-# Fill the `VkBufferCreateInfo` / `VkImageCreateInfo` structure.
+-# Fill the VmaAllocationCreateInfo structure.
+-# Call vmaCreateBuffer() / vmaCreateImage() to get a `VkBuffer`/`VkImage` with memory
+   already allocated and bound to it, plus a #VmaAllocation object that represents its underlying memory.
+
+\code
+VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufferInfo.size = 65536;
+bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocInfo = {};
+allocInfo.usage = VMA_MEMORY_USAGE_AUTO;
+
+VkBuffer buffer;
+VmaAllocation allocation;
+vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
+\endcode
+
+Don't forget to destroy your objects when no longer needed:
+
+\code
+vmaDestroyBuffer(allocator, buffer, allocation);
+vmaDestroyAllocator(allocator);
+\endcode
+
+
+\page choosing_memory_type Choosing memory type
+
+Physical devices in Vulkan support various combinations of memory heaps and
+types. Help with choosing the correct and optimal memory type for your specific
+resource is one of the key features of this library. You can use it by filling
+the appropriate members of the VmaAllocationCreateInfo structure, as described below.
+You can also combine multiple methods.
+
+-# If you just want to find a memory type index that meets your requirements, you
+   can use one of the functions: vmaFindMemoryTypeIndexForBufferInfo(),
+   vmaFindMemoryTypeIndexForImageInfo(), vmaFindMemoryTypeIndex().
+-# If you want to allocate a region of device memory without association with any
+   specific image or buffer, you can use the function vmaAllocateMemory(). Usage of
+   this function is not recommended and usually not needed.
+   The vmaAllocateMemoryPages() function is also provided for creating multiple allocations at once,
+   which may be useful for sparse binding.
+-# If you already have a buffer or an image created, want to allocate memory
+   for it, and will then bind it yourself, you can use the functions
+   vmaAllocateMemoryForBuffer(), vmaAllocateMemoryForImage().
+   For binding you should use the functions vmaBindBufferMemory(), vmaBindImageMemory()
+   or their extended versions: vmaBindBufferMemory2(), vmaBindImageMemory2().
+-# **This is the easiest and recommended way to use this library:**
+   If you want to create a buffer or an image, allocate memory for it and bind
+   them together, all in one call, you can use the functions vmaCreateBuffer(),
+   vmaCreateImage().
+
+When using 3. or 4., the library internally queries Vulkan for the memory types
+supported for that buffer or image (function `vkGetBufferMemoryRequirements()`)
+and uses only one of these types.
+
+If no memory type can be found that meets all the requirements, these functions
+return `VK_ERROR_FEATURE_NOT_PRESENT`.
+
+You can leave the VmaAllocationCreateInfo structure completely filled with zeros.
+It means no requirements are specified for the memory type.
+This is valid, although not very useful.
+
+\section choosing_memory_type_usage Usage
+
+The easiest way to specify memory requirements is to fill the member
+VmaAllocationCreateInfo::usage using one of the values of the enum #VmaMemoryUsage.
+It defines high level, common usage types.
+Since version 3 of the library, it is recommended to use #VMA_MEMORY_USAGE_AUTO to let it select the best memory type for your resource automatically.
+
+For example, if you want to create a uniform buffer that will be filled using
+transfer only once or infrequently and then used for rendering every frame as a uniform buffer, you can
+do it using the following code. The buffer will most likely end up in a memory type with
+`VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`, which is fast to access by the GPU device.
+
+\code
+VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufferInfo.size = 65536;
+bufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocInfo = {};
+allocInfo.usage = VMA_MEMORY_USAGE_AUTO;
+
+VkBuffer buffer;
+VmaAllocation allocation;
+vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
+\endcode
+
+If you have a preference for putting the resource in GPU (device) memory or CPU (host) memory
+on systems with a discrete graphics card where these memories are separate, you can use
+#VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE or #VMA_MEMORY_USAGE_AUTO_PREFER_HOST.
+
+When using `VMA_MEMORY_USAGE_AUTO*` and you want to map the allocated memory,
+you also need to specify one of the host access flags:
+#VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT.
+This helps the library decide on a preferred memory type and ensure it has `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`
+so you can map it.
+
+For example, a staging buffer that will be filled via a mapped pointer and then
+used as a source of transfer to the buffer described previously can be created like this.
+It will likely end up in a memory type that is `HOST_VISIBLE` and `HOST_COHERENT`
+but not `HOST_CACHED` (meaning uncached, write-combined) and not `DEVICE_LOCAL` (meaning system RAM).
+
+\code
+VkBufferCreateInfo stagingBufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+stagingBufferInfo.size = 65536;
+stagingBufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+VmaAllocationCreateInfo stagingAllocInfo = {};
+stagingAllocInfo.usage = VMA_MEMORY_USAGE_AUTO;
+stagingAllocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;
+
+VkBuffer stagingBuffer;
+VmaAllocation stagingAllocation;
+vmaCreateBuffer(allocator, &stagingBufferInfo, &stagingAllocInfo, &stagingBuffer, &stagingAllocation, nullptr);
+\endcode
+
+For more examples of creating different kinds of resources, see chapter \ref usage_patterns.
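+
+For the opposite direction, reading data back from the GPU, #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT
+is the appropriate host access flag, as it steers the allocation towards a memory type that is
+efficient for CPU reads (preferably `HOST_CACHED`). A minimal sketch of a readback buffer follows;
+recording the actual copy into it is assumed to happen elsewhere.
+
+\code
+VkBufferCreateInfo readbackBufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+readbackBufferInfo.size = 65536;
+readbackBufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo readbackAllocInfo = {};
+readbackAllocInfo.usage = VMA_MEMORY_USAGE_AUTO;
+// RANDOM (not SEQUENTIAL_WRITE) host access, because the CPU will read the data back.
+readbackAllocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT;
+
+VkBuffer readbackBuffer;
+VmaAllocation readbackAllocation;
+vmaCreateBuffer(allocator, &readbackBufferInfo, &readbackAllocInfo, &readbackBuffer, &readbackAllocation, nullptr);
+\endcode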
+
+Usage values `VMA_MEMORY_USAGE_AUTO*` are legal to use only when the library knows
+about the resource being created by having `VkBufferCreateInfo` / `VkImageCreateInfo` passed,
+so they work with functions like vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo() etc.
+If you allocate raw memory using the function vmaAllocateMemory(), you have to use other means of selecting
+a memory type, as described below.
+
+\note
+Old usage values (`VMA_MEMORY_USAGE_GPU_ONLY`, `VMA_MEMORY_USAGE_CPU_ONLY`,
+`VMA_MEMORY_USAGE_CPU_TO_GPU`, `VMA_MEMORY_USAGE_GPU_TO_CPU`, `VMA_MEMORY_USAGE_CPU_COPY`)
+are still available and work the same way as in previous versions of the library,
+for backward compatibility, but they are not recommended.
+
+\section choosing_memory_type_required_preferred_flags Required and preferred flags
+
+You can specify more detailed requirements by filling the members
+VmaAllocationCreateInfo::requiredFlags and VmaAllocationCreateInfo::preferredFlags
+with a combination of bits from the enum `VkMemoryPropertyFlags`. For example,
+if you want to create a buffer that will be persistently mapped on the host (so it
+must be `HOST_VISIBLE`) and preferably will also be `HOST_COHERENT` and `HOST_CACHED`,
+use the following code:
+
+\code
+VmaAllocationCreateInfo allocInfo = {};
+allocInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+allocInfo.preferredFlags = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
+allocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT | VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+VkBuffer buffer;
+VmaAllocation allocation;
+vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
+\endcode
+
+A memory type is chosen that has all the required flags and as many preferred
+flags set as possible.
+
+The value passed in VmaAllocationCreateInfo::usage is internally converted to a set of required and preferred flags,
+plus some extra "magic" (heuristics).
+
+\section choosing_memory_type_explicit_memory_types Explicit memory types
+
+If you have inspected the memory types available on the physical device and you have
+a preference for the memory types that you want to use, you can fill the member
+VmaAllocationCreateInfo::memoryTypeBits. It is a bit mask, where each bit set
+means that a memory type with that index is allowed to be used for the
+allocation. The special value 0, just like `UINT32_MAX`, means there are no
+restrictions on the memory type index.
+
+Please note that this member is NOT just a memory type index.
+However, you can still use it to choose just one specific memory type.
+For example, if you have already determined that your buffer should be created in
+memory type 2, use the following code:
+
+\code
+uint32_t memoryTypeIndex = 2;
+
+VmaAllocationCreateInfo allocInfo = {};
+allocInfo.memoryTypeBits = 1u << memoryTypeIndex;
+
+VkBuffer buffer;
+VmaAllocation allocation;
+vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
+\endcode
+
+
+\section choosing_memory_type_custom_memory_pools Custom memory pools
+
+If you allocate from a custom memory pool, none of the ways of specifying memory
+requirements described above apply, and the aforementioned members
+of the VmaAllocationCreateInfo structure are ignored. The memory type is selected
+explicitly when creating the pool and is then used for all the allocations made from
+that pool. For further details, see \ref custom_memory_pools.
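+
+As a minimal sketch of this idea (assuming `memTypeIndex` was obtained earlier, e.g. with
+vmaFindMemoryTypeIndexForBufferInfo()), a pool can be created and used like this:
+
+\code
+VmaPoolCreateInfo poolCreateInfo = {};
+poolCreateInfo.memoryTypeIndex = memTypeIndex; // The memory type is fixed here, once for the whole pool.
+
+VmaPool pool;
+vmaCreatePool(allocator, &poolCreateInfo, &pool);
+
+VmaAllocationCreateInfo allocInfo = {};
+allocInfo.pool = pool; // usage, requiredFlags, preferredFlags and memoryTypeBits are ignored.
+
+VkBuffer buffer;
+VmaAllocation allocation;
+vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
+\endcode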
+
+\section choosing_memory_type_dedicated_allocations Dedicated allocations
+
+Memory for allocations is reserved out of a larger block of `VkDeviceMemory`
+allocated from Vulkan internally. That is the main feature of this whole library.
+You can still request a separate memory block to be created for an allocation,
+just like you would do in a trivial solution without using any allocator.
+In that case, a buffer or image is always bound to that memory at offset 0.
+This is called a "dedicated allocation".
+You can explicitly request it by using flag #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
+The library can also internally decide to use dedicated allocation in some cases, e.g.:
+
+- When the size of the allocation is large.
+- When [VK_KHR_dedicated_allocation](@ref vk_khr_dedicated_allocation) extension is enabled
+  and it reports that dedicated allocation is required or recommended for the resource.
+- When allocation of the next big memory block fails due to insufficient device memory,
+  but an allocation with the exact requested size succeeds.
+
+
+\page memory_mapping Memory mapping
+
+To "map memory" in Vulkan means to obtain a CPU pointer to `VkDeviceMemory`,
+to be able to read from it or write to it in CPU code.
+Mapping is possible only for memory allocated from a memory type that has
+the `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` flag.
+Functions `vkMapMemory()`, `vkUnmapMemory()` are designed for this purpose.
+You can use them directly with memory allocated by this library,
+but it is not recommended because of the following issue:
+Mapping the same `VkDeviceMemory` block multiple times is illegal - only one mapping at a time is allowed.
+This includes mapping disjoint regions. Mapping is not reference-counted internally by Vulkan.
+Because of this, Vulkan Memory Allocator provides the following facilities:
+
+\note If you want to be able to map an allocation, you need to specify one of the flags
+#VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT
+in VmaAllocationCreateInfo::flags. These flags are required for an allocation to be mappable
+when using #VMA_MEMORY_USAGE_AUTO or other `VMA_MEMORY_USAGE_AUTO*` enum values.
+For other usage values they are ignored and every such allocation made in a `HOST_VISIBLE` memory type is mappable,
+but they can still be used for consistency.
+
+\section memory_mapping_mapping_functions Mapping functions
+
+The library provides the following functions for mapping of a specific #VmaAllocation: vmaMapMemory(), vmaUnmapMemory().
+They are safer and more convenient to use than standard Vulkan functions.
+You can map an allocation multiple times simultaneously - mapping is reference-counted internally.
+You can also map different allocations simultaneously regardless of whether they use the same `VkDeviceMemory` block.
+The way it is implemented is that the library always maps the entire memory block, not just the region of the allocation.
+For further details, see the description of the vmaMapMemory() function.
+Example:
+
+\code
+// Having these objects initialized:
+struct ConstantBuffer
+{
+    ...
+};
+ConstantBuffer constantBufferData = ...
+
+VmaAllocator allocator = ...
+VkBuffer constantBuffer = ...
+VmaAllocation constantBufferAllocation = ...
+
+// You can map and fill your buffer using the following code:
+
+void* mappedData;
+vmaMapMemory(allocator, constantBufferAllocation, &mappedData);
+memcpy(mappedData, &constantBufferData, sizeof(constantBufferData));
+vmaUnmapMemory(allocator, constantBufferAllocation);
+\endcode
+
+When mapping, you may see a warning from the Vulkan validation layer similar to this one:
+
+Mapping an image with layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL can result in undefined behavior if this memory is used by the device. Only GENERAL or PREINITIALIZED should be used.
+
+It happens because the library maps the entire `VkDeviceMemory` block, where different
+types of images and buffers may end up together, especially on GPUs with unified memory like Intel.
+You can safely ignore it if you are sure you access only memory of the intended
+object that you wanted to map.
+
+
+\section memory_mapping_persistently_mapped_memory Persistently mapped memory
+
+Keeping your memory persistently mapped is generally OK in Vulkan.
+You don't need to unmap it before using its data on the GPU.
+The library provides a special feature designed for that:
+Allocations made with the #VMA_ALLOCATION_CREATE_MAPPED_BIT flag set in
+VmaAllocationCreateInfo::flags stay mapped all the time,
+so you can just access the CPU pointer to it at any time
+without needing to call any "map" or "unmap" function.
+Example:
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = sizeof(ConstantBuffer);
+bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
+    VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VmaAllocationInfo allocInfo;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
+
+// Buffer is already mapped. You can access its memory.
+memcpy(allocInfo.pMappedData, &constantBufferData, sizeof(constantBufferData));
+\endcode
+
+\note #VMA_ALLOCATION_CREATE_MAPPED_BIT by itself doesn't guarantee that the allocation will end up
+in a mappable memory type.
+For this, you need to also specify #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or
+#VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT.
+#VMA_ALLOCATION_CREATE_MAPPED_BIT only guarantees that if the memory is `HOST_VISIBLE`, the allocation will be mapped on creation.
+For an example of how to make use of this fact, see section \ref usage_patterns_advanced_data_uploading.
+
+\section memory_mapping_cache_control Cache flush and invalidate
+
+Memory in Vulkan doesn't need to be unmapped before using it on the GPU,
+but unless a memory type has the `VK_MEMORY_PROPERTY_HOST_COHERENT_BIT` flag set,
+you need to manually **invalidate** the cache before reading from a mapped pointer
+and **flush** the cache after writing to a mapped pointer.
+Map/unmap operations don't do that automatically.
+Vulkan provides the following functions for this purpose: `vkFlushMappedMemoryRanges()`,
+`vkInvalidateMappedMemoryRanges()`, but this library provides more convenient
+functions that refer to a given allocation object: vmaFlushAllocation(),
+vmaInvalidateAllocation(),
+or multiple objects at once: vmaFlushAllocations(), vmaInvalidateAllocations().
+
+Regions of memory specified for flush/invalidate must be aligned to
+`VkPhysicalDeviceLimits::nonCoherentAtomSize`. This is automatically ensured by the library.
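+For illustration, a flush after a write and an invalidate before a read could look
+like this - a minimal sketch, assuming `alloc` lives in a memory type that is
+`HOST_VISIBLE` but not `HOST_COHERENT` and is currently mapped:
+
+\code
+// After writing through the mapped pointer, flush the written range.
+// VK_WHOLE_SIZE covers the entire allocation.
+vmaFlushAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);
+
+// Before reading data the GPU has written, invalidate the range first.
+vmaInvalidateAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);
+\endcode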
+In any memory type that is `HOST_VISIBLE` but not `HOST_COHERENT`, all allocations
+within blocks are aligned to this value, so their offsets are always a multiple of
+`nonCoherentAtomSize` and two different allocations never share the same "line" of this size.
+
+Also, Windows drivers from all 3 PC GPU vendors (AMD, Intel, NVIDIA)
+currently provide the `HOST_COHERENT` flag on all memory types that are
+`HOST_VISIBLE`, so on PC you may not need to bother.
+
+
+\page staying_within_budget Staying within budget
+
+When developing a graphics-intensive game or program, it is important to avoid allocating
+more GPU memory than is physically available. When the memory is over-committed,
+various bad things can happen, depending on the specific GPU, graphics driver, and
+operating system:
+
+- It may just work without any problems.
+- The application may slow down because some memory blocks are moved to system RAM
+  and the GPU has to access them through the PCI Express bus.
+- A new allocation may take a very long time to complete, even a few seconds, and possibly
+  freeze the entire system.
+- The new allocation may fail with `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
+- It may even result in a GPU crash (TDR), observed as `VK_ERROR_DEVICE_LOST`
+  returned somewhere later.
+
+\section staying_within_budget_querying_for_budget Querying for budget
+
+To query for current memory usage and available budget, use function vmaGetHeapBudgets().
+The returned structure #VmaBudget contains quantities expressed in bytes, per Vulkan memory heap.
+
+Please note that this function returns different information and works faster than
+vmaCalculateStatistics(). vmaGetHeapBudgets() can be called every frame or even before every
+allocation, while vmaCalculateStatistics() is intended to be used rarely,
+only to obtain statistical information, e.g. for debugging purposes.
+
+It is recommended to use the VK_EXT_memory_budget device extension to obtain information
+about the budget from the Vulkan device. VMA is able to use this extension automatically.
+When not enabled, the allocator behaves the same way, but then it estimates current usage
+and available budget based on its internal information and Vulkan memory heap sizes,
+which may be less precise. In order to use this extension:
+
+1. Make sure extensions VK_EXT_memory_budget and VK_KHR_get_physical_device_properties2
+   required by it are available and enable them. Please note that the first is a device
+   extension and the second is an instance extension!
+2. Use flag #VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT when creating the #VmaAllocator object.
+3. Make sure to call vmaSetCurrentFrameIndex() every frame. The budget is queried from
+   Vulkan inside of it to avoid the overhead of querying it with every allocation.
+
+\section staying_within_budget_controlling_memory_usage Controlling memory usage
+
+There are many ways in which you can try to stay within the budget.
+
+First, when making a new allocation requires allocating a new memory block, the library
+tries not to exceed the budget automatically. If a block with the default recommended size
+(e.g. 256 MB) would go over budget, a smaller block is allocated, possibly even
+dedicated memory for just this resource.
+
+If the size of the requested resource plus current memory usage is more than the
+budget, by default the library still tries to create it, leaving it to the Vulkan
+implementation whether the allocation succeeds or fails. You can change this behavior
+by using the #VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT flag.
With it, the allocation is
+not made if it would exceed the budget or if the budget is already exceeded.
+VMA then tries to make the allocation from the next eligible Vulkan memory type.
+If all of them fail, the call fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
+An example usage pattern may be to pass the #VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT flag
+when creating resources that are not essential for the application (e.g. the texture
+of a specific object) and not to pass it when creating critically important resources
+(e.g. render targets).
+
+On AMD graphics cards there is a custom vendor extension available: VK_AMD_memory_overallocation_behavior,
+which allows controlling the behavior of the Vulkan implementation in out-of-memory cases -
+whether it should fail with an error code or still allow the allocation.
+Usage of this extension involves only passing an extra structure on Vulkan device creation,
+so it is out of the scope of this library.
+
+Finally, you can also use the #VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT flag to make sure
+a new allocation is created only when it fits inside one of the existing memory blocks.
+If it would require allocating a new block, it fails instead with `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
+This also ensures that the function call is very fast because it never goes to Vulkan
+to obtain a new block.
+
+\note Creating \ref custom_memory_pools with VmaPoolCreateInfo::minBlockCount
+set to more than 0 will currently try to allocate memory blocks without checking whether they
+fit within budget.
+
+
+\page resource_aliasing Resource aliasing (overlap)
+
+New explicit graphics APIs (Vulkan and Direct3D 12), thanks to manual memory
+management, give an opportunity to alias (overlap) multiple resources in the
+same region of memory - a feature not available in the old APIs (Direct3D 11, OpenGL).
+It can be useful to save video memory, but it must be used with caution.
+
+For example, if you know the flow of your whole render frame in advance, you
+are going to use some intermediate textures or buffers only during a small range of render passes,
+and you know these ranges don't overlap in time, you can bind these resources to
+the same place in memory, even if they have completely different parameters (width, height, format etc.).
+
+![Resource aliasing (overlap)](../gfx/Aliasing.png)
+
+Such a scenario is possible using VMA, but you need to create your images manually.
+Then you need to calculate parameters of an allocation to be made using the formula:
+
+- allocation size = max(size of each image)
+- allocation alignment = max(alignment of each image)
+- allocation memoryTypeBits = bitwise AND(memoryTypeBits of each image)
+
+The following example shows two different images bound to the same place in memory,
+allocated to fit the largest of them.
+
+\code
+// A 512x512 texture to be sampled.
+VkImageCreateInfo img1CreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
+img1CreateInfo.imageType = VK_IMAGE_TYPE_2D;
+img1CreateInfo.extent.width = 512;
+img1CreateInfo.extent.height = 512;
+img1CreateInfo.extent.depth = 1;
+img1CreateInfo.mipLevels = 10;
+img1CreateInfo.arrayLayers = 1;
+img1CreateInfo.format = VK_FORMAT_R8G8B8A8_SRGB;
+img1CreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
+img1CreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+img1CreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
+img1CreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
+
+// A full screen texture to be used as color attachment.
+VkImageCreateInfo img2CreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
+img2CreateInfo.imageType = VK_IMAGE_TYPE_2D;
+img2CreateInfo.extent.width = 1920;
+img2CreateInfo.extent.height = 1080;
+img2CreateInfo.extent.depth = 1;
+img2CreateInfo.mipLevels = 1;
+img2CreateInfo.arrayLayers = 1;
+img2CreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
+img2CreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
+img2CreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+img2CreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+img2CreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
+
+VkImage img1;
+VkResult res = vkCreateImage(device, &img1CreateInfo, nullptr, &img1);
+VkImage img2;
+res = vkCreateImage(device, &img2CreateInfo, nullptr, &img2);
+
+VkMemoryRequirements img1MemReq;
+vkGetImageMemoryRequirements(device, img1, &img1MemReq);
+VkMemoryRequirements img2MemReq;
+vkGetImageMemoryRequirements(device, img2, &img2MemReq);
+
+VkMemoryRequirements finalMemReq = {};
+finalMemReq.size = std::max(img1MemReq.size, img2MemReq.size);
+finalMemReq.alignment = std::max(img1MemReq.alignment, img2MemReq.alignment);
+finalMemReq.memoryTypeBits = img1MemReq.memoryTypeBits & img2MemReq.memoryTypeBits;
+// Validate that finalMemReq.memoryTypeBits != 0 before allocating.
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+
+VmaAllocation alloc;
+res = vmaAllocateMemory(allocator, &finalMemReq, &allocCreateInfo, &alloc, nullptr);
+
+res = vmaBindImageMemory(allocator, alloc, img1);
+res = vmaBindImageMemory(allocator, alloc, img2);
+
+// You can use img1, img2 here, but not at the same time!
+
+vmaFreeMemory(allocator, alloc);
+vkDestroyImage(device, img2, nullptr);
+vkDestroyImage(device, img1, nullptr);
+\endcode
+
+Remember that using resources that alias in memory requires proper synchronization.
+You need to issue a memory barrier to make sure commands that use `img1` and `img2`
+don't overlap on the GPU timeline.
+You also need to treat a resource after aliasing as uninitialized - containing garbage data.
+For example, if you use `img1` and then want to use `img2`, you need to issue
+an image memory barrier for `img2` with `oldLayout` = `VK_IMAGE_LAYOUT_UNDEFINED`.
+
+Additional considerations:
+
+- Vulkan also allows interpreting the contents of memory between aliasing resources consistently in some cases.
+  See chapter 11.8. "Memory Aliasing" of the Vulkan specification or the `VK_IMAGE_CREATE_ALIAS_BIT` flag.
+- You can create a more complex layout where different images and buffers are bound
+  at different offsets inside one large allocation. For example, one can imagine
+  a big texture used in some render passes, aliasing with a set of many small buffers
+  used in some further passes. To bind a resource at a non-zero offset in an allocation,
+  use vmaBindBufferMemory2() / vmaBindImageMemory2().
+- Before allocating memory for the resources you want to alias, check `memoryTypeBits`
+  returned in the memory requirements of each resource to make sure the bits overlap.
+  Some GPUs may expose multiple memory types suitable e.g. only for buffers or
+  images with `COLOR_ATTACHMENT` usage, so the sets of memory types supported by your
+  resources may be disjoint. Aliasing them is not possible in that case.
+
+
+\page custom_memory_pools Custom memory pools
+
+A memory pool contains a number of `VkDeviceMemory` blocks.
+The library automatically creates and manages a default pool for each memory type available on the device.
+A default memory pool automatically grows in size.
+The size of allocated blocks is also variable and managed automatically.
+
+You can create a custom pool and allocate memory out of it.
+It can be useful if you want to:
+
+- Keep certain kinds of allocations separate from others.
+- Enforce a particular, fixed size of Vulkan memory blocks.
+- Limit the maximum amount of Vulkan memory allocated for that pool.
+- Reserve a minimum or fixed amount of Vulkan memory always preallocated for that pool.
+- Use extra parameters for a set of your allocations that are available in #VmaPoolCreateInfo but not in
+  #VmaAllocationCreateInfo - e.g., custom minimum alignment, custom `pNext` chain.
+- Perform defragmentation on a specific subset of your allocations.
+
+To use custom memory pools:
+
+-# Fill VmaPoolCreateInfo structure.
+-# Call vmaCreatePool() to obtain #VmaPool handle.
+-# When making an allocation, set VmaAllocationCreateInfo::pool to this handle.
+   You don't need to specify any other parameters of this structure, like `usage`.
+
+Example:
+
+\code
+// Find memoryTypeIndex for the pool.
+VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+sampleBufCreateInfo.size = 0x10000; // Doesn't matter.
+sampleBufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo sampleAllocCreateInfo = {};
+sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+
+uint32_t memTypeIndex;
+VkResult res = vmaFindMemoryTypeIndexForBufferInfo(allocator,
+    &sampleBufCreateInfo, &sampleAllocCreateInfo, &memTypeIndex);
+// Check res...
+
+// Create a pool that can have at most 2 blocks, 128 MiB each.
+VmaPoolCreateInfo poolCreateInfo = {};
+poolCreateInfo.memoryTypeIndex = memTypeIndex;
+poolCreateInfo.blockSize = 128ull * 1024 * 1024;
+poolCreateInfo.maxBlockCount = 2;
+
+VmaPool pool;
+res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
+// Check res...
+
+// Allocate a buffer out of it.
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 1024;
+bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.pool = pool;
+
+VkBuffer buf;
+VmaAllocation alloc;
+res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
+// Check res...
+\endcode
+
+You have to free all allocations made from this pool before destroying it.
+
+\code
+vmaDestroyBuffer(allocator, buf, alloc);
+vmaDestroyPool(allocator, pool);
+\endcode
+
+New versions of this library support creating dedicated allocations in custom pools.
+It is supported only when VmaPoolCreateInfo::blockSize = 0.
+To use this feature, set VmaAllocationCreateInfo::pool to your custom pool and
+VmaAllocationCreateInfo::flags to #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
+
+\note Excessive use of custom pools is a common mistake when using this library.
+Custom pools may be useful for special purposes - when you want to
+keep certain types of resources separate, e.g. to reserve a minimum amount of memory
+for them or limit the maximum amount of memory they can occupy. For most
+resources this is not needed and so it is not recommended to create #VmaPool
+objects and allocations out of them. Allocating from the default pool is sufficient.
+
+
+\section custom_memory_pools_MemTypeIndex Choosing memory type index
+
+When creating a pool, you must explicitly specify the memory type index.
+To find the one suitable for your buffers or images, you can use helper functions
+vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo().
+You need to provide structures with example parameters of buffers or images
+that you are going to create in that pool.
+
+\code
+VkBufferCreateInfo exampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+exampleBufCreateInfo.size = 1024; // Doesn't matter
+exampleBufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+
+uint32_t memTypeIndex;
+vmaFindMemoryTypeIndexForBufferInfo(allocator, &exampleBufCreateInfo, &allocCreateInfo, &memTypeIndex);
+
+VmaPoolCreateInfo poolCreateInfo = {};
+poolCreateInfo.memoryTypeIndex = memTypeIndex;
+// ...
+\endcode
+
+When creating buffers/images allocated in that pool, provide the following parameters:
+
+- `VkBufferCreateInfo`: Prefer to pass the same parameters as above.
+  Otherwise you risk creating resources in a memory type that is not suitable for them, which may result in undefined behavior.
+  Using different `VK_BUFFER_USAGE_` flags may work, but you shouldn't create images in a pool intended for buffers
+  or the other way around.
+- VmaAllocationCreateInfo: You don't need to pass the same parameters. Fill only the `pool` member.
+  Other members are ignored anyway.
+
+\section linear_algorithm Linear allocation algorithm
+
+Each Vulkan memory block managed by this library has accompanying metadata that
+keeps track of used and unused regions. By default, the metadata structure and
+algorithm tries to find the best place for new allocations among free regions to
+optimize memory usage. This way you can allocate and free objects in any order.
+
+![Default allocation algorithm](../gfx/Linear_allocator_1_algo_default.png)
+
+Sometimes there is a need to use a simpler, linear allocation algorithm. You can
+create a custom pool that uses such an algorithm by adding flag
+#VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT to VmaPoolCreateInfo::flags while creating the
+#VmaPool object, as shown in the sketch below. Then an alternative metadata management is used. It always
+creates new allocations after the last one and doesn't reuse free regions after
+allocations freed in the middle. It results in better allocation performance and
+less memory consumed by metadata.
+
+![Linear allocation algorithm](../gfx/Linear_allocator_2_algo_linear.png)
+
+With this one flag, you can create a custom pool that can be used in many ways:
+free-at-once, stack, double stack, and ring buffer. See below for details.
+You don't need to specify explicitly which of these options you are going to use - it is detected automatically.
+
+\subsection linear_algorithm_free_at_once Free-at-once
+
+In a pool that uses the linear algorithm, you still need to free all the allocations
+individually, e.g. by using vmaFreeMemory() or vmaDestroyBuffer(). You can free
+them in any order. New allocations are always made after the last one - free space
+in the middle is not reused. However, when you release all the allocations and
+the pool becomes empty, allocation starts from the beginning again. This way you
+can use the linear algorithm to speed up creation of allocations that you are going
+to release all at once.
+
+![Free-at-once](../gfx/Linear_allocator_3_free_at_once.png)
+
+This mode is also available for pools created with VmaPoolCreateInfo::maxBlockCount
+value that allows multiple memory blocks.
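+For illustration, creating such a pool could look like this - a minimal sketch, assuming
+`memTypeIndex` was found with the helper functions shown earlier and the block size is arbitrary:
+
+\code
+VmaPoolCreateInfo linearPoolCreateInfo = {};
+linearPoolCreateInfo.memoryTypeIndex = memTypeIndex;
+linearPoolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
+linearPoolCreateInfo.blockSize = 64ull * 1024 * 1024; // One 64 MiB block.
+linearPoolCreateInfo.maxBlockCount = 1; // Required for the double stack and ring buffer modes below.
+
+VmaPool linearPool;
+VkResult res = vmaCreatePool(allocator, &linearPoolCreateInfo, &linearPool);
+// Check res...
+\endcode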
+
+\subsection linear_algorithm_stack Stack
+
+When you free an allocation that was created last, its space can be reused.
+Thanks to this, if you always release allocations in the order opposite to their
+creation (LIFO - Last In First Out), you can achieve the behavior of a stack.
+
+![Stack](../gfx/Linear_allocator_4_stack.png)
+
+This mode is also available for pools created with VmaPoolCreateInfo::maxBlockCount
+value that allows multiple memory blocks.
+
+\subsection linear_algorithm_double_stack Double stack
+
+The space reserved by a custom pool with the linear algorithm may be used by two
+stacks:
+
+- First, default one, growing up from offset 0.
+- Second, "upper" one, growing down from the end towards lower offsets.
+
+To make an allocation from the upper stack, add flag #VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT
+to VmaAllocationCreateInfo::flags.
+
+![Double stack](../gfx/Linear_allocator_7_double_stack.png)
+
+Double stack is available only in pools with one memory block -
+VmaPoolCreateInfo::maxBlockCount must be 1. Otherwise behavior is undefined.
+
+When the two stacks' ends meet so there is not enough space between them for a
+new allocation, such an allocation fails with the usual
+`VK_ERROR_OUT_OF_DEVICE_MEMORY` error.
+
+\subsection linear_algorithm_ring_buffer Ring buffer
+
+When you free some allocations from the beginning and there is not enough free space
+for a new one at the end of a pool, the allocator's "cursor" wraps around to the
+beginning and starts allocating there. Thanks to this, if you always release
+allocations in the same order as you created them (FIFO - First In First Out),
+you can achieve the behavior of a ring buffer / queue.
+
+![Ring buffer](../gfx/Linear_allocator_5_ring_buffer.png)
+
+Ring buffer is available only in pools with one memory block -
+VmaPoolCreateInfo::maxBlockCount must be 1. Otherwise behavior is undefined.
+
+\note \ref defragmentation is not supported in custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT.
+
+
+\page defragmentation Defragmentation
+
+Interleaved allocations and deallocations of many objects of varying size can
+cause fragmentation over time, which can lead to a situation where the library is unable
+to find a continuous range of free memory for a new allocation even though there is
+enough free space in total, just scattered across many small free ranges between existing
+allocations.
+
+To mitigate this problem, you can use the defragmentation feature.
+It doesn't happen automatically though and needs your cooperation,
+because VMA is a low level library that only allocates memory.
+It cannot recreate buffers and images in a new place as it doesn't remember the contents of `VkBufferCreateInfo` / `VkImageCreateInfo` structures.
+It cannot copy their contents as it doesn't record any commands to a command buffer.
+
+Example:
+
+\code
+VmaDefragmentationInfo defragInfo = {};
+defragInfo.pool = myPool;
+defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT;
+
+VmaDefragmentationContext defragCtx;
+VkResult res = vmaBeginDefragmentation(allocator, &defragInfo, &defragCtx);
+// Check res...
+
+for(;;)
+{
+    VmaDefragmentationPassMoveInfo pass;
+    res = vmaBeginDefragmentationPass(allocator, defragCtx, &pass);
+    if(res == VK_SUCCESS)
+        break;
+    else if(res != VK_INCOMPLETE)
+    {
+        // Handle error...
+    }
+
+    for(uint32_t i = 0; i < pass.moveCount; ++i)
+    {
+        // Inspect pass.pMoves[i].srcAllocation, identify what buffer/image it represents.
+        VmaAllocationInfo allocInfo;
+        vmaGetAllocationInfo(allocator, pass.pMoves[i].srcAllocation, &allocInfo);
+        MyEngineResourceData* resData = (MyEngineResourceData*)allocInfo.pUserData;
+
+        // Recreate and bind this buffer/image at: pass.pMoves[i].dstMemory, pass.pMoves[i].dstOffset.
+        VkImageCreateInfo imgCreateInfo = ...
+        VkImage newImg;
+        res = vkCreateImage(device, &imgCreateInfo, nullptr, &newImg);
+        // Check res...
+        res = vmaBindImageMemory(allocator, pass.pMoves[i].dstTmpAllocation, newImg);
+        // Check res...
+
+        // Issue a vkCmdCopyBuffer/vkCmdCopyImage to copy its content to the new place.
+        vkCmdCopyImage(cmdBuf, resData->img, ..., newImg, ...);
+    }
+
+    // Make sure the copy commands finished executing.
+    vkWaitForFences(...);
+
+    // Destroy old buffers/images bound with pass.pMoves[i].srcAllocation.
+    for(uint32_t i = 0; i < pass.moveCount; ++i)
+    {
+        // ...
+        vkDestroyImage(device, resData->img, nullptr);
+    }
+
+    // Update appropriate descriptors to point to the new places...
+
+    res = vmaEndDefragmentationPass(allocator, defragCtx, &pass);
+    if(res == VK_SUCCESS)
+        break;
+    else if(res != VK_INCOMPLETE)
+    {
+        // Handle error...
+    }
+}
+
+vmaEndDefragmentation(allocator, defragCtx, nullptr);
+\endcode
+
+Although functions like vmaCreateBuffer(), vmaCreateImage(), vmaDestroyBuffer(), vmaDestroyImage()
+create/destroy an allocation and a buffer/image at once, these are just a shortcut for
+creating the resource, allocating memory, and binding them together.
+Defragmentation works on memory allocations only. You must handle the rest manually.
+Defragmentation is an iterative process that should repeat "passes" as long as the related functions
+return `VK_INCOMPLETE`, not `VK_SUCCESS`.
+In each pass:
+
+1. vmaBeginDefragmentationPass() function call:
+   - Calculates and returns the list of allocations to be moved in this pass.
+     Note this can be a time-consuming process.
+   - Reserves destination memory for them by creating temporary destination allocations
+     that you can query for their `VkDeviceMemory` + offset using vmaGetAllocationInfo().
+2. Inside the pass, **you should**:
+   - Inspect the returned list of allocations to be moved.
+   - Create new buffers/images and bind them at the returned destination temporary allocations.
+   - Copy data from source to destination resources if necessary.
+   - Destroy the source buffers/images, but NOT their allocations.
+3. vmaEndDefragmentationPass() function call:
+   - Frees the source memory reserved for the allocations that are moved.
+   - Modifies source #VmaAllocation objects that are moved to point to the destination reserved memory.
+   - Frees `VkDeviceMemory` blocks that became empty.
+
+Unlike in previous iterations of the defragmentation API, there is no list of "movable" allocations passed as a parameter.
+The defragmentation algorithm tries to move all suitable allocations.
+You can, however, refuse to move some of them inside a defragmentation pass, by setting
+`pass.pMoves[i].operation` to #VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE.
+This is not recommended and may result in suboptimal packing of the allocations after defragmentation.
+If you cannot ensure any allocation can be moved, it is better to keep movable allocations separate in a custom pool.
+
+Inside a pass, for each allocation that should be moved:
+
+- You should copy its data from the source to the destination place by calling e.g. `vkCmdCopyBuffer()`, `vkCmdCopyImage()`.
+  - You need to make sure these commands finished executing before destroying the source buffers/images and before calling vmaEndDefragmentationPass().
+- If a resource doesn't contain any meaningful data, e.g. it is a transient color attachment image to be cleared,
+  filled, and used temporarily in each rendering frame, you can just recreate this image
+  without copying its data.
+- If the resource is in `HOST_VISIBLE` and `HOST_CACHED` memory, you can copy its data on the CPU
+  using `memcpy()`.
+- If you cannot move the allocation, you can set `pass.pMoves[i].operation` to #VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE.
+  This will cancel the move.
+  - vmaEndDefragmentationPass() will then free the destination memory,
+    not the source memory of the allocation, leaving it unchanged.
+- If you decide the allocation is unimportant and can be destroyed instead of moved (e.g. it wasn't used for a long time),
+  you can set `pass.pMoves[i].operation` to #VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY.
+  - vmaEndDefragmentationPass() will then free both source and destination memory, and will destroy the source #VmaAllocation object.
+
+You can defragment a specific custom pool by setting VmaDefragmentationInfo::pool
+(like in the example above) or all the default pools by setting this member to null.
+
+Defragmentation is always performed in each pool separately.
+Allocations are never moved between different Vulkan memory types.
+The size of the destination memory reserved for a moved allocation is the same as the original one.
+Alignment of an allocation as it was determined using `vkGetBufferMemoryRequirements()` etc. is also respected after defragmentation.
+Buffers/images should be recreated with the same `VkBufferCreateInfo` / `VkImageCreateInfo` parameters as the original ones.
+
+You can perform the defragmentation incrementally to limit the number of allocations and bytes to be moved
+in each pass, e.g. to call it in sync with render frames and not experience too big hitches.
+See members: VmaDefragmentationInfo::maxBytesPerPass, VmaDefragmentationInfo::maxAllocationsPerPass.
+
+It is also safe to perform the defragmentation asynchronously to render frames and other Vulkan and VMA
+usage, possibly from multiple threads, with the exception that allocations
+returned in VmaDefragmentationPassMoveInfo::pMoves shouldn't be destroyed until the defragmentation pass is ended.
+
+Mapping is preserved on allocations that are moved during defragmentation.
+Whether mapped through #VMA_ALLOCATION_CREATE_MAPPED_BIT or vmaMapMemory(), the allocations
+are mapped at their new place. Of course, the pointer to the mapped data changes, so it needs to be queried
+using VmaAllocationInfo::pMappedData.
+
+\note Defragmentation is not supported in custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT.
+
+
+\page statistics Statistics
+
+This library contains several functions that return information about its internal state,
+especially the amount of memory allocated from Vulkan.
+
+\section statistics_numeric_statistics Numeric statistics
+
+If you need to obtain basic statistics about memory usage per heap, together with current budget,
+you can call function vmaGetHeapBudgets() and inspect structure #VmaBudget.
+This is useful to keep track of memory usage and stay within budget
+(see also \ref staying_within_budget).
+Example:
+
+\code
+uint32_t heapIndex = ...
+
+VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
+vmaGetHeapBudgets(allocator, budgets);
+
+printf("My heap currently has %u allocations taking %llu B,\n",
+    budgets[heapIndex].statistics.allocationCount,
+    budgets[heapIndex].statistics.allocationBytes);
+printf("allocated out of %u Vulkan device memory blocks taking %llu B,\n",
+    budgets[heapIndex].statistics.blockCount,
+    budgets[heapIndex].statistics.blockBytes);
+printf("Vulkan reports total usage %llu B with budget %llu B.\n",
+    budgets[heapIndex].usage,
+    budgets[heapIndex].budget);
+\endcode
+
+You can query for more detailed statistics per memory heap, type, and totals,
+including minimum and maximum allocation size and unused range size,
+by calling function vmaCalculateStatistics() and inspecting structure #VmaTotalStatistics.
+This function is slower though, as it has to traverse all the internal data structures,
+so it should be used only for debugging purposes.
+
+You can query for statistics of a custom pool using function vmaGetPoolStatistics()
+or vmaCalculatePoolStatistics().
+
+You can query for information about a specific allocation using function vmaGetAllocationInfo().
+It fills the structure #VmaAllocationInfo.
+
+\section statistics_json_dump JSON dump
+
+You can dump the internal state of the allocator to a string in JSON format using function vmaBuildStatsString().
+The result is guaranteed to be correct JSON.
+It uses ANSI encoding.
+Any strings provided by the user (see [Allocation names](@ref allocation_names))
+are copied as-is and properly escaped for JSON, so if they use UTF-8, ISO-8859-2 or any other encoding,
+this JSON string can be treated as using this encoding.
+It must be freed using function vmaFreeStatsString().
+
+The format of this JSON string is not part of the official documentation of the library,
+but it will not change in a backward-incompatible way without increasing the library major version number
+and an appropriate mention in the changelog.
+
+The JSON string contains all the data that can be obtained using vmaCalculateStatistics().
+It can also contain a detailed map of allocated memory blocks and their regions -
+free and occupied by allocations.
+This allows e.g. to visualize the memory or assess fragmentation.
+
+
+\page allocation_annotation Allocation names and user data
+
+\section allocation_user_data Allocation user data
+
+You can annotate allocations with your own information, e.g. for debugging purposes.
+To do that, fill the VmaAllocationCreateInfo::pUserData field when creating
+an allocation. It is an opaque `void*` pointer. You can use it e.g. as a pointer,
+some handle, index, key, ordinal number or any other value that would associate
+the allocation with your custom metadata.
+It is useful to identify appropriate data structures in your engine given a #VmaAllocation,
+e.g. when doing \ref defragmentation.
+
+\code
+VkBufferCreateInfo bufCreateInfo = ...
+
+MyBufferMetadata* pMetadata = CreateBufferMetadata();
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+allocCreateInfo.pUserData = pMetadata;
+
+VkBuffer buffer;
+VmaAllocation allocation;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buffer, &allocation, nullptr);
+\endcode
+
+The pointer may be later retrieved as VmaAllocationInfo::pUserData:
+
+\code
+VmaAllocationInfo allocInfo;
+vmaGetAllocationInfo(allocator, allocation, &allocInfo);
+MyBufferMetadata* pMetadata = (MyBufferMetadata*)allocInfo.pUserData;
+\endcode
+
+It can also be changed using function vmaSetAllocationUserData().
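+For example - a minimal sketch, where `pNewMetadata` is a hypothetical pointer to your
+updated engine-side data:
+
+\code
+// pNewMetadata is assumed to be a void* owned by your engine.
+vmaSetAllocationUserData(allocator, allocation, pNewMetadata);
+\endcode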
+
+Values of (non-zero) allocations' `pUserData` are printed in the JSON report created by
+vmaBuildStatsString() in hexadecimal form.
+
+\section allocation_names Allocation names
+
+An allocation can also carry a null-terminated string, giving a name to the allocation.
+To set it, call vmaSetAllocationName().
+The library creates an internal copy of the string, so the pointer you pass doesn't need
+to be valid for the whole lifetime of the allocation. You can free it after the call.
+
+\code
+std::string imageName = "Texture: ";
+imageName += fileName;
+vmaSetAllocationName(allocator, allocation, imageName.c_str());
+\endcode
+
+The string can be later retrieved by inspecting VmaAllocationInfo::pName.
+It is also printed in the JSON report created by vmaBuildStatsString().
+
+\note Setting a string name on a VMA allocation doesn't automatically set it on the Vulkan buffer or image created with it.
+You must do it manually using an extension like VK_EXT_debug_utils, which is independent of this library.
+
+
+\page virtual_allocator Virtual allocator
+
+As an extra feature, the core allocation algorithm of the library is exposed through a simple and convenient API of a "virtual allocator".
+It doesn't allocate any real GPU memory. It just keeps track of used and free regions of a "virtual block".
+You can use it to allocate your own memory or other objects, even completely unrelated to Vulkan.
+A common use case is sub-allocation of pieces of one large GPU buffer.
+
+\section virtual_allocator_creating_virtual_block Creating virtual block
+
+To use this functionality, there is no main "allocator" object.
+You don't need to have a #VmaAllocator object created.
+All you need to do is to create a separate #VmaVirtualBlock object for each block of memory you want to be managed by the allocator:
+
+-# Fill in the #VmaVirtualBlockCreateInfo structure.
+-# Call vmaCreateVirtualBlock() to get the new #VmaVirtualBlock object.
+
+Example:
+
+\code
+VmaVirtualBlockCreateInfo blockCreateInfo = {};
+blockCreateInfo.size = 1048576; // 1 MB
+
+VmaVirtualBlock block;
+VkResult res = vmaCreateVirtualBlock(&blockCreateInfo, &block);
+\endcode
+
+\section virtual_allocator_making_virtual_allocations Making virtual allocations
+
+A #VmaVirtualBlock object contains an internal data structure that keeps track of free and occupied regions
+using the same code as the main Vulkan memory allocator.
+Similarly to #VmaAllocation for standard GPU allocations, there is a #VmaVirtualAllocation type
+that represents an opaque handle to an allocation within the virtual block.
+
+In order to make such an allocation:
+
+-# Fill in the #VmaVirtualAllocationCreateInfo structure.
+-# Call vmaVirtualAllocate() to get the new #VmaVirtualAllocation object that represents the allocation.
+   You can also receive the `VkDeviceSize offset` that was assigned to the allocation.
+
+Example:
+
+\code
+VmaVirtualAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.size = 4096; // 4 KB
+
+VmaVirtualAllocation alloc;
+VkDeviceSize offset;
+res = vmaVirtualAllocate(block, &allocCreateInfo, &alloc, &offset);
+if(res == VK_SUCCESS)
+{
+    // Use the 4 KB of your memory starting at offset.
+}
+else
+{
+    // Allocation failed - no space for it could be found. Handle this error!
+}
+\endcode
+
+\section virtual_allocator_deallocation Deallocation
+
+When no longer needed, an allocation can be freed by calling vmaVirtualFree().
+You can only pass to this function an allocation that was previously returned by vmaVirtualAllocate()
+called for the same #VmaVirtualBlock.
+
+When the whole block is no longer needed, the block object can be released by calling vmaDestroyVirtualBlock().
+All allocations must be freed before the block is destroyed, which is checked internally by an assert.
+However, if you don't want to call vmaVirtualFree() for each allocation, you can use vmaClearVirtualBlock() to free them all at once -
+a feature not available in the normal Vulkan memory allocator. Example:
+
+\code
+vmaVirtualFree(block, alloc);
+vmaDestroyVirtualBlock(block);
+\endcode
+
+\section virtual_allocator_allocation_parameters Allocation parameters
+
+You can attach a custom pointer to each allocation by using vmaSetVirtualAllocationUserData().
+Its default value is null.
+It can be used to store any data that needs to be associated with that allocation - e.g. an index, a handle, or a pointer to some
+larger data structure containing more information. Example:
+
+\code
+struct CustomAllocData
+{
+    std::string m_AllocName;
+};
+CustomAllocData* allocData = new CustomAllocData();
+allocData->m_AllocName = "My allocation 1";
+vmaSetVirtualAllocationUserData(block, alloc, allocData);
+\endcode
+
+The pointer can later be fetched, along with allocation offset and size, by passing the allocation handle to function
+vmaGetVirtualAllocationInfo() and inspecting the returned structure #VmaVirtualAllocationInfo.
+If you allocated a new object to be used as the custom pointer, don't forget to delete that object before freeing the allocation!
+Example:
+
+\code
+VmaVirtualAllocationInfo allocInfo;
+vmaGetVirtualAllocationInfo(block, alloc, &allocInfo);
+delete (CustomAllocData*)allocInfo.pUserData;
+
+vmaVirtualFree(block, alloc);
+\endcode
+
+\section virtual_allocator_alignment_and_units Alignment and units
+
+It feels natural to express sizes and offsets in bytes.
+If an offset of an allocation needs to be aligned to a multiple of some number (e.g. 4 bytes), you can fill optional member
+VmaVirtualAllocationCreateInfo::alignment to request it. Example:
+
+\code
+VmaVirtualAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.size = 4096; // 4 KB
+allocCreateInfo.alignment = 4; // Returned offset must be a multiple of 4 B
+
+VmaVirtualAllocation alloc;
+res = vmaVirtualAllocate(block, &allocCreateInfo, &alloc, nullptr);
+\endcode
+
+Alignments of different allocations made from one block may vary.
+However, if all alignments and sizes are always a multiple of some basic size, e.g. 4 B or `sizeof(MyDataStruct)`,
+you can express all sizes, alignments, and offsets in multiples of that size instead of individual bytes.
+It might be more convenient, but you need to make sure to use this new unit consistently in all the places:
+
+- VmaVirtualBlockCreateInfo::size
+- VmaVirtualAllocationCreateInfo::size and VmaVirtualAllocationCreateInfo::alignment
+- Using offset returned by vmaVirtualAllocate() or in VmaVirtualAllocationInfo::offset
+
+\section virtual_allocator_statistics Statistics
+
+You can obtain statistics of a virtual block using vmaGetVirtualBlockStatistics()
+(to get brief statistics that are fast to calculate)
+or vmaCalculateVirtualBlockStatistics() (to get more detailed statistics, slower to calculate).
+The functions fill structures #VmaStatistics, #VmaDetailedStatistics respectively - the same as used by the normal Vulkan memory allocator.
+Example:
+
+\code
+VmaStatistics stats;
+vmaGetVirtualBlockStatistics(block, &stats);
+printf("My virtual block has %llu bytes used by %u virtual allocations\n",
+    stats.allocationBytes, stats.allocationCount);
+\endcode
+
+You can also request a full list of allocations and free regions as a string in JSON format by calling
+vmaBuildVirtualBlockStatsString().
+The returned string must be later freed using vmaFreeVirtualBlockStatsString().
+The format of this string differs from the one returned by the main Vulkan allocator, but it is similar.
+
+\section virtual_allocator_additional_considerations Additional considerations
+
+The "virtual allocator" functionality is implemented on the level of individual memory blocks.
+Keeping track of a whole collection of blocks, allocating new ones when out of free space,
+deleting empty ones, and deciding which one to try first for a new allocation must be implemented by the user.
+
+Alternative allocation algorithms are supported, just like in custom pools of the real GPU memory.
+See enum #VmaVirtualBlockCreateFlagBits to learn how to specify them (e.g. #VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT).
+You can find their description in chapter \ref custom_memory_pools.
+Allocation strategies are also supported.
+See enum #VmaVirtualAllocationCreateFlagBits to learn how to specify them (e.g. #VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT).
+
+The following features are supported only by the allocator of the real GPU memory and not by virtual allocations:
+buffer-image granularity, `VMA_DEBUG_MARGIN`, `VMA_MIN_ALIGNMENT`.
+
+
+\page debugging_memory_usage Debugging incorrect memory usage
+
+If you suspect a bug with memory usage, like usage of uninitialized memory or
+memory being overwritten out of bounds of an allocation,
+you can use debug features of this library to verify this.
+
+\section debugging_memory_usage_initialization Memory initialization
+
+If you experience a bug with incorrect and nondeterministic data in your program and you suspect uninitialized memory to be used,
+you can enable automatic memory initialization to verify this.
+To do it, define macro `VMA_DEBUG_INITIALIZE_ALLOCATIONS` to 1.
+
+\code
+#define VMA_DEBUG_INITIALIZE_ALLOCATIONS 1
+#include "vk_mem_alloc.h"
+\endcode
+
+It makes the memory of new allocations initialized to bit pattern `0xDCDCDCDC`.
+Before an allocation is destroyed, its memory is filled with bit pattern `0xEFEFEFEF`.
+Memory is automatically mapped and unmapped if necessary.
+
+If you find these values while debugging your program, chances are good that you incorrectly
+read Vulkan memory that is allocated but not initialized, or already freed, respectively.
+
+Memory initialization works only with memory types that are `HOST_VISIBLE` and with allocations that can be mapped.
+It also works with dedicated allocations.
+
+\section debugging_memory_usage_margins Margins
+
+By default, allocations are laid out in memory blocks next to each other if possible
+(considering required alignment, `bufferImageGranularity`, and `nonCoherentAtomSize`).
+
+![Allocations without margin](../gfx/Margins_1.png)
+
+Define macro `VMA_DEBUG_MARGIN` to some non-zero value (e.g. 16) to enforce the specified
+number of bytes as a margin after every allocation.
+
+\code
+#define VMA_DEBUG_MARGIN 16
+#include "vk_mem_alloc.h"
+\endcode
+
+![Allocations with margin](../gfx/Margins_2.png)
+
+If your bug goes away after enabling margins, it means it may be caused by memory
+being overwritten outside of allocation boundaries.
It is not 100% certain though.
+A change in application behavior may also be caused by a different order and distribution
+of allocations across memory blocks after margins are applied.
+
+Margins work with all types of memory.
+
+Margin is applied only to allocations made out of memory blocks and not to dedicated
+allocations, which have their own memory block of specific size.
+It is thus not applied to allocations made using the #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT flag
+or those automatically decided to be put into dedicated allocations, e.g. due to their
+large size, or as recommended by the VK_KHR_dedicated_allocation extension.
+
+Margins appear in a [JSON dump](@ref statistics_json_dump) as part of free space.
+
+Note that enabling margins increases memory usage and fragmentation.
+
+Margins do not apply to the \ref virtual_allocator.
+
+\section debugging_memory_usage_corruption_detection Corruption detection
+
+You can additionally define macro `VMA_DEBUG_DETECT_CORRUPTION` to 1 to enable validation
+of contents of the margins.
+
+\code
+#define VMA_DEBUG_MARGIN 16
+#define VMA_DEBUG_DETECT_CORRUPTION 1
+#include "vk_mem_alloc.h"
+\endcode
+
+When this feature is enabled, the number of bytes specified as `VMA_DEBUG_MARGIN`
+(it must be a multiple of 4) after every allocation is filled with a magic number.
+This idea is also known as a "canary".
+Memory is automatically mapped and unmapped if necessary.
+
+This number is validated automatically when the allocation is destroyed.
+If it is not equal to the expected value, `VMA_ASSERT()` is executed.
+It clearly means that either the CPU or the GPU overwrote the memory outside of the boundaries of the allocation,
+which indicates a serious bug.
+
+You can also explicitly request checking margins of all allocations in all memory blocks
+that belong to specified memory types by using function vmaCheckCorruption(),
+or in memory blocks that belong to a specified custom pool, by using function
+vmaCheckPoolCorruption().
+
+Margin validation (corruption detection) works only for memory types that are
+`HOST_VISIBLE` and `HOST_COHERENT`.
+
+
+\page opengl_interop OpenGL Interop
+
+VMA provides some features that help with interoperability with OpenGL.
+
+\section opengl_interop_exporting_memory Exporting memory
+
+If you want to attach a `VkExportMemoryAllocateInfoKHR` structure to the `pNext` chain of memory allocations made by the library:
+
+It is recommended to create \ref custom_memory_pools for such allocations.
+Define and fill in your `VkExportMemoryAllocateInfoKHR` structure and attach it to VmaPoolCreateInfo::pMemoryAllocateNext
+while creating the custom pool.
+Please note that the structure must remain alive and unchanged for the whole lifetime of the #VmaPool,
+not only while creating it, as no copy of the structure is made,
+but its original pointer is used for each allocation instead.
+
+If you want to export all memory allocated by the library from certain memory types,
+including dedicated allocations or other allocations made from default pools,
+an alternative solution is to fill in VmaAllocatorCreateInfo::pTypeExternalMemoryHandleTypes.
+It should point to an array with `VkExternalMemoryHandleTypeFlagsKHR` to be automatically passed by the library
+through `VkExportMemoryAllocateInfoKHR` on each allocation made from a specific memory type.
+Please note that new versions of the library also support dedicated allocations created in custom pools.
+
+You should not mix these two methods in a way that would apply both to the same memory type.
+Otherwise, the `VkExportMemoryAllocateInfoKHR` structure would be attached twice to the `pNext` chain of `VkMemoryAllocateInfo`.
+
+
+\section opengl_interop_custom_alignment Custom alignment
+
+Buffers or images exported to a different API like OpenGL may require a different alignment,
+higher than the one used by the library automatically, queried from functions like `vkGetBufferMemoryRequirements`.
+To impose such alignment:
+
+It is recommended to create \ref custom_memory_pools for such allocations.
+Set the VmaPoolCreateInfo::minAllocationAlignment member to the minimum alignment required for each allocation
+to be made out of this pool.
+The alignment actually used will be the maximum of this member and the alignment returned for the specific buffer or image
+from a function like `vkGetBufferMemoryRequirements`, which is called by VMA automatically.
+
+If you want to create a buffer with a specific minimum alignment out of default pools,
+use special function vmaCreateBufferWithAlignment(), which takes an additional parameter `minAlignment`.
+
+Note that the problem of alignment affects only resources placed inside bigger `VkDeviceMemory` blocks and not dedicated
+allocations, as these, by definition, always have alignment = 0 because the resource is bound to the beginning of its dedicated block.
+Contrary to Direct3D 12, Vulkan doesn't have a concept of alignment of the entire memory block passed on its allocation.
+
+
+\page usage_patterns Recommended usage patterns
+
+Vulkan gives great flexibility in memory allocation.
+This chapter shows the most common patterns.
+
+See also slides from talk:
+[Sawicki, Adam. Advanced Graphics Techniques Tutorial: Memory management in Vulkan and DX12. Game Developers Conference, 2018](https://www.gdcvault.com/play/1025458/Advanced-Graphics-Techniques-Tutorial-New)
+
+
+\section usage_patterns_gpu_only GPU-only resource
+
+When:
+Any resources that you frequently write and read on the GPU,
+e.g. images used as color attachments (aka "render targets"), depth-stencil attachments,
+images/buffers used as storage image/buffer (aka "Unordered Access View (UAV)").
+
+What to do:
+Let the library select the optimal memory type, which will likely have `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`.
+
+\code
+VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
+imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
+imgCreateInfo.extent.width = 3840;
+imgCreateInfo.extent.height = 2160;
+imgCreateInfo.extent.depth = 1;
+imgCreateInfo.mipLevels = 1;
+imgCreateInfo.arrayLayers = 1;
+imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
+imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
+imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
+allocCreateInfo.priority = 1.0f;
+
+VkImage img;
+VmaAllocation alloc;
+vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, nullptr);
+\endcode
+
+Also consider:
+Consider creating them as dedicated allocations using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT,
+especially if they are large or if you plan to destroy and recreate them with different sizes,
+e.g. when display resolution changes.
+Prefer to create such resources first and all other GPU resources (like textures and vertex buffers) later.
+When the VK_EXT_memory_priority extension is enabled, it is also worth setting a high priority for such an allocation
+to decrease the chances of it being evicted to system memory by the operating system.
+
+\section usage_patterns_staging_copy_upload Staging copy for upload
+
+When:
+A "staging" buffer that you want to map and fill from CPU code, then use as a source of transfer
+to some GPU resource.
+
+What to do:
+Use flag #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT.
+Let the library select the optimal memory type, which will always have `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`.
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
+    VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VmaAllocationInfo allocInfo;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
+
+...
+
+memcpy(allocInfo.pMappedData, myData, myDataSize);
+\endcode
+
+Also consider:
+You can map the allocation using vmaMapMemory() or you can create it as persistently mapped
+using #VMA_ALLOCATION_CREATE_MAPPED_BIT, as in the example above.
+
+
+\section usage_patterns_readback Readback
+
+When:
+Buffers for data written by or transferred from the GPU that you want to read back on the CPU,
+e.g. results of some computations.
+
+What to do:
+Use flag #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT.
+Let the library select the optimal memory type, which will always have `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`
+and `VK_MEMORY_PROPERTY_HOST_CACHED_BIT`.
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT |
+    VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VmaAllocationInfo allocInfo;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
+
+...
+
+const float* downloadedData = (const float*)allocInfo.pMappedData;
+\endcode
+
+
+\section usage_patterns_advanced_data_uploading Advanced data uploading
+
+For resources that you frequently write on the CPU via a mapped pointer and
+frequently read on the GPU e.g. as a uniform buffer (also called "dynamic"), multiple options are possible:
+
+-# The easiest solution is to have one copy of the resource in `HOST_VISIBLE` memory,
+   even if it means system RAM (not `DEVICE_LOCAL`) on systems with a discrete graphics card,
+   and make the device reach out to that resource directly.
+   - Reads performed by the device will then go through the PCI Express bus.
+     The performance of this access may be limited, but it may be fine depending on the size
+     of this resource (whether it is small enough to quickly end up in GPU cache) and the sparsity
+     of access.
+-# On systems with unified memory (e.g. AMD APU or Intel integrated graphics, mobile chips),
+   a memory type may be available that is both `HOST_VISIBLE` (available for mapping) and `DEVICE_LOCAL`
+   (fast to access from the GPU). Then, it is likely the best choice for such a type of resource.
+
+
+\section usage_patterns_advanced_data_uploading Advanced data uploading
+
+For resources that you frequently write on CPU via mapped pointer and
+frequently read on GPU e.g. as a uniform buffer (also called "dynamic"), multiple options are possible:
+
+-# The easiest solution is to have one copy of the resource in `HOST_VISIBLE` memory,
+   even if it means system RAM (not `DEVICE_LOCAL`) on systems with a discrete graphics card,
+   and make the device reach out to that resource directly.
+   - Reads performed by the device will then go through the PCI Express bus.
+     The performance of this access may be limited, but it may be fine depending on the size
+     of this resource (whether it is small enough to quickly end up in GPU cache) and the sparsity
+     of access.
+-# On systems with unified memory (e.g. AMD APU or Intel integrated graphics, mobile chips),
+   a memory type may be available that is both `HOST_VISIBLE` (available for mapping) and `DEVICE_LOCAL`
+   (fast to access from the GPU). Then, it is likely the best choice for this type of resource.
+-# Systems with a discrete graphics card and separate video memory may or may not expose
+   a memory type that is both `HOST_VISIBLE` and `DEVICE_LOCAL`, also known as Base Address Register (BAR).
+   If they do, it represents a piece of VRAM (or entire VRAM, if ReBAR is enabled in the motherboard BIOS)
+   that is available to the CPU for mapping.
+   - Writes performed by the host to that memory go through the PCI Express bus.
+     The performance of these writes may be limited, but it may be fine, especially on PCIe 4.0,
+     as long as the rules of using uncached and write-combined memory are followed - only sequential writes and no reads.
+-# Finally, you may need or prefer to create a separate copy of the resource in `DEVICE_LOCAL` memory,
+   a separate "staging" copy in `HOST_VISIBLE` memory, and perform an explicit transfer command between them.
+
+Thankfully, VMA offers an aid to create and use such resources in the way optimal
+for the current Vulkan device. To help the library make the best choice,
+use flag #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT together with
+#VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT.
+It will then prefer a memory type that is both `DEVICE_LOCAL` and `HOST_VISIBLE` (integrated memory or BAR),
+but if no such memory type is available or allocation from it fails
+(PC graphics cards have only 256 MB of BAR by default, unless ReBAR is supported and enabled in BIOS),
+it will fall back to `DEVICE_LOCAL` memory for fast GPU access.
+It is then up to you to detect that the allocation ended up in a memory type that is not `HOST_VISIBLE`,
+so you need to create another "staging" allocation and perform explicit transfers.
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
+    VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT |
+    VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VmaAllocationInfo allocInfo;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
+
+VkMemoryPropertyFlags memPropFlags;
+vmaGetAllocationMemoryProperties(allocator, alloc, &memPropFlags);
+
+if(memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
+{
+    // Allocation ended up in a mappable memory and is already mapped - write to it directly.
+
+    // [Executed in runtime]:
+    memcpy(allocInfo.pMappedData, myData, myDataSize);
+}
+else
+{
+    // Allocation ended up in a non-mappable memory - need to transfer.
+    VkBufferCreateInfo stagingBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+    stagingBufCreateInfo.size = 65536;
+    stagingBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+    VmaAllocationCreateInfo stagingAllocCreateInfo = {};
+    stagingAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+    stagingAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
+        VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+    VkBuffer stagingBuf;
+    VmaAllocation stagingAlloc;
+    VmaAllocationInfo stagingAllocInfo;
+    vmaCreateBuffer(allocator, &stagingBufCreateInfo, &stagingAllocCreateInfo,
+        &stagingBuf, &stagingAlloc, &stagingAllocInfo);
+
+    // [Executed in runtime]:
+    memcpy(stagingAllocInfo.pMappedData, myData, myDataSize);
+    //vkCmdPipelineBarrier: VK_ACCESS_HOST_WRITE_BIT --> VK_ACCESS_TRANSFER_READ_BIT
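+    // A hedged expansion of the barrier hinted at in the comment above (not part of
+    // the original sample) - it makes the host write visible to the transfer stage:
+    VkMemoryBarrier memBarrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER };
+    memBarrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
+    memBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
+    vkCmdPipelineBarrier(cmdBuf, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
+        0, 1, &memBarrier, 0, nullptr, 0, nullptr);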
+    VkBufferCopy bufCopy = {
+        0, // srcOffset
+        0, // dstOffset
+        myDataSize }; // size
+    vkCmdCopyBuffer(cmdBuf, stagingBuf, buf, 1, &bufCopy);
+}
+\endcode
+
+\section usage_patterns_other_use_cases Other use cases
+
+Here are some other, less obvious use cases and their recommended settings:
+
+- An image that is used only as transfer source and destination, but it should stay on the device,
+  as it is used to temporarily store a copy of some texture, e.g. from the current to the next frame,
+  for temporal antialiasing or other temporal effects.
+  - Use `VkImageCreateInfo::usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT`
+  - Use VmaAllocationCreateInfo::usage = #VMA_MEMORY_USAGE_AUTO
+- An image that is used only as transfer source and destination, but it should be placed
+  in the system RAM despite not needing to be mapped, because it serves as a "swap" copy to evict
+  least recently used textures from VRAM.
+  - Use `VkImageCreateInfo::usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT`
+  - Use VmaAllocationCreateInfo::usage = #VMA_MEMORY_USAGE_AUTO_PREFER_HOST,
+    as VMA needs a hint here to differentiate it from the previous case.
+- A buffer that you want to map and write from the CPU, directly read from the GPU
+  (e.g. as a uniform or vertex buffer), but you have a clear preference to place it in device or
+  host memory due to its large size.
+  - Use `VkBufferCreateInfo::usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT`
+  - Use VmaAllocationCreateInfo::usage = #VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE or #VMA_MEMORY_USAGE_AUTO_PREFER_HOST
+  - Use VmaAllocationCreateInfo::flags = #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT
+
+
+\page configuration Configuration
+
+Please check "CONFIGURATION SECTION" in the code to find macros that you can define
+before each include of this file or change directly in this file to provide
+your own implementation of basic facilities like assert, `min()` and `max()` functions,
+mutex, atomic etc.
+The library uses its own implementation of containers by default, but you can switch to using
+STL containers instead.
+
+For example, define `VMA_ASSERT(expr)` before including the library to provide
+a custom implementation of the assertion, compatible with your project.
+By default it is defined to the standard C `assert(expr)` in `_DEBUG` configuration
+and empty otherwise.
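+
+A minimal sketch of such an override - `MY_ENGINE_ASSERT` stands for a hypothetical macro
+from your own code base:
+
+\code
+#define VMA_ASSERT(expr) MY_ENGINE_ASSERT(expr)
+#include "vk_mem_alloc.h"
+\endcode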
+
+\section config_Vulkan_functions Pointers to Vulkan functions
+
+There are multiple ways to import pointers to Vulkan functions in the library.
+In the simplest case you don't need to do anything.
+If the compilation or linking of your program or the initialization of the #VmaAllocator
+doesn't work for you, you can try to reconfigure it.
+
+First, the allocator tries to fetch pointers to Vulkan functions linked statically,
+like this:
+
+\code
+m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
+\endcode
+
+If you want to disable this feature, set the configuration macro: `#define VMA_STATIC_VULKAN_FUNCTIONS 0`.
+
+Second, you can provide the pointers yourself by setting the member VmaAllocatorCreateInfo::pVulkanFunctions.
+You can fetch them e.g. using the functions `vkGetInstanceProcAddr` and `vkGetDeviceProcAddr` or
+by using a helper library like [volk](https://github.com/zeux/volk).
+
+Third, VMA tries to fetch the remaining pointers that are still null by calling
+`vkGetInstanceProcAddr` and `vkGetDeviceProcAddr` on its own.
+You only need to fill in VmaVulkanFunctions::vkGetInstanceProcAddr and VmaVulkanFunctions::vkGetDeviceProcAddr.
+Other pointers will be fetched automatically.
+If you want to disable this feature, set the configuration macro: `#define VMA_DYNAMIC_VULKAN_FUNCTIONS 0`.
+
+Finally, all the function pointers required by the library (considering the selected
+Vulkan version and enabled extensions) are checked with `VMA_ASSERT` if they are not null.
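+
+A minimal sketch of the third approach - assuming `instance`, `physicalDevice`, and `device`
+were created earlier (e.g. through volk):
+
+\code
+VmaVulkanFunctions vulkanFunctions = {};
+vulkanFunctions.vkGetInstanceProcAddr = vkGetInstanceProcAddr;
+vulkanFunctions.vkGetDeviceProcAddr = vkGetDeviceProcAddr;
+
+VmaAllocatorCreateInfo allocatorCreateInfo = {};
+allocatorCreateInfo.instance = instance;
+allocatorCreateInfo.physicalDevice = physicalDevice;
+allocatorCreateInfo.device = device;
+allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions;
+
+VmaAllocator allocator;
+vmaCreateAllocator(&allocatorCreateInfo, &allocator);
+\endcode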
+
+
+\section custom_memory_allocator Custom host memory allocator
+
+If you use a custom allocator for CPU memory rather than the default C++ operators `new`
+and `delete`, you can make this library use your allocator as well
+by filling the optional member VmaAllocatorCreateInfo::pAllocationCallbacks. These
+functions will be passed to Vulkan, as well as used by the library itself to
+make any CPU-side allocations.
+
+\section allocation_callbacks Device memory allocation callbacks
+
+The library makes calls to `vkAllocateMemory()` and `vkFreeMemory()` internally.
+You can set up callbacks to be informed about these calls, e.g. for the purpose
+of gathering some statistics. To do it, fill the optional member
+VmaAllocatorCreateInfo::pDeviceMemoryCallbacks.
+
+\section heap_memory_limit Device heap memory limit
+
+When device memory of a certain heap runs out of free space, new allocations may
+fail (returning an error code) or they may succeed, silently pushing some existing
+memory blocks from GPU VRAM to system RAM (which degrades performance). This
+behavior is implementation-dependent - it depends on the GPU vendor and graphics
+driver.
+
+On AMD cards it can be controlled while creating the Vulkan device object by using
+the VK_AMD_memory_overallocation_behavior extension, if available.
+
+Alternatively, if you want to test how your program behaves with a limited amount of Vulkan device
+memory available without switching your graphics card to one that really has
+smaller VRAM, you can use a feature of this library intended for this purpose.
+To do it, fill the optional member VmaAllocatorCreateInfo::pHeapSizeLimit.
+
+
+
+\page vk_khr_dedicated_allocation VK_KHR_dedicated_allocation
+
+VK_KHR_dedicated_allocation is a Vulkan extension which can be used to improve
+performance on some GPUs. It augments the Vulkan API with the possibility to query
+the driver whether it prefers a particular buffer or image to have its own, dedicated
+allocation (separate `VkDeviceMemory` block) for better efficiency - to be able
+to do some internal optimizations. The extension is supported by this library.
+It will be used automatically when enabled.
+
+It has been promoted to core Vulkan 1.1, so if you use an eligible Vulkan version
+and inform VMA about it by setting VmaAllocatorCreateInfo::vulkanApiVersion,
+you are all set.
+
+Otherwise, if you want to use it as an extension:
+
+1. When creating the Vulkan device, check if the following 2 device extensions are
+supported (call `vkEnumerateDeviceExtensionProperties()`).
+If yes, enable them (fill `VkDeviceCreateInfo::ppEnabledExtensionNames`).
+
+- VK_KHR_get_memory_requirements2
+- VK_KHR_dedicated_allocation
+
+If you enabled these extensions:
+
+2. Use the #VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT flag when creating
+your #VmaAllocator to inform the library that you enabled the required extensions
+and you want the library to use them.
+
+\code
+allocatorInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT;
+
+vmaCreateAllocator(&allocatorInfo, &allocator);
+\endcode
+
+That is all. The extension will be automatically used whenever you create a
+buffer using vmaCreateBuffer() or an image using vmaCreateImage().
+
+When using the extension together with the Vulkan Validation Layer, you will receive
+warnings like this:
+
+_vkBindBufferMemory(): Binding memory to buffer 0x33 but vkGetBufferMemoryRequirements() has not been called on that buffer._
+
+It is OK, you should just ignore it. It happens because you use the function
+`vkGetBufferMemoryRequirements2KHR()` instead of the standard
+`vkGetBufferMemoryRequirements()`, while the validation layer seems to be
+unaware of it.
+
+To learn more about this extension, see:
+
+- [VK_KHR_dedicated_allocation in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/chap50.html#VK_KHR_dedicated_allocation)
+- [VK_KHR_dedicated_allocation unofficial manual](http://asawicki.info/articles/VK_KHR_dedicated_allocation.php5)
+
+
+
+\page vk_ext_memory_priority VK_EXT_memory_priority
+
+VK_EXT_memory_priority is a device extension that allows passing an additional "priority"
+value to Vulkan memory allocations, which the implementation may use to prefer certain
+buffers and images that are critical for performance to stay in device-local memory
+in cases when the memory is over-subscribed, while some others may be moved to system memory.
+
+VMA offers convenient usage of this extension.
+If you enable it, you can pass a "priority" parameter when creating allocations or custom pools
+and the library automatically passes the value to Vulkan using this extension.
+
+If you want to use this extension in connection with VMA, follow these steps:
+
+\section vk_ext_memory_priority_initialization Initialization
+
+1) Call `vkEnumerateDeviceExtensionProperties` for the physical device.
+Check if the extension is supported - if the returned array of `VkExtensionProperties` contains "VK_EXT_memory_priority".
+
+2) Call `vkGetPhysicalDeviceFeatures2` for the physical device instead of the old `vkGetPhysicalDeviceFeatures`.
+Attach an additional structure `VkPhysicalDeviceMemoryPriorityFeaturesEXT` to `VkPhysicalDeviceFeatures2::pNext` to be returned.
+Check if the device feature is really supported - check if `VkPhysicalDeviceMemoryPriorityFeaturesEXT::memoryPriority` is true.
+
+3) While creating the device with `vkCreateDevice`, enable this extension - add "VK_EXT_memory_priority"
+to the list passed as `VkDeviceCreateInfo::ppEnabledExtensionNames`.
+
+4) While creating the device, also don't set `VkDeviceCreateInfo::pEnabledFeatures`.
+Fill in the `VkPhysicalDeviceFeatures2` structure instead and pass it as `VkDeviceCreateInfo::pNext`.
+Enable this device feature - attach the additional structure `VkPhysicalDeviceMemoryPriorityFeaturesEXT` to
+the `VkPhysicalDeviceFeatures2::pNext` chain and set its member `memoryPriority` to `VK_TRUE`.
+
+5) While creating the #VmaAllocator with vmaCreateAllocator(), inform VMA that you
+have enabled this extension and feature - add #VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT
+to VmaAllocatorCreateInfo::flags.
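+
+A condensed sketch of steps 2) and 4) - the variable names are hypothetical:
+
+\code
+// Step 2: query support for the feature.
+VkPhysicalDeviceMemoryPriorityFeaturesEXT priorityFeatures =
+    { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT };
+VkPhysicalDeviceFeatures2 features2 = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 };
+features2.pNext = &priorityFeatures;
+vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
+
+if(priorityFeatures.memoryPriority == VK_TRUE)
+{
+    // Step 4: keep the structure chained to features2 and pass it at device creation.
+    VkDeviceCreateInfo deviceCreateInfo = { VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO };
+    deviceCreateInfo.pNext = &features2;
+    // ... fill queue create infos, enabled extensions ("VK_EXT_memory_priority") etc. ...
+}
+\endcode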
+
+\section vk_ext_memory_priority_usage Usage
+
+When using this extension, you should initialize the following members:
+
+- VmaAllocationCreateInfo::priority when creating a dedicated allocation with #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
+- VmaPoolCreateInfo::priority when creating a custom pool.
+
+It should be a floating-point value between `0.0f` and `1.0f`, where the recommended default is `0.5f`.
+Memory allocated with a higher value can be treated by the Vulkan implementation as higher priority,
+and so it can have lower chances of being pushed out to system memory, experiencing degraded performance.
+
+It might be a good idea to create performance-critical resources like color-attachment or depth-stencil images
+as dedicated and set high priority on them. For example:
+
+\code
+VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
+imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
+imgCreateInfo.extent.width = 3840;
+imgCreateInfo.extent.height = 2160;
+imgCreateInfo.extent.depth = 1;
+imgCreateInfo.mipLevels = 1;
+imgCreateInfo.arrayLayers = 1;
+imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
+imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
+imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
+allocCreateInfo.priority = 1.0f;
+
+VkImage img;
+VmaAllocation alloc;
+vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, nullptr);
+\endcode
+
+The `priority` member is ignored in the following situations:
+
+- Allocations created in custom pools: They inherit the priority, along with all other allocation parameters,
+  from the parameters passed in #VmaPoolCreateInfo when the pool was created.
+- Allocations created in default pools: They inherit the priority from the parameters
+  VMA used when creating the default pools, which means `priority == 0.5f`.
+
+
+\page vk_amd_device_coherent_memory VK_AMD_device_coherent_memory
+
+VK_AMD_device_coherent_memory is a device extension that enables access to
+additional memory types with the `VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD` and
+`VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD` flags. It is useful mostly for
+allocation of buffers intended for writing "breadcrumb markers" in between passes
+or draw calls, which in turn are useful for debugging GPU crash/hang/TDR cases.
+
+When the extension is available but has not been enabled, the Vulkan physical device
+still exposes those memory types, but their usage is forbidden. VMA automatically
+takes care of that - it returns `VK_ERROR_FEATURE_NOT_PRESENT` when an attempt
+to allocate memory of such type is made.
+
+If you want to use this extension in connection with VMA, follow these steps:
+
+\section vk_amd_device_coherent_memory_initialization Initialization
+
+1) Call `vkEnumerateDeviceExtensionProperties` for the physical device.
+Check if the extension is supported - if the returned array of `VkExtensionProperties` contains "VK_AMD_device_coherent_memory".
+
+2) Call `vkGetPhysicalDeviceFeatures2` for the physical device instead of the old `vkGetPhysicalDeviceFeatures`.
+Attach an additional structure `VkPhysicalDeviceCoherentMemoryFeaturesAMD` to `VkPhysicalDeviceFeatures2::pNext` to be returned.
+Check if the device feature is really supported - check if `VkPhysicalDeviceCoherentMemoryFeaturesAMD::deviceCoherentMemory` is true.
+
+3) While creating the device with `vkCreateDevice`, enable this extension - add "VK_AMD_device_coherent_memory"
+to the list passed as `VkDeviceCreateInfo::ppEnabledExtensionNames`.
+
+4) While creating the device, also don't set `VkDeviceCreateInfo::pEnabledFeatures`.
+Fill in the `VkPhysicalDeviceFeatures2` structure instead and pass it as `VkDeviceCreateInfo::pNext`.
+Enable this device feature - attach the additional structure `VkPhysicalDeviceCoherentMemoryFeaturesAMD` to
+`VkPhysicalDeviceFeatures2::pNext` and set its member `deviceCoherentMemory` to `VK_TRUE`.
+
+5) While creating the #VmaAllocator with vmaCreateAllocator(), inform VMA that you
+have enabled this extension and feature - add #VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT
+to VmaAllocatorCreateInfo::flags.
+
+\section vk_amd_device_coherent_memory_usage Usage
+
+After following the steps described above, you can create VMA allocations and custom pools
+out of the special `DEVICE_COHERENT` and `DEVICE_UNCACHED` memory types on eligible
+devices. There are multiple ways to do it, for example:
+
+- You can request or prefer to allocate out of such memory types by adding
+  `VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD` to VmaAllocationCreateInfo::requiredFlags
+  or VmaAllocationCreateInfo::preferredFlags. Those flags can be freely mixed with
+  other ways of \ref choosing_memory_type, like setting VmaAllocationCreateInfo::usage
+  (see the sketch after this list).
+- If you have manually found the memory type index to use for this purpose, force allocation
+  from this specific index by setting VmaAllocationCreateInfo::memoryTypeBits `= 1u << index`.
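+
+A minimal sketch of the first option, for a hypothetical "breadcrumb marker" buffer:
+
+\code
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+// Prefer, but don't strictly require, the special AMD memory types.
+allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD |
+    VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD;
+\endcode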
+
+\section vk_amd_device_coherent_memory_more_information More information
+
+To learn more about this extension, see [VK_AMD_device_coherent_memory in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VK_AMD_device_coherent_memory.html)
+
+Example use of this extension can be found in the code of the sample and test suite
+accompanying this library.
+
+
+\page enabling_buffer_device_address Enabling buffer device address
+
+The device extension VK_KHR_buffer_device_address
+allows fetching a raw GPU pointer to a buffer and passing it for usage in shader code.
+It has been promoted to core Vulkan 1.2.
+
+If you want to use this feature in connection with VMA, follow these steps:
+
+\section enabling_buffer_device_address_initialization Initialization
+
+1) (For Vulkan version < 1.2) Call `vkEnumerateDeviceExtensionProperties` for the physical device.
+Check if the extension is supported - if the returned array of `VkExtensionProperties` contains
+"VK_KHR_buffer_device_address".
+
+2) Call `vkGetPhysicalDeviceFeatures2` for the physical device instead of the old `vkGetPhysicalDeviceFeatures`.
+Attach an additional structure `VkPhysicalDeviceBufferDeviceAddressFeatures*` to `VkPhysicalDeviceFeatures2::pNext` to be returned.
+Check if the device feature is really supported - check if `VkPhysicalDeviceBufferDeviceAddressFeatures::bufferDeviceAddress` is true.
+
+3) (For Vulkan version < 1.2) While creating the device with `vkCreateDevice`, enable this extension - add
+"VK_KHR_buffer_device_address" to the list passed as `VkDeviceCreateInfo::ppEnabledExtensionNames`.
+
+4) While creating the device, also don't set `VkDeviceCreateInfo::pEnabledFeatures`.
+Fill in the `VkPhysicalDeviceFeatures2` structure instead and pass it as `VkDeviceCreateInfo::pNext`.
+Enable this device feature - attach the additional structure `VkPhysicalDeviceBufferDeviceAddressFeatures*` to
+`VkPhysicalDeviceFeatures2::pNext` and set its member `bufferDeviceAddress` to `VK_TRUE`.
+
+5) While creating the #VmaAllocator with vmaCreateAllocator(), inform VMA that you
+have enabled this feature - add #VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT
+to VmaAllocatorCreateInfo::flags.
+
+\section enabling_buffer_device_address_usage Usage
+
+After following the steps described above, you can create buffers with `VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT*` using VMA.
+The library automatically adds `VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT*` to
+allocated memory blocks wherever it might be needed.
+
+Please note that the library supports only `VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT*`.
+The second part of this functionality, related to "capture and replay", is not supported,
+as it is intended for usage in debugging tools like RenderDoc, not in everyday Vulkan usage.
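+
+A minimal sketch of such a buffer and of fetching its address, using the core Vulkan 1.2
+spelling without the `KHR` suffix (assuming `allocator` and `device` were created earlier):
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+
+VkBuffer buf;
+VmaAllocation alloc;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
+
+VkBufferDeviceAddressInfo addrInfo = { VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO };
+addrInfo.buffer = buf;
+VkDeviceAddress addr = vkGetBufferDeviceAddress(device, &addrInfo);
+\endcode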
+
+\section enabling_buffer_device_address_more_information More information
+
+To learn more about this extension, see [VK_KHR_buffer_device_address in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/chap46.html#VK_KHR_buffer_device_address)
+
+Example use of this extension can be found in the code of the sample and test suite
+accompanying this library.
+
+\page general_considerations General considerations
+
+\section general_considerations_thread_safety Thread safety
+
+- The library has no global state, so separate #VmaAllocator objects can be used
+  independently.
+  There should be no need to create multiple such objects though - one per `VkDevice` is enough.
+- By default, all calls to functions that take #VmaAllocator as first parameter
+  are safe to call from multiple threads simultaneously because they are
+  synchronized internally when needed.
+  This includes allocation and deallocation from the default memory pool, as well as custom #VmaPool.
+- When the allocator is created with the #VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT
+  flag, calls to functions that take such #VmaAllocator object must be
+  synchronized externally.
+- Access to a #VmaAllocation object must be externally synchronized. For example,
+  you must not call vmaGetAllocationInfo() and vmaMapMemory() from different
+  threads at the same time if you pass the same #VmaAllocation object to these
+  functions.
+- #VmaVirtualBlock is not safe to be used from multiple threads simultaneously.
+
+\section general_considerations_versioning_and_compatibility Versioning and compatibility
+
+The library uses [**Semantic Versioning**](https://semver.org/),
+which means version numbers follow the convention: Major.Minor.Patch (e.g. 2.3.0), where:
+
+- An incremented Patch version means a release is backward- and forward-compatible,
+  introducing only some internal improvements, bug fixes, optimizations etc.
+  or changes that are out of scope of the official API described in this documentation.
+- An incremented Minor version means a release is backward-compatible,
+  so existing code that uses the library should continue to work, while some new
+  symbols could have been added: new structures, functions, new values in existing
+  enums and bit flags, new structure members, but not new function parameters.
+- An incremented Major version means a release could break some backward compatibility.
+
+All changes between official releases are documented in the file "CHANGELOG.md".
+
+\warning Backward compatibility is considered on the level of C++ source code, not binary linkage.
+Adding new members to existing structures is treated as backward compatible if initializing
+the new members to binary zero results in the old behavior.
+You should always fully initialize all library structures to zeros and not rely on their
+exact binary size.
+
+\section general_considerations_validation_layer_warnings Validation layer warnings
+
+When using this library, you can meet the following types of warnings issued by
+the Vulkan validation layer. They don't necessarily indicate a bug, so you may need
+to just ignore them.
+
+- *vkBindBufferMemory(): Binding memory to buffer 0xeb8e4 but vkGetBufferMemoryRequirements() has not been called on that buffer.*
+  - It happens when the VK_KHR_dedicated_allocation extension is enabled.
+    The `vkGetBufferMemoryRequirements2KHR` function is used instead, while the validation layer seems to be unaware of it.
+- *Mapping an image with layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL can result in undefined behavior if this memory is used by the device. Only GENERAL or PREINITIALIZED should be used.*
+  - It happens when you map a buffer or image, because the library maps the entire
+    `VkDeviceMemory` block, where different types of images and buffers may end
+    up together, especially on GPUs with unified memory like Intel.
+- *Non-linear image 0xebc91 is aliased with linear buffer 0xeb8e4 which may indicate a bug.*
+  - It may happen when you use [defragmentation](@ref defragmentation).
+
+\section general_considerations_allocation_algorithm Allocation algorithm
+
+The library uses the following algorithm for allocation, in order:
+
+-# Try to find a free range of memory in existing blocks.
+-# If failed, try to create a new block of `VkDeviceMemory`, with the preferred block size.
+-# If failed, try to create such a block with size / 2, size / 4, size / 8.
+-# If failed, try to allocate a separate `VkDeviceMemory` for this allocation,
+   just like when you use #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
+-# If failed, choose another memory type that meets the requirements specified in
+   VmaAllocationCreateInfo and go to point 1.
+-# If failed, return `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
+
+\section general_considerations_features_not_supported Features not supported
+
+Features deliberately excluded from the scope of this library:
+
+-# **Data transfer.** Uploading (streaming) and downloading data of buffers and images
+   between CPU and GPU memory and the related synchronization is the responsibility of the user.
+   Defining some "texture" object that would automatically stream its data from a
+   staging copy in CPU memory to GPU memory would rather be a feature of another,
+   higher-level library implemented on top of VMA.
+   VMA doesn't record any commands to a `VkCommandBuffer`. It just allocates memory.
+-# **Recreation of buffers and images.** Although the library has functions for
+   buffer and image creation: vmaCreateBuffer(), vmaCreateImage(), you need to
+   recreate these objects yourself after defragmentation.
+   That is because the big structures `VkBufferCreateInfo`, `VkImageCreateInfo`
+   are not stored in the #VmaAllocation object.
+-# **Handling CPU memory allocation failures.** When dynamically creating small C++
+   objects in CPU memory (not Vulkan memory), allocation failures are not checked
+   and handled gracefully, because that would complicate code significantly and
+   is usually not needed in desktop PC applications anyway.
+   Success of an allocation is just checked with an assert.
+-# **Code free of any compiler warnings.** Maintaining the library to compile and
+   work correctly on so many different platforms is hard enough. Being free of
+   any warnings, on any version of any compiler, is simply not feasible.
+   There are many preprocessor macros that make some variables unused, function parameters unreferenced,
+   or conditional expressions constant in some configurations.
+   The code of this library should not be bigger or more complicated just to silence these warnings.
+   It is recommended to disable such warnings instead.
+-# This is a C++ library with a C interface. **Bindings or ports to any other programming languages** are welcome as external projects but
+   are not going to be included in this repository.
+*/
diff --git a/MacroLibX/third_party/volk.c b/MacroLibX/third_party/volk.c
new file mode 100644
index 0000000..03b6486
--- /dev/null
+++ b/MacroLibX/third_party/volk.c
@@ -0,0 +1,3041 @@
+/* This file is part of volk library; see volk.h for version/license details */
+/* clang-format off */
+#include "volk.h"
+
+#ifdef _WIN32
+    typedef const char* LPCSTR;
+    typedef struct HINSTANCE__* HINSTANCE;
+    typedef HINSTANCE HMODULE;
+    #if defined(_MINWINDEF_)
+    /* minwindef.h defines FARPROC, and attempting to redefine it may conflict with -Wstrict-prototypes */
+    #elif defined(_WIN64)
+        typedef __int64 (__stdcall* FARPROC)(void);
+    #else
+        typedef int (__stdcall* FARPROC)(void);
+    #endif
+#else
+#   include <dlfcn.h>
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifdef _WIN32
+__declspec(dllimport) HMODULE __stdcall LoadLibraryA(LPCSTR);
+__declspec(dllimport) FARPROC __stdcall GetProcAddress(HMODULE, LPCSTR);
+__declspec(dllimport) int __stdcall FreeLibrary(HMODULE);
+#endif
+
+static void* loadedModule = NULL;
+static VkInstance loadedInstance = VK_NULL_HANDLE;
+static VkDevice loadedDevice = VK_NULL_HANDLE;
+
+static void volkGenLoadLoader(void* context, PFN_vkVoidFunction (*load)(void*, const char*));
+static void volkGenLoadInstance(void* context, PFN_vkVoidFunction (*load)(void*, const char*));
+static void volkGenLoadDevice(void* context, PFN_vkVoidFunction (*load)(void*, const char*));
+static void volkGenLoadDeviceTable(struct VolkDeviceTable* table, void* context, PFN_vkVoidFunction (*load)(void*, const char*));
+
+static PFN_vkVoidFunction vkGetInstanceProcAddrStub(void* context, const char* name)
+{
+    return vkGetInstanceProcAddr((VkInstance)context, name);
+}
+
+static PFN_vkVoidFunction vkGetDeviceProcAddrStub(void* context, const char* name)
+{
+    return vkGetDeviceProcAddr((VkDevice)context, name);
+}
+
+static PFN_vkVoidFunction nullProcAddrStub(void* context, const char* name)
+{
+    (void)context;
+    (void)name;
+    return NULL;
+}
+
+VkResult volkInitialize(void)
+{
+#if defined(_WIN32)
+    HMODULE module = LoadLibraryA("vulkan-1.dll");
+    if (!module)
+        return VK_ERROR_INITIALIZATION_FAILED;
+
+    // note: function pointer is cast through void function pointer to silence cast-function-type warning on gcc8
+    vkGetInstanceProcAddr =
(PFN_vkGetInstanceProcAddr)(void(*)(void))GetProcAddress(module, "vkGetInstanceProcAddr"); +#elif defined(__APPLE__) + void* module = dlopen("libvulkan.dylib", RTLD_NOW | RTLD_LOCAL); + if (!module) + module = dlopen("libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL); + if (!module) + module = dlopen("libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL); + if (!module) + return VK_ERROR_INITIALIZATION_FAILED; + + vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)dlsym(module, "vkGetInstanceProcAddr"); +#else + void* module = dlopen("libvulkan.so.1", RTLD_NOW | RTLD_LOCAL); + if (!module) + module = dlopen("libvulkan.so", RTLD_NOW | RTLD_LOCAL); + if (!module) + return VK_ERROR_INITIALIZATION_FAILED; + + vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)dlsym(module, "vkGetInstanceProcAddr"); +#endif + + loadedModule = module; + volkGenLoadLoader(NULL, vkGetInstanceProcAddrStub); + + return VK_SUCCESS; +} + +void volkInitializeCustom(PFN_vkGetInstanceProcAddr handler) +{ + vkGetInstanceProcAddr = handler; + + loadedModule = NULL; + volkGenLoadLoader(NULL, vkGetInstanceProcAddrStub); +} + +void volkFinalize(void) +{ + if (loadedModule) + { +#if defined(_WIN32) + FreeLibrary((HMODULE)loadedModule); +#else + dlclose(loadedModule); +#endif + } + + vkGetInstanceProcAddr = NULL; + volkGenLoadLoader(NULL, nullProcAddrStub); + volkGenLoadInstance(NULL, nullProcAddrStub); + volkGenLoadDevice(NULL, nullProcAddrStub); + + loadedModule = NULL; + loadedInstance = VK_NULL_HANDLE; + loadedDevice = VK_NULL_HANDLE; +} + +uint32_t volkGetInstanceVersion(void) +{ +#if defined(VK_VERSION_1_1) + uint32_t apiVersion = 0; + if (vkEnumerateInstanceVersion && vkEnumerateInstanceVersion(&apiVersion) == VK_SUCCESS) + return apiVersion; +#endif + + if (vkCreateInstance) + return VK_API_VERSION_1_0; + + return 0; +} + +void volkLoadInstance(VkInstance instance) +{ + loadedInstance = instance; + volkGenLoadInstance(instance, vkGetInstanceProcAddrStub); + volkGenLoadDevice(instance, vkGetInstanceProcAddrStub); +} + +void volkLoadInstanceOnly(VkInstance instance) +{ + loadedInstance = instance; + volkGenLoadInstance(instance, vkGetInstanceProcAddrStub); +} + +VkInstance volkGetLoadedInstance(void) +{ + return loadedInstance; +} + +void volkLoadDevice(VkDevice device) +{ + loadedDevice = device; + volkGenLoadDevice(device, vkGetDeviceProcAddrStub); +} + +VkDevice volkGetLoadedDevice(void) +{ + return loadedDevice; +} + +void volkLoadDeviceTable(struct VolkDeviceTable* table, VkDevice device) +{ + volkGenLoadDeviceTable(table, device, vkGetDeviceProcAddrStub); +} + +static void volkGenLoadLoader(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) +{ + /* VOLK_GENERATE_LOAD_LOADER */ +#if defined(VK_VERSION_1_0) + vkCreateInstance = (PFN_vkCreateInstance)load(context, "vkCreateInstance"); + vkEnumerateInstanceExtensionProperties = (PFN_vkEnumerateInstanceExtensionProperties)load(context, "vkEnumerateInstanceExtensionProperties"); + vkEnumerateInstanceLayerProperties = (PFN_vkEnumerateInstanceLayerProperties)load(context, "vkEnumerateInstanceLayerProperties"); +#endif /* defined(VK_VERSION_1_0) */ +#if defined(VK_VERSION_1_1) + vkEnumerateInstanceVersion = (PFN_vkEnumerateInstanceVersion)load(context, "vkEnumerateInstanceVersion"); +#endif /* defined(VK_VERSION_1_1) */ + /* VOLK_GENERATE_LOAD_LOADER */ +} + +static void volkGenLoadInstance(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) +{ + /* VOLK_GENERATE_LOAD_INSTANCE */ +#if defined(VK_VERSION_1_0) + vkCreateDevice = (PFN_vkCreateDevice)load(context, 
"vkCreateDevice"); + vkDestroyInstance = (PFN_vkDestroyInstance)load(context, "vkDestroyInstance"); + vkEnumerateDeviceExtensionProperties = (PFN_vkEnumerateDeviceExtensionProperties)load(context, "vkEnumerateDeviceExtensionProperties"); + vkEnumerateDeviceLayerProperties = (PFN_vkEnumerateDeviceLayerProperties)load(context, "vkEnumerateDeviceLayerProperties"); + vkEnumeratePhysicalDevices = (PFN_vkEnumeratePhysicalDevices)load(context, "vkEnumeratePhysicalDevices"); + vkGetDeviceProcAddr = (PFN_vkGetDeviceProcAddr)load(context, "vkGetDeviceProcAddr"); + vkGetPhysicalDeviceFeatures = (PFN_vkGetPhysicalDeviceFeatures)load(context, "vkGetPhysicalDeviceFeatures"); + vkGetPhysicalDeviceFormatProperties = (PFN_vkGetPhysicalDeviceFormatProperties)load(context, "vkGetPhysicalDeviceFormatProperties"); + vkGetPhysicalDeviceImageFormatProperties = (PFN_vkGetPhysicalDeviceImageFormatProperties)load(context, "vkGetPhysicalDeviceImageFormatProperties"); + vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)load(context, "vkGetPhysicalDeviceMemoryProperties"); + vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)load(context, "vkGetPhysicalDeviceProperties"); + vkGetPhysicalDeviceQueueFamilyProperties = (PFN_vkGetPhysicalDeviceQueueFamilyProperties)load(context, "vkGetPhysicalDeviceQueueFamilyProperties"); + vkGetPhysicalDeviceSparseImageFormatProperties = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties)load(context, "vkGetPhysicalDeviceSparseImageFormatProperties"); +#endif /* defined(VK_VERSION_1_0) */ +#if defined(VK_VERSION_1_1) + vkEnumeratePhysicalDeviceGroups = (PFN_vkEnumeratePhysicalDeviceGroups)load(context, "vkEnumeratePhysicalDeviceGroups"); + vkGetPhysicalDeviceExternalBufferProperties = (PFN_vkGetPhysicalDeviceExternalBufferProperties)load(context, "vkGetPhysicalDeviceExternalBufferProperties"); + vkGetPhysicalDeviceExternalFenceProperties = (PFN_vkGetPhysicalDeviceExternalFenceProperties)load(context, "vkGetPhysicalDeviceExternalFenceProperties"); + vkGetPhysicalDeviceExternalSemaphoreProperties = (PFN_vkGetPhysicalDeviceExternalSemaphoreProperties)load(context, "vkGetPhysicalDeviceExternalSemaphoreProperties"); + vkGetPhysicalDeviceFeatures2 = (PFN_vkGetPhysicalDeviceFeatures2)load(context, "vkGetPhysicalDeviceFeatures2"); + vkGetPhysicalDeviceFormatProperties2 = (PFN_vkGetPhysicalDeviceFormatProperties2)load(context, "vkGetPhysicalDeviceFormatProperties2"); + vkGetPhysicalDeviceImageFormatProperties2 = (PFN_vkGetPhysicalDeviceImageFormatProperties2)load(context, "vkGetPhysicalDeviceImageFormatProperties2"); + vkGetPhysicalDeviceMemoryProperties2 = (PFN_vkGetPhysicalDeviceMemoryProperties2)load(context, "vkGetPhysicalDeviceMemoryProperties2"); + vkGetPhysicalDeviceProperties2 = (PFN_vkGetPhysicalDeviceProperties2)load(context, "vkGetPhysicalDeviceProperties2"); + vkGetPhysicalDeviceQueueFamilyProperties2 = (PFN_vkGetPhysicalDeviceQueueFamilyProperties2)load(context, "vkGetPhysicalDeviceQueueFamilyProperties2"); + vkGetPhysicalDeviceSparseImageFormatProperties2 = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties2)load(context, "vkGetPhysicalDeviceSparseImageFormatProperties2"); +#endif /* defined(VK_VERSION_1_1) */ +#if defined(VK_VERSION_1_3) + vkGetPhysicalDeviceToolProperties = (PFN_vkGetPhysicalDeviceToolProperties)load(context, "vkGetPhysicalDeviceToolProperties"); +#endif /* defined(VK_VERSION_1_3) */ +#if defined(VK_EXT_acquire_drm_display) + vkAcquireDrmDisplayEXT = (PFN_vkAcquireDrmDisplayEXT)load(context, 
"vkAcquireDrmDisplayEXT"); + vkGetDrmDisplayEXT = (PFN_vkGetDrmDisplayEXT)load(context, "vkGetDrmDisplayEXT"); +#endif /* defined(VK_EXT_acquire_drm_display) */ +#if defined(VK_EXT_acquire_xlib_display) + vkAcquireXlibDisplayEXT = (PFN_vkAcquireXlibDisplayEXT)load(context, "vkAcquireXlibDisplayEXT"); + vkGetRandROutputDisplayEXT = (PFN_vkGetRandROutputDisplayEXT)load(context, "vkGetRandROutputDisplayEXT"); +#endif /* defined(VK_EXT_acquire_xlib_display) */ +#if defined(VK_EXT_calibrated_timestamps) + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = (PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT)load(context, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT"); +#endif /* defined(VK_EXT_calibrated_timestamps) */ +#if defined(VK_EXT_debug_report) + vkCreateDebugReportCallbackEXT = (PFN_vkCreateDebugReportCallbackEXT)load(context, "vkCreateDebugReportCallbackEXT"); + vkDebugReportMessageEXT = (PFN_vkDebugReportMessageEXT)load(context, "vkDebugReportMessageEXT"); + vkDestroyDebugReportCallbackEXT = (PFN_vkDestroyDebugReportCallbackEXT)load(context, "vkDestroyDebugReportCallbackEXT"); +#endif /* defined(VK_EXT_debug_report) */ +#if defined(VK_EXT_debug_utils) + vkCmdBeginDebugUtilsLabelEXT = (PFN_vkCmdBeginDebugUtilsLabelEXT)load(context, "vkCmdBeginDebugUtilsLabelEXT"); + vkCmdEndDebugUtilsLabelEXT = (PFN_vkCmdEndDebugUtilsLabelEXT)load(context, "vkCmdEndDebugUtilsLabelEXT"); + vkCmdInsertDebugUtilsLabelEXT = (PFN_vkCmdInsertDebugUtilsLabelEXT)load(context, "vkCmdInsertDebugUtilsLabelEXT"); + vkCreateDebugUtilsMessengerEXT = (PFN_vkCreateDebugUtilsMessengerEXT)load(context, "vkCreateDebugUtilsMessengerEXT"); + vkDestroyDebugUtilsMessengerEXT = (PFN_vkDestroyDebugUtilsMessengerEXT)load(context, "vkDestroyDebugUtilsMessengerEXT"); + vkQueueBeginDebugUtilsLabelEXT = (PFN_vkQueueBeginDebugUtilsLabelEXT)load(context, "vkQueueBeginDebugUtilsLabelEXT"); + vkQueueEndDebugUtilsLabelEXT = (PFN_vkQueueEndDebugUtilsLabelEXT)load(context, "vkQueueEndDebugUtilsLabelEXT"); + vkQueueInsertDebugUtilsLabelEXT = (PFN_vkQueueInsertDebugUtilsLabelEXT)load(context, "vkQueueInsertDebugUtilsLabelEXT"); + vkSetDebugUtilsObjectNameEXT = (PFN_vkSetDebugUtilsObjectNameEXT)load(context, "vkSetDebugUtilsObjectNameEXT"); + vkSetDebugUtilsObjectTagEXT = (PFN_vkSetDebugUtilsObjectTagEXT)load(context, "vkSetDebugUtilsObjectTagEXT"); + vkSubmitDebugUtilsMessageEXT = (PFN_vkSubmitDebugUtilsMessageEXT)load(context, "vkSubmitDebugUtilsMessageEXT"); +#endif /* defined(VK_EXT_debug_utils) */ +#if defined(VK_EXT_direct_mode_display) + vkReleaseDisplayEXT = (PFN_vkReleaseDisplayEXT)load(context, "vkReleaseDisplayEXT"); +#endif /* defined(VK_EXT_direct_mode_display) */ +#if defined(VK_EXT_directfb_surface) + vkCreateDirectFBSurfaceEXT = (PFN_vkCreateDirectFBSurfaceEXT)load(context, "vkCreateDirectFBSurfaceEXT"); + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = (PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT)load(context, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT"); +#endif /* defined(VK_EXT_directfb_surface) */ +#if defined(VK_EXT_display_surface_counter) + vkGetPhysicalDeviceSurfaceCapabilities2EXT = (PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)load(context, "vkGetPhysicalDeviceSurfaceCapabilities2EXT"); +#endif /* defined(VK_EXT_display_surface_counter) */ +#if defined(VK_EXT_full_screen_exclusive) + vkGetPhysicalDeviceSurfacePresentModes2EXT = (PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT)load(context, "vkGetPhysicalDeviceSurfacePresentModes2EXT"); +#endif /* defined(VK_EXT_full_screen_exclusive) */ +#if 
defined(VK_EXT_headless_surface) + vkCreateHeadlessSurfaceEXT = (PFN_vkCreateHeadlessSurfaceEXT)load(context, "vkCreateHeadlessSurfaceEXT"); +#endif /* defined(VK_EXT_headless_surface) */ +#if defined(VK_EXT_metal_surface) + vkCreateMetalSurfaceEXT = (PFN_vkCreateMetalSurfaceEXT)load(context, "vkCreateMetalSurfaceEXT"); +#endif /* defined(VK_EXT_metal_surface) */ +#if defined(VK_EXT_sample_locations) + vkGetPhysicalDeviceMultisamplePropertiesEXT = (PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)load(context, "vkGetPhysicalDeviceMultisamplePropertiesEXT"); +#endif /* defined(VK_EXT_sample_locations) */ +#if defined(VK_EXT_tooling_info) + vkGetPhysicalDeviceToolPropertiesEXT = (PFN_vkGetPhysicalDeviceToolPropertiesEXT)load(context, "vkGetPhysicalDeviceToolPropertiesEXT"); +#endif /* defined(VK_EXT_tooling_info) */ +#if defined(VK_FUCHSIA_imagepipe_surface) + vkCreateImagePipeSurfaceFUCHSIA = (PFN_vkCreateImagePipeSurfaceFUCHSIA)load(context, "vkCreateImagePipeSurfaceFUCHSIA"); +#endif /* defined(VK_FUCHSIA_imagepipe_surface) */ +#if defined(VK_GGP_stream_descriptor_surface) + vkCreateStreamDescriptorSurfaceGGP = (PFN_vkCreateStreamDescriptorSurfaceGGP)load(context, "vkCreateStreamDescriptorSurfaceGGP"); +#endif /* defined(VK_GGP_stream_descriptor_surface) */ +#if defined(VK_KHR_android_surface) + vkCreateAndroidSurfaceKHR = (PFN_vkCreateAndroidSurfaceKHR)load(context, "vkCreateAndroidSurfaceKHR"); +#endif /* defined(VK_KHR_android_surface) */ +#if defined(VK_KHR_cooperative_matrix) + vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = (PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR)load(context, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR"); +#endif /* defined(VK_KHR_cooperative_matrix) */ +#if defined(VK_KHR_device_group_creation) + vkEnumeratePhysicalDeviceGroupsKHR = (PFN_vkEnumeratePhysicalDeviceGroupsKHR)load(context, "vkEnumeratePhysicalDeviceGroupsKHR"); +#endif /* defined(VK_KHR_device_group_creation) */ +#if defined(VK_KHR_display) + vkCreateDisplayModeKHR = (PFN_vkCreateDisplayModeKHR)load(context, "vkCreateDisplayModeKHR"); + vkCreateDisplayPlaneSurfaceKHR = (PFN_vkCreateDisplayPlaneSurfaceKHR)load(context, "vkCreateDisplayPlaneSurfaceKHR"); + vkGetDisplayModePropertiesKHR = (PFN_vkGetDisplayModePropertiesKHR)load(context, "vkGetDisplayModePropertiesKHR"); + vkGetDisplayPlaneCapabilitiesKHR = (PFN_vkGetDisplayPlaneCapabilitiesKHR)load(context, "vkGetDisplayPlaneCapabilitiesKHR"); + vkGetDisplayPlaneSupportedDisplaysKHR = (PFN_vkGetDisplayPlaneSupportedDisplaysKHR)load(context, "vkGetDisplayPlaneSupportedDisplaysKHR"); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = (PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)load(context, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR"); + vkGetPhysicalDeviceDisplayPropertiesKHR = (PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)load(context, "vkGetPhysicalDeviceDisplayPropertiesKHR"); +#endif /* defined(VK_KHR_display) */ +#if defined(VK_KHR_external_fence_capabilities) + vkGetPhysicalDeviceExternalFencePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)load(context, "vkGetPhysicalDeviceExternalFencePropertiesKHR"); +#endif /* defined(VK_KHR_external_fence_capabilities) */ +#if defined(VK_KHR_external_memory_capabilities) + vkGetPhysicalDeviceExternalBufferPropertiesKHR = (PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)load(context, "vkGetPhysicalDeviceExternalBufferPropertiesKHR"); +#endif /* defined(VK_KHR_external_memory_capabilities) */ +#if defined(VK_KHR_external_semaphore_capabilities) + 
vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)load(context, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR"); +#endif /* defined(VK_KHR_external_semaphore_capabilities) */ +#if defined(VK_KHR_fragment_shading_rate) + vkGetPhysicalDeviceFragmentShadingRatesKHR = (PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR)load(context, "vkGetPhysicalDeviceFragmentShadingRatesKHR"); +#endif /* defined(VK_KHR_fragment_shading_rate) */ +#if defined(VK_KHR_get_display_properties2) + vkGetDisplayModeProperties2KHR = (PFN_vkGetDisplayModeProperties2KHR)load(context, "vkGetDisplayModeProperties2KHR"); + vkGetDisplayPlaneCapabilities2KHR = (PFN_vkGetDisplayPlaneCapabilities2KHR)load(context, "vkGetDisplayPlaneCapabilities2KHR"); + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = (PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)load(context, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR"); + vkGetPhysicalDeviceDisplayProperties2KHR = (PFN_vkGetPhysicalDeviceDisplayProperties2KHR)load(context, "vkGetPhysicalDeviceDisplayProperties2KHR"); +#endif /* defined(VK_KHR_get_display_properties2) */ +#if defined(VK_KHR_get_physical_device_properties2) + vkGetPhysicalDeviceFeatures2KHR = (PFN_vkGetPhysicalDeviceFeatures2KHR)load(context, "vkGetPhysicalDeviceFeatures2KHR"); + vkGetPhysicalDeviceFormatProperties2KHR = (PFN_vkGetPhysicalDeviceFormatProperties2KHR)load(context, "vkGetPhysicalDeviceFormatProperties2KHR"); + vkGetPhysicalDeviceImageFormatProperties2KHR = (PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)load(context, "vkGetPhysicalDeviceImageFormatProperties2KHR"); + vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)load(context, "vkGetPhysicalDeviceMemoryProperties2KHR"); + vkGetPhysicalDeviceProperties2KHR = (PFN_vkGetPhysicalDeviceProperties2KHR)load(context, "vkGetPhysicalDeviceProperties2KHR"); + vkGetPhysicalDeviceQueueFamilyProperties2KHR = (PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)load(context, "vkGetPhysicalDeviceQueueFamilyProperties2KHR"); + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)load(context, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR"); +#endif /* defined(VK_KHR_get_physical_device_properties2) */ +#if defined(VK_KHR_get_surface_capabilities2) + vkGetPhysicalDeviceSurfaceCapabilities2KHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)load(context, "vkGetPhysicalDeviceSurfaceCapabilities2KHR"); + vkGetPhysicalDeviceSurfaceFormats2KHR = (PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)load(context, "vkGetPhysicalDeviceSurfaceFormats2KHR"); +#endif /* defined(VK_KHR_get_surface_capabilities2) */ +#if defined(VK_KHR_performance_query) + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = (PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)load(context, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR"); + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = (PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)load(context, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR"); +#endif /* defined(VK_KHR_performance_query) */ +#if defined(VK_KHR_surface) + vkDestroySurfaceKHR = (PFN_vkDestroySurfaceKHR)load(context, "vkDestroySurfaceKHR"); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)load(context, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR"); + vkGetPhysicalDeviceSurfaceFormatsKHR = 
(PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)load(context, "vkGetPhysicalDeviceSurfaceFormatsKHR"); + vkGetPhysicalDeviceSurfacePresentModesKHR = (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)load(context, "vkGetPhysicalDeviceSurfacePresentModesKHR"); + vkGetPhysicalDeviceSurfaceSupportKHR = (PFN_vkGetPhysicalDeviceSurfaceSupportKHR)load(context, "vkGetPhysicalDeviceSurfaceSupportKHR"); +#endif /* defined(VK_KHR_surface) */ +#if defined(VK_KHR_video_encode_queue) + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = (PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR)load(context, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR"); +#endif /* defined(VK_KHR_video_encode_queue) */ +#if defined(VK_KHR_video_queue) + vkGetPhysicalDeviceVideoCapabilitiesKHR = (PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR)load(context, "vkGetPhysicalDeviceVideoCapabilitiesKHR"); + vkGetPhysicalDeviceVideoFormatPropertiesKHR = (PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR)load(context, "vkGetPhysicalDeviceVideoFormatPropertiesKHR"); +#endif /* defined(VK_KHR_video_queue) */ +#if defined(VK_KHR_wayland_surface) + vkCreateWaylandSurfaceKHR = (PFN_vkCreateWaylandSurfaceKHR)load(context, "vkCreateWaylandSurfaceKHR"); + vkGetPhysicalDeviceWaylandPresentationSupportKHR = (PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)load(context, "vkGetPhysicalDeviceWaylandPresentationSupportKHR"); +#endif /* defined(VK_KHR_wayland_surface) */ +#if defined(VK_KHR_win32_surface) + vkCreateWin32SurfaceKHR = (PFN_vkCreateWin32SurfaceKHR)load(context, "vkCreateWin32SurfaceKHR"); + vkGetPhysicalDeviceWin32PresentationSupportKHR = (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)load(context, "vkGetPhysicalDeviceWin32PresentationSupportKHR"); +#endif /* defined(VK_KHR_win32_surface) */ +#if defined(VK_KHR_xcb_surface) + vkCreateXcbSurfaceKHR = (PFN_vkCreateXcbSurfaceKHR)load(context, "vkCreateXcbSurfaceKHR"); + vkGetPhysicalDeviceXcbPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)load(context, "vkGetPhysicalDeviceXcbPresentationSupportKHR"); +#endif /* defined(VK_KHR_xcb_surface) */ +#if defined(VK_KHR_xlib_surface) + vkCreateXlibSurfaceKHR = (PFN_vkCreateXlibSurfaceKHR)load(context, "vkCreateXlibSurfaceKHR"); + vkGetPhysicalDeviceXlibPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)load(context, "vkGetPhysicalDeviceXlibPresentationSupportKHR"); +#endif /* defined(VK_KHR_xlib_surface) */ +#if defined(VK_MVK_ios_surface) + vkCreateIOSSurfaceMVK = (PFN_vkCreateIOSSurfaceMVK)load(context, "vkCreateIOSSurfaceMVK"); +#endif /* defined(VK_MVK_ios_surface) */ +#if defined(VK_MVK_macos_surface) + vkCreateMacOSSurfaceMVK = (PFN_vkCreateMacOSSurfaceMVK)load(context, "vkCreateMacOSSurfaceMVK"); +#endif /* defined(VK_MVK_macos_surface) */ +#if defined(VK_NN_vi_surface) + vkCreateViSurfaceNN = (PFN_vkCreateViSurfaceNN)load(context, "vkCreateViSurfaceNN"); +#endif /* defined(VK_NN_vi_surface) */ +#if defined(VK_NV_acquire_winrt_display) + vkAcquireWinrtDisplayNV = (PFN_vkAcquireWinrtDisplayNV)load(context, "vkAcquireWinrtDisplayNV"); + vkGetWinrtDisplayNV = (PFN_vkGetWinrtDisplayNV)load(context, "vkGetWinrtDisplayNV"); +#endif /* defined(VK_NV_acquire_winrt_display) */ +#if defined(VK_NV_cooperative_matrix) + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = (PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV)load(context, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV"); +#endif /* defined(VK_NV_cooperative_matrix) */ +#if defined(VK_NV_coverage_reduction_mode) 
+ vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = (PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV)load(context, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV"); +#endif /* defined(VK_NV_coverage_reduction_mode) */ +#if defined(VK_NV_external_memory_capabilities) + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = (PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)load(context, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV"); +#endif /* defined(VK_NV_external_memory_capabilities) */ +#if defined(VK_NV_optical_flow) + vkGetPhysicalDeviceOpticalFlowImageFormatsNV = (PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV)load(context, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV"); +#endif /* defined(VK_NV_optical_flow) */ +#if defined(VK_QNX_screen_surface) + vkCreateScreenSurfaceQNX = (PFN_vkCreateScreenSurfaceQNX)load(context, "vkCreateScreenSurfaceQNX"); + vkGetPhysicalDeviceScreenPresentationSupportQNX = (PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX)load(context, "vkGetPhysicalDeviceScreenPresentationSupportQNX"); +#endif /* defined(VK_QNX_screen_surface) */ +#if (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) + vkGetPhysicalDevicePresentRectanglesKHR = (PFN_vkGetPhysicalDevicePresentRectanglesKHR)load(context, "vkGetPhysicalDevicePresentRectanglesKHR"); +#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ + /* VOLK_GENERATE_LOAD_INSTANCE */ +} + +static void volkGenLoadDevice(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) +{ + /* VOLK_GENERATE_LOAD_DEVICE */ +#if defined(VK_VERSION_1_0) + vkAllocateCommandBuffers = (PFN_vkAllocateCommandBuffers)load(context, "vkAllocateCommandBuffers"); + vkAllocateDescriptorSets = (PFN_vkAllocateDescriptorSets)load(context, "vkAllocateDescriptorSets"); + vkAllocateMemory = (PFN_vkAllocateMemory)load(context, "vkAllocateMemory"); + vkBeginCommandBuffer = (PFN_vkBeginCommandBuffer)load(context, "vkBeginCommandBuffer"); + vkBindBufferMemory = (PFN_vkBindBufferMemory)load(context, "vkBindBufferMemory"); + vkBindImageMemory = (PFN_vkBindImageMemory)load(context, "vkBindImageMemory"); + vkCmdBeginQuery = (PFN_vkCmdBeginQuery)load(context, "vkCmdBeginQuery"); + vkCmdBeginRenderPass = (PFN_vkCmdBeginRenderPass)load(context, "vkCmdBeginRenderPass"); + vkCmdBindDescriptorSets = (PFN_vkCmdBindDescriptorSets)load(context, "vkCmdBindDescriptorSets"); + vkCmdBindIndexBuffer = (PFN_vkCmdBindIndexBuffer)load(context, "vkCmdBindIndexBuffer"); + vkCmdBindPipeline = (PFN_vkCmdBindPipeline)load(context, "vkCmdBindPipeline"); + vkCmdBindVertexBuffers = (PFN_vkCmdBindVertexBuffers)load(context, "vkCmdBindVertexBuffers"); + vkCmdBlitImage = (PFN_vkCmdBlitImage)load(context, "vkCmdBlitImage"); + vkCmdClearAttachments = (PFN_vkCmdClearAttachments)load(context, "vkCmdClearAttachments"); + vkCmdClearColorImage = (PFN_vkCmdClearColorImage)load(context, "vkCmdClearColorImage"); + vkCmdClearDepthStencilImage = (PFN_vkCmdClearDepthStencilImage)load(context, "vkCmdClearDepthStencilImage"); + vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)load(context, "vkCmdCopyBuffer"); + vkCmdCopyBufferToImage = (PFN_vkCmdCopyBufferToImage)load(context, "vkCmdCopyBufferToImage"); + vkCmdCopyImage = (PFN_vkCmdCopyImage)load(context, "vkCmdCopyImage"); + vkCmdCopyImageToBuffer = (PFN_vkCmdCopyImageToBuffer)load(context, "vkCmdCopyImageToBuffer"); + 
vkCmdCopyQueryPoolResults = (PFN_vkCmdCopyQueryPoolResults)load(context, "vkCmdCopyQueryPoolResults"); + vkCmdDispatch = (PFN_vkCmdDispatch)load(context, "vkCmdDispatch"); + vkCmdDispatchIndirect = (PFN_vkCmdDispatchIndirect)load(context, "vkCmdDispatchIndirect"); + vkCmdDraw = (PFN_vkCmdDraw)load(context, "vkCmdDraw"); + vkCmdDrawIndexed = (PFN_vkCmdDrawIndexed)load(context, "vkCmdDrawIndexed"); + vkCmdDrawIndexedIndirect = (PFN_vkCmdDrawIndexedIndirect)load(context, "vkCmdDrawIndexedIndirect"); + vkCmdDrawIndirect = (PFN_vkCmdDrawIndirect)load(context, "vkCmdDrawIndirect"); + vkCmdEndQuery = (PFN_vkCmdEndQuery)load(context, "vkCmdEndQuery"); + vkCmdEndRenderPass = (PFN_vkCmdEndRenderPass)load(context, "vkCmdEndRenderPass"); + vkCmdExecuteCommands = (PFN_vkCmdExecuteCommands)load(context, "vkCmdExecuteCommands"); + vkCmdFillBuffer = (PFN_vkCmdFillBuffer)load(context, "vkCmdFillBuffer"); + vkCmdNextSubpass = (PFN_vkCmdNextSubpass)load(context, "vkCmdNextSubpass"); + vkCmdPipelineBarrier = (PFN_vkCmdPipelineBarrier)load(context, "vkCmdPipelineBarrier"); + vkCmdPushConstants = (PFN_vkCmdPushConstants)load(context, "vkCmdPushConstants"); + vkCmdResetEvent = (PFN_vkCmdResetEvent)load(context, "vkCmdResetEvent"); + vkCmdResetQueryPool = (PFN_vkCmdResetQueryPool)load(context, "vkCmdResetQueryPool"); + vkCmdResolveImage = (PFN_vkCmdResolveImage)load(context, "vkCmdResolveImage"); + vkCmdSetBlendConstants = (PFN_vkCmdSetBlendConstants)load(context, "vkCmdSetBlendConstants"); + vkCmdSetDepthBias = (PFN_vkCmdSetDepthBias)load(context, "vkCmdSetDepthBias"); + vkCmdSetDepthBounds = (PFN_vkCmdSetDepthBounds)load(context, "vkCmdSetDepthBounds"); + vkCmdSetEvent = (PFN_vkCmdSetEvent)load(context, "vkCmdSetEvent"); + vkCmdSetLineWidth = (PFN_vkCmdSetLineWidth)load(context, "vkCmdSetLineWidth"); + vkCmdSetScissor = (PFN_vkCmdSetScissor)load(context, "vkCmdSetScissor"); + vkCmdSetStencilCompareMask = (PFN_vkCmdSetStencilCompareMask)load(context, "vkCmdSetStencilCompareMask"); + vkCmdSetStencilReference = (PFN_vkCmdSetStencilReference)load(context, "vkCmdSetStencilReference"); + vkCmdSetStencilWriteMask = (PFN_vkCmdSetStencilWriteMask)load(context, "vkCmdSetStencilWriteMask"); + vkCmdSetViewport = (PFN_vkCmdSetViewport)load(context, "vkCmdSetViewport"); + vkCmdUpdateBuffer = (PFN_vkCmdUpdateBuffer)load(context, "vkCmdUpdateBuffer"); + vkCmdWaitEvents = (PFN_vkCmdWaitEvents)load(context, "vkCmdWaitEvents"); + vkCmdWriteTimestamp = (PFN_vkCmdWriteTimestamp)load(context, "vkCmdWriteTimestamp"); + vkCreateBuffer = (PFN_vkCreateBuffer)load(context, "vkCreateBuffer"); + vkCreateBufferView = (PFN_vkCreateBufferView)load(context, "vkCreateBufferView"); + vkCreateCommandPool = (PFN_vkCreateCommandPool)load(context, "vkCreateCommandPool"); + vkCreateComputePipelines = (PFN_vkCreateComputePipelines)load(context, "vkCreateComputePipelines"); + vkCreateDescriptorPool = (PFN_vkCreateDescriptorPool)load(context, "vkCreateDescriptorPool"); + vkCreateDescriptorSetLayout = (PFN_vkCreateDescriptorSetLayout)load(context, "vkCreateDescriptorSetLayout"); + vkCreateEvent = (PFN_vkCreateEvent)load(context, "vkCreateEvent"); + vkCreateFence = (PFN_vkCreateFence)load(context, "vkCreateFence"); + vkCreateFramebuffer = (PFN_vkCreateFramebuffer)load(context, "vkCreateFramebuffer"); + vkCreateGraphicsPipelines = (PFN_vkCreateGraphicsPipelines)load(context, "vkCreateGraphicsPipelines"); + vkCreateImage = (PFN_vkCreateImage)load(context, "vkCreateImage"); + vkCreateImageView = (PFN_vkCreateImageView)load(context, "vkCreateImageView"); + 
vkCreatePipelineCache = (PFN_vkCreatePipelineCache)load(context, "vkCreatePipelineCache"); + vkCreatePipelineLayout = (PFN_vkCreatePipelineLayout)load(context, "vkCreatePipelineLayout"); + vkCreateQueryPool = (PFN_vkCreateQueryPool)load(context, "vkCreateQueryPool"); + vkCreateRenderPass = (PFN_vkCreateRenderPass)load(context, "vkCreateRenderPass"); + vkCreateSampler = (PFN_vkCreateSampler)load(context, "vkCreateSampler"); + vkCreateSemaphore = (PFN_vkCreateSemaphore)load(context, "vkCreateSemaphore"); + vkCreateShaderModule = (PFN_vkCreateShaderModule)load(context, "vkCreateShaderModule"); + vkDestroyBuffer = (PFN_vkDestroyBuffer)load(context, "vkDestroyBuffer"); + vkDestroyBufferView = (PFN_vkDestroyBufferView)load(context, "vkDestroyBufferView"); + vkDestroyCommandPool = (PFN_vkDestroyCommandPool)load(context, "vkDestroyCommandPool"); + vkDestroyDescriptorPool = (PFN_vkDestroyDescriptorPool)load(context, "vkDestroyDescriptorPool"); + vkDestroyDescriptorSetLayout = (PFN_vkDestroyDescriptorSetLayout)load(context, "vkDestroyDescriptorSetLayout"); + vkDestroyDevice = (PFN_vkDestroyDevice)load(context, "vkDestroyDevice"); + vkDestroyEvent = (PFN_vkDestroyEvent)load(context, "vkDestroyEvent"); + vkDestroyFence = (PFN_vkDestroyFence)load(context, "vkDestroyFence"); + vkDestroyFramebuffer = (PFN_vkDestroyFramebuffer)load(context, "vkDestroyFramebuffer"); + vkDestroyImage = (PFN_vkDestroyImage)load(context, "vkDestroyImage"); + vkDestroyImageView = (PFN_vkDestroyImageView)load(context, "vkDestroyImageView"); + vkDestroyPipeline = (PFN_vkDestroyPipeline)load(context, "vkDestroyPipeline"); + vkDestroyPipelineCache = (PFN_vkDestroyPipelineCache)load(context, "vkDestroyPipelineCache"); + vkDestroyPipelineLayout = (PFN_vkDestroyPipelineLayout)load(context, "vkDestroyPipelineLayout"); + vkDestroyQueryPool = (PFN_vkDestroyQueryPool)load(context, "vkDestroyQueryPool"); + vkDestroyRenderPass = (PFN_vkDestroyRenderPass)load(context, "vkDestroyRenderPass"); + vkDestroySampler = (PFN_vkDestroySampler)load(context, "vkDestroySampler"); + vkDestroySemaphore = (PFN_vkDestroySemaphore)load(context, "vkDestroySemaphore"); + vkDestroyShaderModule = (PFN_vkDestroyShaderModule)load(context, "vkDestroyShaderModule"); + vkDeviceWaitIdle = (PFN_vkDeviceWaitIdle)load(context, "vkDeviceWaitIdle"); + vkEndCommandBuffer = (PFN_vkEndCommandBuffer)load(context, "vkEndCommandBuffer"); + vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)load(context, "vkFlushMappedMemoryRanges"); + vkFreeCommandBuffers = (PFN_vkFreeCommandBuffers)load(context, "vkFreeCommandBuffers"); + vkFreeDescriptorSets = (PFN_vkFreeDescriptorSets)load(context, "vkFreeDescriptorSets"); + vkFreeMemory = (PFN_vkFreeMemory)load(context, "vkFreeMemory"); + vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)load(context, "vkGetBufferMemoryRequirements"); + vkGetDeviceMemoryCommitment = (PFN_vkGetDeviceMemoryCommitment)load(context, "vkGetDeviceMemoryCommitment"); + vkGetDeviceQueue = (PFN_vkGetDeviceQueue)load(context, "vkGetDeviceQueue"); + vkGetEventStatus = (PFN_vkGetEventStatus)load(context, "vkGetEventStatus"); + vkGetFenceStatus = (PFN_vkGetFenceStatus)load(context, "vkGetFenceStatus"); + vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)load(context, "vkGetImageMemoryRequirements"); + vkGetImageSparseMemoryRequirements = (PFN_vkGetImageSparseMemoryRequirements)load(context, "vkGetImageSparseMemoryRequirements"); + vkGetImageSubresourceLayout = (PFN_vkGetImageSubresourceLayout)load(context, 
"vkGetImageSubresourceLayout"); + vkGetPipelineCacheData = (PFN_vkGetPipelineCacheData)load(context, "vkGetPipelineCacheData"); + vkGetQueryPoolResults = (PFN_vkGetQueryPoolResults)load(context, "vkGetQueryPoolResults"); + vkGetRenderAreaGranularity = (PFN_vkGetRenderAreaGranularity)load(context, "vkGetRenderAreaGranularity"); + vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)load(context, "vkInvalidateMappedMemoryRanges"); + vkMapMemory = (PFN_vkMapMemory)load(context, "vkMapMemory"); + vkMergePipelineCaches = (PFN_vkMergePipelineCaches)load(context, "vkMergePipelineCaches"); + vkQueueBindSparse = (PFN_vkQueueBindSparse)load(context, "vkQueueBindSparse"); + vkQueueSubmit = (PFN_vkQueueSubmit)load(context, "vkQueueSubmit"); + vkQueueWaitIdle = (PFN_vkQueueWaitIdle)load(context, "vkQueueWaitIdle"); + vkResetCommandBuffer = (PFN_vkResetCommandBuffer)load(context, "vkResetCommandBuffer"); + vkResetCommandPool = (PFN_vkResetCommandPool)load(context, "vkResetCommandPool"); + vkResetDescriptorPool = (PFN_vkResetDescriptorPool)load(context, "vkResetDescriptorPool"); + vkResetEvent = (PFN_vkResetEvent)load(context, "vkResetEvent"); + vkResetFences = (PFN_vkResetFences)load(context, "vkResetFences"); + vkSetEvent = (PFN_vkSetEvent)load(context, "vkSetEvent"); + vkUnmapMemory = (PFN_vkUnmapMemory)load(context, "vkUnmapMemory"); + vkUpdateDescriptorSets = (PFN_vkUpdateDescriptorSets)load(context, "vkUpdateDescriptorSets"); + vkWaitForFences = (PFN_vkWaitForFences)load(context, "vkWaitForFences"); +#endif /* defined(VK_VERSION_1_0) */ +#if defined(VK_VERSION_1_1) + vkBindBufferMemory2 = (PFN_vkBindBufferMemory2)load(context, "vkBindBufferMemory2"); + vkBindImageMemory2 = (PFN_vkBindImageMemory2)load(context, "vkBindImageMemory2"); + vkCmdDispatchBase = (PFN_vkCmdDispatchBase)load(context, "vkCmdDispatchBase"); + vkCmdSetDeviceMask = (PFN_vkCmdSetDeviceMask)load(context, "vkCmdSetDeviceMask"); + vkCreateDescriptorUpdateTemplate = (PFN_vkCreateDescriptorUpdateTemplate)load(context, "vkCreateDescriptorUpdateTemplate"); + vkCreateSamplerYcbcrConversion = (PFN_vkCreateSamplerYcbcrConversion)load(context, "vkCreateSamplerYcbcrConversion"); + vkDestroyDescriptorUpdateTemplate = (PFN_vkDestroyDescriptorUpdateTemplate)load(context, "vkDestroyDescriptorUpdateTemplate"); + vkDestroySamplerYcbcrConversion = (PFN_vkDestroySamplerYcbcrConversion)load(context, "vkDestroySamplerYcbcrConversion"); + vkGetBufferMemoryRequirements2 = (PFN_vkGetBufferMemoryRequirements2)load(context, "vkGetBufferMemoryRequirements2"); + vkGetDescriptorSetLayoutSupport = (PFN_vkGetDescriptorSetLayoutSupport)load(context, "vkGetDescriptorSetLayoutSupport"); + vkGetDeviceGroupPeerMemoryFeatures = (PFN_vkGetDeviceGroupPeerMemoryFeatures)load(context, "vkGetDeviceGroupPeerMemoryFeatures"); + vkGetDeviceQueue2 = (PFN_vkGetDeviceQueue2)load(context, "vkGetDeviceQueue2"); + vkGetImageMemoryRequirements2 = (PFN_vkGetImageMemoryRequirements2)load(context, "vkGetImageMemoryRequirements2"); + vkGetImageSparseMemoryRequirements2 = (PFN_vkGetImageSparseMemoryRequirements2)load(context, "vkGetImageSparseMemoryRequirements2"); + vkTrimCommandPool = (PFN_vkTrimCommandPool)load(context, "vkTrimCommandPool"); + vkUpdateDescriptorSetWithTemplate = (PFN_vkUpdateDescriptorSetWithTemplate)load(context, "vkUpdateDescriptorSetWithTemplate"); +#endif /* defined(VK_VERSION_1_1) */ +#if defined(VK_VERSION_1_2) + vkCmdBeginRenderPass2 = (PFN_vkCmdBeginRenderPass2)load(context, "vkCmdBeginRenderPass2"); + vkCmdDrawIndexedIndirectCount = 
(PFN_vkCmdDrawIndexedIndirectCount)load(context, "vkCmdDrawIndexedIndirectCount"); + vkCmdDrawIndirectCount = (PFN_vkCmdDrawIndirectCount)load(context, "vkCmdDrawIndirectCount"); + vkCmdEndRenderPass2 = (PFN_vkCmdEndRenderPass2)load(context, "vkCmdEndRenderPass2"); + vkCmdNextSubpass2 = (PFN_vkCmdNextSubpass2)load(context, "vkCmdNextSubpass2"); + vkCreateRenderPass2 = (PFN_vkCreateRenderPass2)load(context, "vkCreateRenderPass2"); + vkGetBufferDeviceAddress = (PFN_vkGetBufferDeviceAddress)load(context, "vkGetBufferDeviceAddress"); + vkGetBufferOpaqueCaptureAddress = (PFN_vkGetBufferOpaqueCaptureAddress)load(context, "vkGetBufferOpaqueCaptureAddress"); + vkGetDeviceMemoryOpaqueCaptureAddress = (PFN_vkGetDeviceMemoryOpaqueCaptureAddress)load(context, "vkGetDeviceMemoryOpaqueCaptureAddress"); + vkGetSemaphoreCounterValue = (PFN_vkGetSemaphoreCounterValue)load(context, "vkGetSemaphoreCounterValue"); + vkResetQueryPool = (PFN_vkResetQueryPool)load(context, "vkResetQueryPool"); + vkSignalSemaphore = (PFN_vkSignalSemaphore)load(context, "vkSignalSemaphore"); + vkWaitSemaphores = (PFN_vkWaitSemaphores)load(context, "vkWaitSemaphores"); +#endif /* defined(VK_VERSION_1_2) */ +#if defined(VK_VERSION_1_3) + vkCmdBeginRendering = (PFN_vkCmdBeginRendering)load(context, "vkCmdBeginRendering"); + vkCmdBindVertexBuffers2 = (PFN_vkCmdBindVertexBuffers2)load(context, "vkCmdBindVertexBuffers2"); + vkCmdBlitImage2 = (PFN_vkCmdBlitImage2)load(context, "vkCmdBlitImage2"); + vkCmdCopyBuffer2 = (PFN_vkCmdCopyBuffer2)load(context, "vkCmdCopyBuffer2"); + vkCmdCopyBufferToImage2 = (PFN_vkCmdCopyBufferToImage2)load(context, "vkCmdCopyBufferToImage2"); + vkCmdCopyImage2 = (PFN_vkCmdCopyImage2)load(context, "vkCmdCopyImage2"); + vkCmdCopyImageToBuffer2 = (PFN_vkCmdCopyImageToBuffer2)load(context, "vkCmdCopyImageToBuffer2"); + vkCmdEndRendering = (PFN_vkCmdEndRendering)load(context, "vkCmdEndRendering"); + vkCmdPipelineBarrier2 = (PFN_vkCmdPipelineBarrier2)load(context, "vkCmdPipelineBarrier2"); + vkCmdResetEvent2 = (PFN_vkCmdResetEvent2)load(context, "vkCmdResetEvent2"); + vkCmdResolveImage2 = (PFN_vkCmdResolveImage2)load(context, "vkCmdResolveImage2"); + vkCmdSetCullMode = (PFN_vkCmdSetCullMode)load(context, "vkCmdSetCullMode"); + vkCmdSetDepthBiasEnable = (PFN_vkCmdSetDepthBiasEnable)load(context, "vkCmdSetDepthBiasEnable"); + vkCmdSetDepthBoundsTestEnable = (PFN_vkCmdSetDepthBoundsTestEnable)load(context, "vkCmdSetDepthBoundsTestEnable"); + vkCmdSetDepthCompareOp = (PFN_vkCmdSetDepthCompareOp)load(context, "vkCmdSetDepthCompareOp"); + vkCmdSetDepthTestEnable = (PFN_vkCmdSetDepthTestEnable)load(context, "vkCmdSetDepthTestEnable"); + vkCmdSetDepthWriteEnable = (PFN_vkCmdSetDepthWriteEnable)load(context, "vkCmdSetDepthWriteEnable"); + vkCmdSetEvent2 = (PFN_vkCmdSetEvent2)load(context, "vkCmdSetEvent2"); + vkCmdSetFrontFace = (PFN_vkCmdSetFrontFace)load(context, "vkCmdSetFrontFace"); + vkCmdSetPrimitiveRestartEnable = (PFN_vkCmdSetPrimitiveRestartEnable)load(context, "vkCmdSetPrimitiveRestartEnable"); + vkCmdSetPrimitiveTopology = (PFN_vkCmdSetPrimitiveTopology)load(context, "vkCmdSetPrimitiveTopology"); + vkCmdSetRasterizerDiscardEnable = (PFN_vkCmdSetRasterizerDiscardEnable)load(context, "vkCmdSetRasterizerDiscardEnable"); + vkCmdSetScissorWithCount = (PFN_vkCmdSetScissorWithCount)load(context, "vkCmdSetScissorWithCount"); + vkCmdSetStencilOp = (PFN_vkCmdSetStencilOp)load(context, "vkCmdSetStencilOp"); + vkCmdSetStencilTestEnable = (PFN_vkCmdSetStencilTestEnable)load(context, "vkCmdSetStencilTestEnable"); + 
vkCmdSetViewportWithCount = (PFN_vkCmdSetViewportWithCount)load(context, "vkCmdSetViewportWithCount"); + vkCmdWaitEvents2 = (PFN_vkCmdWaitEvents2)load(context, "vkCmdWaitEvents2"); + vkCmdWriteTimestamp2 = (PFN_vkCmdWriteTimestamp2)load(context, "vkCmdWriteTimestamp2"); + vkCreatePrivateDataSlot = (PFN_vkCreatePrivateDataSlot)load(context, "vkCreatePrivateDataSlot"); + vkDestroyPrivateDataSlot = (PFN_vkDestroyPrivateDataSlot)load(context, "vkDestroyPrivateDataSlot"); + vkGetDeviceBufferMemoryRequirements = (PFN_vkGetDeviceBufferMemoryRequirements)load(context, "vkGetDeviceBufferMemoryRequirements"); + vkGetDeviceImageMemoryRequirements = (PFN_vkGetDeviceImageMemoryRequirements)load(context, "vkGetDeviceImageMemoryRequirements"); + vkGetDeviceImageSparseMemoryRequirements = (PFN_vkGetDeviceImageSparseMemoryRequirements)load(context, "vkGetDeviceImageSparseMemoryRequirements"); + vkGetPrivateData = (PFN_vkGetPrivateData)load(context, "vkGetPrivateData"); + vkQueueSubmit2 = (PFN_vkQueueSubmit2)load(context, "vkQueueSubmit2"); + vkSetPrivateData = (PFN_vkSetPrivateData)load(context, "vkSetPrivateData"); +#endif /* defined(VK_VERSION_1_3) */ +#if defined(VK_AMDX_shader_enqueue) + vkCmdDispatchGraphAMDX = (PFN_vkCmdDispatchGraphAMDX)load(context, "vkCmdDispatchGraphAMDX"); + vkCmdDispatchGraphIndirectAMDX = (PFN_vkCmdDispatchGraphIndirectAMDX)load(context, "vkCmdDispatchGraphIndirectAMDX"); + vkCmdDispatchGraphIndirectCountAMDX = (PFN_vkCmdDispatchGraphIndirectCountAMDX)load(context, "vkCmdDispatchGraphIndirectCountAMDX"); + vkCmdInitializeGraphScratchMemoryAMDX = (PFN_vkCmdInitializeGraphScratchMemoryAMDX)load(context, "vkCmdInitializeGraphScratchMemoryAMDX"); + vkCreateExecutionGraphPipelinesAMDX = (PFN_vkCreateExecutionGraphPipelinesAMDX)load(context, "vkCreateExecutionGraphPipelinesAMDX"); + vkGetExecutionGraphPipelineNodeIndexAMDX = (PFN_vkGetExecutionGraphPipelineNodeIndexAMDX)load(context, "vkGetExecutionGraphPipelineNodeIndexAMDX"); + vkGetExecutionGraphPipelineScratchSizeAMDX = (PFN_vkGetExecutionGraphPipelineScratchSizeAMDX)load(context, "vkGetExecutionGraphPipelineScratchSizeAMDX"); +#endif /* defined(VK_AMDX_shader_enqueue) */ +#if defined(VK_AMD_buffer_marker) + vkCmdWriteBufferMarkerAMD = (PFN_vkCmdWriteBufferMarkerAMD)load(context, "vkCmdWriteBufferMarkerAMD"); +#endif /* defined(VK_AMD_buffer_marker) */ +#if defined(VK_AMD_display_native_hdr) + vkSetLocalDimmingAMD = (PFN_vkSetLocalDimmingAMD)load(context, "vkSetLocalDimmingAMD"); +#endif /* defined(VK_AMD_display_native_hdr) */ +#if defined(VK_AMD_draw_indirect_count) + vkCmdDrawIndexedIndirectCountAMD = (PFN_vkCmdDrawIndexedIndirectCountAMD)load(context, "vkCmdDrawIndexedIndirectCountAMD"); + vkCmdDrawIndirectCountAMD = (PFN_vkCmdDrawIndirectCountAMD)load(context, "vkCmdDrawIndirectCountAMD"); +#endif /* defined(VK_AMD_draw_indirect_count) */ +#if defined(VK_AMD_shader_info) + vkGetShaderInfoAMD = (PFN_vkGetShaderInfoAMD)load(context, "vkGetShaderInfoAMD"); +#endif /* defined(VK_AMD_shader_info) */ +#if defined(VK_ANDROID_external_memory_android_hardware_buffer) + vkGetAndroidHardwareBufferPropertiesANDROID = (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)load(context, "vkGetAndroidHardwareBufferPropertiesANDROID"); + vkGetMemoryAndroidHardwareBufferANDROID = (PFN_vkGetMemoryAndroidHardwareBufferANDROID)load(context, "vkGetMemoryAndroidHardwareBufferANDROID"); +#endif /* defined(VK_ANDROID_external_memory_android_hardware_buffer) */ +#if defined(VK_EXT_attachment_feedback_loop_dynamic_state) + 
vkCmdSetAttachmentFeedbackLoopEnableEXT = (PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT)load(context, "vkCmdSetAttachmentFeedbackLoopEnableEXT");
+#endif /* defined(VK_EXT_attachment_feedback_loop_dynamic_state) */
+#if defined(VK_EXT_buffer_device_address)
+ vkGetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT)load(context, "vkGetBufferDeviceAddressEXT");
+#endif /* defined(VK_EXT_buffer_device_address) */
+#if defined(VK_EXT_calibrated_timestamps)
+ vkGetCalibratedTimestampsEXT = (PFN_vkGetCalibratedTimestampsEXT)load(context, "vkGetCalibratedTimestampsEXT");
+#endif /* defined(VK_EXT_calibrated_timestamps) */
+#if defined(VK_EXT_color_write_enable)
+ vkCmdSetColorWriteEnableEXT = (PFN_vkCmdSetColorWriteEnableEXT)load(context, "vkCmdSetColorWriteEnableEXT");
+#endif /* defined(VK_EXT_color_write_enable) */
+#if defined(VK_EXT_conditional_rendering)
+ vkCmdBeginConditionalRenderingEXT = (PFN_vkCmdBeginConditionalRenderingEXT)load(context, "vkCmdBeginConditionalRenderingEXT");
+ vkCmdEndConditionalRenderingEXT = (PFN_vkCmdEndConditionalRenderingEXT)load(context, "vkCmdEndConditionalRenderingEXT");
+#endif /* defined(VK_EXT_conditional_rendering) */
+#if defined(VK_EXT_debug_marker)
+ vkCmdDebugMarkerBeginEXT = (PFN_vkCmdDebugMarkerBeginEXT)load(context, "vkCmdDebugMarkerBeginEXT");
+ vkCmdDebugMarkerEndEXT = (PFN_vkCmdDebugMarkerEndEXT)load(context, "vkCmdDebugMarkerEndEXT");
+ vkCmdDebugMarkerInsertEXT = (PFN_vkCmdDebugMarkerInsertEXT)load(context, "vkCmdDebugMarkerInsertEXT");
+ vkDebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT)load(context, "vkDebugMarkerSetObjectNameEXT");
+ vkDebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT)load(context, "vkDebugMarkerSetObjectTagEXT");
+#endif /* defined(VK_EXT_debug_marker) */
+#if defined(VK_EXT_depth_bias_control)
+ vkCmdSetDepthBias2EXT = (PFN_vkCmdSetDepthBias2EXT)load(context, "vkCmdSetDepthBias2EXT");
+#endif /* defined(VK_EXT_depth_bias_control) */
+#if defined(VK_EXT_descriptor_buffer)
+ vkCmdBindDescriptorBufferEmbeddedSamplersEXT = (PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT)load(context, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT");
+ vkCmdBindDescriptorBuffersEXT = (PFN_vkCmdBindDescriptorBuffersEXT)load(context, "vkCmdBindDescriptorBuffersEXT");
+ vkCmdSetDescriptorBufferOffsetsEXT = (PFN_vkCmdSetDescriptorBufferOffsetsEXT)load(context, "vkCmdSetDescriptorBufferOffsetsEXT");
+ vkGetBufferOpaqueCaptureDescriptorDataEXT = (PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT)load(context, "vkGetBufferOpaqueCaptureDescriptorDataEXT");
+ vkGetDescriptorEXT = (PFN_vkGetDescriptorEXT)load(context, "vkGetDescriptorEXT");
+ vkGetDescriptorSetLayoutBindingOffsetEXT = (PFN_vkGetDescriptorSetLayoutBindingOffsetEXT)load(context, "vkGetDescriptorSetLayoutBindingOffsetEXT");
+ vkGetDescriptorSetLayoutSizeEXT = (PFN_vkGetDescriptorSetLayoutSizeEXT)load(context, "vkGetDescriptorSetLayoutSizeEXT");
+ vkGetImageOpaqueCaptureDescriptorDataEXT = (PFN_vkGetImageOpaqueCaptureDescriptorDataEXT)load(context, "vkGetImageOpaqueCaptureDescriptorDataEXT");
+ vkGetImageViewOpaqueCaptureDescriptorDataEXT = (PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT)load(context, "vkGetImageViewOpaqueCaptureDescriptorDataEXT");
+ vkGetSamplerOpaqueCaptureDescriptorDataEXT = (PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT)load(context, "vkGetSamplerOpaqueCaptureDescriptorDataEXT");
+#endif /* defined(VK_EXT_descriptor_buffer) */
+#if defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing))
+ vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = (PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT)load(context, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT");
+#endif /* defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) */
+#if defined(VK_EXT_device_fault)
+ vkGetDeviceFaultInfoEXT = (PFN_vkGetDeviceFaultInfoEXT)load(context, "vkGetDeviceFaultInfoEXT");
+#endif /* defined(VK_EXT_device_fault) */
+#if defined(VK_EXT_discard_rectangles)
+ vkCmdSetDiscardRectangleEXT = (PFN_vkCmdSetDiscardRectangleEXT)load(context, "vkCmdSetDiscardRectangleEXT");
+#endif /* defined(VK_EXT_discard_rectangles) */
+#if defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2
+ vkCmdSetDiscardRectangleEnableEXT = (PFN_vkCmdSetDiscardRectangleEnableEXT)load(context, "vkCmdSetDiscardRectangleEnableEXT");
+ vkCmdSetDiscardRectangleModeEXT = (PFN_vkCmdSetDiscardRectangleModeEXT)load(context, "vkCmdSetDiscardRectangleModeEXT");
+#endif /* defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 */
+#if defined(VK_EXT_display_control)
+ vkDisplayPowerControlEXT = (PFN_vkDisplayPowerControlEXT)load(context, "vkDisplayPowerControlEXT");
+ vkGetSwapchainCounterEXT = (PFN_vkGetSwapchainCounterEXT)load(context, "vkGetSwapchainCounterEXT");
+ vkRegisterDeviceEventEXT = (PFN_vkRegisterDeviceEventEXT)load(context, "vkRegisterDeviceEventEXT");
+ vkRegisterDisplayEventEXT = (PFN_vkRegisterDisplayEventEXT)load(context, "vkRegisterDisplayEventEXT");
+#endif /* defined(VK_EXT_display_control) */
+#if defined(VK_EXT_external_memory_host)
+ vkGetMemoryHostPointerPropertiesEXT = (PFN_vkGetMemoryHostPointerPropertiesEXT)load(context, "vkGetMemoryHostPointerPropertiesEXT");
+#endif /* defined(VK_EXT_external_memory_host) */
+#if defined(VK_EXT_full_screen_exclusive)
+ vkAcquireFullScreenExclusiveModeEXT = (PFN_vkAcquireFullScreenExclusiveModeEXT)load(context, "vkAcquireFullScreenExclusiveModeEXT");
+ vkReleaseFullScreenExclusiveModeEXT = (PFN_vkReleaseFullScreenExclusiveModeEXT)load(context, "vkReleaseFullScreenExclusiveModeEXT");
+#endif /* defined(VK_EXT_full_screen_exclusive) */
+#if defined(VK_EXT_hdr_metadata)
+ vkSetHdrMetadataEXT = (PFN_vkSetHdrMetadataEXT)load(context, "vkSetHdrMetadataEXT");
+#endif /* defined(VK_EXT_hdr_metadata) */
+#if defined(VK_EXT_host_image_copy)
+ vkCopyImageToImageEXT = (PFN_vkCopyImageToImageEXT)load(context, "vkCopyImageToImageEXT");
+ vkCopyImageToMemoryEXT = (PFN_vkCopyImageToMemoryEXT)load(context, "vkCopyImageToMemoryEXT");
+ vkCopyMemoryToImageEXT = (PFN_vkCopyMemoryToImageEXT)load(context, "vkCopyMemoryToImageEXT");
+ vkTransitionImageLayoutEXT = (PFN_vkTransitionImageLayoutEXT)load(context, "vkTransitionImageLayoutEXT");
+#endif /* defined(VK_EXT_host_image_copy) */
+#if defined(VK_EXT_host_query_reset)
+ vkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)load(context, "vkResetQueryPoolEXT");
+#endif /* defined(VK_EXT_host_query_reset) */
+#if defined(VK_EXT_image_drm_format_modifier)
+ vkGetImageDrmFormatModifierPropertiesEXT = (PFN_vkGetImageDrmFormatModifierPropertiesEXT)load(context, "vkGetImageDrmFormatModifierPropertiesEXT");
+#endif /* defined(VK_EXT_image_drm_format_modifier) */
+#if defined(VK_EXT_line_rasterization)
+ vkCmdSetLineStippleEXT = (PFN_vkCmdSetLineStippleEXT)load(context, "vkCmdSetLineStippleEXT");
+#endif /* defined(VK_EXT_line_rasterization) */
+#if defined(VK_EXT_mesh_shader)
+ vkCmdDrawMeshTasksEXT =
(PFN_vkCmdDrawMeshTasksEXT)load(context, "vkCmdDrawMeshTasksEXT"); + vkCmdDrawMeshTasksIndirectCountEXT = (PFN_vkCmdDrawMeshTasksIndirectCountEXT)load(context, "vkCmdDrawMeshTasksIndirectCountEXT"); + vkCmdDrawMeshTasksIndirectEXT = (PFN_vkCmdDrawMeshTasksIndirectEXT)load(context, "vkCmdDrawMeshTasksIndirectEXT"); +#endif /* defined(VK_EXT_mesh_shader) */ +#if defined(VK_EXT_metal_objects) + vkExportMetalObjectsEXT = (PFN_vkExportMetalObjectsEXT)load(context, "vkExportMetalObjectsEXT"); +#endif /* defined(VK_EXT_metal_objects) */ +#if defined(VK_EXT_multi_draw) + vkCmdDrawMultiEXT = (PFN_vkCmdDrawMultiEXT)load(context, "vkCmdDrawMultiEXT"); + vkCmdDrawMultiIndexedEXT = (PFN_vkCmdDrawMultiIndexedEXT)load(context, "vkCmdDrawMultiIndexedEXT"); +#endif /* defined(VK_EXT_multi_draw) */ +#if defined(VK_EXT_opacity_micromap) + vkBuildMicromapsEXT = (PFN_vkBuildMicromapsEXT)load(context, "vkBuildMicromapsEXT"); + vkCmdBuildMicromapsEXT = (PFN_vkCmdBuildMicromapsEXT)load(context, "vkCmdBuildMicromapsEXT"); + vkCmdCopyMemoryToMicromapEXT = (PFN_vkCmdCopyMemoryToMicromapEXT)load(context, "vkCmdCopyMemoryToMicromapEXT"); + vkCmdCopyMicromapEXT = (PFN_vkCmdCopyMicromapEXT)load(context, "vkCmdCopyMicromapEXT"); + vkCmdCopyMicromapToMemoryEXT = (PFN_vkCmdCopyMicromapToMemoryEXT)load(context, "vkCmdCopyMicromapToMemoryEXT"); + vkCmdWriteMicromapsPropertiesEXT = (PFN_vkCmdWriteMicromapsPropertiesEXT)load(context, "vkCmdWriteMicromapsPropertiesEXT"); + vkCopyMemoryToMicromapEXT = (PFN_vkCopyMemoryToMicromapEXT)load(context, "vkCopyMemoryToMicromapEXT"); + vkCopyMicromapEXT = (PFN_vkCopyMicromapEXT)load(context, "vkCopyMicromapEXT"); + vkCopyMicromapToMemoryEXT = (PFN_vkCopyMicromapToMemoryEXT)load(context, "vkCopyMicromapToMemoryEXT"); + vkCreateMicromapEXT = (PFN_vkCreateMicromapEXT)load(context, "vkCreateMicromapEXT"); + vkDestroyMicromapEXT = (PFN_vkDestroyMicromapEXT)load(context, "vkDestroyMicromapEXT"); + vkGetDeviceMicromapCompatibilityEXT = (PFN_vkGetDeviceMicromapCompatibilityEXT)load(context, "vkGetDeviceMicromapCompatibilityEXT"); + vkGetMicromapBuildSizesEXT = (PFN_vkGetMicromapBuildSizesEXT)load(context, "vkGetMicromapBuildSizesEXT"); + vkWriteMicromapsPropertiesEXT = (PFN_vkWriteMicromapsPropertiesEXT)load(context, "vkWriteMicromapsPropertiesEXT"); +#endif /* defined(VK_EXT_opacity_micromap) */ +#if defined(VK_EXT_pageable_device_local_memory) + vkSetDeviceMemoryPriorityEXT = (PFN_vkSetDeviceMemoryPriorityEXT)load(context, "vkSetDeviceMemoryPriorityEXT"); +#endif /* defined(VK_EXT_pageable_device_local_memory) */ +#if defined(VK_EXT_pipeline_properties) + vkGetPipelinePropertiesEXT = (PFN_vkGetPipelinePropertiesEXT)load(context, "vkGetPipelinePropertiesEXT"); +#endif /* defined(VK_EXT_pipeline_properties) */ +#if defined(VK_EXT_private_data) + vkCreatePrivateDataSlotEXT = (PFN_vkCreatePrivateDataSlotEXT)load(context, "vkCreatePrivateDataSlotEXT"); + vkDestroyPrivateDataSlotEXT = (PFN_vkDestroyPrivateDataSlotEXT)load(context, "vkDestroyPrivateDataSlotEXT"); + vkGetPrivateDataEXT = (PFN_vkGetPrivateDataEXT)load(context, "vkGetPrivateDataEXT"); + vkSetPrivateDataEXT = (PFN_vkSetPrivateDataEXT)load(context, "vkSetPrivateDataEXT"); +#endif /* defined(VK_EXT_private_data) */ +#if defined(VK_EXT_sample_locations) + vkCmdSetSampleLocationsEXT = (PFN_vkCmdSetSampleLocationsEXT)load(context, "vkCmdSetSampleLocationsEXT"); +#endif /* defined(VK_EXT_sample_locations) */ +#if defined(VK_EXT_shader_module_identifier) + vkGetShaderModuleCreateInfoIdentifierEXT = 
(PFN_vkGetShaderModuleCreateInfoIdentifierEXT)load(context, "vkGetShaderModuleCreateInfoIdentifierEXT"); + vkGetShaderModuleIdentifierEXT = (PFN_vkGetShaderModuleIdentifierEXT)load(context, "vkGetShaderModuleIdentifierEXT"); +#endif /* defined(VK_EXT_shader_module_identifier) */ +#if defined(VK_EXT_shader_object) + vkCmdBindShadersEXT = (PFN_vkCmdBindShadersEXT)load(context, "vkCmdBindShadersEXT"); + vkCreateShadersEXT = (PFN_vkCreateShadersEXT)load(context, "vkCreateShadersEXT"); + vkDestroyShaderEXT = (PFN_vkDestroyShaderEXT)load(context, "vkDestroyShaderEXT"); + vkGetShaderBinaryDataEXT = (PFN_vkGetShaderBinaryDataEXT)load(context, "vkGetShaderBinaryDataEXT"); +#endif /* defined(VK_EXT_shader_object) */ +#if defined(VK_EXT_swapchain_maintenance1) + vkReleaseSwapchainImagesEXT = (PFN_vkReleaseSwapchainImagesEXT)load(context, "vkReleaseSwapchainImagesEXT"); +#endif /* defined(VK_EXT_swapchain_maintenance1) */ +#if defined(VK_EXT_transform_feedback) + vkCmdBeginQueryIndexedEXT = (PFN_vkCmdBeginQueryIndexedEXT)load(context, "vkCmdBeginQueryIndexedEXT"); + vkCmdBeginTransformFeedbackEXT = (PFN_vkCmdBeginTransformFeedbackEXT)load(context, "vkCmdBeginTransformFeedbackEXT"); + vkCmdBindTransformFeedbackBuffersEXT = (PFN_vkCmdBindTransformFeedbackBuffersEXT)load(context, "vkCmdBindTransformFeedbackBuffersEXT"); + vkCmdDrawIndirectByteCountEXT = (PFN_vkCmdDrawIndirectByteCountEXT)load(context, "vkCmdDrawIndirectByteCountEXT"); + vkCmdEndQueryIndexedEXT = (PFN_vkCmdEndQueryIndexedEXT)load(context, "vkCmdEndQueryIndexedEXT"); + vkCmdEndTransformFeedbackEXT = (PFN_vkCmdEndTransformFeedbackEXT)load(context, "vkCmdEndTransformFeedbackEXT"); +#endif /* defined(VK_EXT_transform_feedback) */ +#if defined(VK_EXT_validation_cache) + vkCreateValidationCacheEXT = (PFN_vkCreateValidationCacheEXT)load(context, "vkCreateValidationCacheEXT"); + vkDestroyValidationCacheEXT = (PFN_vkDestroyValidationCacheEXT)load(context, "vkDestroyValidationCacheEXT"); + vkGetValidationCacheDataEXT = (PFN_vkGetValidationCacheDataEXT)load(context, "vkGetValidationCacheDataEXT"); + vkMergeValidationCachesEXT = (PFN_vkMergeValidationCachesEXT)load(context, "vkMergeValidationCachesEXT"); +#endif /* defined(VK_EXT_validation_cache) */ +#if defined(VK_FUCHSIA_buffer_collection) + vkCreateBufferCollectionFUCHSIA = (PFN_vkCreateBufferCollectionFUCHSIA)load(context, "vkCreateBufferCollectionFUCHSIA"); + vkDestroyBufferCollectionFUCHSIA = (PFN_vkDestroyBufferCollectionFUCHSIA)load(context, "vkDestroyBufferCollectionFUCHSIA"); + vkGetBufferCollectionPropertiesFUCHSIA = (PFN_vkGetBufferCollectionPropertiesFUCHSIA)load(context, "vkGetBufferCollectionPropertiesFUCHSIA"); + vkSetBufferCollectionBufferConstraintsFUCHSIA = (PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA)load(context, "vkSetBufferCollectionBufferConstraintsFUCHSIA"); + vkSetBufferCollectionImageConstraintsFUCHSIA = (PFN_vkSetBufferCollectionImageConstraintsFUCHSIA)load(context, "vkSetBufferCollectionImageConstraintsFUCHSIA"); +#endif /* defined(VK_FUCHSIA_buffer_collection) */ +#if defined(VK_FUCHSIA_external_memory) + vkGetMemoryZirconHandleFUCHSIA = (PFN_vkGetMemoryZirconHandleFUCHSIA)load(context, "vkGetMemoryZirconHandleFUCHSIA"); + vkGetMemoryZirconHandlePropertiesFUCHSIA = (PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA)load(context, "vkGetMemoryZirconHandlePropertiesFUCHSIA"); +#endif /* defined(VK_FUCHSIA_external_memory) */ +#if defined(VK_FUCHSIA_external_semaphore) + vkGetSemaphoreZirconHandleFUCHSIA = (PFN_vkGetSemaphoreZirconHandleFUCHSIA)load(context, 
"vkGetSemaphoreZirconHandleFUCHSIA"); + vkImportSemaphoreZirconHandleFUCHSIA = (PFN_vkImportSemaphoreZirconHandleFUCHSIA)load(context, "vkImportSemaphoreZirconHandleFUCHSIA"); +#endif /* defined(VK_FUCHSIA_external_semaphore) */ +#if defined(VK_GOOGLE_display_timing) + vkGetPastPresentationTimingGOOGLE = (PFN_vkGetPastPresentationTimingGOOGLE)load(context, "vkGetPastPresentationTimingGOOGLE"); + vkGetRefreshCycleDurationGOOGLE = (PFN_vkGetRefreshCycleDurationGOOGLE)load(context, "vkGetRefreshCycleDurationGOOGLE"); +#endif /* defined(VK_GOOGLE_display_timing) */ +#if defined(VK_HUAWEI_cluster_culling_shader) + vkCmdDrawClusterHUAWEI = (PFN_vkCmdDrawClusterHUAWEI)load(context, "vkCmdDrawClusterHUAWEI"); + vkCmdDrawClusterIndirectHUAWEI = (PFN_vkCmdDrawClusterIndirectHUAWEI)load(context, "vkCmdDrawClusterIndirectHUAWEI"); +#endif /* defined(VK_HUAWEI_cluster_culling_shader) */ +#if defined(VK_HUAWEI_invocation_mask) + vkCmdBindInvocationMaskHUAWEI = (PFN_vkCmdBindInvocationMaskHUAWEI)load(context, "vkCmdBindInvocationMaskHUAWEI"); +#endif /* defined(VK_HUAWEI_invocation_mask) */ +#if defined(VK_HUAWEI_subpass_shading) + vkCmdSubpassShadingHUAWEI = (PFN_vkCmdSubpassShadingHUAWEI)load(context, "vkCmdSubpassShadingHUAWEI"); + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = (PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI)load(context, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI"); +#endif /* defined(VK_HUAWEI_subpass_shading) */ +#if defined(VK_INTEL_performance_query) + vkAcquirePerformanceConfigurationINTEL = (PFN_vkAcquirePerformanceConfigurationINTEL)load(context, "vkAcquirePerformanceConfigurationINTEL"); + vkCmdSetPerformanceMarkerINTEL = (PFN_vkCmdSetPerformanceMarkerINTEL)load(context, "vkCmdSetPerformanceMarkerINTEL"); + vkCmdSetPerformanceOverrideINTEL = (PFN_vkCmdSetPerformanceOverrideINTEL)load(context, "vkCmdSetPerformanceOverrideINTEL"); + vkCmdSetPerformanceStreamMarkerINTEL = (PFN_vkCmdSetPerformanceStreamMarkerINTEL)load(context, "vkCmdSetPerformanceStreamMarkerINTEL"); + vkGetPerformanceParameterINTEL = (PFN_vkGetPerformanceParameterINTEL)load(context, "vkGetPerformanceParameterINTEL"); + vkInitializePerformanceApiINTEL = (PFN_vkInitializePerformanceApiINTEL)load(context, "vkInitializePerformanceApiINTEL"); + vkQueueSetPerformanceConfigurationINTEL = (PFN_vkQueueSetPerformanceConfigurationINTEL)load(context, "vkQueueSetPerformanceConfigurationINTEL"); + vkReleasePerformanceConfigurationINTEL = (PFN_vkReleasePerformanceConfigurationINTEL)load(context, "vkReleasePerformanceConfigurationINTEL"); + vkUninitializePerformanceApiINTEL = (PFN_vkUninitializePerformanceApiINTEL)load(context, "vkUninitializePerformanceApiINTEL"); +#endif /* defined(VK_INTEL_performance_query) */ +#if defined(VK_KHR_acceleration_structure) + vkBuildAccelerationStructuresKHR = (PFN_vkBuildAccelerationStructuresKHR)load(context, "vkBuildAccelerationStructuresKHR"); + vkCmdBuildAccelerationStructuresIndirectKHR = (PFN_vkCmdBuildAccelerationStructuresIndirectKHR)load(context, "vkCmdBuildAccelerationStructuresIndirectKHR"); + vkCmdBuildAccelerationStructuresKHR = (PFN_vkCmdBuildAccelerationStructuresKHR)load(context, "vkCmdBuildAccelerationStructuresKHR"); + vkCmdCopyAccelerationStructureKHR = (PFN_vkCmdCopyAccelerationStructureKHR)load(context, "vkCmdCopyAccelerationStructureKHR"); + vkCmdCopyAccelerationStructureToMemoryKHR = (PFN_vkCmdCopyAccelerationStructureToMemoryKHR)load(context, "vkCmdCopyAccelerationStructureToMemoryKHR"); + vkCmdCopyMemoryToAccelerationStructureKHR = 
(PFN_vkCmdCopyMemoryToAccelerationStructureKHR)load(context, "vkCmdCopyMemoryToAccelerationStructureKHR"); + vkCmdWriteAccelerationStructuresPropertiesKHR = (PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)load(context, "vkCmdWriteAccelerationStructuresPropertiesKHR"); + vkCopyAccelerationStructureKHR = (PFN_vkCopyAccelerationStructureKHR)load(context, "vkCopyAccelerationStructureKHR"); + vkCopyAccelerationStructureToMemoryKHR = (PFN_vkCopyAccelerationStructureToMemoryKHR)load(context, "vkCopyAccelerationStructureToMemoryKHR"); + vkCopyMemoryToAccelerationStructureKHR = (PFN_vkCopyMemoryToAccelerationStructureKHR)load(context, "vkCopyMemoryToAccelerationStructureKHR"); + vkCreateAccelerationStructureKHR = (PFN_vkCreateAccelerationStructureKHR)load(context, "vkCreateAccelerationStructureKHR"); + vkDestroyAccelerationStructureKHR = (PFN_vkDestroyAccelerationStructureKHR)load(context, "vkDestroyAccelerationStructureKHR"); + vkGetAccelerationStructureBuildSizesKHR = (PFN_vkGetAccelerationStructureBuildSizesKHR)load(context, "vkGetAccelerationStructureBuildSizesKHR"); + vkGetAccelerationStructureDeviceAddressKHR = (PFN_vkGetAccelerationStructureDeviceAddressKHR)load(context, "vkGetAccelerationStructureDeviceAddressKHR"); + vkGetDeviceAccelerationStructureCompatibilityKHR = (PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)load(context, "vkGetDeviceAccelerationStructureCompatibilityKHR"); + vkWriteAccelerationStructuresPropertiesKHR = (PFN_vkWriteAccelerationStructuresPropertiesKHR)load(context, "vkWriteAccelerationStructuresPropertiesKHR"); +#endif /* defined(VK_KHR_acceleration_structure) */ +#if defined(VK_KHR_bind_memory2) + vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2KHR)load(context, "vkBindBufferMemory2KHR"); + vkBindImageMemory2KHR = (PFN_vkBindImageMemory2KHR)load(context, "vkBindImageMemory2KHR"); +#endif /* defined(VK_KHR_bind_memory2) */ +#if defined(VK_KHR_buffer_device_address) + vkGetBufferDeviceAddressKHR = (PFN_vkGetBufferDeviceAddressKHR)load(context, "vkGetBufferDeviceAddressKHR"); + vkGetBufferOpaqueCaptureAddressKHR = (PFN_vkGetBufferOpaqueCaptureAddressKHR)load(context, "vkGetBufferOpaqueCaptureAddressKHR"); + vkGetDeviceMemoryOpaqueCaptureAddressKHR = (PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)load(context, "vkGetDeviceMemoryOpaqueCaptureAddressKHR"); +#endif /* defined(VK_KHR_buffer_device_address) */ +#if defined(VK_KHR_copy_commands2) + vkCmdBlitImage2KHR = (PFN_vkCmdBlitImage2KHR)load(context, "vkCmdBlitImage2KHR"); + vkCmdCopyBuffer2KHR = (PFN_vkCmdCopyBuffer2KHR)load(context, "vkCmdCopyBuffer2KHR"); + vkCmdCopyBufferToImage2KHR = (PFN_vkCmdCopyBufferToImage2KHR)load(context, "vkCmdCopyBufferToImage2KHR"); + vkCmdCopyImage2KHR = (PFN_vkCmdCopyImage2KHR)load(context, "vkCmdCopyImage2KHR"); + vkCmdCopyImageToBuffer2KHR = (PFN_vkCmdCopyImageToBuffer2KHR)load(context, "vkCmdCopyImageToBuffer2KHR"); + vkCmdResolveImage2KHR = (PFN_vkCmdResolveImage2KHR)load(context, "vkCmdResolveImage2KHR"); +#endif /* defined(VK_KHR_copy_commands2) */ +#if defined(VK_KHR_create_renderpass2) + vkCmdBeginRenderPass2KHR = (PFN_vkCmdBeginRenderPass2KHR)load(context, "vkCmdBeginRenderPass2KHR"); + vkCmdEndRenderPass2KHR = (PFN_vkCmdEndRenderPass2KHR)load(context, "vkCmdEndRenderPass2KHR"); + vkCmdNextSubpass2KHR = (PFN_vkCmdNextSubpass2KHR)load(context, "vkCmdNextSubpass2KHR"); + vkCreateRenderPass2KHR = (PFN_vkCreateRenderPass2KHR)load(context, "vkCreateRenderPass2KHR"); +#endif /* defined(VK_KHR_create_renderpass2) */ +#if defined(VK_KHR_deferred_host_operations) + 
vkCreateDeferredOperationKHR = (PFN_vkCreateDeferredOperationKHR)load(context, "vkCreateDeferredOperationKHR");
+ vkDeferredOperationJoinKHR = (PFN_vkDeferredOperationJoinKHR)load(context, "vkDeferredOperationJoinKHR");
+ vkDestroyDeferredOperationKHR = (PFN_vkDestroyDeferredOperationKHR)load(context, "vkDestroyDeferredOperationKHR");
+ vkGetDeferredOperationMaxConcurrencyKHR = (PFN_vkGetDeferredOperationMaxConcurrencyKHR)load(context, "vkGetDeferredOperationMaxConcurrencyKHR");
+ vkGetDeferredOperationResultKHR = (PFN_vkGetDeferredOperationResultKHR)load(context, "vkGetDeferredOperationResultKHR");
+#endif /* defined(VK_KHR_deferred_host_operations) */
+#if defined(VK_KHR_descriptor_update_template)
+ vkCreateDescriptorUpdateTemplateKHR = (PFN_vkCreateDescriptorUpdateTemplateKHR)load(context, "vkCreateDescriptorUpdateTemplateKHR");
+ vkDestroyDescriptorUpdateTemplateKHR = (PFN_vkDestroyDescriptorUpdateTemplateKHR)load(context, "vkDestroyDescriptorUpdateTemplateKHR");
+ vkUpdateDescriptorSetWithTemplateKHR = (PFN_vkUpdateDescriptorSetWithTemplateKHR)load(context, "vkUpdateDescriptorSetWithTemplateKHR");
+#endif /* defined(VK_KHR_descriptor_update_template) */
+#if defined(VK_KHR_device_group)
+ vkCmdDispatchBaseKHR = (PFN_vkCmdDispatchBaseKHR)load(context, "vkCmdDispatchBaseKHR");
+ vkCmdSetDeviceMaskKHR = (PFN_vkCmdSetDeviceMaskKHR)load(context, "vkCmdSetDeviceMaskKHR");
+ vkGetDeviceGroupPeerMemoryFeaturesKHR = (PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)load(context, "vkGetDeviceGroupPeerMemoryFeaturesKHR");
+#endif /* defined(VK_KHR_device_group) */
+#if defined(VK_KHR_display_swapchain)
+ vkCreateSharedSwapchainsKHR = (PFN_vkCreateSharedSwapchainsKHR)load(context, "vkCreateSharedSwapchainsKHR");
+#endif /* defined(VK_KHR_display_swapchain) */
+#if defined(VK_KHR_draw_indirect_count)
+ vkCmdDrawIndexedIndirectCountKHR = (PFN_vkCmdDrawIndexedIndirectCountKHR)load(context, "vkCmdDrawIndexedIndirectCountKHR");
+ vkCmdDrawIndirectCountKHR = (PFN_vkCmdDrawIndirectCountKHR)load(context, "vkCmdDrawIndirectCountKHR");
+#endif /* defined(VK_KHR_draw_indirect_count) */
+#if defined(VK_KHR_dynamic_rendering)
+ vkCmdBeginRenderingKHR = (PFN_vkCmdBeginRenderingKHR)load(context, "vkCmdBeginRenderingKHR");
+ vkCmdEndRenderingKHR = (PFN_vkCmdEndRenderingKHR)load(context, "vkCmdEndRenderingKHR");
+#endif /* defined(VK_KHR_dynamic_rendering) */
+#if defined(VK_KHR_external_fence_fd)
+ vkGetFenceFdKHR = (PFN_vkGetFenceFdKHR)load(context, "vkGetFenceFdKHR");
+ vkImportFenceFdKHR = (PFN_vkImportFenceFdKHR)load(context, "vkImportFenceFdKHR");
+#endif /* defined(VK_KHR_external_fence_fd) */
+#if defined(VK_KHR_external_fence_win32)
+ vkGetFenceWin32HandleKHR = (PFN_vkGetFenceWin32HandleKHR)load(context, "vkGetFenceWin32HandleKHR");
+ vkImportFenceWin32HandleKHR = (PFN_vkImportFenceWin32HandleKHR)load(context, "vkImportFenceWin32HandleKHR");
+#endif /* defined(VK_KHR_external_fence_win32) */
+#if defined(VK_KHR_external_memory_fd)
+ vkGetMemoryFdKHR = (PFN_vkGetMemoryFdKHR)load(context, "vkGetMemoryFdKHR");
+ vkGetMemoryFdPropertiesKHR = (PFN_vkGetMemoryFdPropertiesKHR)load(context, "vkGetMemoryFdPropertiesKHR");
+#endif /* defined(VK_KHR_external_memory_fd) */
+#if defined(VK_KHR_external_memory_win32)
+ vkGetMemoryWin32HandleKHR = (PFN_vkGetMemoryWin32HandleKHR)load(context, "vkGetMemoryWin32HandleKHR");
+ vkGetMemoryWin32HandlePropertiesKHR = (PFN_vkGetMemoryWin32HandlePropertiesKHR)load(context, "vkGetMemoryWin32HandlePropertiesKHR");
+#endif /* defined(VK_KHR_external_memory_win32) */
+#if defined(VK_KHR_external_semaphore_fd)
+ vkGetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR)load(context, "vkGetSemaphoreFdKHR");
+ vkImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR)load(context, "vkImportSemaphoreFdKHR");
+#endif /* defined(VK_KHR_external_semaphore_fd) */
+#if defined(VK_KHR_external_semaphore_win32)
+ vkGetSemaphoreWin32HandleKHR = (PFN_vkGetSemaphoreWin32HandleKHR)load(context, "vkGetSemaphoreWin32HandleKHR");
+ vkImportSemaphoreWin32HandleKHR = (PFN_vkImportSemaphoreWin32HandleKHR)load(context, "vkImportSemaphoreWin32HandleKHR");
+#endif /* defined(VK_KHR_external_semaphore_win32) */
+#if defined(VK_KHR_fragment_shading_rate)
+ vkCmdSetFragmentShadingRateKHR = (PFN_vkCmdSetFragmentShadingRateKHR)load(context, "vkCmdSetFragmentShadingRateKHR");
+#endif /* defined(VK_KHR_fragment_shading_rate) */
+#if defined(VK_KHR_get_memory_requirements2)
+ vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR)load(context, "vkGetBufferMemoryRequirements2KHR");
+ vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2KHR)load(context, "vkGetImageMemoryRequirements2KHR");
+ vkGetImageSparseMemoryRequirements2KHR = (PFN_vkGetImageSparseMemoryRequirements2KHR)load(context, "vkGetImageSparseMemoryRequirements2KHR");
+#endif /* defined(VK_KHR_get_memory_requirements2) */
+#if defined(VK_KHR_maintenance1)
+ vkTrimCommandPoolKHR = (PFN_vkTrimCommandPoolKHR)load(context, "vkTrimCommandPoolKHR");
+#endif /* defined(VK_KHR_maintenance1) */
+#if defined(VK_KHR_maintenance3)
+ vkGetDescriptorSetLayoutSupportKHR = (PFN_vkGetDescriptorSetLayoutSupportKHR)load(context, "vkGetDescriptorSetLayoutSupportKHR");
+#endif /* defined(VK_KHR_maintenance3) */
+#if defined(VK_KHR_maintenance4)
+ vkGetDeviceBufferMemoryRequirementsKHR = (PFN_vkGetDeviceBufferMemoryRequirementsKHR)load(context, "vkGetDeviceBufferMemoryRequirementsKHR");
+ vkGetDeviceImageMemoryRequirementsKHR = (PFN_vkGetDeviceImageMemoryRequirementsKHR)load(context, "vkGetDeviceImageMemoryRequirementsKHR");
+ vkGetDeviceImageSparseMemoryRequirementsKHR = (PFN_vkGetDeviceImageSparseMemoryRequirementsKHR)load(context, "vkGetDeviceImageSparseMemoryRequirementsKHR");
+#endif /* defined(VK_KHR_maintenance4) */
+#if defined(VK_KHR_maintenance5)
+ vkCmdBindIndexBuffer2KHR = (PFN_vkCmdBindIndexBuffer2KHR)load(context, "vkCmdBindIndexBuffer2KHR");
+ vkGetDeviceImageSubresourceLayoutKHR = (PFN_vkGetDeviceImageSubresourceLayoutKHR)load(context, "vkGetDeviceImageSubresourceLayoutKHR");
+ vkGetImageSubresourceLayout2KHR = (PFN_vkGetImageSubresourceLayout2KHR)load(context, "vkGetImageSubresourceLayout2KHR");
+ vkGetRenderingAreaGranularityKHR = (PFN_vkGetRenderingAreaGranularityKHR)load(context, "vkGetRenderingAreaGranularityKHR");
+#endif /* defined(VK_KHR_maintenance5) */
+#if defined(VK_KHR_map_memory2)
+ vkMapMemory2KHR = (PFN_vkMapMemory2KHR)load(context, "vkMapMemory2KHR");
+ vkUnmapMemory2KHR = (PFN_vkUnmapMemory2KHR)load(context, "vkUnmapMemory2KHR");
+#endif /* defined(VK_KHR_map_memory2) */
+#if defined(VK_KHR_performance_query)
+ vkAcquireProfilingLockKHR = (PFN_vkAcquireProfilingLockKHR)load(context, "vkAcquireProfilingLockKHR");
+ vkReleaseProfilingLockKHR = (PFN_vkReleaseProfilingLockKHR)load(context, "vkReleaseProfilingLockKHR");
+#endif /* defined(VK_KHR_performance_query) */
+#if defined(VK_KHR_pipeline_executable_properties)
+ vkGetPipelineExecutableInternalRepresentationsKHR = (PFN_vkGetPipelineExecutableInternalRepresentationsKHR)load(context, "vkGetPipelineExecutableInternalRepresentationsKHR");
+
vkGetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR)load(context, "vkGetPipelineExecutablePropertiesKHR"); + vkGetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR)load(context, "vkGetPipelineExecutableStatisticsKHR"); +#endif /* defined(VK_KHR_pipeline_executable_properties) */ +#if defined(VK_KHR_present_wait) + vkWaitForPresentKHR = (PFN_vkWaitForPresentKHR)load(context, "vkWaitForPresentKHR"); +#endif /* defined(VK_KHR_present_wait) */ +#if defined(VK_KHR_push_descriptor) + vkCmdPushDescriptorSetKHR = (PFN_vkCmdPushDescriptorSetKHR)load(context, "vkCmdPushDescriptorSetKHR"); +#endif /* defined(VK_KHR_push_descriptor) */ +#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) + vkCmdTraceRaysIndirect2KHR = (PFN_vkCmdTraceRaysIndirect2KHR)load(context, "vkCmdTraceRaysIndirect2KHR"); +#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */ +#if defined(VK_KHR_ray_tracing_pipeline) + vkCmdSetRayTracingPipelineStackSizeKHR = (PFN_vkCmdSetRayTracingPipelineStackSizeKHR)load(context, "vkCmdSetRayTracingPipelineStackSizeKHR"); + vkCmdTraceRaysIndirectKHR = (PFN_vkCmdTraceRaysIndirectKHR)load(context, "vkCmdTraceRaysIndirectKHR"); + vkCmdTraceRaysKHR = (PFN_vkCmdTraceRaysKHR)load(context, "vkCmdTraceRaysKHR"); + vkCreateRayTracingPipelinesKHR = (PFN_vkCreateRayTracingPipelinesKHR)load(context, "vkCreateRayTracingPipelinesKHR"); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = (PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)load(context, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR"); + vkGetRayTracingShaderGroupHandlesKHR = (PFN_vkGetRayTracingShaderGroupHandlesKHR)load(context, "vkGetRayTracingShaderGroupHandlesKHR"); + vkGetRayTracingShaderGroupStackSizeKHR = (PFN_vkGetRayTracingShaderGroupStackSizeKHR)load(context, "vkGetRayTracingShaderGroupStackSizeKHR"); +#endif /* defined(VK_KHR_ray_tracing_pipeline) */ +#if defined(VK_KHR_sampler_ycbcr_conversion) + vkCreateSamplerYcbcrConversionKHR = (PFN_vkCreateSamplerYcbcrConversionKHR)load(context, "vkCreateSamplerYcbcrConversionKHR"); + vkDestroySamplerYcbcrConversionKHR = (PFN_vkDestroySamplerYcbcrConversionKHR)load(context, "vkDestroySamplerYcbcrConversionKHR"); +#endif /* defined(VK_KHR_sampler_ycbcr_conversion) */ +#if defined(VK_KHR_shared_presentable_image) + vkGetSwapchainStatusKHR = (PFN_vkGetSwapchainStatusKHR)load(context, "vkGetSwapchainStatusKHR"); +#endif /* defined(VK_KHR_shared_presentable_image) */ +#if defined(VK_KHR_swapchain) + vkAcquireNextImageKHR = (PFN_vkAcquireNextImageKHR)load(context, "vkAcquireNextImageKHR"); + vkCreateSwapchainKHR = (PFN_vkCreateSwapchainKHR)load(context, "vkCreateSwapchainKHR"); + vkDestroySwapchainKHR = (PFN_vkDestroySwapchainKHR)load(context, "vkDestroySwapchainKHR"); + vkGetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR)load(context, "vkGetSwapchainImagesKHR"); + vkQueuePresentKHR = (PFN_vkQueuePresentKHR)load(context, "vkQueuePresentKHR"); +#endif /* defined(VK_KHR_swapchain) */ +#if defined(VK_KHR_synchronization2) + vkCmdPipelineBarrier2KHR = (PFN_vkCmdPipelineBarrier2KHR)load(context, "vkCmdPipelineBarrier2KHR"); + vkCmdResetEvent2KHR = (PFN_vkCmdResetEvent2KHR)load(context, "vkCmdResetEvent2KHR"); + vkCmdSetEvent2KHR = (PFN_vkCmdSetEvent2KHR)load(context, "vkCmdSetEvent2KHR"); + vkCmdWaitEvents2KHR = (PFN_vkCmdWaitEvents2KHR)load(context, "vkCmdWaitEvents2KHR"); + vkCmdWriteTimestamp2KHR = (PFN_vkCmdWriteTimestamp2KHR)load(context, 
"vkCmdWriteTimestamp2KHR"); + vkQueueSubmit2KHR = (PFN_vkQueueSubmit2KHR)load(context, "vkQueueSubmit2KHR"); +#endif /* defined(VK_KHR_synchronization2) */ +#if defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) + vkCmdWriteBufferMarker2AMD = (PFN_vkCmdWriteBufferMarker2AMD)load(context, "vkCmdWriteBufferMarker2AMD"); +#endif /* defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) */ +#if defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) + vkGetQueueCheckpointData2NV = (PFN_vkGetQueueCheckpointData2NV)load(context, "vkGetQueueCheckpointData2NV"); +#endif /* defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) */ +#if defined(VK_KHR_timeline_semaphore) + vkGetSemaphoreCounterValueKHR = (PFN_vkGetSemaphoreCounterValueKHR)load(context, "vkGetSemaphoreCounterValueKHR"); + vkSignalSemaphoreKHR = (PFN_vkSignalSemaphoreKHR)load(context, "vkSignalSemaphoreKHR"); + vkWaitSemaphoresKHR = (PFN_vkWaitSemaphoresKHR)load(context, "vkWaitSemaphoresKHR"); +#endif /* defined(VK_KHR_timeline_semaphore) */ +#if defined(VK_KHR_video_decode_queue) + vkCmdDecodeVideoKHR = (PFN_vkCmdDecodeVideoKHR)load(context, "vkCmdDecodeVideoKHR"); +#endif /* defined(VK_KHR_video_decode_queue) */ +#if defined(VK_KHR_video_encode_queue) + vkCmdEncodeVideoKHR = (PFN_vkCmdEncodeVideoKHR)load(context, "vkCmdEncodeVideoKHR"); + vkGetEncodedVideoSessionParametersKHR = (PFN_vkGetEncodedVideoSessionParametersKHR)load(context, "vkGetEncodedVideoSessionParametersKHR"); +#endif /* defined(VK_KHR_video_encode_queue) */ +#if defined(VK_KHR_video_queue) + vkBindVideoSessionMemoryKHR = (PFN_vkBindVideoSessionMemoryKHR)load(context, "vkBindVideoSessionMemoryKHR"); + vkCmdBeginVideoCodingKHR = (PFN_vkCmdBeginVideoCodingKHR)load(context, "vkCmdBeginVideoCodingKHR"); + vkCmdControlVideoCodingKHR = (PFN_vkCmdControlVideoCodingKHR)load(context, "vkCmdControlVideoCodingKHR"); + vkCmdEndVideoCodingKHR = (PFN_vkCmdEndVideoCodingKHR)load(context, "vkCmdEndVideoCodingKHR"); + vkCreateVideoSessionKHR = (PFN_vkCreateVideoSessionKHR)load(context, "vkCreateVideoSessionKHR"); + vkCreateVideoSessionParametersKHR = (PFN_vkCreateVideoSessionParametersKHR)load(context, "vkCreateVideoSessionParametersKHR"); + vkDestroyVideoSessionKHR = (PFN_vkDestroyVideoSessionKHR)load(context, "vkDestroyVideoSessionKHR"); + vkDestroyVideoSessionParametersKHR = (PFN_vkDestroyVideoSessionParametersKHR)load(context, "vkDestroyVideoSessionParametersKHR"); + vkGetVideoSessionMemoryRequirementsKHR = (PFN_vkGetVideoSessionMemoryRequirementsKHR)load(context, "vkGetVideoSessionMemoryRequirementsKHR"); + vkUpdateVideoSessionParametersKHR = (PFN_vkUpdateVideoSessionParametersKHR)load(context, "vkUpdateVideoSessionParametersKHR"); +#endif /* defined(VK_KHR_video_queue) */ +#if defined(VK_NVX_binary_import) + vkCmdCuLaunchKernelNVX = (PFN_vkCmdCuLaunchKernelNVX)load(context, "vkCmdCuLaunchKernelNVX"); + vkCreateCuFunctionNVX = (PFN_vkCreateCuFunctionNVX)load(context, "vkCreateCuFunctionNVX"); + vkCreateCuModuleNVX = (PFN_vkCreateCuModuleNVX)load(context, "vkCreateCuModuleNVX"); + vkDestroyCuFunctionNVX = (PFN_vkDestroyCuFunctionNVX)load(context, "vkDestroyCuFunctionNVX"); + vkDestroyCuModuleNVX = (PFN_vkDestroyCuModuleNVX)load(context, "vkDestroyCuModuleNVX"); +#endif /* defined(VK_NVX_binary_import) */ +#if defined(VK_NVX_image_view_handle) + vkGetImageViewAddressNVX = (PFN_vkGetImageViewAddressNVX)load(context, "vkGetImageViewAddressNVX"); + vkGetImageViewHandleNVX = 
(PFN_vkGetImageViewHandleNVX)load(context, "vkGetImageViewHandleNVX"); +#endif /* defined(VK_NVX_image_view_handle) */ +#if defined(VK_NV_clip_space_w_scaling) + vkCmdSetViewportWScalingNV = (PFN_vkCmdSetViewportWScalingNV)load(context, "vkCmdSetViewportWScalingNV"); +#endif /* defined(VK_NV_clip_space_w_scaling) */ +#if defined(VK_NV_copy_memory_indirect) + vkCmdCopyMemoryIndirectNV = (PFN_vkCmdCopyMemoryIndirectNV)load(context, "vkCmdCopyMemoryIndirectNV"); + vkCmdCopyMemoryToImageIndirectNV = (PFN_vkCmdCopyMemoryToImageIndirectNV)load(context, "vkCmdCopyMemoryToImageIndirectNV"); +#endif /* defined(VK_NV_copy_memory_indirect) */ +#if defined(VK_NV_cuda_kernel_launch) + vkCmdCudaLaunchKernelNV = (PFN_vkCmdCudaLaunchKernelNV)load(context, "vkCmdCudaLaunchKernelNV"); + vkCreateCudaFunctionNV = (PFN_vkCreateCudaFunctionNV)load(context, "vkCreateCudaFunctionNV"); + vkCreateCudaModuleNV = (PFN_vkCreateCudaModuleNV)load(context, "vkCreateCudaModuleNV"); + vkDestroyCudaFunctionNV = (PFN_vkDestroyCudaFunctionNV)load(context, "vkDestroyCudaFunctionNV"); + vkDestroyCudaModuleNV = (PFN_vkDestroyCudaModuleNV)load(context, "vkDestroyCudaModuleNV"); + vkGetCudaModuleCacheNV = (PFN_vkGetCudaModuleCacheNV)load(context, "vkGetCudaModuleCacheNV"); +#endif /* defined(VK_NV_cuda_kernel_launch) */ +#if defined(VK_NV_device_diagnostic_checkpoints) + vkCmdSetCheckpointNV = (PFN_vkCmdSetCheckpointNV)load(context, "vkCmdSetCheckpointNV"); + vkGetQueueCheckpointDataNV = (PFN_vkGetQueueCheckpointDataNV)load(context, "vkGetQueueCheckpointDataNV"); +#endif /* defined(VK_NV_device_diagnostic_checkpoints) */ +#if defined(VK_NV_device_generated_commands) + vkCmdBindPipelineShaderGroupNV = (PFN_vkCmdBindPipelineShaderGroupNV)load(context, "vkCmdBindPipelineShaderGroupNV"); + vkCmdExecuteGeneratedCommandsNV = (PFN_vkCmdExecuteGeneratedCommandsNV)load(context, "vkCmdExecuteGeneratedCommandsNV"); + vkCmdPreprocessGeneratedCommandsNV = (PFN_vkCmdPreprocessGeneratedCommandsNV)load(context, "vkCmdPreprocessGeneratedCommandsNV"); + vkCreateIndirectCommandsLayoutNV = (PFN_vkCreateIndirectCommandsLayoutNV)load(context, "vkCreateIndirectCommandsLayoutNV"); + vkDestroyIndirectCommandsLayoutNV = (PFN_vkDestroyIndirectCommandsLayoutNV)load(context, "vkDestroyIndirectCommandsLayoutNV"); + vkGetGeneratedCommandsMemoryRequirementsNV = (PFN_vkGetGeneratedCommandsMemoryRequirementsNV)load(context, "vkGetGeneratedCommandsMemoryRequirementsNV"); +#endif /* defined(VK_NV_device_generated_commands) */ +#if defined(VK_NV_device_generated_commands_compute) + vkCmdUpdatePipelineIndirectBufferNV = (PFN_vkCmdUpdatePipelineIndirectBufferNV)load(context, "vkCmdUpdatePipelineIndirectBufferNV"); + vkGetPipelineIndirectDeviceAddressNV = (PFN_vkGetPipelineIndirectDeviceAddressNV)load(context, "vkGetPipelineIndirectDeviceAddressNV"); + vkGetPipelineIndirectMemoryRequirementsNV = (PFN_vkGetPipelineIndirectMemoryRequirementsNV)load(context, "vkGetPipelineIndirectMemoryRequirementsNV"); +#endif /* defined(VK_NV_device_generated_commands_compute) */ +#if defined(VK_NV_external_memory_rdma) + vkGetMemoryRemoteAddressNV = (PFN_vkGetMemoryRemoteAddressNV)load(context, "vkGetMemoryRemoteAddressNV"); +#endif /* defined(VK_NV_external_memory_rdma) */ +#if defined(VK_NV_external_memory_win32) + vkGetMemoryWin32HandleNV = (PFN_vkGetMemoryWin32HandleNV)load(context, "vkGetMemoryWin32HandleNV"); +#endif /* defined(VK_NV_external_memory_win32) */ +#if defined(VK_NV_fragment_shading_rate_enums) + vkCmdSetFragmentShadingRateEnumNV = 
(PFN_vkCmdSetFragmentShadingRateEnumNV)load(context, "vkCmdSetFragmentShadingRateEnumNV");
+#endif /* defined(VK_NV_fragment_shading_rate_enums) */
+#if defined(VK_NV_low_latency2)
+ vkGetLatencyTimingsNV = (PFN_vkGetLatencyTimingsNV)load(context, "vkGetLatencyTimingsNV");
+ vkLatencySleepNV = (PFN_vkLatencySleepNV)load(context, "vkLatencySleepNV");
+ vkQueueNotifyOutOfBandNV = (PFN_vkQueueNotifyOutOfBandNV)load(context, "vkQueueNotifyOutOfBandNV");
+ vkSetLatencyMarkerNV = (PFN_vkSetLatencyMarkerNV)load(context, "vkSetLatencyMarkerNV");
+ vkSetLatencySleepModeNV = (PFN_vkSetLatencySleepModeNV)load(context, "vkSetLatencySleepModeNV");
+#endif /* defined(VK_NV_low_latency2) */
+#if defined(VK_NV_memory_decompression)
+ vkCmdDecompressMemoryIndirectCountNV = (PFN_vkCmdDecompressMemoryIndirectCountNV)load(context, "vkCmdDecompressMemoryIndirectCountNV");
+ vkCmdDecompressMemoryNV = (PFN_vkCmdDecompressMemoryNV)load(context, "vkCmdDecompressMemoryNV");
+#endif /* defined(VK_NV_memory_decompression) */
+#if defined(VK_NV_mesh_shader)
+ vkCmdDrawMeshTasksIndirectCountNV = (PFN_vkCmdDrawMeshTasksIndirectCountNV)load(context, "vkCmdDrawMeshTasksIndirectCountNV");
+ vkCmdDrawMeshTasksIndirectNV = (PFN_vkCmdDrawMeshTasksIndirectNV)load(context, "vkCmdDrawMeshTasksIndirectNV");
+ vkCmdDrawMeshTasksNV = (PFN_vkCmdDrawMeshTasksNV)load(context, "vkCmdDrawMeshTasksNV");
+#endif /* defined(VK_NV_mesh_shader) */
+#if defined(VK_NV_optical_flow)
+ vkBindOpticalFlowSessionImageNV = (PFN_vkBindOpticalFlowSessionImageNV)load(context, "vkBindOpticalFlowSessionImageNV");
+ vkCmdOpticalFlowExecuteNV = (PFN_vkCmdOpticalFlowExecuteNV)load(context, "vkCmdOpticalFlowExecuteNV");
+ vkCreateOpticalFlowSessionNV = (PFN_vkCreateOpticalFlowSessionNV)load(context, "vkCreateOpticalFlowSessionNV");
+ vkDestroyOpticalFlowSessionNV = (PFN_vkDestroyOpticalFlowSessionNV)load(context, "vkDestroyOpticalFlowSessionNV");
+#endif /* defined(VK_NV_optical_flow) */
+#if defined(VK_NV_ray_tracing)
+ vkBindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV)load(context, "vkBindAccelerationStructureMemoryNV");
+ vkCmdBuildAccelerationStructureNV = (PFN_vkCmdBuildAccelerationStructureNV)load(context, "vkCmdBuildAccelerationStructureNV");
+ vkCmdCopyAccelerationStructureNV = (PFN_vkCmdCopyAccelerationStructureNV)load(context, "vkCmdCopyAccelerationStructureNV");
+ vkCmdTraceRaysNV = (PFN_vkCmdTraceRaysNV)load(context, "vkCmdTraceRaysNV");
+ vkCmdWriteAccelerationStructuresPropertiesNV = (PFN_vkCmdWriteAccelerationStructuresPropertiesNV)load(context, "vkCmdWriteAccelerationStructuresPropertiesNV");
+ vkCompileDeferredNV = (PFN_vkCompileDeferredNV)load(context, "vkCompileDeferredNV");
+ vkCreateAccelerationStructureNV = (PFN_vkCreateAccelerationStructureNV)load(context, "vkCreateAccelerationStructureNV");
+ vkCreateRayTracingPipelinesNV = (PFN_vkCreateRayTracingPipelinesNV)load(context, "vkCreateRayTracingPipelinesNV");
+ vkDestroyAccelerationStructureNV = (PFN_vkDestroyAccelerationStructureNV)load(context, "vkDestroyAccelerationStructureNV");
+ vkGetAccelerationStructureHandleNV = (PFN_vkGetAccelerationStructureHandleNV)load(context, "vkGetAccelerationStructureHandleNV");
+ vkGetAccelerationStructureMemoryRequirementsNV = (PFN_vkGetAccelerationStructureMemoryRequirementsNV)load(context, "vkGetAccelerationStructureMemoryRequirementsNV");
+ vkGetRayTracingShaderGroupHandlesNV = (PFN_vkGetRayTracingShaderGroupHandlesNV)load(context, "vkGetRayTracingShaderGroupHandlesNV");
+#endif /* defined(VK_NV_ray_tracing) */
+#if defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2
+ vkCmdSetExclusiveScissorEnableNV = (PFN_vkCmdSetExclusiveScissorEnableNV)load(context, "vkCmdSetExclusiveScissorEnableNV");
+#endif /* defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 */
+#if defined(VK_NV_scissor_exclusive)
+ vkCmdSetExclusiveScissorNV = (PFN_vkCmdSetExclusiveScissorNV)load(context, "vkCmdSetExclusiveScissorNV");
+#endif /* defined(VK_NV_scissor_exclusive) */
+#if defined(VK_NV_shading_rate_image)
+ vkCmdBindShadingRateImageNV = (PFN_vkCmdBindShadingRateImageNV)load(context, "vkCmdBindShadingRateImageNV");
+ vkCmdSetCoarseSampleOrderNV = (PFN_vkCmdSetCoarseSampleOrderNV)load(context, "vkCmdSetCoarseSampleOrderNV");
+ vkCmdSetViewportShadingRatePaletteNV = (PFN_vkCmdSetViewportShadingRatePaletteNV)load(context, "vkCmdSetViewportShadingRatePaletteNV");
+#endif /* defined(VK_NV_shading_rate_image) */
+#if defined(VK_QCOM_tile_properties)
+ vkGetDynamicRenderingTilePropertiesQCOM = (PFN_vkGetDynamicRenderingTilePropertiesQCOM)load(context, "vkGetDynamicRenderingTilePropertiesQCOM");
+ vkGetFramebufferTilePropertiesQCOM = (PFN_vkGetFramebufferTilePropertiesQCOM)load(context, "vkGetFramebufferTilePropertiesQCOM");
+#endif /* defined(VK_QCOM_tile_properties) */
+#if defined(VK_QNX_external_memory_screen_buffer)
+ vkGetScreenBufferPropertiesQNX = (PFN_vkGetScreenBufferPropertiesQNX)load(context, "vkGetScreenBufferPropertiesQNX");
+#endif /* defined(VK_QNX_external_memory_screen_buffer) */
+#if defined(VK_VALVE_descriptor_set_host_mapping)
+ vkGetDescriptorSetHostMappingVALVE = (PFN_vkGetDescriptorSetHostMappingVALVE)load(context, "vkGetDescriptorSetHostMappingVALVE");
+ vkGetDescriptorSetLayoutHostMappingInfoVALVE = (PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE)load(context, "vkGetDescriptorSetLayoutHostMappingInfoVALVE");
+#endif /* defined(VK_VALVE_descriptor_set_host_mapping) */
+#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object))
+ vkCmdBindVertexBuffers2EXT = (PFN_vkCmdBindVertexBuffers2EXT)load(context, "vkCmdBindVertexBuffers2EXT");
+ vkCmdSetCullModeEXT = (PFN_vkCmdSetCullModeEXT)load(context, "vkCmdSetCullModeEXT");
+ vkCmdSetDepthBoundsTestEnableEXT = (PFN_vkCmdSetDepthBoundsTestEnableEXT)load(context, "vkCmdSetDepthBoundsTestEnableEXT");
+ vkCmdSetDepthCompareOpEXT = (PFN_vkCmdSetDepthCompareOpEXT)load(context, "vkCmdSetDepthCompareOpEXT");
+ vkCmdSetDepthTestEnableEXT = (PFN_vkCmdSetDepthTestEnableEXT)load(context, "vkCmdSetDepthTestEnableEXT");
+ vkCmdSetDepthWriteEnableEXT = (PFN_vkCmdSetDepthWriteEnableEXT)load(context, "vkCmdSetDepthWriteEnableEXT");
+ vkCmdSetFrontFaceEXT = (PFN_vkCmdSetFrontFaceEXT)load(context, "vkCmdSetFrontFaceEXT");
+ vkCmdSetPrimitiveTopologyEXT = (PFN_vkCmdSetPrimitiveTopologyEXT)load(context, "vkCmdSetPrimitiveTopologyEXT");
+ vkCmdSetScissorWithCountEXT = (PFN_vkCmdSetScissorWithCountEXT)load(context, "vkCmdSetScissorWithCountEXT");
+ vkCmdSetStencilOpEXT = (PFN_vkCmdSetStencilOpEXT)load(context, "vkCmdSetStencilOpEXT");
+ vkCmdSetStencilTestEnableEXT = (PFN_vkCmdSetStencilTestEnableEXT)load(context, "vkCmdSetStencilTestEnableEXT");
+ vkCmdSetViewportWithCountEXT = (PFN_vkCmdSetViewportWithCountEXT)load(context, "vkCmdSetViewportWithCountEXT");
+#endif /* (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) */
+#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object))
+ vkCmdSetDepthBiasEnableEXT = (PFN_vkCmdSetDepthBiasEnableEXT)load(context,
"vkCmdSetDepthBiasEnableEXT"); + vkCmdSetLogicOpEXT = (PFN_vkCmdSetLogicOpEXT)load(context, "vkCmdSetLogicOpEXT"); + vkCmdSetPatchControlPointsEXT = (PFN_vkCmdSetPatchControlPointsEXT)load(context, "vkCmdSetPatchControlPointsEXT"); + vkCmdSetPrimitiveRestartEnableEXT = (PFN_vkCmdSetPrimitiveRestartEnableEXT)load(context, "vkCmdSetPrimitiveRestartEnableEXT"); + vkCmdSetRasterizerDiscardEnableEXT = (PFN_vkCmdSetRasterizerDiscardEnableEXT)load(context, "vkCmdSetRasterizerDiscardEnableEXT"); +#endif /* (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) */ +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + vkCmdSetAlphaToCoverageEnableEXT = (PFN_vkCmdSetAlphaToCoverageEnableEXT)load(context, "vkCmdSetAlphaToCoverageEnableEXT"); + vkCmdSetAlphaToOneEnableEXT = (PFN_vkCmdSetAlphaToOneEnableEXT)load(context, "vkCmdSetAlphaToOneEnableEXT"); + vkCmdSetColorBlendAdvancedEXT = (PFN_vkCmdSetColorBlendAdvancedEXT)load(context, "vkCmdSetColorBlendAdvancedEXT"); + vkCmdSetColorBlendEnableEXT = (PFN_vkCmdSetColorBlendEnableEXT)load(context, "vkCmdSetColorBlendEnableEXT"); + vkCmdSetColorBlendEquationEXT = (PFN_vkCmdSetColorBlendEquationEXT)load(context, "vkCmdSetColorBlendEquationEXT"); + vkCmdSetColorWriteMaskEXT = (PFN_vkCmdSetColorWriteMaskEXT)load(context, "vkCmdSetColorWriteMaskEXT"); + vkCmdSetConservativeRasterizationModeEXT = (PFN_vkCmdSetConservativeRasterizationModeEXT)load(context, "vkCmdSetConservativeRasterizationModeEXT"); + vkCmdSetDepthClampEnableEXT = (PFN_vkCmdSetDepthClampEnableEXT)load(context, "vkCmdSetDepthClampEnableEXT"); + vkCmdSetDepthClipEnableEXT = (PFN_vkCmdSetDepthClipEnableEXT)load(context, "vkCmdSetDepthClipEnableEXT"); + vkCmdSetDepthClipNegativeOneToOneEXT = (PFN_vkCmdSetDepthClipNegativeOneToOneEXT)load(context, "vkCmdSetDepthClipNegativeOneToOneEXT"); + vkCmdSetExtraPrimitiveOverestimationSizeEXT = (PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT)load(context, "vkCmdSetExtraPrimitiveOverestimationSizeEXT"); + vkCmdSetLineRasterizationModeEXT = (PFN_vkCmdSetLineRasterizationModeEXT)load(context, "vkCmdSetLineRasterizationModeEXT"); + vkCmdSetLineStippleEnableEXT = (PFN_vkCmdSetLineStippleEnableEXT)load(context, "vkCmdSetLineStippleEnableEXT"); + vkCmdSetLogicOpEnableEXT = (PFN_vkCmdSetLogicOpEnableEXT)load(context, "vkCmdSetLogicOpEnableEXT"); + vkCmdSetPolygonModeEXT = (PFN_vkCmdSetPolygonModeEXT)load(context, "vkCmdSetPolygonModeEXT"); + vkCmdSetProvokingVertexModeEXT = (PFN_vkCmdSetProvokingVertexModeEXT)load(context, "vkCmdSetProvokingVertexModeEXT"); + vkCmdSetRasterizationSamplesEXT = (PFN_vkCmdSetRasterizationSamplesEXT)load(context, "vkCmdSetRasterizationSamplesEXT"); + vkCmdSetRasterizationStreamEXT = (PFN_vkCmdSetRasterizationStreamEXT)load(context, "vkCmdSetRasterizationStreamEXT"); + vkCmdSetSampleLocationsEnableEXT = (PFN_vkCmdSetSampleLocationsEnableEXT)load(context, "vkCmdSetSampleLocationsEnableEXT"); + vkCmdSetSampleMaskEXT = (PFN_vkCmdSetSampleMaskEXT)load(context, "vkCmdSetSampleMaskEXT"); + vkCmdSetTessellationDomainOriginEXT = (PFN_vkCmdSetTessellationDomainOriginEXT)load(context, "vkCmdSetTessellationDomainOriginEXT"); +#endif /* (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) + vkCmdSetViewportWScalingEnableNV = (PFN_vkCmdSetViewportWScalingEnableNV)load(context, "vkCmdSetViewportWScalingEnableNV"); 
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) + vkCmdSetViewportSwizzleNV = (PFN_vkCmdSetViewportSwizzleNV)load(context, "vkCmdSetViewportSwizzleNV"); +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) + vkCmdSetCoverageToColorEnableNV = (PFN_vkCmdSetCoverageToColorEnableNV)load(context, "vkCmdSetCoverageToColorEnableNV"); + vkCmdSetCoverageToColorLocationNV = (PFN_vkCmdSetCoverageToColorLocationNV)load(context, "vkCmdSetCoverageToColorLocationNV"); +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) + vkCmdSetCoverageModulationModeNV = (PFN_vkCmdSetCoverageModulationModeNV)load(context, "vkCmdSetCoverageModulationModeNV"); + vkCmdSetCoverageModulationTableEnableNV = (PFN_vkCmdSetCoverageModulationTableEnableNV)load(context, "vkCmdSetCoverageModulationTableEnableNV"); + vkCmdSetCoverageModulationTableNV = (PFN_vkCmdSetCoverageModulationTableNV)load(context, "vkCmdSetCoverageModulationTableNV"); +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) + vkCmdSetShadingRateImageEnableNV = (PFN_vkCmdSetShadingRateImageEnableNV)load(context, "vkCmdSetShadingRateImageEnableNV"); +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) + vkCmdSetRepresentativeFragmentTestEnableNV = (PFN_vkCmdSetRepresentativeFragmentTestEnableNV)load(context, "vkCmdSetRepresentativeFragmentTestEnableNV"); +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) + vkCmdSetCoverageReductionModeNV = (PFN_vkCmdSetCoverageReductionModeNV)load(context, "vkCmdSetCoverageReductionModeNV"); +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) */ +#if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) + 
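/* Note: vkGetDeviceGroupSurfacePresentModes2EXT needs VK_EXT_full_screen_exclusive together with either VK_KHR_device_group or Vulkan 1.1, which absorbed the device-group functionality. */ +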
vkGetDeviceGroupSurfacePresentModes2EXT = (PFN_vkGetDeviceGroupSurfacePresentModes2EXT)load(context, "vkGetDeviceGroupSurfacePresentModes2EXT"); +#endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */ +#if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) + vkGetImageSubresourceLayout2EXT = (PFN_vkGetImageSubresourceLayout2EXT)load(context, "vkGetImageSubresourceLayout2EXT"); +#endif /* (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) */ +#if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) + vkCmdSetVertexInputEXT = (PFN_vkCmdSetVertexInputEXT)load(context, "vkCmdSetVertexInputEXT"); +#endif /* (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) */ +#if (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) + vkCmdPushDescriptorSetWithTemplateKHR = (PFN_vkCmdPushDescriptorSetWithTemplateKHR)load(context, "vkCmdPushDescriptorSetWithTemplateKHR"); +#endif /* (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) */ +#if (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) + vkGetDeviceGroupPresentCapabilitiesKHR = (PFN_vkGetDeviceGroupPresentCapabilitiesKHR)load(context, "vkGetDeviceGroupPresentCapabilitiesKHR"); + vkGetDeviceGroupSurfacePresentModesKHR = (PFN_vkGetDeviceGroupSurfacePresentModesKHR)load(context, "vkGetDeviceGroupSurfacePresentModesKHR"); +#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ +#if (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) + vkAcquireNextImage2KHR = (PFN_vkAcquireNextImage2KHR)load(context, "vkAcquireNextImage2KHR"); +#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ + /* VOLK_GENERATE_LOAD_DEVICE */ +} + +static void volkGenLoadDeviceTable(struct VolkDeviceTable* table, void* context, PFN_vkVoidFunction (*load)(void*, const char*)) +{ + /* VOLK_GENERATE_LOAD_DEVICE_TABLE */ +#if defined(VK_VERSION_1_0) + table->vkAllocateCommandBuffers = (PFN_vkAllocateCommandBuffers)load(context, "vkAllocateCommandBuffers"); + table->vkAllocateDescriptorSets = (PFN_vkAllocateDescriptorSets)load(context, "vkAllocateDescriptorSets"); + table->vkAllocateMemory = (PFN_vkAllocateMemory)load(context, "vkAllocateMemory"); + table->vkBeginCommandBuffer = (PFN_vkBeginCommandBuffer)load(context, "vkBeginCommandBuffer"); + table->vkBindBufferMemory = (PFN_vkBindBufferMemory)load(context, "vkBindBufferMemory"); + table->vkBindImageMemory = (PFN_vkBindImageMemory)load(context, "vkBindImageMemory"); + table->vkCmdBeginQuery = (PFN_vkCmdBeginQuery)load(context, "vkCmdBeginQuery"); + table->vkCmdBeginRenderPass = (PFN_vkCmdBeginRenderPass)load(context, "vkCmdBeginRenderPass"); + table->vkCmdBindDescriptorSets = (PFN_vkCmdBindDescriptorSets)load(context, "vkCmdBindDescriptorSets"); + table->vkCmdBindIndexBuffer = 
(PFN_vkCmdBindIndexBuffer)load(context, "vkCmdBindIndexBuffer"); + table->vkCmdBindPipeline = (PFN_vkCmdBindPipeline)load(context, "vkCmdBindPipeline"); + table->vkCmdBindVertexBuffers = (PFN_vkCmdBindVertexBuffers)load(context, "vkCmdBindVertexBuffers"); + table->vkCmdBlitImage = (PFN_vkCmdBlitImage)load(context, "vkCmdBlitImage"); + table->vkCmdClearAttachments = (PFN_vkCmdClearAttachments)load(context, "vkCmdClearAttachments"); + table->vkCmdClearColorImage = (PFN_vkCmdClearColorImage)load(context, "vkCmdClearColorImage"); + table->vkCmdClearDepthStencilImage = (PFN_vkCmdClearDepthStencilImage)load(context, "vkCmdClearDepthStencilImage"); + table->vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)load(context, "vkCmdCopyBuffer"); + table->vkCmdCopyBufferToImage = (PFN_vkCmdCopyBufferToImage)load(context, "vkCmdCopyBufferToImage"); + table->vkCmdCopyImage = (PFN_vkCmdCopyImage)load(context, "vkCmdCopyImage"); + table->vkCmdCopyImageToBuffer = (PFN_vkCmdCopyImageToBuffer)load(context, "vkCmdCopyImageToBuffer"); + table->vkCmdCopyQueryPoolResults = (PFN_vkCmdCopyQueryPoolResults)load(context, "vkCmdCopyQueryPoolResults"); + table->vkCmdDispatch = (PFN_vkCmdDispatch)load(context, "vkCmdDispatch"); + table->vkCmdDispatchIndirect = (PFN_vkCmdDispatchIndirect)load(context, "vkCmdDispatchIndirect"); + table->vkCmdDraw = (PFN_vkCmdDraw)load(context, "vkCmdDraw"); + table->vkCmdDrawIndexed = (PFN_vkCmdDrawIndexed)load(context, "vkCmdDrawIndexed"); + table->vkCmdDrawIndexedIndirect = (PFN_vkCmdDrawIndexedIndirect)load(context, "vkCmdDrawIndexedIndirect"); + table->vkCmdDrawIndirect = (PFN_vkCmdDrawIndirect)load(context, "vkCmdDrawIndirect"); + table->vkCmdEndQuery = (PFN_vkCmdEndQuery)load(context, "vkCmdEndQuery"); + table->vkCmdEndRenderPass = (PFN_vkCmdEndRenderPass)load(context, "vkCmdEndRenderPass"); + table->vkCmdExecuteCommands = (PFN_vkCmdExecuteCommands)load(context, "vkCmdExecuteCommands"); + table->vkCmdFillBuffer = (PFN_vkCmdFillBuffer)load(context, "vkCmdFillBuffer"); + table->vkCmdNextSubpass = (PFN_vkCmdNextSubpass)load(context, "vkCmdNextSubpass"); + table->vkCmdPipelineBarrier = (PFN_vkCmdPipelineBarrier)load(context, "vkCmdPipelineBarrier"); + table->vkCmdPushConstants = (PFN_vkCmdPushConstants)load(context, "vkCmdPushConstants"); + table->vkCmdResetEvent = (PFN_vkCmdResetEvent)load(context, "vkCmdResetEvent"); + table->vkCmdResetQueryPool = (PFN_vkCmdResetQueryPool)load(context, "vkCmdResetQueryPool"); + table->vkCmdResolveImage = (PFN_vkCmdResolveImage)load(context, "vkCmdResolveImage"); + table->vkCmdSetBlendConstants = (PFN_vkCmdSetBlendConstants)load(context, "vkCmdSetBlendConstants"); + table->vkCmdSetDepthBias = (PFN_vkCmdSetDepthBias)load(context, "vkCmdSetDepthBias"); + table->vkCmdSetDepthBounds = (PFN_vkCmdSetDepthBounds)load(context, "vkCmdSetDepthBounds"); + table->vkCmdSetEvent = (PFN_vkCmdSetEvent)load(context, "vkCmdSetEvent"); + table->vkCmdSetLineWidth = (PFN_vkCmdSetLineWidth)load(context, "vkCmdSetLineWidth"); + table->vkCmdSetScissor = (PFN_vkCmdSetScissor)load(context, "vkCmdSetScissor"); + table->vkCmdSetStencilCompareMask = (PFN_vkCmdSetStencilCompareMask)load(context, "vkCmdSetStencilCompareMask"); + table->vkCmdSetStencilReference = (PFN_vkCmdSetStencilReference)load(context, "vkCmdSetStencilReference"); + table->vkCmdSetStencilWriteMask = (PFN_vkCmdSetStencilWriteMask)load(context, "vkCmdSetStencilWriteMask"); + table->vkCmdSetViewport = (PFN_vkCmdSetViewport)load(context, "vkCmdSetViewport"); + table->vkCmdUpdateBuffer = (PFN_vkCmdUpdateBuffer)load(context, 
"vkCmdUpdateBuffer"); + table->vkCmdWaitEvents = (PFN_vkCmdWaitEvents)load(context, "vkCmdWaitEvents"); + table->vkCmdWriteTimestamp = (PFN_vkCmdWriteTimestamp)load(context, "vkCmdWriteTimestamp"); + table->vkCreateBuffer = (PFN_vkCreateBuffer)load(context, "vkCreateBuffer"); + table->vkCreateBufferView = (PFN_vkCreateBufferView)load(context, "vkCreateBufferView"); + table->vkCreateCommandPool = (PFN_vkCreateCommandPool)load(context, "vkCreateCommandPool"); + table->vkCreateComputePipelines = (PFN_vkCreateComputePipelines)load(context, "vkCreateComputePipelines"); + table->vkCreateDescriptorPool = (PFN_vkCreateDescriptorPool)load(context, "vkCreateDescriptorPool"); + table->vkCreateDescriptorSetLayout = (PFN_vkCreateDescriptorSetLayout)load(context, "vkCreateDescriptorSetLayout"); + table->vkCreateEvent = (PFN_vkCreateEvent)load(context, "vkCreateEvent"); + table->vkCreateFence = (PFN_vkCreateFence)load(context, "vkCreateFence"); + table->vkCreateFramebuffer = (PFN_vkCreateFramebuffer)load(context, "vkCreateFramebuffer"); + table->vkCreateGraphicsPipelines = (PFN_vkCreateGraphicsPipelines)load(context, "vkCreateGraphicsPipelines"); + table->vkCreateImage = (PFN_vkCreateImage)load(context, "vkCreateImage"); + table->vkCreateImageView = (PFN_vkCreateImageView)load(context, "vkCreateImageView"); + table->vkCreatePipelineCache = (PFN_vkCreatePipelineCache)load(context, "vkCreatePipelineCache"); + table->vkCreatePipelineLayout = (PFN_vkCreatePipelineLayout)load(context, "vkCreatePipelineLayout"); + table->vkCreateQueryPool = (PFN_vkCreateQueryPool)load(context, "vkCreateQueryPool"); + table->vkCreateRenderPass = (PFN_vkCreateRenderPass)load(context, "vkCreateRenderPass"); + table->vkCreateSampler = (PFN_vkCreateSampler)load(context, "vkCreateSampler"); + table->vkCreateSemaphore = (PFN_vkCreateSemaphore)load(context, "vkCreateSemaphore"); + table->vkCreateShaderModule = (PFN_vkCreateShaderModule)load(context, "vkCreateShaderModule"); + table->vkDestroyBuffer = (PFN_vkDestroyBuffer)load(context, "vkDestroyBuffer"); + table->vkDestroyBufferView = (PFN_vkDestroyBufferView)load(context, "vkDestroyBufferView"); + table->vkDestroyCommandPool = (PFN_vkDestroyCommandPool)load(context, "vkDestroyCommandPool"); + table->vkDestroyDescriptorPool = (PFN_vkDestroyDescriptorPool)load(context, "vkDestroyDescriptorPool"); + table->vkDestroyDescriptorSetLayout = (PFN_vkDestroyDescriptorSetLayout)load(context, "vkDestroyDescriptorSetLayout"); + table->vkDestroyDevice = (PFN_vkDestroyDevice)load(context, "vkDestroyDevice"); + table->vkDestroyEvent = (PFN_vkDestroyEvent)load(context, "vkDestroyEvent"); + table->vkDestroyFence = (PFN_vkDestroyFence)load(context, "vkDestroyFence"); + table->vkDestroyFramebuffer = (PFN_vkDestroyFramebuffer)load(context, "vkDestroyFramebuffer"); + table->vkDestroyImage = (PFN_vkDestroyImage)load(context, "vkDestroyImage"); + table->vkDestroyImageView = (PFN_vkDestroyImageView)load(context, "vkDestroyImageView"); + table->vkDestroyPipeline = (PFN_vkDestroyPipeline)load(context, "vkDestroyPipeline"); + table->vkDestroyPipelineCache = (PFN_vkDestroyPipelineCache)load(context, "vkDestroyPipelineCache"); + table->vkDestroyPipelineLayout = (PFN_vkDestroyPipelineLayout)load(context, "vkDestroyPipelineLayout"); + table->vkDestroyQueryPool = (PFN_vkDestroyQueryPool)load(context, "vkDestroyQueryPool"); + table->vkDestroyRenderPass = (PFN_vkDestroyRenderPass)load(context, "vkDestroyRenderPass"); + table->vkDestroySampler = (PFN_vkDestroySampler)load(context, "vkDestroySampler"); + 
table->vkDestroySemaphore = (PFN_vkDestroySemaphore)load(context, "vkDestroySemaphore"); + table->vkDestroyShaderModule = (PFN_vkDestroyShaderModule)load(context, "vkDestroyShaderModule"); + table->vkDeviceWaitIdle = (PFN_vkDeviceWaitIdle)load(context, "vkDeviceWaitIdle"); + table->vkEndCommandBuffer = (PFN_vkEndCommandBuffer)load(context, "vkEndCommandBuffer"); + table->vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)load(context, "vkFlushMappedMemoryRanges"); + table->vkFreeCommandBuffers = (PFN_vkFreeCommandBuffers)load(context, "vkFreeCommandBuffers"); + table->vkFreeDescriptorSets = (PFN_vkFreeDescriptorSets)load(context, "vkFreeDescriptorSets"); + table->vkFreeMemory = (PFN_vkFreeMemory)load(context, "vkFreeMemory"); + table->vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)load(context, "vkGetBufferMemoryRequirements"); + table->vkGetDeviceMemoryCommitment = (PFN_vkGetDeviceMemoryCommitment)load(context, "vkGetDeviceMemoryCommitment"); + table->vkGetDeviceQueue = (PFN_vkGetDeviceQueue)load(context, "vkGetDeviceQueue"); + table->vkGetEventStatus = (PFN_vkGetEventStatus)load(context, "vkGetEventStatus"); + table->vkGetFenceStatus = (PFN_vkGetFenceStatus)load(context, "vkGetFenceStatus"); + table->vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)load(context, "vkGetImageMemoryRequirements"); + table->vkGetImageSparseMemoryRequirements = (PFN_vkGetImageSparseMemoryRequirements)load(context, "vkGetImageSparseMemoryRequirements"); + table->vkGetImageSubresourceLayout = (PFN_vkGetImageSubresourceLayout)load(context, "vkGetImageSubresourceLayout"); + table->vkGetPipelineCacheData = (PFN_vkGetPipelineCacheData)load(context, "vkGetPipelineCacheData"); + table->vkGetQueryPoolResults = (PFN_vkGetQueryPoolResults)load(context, "vkGetQueryPoolResults"); + table->vkGetRenderAreaGranularity = (PFN_vkGetRenderAreaGranularity)load(context, "vkGetRenderAreaGranularity"); + table->vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)load(context, "vkInvalidateMappedMemoryRanges"); + table->vkMapMemory = (PFN_vkMapMemory)load(context, "vkMapMemory"); + table->vkMergePipelineCaches = (PFN_vkMergePipelineCaches)load(context, "vkMergePipelineCaches"); + table->vkQueueBindSparse = (PFN_vkQueueBindSparse)load(context, "vkQueueBindSparse"); + table->vkQueueSubmit = (PFN_vkQueueSubmit)load(context, "vkQueueSubmit"); + table->vkQueueWaitIdle = (PFN_vkQueueWaitIdle)load(context, "vkQueueWaitIdle"); + table->vkResetCommandBuffer = (PFN_vkResetCommandBuffer)load(context, "vkResetCommandBuffer"); + table->vkResetCommandPool = (PFN_vkResetCommandPool)load(context, "vkResetCommandPool"); + table->vkResetDescriptorPool = (PFN_vkResetDescriptorPool)load(context, "vkResetDescriptorPool"); + table->vkResetEvent = (PFN_vkResetEvent)load(context, "vkResetEvent"); + table->vkResetFences = (PFN_vkResetFences)load(context, "vkResetFences"); + table->vkSetEvent = (PFN_vkSetEvent)load(context, "vkSetEvent"); + table->vkUnmapMemory = (PFN_vkUnmapMemory)load(context, "vkUnmapMemory"); + table->vkUpdateDescriptorSets = (PFN_vkUpdateDescriptorSets)load(context, "vkUpdateDescriptorSets"); + table->vkWaitForFences = (PFN_vkWaitForFences)load(context, "vkWaitForFences"); +#endif /* defined(VK_VERSION_1_0) */ +#if defined(VK_VERSION_1_1) + table->vkBindBufferMemory2 = (PFN_vkBindBufferMemory2)load(context, "vkBindBufferMemory2"); + table->vkBindImageMemory2 = (PFN_vkBindImageMemory2)load(context, "vkBindImageMemory2"); + table->vkCmdDispatchBase = 
(PFN_vkCmdDispatchBase)load(context, "vkCmdDispatchBase"); + table->vkCmdSetDeviceMask = (PFN_vkCmdSetDeviceMask)load(context, "vkCmdSetDeviceMask"); + table->vkCreateDescriptorUpdateTemplate = (PFN_vkCreateDescriptorUpdateTemplate)load(context, "vkCreateDescriptorUpdateTemplate"); + table->vkCreateSamplerYcbcrConversion = (PFN_vkCreateSamplerYcbcrConversion)load(context, "vkCreateSamplerYcbcrConversion"); + table->vkDestroyDescriptorUpdateTemplate = (PFN_vkDestroyDescriptorUpdateTemplate)load(context, "vkDestroyDescriptorUpdateTemplate"); + table->vkDestroySamplerYcbcrConversion = (PFN_vkDestroySamplerYcbcrConversion)load(context, "vkDestroySamplerYcbcrConversion"); + table->vkGetBufferMemoryRequirements2 = (PFN_vkGetBufferMemoryRequirements2)load(context, "vkGetBufferMemoryRequirements2"); + table->vkGetDescriptorSetLayoutSupport = (PFN_vkGetDescriptorSetLayoutSupport)load(context, "vkGetDescriptorSetLayoutSupport"); + table->vkGetDeviceGroupPeerMemoryFeatures = (PFN_vkGetDeviceGroupPeerMemoryFeatures)load(context, "vkGetDeviceGroupPeerMemoryFeatures"); + table->vkGetDeviceQueue2 = (PFN_vkGetDeviceQueue2)load(context, "vkGetDeviceQueue2"); + table->vkGetImageMemoryRequirements2 = (PFN_vkGetImageMemoryRequirements2)load(context, "vkGetImageMemoryRequirements2"); + table->vkGetImageSparseMemoryRequirements2 = (PFN_vkGetImageSparseMemoryRequirements2)load(context, "vkGetImageSparseMemoryRequirements2"); + table->vkTrimCommandPool = (PFN_vkTrimCommandPool)load(context, "vkTrimCommandPool"); + table->vkUpdateDescriptorSetWithTemplate = (PFN_vkUpdateDescriptorSetWithTemplate)load(context, "vkUpdateDescriptorSetWithTemplate"); +#endif /* defined(VK_VERSION_1_1) */ +#if defined(VK_VERSION_1_2) + table->vkCmdBeginRenderPass2 = (PFN_vkCmdBeginRenderPass2)load(context, "vkCmdBeginRenderPass2"); + table->vkCmdDrawIndexedIndirectCount = (PFN_vkCmdDrawIndexedIndirectCount)load(context, "vkCmdDrawIndexedIndirectCount"); + table->vkCmdDrawIndirectCount = (PFN_vkCmdDrawIndirectCount)load(context, "vkCmdDrawIndirectCount"); + table->vkCmdEndRenderPass2 = (PFN_vkCmdEndRenderPass2)load(context, "vkCmdEndRenderPass2"); + table->vkCmdNextSubpass2 = (PFN_vkCmdNextSubpass2)load(context, "vkCmdNextSubpass2"); + table->vkCreateRenderPass2 = (PFN_vkCreateRenderPass2)load(context, "vkCreateRenderPass2"); + table->vkGetBufferDeviceAddress = (PFN_vkGetBufferDeviceAddress)load(context, "vkGetBufferDeviceAddress"); + table->vkGetBufferOpaqueCaptureAddress = (PFN_vkGetBufferOpaqueCaptureAddress)load(context, "vkGetBufferOpaqueCaptureAddress"); + table->vkGetDeviceMemoryOpaqueCaptureAddress = (PFN_vkGetDeviceMemoryOpaqueCaptureAddress)load(context, "vkGetDeviceMemoryOpaqueCaptureAddress"); + table->vkGetSemaphoreCounterValue = (PFN_vkGetSemaphoreCounterValue)load(context, "vkGetSemaphoreCounterValue"); + table->vkResetQueryPool = (PFN_vkResetQueryPool)load(context, "vkResetQueryPool"); + table->vkSignalSemaphore = (PFN_vkSignalSemaphore)load(context, "vkSignalSemaphore"); + table->vkWaitSemaphores = (PFN_vkWaitSemaphores)load(context, "vkWaitSemaphores"); +#endif /* defined(VK_VERSION_1_2) */ +#if defined(VK_VERSION_1_3) + table->vkCmdBeginRendering = (PFN_vkCmdBeginRendering)load(context, "vkCmdBeginRendering"); + table->vkCmdBindVertexBuffers2 = (PFN_vkCmdBindVertexBuffers2)load(context, "vkCmdBindVertexBuffers2"); + table->vkCmdBlitImage2 = (PFN_vkCmdBlitImage2)load(context, "vkCmdBlitImage2"); + table->vkCmdCopyBuffer2 = (PFN_vkCmdCopyBuffer2)load(context, "vkCmdCopyBuffer2"); + table->vkCmdCopyBufferToImage2 = 
(PFN_vkCmdCopyBufferToImage2)load(context, "vkCmdCopyBufferToImage2"); + table->vkCmdCopyImage2 = (PFN_vkCmdCopyImage2)load(context, "vkCmdCopyImage2"); + table->vkCmdCopyImageToBuffer2 = (PFN_vkCmdCopyImageToBuffer2)load(context, "vkCmdCopyImageToBuffer2"); + table->vkCmdEndRendering = (PFN_vkCmdEndRendering)load(context, "vkCmdEndRendering"); + table->vkCmdPipelineBarrier2 = (PFN_vkCmdPipelineBarrier2)load(context, "vkCmdPipelineBarrier2"); + table->vkCmdResetEvent2 = (PFN_vkCmdResetEvent2)load(context, "vkCmdResetEvent2"); + table->vkCmdResolveImage2 = (PFN_vkCmdResolveImage2)load(context, "vkCmdResolveImage2"); + table->vkCmdSetCullMode = (PFN_vkCmdSetCullMode)load(context, "vkCmdSetCullMode"); + table->vkCmdSetDepthBiasEnable = (PFN_vkCmdSetDepthBiasEnable)load(context, "vkCmdSetDepthBiasEnable"); + table->vkCmdSetDepthBoundsTestEnable = (PFN_vkCmdSetDepthBoundsTestEnable)load(context, "vkCmdSetDepthBoundsTestEnable"); + table->vkCmdSetDepthCompareOp = (PFN_vkCmdSetDepthCompareOp)load(context, "vkCmdSetDepthCompareOp"); + table->vkCmdSetDepthTestEnable = (PFN_vkCmdSetDepthTestEnable)load(context, "vkCmdSetDepthTestEnable"); + table->vkCmdSetDepthWriteEnable = (PFN_vkCmdSetDepthWriteEnable)load(context, "vkCmdSetDepthWriteEnable"); + table->vkCmdSetEvent2 = (PFN_vkCmdSetEvent2)load(context, "vkCmdSetEvent2"); + table->vkCmdSetFrontFace = (PFN_vkCmdSetFrontFace)load(context, "vkCmdSetFrontFace"); + table->vkCmdSetPrimitiveRestartEnable = (PFN_vkCmdSetPrimitiveRestartEnable)load(context, "vkCmdSetPrimitiveRestartEnable"); + table->vkCmdSetPrimitiveTopology = (PFN_vkCmdSetPrimitiveTopology)load(context, "vkCmdSetPrimitiveTopology"); + table->vkCmdSetRasterizerDiscardEnable = (PFN_vkCmdSetRasterizerDiscardEnable)load(context, "vkCmdSetRasterizerDiscardEnable"); + table->vkCmdSetScissorWithCount = (PFN_vkCmdSetScissorWithCount)load(context, "vkCmdSetScissorWithCount"); + table->vkCmdSetStencilOp = (PFN_vkCmdSetStencilOp)load(context, "vkCmdSetStencilOp"); + table->vkCmdSetStencilTestEnable = (PFN_vkCmdSetStencilTestEnable)load(context, "vkCmdSetStencilTestEnable"); + table->vkCmdSetViewportWithCount = (PFN_vkCmdSetViewportWithCount)load(context, "vkCmdSetViewportWithCount"); + table->vkCmdWaitEvents2 = (PFN_vkCmdWaitEvents2)load(context, "vkCmdWaitEvents2"); + table->vkCmdWriteTimestamp2 = (PFN_vkCmdWriteTimestamp2)load(context, "vkCmdWriteTimestamp2"); + table->vkCreatePrivateDataSlot = (PFN_vkCreatePrivateDataSlot)load(context, "vkCreatePrivateDataSlot"); + table->vkDestroyPrivateDataSlot = (PFN_vkDestroyPrivateDataSlot)load(context, "vkDestroyPrivateDataSlot"); + table->vkGetDeviceBufferMemoryRequirements = (PFN_vkGetDeviceBufferMemoryRequirements)load(context, "vkGetDeviceBufferMemoryRequirements"); + table->vkGetDeviceImageMemoryRequirements = (PFN_vkGetDeviceImageMemoryRequirements)load(context, "vkGetDeviceImageMemoryRequirements"); + table->vkGetDeviceImageSparseMemoryRequirements = (PFN_vkGetDeviceImageSparseMemoryRequirements)load(context, "vkGetDeviceImageSparseMemoryRequirements"); + table->vkGetPrivateData = (PFN_vkGetPrivateData)load(context, "vkGetPrivateData"); + table->vkQueueSubmit2 = (PFN_vkQueueSubmit2)load(context, "vkQueueSubmit2"); + table->vkSetPrivateData = (PFN_vkSetPrivateData)load(context, "vkSetPrivateData"); +#endif /* defined(VK_VERSION_1_3) */ +#if defined(VK_AMDX_shader_enqueue) + table->vkCmdDispatchGraphAMDX = (PFN_vkCmdDispatchGraphAMDX)load(context, "vkCmdDispatchGraphAMDX"); + table->vkCmdDispatchGraphIndirectAMDX = 
(PFN_vkCmdDispatchGraphIndirectAMDX)load(context, "vkCmdDispatchGraphIndirectAMDX"); + table->vkCmdDispatchGraphIndirectCountAMDX = (PFN_vkCmdDispatchGraphIndirectCountAMDX)load(context, "vkCmdDispatchGraphIndirectCountAMDX"); + table->vkCmdInitializeGraphScratchMemoryAMDX = (PFN_vkCmdInitializeGraphScratchMemoryAMDX)load(context, "vkCmdInitializeGraphScratchMemoryAMDX"); + table->vkCreateExecutionGraphPipelinesAMDX = (PFN_vkCreateExecutionGraphPipelinesAMDX)load(context, "vkCreateExecutionGraphPipelinesAMDX"); + table->vkGetExecutionGraphPipelineNodeIndexAMDX = (PFN_vkGetExecutionGraphPipelineNodeIndexAMDX)load(context, "vkGetExecutionGraphPipelineNodeIndexAMDX"); + table->vkGetExecutionGraphPipelineScratchSizeAMDX = (PFN_vkGetExecutionGraphPipelineScratchSizeAMDX)load(context, "vkGetExecutionGraphPipelineScratchSizeAMDX"); +#endif /* defined(VK_AMDX_shader_enqueue) */ +#if defined(VK_AMD_buffer_marker) + table->vkCmdWriteBufferMarkerAMD = (PFN_vkCmdWriteBufferMarkerAMD)load(context, "vkCmdWriteBufferMarkerAMD"); +#endif /* defined(VK_AMD_buffer_marker) */ +#if defined(VK_AMD_display_native_hdr) + table->vkSetLocalDimmingAMD = (PFN_vkSetLocalDimmingAMD)load(context, "vkSetLocalDimmingAMD"); +#endif /* defined(VK_AMD_display_native_hdr) */ +#if defined(VK_AMD_draw_indirect_count) + table->vkCmdDrawIndexedIndirectCountAMD = (PFN_vkCmdDrawIndexedIndirectCountAMD)load(context, "vkCmdDrawIndexedIndirectCountAMD"); + table->vkCmdDrawIndirectCountAMD = (PFN_vkCmdDrawIndirectCountAMD)load(context, "vkCmdDrawIndirectCountAMD"); +#endif /* defined(VK_AMD_draw_indirect_count) */ +#if defined(VK_AMD_shader_info) + table->vkGetShaderInfoAMD = (PFN_vkGetShaderInfoAMD)load(context, "vkGetShaderInfoAMD"); +#endif /* defined(VK_AMD_shader_info) */ +#if defined(VK_ANDROID_external_memory_android_hardware_buffer) + table->vkGetAndroidHardwareBufferPropertiesANDROID = (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)load(context, "vkGetAndroidHardwareBufferPropertiesANDROID"); + table->vkGetMemoryAndroidHardwareBufferANDROID = (PFN_vkGetMemoryAndroidHardwareBufferANDROID)load(context, "vkGetMemoryAndroidHardwareBufferANDROID"); +#endif /* defined(VK_ANDROID_external_memory_android_hardware_buffer) */ +#if defined(VK_EXT_attachment_feedback_loop_dynamic_state) + table->vkCmdSetAttachmentFeedbackLoopEnableEXT = (PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT)load(context, "vkCmdSetAttachmentFeedbackLoopEnableEXT"); +#endif /* defined(VK_EXT_attachment_feedback_loop_dynamic_state) */ +#if defined(VK_EXT_buffer_device_address) + table->vkGetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT)load(context, "vkGetBufferDeviceAddressEXT"); +#endif /* defined(VK_EXT_buffer_device_address) */ +#if defined(VK_EXT_calibrated_timestamps) + table->vkGetCalibratedTimestampsEXT = (PFN_vkGetCalibratedTimestampsEXT)load(context, "vkGetCalibratedTimestampsEXT"); +#endif /* defined(VK_EXT_calibrated_timestamps) */ +#if defined(VK_EXT_color_write_enable) + table->vkCmdSetColorWriteEnableEXT = (PFN_vkCmdSetColorWriteEnableEXT)load(context, "vkCmdSetColorWriteEnableEXT"); +#endif /* defined(VK_EXT_color_write_enable) */ +#if defined(VK_EXT_conditional_rendering) + table->vkCmdBeginConditionalRenderingEXT = (PFN_vkCmdBeginConditionalRenderingEXT)load(context, "vkCmdBeginConditionalRenderingEXT"); + table->vkCmdEndConditionalRenderingEXT = (PFN_vkCmdEndConditionalRenderingEXT)load(context, "vkCmdEndConditionalRenderingEXT"); +#endif /* defined(VK_EXT_conditional_rendering) */ +#if defined(VK_EXT_debug_marker) + 
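/* Note: VK_EXT_debug_marker is the legacy annotation interface, superseded by VK_EXT_debug_utils but still loaded when a driver exposes it. */ +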
table->vkCmdDebugMarkerBeginEXT = (PFN_vkCmdDebugMarkerBeginEXT)load(context, "vkCmdDebugMarkerBeginEXT"); + table->vkCmdDebugMarkerEndEXT = (PFN_vkCmdDebugMarkerEndEXT)load(context, "vkCmdDebugMarkerEndEXT"); + table->vkCmdDebugMarkerInsertEXT = (PFN_vkCmdDebugMarkerInsertEXT)load(context, "vkCmdDebugMarkerInsertEXT"); + table->vkDebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT)load(context, "vkDebugMarkerSetObjectNameEXT"); + table->vkDebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT)load(context, "vkDebugMarkerSetObjectTagEXT"); +#endif /* defined(VK_EXT_debug_marker) */ +#if defined(VK_EXT_depth_bias_control) + table->vkCmdSetDepthBias2EXT = (PFN_vkCmdSetDepthBias2EXT)load(context, "vkCmdSetDepthBias2EXT"); +#endif /* defined(VK_EXT_depth_bias_control) */ +#if defined(VK_EXT_descriptor_buffer) + table->vkCmdBindDescriptorBufferEmbeddedSamplersEXT = (PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT)load(context, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT"); + table->vkCmdBindDescriptorBuffersEXT = (PFN_vkCmdBindDescriptorBuffersEXT)load(context, "vkCmdBindDescriptorBuffersEXT"); + table->vkCmdSetDescriptorBufferOffsetsEXT = (PFN_vkCmdSetDescriptorBufferOffsetsEXT)load(context, "vkCmdSetDescriptorBufferOffsetsEXT"); + table->vkGetBufferOpaqueCaptureDescriptorDataEXT = (PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT)load(context, "vkGetBufferOpaqueCaptureDescriptorDataEXT"); + table->vkGetDescriptorEXT = (PFN_vkGetDescriptorEXT)load(context, "vkGetDescriptorEXT"); + table->vkGetDescriptorSetLayoutBindingOffsetEXT = (PFN_vkGetDescriptorSetLayoutBindingOffsetEXT)load(context, "vkGetDescriptorSetLayoutBindingOffsetEXT"); + table->vkGetDescriptorSetLayoutSizeEXT = (PFN_vkGetDescriptorSetLayoutSizeEXT)load(context, "vkGetDescriptorSetLayoutSizeEXT"); + table->vkGetImageOpaqueCaptureDescriptorDataEXT = (PFN_vkGetImageOpaqueCaptureDescriptorDataEXT)load(context, "vkGetImageOpaqueCaptureDescriptorDataEXT"); + table->vkGetImageViewOpaqueCaptureDescriptorDataEXT = (PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT)load(context, "vkGetImageViewOpaqueCaptureDescriptorDataEXT"); + table->vkGetSamplerOpaqueCaptureDescriptorDataEXT = (PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT)load(context, "vkGetSamplerOpaqueCaptureDescriptorDataEXT"); +#endif /* defined(VK_EXT_descriptor_buffer) */ +#if defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) + table->vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = (PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT)load(context, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT"); +#endif /* defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) */ +#if defined(VK_EXT_device_fault) + table->vkGetDeviceFaultInfoEXT = (PFN_vkGetDeviceFaultInfoEXT)load(context, "vkGetDeviceFaultInfoEXT"); +#endif /* defined(VK_EXT_device_fault) */ +#if defined(VK_EXT_discard_rectangles) + table->vkCmdSetDiscardRectangleEXT = (PFN_vkCmdSetDiscardRectangleEXT)load(context, "vkCmdSetDiscardRectangleEXT"); +#endif /* defined(VK_EXT_discard_rectangles) */ +#if defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 + table->vkCmdSetDiscardRectangleEnableEXT = (PFN_vkCmdSetDiscardRectangleEnableEXT)load(context, "vkCmdSetDiscardRectangleEnableEXT"); + table->vkCmdSetDiscardRectangleModeEXT = (PFN_vkCmdSetDiscardRectangleModeEXT)load(context, "vkCmdSetDiscardRectangleModeEXT"); +#endif /* 
defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 */ +#if defined(VK_EXT_display_control) + table->vkDisplayPowerControlEXT = (PFN_vkDisplayPowerControlEXT)load(context, "vkDisplayPowerControlEXT"); + table->vkGetSwapchainCounterEXT = (PFN_vkGetSwapchainCounterEXT)load(context, "vkGetSwapchainCounterEXT"); + table->vkRegisterDeviceEventEXT = (PFN_vkRegisterDeviceEventEXT)load(context, "vkRegisterDeviceEventEXT"); + table->vkRegisterDisplayEventEXT = (PFN_vkRegisterDisplayEventEXT)load(context, "vkRegisterDisplayEventEXT"); +#endif /* defined(VK_EXT_display_control) */ +#if defined(VK_EXT_external_memory_host) + table->vkGetMemoryHostPointerPropertiesEXT = (PFN_vkGetMemoryHostPointerPropertiesEXT)load(context, "vkGetMemoryHostPointerPropertiesEXT"); +#endif /* defined(VK_EXT_external_memory_host) */ +#if defined(VK_EXT_full_screen_exclusive) + table->vkAcquireFullScreenExclusiveModeEXT = (PFN_vkAcquireFullScreenExclusiveModeEXT)load(context, "vkAcquireFullScreenExclusiveModeEXT"); + table->vkReleaseFullScreenExclusiveModeEXT = (PFN_vkReleaseFullScreenExclusiveModeEXT)load(context, "vkReleaseFullScreenExclusiveModeEXT"); +#endif /* defined(VK_EXT_full_screen_exclusive) */ +#if defined(VK_EXT_hdr_metadata) + table->vkSetHdrMetadataEXT = (PFN_vkSetHdrMetadataEXT)load(context, "vkSetHdrMetadataEXT"); +#endif /* defined(VK_EXT_hdr_metadata) */ +#if defined(VK_EXT_host_image_copy) + table->vkCopyImageToImageEXT = (PFN_vkCopyImageToImageEXT)load(context, "vkCopyImageToImageEXT"); + table->vkCopyImageToMemoryEXT = (PFN_vkCopyImageToMemoryEXT)load(context, "vkCopyImageToMemoryEXT"); + table->vkCopyMemoryToImageEXT = (PFN_vkCopyMemoryToImageEXT)load(context, "vkCopyMemoryToImageEXT"); + table->vkTransitionImageLayoutEXT = (PFN_vkTransitionImageLayoutEXT)load(context, "vkTransitionImageLayoutEXT"); +#endif /* defined(VK_EXT_host_image_copy) */ +#if defined(VK_EXT_host_query_reset) + table->vkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)load(context, "vkResetQueryPoolEXT"); +#endif /* defined(VK_EXT_host_query_reset) */ +#if defined(VK_EXT_image_drm_format_modifier) + table->vkGetImageDrmFormatModifierPropertiesEXT = (PFN_vkGetImageDrmFormatModifierPropertiesEXT)load(context, "vkGetImageDrmFormatModifierPropertiesEXT"); +#endif /* defined(VK_EXT_image_drm_format_modifier) */ +#if defined(VK_EXT_line_rasterization) + table->vkCmdSetLineStippleEXT = (PFN_vkCmdSetLineStippleEXT)load(context, "vkCmdSetLineStippleEXT"); +#endif /* defined(VK_EXT_line_rasterization) */ +#if defined(VK_EXT_mesh_shader) + table->vkCmdDrawMeshTasksEXT = (PFN_vkCmdDrawMeshTasksEXT)load(context, "vkCmdDrawMeshTasksEXT"); + table->vkCmdDrawMeshTasksIndirectCountEXT = (PFN_vkCmdDrawMeshTasksIndirectCountEXT)load(context, "vkCmdDrawMeshTasksIndirectCountEXT"); + table->vkCmdDrawMeshTasksIndirectEXT = (PFN_vkCmdDrawMeshTasksIndirectEXT)load(context, "vkCmdDrawMeshTasksIndirectEXT"); +#endif /* defined(VK_EXT_mesh_shader) */ +#if defined(VK_EXT_metal_objects) + table->vkExportMetalObjectsEXT = (PFN_vkExportMetalObjectsEXT)load(context, "vkExportMetalObjectsEXT"); +#endif /* defined(VK_EXT_metal_objects) */ +#if defined(VK_EXT_multi_draw) + table->vkCmdDrawMultiEXT = (PFN_vkCmdDrawMultiEXT)load(context, "vkCmdDrawMultiEXT"); + table->vkCmdDrawMultiIndexedEXT = (PFN_vkCmdDrawMultiIndexedEXT)load(context, "vkCmdDrawMultiIndexedEXT"); +#endif /* defined(VK_EXT_multi_draw) */ +#if defined(VK_EXT_opacity_micromap) + table->vkBuildMicromapsEXT = (PFN_vkBuildMicromapsEXT)load(context, "vkBuildMicromapsEXT"); + 
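/* Note: vkBuildMicromapsEXT builds on the host; the vkCmd* variants that follow record the equivalent work into a command buffer. */ +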
table->vkCmdBuildMicromapsEXT = (PFN_vkCmdBuildMicromapsEXT)load(context, "vkCmdBuildMicromapsEXT"); + table->vkCmdCopyMemoryToMicromapEXT = (PFN_vkCmdCopyMemoryToMicromapEXT)load(context, "vkCmdCopyMemoryToMicromapEXT"); + table->vkCmdCopyMicromapEXT = (PFN_vkCmdCopyMicromapEXT)load(context, "vkCmdCopyMicromapEXT"); + table->vkCmdCopyMicromapToMemoryEXT = (PFN_vkCmdCopyMicromapToMemoryEXT)load(context, "vkCmdCopyMicromapToMemoryEXT"); + table->vkCmdWriteMicromapsPropertiesEXT = (PFN_vkCmdWriteMicromapsPropertiesEXT)load(context, "vkCmdWriteMicromapsPropertiesEXT"); + table->vkCopyMemoryToMicromapEXT = (PFN_vkCopyMemoryToMicromapEXT)load(context, "vkCopyMemoryToMicromapEXT"); + table->vkCopyMicromapEXT = (PFN_vkCopyMicromapEXT)load(context, "vkCopyMicromapEXT"); + table->vkCopyMicromapToMemoryEXT = (PFN_vkCopyMicromapToMemoryEXT)load(context, "vkCopyMicromapToMemoryEXT"); + table->vkCreateMicromapEXT = (PFN_vkCreateMicromapEXT)load(context, "vkCreateMicromapEXT"); + table->vkDestroyMicromapEXT = (PFN_vkDestroyMicromapEXT)load(context, "vkDestroyMicromapEXT"); + table->vkGetDeviceMicromapCompatibilityEXT = (PFN_vkGetDeviceMicromapCompatibilityEXT)load(context, "vkGetDeviceMicromapCompatibilityEXT"); + table->vkGetMicromapBuildSizesEXT = (PFN_vkGetMicromapBuildSizesEXT)load(context, "vkGetMicromapBuildSizesEXT"); + table->vkWriteMicromapsPropertiesEXT = (PFN_vkWriteMicromapsPropertiesEXT)load(context, "vkWriteMicromapsPropertiesEXT"); +#endif /* defined(VK_EXT_opacity_micromap) */ +#if defined(VK_EXT_pageable_device_local_memory) + table->vkSetDeviceMemoryPriorityEXT = (PFN_vkSetDeviceMemoryPriorityEXT)load(context, "vkSetDeviceMemoryPriorityEXT"); +#endif /* defined(VK_EXT_pageable_device_local_memory) */ +#if defined(VK_EXT_pipeline_properties) + table->vkGetPipelinePropertiesEXT = (PFN_vkGetPipelinePropertiesEXT)load(context, "vkGetPipelinePropertiesEXT"); +#endif /* defined(VK_EXT_pipeline_properties) */ +#if defined(VK_EXT_private_data) + table->vkCreatePrivateDataSlotEXT = (PFN_vkCreatePrivateDataSlotEXT)load(context, "vkCreatePrivateDataSlotEXT"); + table->vkDestroyPrivateDataSlotEXT = (PFN_vkDestroyPrivateDataSlotEXT)load(context, "vkDestroyPrivateDataSlotEXT"); + table->vkGetPrivateDataEXT = (PFN_vkGetPrivateDataEXT)load(context, "vkGetPrivateDataEXT"); + table->vkSetPrivateDataEXT = (PFN_vkSetPrivateDataEXT)load(context, "vkSetPrivateDataEXT"); +#endif /* defined(VK_EXT_private_data) */ +#if defined(VK_EXT_sample_locations) + table->vkCmdSetSampleLocationsEXT = (PFN_vkCmdSetSampleLocationsEXT)load(context, "vkCmdSetSampleLocationsEXT"); +#endif /* defined(VK_EXT_sample_locations) */ +#if defined(VK_EXT_shader_module_identifier) + table->vkGetShaderModuleCreateInfoIdentifierEXT = (PFN_vkGetShaderModuleCreateInfoIdentifierEXT)load(context, "vkGetShaderModuleCreateInfoIdentifierEXT"); + table->vkGetShaderModuleIdentifierEXT = (PFN_vkGetShaderModuleIdentifierEXT)load(context, "vkGetShaderModuleIdentifierEXT"); +#endif /* defined(VK_EXT_shader_module_identifier) */ +#if defined(VK_EXT_shader_object) + table->vkCmdBindShadersEXT = (PFN_vkCmdBindShadersEXT)load(context, "vkCmdBindShadersEXT"); + table->vkCreateShadersEXT = (PFN_vkCreateShadersEXT)load(context, "vkCreateShadersEXT"); + table->vkDestroyShaderEXT = (PFN_vkDestroyShaderEXT)load(context, "vkDestroyShaderEXT"); + table->vkGetShaderBinaryDataEXT = (PFN_vkGetShaderBinaryDataEXT)load(context, "vkGetShaderBinaryDataEXT"); +#endif /* defined(VK_EXT_shader_object) */ +#if defined(VK_EXT_swapchain_maintenance1) + 
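/* Note: VK_EXT_swapchain_maintenance1 lets an application return acquired but unpresented swapchain images to the presentation engine. */ +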
table->vkReleaseSwapchainImagesEXT = (PFN_vkReleaseSwapchainImagesEXT)load(context, "vkReleaseSwapchainImagesEXT"); +#endif /* defined(VK_EXT_swapchain_maintenance1) */ +#if defined(VK_EXT_transform_feedback) + table->vkCmdBeginQueryIndexedEXT = (PFN_vkCmdBeginQueryIndexedEXT)load(context, "vkCmdBeginQueryIndexedEXT"); + table->vkCmdBeginTransformFeedbackEXT = (PFN_vkCmdBeginTransformFeedbackEXT)load(context, "vkCmdBeginTransformFeedbackEXT"); + table->vkCmdBindTransformFeedbackBuffersEXT = (PFN_vkCmdBindTransformFeedbackBuffersEXT)load(context, "vkCmdBindTransformFeedbackBuffersEXT"); + table->vkCmdDrawIndirectByteCountEXT = (PFN_vkCmdDrawIndirectByteCountEXT)load(context, "vkCmdDrawIndirectByteCountEXT"); + table->vkCmdEndQueryIndexedEXT = (PFN_vkCmdEndQueryIndexedEXT)load(context, "vkCmdEndQueryIndexedEXT"); + table->vkCmdEndTransformFeedbackEXT = (PFN_vkCmdEndTransformFeedbackEXT)load(context, "vkCmdEndTransformFeedbackEXT"); +#endif /* defined(VK_EXT_transform_feedback) */ +#if defined(VK_EXT_validation_cache) + table->vkCreateValidationCacheEXT = (PFN_vkCreateValidationCacheEXT)load(context, "vkCreateValidationCacheEXT"); + table->vkDestroyValidationCacheEXT = (PFN_vkDestroyValidationCacheEXT)load(context, "vkDestroyValidationCacheEXT"); + table->vkGetValidationCacheDataEXT = (PFN_vkGetValidationCacheDataEXT)load(context, "vkGetValidationCacheDataEXT"); + table->vkMergeValidationCachesEXT = (PFN_vkMergeValidationCachesEXT)load(context, "vkMergeValidationCachesEXT"); +#endif /* defined(VK_EXT_validation_cache) */ +#if defined(VK_FUCHSIA_buffer_collection) + table->vkCreateBufferCollectionFUCHSIA = (PFN_vkCreateBufferCollectionFUCHSIA)load(context, "vkCreateBufferCollectionFUCHSIA"); + table->vkDestroyBufferCollectionFUCHSIA = (PFN_vkDestroyBufferCollectionFUCHSIA)load(context, "vkDestroyBufferCollectionFUCHSIA"); + table->vkGetBufferCollectionPropertiesFUCHSIA = (PFN_vkGetBufferCollectionPropertiesFUCHSIA)load(context, "vkGetBufferCollectionPropertiesFUCHSIA"); + table->vkSetBufferCollectionBufferConstraintsFUCHSIA = (PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA)load(context, "vkSetBufferCollectionBufferConstraintsFUCHSIA"); + table->vkSetBufferCollectionImageConstraintsFUCHSIA = (PFN_vkSetBufferCollectionImageConstraintsFUCHSIA)load(context, "vkSetBufferCollectionImageConstraintsFUCHSIA"); +#endif /* defined(VK_FUCHSIA_buffer_collection) */ +#if defined(VK_FUCHSIA_external_memory) + table->vkGetMemoryZirconHandleFUCHSIA = (PFN_vkGetMemoryZirconHandleFUCHSIA)load(context, "vkGetMemoryZirconHandleFUCHSIA"); + table->vkGetMemoryZirconHandlePropertiesFUCHSIA = (PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA)load(context, "vkGetMemoryZirconHandlePropertiesFUCHSIA"); +#endif /* defined(VK_FUCHSIA_external_memory) */ +#if defined(VK_FUCHSIA_external_semaphore) + table->vkGetSemaphoreZirconHandleFUCHSIA = (PFN_vkGetSemaphoreZirconHandleFUCHSIA)load(context, "vkGetSemaphoreZirconHandleFUCHSIA"); + table->vkImportSemaphoreZirconHandleFUCHSIA = (PFN_vkImportSemaphoreZirconHandleFUCHSIA)load(context, "vkImportSemaphoreZirconHandleFUCHSIA"); +#endif /* defined(VK_FUCHSIA_external_semaphore) */ +#if defined(VK_GOOGLE_display_timing) + table->vkGetPastPresentationTimingGOOGLE = (PFN_vkGetPastPresentationTimingGOOGLE)load(context, "vkGetPastPresentationTimingGOOGLE"); + table->vkGetRefreshCycleDurationGOOGLE = (PFN_vkGetRefreshCycleDurationGOOGLE)load(context, "vkGetRefreshCycleDurationGOOGLE"); +#endif /* defined(VK_GOOGLE_display_timing) */ +#if defined(VK_HUAWEI_cluster_culling_shader) + 
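/* Note: direct and indirect draw commands for HUAWEI cluster culling shaders. */ +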
table->vkCmdDrawClusterHUAWEI = (PFN_vkCmdDrawClusterHUAWEI)load(context, "vkCmdDrawClusterHUAWEI"); + table->vkCmdDrawClusterIndirectHUAWEI = (PFN_vkCmdDrawClusterIndirectHUAWEI)load(context, "vkCmdDrawClusterIndirectHUAWEI"); +#endif /* defined(VK_HUAWEI_cluster_culling_shader) */ +#if defined(VK_HUAWEI_invocation_mask) + table->vkCmdBindInvocationMaskHUAWEI = (PFN_vkCmdBindInvocationMaskHUAWEI)load(context, "vkCmdBindInvocationMaskHUAWEI"); +#endif /* defined(VK_HUAWEI_invocation_mask) */ +#if defined(VK_HUAWEI_subpass_shading) + table->vkCmdSubpassShadingHUAWEI = (PFN_vkCmdSubpassShadingHUAWEI)load(context, "vkCmdSubpassShadingHUAWEI"); + table->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = (PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI)load(context, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI"); +#endif /* defined(VK_HUAWEI_subpass_shading) */ +#if defined(VK_INTEL_performance_query) + table->vkAcquirePerformanceConfigurationINTEL = (PFN_vkAcquirePerformanceConfigurationINTEL)load(context, "vkAcquirePerformanceConfigurationINTEL"); + table->vkCmdSetPerformanceMarkerINTEL = (PFN_vkCmdSetPerformanceMarkerINTEL)load(context, "vkCmdSetPerformanceMarkerINTEL"); + table->vkCmdSetPerformanceOverrideINTEL = (PFN_vkCmdSetPerformanceOverrideINTEL)load(context, "vkCmdSetPerformanceOverrideINTEL"); + table->vkCmdSetPerformanceStreamMarkerINTEL = (PFN_vkCmdSetPerformanceStreamMarkerINTEL)load(context, "vkCmdSetPerformanceStreamMarkerINTEL"); + table->vkGetPerformanceParameterINTEL = (PFN_vkGetPerformanceParameterINTEL)load(context, "vkGetPerformanceParameterINTEL"); + table->vkInitializePerformanceApiINTEL = (PFN_vkInitializePerformanceApiINTEL)load(context, "vkInitializePerformanceApiINTEL"); + table->vkQueueSetPerformanceConfigurationINTEL = (PFN_vkQueueSetPerformanceConfigurationINTEL)load(context, "vkQueueSetPerformanceConfigurationINTEL"); + table->vkReleasePerformanceConfigurationINTEL = (PFN_vkReleasePerformanceConfigurationINTEL)load(context, "vkReleasePerformanceConfigurationINTEL"); + table->vkUninitializePerformanceApiINTEL = (PFN_vkUninitializePerformanceApiINTEL)load(context, "vkUninitializePerformanceApiINTEL"); +#endif /* defined(VK_INTEL_performance_query) */ +#if defined(VK_KHR_acceleration_structure) + table->vkBuildAccelerationStructuresKHR = (PFN_vkBuildAccelerationStructuresKHR)load(context, "vkBuildAccelerationStructuresKHR"); + table->vkCmdBuildAccelerationStructuresIndirectKHR = (PFN_vkCmdBuildAccelerationStructuresIndirectKHR)load(context, "vkCmdBuildAccelerationStructuresIndirectKHR"); + table->vkCmdBuildAccelerationStructuresKHR = (PFN_vkCmdBuildAccelerationStructuresKHR)load(context, "vkCmdBuildAccelerationStructuresKHR"); + table->vkCmdCopyAccelerationStructureKHR = (PFN_vkCmdCopyAccelerationStructureKHR)load(context, "vkCmdCopyAccelerationStructureKHR"); + table->vkCmdCopyAccelerationStructureToMemoryKHR = (PFN_vkCmdCopyAccelerationStructureToMemoryKHR)load(context, "vkCmdCopyAccelerationStructureToMemoryKHR"); + table->vkCmdCopyMemoryToAccelerationStructureKHR = (PFN_vkCmdCopyMemoryToAccelerationStructureKHR)load(context, "vkCmdCopyMemoryToAccelerationStructureKHR"); + table->vkCmdWriteAccelerationStructuresPropertiesKHR = (PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)load(context, "vkCmdWriteAccelerationStructuresPropertiesKHR"); + table->vkCopyAccelerationStructureKHR = (PFN_vkCopyAccelerationStructureKHR)load(context, "vkCopyAccelerationStructureKHR"); + table->vkCopyAccelerationStructureToMemoryKHR = 
(PFN_vkCopyAccelerationStructureToMemoryKHR)load(context, "vkCopyAccelerationStructureToMemoryKHR"); + table->vkCopyMemoryToAccelerationStructureKHR = (PFN_vkCopyMemoryToAccelerationStructureKHR)load(context, "vkCopyMemoryToAccelerationStructureKHR"); + table->vkCreateAccelerationStructureKHR = (PFN_vkCreateAccelerationStructureKHR)load(context, "vkCreateAccelerationStructureKHR"); + table->vkDestroyAccelerationStructureKHR = (PFN_vkDestroyAccelerationStructureKHR)load(context, "vkDestroyAccelerationStructureKHR"); + table->vkGetAccelerationStructureBuildSizesKHR = (PFN_vkGetAccelerationStructureBuildSizesKHR)load(context, "vkGetAccelerationStructureBuildSizesKHR"); + table->vkGetAccelerationStructureDeviceAddressKHR = (PFN_vkGetAccelerationStructureDeviceAddressKHR)load(context, "vkGetAccelerationStructureDeviceAddressKHR"); + table->vkGetDeviceAccelerationStructureCompatibilityKHR = (PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)load(context, "vkGetDeviceAccelerationStructureCompatibilityKHR"); + table->vkWriteAccelerationStructuresPropertiesKHR = (PFN_vkWriteAccelerationStructuresPropertiesKHR)load(context, "vkWriteAccelerationStructuresPropertiesKHR"); +#endif /* defined(VK_KHR_acceleration_structure) */ +#if defined(VK_KHR_bind_memory2) + table->vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2KHR)load(context, "vkBindBufferMemory2KHR"); + table->vkBindImageMemory2KHR = (PFN_vkBindImageMemory2KHR)load(context, "vkBindImageMemory2KHR"); +#endif /* defined(VK_KHR_bind_memory2) */ +#if defined(VK_KHR_buffer_device_address) + table->vkGetBufferDeviceAddressKHR = (PFN_vkGetBufferDeviceAddressKHR)load(context, "vkGetBufferDeviceAddressKHR"); + table->vkGetBufferOpaqueCaptureAddressKHR = (PFN_vkGetBufferOpaqueCaptureAddressKHR)load(context, "vkGetBufferOpaqueCaptureAddressKHR"); + table->vkGetDeviceMemoryOpaqueCaptureAddressKHR = (PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)load(context, "vkGetDeviceMemoryOpaqueCaptureAddressKHR"); +#endif /* defined(VK_KHR_buffer_device_address) */ +#if defined(VK_KHR_copy_commands2) + table->vkCmdBlitImage2KHR = (PFN_vkCmdBlitImage2KHR)load(context, "vkCmdBlitImage2KHR"); + table->vkCmdCopyBuffer2KHR = (PFN_vkCmdCopyBuffer2KHR)load(context, "vkCmdCopyBuffer2KHR"); + table->vkCmdCopyBufferToImage2KHR = (PFN_vkCmdCopyBufferToImage2KHR)load(context, "vkCmdCopyBufferToImage2KHR"); + table->vkCmdCopyImage2KHR = (PFN_vkCmdCopyImage2KHR)load(context, "vkCmdCopyImage2KHR"); + table->vkCmdCopyImageToBuffer2KHR = (PFN_vkCmdCopyImageToBuffer2KHR)load(context, "vkCmdCopyImageToBuffer2KHR"); + table->vkCmdResolveImage2KHR = (PFN_vkCmdResolveImage2KHR)load(context, "vkCmdResolveImage2KHR"); +#endif /* defined(VK_KHR_copy_commands2) */ +#if defined(VK_KHR_create_renderpass2) + table->vkCmdBeginRenderPass2KHR = (PFN_vkCmdBeginRenderPass2KHR)load(context, "vkCmdBeginRenderPass2KHR"); + table->vkCmdEndRenderPass2KHR = (PFN_vkCmdEndRenderPass2KHR)load(context, "vkCmdEndRenderPass2KHR"); + table->vkCmdNextSubpass2KHR = (PFN_vkCmdNextSubpass2KHR)load(context, "vkCmdNextSubpass2KHR"); + table->vkCreateRenderPass2KHR = (PFN_vkCreateRenderPass2KHR)load(context, "vkCreateRenderPass2KHR"); +#endif /* defined(VK_KHR_create_renderpass2) */ +#if defined(VK_KHR_deferred_host_operations) + table->vkCreateDeferredOperationKHR = (PFN_vkCreateDeferredOperationKHR)load(context, "vkCreateDeferredOperationKHR"); + table->vkDeferredOperationJoinKHR = (PFN_vkDeferredOperationJoinKHR)load(context, "vkDeferredOperationJoinKHR"); + table->vkDestroyDeferredOperationKHR = 
(PFN_vkDestroyDeferredOperationKHR)load(context, "vkDestroyDeferredOperationKHR"); + table->vkGetDeferredOperationMaxConcurrencyKHR = (PFN_vkGetDeferredOperationMaxConcurrencyKHR)load(context, "vkGetDeferredOperationMaxConcurrencyKHR"); + table->vkGetDeferredOperationResultKHR = (PFN_vkGetDeferredOperationResultKHR)load(context, "vkGetDeferredOperationResultKHR"); +#endif /* defined(VK_KHR_deferred_host_operations) */ +#if defined(VK_KHR_descriptor_update_template) + table->vkCreateDescriptorUpdateTemplateKHR = (PFN_vkCreateDescriptorUpdateTemplateKHR)load(context, "vkCreateDescriptorUpdateTemplateKHR"); + table->vkDestroyDescriptorUpdateTemplateKHR = (PFN_vkDestroyDescriptorUpdateTemplateKHR)load(context, "vkDestroyDescriptorUpdateTemplateKHR"); + table->vkUpdateDescriptorSetWithTemplateKHR = (PFN_vkUpdateDescriptorSetWithTemplateKHR)load(context, "vkUpdateDescriptorSetWithTemplateKHR"); +#endif /* defined(VK_KHR_descriptor_update_template) */ +#if defined(VK_KHR_device_group) + table->vkCmdDispatchBaseKHR = (PFN_vkCmdDispatchBaseKHR)load(context, "vkCmdDispatchBaseKHR"); + table->vkCmdSetDeviceMaskKHR = (PFN_vkCmdSetDeviceMaskKHR)load(context, "vkCmdSetDeviceMaskKHR"); + table->vkGetDeviceGroupPeerMemoryFeaturesKHR = (PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)load(context, "vkGetDeviceGroupPeerMemoryFeaturesKHR"); +#endif /* defined(VK_KHR_device_group) */ +#if defined(VK_KHR_display_swapchain) + table->vkCreateSharedSwapchainsKHR = (PFN_vkCreateSharedSwapchainsKHR)load(context, "vkCreateSharedSwapchainsKHR"); +#endif /* defined(VK_KHR_display_swapchain) */ +#if defined(VK_KHR_draw_indirect_count) + table->vkCmdDrawIndexedIndirectCountKHR = (PFN_vkCmdDrawIndexedIndirectCountKHR)load(context, "vkCmdDrawIndexedIndirectCountKHR"); + table->vkCmdDrawIndirectCountKHR = (PFN_vkCmdDrawIndirectCountKHR)load(context, "vkCmdDrawIndirectCountKHR"); +#endif /* defined(VK_KHR_draw_indirect_count) */ +#if defined(VK_KHR_dynamic_rendering) + table->vkCmdBeginRenderingKHR = (PFN_vkCmdBeginRenderingKHR)load(context, "vkCmdBeginRenderingKHR"); + table->vkCmdEndRenderingKHR = (PFN_vkCmdEndRenderingKHR)load(context, "vkCmdEndRenderingKHR"); +#endif /* defined(VK_KHR_dynamic_rendering) */ +#if defined(VK_KHR_external_fence_fd) + table->vkGetFenceFdKHR = (PFN_vkGetFenceFdKHR)load(context, "vkGetFenceFdKHR"); + table->vkImportFenceFdKHR = (PFN_vkImportFenceFdKHR)load(context, "vkImportFenceFdKHR"); +#endif /* defined(VK_KHR_external_fence_fd) */ +#if defined(VK_KHR_external_fence_win32) + table->vkGetFenceWin32HandleKHR = (PFN_vkGetFenceWin32HandleKHR)load(context, "vkGetFenceWin32HandleKHR"); + table->vkImportFenceWin32HandleKHR = (PFN_vkImportFenceWin32HandleKHR)load(context, "vkImportFenceWin32HandleKHR"); +#endif /* defined(VK_KHR_external_fence_win32) */ +#if defined(VK_KHR_external_memory_fd) + table->vkGetMemoryFdKHR = (PFN_vkGetMemoryFdKHR)load(context, "vkGetMemoryFdKHR"); + table->vkGetMemoryFdPropertiesKHR = (PFN_vkGetMemoryFdPropertiesKHR)load(context, "vkGetMemoryFdPropertiesKHR"); +#endif /* defined(VK_KHR_external_memory_fd) */ +#if defined(VK_KHR_external_memory_win32) + table->vkGetMemoryWin32HandleKHR = (PFN_vkGetMemoryWin32HandleKHR)load(context, "vkGetMemoryWin32HandleKHR"); + table->vkGetMemoryWin32HandlePropertiesKHR = (PFN_vkGetMemoryWin32HandlePropertiesKHR)load(context, "vkGetMemoryWin32HandlePropertiesKHR"); +#endif /* defined(VK_KHR_external_memory_win32) */ +#if defined(VK_KHR_external_semaphore_fd) + table->vkGetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR)load(context, 
"vkGetSemaphoreFdKHR"); + table->vkImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR)load(context, "vkImportSemaphoreFdKHR"); +#endif /* defined(VK_KHR_external_semaphore_fd) */ +#if defined(VK_KHR_external_semaphore_win32) + table->vkGetSemaphoreWin32HandleKHR = (PFN_vkGetSemaphoreWin32HandleKHR)load(context, "vkGetSemaphoreWin32HandleKHR"); + table->vkImportSemaphoreWin32HandleKHR = (PFN_vkImportSemaphoreWin32HandleKHR)load(context, "vkImportSemaphoreWin32HandleKHR"); +#endif /* defined(VK_KHR_external_semaphore_win32) */ +#if defined(VK_KHR_fragment_shading_rate) + table->vkCmdSetFragmentShadingRateKHR = (PFN_vkCmdSetFragmentShadingRateKHR)load(context, "vkCmdSetFragmentShadingRateKHR"); +#endif /* defined(VK_KHR_fragment_shading_rate) */ +#if defined(VK_KHR_get_memory_requirements2) + table->vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR)load(context, "vkGetBufferMemoryRequirements2KHR"); + table->vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2KHR)load(context, "vkGetImageMemoryRequirements2KHR"); + table->vkGetImageSparseMemoryRequirements2KHR = (PFN_vkGetImageSparseMemoryRequirements2KHR)load(context, "vkGetImageSparseMemoryRequirements2KHR"); +#endif /* defined(VK_KHR_get_memory_requirements2) */ +#if defined(VK_KHR_maintenance1) + table->vkTrimCommandPoolKHR = (PFN_vkTrimCommandPoolKHR)load(context, "vkTrimCommandPoolKHR"); +#endif /* defined(VK_KHR_maintenance1) */ +#if defined(VK_KHR_maintenance3) + table->vkGetDescriptorSetLayoutSupportKHR = (PFN_vkGetDescriptorSetLayoutSupportKHR)load(context, "vkGetDescriptorSetLayoutSupportKHR"); +#endif /* defined(VK_KHR_maintenance3) */ +#if defined(VK_KHR_maintenance4) + table->vkGetDeviceBufferMemoryRequirementsKHR = (PFN_vkGetDeviceBufferMemoryRequirementsKHR)load(context, "vkGetDeviceBufferMemoryRequirementsKHR"); + table->vkGetDeviceImageMemoryRequirementsKHR = (PFN_vkGetDeviceImageMemoryRequirementsKHR)load(context, "vkGetDeviceImageMemoryRequirementsKHR"); + table->vkGetDeviceImageSparseMemoryRequirementsKHR = (PFN_vkGetDeviceImageSparseMemoryRequirementsKHR)load(context, "vkGetDeviceImageSparseMemoryRequirementsKHR"); +#endif /* defined(VK_KHR_maintenance4) */ +#if defined(VK_KHR_maintenance5) + table->vkCmdBindIndexBuffer2KHR = (PFN_vkCmdBindIndexBuffer2KHR)load(context, "vkCmdBindIndexBuffer2KHR"); + table->vkGetDeviceImageSubresourceLayoutKHR = (PFN_vkGetDeviceImageSubresourceLayoutKHR)load(context, "vkGetDeviceImageSubresourceLayoutKHR"); + table->vkGetImageSubresourceLayout2KHR = (PFN_vkGetImageSubresourceLayout2KHR)load(context, "vkGetImageSubresourceLayout2KHR"); + table->vkGetRenderingAreaGranularityKHR = (PFN_vkGetRenderingAreaGranularityKHR)load(context, "vkGetRenderingAreaGranularityKHR"); +#endif /* defined(VK_KHR_maintenance5) */ +#if defined(VK_KHR_map_memory2) + table->vkMapMemory2KHR = (PFN_vkMapMemory2KHR)load(context, "vkMapMemory2KHR"); + table->vkUnmapMemory2KHR = (PFN_vkUnmapMemory2KHR)load(context, "vkUnmapMemory2KHR"); +#endif /* defined(VK_KHR_map_memory2) */ +#if defined(VK_KHR_performance_query) + table->vkAcquireProfilingLockKHR = (PFN_vkAcquireProfilingLockKHR)load(context, "vkAcquireProfilingLockKHR"); + table->vkReleaseProfilingLockKHR = (PFN_vkReleaseProfilingLockKHR)load(context, "vkReleaseProfilingLockKHR"); +#endif /* defined(VK_KHR_performance_query) */ +#if defined(VK_KHR_pipeline_executable_properties) + table->vkGetPipelineExecutableInternalRepresentationsKHR = (PFN_vkGetPipelineExecutableInternalRepresentationsKHR)load(context, 
"vkGetPipelineExecutableInternalRepresentationsKHR"); + table->vkGetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR)load(context, "vkGetPipelineExecutablePropertiesKHR"); + table->vkGetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR)load(context, "vkGetPipelineExecutableStatisticsKHR"); +#endif /* defined(VK_KHR_pipeline_executable_properties) */ +#if defined(VK_KHR_present_wait) + table->vkWaitForPresentKHR = (PFN_vkWaitForPresentKHR)load(context, "vkWaitForPresentKHR"); +#endif /* defined(VK_KHR_present_wait) */ +#if defined(VK_KHR_push_descriptor) + table->vkCmdPushDescriptorSetKHR = (PFN_vkCmdPushDescriptorSetKHR)load(context, "vkCmdPushDescriptorSetKHR"); +#endif /* defined(VK_KHR_push_descriptor) */ +#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) + table->vkCmdTraceRaysIndirect2KHR = (PFN_vkCmdTraceRaysIndirect2KHR)load(context, "vkCmdTraceRaysIndirect2KHR"); +#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */ +#if defined(VK_KHR_ray_tracing_pipeline) + table->vkCmdSetRayTracingPipelineStackSizeKHR = (PFN_vkCmdSetRayTracingPipelineStackSizeKHR)load(context, "vkCmdSetRayTracingPipelineStackSizeKHR"); + table->vkCmdTraceRaysIndirectKHR = (PFN_vkCmdTraceRaysIndirectKHR)load(context, "vkCmdTraceRaysIndirectKHR"); + table->vkCmdTraceRaysKHR = (PFN_vkCmdTraceRaysKHR)load(context, "vkCmdTraceRaysKHR"); + table->vkCreateRayTracingPipelinesKHR = (PFN_vkCreateRayTracingPipelinesKHR)load(context, "vkCreateRayTracingPipelinesKHR"); + table->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = (PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)load(context, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR"); + table->vkGetRayTracingShaderGroupHandlesKHR = (PFN_vkGetRayTracingShaderGroupHandlesKHR)load(context, "vkGetRayTracingShaderGroupHandlesKHR"); + table->vkGetRayTracingShaderGroupStackSizeKHR = (PFN_vkGetRayTracingShaderGroupStackSizeKHR)load(context, "vkGetRayTracingShaderGroupStackSizeKHR"); +#endif /* defined(VK_KHR_ray_tracing_pipeline) */ +#if defined(VK_KHR_sampler_ycbcr_conversion) + table->vkCreateSamplerYcbcrConversionKHR = (PFN_vkCreateSamplerYcbcrConversionKHR)load(context, "vkCreateSamplerYcbcrConversionKHR"); + table->vkDestroySamplerYcbcrConversionKHR = (PFN_vkDestroySamplerYcbcrConversionKHR)load(context, "vkDestroySamplerYcbcrConversionKHR"); +#endif /* defined(VK_KHR_sampler_ycbcr_conversion) */ +#if defined(VK_KHR_shared_presentable_image) + table->vkGetSwapchainStatusKHR = (PFN_vkGetSwapchainStatusKHR)load(context, "vkGetSwapchainStatusKHR"); +#endif /* defined(VK_KHR_shared_presentable_image) */ +#if defined(VK_KHR_swapchain) + table->vkAcquireNextImageKHR = (PFN_vkAcquireNextImageKHR)load(context, "vkAcquireNextImageKHR"); + table->vkCreateSwapchainKHR = (PFN_vkCreateSwapchainKHR)load(context, "vkCreateSwapchainKHR"); + table->vkDestroySwapchainKHR = (PFN_vkDestroySwapchainKHR)load(context, "vkDestroySwapchainKHR"); + table->vkGetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR)load(context, "vkGetSwapchainImagesKHR"); + table->vkQueuePresentKHR = (PFN_vkQueuePresentKHR)load(context, "vkQueuePresentKHR"); +#endif /* defined(VK_KHR_swapchain) */ +#if defined(VK_KHR_synchronization2) + table->vkCmdPipelineBarrier2KHR = (PFN_vkCmdPipelineBarrier2KHR)load(context, "vkCmdPipelineBarrier2KHR"); + table->vkCmdResetEvent2KHR = (PFN_vkCmdResetEvent2KHR)load(context, "vkCmdResetEvent2KHR"); + table->vkCmdSetEvent2KHR = 
(PFN_vkCmdSetEvent2KHR)load(context, "vkCmdSetEvent2KHR"); + table->vkCmdWaitEvents2KHR = (PFN_vkCmdWaitEvents2KHR)load(context, "vkCmdWaitEvents2KHR"); + table->vkCmdWriteTimestamp2KHR = (PFN_vkCmdWriteTimestamp2KHR)load(context, "vkCmdWriteTimestamp2KHR"); + table->vkQueueSubmit2KHR = (PFN_vkQueueSubmit2KHR)load(context, "vkQueueSubmit2KHR"); +#endif /* defined(VK_KHR_synchronization2) */ +#if defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) + table->vkCmdWriteBufferMarker2AMD = (PFN_vkCmdWriteBufferMarker2AMD)load(context, "vkCmdWriteBufferMarker2AMD"); +#endif /* defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) */ +#if defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) + table->vkGetQueueCheckpointData2NV = (PFN_vkGetQueueCheckpointData2NV)load(context, "vkGetQueueCheckpointData2NV"); +#endif /* defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) */ +#if defined(VK_KHR_timeline_semaphore) + table->vkGetSemaphoreCounterValueKHR = (PFN_vkGetSemaphoreCounterValueKHR)load(context, "vkGetSemaphoreCounterValueKHR"); + table->vkSignalSemaphoreKHR = (PFN_vkSignalSemaphoreKHR)load(context, "vkSignalSemaphoreKHR"); + table->vkWaitSemaphoresKHR = (PFN_vkWaitSemaphoresKHR)load(context, "vkWaitSemaphoresKHR"); +#endif /* defined(VK_KHR_timeline_semaphore) */ +#if defined(VK_KHR_video_decode_queue) + table->vkCmdDecodeVideoKHR = (PFN_vkCmdDecodeVideoKHR)load(context, "vkCmdDecodeVideoKHR"); +#endif /* defined(VK_KHR_video_decode_queue) */ +#if defined(VK_KHR_video_encode_queue) + table->vkCmdEncodeVideoKHR = (PFN_vkCmdEncodeVideoKHR)load(context, "vkCmdEncodeVideoKHR"); + table->vkGetEncodedVideoSessionParametersKHR = (PFN_vkGetEncodedVideoSessionParametersKHR)load(context, "vkGetEncodedVideoSessionParametersKHR"); +#endif /* defined(VK_KHR_video_encode_queue) */ +#if defined(VK_KHR_video_queue) + table->vkBindVideoSessionMemoryKHR = (PFN_vkBindVideoSessionMemoryKHR)load(context, "vkBindVideoSessionMemoryKHR"); + table->vkCmdBeginVideoCodingKHR = (PFN_vkCmdBeginVideoCodingKHR)load(context, "vkCmdBeginVideoCodingKHR"); + table->vkCmdControlVideoCodingKHR = (PFN_vkCmdControlVideoCodingKHR)load(context, "vkCmdControlVideoCodingKHR"); + table->vkCmdEndVideoCodingKHR = (PFN_vkCmdEndVideoCodingKHR)load(context, "vkCmdEndVideoCodingKHR"); + table->vkCreateVideoSessionKHR = (PFN_vkCreateVideoSessionKHR)load(context, "vkCreateVideoSessionKHR"); + table->vkCreateVideoSessionParametersKHR = (PFN_vkCreateVideoSessionParametersKHR)load(context, "vkCreateVideoSessionParametersKHR"); + table->vkDestroyVideoSessionKHR = (PFN_vkDestroyVideoSessionKHR)load(context, "vkDestroyVideoSessionKHR"); + table->vkDestroyVideoSessionParametersKHR = (PFN_vkDestroyVideoSessionParametersKHR)load(context, "vkDestroyVideoSessionParametersKHR"); + table->vkGetVideoSessionMemoryRequirementsKHR = (PFN_vkGetVideoSessionMemoryRequirementsKHR)load(context, "vkGetVideoSessionMemoryRequirementsKHR"); + table->vkUpdateVideoSessionParametersKHR = (PFN_vkUpdateVideoSessionParametersKHR)load(context, "vkUpdateVideoSessionParametersKHR"); +#endif /* defined(VK_KHR_video_queue) */ +#if defined(VK_NVX_binary_import) + table->vkCmdCuLaunchKernelNVX = (PFN_vkCmdCuLaunchKernelNVX)load(context, "vkCmdCuLaunchKernelNVX"); + table->vkCreateCuFunctionNVX = (PFN_vkCreateCuFunctionNVX)load(context, "vkCreateCuFunctionNVX"); + table->vkCreateCuModuleNVX = (PFN_vkCreateCuModuleNVX)load(context, "vkCreateCuModuleNVX"); + table->vkDestroyCuFunctionNVX = 
(PFN_vkDestroyCuFunctionNVX)load(context, "vkDestroyCuFunctionNVX"); + table->vkDestroyCuModuleNVX = (PFN_vkDestroyCuModuleNVX)load(context, "vkDestroyCuModuleNVX"); +#endif /* defined(VK_NVX_binary_import) */ +#if defined(VK_NVX_image_view_handle) + table->vkGetImageViewAddressNVX = (PFN_vkGetImageViewAddressNVX)load(context, "vkGetImageViewAddressNVX"); + table->vkGetImageViewHandleNVX = (PFN_vkGetImageViewHandleNVX)load(context, "vkGetImageViewHandleNVX"); +#endif /* defined(VK_NVX_image_view_handle) */ +#if defined(VK_NV_clip_space_w_scaling) + table->vkCmdSetViewportWScalingNV = (PFN_vkCmdSetViewportWScalingNV)load(context, "vkCmdSetViewportWScalingNV"); +#endif /* defined(VK_NV_clip_space_w_scaling) */ +#if defined(VK_NV_copy_memory_indirect) + table->vkCmdCopyMemoryIndirectNV = (PFN_vkCmdCopyMemoryIndirectNV)load(context, "vkCmdCopyMemoryIndirectNV"); + table->vkCmdCopyMemoryToImageIndirectNV = (PFN_vkCmdCopyMemoryToImageIndirectNV)load(context, "vkCmdCopyMemoryToImageIndirectNV"); +#endif /* defined(VK_NV_copy_memory_indirect) */ +#if defined(VK_NV_cuda_kernel_launch) + table->vkCmdCudaLaunchKernelNV = (PFN_vkCmdCudaLaunchKernelNV)load(context, "vkCmdCudaLaunchKernelNV"); + table->vkCreateCudaFunctionNV = (PFN_vkCreateCudaFunctionNV)load(context, "vkCreateCudaFunctionNV"); + table->vkCreateCudaModuleNV = (PFN_vkCreateCudaModuleNV)load(context, "vkCreateCudaModuleNV"); + table->vkDestroyCudaFunctionNV = (PFN_vkDestroyCudaFunctionNV)load(context, "vkDestroyCudaFunctionNV"); + table->vkDestroyCudaModuleNV = (PFN_vkDestroyCudaModuleNV)load(context, "vkDestroyCudaModuleNV"); + table->vkGetCudaModuleCacheNV = (PFN_vkGetCudaModuleCacheNV)load(context, "vkGetCudaModuleCacheNV"); +#endif /* defined(VK_NV_cuda_kernel_launch) */ +#if defined(VK_NV_device_diagnostic_checkpoints) + table->vkCmdSetCheckpointNV = (PFN_vkCmdSetCheckpointNV)load(context, "vkCmdSetCheckpointNV"); + table->vkGetQueueCheckpointDataNV = (PFN_vkGetQueueCheckpointDataNV)load(context, "vkGetQueueCheckpointDataNV"); +#endif /* defined(VK_NV_device_diagnostic_checkpoints) */ +#if defined(VK_NV_device_generated_commands) + table->vkCmdBindPipelineShaderGroupNV = (PFN_vkCmdBindPipelineShaderGroupNV)load(context, "vkCmdBindPipelineShaderGroupNV"); + table->vkCmdExecuteGeneratedCommandsNV = (PFN_vkCmdExecuteGeneratedCommandsNV)load(context, "vkCmdExecuteGeneratedCommandsNV"); + table->vkCmdPreprocessGeneratedCommandsNV = (PFN_vkCmdPreprocessGeneratedCommandsNV)load(context, "vkCmdPreprocessGeneratedCommandsNV"); + table->vkCreateIndirectCommandsLayoutNV = (PFN_vkCreateIndirectCommandsLayoutNV)load(context, "vkCreateIndirectCommandsLayoutNV"); + table->vkDestroyIndirectCommandsLayoutNV = (PFN_vkDestroyIndirectCommandsLayoutNV)load(context, "vkDestroyIndirectCommandsLayoutNV"); + table->vkGetGeneratedCommandsMemoryRequirementsNV = (PFN_vkGetGeneratedCommandsMemoryRequirementsNV)load(context, "vkGetGeneratedCommandsMemoryRequirementsNV"); +#endif /* defined(VK_NV_device_generated_commands) */ +#if defined(VK_NV_device_generated_commands_compute) + table->vkCmdUpdatePipelineIndirectBufferNV = (PFN_vkCmdUpdatePipelineIndirectBufferNV)load(context, "vkCmdUpdatePipelineIndirectBufferNV"); + table->vkGetPipelineIndirectDeviceAddressNV = (PFN_vkGetPipelineIndirectDeviceAddressNV)load(context, "vkGetPipelineIndirectDeviceAddressNV"); + table->vkGetPipelineIndirectMemoryRequirementsNV = (PFN_vkGetPipelineIndirectMemoryRequirementsNV)load(context, "vkGetPipelineIndirectMemoryRequirementsNV"); +#endif /* 
defined(VK_NV_device_generated_commands_compute) */ +#if defined(VK_NV_external_memory_rdma) + table->vkGetMemoryRemoteAddressNV = (PFN_vkGetMemoryRemoteAddressNV)load(context, "vkGetMemoryRemoteAddressNV"); +#endif /* defined(VK_NV_external_memory_rdma) */ +#if defined(VK_NV_external_memory_win32) + table->vkGetMemoryWin32HandleNV = (PFN_vkGetMemoryWin32HandleNV)load(context, "vkGetMemoryWin32HandleNV"); +#endif /* defined(VK_NV_external_memory_win32) */ +#if defined(VK_NV_fragment_shading_rate_enums) + table->vkCmdSetFragmentShadingRateEnumNV = (PFN_vkCmdSetFragmentShadingRateEnumNV)load(context, "vkCmdSetFragmentShadingRateEnumNV"); +#endif /* defined(VK_NV_fragment_shading_rate_enums) */ +#if defined(VK_NV_low_latency2) + table->vkGetLatencyTimingsNV = (PFN_vkGetLatencyTimingsNV)load(context, "vkGetLatencyTimingsNV"); + table->vkLatencySleepNV = (PFN_vkLatencySleepNV)load(context, "vkLatencySleepNV"); + table->vkQueueNotifyOutOfBandNV = (PFN_vkQueueNotifyOutOfBandNV)load(context, "vkQueueNotifyOutOfBandNV"); + table->vkSetLatencyMarkerNV = (PFN_vkSetLatencyMarkerNV)load(context, "vkSetLatencyMarkerNV"); + table->vkSetLatencySleepModeNV = (PFN_vkSetLatencySleepModeNV)load(context, "vkSetLatencySleepModeNV"); +#endif /* defined(VK_NV_low_latency2) */ +#if defined(VK_NV_memory_decompression) + table->vkCmdDecompressMemoryIndirectCountNV = (PFN_vkCmdDecompressMemoryIndirectCountNV)load(context, "vkCmdDecompressMemoryIndirectCountNV"); + table->vkCmdDecompressMemoryNV = (PFN_vkCmdDecompressMemoryNV)load(context, "vkCmdDecompressMemoryNV"); +#endif /* defined(VK_NV_memory_decompression) */ +#if defined(VK_NV_mesh_shader) + table->vkCmdDrawMeshTasksIndirectCountNV = (PFN_vkCmdDrawMeshTasksIndirectCountNV)load(context, "vkCmdDrawMeshTasksIndirectCountNV"); + table->vkCmdDrawMeshTasksIndirectNV = (PFN_vkCmdDrawMeshTasksIndirectNV)load(context, "vkCmdDrawMeshTasksIndirectNV"); + table->vkCmdDrawMeshTasksNV = (PFN_vkCmdDrawMeshTasksNV)load(context, "vkCmdDrawMeshTasksNV"); +#endif /* defined(VK_NV_mesh_shader) */ +#if defined(VK_NV_optical_flow) + table->vkBindOpticalFlowSessionImageNV = (PFN_vkBindOpticalFlowSessionImageNV)load(context, "vkBindOpticalFlowSessionImageNV"); + table->vkCmdOpticalFlowExecuteNV = (PFN_vkCmdOpticalFlowExecuteNV)load(context, "vkCmdOpticalFlowExecuteNV"); + table->vkCreateOpticalFlowSessionNV = (PFN_vkCreateOpticalFlowSessionNV)load(context, "vkCreateOpticalFlowSessionNV"); + table->vkDestroyOpticalFlowSessionNV = (PFN_vkDestroyOpticalFlowSessionNV)load(context, "vkDestroyOpticalFlowSessionNV"); +#endif /* defined(VK_NV_optical_flow) */ +#if defined(VK_NV_ray_tracing) + table->vkBindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV)load(context, "vkBindAccelerationStructureMemoryNV"); + table->vkCmdBuildAccelerationStructureNV = (PFN_vkCmdBuildAccelerationStructureNV)load(context, "vkCmdBuildAccelerationStructureNV"); + table->vkCmdCopyAccelerationStructureNV = (PFN_vkCmdCopyAccelerationStructureNV)load(context, "vkCmdCopyAccelerationStructureNV"); + table->vkCmdTraceRaysNV = (PFN_vkCmdTraceRaysNV)load(context, "vkCmdTraceRaysNV"); + table->vkCmdWriteAccelerationStructuresPropertiesNV = (PFN_vkCmdWriteAccelerationStructuresPropertiesNV)load(context, "vkCmdWriteAccelerationStructuresPropertiesNV"); + table->vkCompileDeferredNV = (PFN_vkCompileDeferredNV)load(context, "vkCompileDeferredNV"); + table->vkCreateAccelerationStructureNV = (PFN_vkCreateAccelerationStructureNV)load(context, "vkCreateAccelerationStructureNV"); + 
table->vkCreateRayTracingPipelinesNV = (PFN_vkCreateRayTracingPipelinesNV)load(context, "vkCreateRayTracingPipelinesNV"); + table->vkDestroyAccelerationStructureNV = (PFN_vkDestroyAccelerationStructureNV)load(context, "vkDestroyAccelerationStructureNV"); + table->vkGetAccelerationStructureHandleNV = (PFN_vkGetAccelerationStructureHandleNV)load(context, "vkGetAccelerationStructureHandleNV"); + table->vkGetAccelerationStructureMemoryRequirementsNV = (PFN_vkGetAccelerationStructureMemoryRequirementsNV)load(context, "vkGetAccelerationStructureMemoryRequirementsNV"); + table->vkGetRayTracingShaderGroupHandlesNV = (PFN_vkGetRayTracingShaderGroupHandlesNV)load(context, "vkGetRayTracingShaderGroupHandlesNV"); +#endif /* defined(VK_NV_ray_tracing) */ +#if defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 + table->vkCmdSetExclusiveScissorEnableNV = (PFN_vkCmdSetExclusiveScissorEnableNV)load(context, "vkCmdSetExclusiveScissorEnableNV"); +#endif /* defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 */ +#if defined(VK_NV_scissor_exclusive) + table->vkCmdSetExclusiveScissorNV = (PFN_vkCmdSetExclusiveScissorNV)load(context, "vkCmdSetExclusiveScissorNV"); +#endif /* defined(VK_NV_scissor_exclusive) */ +#if defined(VK_NV_shading_rate_image) + table->vkCmdBindShadingRateImageNV = (PFN_vkCmdBindShadingRateImageNV)load(context, "vkCmdBindShadingRateImageNV"); + table->vkCmdSetCoarseSampleOrderNV = (PFN_vkCmdSetCoarseSampleOrderNV)load(context, "vkCmdSetCoarseSampleOrderNV"); + table->vkCmdSetViewportShadingRatePaletteNV = (PFN_vkCmdSetViewportShadingRatePaletteNV)load(context, "vkCmdSetViewportShadingRatePaletteNV"); +#endif /* defined(VK_NV_shading_rate_image) */ +#if defined(VK_QCOM_tile_properties) + table->vkGetDynamicRenderingTilePropertiesQCOM = (PFN_vkGetDynamicRenderingTilePropertiesQCOM)load(context, "vkGetDynamicRenderingTilePropertiesQCOM"); + table->vkGetFramebufferTilePropertiesQCOM = (PFN_vkGetFramebufferTilePropertiesQCOM)load(context, "vkGetFramebufferTilePropertiesQCOM"); +#endif /* defined(VK_QCOM_tile_properties) */ +#if defined(VK_QNX_external_memory_screen_buffer) + table->vkGetScreenBufferPropertiesQNX = (PFN_vkGetScreenBufferPropertiesQNX)load(context, "vkGetScreenBufferPropertiesQNX"); +#endif /* defined(VK_QNX_external_memory_screen_buffer) */ +#if defined(VK_VALVE_descriptor_set_host_mapping) + table->vkGetDescriptorSetHostMappingVALVE = (PFN_vkGetDescriptorSetHostMappingVALVE)load(context, "vkGetDescriptorSetHostMappingVALVE"); + table->vkGetDescriptorSetLayoutHostMappingInfoVALVE = (PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE)load(context, "vkGetDescriptorSetLayoutHostMappingInfoVALVE"); +#endif /* defined(VK_VALVE_descriptor_set_host_mapping) */ +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) + table->vkCmdBindVertexBuffers2EXT = (PFN_vkCmdBindVertexBuffers2EXT)load(context, "vkCmdBindVertexBuffers2EXT"); + table->vkCmdSetCullModeEXT = (PFN_vkCmdSetCullModeEXT)load(context, "vkCmdSetCullModeEXT"); + table->vkCmdSetDepthBoundsTestEnableEXT = (PFN_vkCmdSetDepthBoundsTestEnableEXT)load(context, "vkCmdSetDepthBoundsTestEnableEXT"); + table->vkCmdSetDepthCompareOpEXT = (PFN_vkCmdSetDepthCompareOpEXT)load(context, "vkCmdSetDepthCompareOpEXT"); + table->vkCmdSetDepthTestEnableEXT = (PFN_vkCmdSetDepthTestEnableEXT)load(context, "vkCmdSetDepthTestEnableEXT"); + table->vkCmdSetDepthWriteEnableEXT = (PFN_vkCmdSetDepthWriteEnableEXT)load(context, "vkCmdSetDepthWriteEnableEXT"); + 
table->vkCmdSetFrontFaceEXT = (PFN_vkCmdSetFrontFaceEXT)load(context, "vkCmdSetFrontFaceEXT"); + table->vkCmdSetPrimitiveTopologyEXT = (PFN_vkCmdSetPrimitiveTopologyEXT)load(context, "vkCmdSetPrimitiveTopologyEXT"); + table->vkCmdSetScissorWithCountEXT = (PFN_vkCmdSetScissorWithCountEXT)load(context, "vkCmdSetScissorWithCountEXT"); + table->vkCmdSetStencilOpEXT = (PFN_vkCmdSetStencilOpEXT)load(context, "vkCmdSetStencilOpEXT"); + table->vkCmdSetStencilTestEnableEXT = (PFN_vkCmdSetStencilTestEnableEXT)load(context, "vkCmdSetStencilTestEnableEXT"); + table->vkCmdSetViewportWithCountEXT = (PFN_vkCmdSetViewportWithCountEXT)load(context, "vkCmdSetViewportWithCountEXT"); +#endif /* (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) */ +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) + table->vkCmdSetDepthBiasEnableEXT = (PFN_vkCmdSetDepthBiasEnableEXT)load(context, "vkCmdSetDepthBiasEnableEXT"); + table->vkCmdSetLogicOpEXT = (PFN_vkCmdSetLogicOpEXT)load(context, "vkCmdSetLogicOpEXT"); + table->vkCmdSetPatchControlPointsEXT = (PFN_vkCmdSetPatchControlPointsEXT)load(context, "vkCmdSetPatchControlPointsEXT"); + table->vkCmdSetPrimitiveRestartEnableEXT = (PFN_vkCmdSetPrimitiveRestartEnableEXT)load(context, "vkCmdSetPrimitiveRestartEnableEXT"); + table->vkCmdSetRasterizerDiscardEnableEXT = (PFN_vkCmdSetRasterizerDiscardEnableEXT)load(context, "vkCmdSetRasterizerDiscardEnableEXT"); +#endif /* (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) */ +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) + table->vkCmdSetAlphaToCoverageEnableEXT = (PFN_vkCmdSetAlphaToCoverageEnableEXT)load(context, "vkCmdSetAlphaToCoverageEnableEXT"); + table->vkCmdSetAlphaToOneEnableEXT = (PFN_vkCmdSetAlphaToOneEnableEXT)load(context, "vkCmdSetAlphaToOneEnableEXT"); + table->vkCmdSetColorBlendAdvancedEXT = (PFN_vkCmdSetColorBlendAdvancedEXT)load(context, "vkCmdSetColorBlendAdvancedEXT"); + table->vkCmdSetColorBlendEnableEXT = (PFN_vkCmdSetColorBlendEnableEXT)load(context, "vkCmdSetColorBlendEnableEXT"); + table->vkCmdSetColorBlendEquationEXT = (PFN_vkCmdSetColorBlendEquationEXT)load(context, "vkCmdSetColorBlendEquationEXT"); + table->vkCmdSetColorWriteMaskEXT = (PFN_vkCmdSetColorWriteMaskEXT)load(context, "vkCmdSetColorWriteMaskEXT"); + table->vkCmdSetConservativeRasterizationModeEXT = (PFN_vkCmdSetConservativeRasterizationModeEXT)load(context, "vkCmdSetConservativeRasterizationModeEXT"); + table->vkCmdSetDepthClampEnableEXT = (PFN_vkCmdSetDepthClampEnableEXT)load(context, "vkCmdSetDepthClampEnableEXT"); + table->vkCmdSetDepthClipEnableEXT = (PFN_vkCmdSetDepthClipEnableEXT)load(context, "vkCmdSetDepthClipEnableEXT"); + table->vkCmdSetDepthClipNegativeOneToOneEXT = (PFN_vkCmdSetDepthClipNegativeOneToOneEXT)load(context, "vkCmdSetDepthClipNegativeOneToOneEXT"); + table->vkCmdSetExtraPrimitiveOverestimationSizeEXT = (PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT)load(context, "vkCmdSetExtraPrimitiveOverestimationSizeEXT"); + table->vkCmdSetLineRasterizationModeEXT = (PFN_vkCmdSetLineRasterizationModeEXT)load(context, "vkCmdSetLineRasterizationModeEXT"); + table->vkCmdSetLineStippleEnableEXT = (PFN_vkCmdSetLineStippleEnableEXT)load(context, "vkCmdSetLineStippleEnableEXT"); + table->vkCmdSetLogicOpEnableEXT = (PFN_vkCmdSetLogicOpEnableEXT)load(context, "vkCmdSetLogicOpEnableEXT"); + table->vkCmdSetPolygonModeEXT = (PFN_vkCmdSetPolygonModeEXT)load(context, "vkCmdSetPolygonModeEXT"); + 
table->vkCmdSetProvokingVertexModeEXT = (PFN_vkCmdSetProvokingVertexModeEXT)load(context, "vkCmdSetProvokingVertexModeEXT"); + table->vkCmdSetRasterizationSamplesEXT = (PFN_vkCmdSetRasterizationSamplesEXT)load(context, "vkCmdSetRasterizationSamplesEXT"); + table->vkCmdSetRasterizationStreamEXT = (PFN_vkCmdSetRasterizationStreamEXT)load(context, "vkCmdSetRasterizationStreamEXT"); + table->vkCmdSetSampleLocationsEnableEXT = (PFN_vkCmdSetSampleLocationsEnableEXT)load(context, "vkCmdSetSampleLocationsEnableEXT"); + table->vkCmdSetSampleMaskEXT = (PFN_vkCmdSetSampleMaskEXT)load(context, "vkCmdSetSampleMaskEXT"); + table->vkCmdSetTessellationDomainOriginEXT = (PFN_vkCmdSetTessellationDomainOriginEXT)load(context, "vkCmdSetTessellationDomainOriginEXT"); +#endif /* (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) + table->vkCmdSetViewportWScalingEnableNV = (PFN_vkCmdSetViewportWScalingEnableNV)load(context, "vkCmdSetViewportWScalingEnableNV"); +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) + table->vkCmdSetViewportSwizzleNV = (PFN_vkCmdSetViewportSwizzleNV)load(context, "vkCmdSetViewportSwizzleNV"); +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) + table->vkCmdSetCoverageToColorEnableNV = (PFN_vkCmdSetCoverageToColorEnableNV)load(context, "vkCmdSetCoverageToColorEnableNV"); + table->vkCmdSetCoverageToColorLocationNV = (PFN_vkCmdSetCoverageToColorLocationNV)load(context, "vkCmdSetCoverageToColorLocationNV"); +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) + table->vkCmdSetCoverageModulationModeNV = (PFN_vkCmdSetCoverageModulationModeNV)load(context, "vkCmdSetCoverageModulationModeNV"); + table->vkCmdSetCoverageModulationTableEnableNV = (PFN_vkCmdSetCoverageModulationTableEnableNV)load(context, "vkCmdSetCoverageModulationTableEnableNV"); + table->vkCmdSetCoverageModulationTableNV = (PFN_vkCmdSetCoverageModulationTableNV)load(context, "vkCmdSetCoverageModulationTableNV"); +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) + table->vkCmdSetShadingRateImageEnableNV = (PFN_vkCmdSetShadingRateImageEnableNV)load(context, "vkCmdSetShadingRateImageEnableNV"); +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && 
defined(VK_NV_shading_rate_image)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) + table->vkCmdSetRepresentativeFragmentTestEnableNV = (PFN_vkCmdSetRepresentativeFragmentTestEnableNV)load(context, "vkCmdSetRepresentativeFragmentTestEnableNV"); +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) + table->vkCmdSetCoverageReductionModeNV = (PFN_vkCmdSetCoverageReductionModeNV)load(context, "vkCmdSetCoverageReductionModeNV"); +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) */ +#if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) + table->vkGetDeviceGroupSurfacePresentModes2EXT = (PFN_vkGetDeviceGroupSurfacePresentModes2EXT)load(context, "vkGetDeviceGroupSurfacePresentModes2EXT"); +#endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */ +#if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) + table->vkGetImageSubresourceLayout2EXT = (PFN_vkGetImageSubresourceLayout2EXT)load(context, "vkGetImageSubresourceLayout2EXT"); +#endif /* (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) */ +#if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) + table->vkCmdSetVertexInputEXT = (PFN_vkCmdSetVertexInputEXT)load(context, "vkCmdSetVertexInputEXT"); +#endif /* (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) */ +#if (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) + table->vkCmdPushDescriptorSetWithTemplateKHR = (PFN_vkCmdPushDescriptorSetWithTemplateKHR)load(context, "vkCmdPushDescriptorSetWithTemplateKHR"); +#endif /* (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) */ +#if (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) + table->vkGetDeviceGroupPresentCapabilitiesKHR = (PFN_vkGetDeviceGroupPresentCapabilitiesKHR)load(context, "vkGetDeviceGroupPresentCapabilitiesKHR"); + table->vkGetDeviceGroupSurfacePresentModesKHR = (PFN_vkGetDeviceGroupSurfacePresentModesKHR)load(context, "vkGetDeviceGroupSurfacePresentModesKHR"); +#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ +#if (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) + table->vkAcquireNextImage2KHR = (PFN_vkAcquireNextImage2KHR)load(context, "vkAcquireNextImage2KHR"); +#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) 
|| (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ + /* VOLK_GENERATE_LOAD_DEVICE_TABLE */ +} + +#ifdef __GNUC__ +#ifdef VOLK_DEFAULT_VISIBILITY +# pragma GCC visibility push(default) +#else +# pragma GCC visibility push(hidden) +#endif +#endif + +/* VOLK_GENERATE_PROTOTYPES_C */ +#if defined(VK_VERSION_1_0) +PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers; +PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets; +PFN_vkAllocateMemory vkAllocateMemory; +PFN_vkBeginCommandBuffer vkBeginCommandBuffer; +PFN_vkBindBufferMemory vkBindBufferMemory; +PFN_vkBindImageMemory vkBindImageMemory; +PFN_vkCmdBeginQuery vkCmdBeginQuery; +PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass; +PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets; +PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer; +PFN_vkCmdBindPipeline vkCmdBindPipeline; +PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers; +PFN_vkCmdBlitImage vkCmdBlitImage; +PFN_vkCmdClearAttachments vkCmdClearAttachments; +PFN_vkCmdClearColorImage vkCmdClearColorImage; +PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage; +PFN_vkCmdCopyBuffer vkCmdCopyBuffer; +PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage; +PFN_vkCmdCopyImage vkCmdCopyImage; +PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer; +PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults; +PFN_vkCmdDispatch vkCmdDispatch; +PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect; +PFN_vkCmdDraw vkCmdDraw; +PFN_vkCmdDrawIndexed vkCmdDrawIndexed; +PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect; +PFN_vkCmdDrawIndirect vkCmdDrawIndirect; +PFN_vkCmdEndQuery vkCmdEndQuery; +PFN_vkCmdEndRenderPass vkCmdEndRenderPass; +PFN_vkCmdExecuteCommands vkCmdExecuteCommands; +PFN_vkCmdFillBuffer vkCmdFillBuffer; +PFN_vkCmdNextSubpass vkCmdNextSubpass; +PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier; +PFN_vkCmdPushConstants vkCmdPushConstants; +PFN_vkCmdResetEvent vkCmdResetEvent; +PFN_vkCmdResetQueryPool vkCmdResetQueryPool; +PFN_vkCmdResolveImage vkCmdResolveImage; +PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants; +PFN_vkCmdSetDepthBias vkCmdSetDepthBias; +PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds; +PFN_vkCmdSetEvent vkCmdSetEvent; +PFN_vkCmdSetLineWidth vkCmdSetLineWidth; +PFN_vkCmdSetScissor vkCmdSetScissor; +PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask; +PFN_vkCmdSetStencilReference vkCmdSetStencilReference; +PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask; +PFN_vkCmdSetViewport vkCmdSetViewport; +PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer; +PFN_vkCmdWaitEvents vkCmdWaitEvents; +PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp; +PFN_vkCreateBuffer vkCreateBuffer; +PFN_vkCreateBufferView vkCreateBufferView; +PFN_vkCreateCommandPool vkCreateCommandPool; +PFN_vkCreateComputePipelines vkCreateComputePipelines; +PFN_vkCreateDescriptorPool vkCreateDescriptorPool; +PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout; +PFN_vkCreateDevice vkCreateDevice; +PFN_vkCreateEvent vkCreateEvent; +PFN_vkCreateFence vkCreateFence; +PFN_vkCreateFramebuffer vkCreateFramebuffer; +PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines; +PFN_vkCreateImage vkCreateImage; +PFN_vkCreateImageView vkCreateImageView; +PFN_vkCreateInstance vkCreateInstance; +PFN_vkCreatePipelineCache vkCreatePipelineCache; +PFN_vkCreatePipelineLayout vkCreatePipelineLayout; +PFN_vkCreateQueryPool vkCreateQueryPool; +PFN_vkCreateRenderPass vkCreateRenderPass; +PFN_vkCreateSampler vkCreateSampler; +PFN_vkCreateSemaphore vkCreateSemaphore; +PFN_vkCreateShaderModule vkCreateShaderModule; +PFN_vkDestroyBuffer 
vkDestroyBuffer; +PFN_vkDestroyBufferView vkDestroyBufferView; +PFN_vkDestroyCommandPool vkDestroyCommandPool; +PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool; +PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout; +PFN_vkDestroyDevice vkDestroyDevice; +PFN_vkDestroyEvent vkDestroyEvent; +PFN_vkDestroyFence vkDestroyFence; +PFN_vkDestroyFramebuffer vkDestroyFramebuffer; +PFN_vkDestroyImage vkDestroyImage; +PFN_vkDestroyImageView vkDestroyImageView; +PFN_vkDestroyInstance vkDestroyInstance; +PFN_vkDestroyPipeline vkDestroyPipeline; +PFN_vkDestroyPipelineCache vkDestroyPipelineCache; +PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout; +PFN_vkDestroyQueryPool vkDestroyQueryPool; +PFN_vkDestroyRenderPass vkDestroyRenderPass; +PFN_vkDestroySampler vkDestroySampler; +PFN_vkDestroySemaphore vkDestroySemaphore; +PFN_vkDestroyShaderModule vkDestroyShaderModule; +PFN_vkDeviceWaitIdle vkDeviceWaitIdle; +PFN_vkEndCommandBuffer vkEndCommandBuffer; +PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties; +PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties; +PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties; +PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties; +PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices; +PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges; +PFN_vkFreeCommandBuffers vkFreeCommandBuffers; +PFN_vkFreeDescriptorSets vkFreeDescriptorSets; +PFN_vkFreeMemory vkFreeMemory; +PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; +PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment; +PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr; +PFN_vkGetDeviceQueue vkGetDeviceQueue; +PFN_vkGetEventStatus vkGetEventStatus; +PFN_vkGetFenceStatus vkGetFenceStatus; +PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements; +PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements; +PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout; +PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr; +PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures; +PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties; +PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties; +PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties; +PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties; +PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties; +PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties; +PFN_vkGetPipelineCacheData vkGetPipelineCacheData; +PFN_vkGetQueryPoolResults vkGetQueryPoolResults; +PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity; +PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges; +PFN_vkMapMemory vkMapMemory; +PFN_vkMergePipelineCaches vkMergePipelineCaches; +PFN_vkQueueBindSparse vkQueueBindSparse; +PFN_vkQueueSubmit vkQueueSubmit; +PFN_vkQueueWaitIdle vkQueueWaitIdle; +PFN_vkResetCommandBuffer vkResetCommandBuffer; +PFN_vkResetCommandPool vkResetCommandPool; +PFN_vkResetDescriptorPool vkResetDescriptorPool; +PFN_vkResetEvent vkResetEvent; +PFN_vkResetFences vkResetFences; +PFN_vkSetEvent vkSetEvent; +PFN_vkUnmapMemory vkUnmapMemory; +PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets; +PFN_vkWaitForFences vkWaitForFences; +#endif /* defined(VK_VERSION_1_0) */ +#if defined(VK_VERSION_1_1) +PFN_vkBindBufferMemory2 vkBindBufferMemory2; +PFN_vkBindImageMemory2 
vkBindImageMemory2; +PFN_vkCmdDispatchBase vkCmdDispatchBase; +PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask; +PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate; +PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion; +PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate; +PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion; +PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion; +PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups; +PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2; +PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport; +PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures; +PFN_vkGetDeviceQueue2 vkGetDeviceQueue2; +PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2; +PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2; +PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties; +PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties; +PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties; +PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2; +PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2; +PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2; +PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2; +PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2; +PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2; +PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2; +PFN_vkTrimCommandPool vkTrimCommandPool; +PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate; +#endif /* defined(VK_VERSION_1_1) */ +#if defined(VK_VERSION_1_2) +PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2; +PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount; +PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount; +PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2; +PFN_vkCmdNextSubpass2 vkCmdNextSubpass2; +PFN_vkCreateRenderPass2 vkCreateRenderPass2; +PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress; +PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress; +PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress; +PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue; +PFN_vkResetQueryPool vkResetQueryPool; +PFN_vkSignalSemaphore vkSignalSemaphore; +PFN_vkWaitSemaphores vkWaitSemaphores; +#endif /* defined(VK_VERSION_1_2) */ +#if defined(VK_VERSION_1_3) +PFN_vkCmdBeginRendering vkCmdBeginRendering; +PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2; +PFN_vkCmdBlitImage2 vkCmdBlitImage2; +PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2; +PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2; +PFN_vkCmdCopyImage2 vkCmdCopyImage2; +PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2; +PFN_vkCmdEndRendering vkCmdEndRendering; +PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2; +PFN_vkCmdResetEvent2 vkCmdResetEvent2; +PFN_vkCmdResolveImage2 vkCmdResolveImage2; +PFN_vkCmdSetCullMode vkCmdSetCullMode; +PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable; +PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable; +PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp; +PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable; +PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable; 
+PFN_vkCmdSetEvent2 vkCmdSetEvent2; +PFN_vkCmdSetFrontFace vkCmdSetFrontFace; +PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable; +PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology; +PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable; +PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount; +PFN_vkCmdSetStencilOp vkCmdSetStencilOp; +PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable; +PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount; +PFN_vkCmdWaitEvents2 vkCmdWaitEvents2; +PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2; +PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot; +PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot; +PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements; +PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements; +PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements; +PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties; +PFN_vkGetPrivateData vkGetPrivateData; +PFN_vkQueueSubmit2 vkQueueSubmit2; +PFN_vkSetPrivateData vkSetPrivateData; +#endif /* defined(VK_VERSION_1_3) */ +#if defined(VK_AMDX_shader_enqueue) +PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX; +PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX; +PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX; +PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX; +PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX; +PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX; +PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX; +#endif /* defined(VK_AMDX_shader_enqueue) */ +#if defined(VK_AMD_buffer_marker) +PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD; +#endif /* defined(VK_AMD_buffer_marker) */ +#if defined(VK_AMD_display_native_hdr) +PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD; +#endif /* defined(VK_AMD_display_native_hdr) */ +#if defined(VK_AMD_draw_indirect_count) +PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD; +PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD; +#endif /* defined(VK_AMD_draw_indirect_count) */ +#if defined(VK_AMD_shader_info) +PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD; +#endif /* defined(VK_AMD_shader_info) */ +#if defined(VK_ANDROID_external_memory_android_hardware_buffer) +PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID; +PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID; +#endif /* defined(VK_ANDROID_external_memory_android_hardware_buffer) */ +#if defined(VK_EXT_acquire_drm_display) +PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT; +PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT; +#endif /* defined(VK_EXT_acquire_drm_display) */ +#if defined(VK_EXT_acquire_xlib_display) +PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT; +PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT; +#endif /* defined(VK_EXT_acquire_xlib_display) */ +#if defined(VK_EXT_attachment_feedback_loop_dynamic_state) +PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT; +#endif /* defined(VK_EXT_attachment_feedback_loop_dynamic_state) */ +#if defined(VK_EXT_buffer_device_address) +PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT; +#endif /* defined(VK_EXT_buffer_device_address) */ +#if defined(VK_EXT_calibrated_timestamps) +PFN_vkGetCalibratedTimestampsEXT 
vkGetCalibratedTimestampsEXT; +PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; +#endif /* defined(VK_EXT_calibrated_timestamps) */ +#if defined(VK_EXT_color_write_enable) +PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT; +#endif /* defined(VK_EXT_color_write_enable) */ +#if defined(VK_EXT_conditional_rendering) +PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT; +PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT; +#endif /* defined(VK_EXT_conditional_rendering) */ +#if defined(VK_EXT_debug_marker) +PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT; +PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT; +PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT; +PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT; +PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT; +#endif /* defined(VK_EXT_debug_marker) */ +#if defined(VK_EXT_debug_report) +PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT; +PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT; +PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT; +#endif /* defined(VK_EXT_debug_report) */ +#if defined(VK_EXT_debug_utils) +PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT; +PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT; +PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT; +PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT; +PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT; +PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT; +PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT; +PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT; +PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT; +PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT; +PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT; +#endif /* defined(VK_EXT_debug_utils) */ +#if defined(VK_EXT_depth_bias_control) +PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT; +#endif /* defined(VK_EXT_depth_bias_control) */ +#if defined(VK_EXT_descriptor_buffer) +PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT; +PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT; +PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT; +PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT; +PFN_vkGetDescriptorEXT vkGetDescriptorEXT; +PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT; +PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT; +PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT; +PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT; +PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT; +#endif /* defined(VK_EXT_descriptor_buffer) */ +#if defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) +PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT; +#endif /* defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) */ +#if defined(VK_EXT_device_fault) +PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT; +#endif /* defined(VK_EXT_device_fault) */ +#if 
defined(VK_EXT_direct_mode_display) +PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT; +#endif /* defined(VK_EXT_direct_mode_display) */ +#if defined(VK_EXT_directfb_surface) +PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT; +PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT; +#endif /* defined(VK_EXT_directfb_surface) */ +#if defined(VK_EXT_discard_rectangles) +PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT; +#endif /* defined(VK_EXT_discard_rectangles) */ +#if defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 +PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT; +PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT; +#endif /* defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 */ +#if defined(VK_EXT_display_control) +PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT; +PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT; +PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT; +PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT; +#endif /* defined(VK_EXT_display_control) */ +#if defined(VK_EXT_display_surface_counter) +PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT; +#endif /* defined(VK_EXT_display_surface_counter) */ +#if defined(VK_EXT_external_memory_host) +PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT; +#endif /* defined(VK_EXT_external_memory_host) */ +#if defined(VK_EXT_full_screen_exclusive) +PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT; +PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT; +PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT; +#endif /* defined(VK_EXT_full_screen_exclusive) */ +#if defined(VK_EXT_hdr_metadata) +PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT; +#endif /* defined(VK_EXT_hdr_metadata) */ +#if defined(VK_EXT_headless_surface) +PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT; +#endif /* defined(VK_EXT_headless_surface) */ +#if defined(VK_EXT_host_image_copy) +PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT; +PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT; +PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT; +PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT; +#endif /* defined(VK_EXT_host_image_copy) */ +#if defined(VK_EXT_host_query_reset) +PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT; +#endif /* defined(VK_EXT_host_query_reset) */ +#if defined(VK_EXT_image_drm_format_modifier) +PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT; +#endif /* defined(VK_EXT_image_drm_format_modifier) */ +#if defined(VK_EXT_line_rasterization) +PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT; +#endif /* defined(VK_EXT_line_rasterization) */ +#if defined(VK_EXT_mesh_shader) +PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT; +PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT; +PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT; +#endif /* defined(VK_EXT_mesh_shader) */ +#if defined(VK_EXT_metal_objects) +PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT; +#endif /* defined(VK_EXT_metal_objects) */ +#if defined(VK_EXT_metal_surface) +PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT; +#endif /* defined(VK_EXT_metal_surface) */ +#if defined(VK_EXT_multi_draw) +PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT; +PFN_vkCmdDrawMultiIndexedEXT 
vkCmdDrawMultiIndexedEXT; +#endif /* defined(VK_EXT_multi_draw) */ +#if defined(VK_EXT_opacity_micromap) +PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT; +PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT; +PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT; +PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT; +PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT; +PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT; +PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT; +PFN_vkCopyMicromapEXT vkCopyMicromapEXT; +PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT; +PFN_vkCreateMicromapEXT vkCreateMicromapEXT; +PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT; +PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT; +PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT; +PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT; +#endif /* defined(VK_EXT_opacity_micromap) */ +#if defined(VK_EXT_pageable_device_local_memory) +PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT; +#endif /* defined(VK_EXT_pageable_device_local_memory) */ +#if defined(VK_EXT_pipeline_properties) +PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT; +#endif /* defined(VK_EXT_pipeline_properties) */ +#if defined(VK_EXT_private_data) +PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT; +PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT; +PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT; +PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT; +#endif /* defined(VK_EXT_private_data) */ +#if defined(VK_EXT_sample_locations) +PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT; +PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT; +#endif /* defined(VK_EXT_sample_locations) */ +#if defined(VK_EXT_shader_module_identifier) +PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT; +PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT; +#endif /* defined(VK_EXT_shader_module_identifier) */ +#if defined(VK_EXT_shader_object) +PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT; +PFN_vkCreateShadersEXT vkCreateShadersEXT; +PFN_vkDestroyShaderEXT vkDestroyShaderEXT; +PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT; +#endif /* defined(VK_EXT_shader_object) */ +#if defined(VK_EXT_swapchain_maintenance1) +PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT; +#endif /* defined(VK_EXT_swapchain_maintenance1) */ +#if defined(VK_EXT_tooling_info) +PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT; +#endif /* defined(VK_EXT_tooling_info) */ +#if defined(VK_EXT_transform_feedback) +PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT; +PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT; +PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT; +PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT; +PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT; +PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT; +#endif /* defined(VK_EXT_transform_feedback) */ +#if defined(VK_EXT_validation_cache) +PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT; +PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT; +PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT; +PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT; +#endif /* defined(VK_EXT_validation_cache) */ +#if defined(VK_FUCHSIA_buffer_collection) +PFN_vkCreateBufferCollectionFUCHSIA 
vkCreateBufferCollectionFUCHSIA; +PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA; +PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA; +PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA; +PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA; +#endif /* defined(VK_FUCHSIA_buffer_collection) */ +#if defined(VK_FUCHSIA_external_memory) +PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA; +PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA; +#endif /* defined(VK_FUCHSIA_external_memory) */ +#if defined(VK_FUCHSIA_external_semaphore) +PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA; +PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA; +#endif /* defined(VK_FUCHSIA_external_semaphore) */ +#if defined(VK_FUCHSIA_imagepipe_surface) +PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA; +#endif /* defined(VK_FUCHSIA_imagepipe_surface) */ +#if defined(VK_GGP_stream_descriptor_surface) +PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP; +#endif /* defined(VK_GGP_stream_descriptor_surface) */ +#if defined(VK_GOOGLE_display_timing) +PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE; +PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE; +#endif /* defined(VK_GOOGLE_display_timing) */ +#if defined(VK_HUAWEI_cluster_culling_shader) +PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI; +PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI; +#endif /* defined(VK_HUAWEI_cluster_culling_shader) */ +#if defined(VK_HUAWEI_invocation_mask) +PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI; +#endif /* defined(VK_HUAWEI_invocation_mask) */ +#if defined(VK_HUAWEI_subpass_shading) +PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI; +PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI; +#endif /* defined(VK_HUAWEI_subpass_shading) */ +#if defined(VK_INTEL_performance_query) +PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL; +PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL; +PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL; +PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL; +PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL; +PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL; +PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL; +PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL; +PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL; +#endif /* defined(VK_INTEL_performance_query) */ +#if defined(VK_KHR_acceleration_structure) +PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR; +PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR; +PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR; +PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR; +PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR; +PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR; +PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR; 
+PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR; +PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR; +PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR; +PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR; +PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR; +PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR; +PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR; +PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR; +PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR; +#endif /* defined(VK_KHR_acceleration_structure) */ +#if defined(VK_KHR_android_surface) +PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR; +#endif /* defined(VK_KHR_android_surface) */ +#if defined(VK_KHR_bind_memory2) +PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR; +PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR; +#endif /* defined(VK_KHR_bind_memory2) */ +#if defined(VK_KHR_buffer_device_address) +PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR; +PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR; +PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR; +#endif /* defined(VK_KHR_buffer_device_address) */ +#if defined(VK_KHR_cooperative_matrix) +PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR; +#endif /* defined(VK_KHR_cooperative_matrix) */ +#if defined(VK_KHR_copy_commands2) +PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR; +PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR; +PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR; +PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR; +PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR; +PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR; +#endif /* defined(VK_KHR_copy_commands2) */ +#if defined(VK_KHR_create_renderpass2) +PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR; +PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR; +PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR; +PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR; +#endif /* defined(VK_KHR_create_renderpass2) */ +#if defined(VK_KHR_deferred_host_operations) +PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR; +PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR; +PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR; +PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR; +PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR; +#endif /* defined(VK_KHR_deferred_host_operations) */ +#if defined(VK_KHR_descriptor_update_template) +PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR; +PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR; +PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR; +#endif /* defined(VK_KHR_descriptor_update_template) */ +#if defined(VK_KHR_device_group) +PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR; +PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR; +PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR; +#endif /* defined(VK_KHR_device_group) */ +#if defined(VK_KHR_device_group_creation) +PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR; +#endif /* defined(VK_KHR_device_group_creation) */ +#if 
defined(VK_KHR_display) +PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR; +PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR; +PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR; +PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR; +PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR; +PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR; +PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR; +#endif /* defined(VK_KHR_display) */ +#if defined(VK_KHR_display_swapchain) +PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR; +#endif /* defined(VK_KHR_display_swapchain) */ +#if defined(VK_KHR_draw_indirect_count) +PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR; +PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR; +#endif /* defined(VK_KHR_draw_indirect_count) */ +#if defined(VK_KHR_dynamic_rendering) +PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR; +PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR; +#endif /* defined(VK_KHR_dynamic_rendering) */ +#if defined(VK_KHR_external_fence_capabilities) +PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR; +#endif /* defined(VK_KHR_external_fence_capabilities) */ +#if defined(VK_KHR_external_fence_fd) +PFN_vkGetFenceFdKHR vkGetFenceFdKHR; +PFN_vkImportFenceFdKHR vkImportFenceFdKHR; +#endif /* defined(VK_KHR_external_fence_fd) */ +#if defined(VK_KHR_external_fence_win32) +PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR; +PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR; +#endif /* defined(VK_KHR_external_fence_win32) */ +#if defined(VK_KHR_external_memory_capabilities) +PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR; +#endif /* defined(VK_KHR_external_memory_capabilities) */ +#if defined(VK_KHR_external_memory_fd) +PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR; +PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR; +#endif /* defined(VK_KHR_external_memory_fd) */ +#if defined(VK_KHR_external_memory_win32) +PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR; +PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR; +#endif /* defined(VK_KHR_external_memory_win32) */ +#if defined(VK_KHR_external_semaphore_capabilities) +PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; +#endif /* defined(VK_KHR_external_semaphore_capabilities) */ +#if defined(VK_KHR_external_semaphore_fd) +PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR; +PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR; +#endif /* defined(VK_KHR_external_semaphore_fd) */ +#if defined(VK_KHR_external_semaphore_win32) +PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR; +PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR; +#endif /* defined(VK_KHR_external_semaphore_win32) */ +#if defined(VK_KHR_fragment_shading_rate) +PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR; +PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR; +#endif /* defined(VK_KHR_fragment_shading_rate) */ +#if defined(VK_KHR_get_display_properties2) +PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR; +PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR; +PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR; 
+PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR; +#endif /* defined(VK_KHR_get_display_properties2) */ +#if defined(VK_KHR_get_memory_requirements2) +PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR; +PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR; +PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR; +#endif /* defined(VK_KHR_get_memory_requirements2) */ +#if defined(VK_KHR_get_physical_device_properties2) +PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR; +PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR; +PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR; +PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR; +PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR; +PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR; +PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR; +#endif /* defined(VK_KHR_get_physical_device_properties2) */ +#if defined(VK_KHR_get_surface_capabilities2) +PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR; +PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR; +#endif /* defined(VK_KHR_get_surface_capabilities2) */ +#if defined(VK_KHR_maintenance1) +PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR; +#endif /* defined(VK_KHR_maintenance1) */ +#if defined(VK_KHR_maintenance3) +PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR; +#endif /* defined(VK_KHR_maintenance3) */ +#if defined(VK_KHR_maintenance4) +PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR; +PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR; +PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR; +#endif /* defined(VK_KHR_maintenance4) */ +#if defined(VK_KHR_maintenance5) +PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR; +PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR; +PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR; +PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR; +#endif /* defined(VK_KHR_maintenance5) */ +#if defined(VK_KHR_map_memory2) +PFN_vkMapMemory2KHR vkMapMemory2KHR; +PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR; +#endif /* defined(VK_KHR_map_memory2) */ +#if defined(VK_KHR_performance_query) +PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR; +PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR; +PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR; +PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR; +#endif /* defined(VK_KHR_performance_query) */ +#if defined(VK_KHR_pipeline_executable_properties) +PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR; +PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR; +PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR; +#endif /* defined(VK_KHR_pipeline_executable_properties) */ +#if defined(VK_KHR_present_wait) +PFN_vkWaitForPresentKHR vkWaitForPresentKHR; +#endif /* 
defined(VK_KHR_present_wait) */ +#if defined(VK_KHR_push_descriptor) +PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR; +#endif /* defined(VK_KHR_push_descriptor) */ +#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) +PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR; +#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */ +#if defined(VK_KHR_ray_tracing_pipeline) +PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR; +PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR; +PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR; +PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR; +PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR; +PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR; +PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR; +#endif /* defined(VK_KHR_ray_tracing_pipeline) */ +#if defined(VK_KHR_sampler_ycbcr_conversion) +PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR; +PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR; +#endif /* defined(VK_KHR_sampler_ycbcr_conversion) */ +#if defined(VK_KHR_shared_presentable_image) +PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR; +#endif /* defined(VK_KHR_shared_presentable_image) */ +#if defined(VK_KHR_surface) +PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR; +PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR; +PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR; +PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR; +PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR; +#endif /* defined(VK_KHR_surface) */ +#if defined(VK_KHR_swapchain) +PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR; +PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR; +PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR; +PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR; +PFN_vkQueuePresentKHR vkQueuePresentKHR; +#endif /* defined(VK_KHR_swapchain) */ +#if defined(VK_KHR_synchronization2) +PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR; +PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR; +PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR; +PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR; +PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR; +PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR; +#endif /* defined(VK_KHR_synchronization2) */ +#if defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) +PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD; +#endif /* defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) */ +#if defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) +PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV; +#endif /* defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) */ +#if defined(VK_KHR_timeline_semaphore) +PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR; +PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR; +PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR; +#endif /* defined(VK_KHR_timeline_semaphore) */ +#if defined(VK_KHR_video_decode_queue) +PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR; +#endif /* defined(VK_KHR_video_decode_queue) */ +#if defined(VK_KHR_video_encode_queue) +PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR; +PFN_vkGetEncodedVideoSessionParametersKHR 
vkGetEncodedVideoSessionParametersKHR; +PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR; +#endif /* defined(VK_KHR_video_encode_queue) */ +#if defined(VK_KHR_video_queue) +PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR; +PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR; +PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR; +PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR; +PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR; +PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR; +PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR; +PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR; +PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR; +PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR; +PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR; +PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR; +#endif /* defined(VK_KHR_video_queue) */ +#if defined(VK_KHR_wayland_surface) +PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR; +PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR; +#endif /* defined(VK_KHR_wayland_surface) */ +#if defined(VK_KHR_win32_surface) +PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR; +PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR; +#endif /* defined(VK_KHR_win32_surface) */ +#if defined(VK_KHR_xcb_surface) +PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR; +PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR; +#endif /* defined(VK_KHR_xcb_surface) */ +#if defined(VK_KHR_xlib_surface) +PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR; +PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR; +#endif /* defined(VK_KHR_xlib_surface) */ +#if defined(VK_MVK_ios_surface) +PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK; +#endif /* defined(VK_MVK_ios_surface) */ +#if defined(VK_MVK_macos_surface) +PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK; +#endif /* defined(VK_MVK_macos_surface) */ +#if defined(VK_NN_vi_surface) +PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN; +#endif /* defined(VK_NN_vi_surface) */ +#if defined(VK_NVX_binary_import) +PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX; +PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX; +PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX; +PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX; +PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX; +#endif /* defined(VK_NVX_binary_import) */ +#if defined(VK_NVX_image_view_handle) +PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX; +PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX; +#endif /* defined(VK_NVX_image_view_handle) */ +#if defined(VK_NV_acquire_winrt_display) +PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV; +PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV; +#endif /* defined(VK_NV_acquire_winrt_display) */ +#if defined(VK_NV_clip_space_w_scaling) +PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV; +#endif /* defined(VK_NV_clip_space_w_scaling) */ +#if defined(VK_NV_cooperative_matrix) +PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV; +#endif /* defined(VK_NV_cooperative_matrix) */ +#if defined(VK_NV_copy_memory_indirect) +PFN_vkCmdCopyMemoryIndirectNV 
vkCmdCopyMemoryIndirectNV; +PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV; +#endif /* defined(VK_NV_copy_memory_indirect) */ +#if defined(VK_NV_coverage_reduction_mode) +PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV; +#endif /* defined(VK_NV_coverage_reduction_mode) */ +#if defined(VK_NV_cuda_kernel_launch) +PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV; +PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV; +PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV; +PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV; +PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV; +PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV; +#endif /* defined(VK_NV_cuda_kernel_launch) */ +#if defined(VK_NV_device_diagnostic_checkpoints) +PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV; +PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV; +#endif /* defined(VK_NV_device_diagnostic_checkpoints) */ +#if defined(VK_NV_device_generated_commands) +PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV; +PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV; +PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV; +PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV; +PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV; +PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV; +#endif /* defined(VK_NV_device_generated_commands) */ +#if defined(VK_NV_device_generated_commands_compute) +PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV; +PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV; +PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV; +#endif /* defined(VK_NV_device_generated_commands_compute) */ +#if defined(VK_NV_external_memory_capabilities) +PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV; +#endif /* defined(VK_NV_external_memory_capabilities) */ +#if defined(VK_NV_external_memory_rdma) +PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV; +#endif /* defined(VK_NV_external_memory_rdma) */ +#if defined(VK_NV_external_memory_win32) +PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV; +#endif /* defined(VK_NV_external_memory_win32) */ +#if defined(VK_NV_fragment_shading_rate_enums) +PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV; +#endif /* defined(VK_NV_fragment_shading_rate_enums) */ +#if defined(VK_NV_low_latency2) +PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV; +PFN_vkLatencySleepNV vkLatencySleepNV; +PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV; +PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV; +PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV; +#endif /* defined(VK_NV_low_latency2) */ +#if defined(VK_NV_memory_decompression) +PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV; +PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV; +#endif /* defined(VK_NV_memory_decompression) */ +#if defined(VK_NV_mesh_shader) +PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV; +PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV; +PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV; +#endif /* defined(VK_NV_mesh_shader) */ +#if defined(VK_NV_optical_flow) +PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV; +PFN_vkCmdOpticalFlowExecuteNV 
vkCmdOpticalFlowExecuteNV; +PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV; +PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV; +PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV; +#endif /* defined(VK_NV_optical_flow) */ +#if defined(VK_NV_ray_tracing) +PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV; +PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV; +PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV; +PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV; +PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV; +PFN_vkCompileDeferredNV vkCompileDeferredNV; +PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV; +PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV; +PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV; +PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV; +PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV; +PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV; +#endif /* defined(VK_NV_ray_tracing) */ +#if defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 +PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV; +#endif /* defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 */ +#if defined(VK_NV_scissor_exclusive) +PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV; +#endif /* defined(VK_NV_scissor_exclusive) */ +#if defined(VK_NV_shading_rate_image) +PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV; +PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV; +PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV; +#endif /* defined(VK_NV_shading_rate_image) */ +#if defined(VK_QCOM_tile_properties) +PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM; +PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM; +#endif /* defined(VK_QCOM_tile_properties) */ +#if defined(VK_QNX_external_memory_screen_buffer) +PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX; +#endif /* defined(VK_QNX_external_memory_screen_buffer) */ +#if defined(VK_QNX_screen_surface) +PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX; +PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX; +#endif /* defined(VK_QNX_screen_surface) */ +#if defined(VK_VALVE_descriptor_set_host_mapping) +PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE; +PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE; +#endif /* defined(VK_VALVE_descriptor_set_host_mapping) */ +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) +PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT; +PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT; +PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT; +PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT; +PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT; +PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT; +PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT; +PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT; +PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT; +PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT; 
+PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT; +PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT; +#endif /* (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) */ +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) +PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT; +PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT; +PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT; +PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT; +PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT; +#endif /* (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) */ +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) +PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT; +PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT; +PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT; +PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT; +PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT; +PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT; +PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT; +PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT; +PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT; +PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT; +PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT; +PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT; +PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT; +PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT; +PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT; +PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT; +PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT; +PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT; +PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT; +PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT; +PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT; +#endif /* (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) +PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV; +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) +PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV; +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) +PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV; +PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV; +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && 
defined(VK_NV_fragment_coverage_to_color)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) +PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV; +PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV; +PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV; +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) +PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV; +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) +PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV; +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) +PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV; +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) */ +#if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) +PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT; +#endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */ +#if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) +PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT; +#endif /* (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) */ +#if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) +PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT; +#endif /* (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) */ +#if (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) +PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR; +#endif /* (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) */ +#if (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) +PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR; 
+PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR; +PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR; +#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ +#if (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) +PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR; +#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ +/* VOLK_GENERATE_PROTOTYPES_C */ + +#ifdef __GNUC__ +# pragma GCC visibility pop +#endif + +#ifdef __cplusplus +} +#endif +/* clang-format on */ diff --git a/MacroLibX/third_party/volk.h b/MacroLibX/third_party/volk.h new file mode 100644 index 0000000..47fbed2 --- /dev/null +++ b/MacroLibX/third_party/volk.h @@ -0,0 +1,1985 @@ +/** + * volk + * + * Copyright (C) 2018-2023, by Arseny Kapoulkine (arseny.kapoulkine@gmail.com) + * Report bugs and download new versions at https://github.com/zeux/volk + * + * This library is distributed under the MIT License. See notice at the end of this file. + */ +/* clang-format off */ +#ifndef VOLK_H_ +#define VOLK_H_ + +#if defined(VULKAN_H_) && !defined(VK_NO_PROTOTYPES) +# error To use volk, you need to define VK_NO_PROTOTYPES before including vulkan.h +#endif + +/* VOLK_GENERATE_VERSION_DEFINE */ +#define VOLK_HEADER_VERSION 270 +/* VOLK_GENERATE_VERSION_DEFINE */ + +#ifndef VK_NO_PROTOTYPES +# define VK_NO_PROTOTYPES +#endif + +#ifndef VULKAN_H_ +# ifdef VOLK_VULKAN_H_PATH +# include VOLK_VULKAN_H_PATH +# elif defined(VK_USE_PLATFORM_WIN32_KHR) +# include <vulkan/vk_platform.h> +# include <vulkan/vulkan_core.h> + + /* When VK_USE_PLATFORM_WIN32_KHR is defined, instead of including vulkan.h directly, we include individual parts of the SDK + * This is necessary to avoid including <windows.h> which is very heavy - it takes 200ms to parse without WIN32_LEAN_AND_MEAN + * and 100ms to parse with it. vulkan_win32.h only needs a few symbols that are easy to redefine ourselves. + */ + typedef unsigned long DWORD; + typedef const wchar_t* LPCWSTR; + typedef void* HANDLE; + typedef struct HINSTANCE__* HINSTANCE; + typedef struct HWND__* HWND; + typedef struct HMONITOR__* HMONITOR; + typedef struct _SECURITY_ATTRIBUTES SECURITY_ATTRIBUTES; + +# include <vulkan/vulkan_win32.h> + +# ifdef VK_ENABLE_BETA_EXTENSIONS +# include <vulkan/vulkan_beta.h> +# endif +# else +# include <vulkan/vulkan.h> +# endif +#endif + +/* Disable several extensions on earlier SDKs because later SDKs introduce a backwards incompatible change to function signatures */ +#if VK_HEADER_VERSION < 140 +# undef VK_NVX_image_view_handle +#endif +#if VK_HEADER_VERSION < 184 +# undef VK_HUAWEI_subpass_shading +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +struct VolkDeviceTable; + +/** + * Initialize library by loading Vulkan loader; call this function before creating the Vulkan instance. + * + * Returns VK_SUCCESS on success and VK_ERROR_INITIALIZATION_FAILED otherwise. + */ +VkResult volkInitialize(void); + +/** + * Initialize library by providing a custom handler to load global symbols. + * + * This function can be used instead of volkInitialize. + * The handler function pointer will be asked to load global Vulkan symbols which require no instance + * (such as vkCreateInstance, vkEnumerateInstance* and vkEnumerateInstanceVersion if available). + */ +void volkInitializeCustom(PFN_vkGetInstanceProcAddr handler); + +/** + * Finalize library by unloading Vulkan loader and resetting global symbols to NULL.
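+ *
+ * A minimal end-to-end sketch of how these entry points fit together (an illustration only, not part
+ * of the generated volk code; instance_ci, device_ci and physical_device are placeholders for values
+ * the application sets up itself, and error handling is elided):
+ *
+ *   volkInitialize();                                            // before any other Vulkan call
+ *   vkCreateInstance(&instance_ci, NULL, &instance);
+ *   volkLoadInstance(instance);                                  // loads instance- and device-level symbols
+ *   vkCreateDevice(physical_device, &device_ci, NULL, &device);
+ *   volkLoadDevice(device);                                      // optional fast path bound to a single VkDevice
+ *   ...
+ *   vkDestroyDevice(device, NULL);
+ *   vkDestroyInstance(instance, NULL);
+ *   volkFinalize();                                              // unloads the loader, resets globals to NULL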
+ */ +void volkFinalize(void); + +/** + * Get Vulkan instance version supported by the Vulkan loader, or 0 if Vulkan isn't supported + * + * Returns 0 if volkInitialize wasn't called or failed. + */ +uint32_t volkGetInstanceVersion(void); + +/** + * Load global function pointers using application-created VkInstance; call this function after creating the Vulkan instance. + */ +void volkLoadInstance(VkInstance instance); + +/** + * Load global function pointers using application-created VkInstance; call this function after creating the Vulkan instance. + * Skips loading device-based function pointers, requires usage of volkLoadDevice afterwards. + */ +void volkLoadInstanceOnly(VkInstance instance); + +/** + * Load global function pointers using application-created VkDevice; call this function after creating the Vulkan device. + * + * Note: this is not suitable for applications that want to use multiple VkDevice objects concurrently. + */ +void volkLoadDevice(VkDevice device); + +/** + * Return last VkInstance for which global function pointers have been loaded via volkLoadInstance(), + * or VK_NULL_HANDLE if volkLoadInstance() has not been called. + */ +VkInstance volkGetLoadedInstance(void); + +/** + * Return last VkDevice for which global function pointers have been loaded via volkLoadDevice(), + * or VK_NULL_HANDLE if volkLoadDevice() has not been called. + */ +VkDevice volkGetLoadedDevice(void); + +/** + * Load function pointers using application-created VkDevice into a table. + * Application should use function pointers from that table instead of using global function pointers. + */ +void volkLoadDeviceTable(struct VolkDeviceTable* table, VkDevice device); + +/** + * Device-specific function pointer table + */ +struct VolkDeviceTable +{ + /* VOLK_GENERATE_DEVICE_TABLE */ +#if defined(VK_VERSION_1_0) + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets; + PFN_vkAllocateMemory vkAllocateMemory; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer; + PFN_vkBindBufferMemory vkBindBufferMemory; + PFN_vkBindImageMemory vkBindImageMemory; + PFN_vkCmdBeginQuery vkCmdBeginQuery; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer; + PFN_vkCmdBindPipeline vkCmdBindPipeline; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers; + PFN_vkCmdBlitImage vkCmdBlitImage; + PFN_vkCmdClearAttachments vkCmdClearAttachments; + PFN_vkCmdClearColorImage vkCmdClearColorImage; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage; + PFN_vkCmdCopyImage vkCmdCopyImage; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults; + PFN_vkCmdDispatch vkCmdDispatch; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect; + PFN_vkCmdDraw vkCmdDraw; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect; + PFN_vkCmdEndQuery vkCmdEndQuery; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands; + PFN_vkCmdFillBuffer vkCmdFillBuffer; + PFN_vkCmdNextSubpass vkCmdNextSubpass; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier; + PFN_vkCmdPushConstants vkCmdPushConstants; + PFN_vkCmdResetEvent vkCmdResetEvent; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool; + PFN_vkCmdResolveImage vkCmdResolveImage; + 
PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds; + PFN_vkCmdSetEvent vkCmdSetEvent; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth; + PFN_vkCmdSetScissor vkCmdSetScissor; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask; + PFN_vkCmdSetViewport vkCmdSetViewport; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer; + PFN_vkCmdWaitEvents vkCmdWaitEvents; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp; + PFN_vkCreateBuffer vkCreateBuffer; + PFN_vkCreateBufferView vkCreateBufferView; + PFN_vkCreateCommandPool vkCreateCommandPool; + PFN_vkCreateComputePipelines vkCreateComputePipelines; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout; + PFN_vkCreateEvent vkCreateEvent; + PFN_vkCreateFence vkCreateFence; + PFN_vkCreateFramebuffer vkCreateFramebuffer; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines; + PFN_vkCreateImage vkCreateImage; + PFN_vkCreateImageView vkCreateImageView; + PFN_vkCreatePipelineCache vkCreatePipelineCache; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout; + PFN_vkCreateQueryPool vkCreateQueryPool; + PFN_vkCreateRenderPass vkCreateRenderPass; + PFN_vkCreateSampler vkCreateSampler; + PFN_vkCreateSemaphore vkCreateSemaphore; + PFN_vkCreateShaderModule vkCreateShaderModule; + PFN_vkDestroyBuffer vkDestroyBuffer; + PFN_vkDestroyBufferView vkDestroyBufferView; + PFN_vkDestroyCommandPool vkDestroyCommandPool; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout; + PFN_vkDestroyDevice vkDestroyDevice; + PFN_vkDestroyEvent vkDestroyEvent; + PFN_vkDestroyFence vkDestroyFence; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer; + PFN_vkDestroyImage vkDestroyImage; + PFN_vkDestroyImageView vkDestroyImageView; + PFN_vkDestroyPipeline vkDestroyPipeline; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout; + PFN_vkDestroyQueryPool vkDestroyQueryPool; + PFN_vkDestroyRenderPass vkDestroyRenderPass; + PFN_vkDestroySampler vkDestroySampler; + PFN_vkDestroySemaphore vkDestroySemaphore; + PFN_vkDestroyShaderModule vkDestroyShaderModule; + PFN_vkDeviceWaitIdle vkDeviceWaitIdle; + PFN_vkEndCommandBuffer vkEndCommandBuffer; + PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets; + PFN_vkFreeMemory vkFreeMemory; + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; + PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment; + PFN_vkGetDeviceQueue vkGetDeviceQueue; + PFN_vkGetEventStatus vkGetEventStatus; + PFN_vkGetFenceStatus vkGetFenceStatus; + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements; + PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout; + PFN_vkGetPipelineCacheData vkGetPipelineCacheData; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity; + PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges; + PFN_vkMapMemory vkMapMemory; + PFN_vkMergePipelineCaches vkMergePipelineCaches; + PFN_vkQueueBindSparse vkQueueBindSparse; + PFN_vkQueueSubmit vkQueueSubmit; + PFN_vkQueueWaitIdle vkQueueWaitIdle; + PFN_vkResetCommandBuffer 
vkResetCommandBuffer; + PFN_vkResetCommandPool vkResetCommandPool; + PFN_vkResetDescriptorPool vkResetDescriptorPool; + PFN_vkResetEvent vkResetEvent; + PFN_vkResetFences vkResetFences; + PFN_vkSetEvent vkSetEvent; + PFN_vkUnmapMemory vkUnmapMemory; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets; + PFN_vkWaitForFences vkWaitForFences; +#endif /* defined(VK_VERSION_1_0) */ +#if defined(VK_VERSION_1_1) + PFN_vkBindBufferMemory2 vkBindBufferMemory2; + PFN_vkBindImageMemory2 vkBindImageMemory2; + PFN_vkCmdDispatchBase vkCmdDispatchBase; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2; + PFN_vkTrimCommandPool vkTrimCommandPool; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate; +#endif /* defined(VK_VERSION_1_1) */ +#if defined(VK_VERSION_1_2) + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount; + PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2; + PFN_vkCreateRenderPass2 vkCreateRenderPass2; + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress; + PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress; + PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress; + PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue; + PFN_vkResetQueryPool vkResetQueryPool; + PFN_vkSignalSemaphore vkSignalSemaphore; + PFN_vkWaitSemaphores vkWaitSemaphores; +#endif /* defined(VK_VERSION_1_2) */ +#if defined(VK_VERSION_1_3) + PFN_vkCmdBeginRendering vkCmdBeginRendering; + PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2; + PFN_vkCmdBlitImage2 vkCmdBlitImage2; + PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2; + PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2; + PFN_vkCmdCopyImage2 vkCmdCopyImage2; + PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2; + PFN_vkCmdEndRendering vkCmdEndRendering; + PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2; + PFN_vkCmdResetEvent2 vkCmdResetEvent2; + PFN_vkCmdResolveImage2 vkCmdResolveImage2; + PFN_vkCmdSetCullMode vkCmdSetCullMode; + PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable; + PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable; + PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp; + PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable; + PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable; + PFN_vkCmdSetEvent2 vkCmdSetEvent2; + PFN_vkCmdSetFrontFace vkCmdSetFrontFace; + PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable; + PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology; + PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable; + PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount; + PFN_vkCmdSetStencilOp vkCmdSetStencilOp; + PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable; + PFN_vkCmdSetViewportWithCount 
vkCmdSetViewportWithCount; + PFN_vkCmdWaitEvents2 vkCmdWaitEvents2; + PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2; + PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot; + PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot; + PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements; + PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements; + PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements; + PFN_vkGetPrivateData vkGetPrivateData; + PFN_vkQueueSubmit2 vkQueueSubmit2; + PFN_vkSetPrivateData vkSetPrivateData; +#endif /* defined(VK_VERSION_1_3) */ +#if defined(VK_AMDX_shader_enqueue) + PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX; + PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX; + PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX; + PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX; + PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX; + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX; + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX; +#endif /* defined(VK_AMDX_shader_enqueue) */ +#if defined(VK_AMD_buffer_marker) + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD; +#endif /* defined(VK_AMD_buffer_marker) */ +#if defined(VK_AMD_display_native_hdr) + PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD; +#endif /* defined(VK_AMD_display_native_hdr) */ +#if defined(VK_AMD_draw_indirect_count) + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD; + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD; +#endif /* defined(VK_AMD_draw_indirect_count) */ +#if defined(VK_AMD_shader_info) + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD; +#endif /* defined(VK_AMD_shader_info) */ +#if defined(VK_ANDROID_external_memory_android_hardware_buffer) + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID; + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID; +#endif /* defined(VK_ANDROID_external_memory_android_hardware_buffer) */ +#if defined(VK_EXT_attachment_feedback_loop_dynamic_state) + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT; +#endif /* defined(VK_EXT_attachment_feedback_loop_dynamic_state) */ +#if defined(VK_EXT_buffer_device_address) + PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT; +#endif /* defined(VK_EXT_buffer_device_address) */ +#if defined(VK_EXT_calibrated_timestamps) + PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT; +#endif /* defined(VK_EXT_calibrated_timestamps) */ +#if defined(VK_EXT_color_write_enable) + PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT; +#endif /* defined(VK_EXT_color_write_enable) */ +#if defined(VK_EXT_conditional_rendering) + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT; + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT; +#endif /* defined(VK_EXT_conditional_rendering) */ +#if defined(VK_EXT_debug_marker) + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT; + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT; + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT; +#endif /* defined(VK_EXT_debug_marker) */ +#if defined(VK_EXT_depth_bias_control) + 
PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT; +#endif /* defined(VK_EXT_depth_bias_control) */ +#if defined(VK_EXT_descriptor_buffer) + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT; + PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT; + PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT; + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT; + PFN_vkGetDescriptorEXT vkGetDescriptorEXT; + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT; + PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT; + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT; + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT; + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT; +#endif /* defined(VK_EXT_descriptor_buffer) */ +#if defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) + PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT; +#endif /* defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) */ +#if defined(VK_EXT_device_fault) + PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT; +#endif /* defined(VK_EXT_device_fault) */ +#if defined(VK_EXT_discard_rectangles) + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT; +#endif /* defined(VK_EXT_discard_rectangles) */ +#if defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 + PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT; + PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT; +#endif /* defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 */ +#if defined(VK_EXT_display_control) + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT; +#endif /* defined(VK_EXT_display_control) */ +#if defined(VK_EXT_external_memory_host) + PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT; +#endif /* defined(VK_EXT_external_memory_host) */ +#if defined(VK_EXT_full_screen_exclusive) + PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT; + PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT; +#endif /* defined(VK_EXT_full_screen_exclusive) */ +#if defined(VK_EXT_hdr_metadata) + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT; +#endif /* defined(VK_EXT_hdr_metadata) */ +#if defined(VK_EXT_host_image_copy) + PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT; + PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT; + PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT; + PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT; +#endif /* defined(VK_EXT_host_image_copy) */ +#if defined(VK_EXT_host_query_reset) + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT; +#endif /* defined(VK_EXT_host_query_reset) */ +#if defined(VK_EXT_image_drm_format_modifier) + PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT; +#endif /* defined(VK_EXT_image_drm_format_modifier) */ +#if defined(VK_EXT_line_rasterization) + PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT; 
+#endif /* defined(VK_EXT_line_rasterization) */ +#if defined(VK_EXT_mesh_shader) + PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT; + PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT; + PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT; +#endif /* defined(VK_EXT_mesh_shader) */ +#if defined(VK_EXT_metal_objects) + PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT; +#endif /* defined(VK_EXT_metal_objects) */ +#if defined(VK_EXT_multi_draw) + PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT; + PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT; +#endif /* defined(VK_EXT_multi_draw) */ +#if defined(VK_EXT_opacity_micromap) + PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT; + PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT; + PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT; + PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT; + PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT; + PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT; + PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT; + PFN_vkCopyMicromapEXT vkCopyMicromapEXT; + PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT; + PFN_vkCreateMicromapEXT vkCreateMicromapEXT; + PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT; + PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT; + PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT; + PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT; +#endif /* defined(VK_EXT_opacity_micromap) */ +#if defined(VK_EXT_pageable_device_local_memory) + PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT; +#endif /* defined(VK_EXT_pageable_device_local_memory) */ +#if defined(VK_EXT_pipeline_properties) + PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT; +#endif /* defined(VK_EXT_pipeline_properties) */ +#if defined(VK_EXT_private_data) + PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT; + PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT; + PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT; + PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT; +#endif /* defined(VK_EXT_private_data) */ +#if defined(VK_EXT_sample_locations) + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT; +#endif /* defined(VK_EXT_sample_locations) */ +#if defined(VK_EXT_shader_module_identifier) + PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT; + PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT; +#endif /* defined(VK_EXT_shader_module_identifier) */ +#if defined(VK_EXT_shader_object) + PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT; + PFN_vkCreateShadersEXT vkCreateShadersEXT; + PFN_vkDestroyShaderEXT vkDestroyShaderEXT; + PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT; +#endif /* defined(VK_EXT_shader_object) */ +#if defined(VK_EXT_swapchain_maintenance1) + PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT; +#endif /* defined(VK_EXT_swapchain_maintenance1) */ +#if defined(VK_EXT_transform_feedback) + PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT; + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT; +#endif /* defined(VK_EXT_transform_feedback) */ +#if defined(VK_EXT_validation_cache) + PFN_vkCreateValidationCacheEXT 
vkCreateValidationCacheEXT;
+	PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT;
+	PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT;
+	PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT;
+#endif /* defined(VK_EXT_validation_cache) */
+#if defined(VK_FUCHSIA_buffer_collection)
+	PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA;
+	PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA;
+	PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA;
+	PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA;
+	PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA;
+#endif /* defined(VK_FUCHSIA_buffer_collection) */
+#if defined(VK_FUCHSIA_external_memory)
+	PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA;
+	PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA;
+#endif /* defined(VK_FUCHSIA_external_memory) */
+#if defined(VK_FUCHSIA_external_semaphore)
+	PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA;
+	PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA;
+#endif /* defined(VK_FUCHSIA_external_semaphore) */
+#if defined(VK_GOOGLE_display_timing)
+	PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE;
+	PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE;
+#endif /* defined(VK_GOOGLE_display_timing) */
+#if defined(VK_HUAWEI_cluster_culling_shader)
+	PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI;
+	PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI;
+#endif /* defined(VK_HUAWEI_cluster_culling_shader) */
+#if defined(VK_HUAWEI_invocation_mask)
+	PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI;
+#endif /* defined(VK_HUAWEI_invocation_mask) */
+#if defined(VK_HUAWEI_subpass_shading)
+	PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI;
+	PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI;
+#endif /* defined(VK_HUAWEI_subpass_shading) */
+#if defined(VK_INTEL_performance_query)
+	PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL;
+	PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL;
+	PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL;
+	PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL;
+	PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL;
+	PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL;
+	PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL;
+	PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL;
+	PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL;
+#endif /* defined(VK_INTEL_performance_query) */
+#if defined(VK_KHR_acceleration_structure)
+	PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR;
+	PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR;
+	PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR;
+	PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR;
+	PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR;
+	PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR;
+	PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR;
+	PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR;
+	PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR;
+	PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR;
+	PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR;
+	PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR;
+	PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR;
+	PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR;
+	PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR;
+	PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR;
+#endif /* defined(VK_KHR_acceleration_structure) */
+#if defined(VK_KHR_bind_memory2)
+	PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
+	PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
+#endif /* defined(VK_KHR_bind_memory2) */
+#if defined(VK_KHR_buffer_device_address)
+	PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR;
+	PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR;
+	PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR;
+#endif /* defined(VK_KHR_buffer_device_address) */
+#if defined(VK_KHR_copy_commands2)
+	PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR;
+	PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR;
+	PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR;
+	PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR;
+	PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR;
+	PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR;
+#endif /* defined(VK_KHR_copy_commands2) */
+#if defined(VK_KHR_create_renderpass2)
+	PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR;
+	PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR;
+	PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR;
+	PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR;
+#endif /* defined(VK_KHR_create_renderpass2) */
+#if defined(VK_KHR_deferred_host_operations)
+	PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR;
+	PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR;
+	PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR;
+	PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR;
+	PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR;
+#endif /* defined(VK_KHR_deferred_host_operations) */
+#if defined(VK_KHR_descriptor_update_template)
+	PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR;
+	PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR;
+	PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR;
+#endif /* defined(VK_KHR_descriptor_update_template) */
+#if defined(VK_KHR_device_group)
+	PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR;
+	PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR;
+	PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR;
+#endif /* defined(VK_KHR_device_group) */
+#if defined(VK_KHR_display_swapchain)
+	PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR;
+#endif /* defined(VK_KHR_display_swapchain) */
+#if defined(VK_KHR_draw_indirect_count)
+	PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR;
+	PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR;
+#endif /* defined(VK_KHR_draw_indirect_count) */
+#if defined(VK_KHR_dynamic_rendering)
+	PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR;
+	PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR;
+#endif /* defined(VK_KHR_dynamic_rendering) */
+#if defined(VK_KHR_external_fence_fd)
+	PFN_vkGetFenceFdKHR vkGetFenceFdKHR;
+	PFN_vkImportFenceFdKHR vkImportFenceFdKHR;
+#endif /* defined(VK_KHR_external_fence_fd) */
+#if defined(VK_KHR_external_fence_win32)
+	PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR;
+	PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR;
+#endif /* defined(VK_KHR_external_fence_win32) */
+#if defined(VK_KHR_external_memory_fd)
+	PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR;
+	PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR;
+#endif /* defined(VK_KHR_external_memory_fd) */
+#if defined(VK_KHR_external_memory_win32)
+	PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR;
+	PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR;
+#endif /* defined(VK_KHR_external_memory_win32) */
+#if defined(VK_KHR_external_semaphore_fd)
+	PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR;
+	PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR;
+#endif /* defined(VK_KHR_external_semaphore_fd) */
+#if defined(VK_KHR_external_semaphore_win32)
+	PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR;
+	PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR;
+#endif /* defined(VK_KHR_external_semaphore_win32) */
+#if defined(VK_KHR_fragment_shading_rate)
+	PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR;
+#endif /* defined(VK_KHR_fragment_shading_rate) */
+#if defined(VK_KHR_get_memory_requirements2)
+	PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
+	PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
+	PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR;
+#endif /* defined(VK_KHR_get_memory_requirements2) */
+#if defined(VK_KHR_maintenance1)
+	PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR;
+#endif /* defined(VK_KHR_maintenance1) */
+#if defined(VK_KHR_maintenance3)
+	PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR;
+#endif /* defined(VK_KHR_maintenance3) */
+#if defined(VK_KHR_maintenance4)
+	PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR;
+	PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR;
+	PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR;
+#endif /* defined(VK_KHR_maintenance4) */
+#if defined(VK_KHR_maintenance5)
+	PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR;
+	PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR;
+	PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR;
+	PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR;
+#endif /* defined(VK_KHR_maintenance5) */
+#if defined(VK_KHR_map_memory2)
+	PFN_vkMapMemory2KHR vkMapMemory2KHR;
+	PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR;
+#endif /* defined(VK_KHR_map_memory2) */
+#if defined(VK_KHR_performance_query)
+	PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR;
+	PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR;
+#endif /* defined(VK_KHR_performance_query) */
+#if defined(VK_KHR_pipeline_executable_properties)
+	PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR;
+	PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR;
+	PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR;
+#endif /* defined(VK_KHR_pipeline_executable_properties) */
+#if defined(VK_KHR_present_wait)
+	PFN_vkWaitForPresentKHR vkWaitForPresentKHR;
+#endif /* defined(VK_KHR_present_wait) */
+#if defined(VK_KHR_push_descriptor)
+	PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR;
+#endif /* defined(VK_KHR_push_descriptor) */
+#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline)
+	PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR;
+#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */
+#if defined(VK_KHR_ray_tracing_pipeline)
+	PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR;
+	PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR;
+	PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR;
+	PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR;
+	PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR;
+	PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR;
+	PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR;
+#endif /* defined(VK_KHR_ray_tracing_pipeline) */
+#if defined(VK_KHR_sampler_ycbcr_conversion)
+	PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR;
+	PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR;
+#endif /* defined(VK_KHR_sampler_ycbcr_conversion) */
+#if defined(VK_KHR_shared_presentable_image)
+	PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR;
+#endif /* defined(VK_KHR_shared_presentable_image) */
+#if defined(VK_KHR_swapchain)
+	PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR;
+	PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR;
+	PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR;
+	PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR;
+	PFN_vkQueuePresentKHR vkQueuePresentKHR;
+#endif /* defined(VK_KHR_swapchain) */
+#if defined(VK_KHR_synchronization2)
+	PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR;
+	PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR;
+	PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR;
+	PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR;
+	PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR;
+	PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR;
+#endif /* defined(VK_KHR_synchronization2) */
+#if defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker)
+	PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD;
+#endif /* defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) */
+#if defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints)
+	PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV;
+#endif /* defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) */
+#if defined(VK_KHR_timeline_semaphore)
+	PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR;
+	PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR;
+	PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR;
+#endif /* defined(VK_KHR_timeline_semaphore) */
+#if defined(VK_KHR_video_decode_queue)
+	PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR;
+#endif /* defined(VK_KHR_video_decode_queue) */
+#if defined(VK_KHR_video_encode_queue)
+	PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR;
+	PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR;
+#endif /* defined(VK_KHR_video_encode_queue) */
+#if defined(VK_KHR_video_queue)
+	PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR;
+	PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR;
+	PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR;
+	PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR;
+	PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR;
+	PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR;
+	PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR;
+	PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR;
+	PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR;
+	PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR;
+#endif /* defined(VK_KHR_video_queue) */
+#if defined(VK_NVX_binary_import)
+	PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX;
+	PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX;
+	PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX;
+	PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX;
+	PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX;
+#endif /* defined(VK_NVX_binary_import) */
+#if defined(VK_NVX_image_view_handle)
+	PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX;
+	PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX;
+#endif /* defined(VK_NVX_image_view_handle) */
+#if defined(VK_NV_clip_space_w_scaling)
+	PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV;
+#endif /* defined(VK_NV_clip_space_w_scaling) */
+#if defined(VK_NV_copy_memory_indirect)
+	PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV;
+	PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV;
+#endif /* defined(VK_NV_copy_memory_indirect) */
+#if defined(VK_NV_cuda_kernel_launch)
+	PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV;
+	PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV;
+	PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV;
+	PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV;
+	PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV;
+	PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV;
+#endif /* defined(VK_NV_cuda_kernel_launch) */
+#if defined(VK_NV_device_diagnostic_checkpoints)
+	PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV;
+	PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV;
+#endif /* defined(VK_NV_device_diagnostic_checkpoints) */
+#if defined(VK_NV_device_generated_commands)
+	PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV;
+	PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV;
+	PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV;
+	PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV;
+	PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV;
+	PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV;
+#endif /* defined(VK_NV_device_generated_commands) */
+#if defined(VK_NV_device_generated_commands_compute)
+	PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV;
+	PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV;
+	PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV;
+#endif /* defined(VK_NV_device_generated_commands_compute) */
+#if defined(VK_NV_external_memory_rdma)
+	PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV;
+#endif /* defined(VK_NV_external_memory_rdma) */
+#if defined(VK_NV_external_memory_win32)
+	PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV;
+#endif /* defined(VK_NV_external_memory_win32) */
+#if defined(VK_NV_fragment_shading_rate_enums)
+	PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV;
+#endif /* defined(VK_NV_fragment_shading_rate_enums) */
+#if defined(VK_NV_low_latency2)
+	PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV;
+	PFN_vkLatencySleepNV vkLatencySleepNV;
+	PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV;
+	PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV;
+	PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV;
+#endif /* defined(VK_NV_low_latency2) */
+#if defined(VK_NV_memory_decompression)
+	PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV;
+	PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV;
+#endif /* defined(VK_NV_memory_decompression) */
+#if defined(VK_NV_mesh_shader)
+	PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV;
+	PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV;
+	PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV;
+#endif /* defined(VK_NV_mesh_shader) */
+#if defined(VK_NV_optical_flow)
+	PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV;
+	PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV;
+	PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV;
+	PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV;
+#endif /* defined(VK_NV_optical_flow) */
+#if defined(VK_NV_ray_tracing)
+	PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV;
+	PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV;
+	PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV;
+	PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV;
+	PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV;
+	PFN_vkCompileDeferredNV vkCompileDeferredNV;
+	PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV;
+	PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV;
+	PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV;
+	PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV;
+	PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV;
+	PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV;
+#endif /* defined(VK_NV_ray_tracing) */
+#if defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2
+	PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV;
+#endif /* defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 */
+#if defined(VK_NV_scissor_exclusive)
+	PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV;
+#endif /* defined(VK_NV_scissor_exclusive) */
+#if defined(VK_NV_shading_rate_image)
+	PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV;
+	PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV;
+	PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV;
+#endif /* defined(VK_NV_shading_rate_image) */
+#if defined(VK_QCOM_tile_properties)
+	PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM;
+	PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM;
+#endif /* defined(VK_QCOM_tile_properties) */
+#if defined(VK_QNX_external_memory_screen_buffer)
+	PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX;
+#endif /* defined(VK_QNX_external_memory_screen_buffer) */
+#if defined(VK_VALVE_descriptor_set_host_mapping)
+	PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE;
+	PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE;
+#endif /* defined(VK_VALVE_descriptor_set_host_mapping) */
+#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object))
+	PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT;
+	PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT;
+	PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT;
+	PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT;
+	PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT;
+	PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT;
+	PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT;
+	PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT;
+	PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT;
+	PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT;
+	PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT;
+	PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT;
+#endif /* (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) */
+#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object))
+	PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT;
+	PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT;
+	PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT;
+	PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT;
+	PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT;
+#endif /* (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) */
+#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object))
+	PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT;
+	PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT;
+	PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT;
+	PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT;
+	PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT;
+	PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT;
+	PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT;
+	PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT;
+	PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT;
+	PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT;
+	PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT;
+	PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT;
+	PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT;
+	PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT;
+	PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT;
+	PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT;
+	PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT;
+	PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT;
+	PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT;
+	PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT;
+	PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT;
+#endif /* (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling))
+	PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle))
+	PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color))
+	PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV;
+	PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples))
+	PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV;
+	PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV;
+	PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image))
+	PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test))
+	PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode))
+	PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) */
+#if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1))
+	PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT;
+#endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */
+#if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control))
+	PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT;
+#endif /* (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) */
+#if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state))
+	PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT;
+#endif /* (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) */
+#if (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template))
+	PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR;
+#endif /* (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) */
+#if (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1))
+	PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR;
+	PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR;
+#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */
+#if (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1))
+	PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR;
+#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */
+	/* VOLK_GENERATE_DEVICE_TABLE */
+};
+
+/* VOLK_GENERATE_PROTOTYPES_H */
+#if defined(VK_VERSION_1_0)
+extern PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers;
+extern PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets;
+extern PFN_vkAllocateMemory vkAllocateMemory;
+extern PFN_vkBeginCommandBuffer vkBeginCommandBuffer;
+extern PFN_vkBindBufferMemory vkBindBufferMemory;
+extern PFN_vkBindImageMemory vkBindImageMemory;
+extern PFN_vkCmdBeginQuery vkCmdBeginQuery;
+extern PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass;
+extern PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets;
+extern PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer;
+extern PFN_vkCmdBindPipeline vkCmdBindPipeline;
+extern PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers;
+extern PFN_vkCmdBlitImage vkCmdBlitImage;
+extern PFN_vkCmdClearAttachments vkCmdClearAttachments;
+extern PFN_vkCmdClearColorImage vkCmdClearColorImage;
+extern PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage;
+extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
+extern PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage;
+extern PFN_vkCmdCopyImage vkCmdCopyImage;
+extern PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer;
+extern PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults;
+extern PFN_vkCmdDispatch vkCmdDispatch;
+extern PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect;
+extern PFN_vkCmdDraw vkCmdDraw;
+extern PFN_vkCmdDrawIndexed vkCmdDrawIndexed;
+extern PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect;
+extern PFN_vkCmdDrawIndirect vkCmdDrawIndirect;
+extern PFN_vkCmdEndQuery vkCmdEndQuery;
+extern PFN_vkCmdEndRenderPass vkCmdEndRenderPass;
+extern PFN_vkCmdExecuteCommands vkCmdExecuteCommands;
+extern PFN_vkCmdFillBuffer vkCmdFillBuffer;
+extern PFN_vkCmdNextSubpass vkCmdNextSubpass;
+extern PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier;
+extern PFN_vkCmdPushConstants vkCmdPushConstants;
+extern PFN_vkCmdResetEvent vkCmdResetEvent;
+extern PFN_vkCmdResetQueryPool vkCmdResetQueryPool;
+extern PFN_vkCmdResolveImage vkCmdResolveImage;
+extern PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants;
+extern PFN_vkCmdSetDepthBias vkCmdSetDepthBias;
+extern PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds;
+extern PFN_vkCmdSetEvent vkCmdSetEvent;
+extern PFN_vkCmdSetLineWidth vkCmdSetLineWidth;
+extern PFN_vkCmdSetScissor vkCmdSetScissor;
+extern PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask;
+extern PFN_vkCmdSetStencilReference vkCmdSetStencilReference;
+extern PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask;
+extern PFN_vkCmdSetViewport vkCmdSetViewport;
+extern PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer;
+extern PFN_vkCmdWaitEvents vkCmdWaitEvents;
+extern PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp;
+extern PFN_vkCreateBuffer vkCreateBuffer;
+extern PFN_vkCreateBufferView vkCreateBufferView;
+extern PFN_vkCreateCommandPool vkCreateCommandPool;
+extern PFN_vkCreateComputePipelines vkCreateComputePipelines;
+extern PFN_vkCreateDescriptorPool vkCreateDescriptorPool;
+extern PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout;
+extern PFN_vkCreateDevice vkCreateDevice;
+extern PFN_vkCreateEvent vkCreateEvent;
+extern PFN_vkCreateFence vkCreateFence;
+extern PFN_vkCreateFramebuffer vkCreateFramebuffer;
+extern PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines;
+extern PFN_vkCreateImage vkCreateImage;
+extern PFN_vkCreateImageView vkCreateImageView;
+extern PFN_vkCreateInstance vkCreateInstance;
+extern PFN_vkCreatePipelineCache vkCreatePipelineCache;
+extern PFN_vkCreatePipelineLayout vkCreatePipelineLayout;
+extern PFN_vkCreateQueryPool vkCreateQueryPool;
+extern PFN_vkCreateRenderPass vkCreateRenderPass;
+extern PFN_vkCreateSampler vkCreateSampler;
+extern PFN_vkCreateSemaphore vkCreateSemaphore;
+extern PFN_vkCreateShaderModule vkCreateShaderModule;
+extern PFN_vkDestroyBuffer vkDestroyBuffer;
+extern PFN_vkDestroyBufferView vkDestroyBufferView;
+extern PFN_vkDestroyCommandPool vkDestroyCommandPool;
+extern PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool;
+extern PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout;
+extern PFN_vkDestroyDevice vkDestroyDevice;
+extern PFN_vkDestroyEvent vkDestroyEvent;
+extern PFN_vkDestroyFence vkDestroyFence;
+extern PFN_vkDestroyFramebuffer vkDestroyFramebuffer;
+extern PFN_vkDestroyImage vkDestroyImage;
+extern PFN_vkDestroyImageView vkDestroyImageView;
+extern PFN_vkDestroyInstance vkDestroyInstance;
+extern PFN_vkDestroyPipeline vkDestroyPipeline;
+extern PFN_vkDestroyPipelineCache vkDestroyPipelineCache;
+extern PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout;
+extern PFN_vkDestroyQueryPool vkDestroyQueryPool;
+extern PFN_vkDestroyRenderPass vkDestroyRenderPass;
+extern PFN_vkDestroySampler vkDestroySampler;
+extern PFN_vkDestroySemaphore vkDestroySemaphore;
+extern PFN_vkDestroyShaderModule vkDestroyShaderModule;
+extern PFN_vkDeviceWaitIdle vkDeviceWaitIdle;
+extern PFN_vkEndCommandBuffer vkEndCommandBuffer;
+extern PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties;
+extern PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties;
+extern PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties;
+extern PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties;
+extern PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices;
+extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
+extern PFN_vkFreeCommandBuffers vkFreeCommandBuffers;
+extern PFN_vkFreeDescriptorSets vkFreeDescriptorSets;
+extern PFN_vkFreeMemory vkFreeMemory;
+extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
+extern PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment;
+extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr;
+extern PFN_vkGetDeviceQueue vkGetDeviceQueue;
+extern PFN_vkGetEventStatus vkGetEventStatus;
+extern PFN_vkGetFenceStatus vkGetFenceStatus;
+extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
+extern PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements;
+extern PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout;
+extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
+extern PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures;
+extern PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties;
+extern PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties;
+extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
+extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
+extern PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties;
+extern PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties;
+extern PFN_vkGetPipelineCacheData vkGetPipelineCacheData;
+extern PFN_vkGetQueryPoolResults vkGetQueryPoolResults;
+extern PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity;
+extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
+extern PFN_vkMapMemory vkMapMemory;
+extern PFN_vkMergePipelineCaches vkMergePipelineCaches;
+extern PFN_vkQueueBindSparse vkQueueBindSparse;
+extern PFN_vkQueueSubmit vkQueueSubmit;
+extern PFN_vkQueueWaitIdle vkQueueWaitIdle;
+extern PFN_vkResetCommandBuffer vkResetCommandBuffer;
+extern PFN_vkResetCommandPool vkResetCommandPool;
+extern PFN_vkResetDescriptorPool vkResetDescriptorPool;
+extern PFN_vkResetEvent vkResetEvent;
+extern PFN_vkResetFences vkResetFences;
+extern PFN_vkSetEvent vkSetEvent;
+extern PFN_vkUnmapMemory vkUnmapMemory;
+extern PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets;
+extern PFN_vkWaitForFences vkWaitForFences;
+#endif /* defined(VK_VERSION_1_0) */
+#if defined(VK_VERSION_1_1)
+extern PFN_vkBindBufferMemory2 vkBindBufferMemory2;
+extern PFN_vkBindImageMemory2 vkBindImageMemory2;
+extern PFN_vkCmdDispatchBase vkCmdDispatchBase;
+extern PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask;
+extern PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate;
+extern PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion;
+extern PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate;
+extern PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion;
+extern PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion;
+extern PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups;
+extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2;
+extern PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport;
+extern PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures;
+extern PFN_vkGetDeviceQueue2 vkGetDeviceQueue2;
+extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2;
+extern PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2;
+extern PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties;
+extern PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties;
+extern PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties;
+extern PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2;
+extern PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2;
+extern PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2;
+extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2;
+extern PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2;
+extern PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2;
+extern PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2;
+extern PFN_vkTrimCommandPool vkTrimCommandPool;
+extern PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate;
+#endif /* defined(VK_VERSION_1_1) */
+#if defined(VK_VERSION_1_2)
+extern PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2;
+extern PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount;
+extern PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount;
+extern PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2;
+extern PFN_vkCmdNextSubpass2 vkCmdNextSubpass2;
+extern PFN_vkCreateRenderPass2 vkCreateRenderPass2;
+extern PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress;
+extern PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress;
+extern PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress;
+extern PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue;
+extern PFN_vkResetQueryPool vkResetQueryPool;
+extern PFN_vkSignalSemaphore vkSignalSemaphore;
+extern PFN_vkWaitSemaphores vkWaitSemaphores;
+#endif /* defined(VK_VERSION_1_2) */
+#if defined(VK_VERSION_1_3)
+extern PFN_vkCmdBeginRendering vkCmdBeginRendering;
+extern PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2;
+extern PFN_vkCmdBlitImage2 vkCmdBlitImage2;
+extern PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2;
+extern PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2;
+extern PFN_vkCmdCopyImage2 vkCmdCopyImage2;
+extern PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2;
+extern PFN_vkCmdEndRendering vkCmdEndRendering;
+extern PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2;
+extern PFN_vkCmdResetEvent2 vkCmdResetEvent2;
+extern PFN_vkCmdResolveImage2 vkCmdResolveImage2;
+extern PFN_vkCmdSetCullMode vkCmdSetCullMode;
+extern PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable;
+extern PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable;
+extern PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp;
+extern PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable;
+extern PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable;
+extern PFN_vkCmdSetEvent2 vkCmdSetEvent2;
+extern PFN_vkCmdSetFrontFace vkCmdSetFrontFace;
+extern PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable;
+extern PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology;
+extern PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable;
+extern PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount;
+extern PFN_vkCmdSetStencilOp vkCmdSetStencilOp;
+extern PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable;
+extern PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount;
+extern PFN_vkCmdWaitEvents2 vkCmdWaitEvents2;
+extern PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2;
+extern PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot;
+extern PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot;
+extern PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements;
+extern PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements;
+extern PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements;
+extern PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties;
+extern PFN_vkGetPrivateData vkGetPrivateData;
+extern PFN_vkQueueSubmit2 vkQueueSubmit2;
+extern PFN_vkSetPrivateData vkSetPrivateData;
+#endif /* defined(VK_VERSION_1_3) */
+#if defined(VK_AMDX_shader_enqueue)
+extern PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX;
+extern PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX;
+extern PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX;
+extern PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX;
+extern PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX;
+extern PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX;
+extern PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX;
+#endif /* defined(VK_AMDX_shader_enqueue) */
+#if defined(VK_AMD_buffer_marker)
+extern PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD;
+#endif /* defined(VK_AMD_buffer_marker) */
+#if defined(VK_AMD_display_native_hdr)
+extern PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD;
+#endif /* defined(VK_AMD_display_native_hdr) */
+#if defined(VK_AMD_draw_indirect_count)
+extern PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD;
+extern PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD;
+#endif /* defined(VK_AMD_draw_indirect_count) */
+#if defined(VK_AMD_shader_info)
+extern PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD;
+#endif /* defined(VK_AMD_shader_info) */
+#if defined(VK_ANDROID_external_memory_android_hardware_buffer)
+extern PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID;
+extern PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID;
+#endif /* defined(VK_ANDROID_external_memory_android_hardware_buffer) */
+#if defined(VK_EXT_acquire_drm_display)
+extern PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT;
+extern PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT;
+#endif /* defined(VK_EXT_acquire_drm_display) */
+#if defined(VK_EXT_acquire_xlib_display)
+extern PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT;
+extern PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT;
+#endif /* defined(VK_EXT_acquire_xlib_display) */
+#if defined(VK_EXT_attachment_feedback_loop_dynamic_state)
+extern PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT;
+#endif /* defined(VK_EXT_attachment_feedback_loop_dynamic_state) */
+#if defined(VK_EXT_buffer_device_address)
+extern PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT;
+#endif /* defined(VK_EXT_buffer_device_address) */
+#if defined(VK_EXT_calibrated_timestamps)
+extern PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT;
+extern PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT;
+#endif /* defined(VK_EXT_calibrated_timestamps) */
+#if defined(VK_EXT_color_write_enable)
+extern PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT;
+#endif /* defined(VK_EXT_color_write_enable) */
+#if defined(VK_EXT_conditional_rendering)
+extern PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT;
+extern PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT;
+#endif /* defined(VK_EXT_conditional_rendering) */
+#if defined(VK_EXT_debug_marker)
+extern PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT;
+extern PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT;
+extern PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT;
+extern PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT;
+extern PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT;
+#endif /* defined(VK_EXT_debug_marker) */
+#if defined(VK_EXT_debug_report)
+extern PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT;
+extern PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT;
+extern PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT;
+#endif /* defined(VK_EXT_debug_report) */
+#if defined(VK_EXT_debug_utils)
+extern PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT;
+extern PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT;
+extern PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT;
+extern PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT;
+extern PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT;
+extern PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT;
+extern PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT;
+extern PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT;
+extern PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT;
+extern PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT;
+extern PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT;
+#endif /* defined(VK_EXT_debug_utils) */
+#if defined(VK_EXT_depth_bias_control)
+extern PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT;
+#endif /* defined(VK_EXT_depth_bias_control) */
+#if defined(VK_EXT_descriptor_buffer)
+extern PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT;
+extern PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT;
+extern PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT;
+extern PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT;
+extern PFN_vkGetDescriptorEXT vkGetDescriptorEXT;
+extern PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT;
+extern PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT;
+extern PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT;
+extern PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT;
+extern PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT;
+#endif /* defined(VK_EXT_descriptor_buffer) */
+#if defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing))
+extern PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT;
+#endif /* defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) */
+#if defined(VK_EXT_device_fault)
+extern PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT;
+#endif /* defined(VK_EXT_device_fault) */
+#if defined(VK_EXT_direct_mode_display)
+extern PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT;
+#endif /* defined(VK_EXT_direct_mode_display) */
+#if defined(VK_EXT_directfb_surface)
+extern PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT;
+extern PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT;
+#endif /* defined(VK_EXT_directfb_surface) */
+#if defined(VK_EXT_discard_rectangles)
+extern PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT;
+#endif /* defined(VK_EXT_discard_rectangles) */
+#if defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2
+extern PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT;
+extern PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT;
+#endif /* defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 */
+#if defined(VK_EXT_display_control)
+extern PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT;
+extern PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT;
+extern PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT;
+extern PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT;
+#endif /* defined(VK_EXT_display_control) */
+#if defined(VK_EXT_display_surface_counter)
+extern PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT;
+#endif /* defined(VK_EXT_display_surface_counter) */
+#if defined(VK_EXT_external_memory_host)
+extern PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT;
+#endif /* defined(VK_EXT_external_memory_host) */
+#if defined(VK_EXT_full_screen_exclusive)
+extern PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT;
+extern PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT;
+extern PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT;
+#endif /* defined(VK_EXT_full_screen_exclusive) */
+#if defined(VK_EXT_hdr_metadata)
+extern PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT;
+#endif /* defined(VK_EXT_hdr_metadata) */
+#if defined(VK_EXT_headless_surface)
+extern PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT;
+#endif /* defined(VK_EXT_headless_surface) */
+#if defined(VK_EXT_host_image_copy)
+extern PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT;
+extern PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT;
+extern PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT;
+extern PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT;
+#endif /* defined(VK_EXT_host_image_copy) */
+#if defined(VK_EXT_host_query_reset)
+extern PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT;
+#endif /* defined(VK_EXT_host_query_reset) */
+#if defined(VK_EXT_image_drm_format_modifier)
+extern PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT;
+#endif /* defined(VK_EXT_image_drm_format_modifier) */
+#if defined(VK_EXT_line_rasterization)
+extern PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT;
+#endif /* defined(VK_EXT_line_rasterization) */
+#if defined(VK_EXT_mesh_shader)
+extern PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT;
+extern PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT;
+extern PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT;
+#endif /* defined(VK_EXT_mesh_shader) */
+#if defined(VK_EXT_metal_objects)
+extern PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT;
+#endif /* defined(VK_EXT_metal_objects) */
+#if defined(VK_EXT_metal_surface)
+extern PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT;
+#endif /* defined(VK_EXT_metal_surface) */
+#if defined(VK_EXT_multi_draw)
+extern PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT;
+extern PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT;
+#endif /* defined(VK_EXT_multi_draw) */
+#if defined(VK_EXT_opacity_micromap)
+extern PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT;
+extern PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT;
+extern PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT;
+extern PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT;
+extern PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT;
+extern PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT;
+extern PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT;
+extern PFN_vkCopyMicromapEXT vkCopyMicromapEXT;
+extern PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT;
+extern PFN_vkCreateMicromapEXT vkCreateMicromapEXT;
+extern PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT;
+extern PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT;
+extern PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT;
+extern PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT;
+#endif /* defined(VK_EXT_opacity_micromap) */
+#if defined(VK_EXT_pageable_device_local_memory)
+extern PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT;
+#endif /* defined(VK_EXT_pageable_device_local_memory) */
+#if defined(VK_EXT_pipeline_properties)
+extern PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT;
+#endif /* defined(VK_EXT_pipeline_properties) */
+#if defined(VK_EXT_private_data)
+extern PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT;
+extern PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT;
+extern PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT;
+extern PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT;
+#endif /* defined(VK_EXT_private_data) */
+#if defined(VK_EXT_sample_locations)
+extern PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT;
+extern PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT;
+#endif /* defined(VK_EXT_sample_locations) */
+#if defined(VK_EXT_shader_module_identifier)
+extern PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT;
+extern PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT;
+#endif /* defined(VK_EXT_shader_module_identifier) */
+#if defined(VK_EXT_shader_object)
+extern PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT;
+extern PFN_vkCreateShadersEXT vkCreateShadersEXT;
+extern PFN_vkDestroyShaderEXT vkDestroyShaderEXT;
+extern PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT;
+#endif /* defined(VK_EXT_shader_object) */
+#if defined(VK_EXT_swapchain_maintenance1)
+extern PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT;
+#endif /* defined(VK_EXT_swapchain_maintenance1) */
+#if defined(VK_EXT_tooling_info)
+extern PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT;
+#endif /* defined(VK_EXT_tooling_info) */
+#if defined(VK_EXT_transform_feedback)
+extern PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT;
+extern PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT;
+extern PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT;
+extern PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT;
+extern PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT;
+extern PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT;
+#endif /* defined(VK_EXT_transform_feedback) */
+#if defined(VK_EXT_validation_cache)
+extern PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT;
+extern PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT;
+extern PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT;
+extern PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT;
+#endif /* defined(VK_EXT_validation_cache) */
+#if defined(VK_FUCHSIA_buffer_collection)
+extern PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA;
+extern PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA;
+extern PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA;
+extern PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA;
+extern PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA;
+#endif /* defined(VK_FUCHSIA_buffer_collection) */
+#if defined(VK_FUCHSIA_external_memory)
+extern PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA;
+extern PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA;
+#endif /* defined(VK_FUCHSIA_external_memory) */
+#if defined(VK_FUCHSIA_external_semaphore)
+extern PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA;
+extern PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA;
+#endif /* defined(VK_FUCHSIA_external_semaphore) */
+#if defined(VK_FUCHSIA_imagepipe_surface)
+extern PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA;
+#endif /* defined(VK_FUCHSIA_imagepipe_surface) */
+#if defined(VK_GGP_stream_descriptor_surface)
+extern PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP;
+#endif /* defined(VK_GGP_stream_descriptor_surface) */
+#if defined(VK_GOOGLE_display_timing)
+extern PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE;
+extern PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE;
+#endif /* defined(VK_GOOGLE_display_timing) */
+#if defined(VK_HUAWEI_cluster_culling_shader)
+extern PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI;
+extern PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI;
+#endif /* defined(VK_HUAWEI_cluster_culling_shader) */
+#if defined(VK_HUAWEI_invocation_mask)
+extern PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI;
+#endif /* defined(VK_HUAWEI_invocation_mask) */
+#if defined(VK_HUAWEI_subpass_shading)
+extern PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI;
+extern PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI;
+#endif /* defined(VK_HUAWEI_subpass_shading) */
+#if defined(VK_INTEL_performance_query)
+extern PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL;
+extern PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL;
+extern PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL;
+extern PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL;
+extern PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL;
+extern PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL;
+extern PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL;
+extern PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL;
+extern PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL;
+#endif /* defined(VK_INTEL_performance_query) */
+#if defined(VK_KHR_acceleration_structure)
+extern PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR;
+extern PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR;
+extern PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR;
+extern PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR;
+extern PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR;
+extern PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR;
+extern PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR;
+extern PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR;
+extern PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR;
+extern PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR;
+extern PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR;
+extern PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR;
+extern PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR;
+extern PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR;
+extern PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR;
+extern PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR;
+#endif /* defined(VK_KHR_acceleration_structure) */
+#if defined(VK_KHR_android_surface)
+extern PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR;
+#endif /* defined(VK_KHR_android_surface) */
+#if defined(VK_KHR_bind_memory2)
+extern PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
+extern PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
+#endif /* defined(VK_KHR_bind_memory2) */
+#if defined(VK_KHR_buffer_device_address)
+extern PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR;
+extern PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR;
+extern PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR;
+#endif /* defined(VK_KHR_buffer_device_address) */
+#if defined(VK_KHR_cooperative_matrix)
+extern PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR;
+#endif /* defined(VK_KHR_cooperative_matrix) */
+#if defined(VK_KHR_copy_commands2)
+extern PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR;
+extern PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR;
+extern PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR;
+extern PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR;
+extern PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR;
+extern PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR;
+#endif /* defined(VK_KHR_copy_commands2) */
+#if defined(VK_KHR_create_renderpass2)
+extern PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR;
+extern PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR;
+extern PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR;
+extern PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR;
+#endif /* defined(VK_KHR_create_renderpass2) */
+#if defined(VK_KHR_deferred_host_operations)
+extern PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR;
+extern PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR;
+extern PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR;
+extern PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR;
+extern PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR;
+#endif /* defined(VK_KHR_deferred_host_operations) */
+#if defined(VK_KHR_descriptor_update_template)
+extern PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR;
+extern PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR;
+extern PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR;
+#endif /* defined(VK_KHR_descriptor_update_template) */
+#if defined(VK_KHR_device_group)
+extern PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR;
+extern PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR;
+extern PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR;
+#endif /* defined(VK_KHR_device_group) */
+#if defined(VK_KHR_device_group_creation)
+extern PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR;
+#endif /* defined(VK_KHR_device_group_creation) */
+#if defined(VK_KHR_display)
+extern PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR;
+extern PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR;
+extern PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR;
+extern PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR;
+extern PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR;
+extern PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR;
+extern PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR;
+#endif /* defined(VK_KHR_display) */
+#if defined(VK_KHR_display_swapchain)
+extern PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR;
+#endif /* defined(VK_KHR_display_swapchain) */
+#if defined(VK_KHR_draw_indirect_count)
+extern PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR;
+extern PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR;
+#endif /* defined(VK_KHR_draw_indirect_count) */
+#if defined(VK_KHR_dynamic_rendering)
+extern PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR;
+extern PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR;
+#endif /* defined(VK_KHR_dynamic_rendering) */
+#if defined(VK_KHR_external_fence_capabilities)
+extern PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR;
+#endif /* defined(VK_KHR_external_fence_capabilities) */
+#if defined(VK_KHR_external_fence_fd)
+extern PFN_vkGetFenceFdKHR vkGetFenceFdKHR;
+extern PFN_vkImportFenceFdKHR vkImportFenceFdKHR;
+#endif /* defined(VK_KHR_external_fence_fd) */
+#if defined(VK_KHR_external_fence_win32)
+extern PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR;
+extern PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR;
+#endif /* defined(VK_KHR_external_fence_win32) */
+#if defined(VK_KHR_external_memory_capabilities)
+extern PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR;
+#endif /* defined(VK_KHR_external_memory_capabilities) */
+#if defined(VK_KHR_external_memory_fd)
+extern PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR;
+extern PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR;
+#endif /* defined(VK_KHR_external_memory_fd) */
+#if defined(VK_KHR_external_memory_win32)
+extern PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR;
+extern PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR;
+#endif /* defined(VK_KHR_external_memory_win32) */
+#if defined(VK_KHR_external_semaphore_capabilities)
+extern PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR;
+#endif /* defined(VK_KHR_external_semaphore_capabilities) */
+#if defined(VK_KHR_external_semaphore_fd)
+extern PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR;
+extern PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR;
+#endif /* defined(VK_KHR_external_semaphore_fd) */
+#if defined(VK_KHR_external_semaphore_win32)
+extern PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR;
+extern PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR;
+#endif /* defined(VK_KHR_external_semaphore_win32) */
+#if defined(VK_KHR_fragment_shading_rate)
+extern PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR;
+extern PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR;
+#endif /* defined(VK_KHR_fragment_shading_rate) */
+#if defined(VK_KHR_get_display_properties2)
+extern PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR;
+extern PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR;
+extern PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR;
+extern PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR;
+#endif /* defined(VK_KHR_get_display_properties2) */
+#if defined(VK_KHR_get_memory_requirements2)
+extern PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
+extern PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
+extern PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR;
+#endif /* defined(VK_KHR_get_memory_requirements2) */
+#if defined(VK_KHR_get_physical_device_properties2)
+extern PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR;
+extern PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR;
+extern PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR;
+extern PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
+extern PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR;
+extern PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR;
+extern PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR;
+#endif /* defined(VK_KHR_get_physical_device_properties2) */
+#if defined(VK_KHR_get_surface_capabilities2)
+extern PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR;
+extern PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR;
+#endif /* defined(VK_KHR_get_surface_capabilities2) */
+#if defined(VK_KHR_maintenance1)
+extern PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR;
+#endif /* defined(VK_KHR_maintenance1) */
+#if defined(VK_KHR_maintenance3)
+extern PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR;
+#endif /* defined(VK_KHR_maintenance3) */
+#if defined(VK_KHR_maintenance4)
+extern PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR;
+extern PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR;
+extern PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR;
+#endif /* defined(VK_KHR_maintenance4) */
+#if defined(VK_KHR_maintenance5)
+extern PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR;
+extern PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR;
+extern PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR;
+extern PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR;
+#endif /* defined(VK_KHR_maintenance5) */
+#if defined(VK_KHR_map_memory2)
+extern PFN_vkMapMemory2KHR vkMapMemory2KHR;
+extern PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR;
+#endif /* defined(VK_KHR_map_memory2) */
+#if defined(VK_KHR_performance_query)
+extern PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR;
+extern PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR;
+extern PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR;
+extern PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR;
+#endif /* defined(VK_KHR_performance_query) */
+#if defined(VK_KHR_pipeline_executable_properties)
+extern PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR;
+extern PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR;
+extern PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR;
+#endif /* defined(VK_KHR_pipeline_executable_properties) */
+#if defined(VK_KHR_present_wait)
+extern PFN_vkWaitForPresentKHR vkWaitForPresentKHR;
+#endif /* defined(VK_KHR_present_wait) */
+#if defined(VK_KHR_push_descriptor)
+extern PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR;
+#endif /* defined(VK_KHR_push_descriptor) */
+#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline)
+extern PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR;
+#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */
+#if defined(VK_KHR_ray_tracing_pipeline)
+extern PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR;
+extern PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR;
+extern PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR;
+extern PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR;
+extern PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR;
+extern PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR;
+extern PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR;
+#endif /* defined(VK_KHR_ray_tracing_pipeline) */
+#if defined(VK_KHR_sampler_ycbcr_conversion)
+extern PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR;
+extern PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR;
+#endif /* defined(VK_KHR_sampler_ycbcr_conversion) */
+#if defined(VK_KHR_shared_presentable_image)
+extern PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR;
+#endif /* defined(VK_KHR_shared_presentable_image) */
+#if defined(VK_KHR_surface)
+extern PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR;
+extern PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR;
+extern PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR;
+extern PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR;
+extern PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR;
+#endif /* defined(VK_KHR_surface) */
+#if defined(VK_KHR_swapchain)
+extern PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR;
+extern PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR;
+extern PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR;
+extern PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR;
+extern PFN_vkQueuePresentKHR vkQueuePresentKHR;
+#endif /* defined(VK_KHR_swapchain) */
+#if defined(VK_KHR_synchronization2)
+extern PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR;
+extern PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR;
+extern PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR;
+extern PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR;
+extern PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR;
+extern PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR;
+#endif /* defined(VK_KHR_synchronization2) */
+#if defined(VK_KHR_synchronization2) &&
defined(VK_AMD_buffer_marker) +extern PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD; +#endif /* defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) */ +#if defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) +extern PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV; +#endif /* defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) */ +#if defined(VK_KHR_timeline_semaphore) +extern PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR; +extern PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR; +extern PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR; +#endif /* defined(VK_KHR_timeline_semaphore) */ +#if defined(VK_KHR_video_decode_queue) +extern PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR; +#endif /* defined(VK_KHR_video_decode_queue) */ +#if defined(VK_KHR_video_encode_queue) +extern PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR; +extern PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR; +extern PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR; +#endif /* defined(VK_KHR_video_encode_queue) */ +#if defined(VK_KHR_video_queue) +extern PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR; +extern PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR; +extern PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR; +extern PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR; +extern PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR; +extern PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR; +extern PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR; +extern PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR; +extern PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR; +extern PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR; +extern PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR; +extern PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR; +#endif /* defined(VK_KHR_video_queue) */ +#if defined(VK_KHR_wayland_surface) +extern PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR; +extern PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR; +#endif /* defined(VK_KHR_wayland_surface) */ +#if defined(VK_KHR_win32_surface) +extern PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR; +extern PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR; +#endif /* defined(VK_KHR_win32_surface) */ +#if defined(VK_KHR_xcb_surface) +extern PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR; +extern PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR; +#endif /* defined(VK_KHR_xcb_surface) */ +#if defined(VK_KHR_xlib_surface) +extern PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR; +extern PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR; +#endif /* defined(VK_KHR_xlib_surface) */ +#if defined(VK_MVK_ios_surface) +extern PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK; +#endif /* defined(VK_MVK_ios_surface) */ +#if defined(VK_MVK_macos_surface) +extern PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK; +#endif /* defined(VK_MVK_macos_surface) */ +#if defined(VK_NN_vi_surface) +extern PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN; +#endif /* 
defined(VK_NN_vi_surface) */ +#if defined(VK_NVX_binary_import) +extern PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX; +extern PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX; +extern PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX; +extern PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX; +extern PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX; +#endif /* defined(VK_NVX_binary_import) */ +#if defined(VK_NVX_image_view_handle) +extern PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX; +extern PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX; +#endif /* defined(VK_NVX_image_view_handle) */ +#if defined(VK_NV_acquire_winrt_display) +extern PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV; +extern PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV; +#endif /* defined(VK_NV_acquire_winrt_display) */ +#if defined(VK_NV_clip_space_w_scaling) +extern PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV; +#endif /* defined(VK_NV_clip_space_w_scaling) */ +#if defined(VK_NV_cooperative_matrix) +extern PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV; +#endif /* defined(VK_NV_cooperative_matrix) */ +#if defined(VK_NV_copy_memory_indirect) +extern PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV; +extern PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV; +#endif /* defined(VK_NV_copy_memory_indirect) */ +#if defined(VK_NV_coverage_reduction_mode) +extern PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV; +#endif /* defined(VK_NV_coverage_reduction_mode) */ +#if defined(VK_NV_cuda_kernel_launch) +extern PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV; +extern PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV; +extern PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV; +extern PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV; +extern PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV; +extern PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV; +#endif /* defined(VK_NV_cuda_kernel_launch) */ +#if defined(VK_NV_device_diagnostic_checkpoints) +extern PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV; +extern PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV; +#endif /* defined(VK_NV_device_diagnostic_checkpoints) */ +#if defined(VK_NV_device_generated_commands) +extern PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV; +extern PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV; +extern PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV; +extern PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV; +extern PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV; +extern PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV; +#endif /* defined(VK_NV_device_generated_commands) */ +#if defined(VK_NV_device_generated_commands_compute) +extern PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV; +extern PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV; +extern PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV; +#endif /* defined(VK_NV_device_generated_commands_compute) */ +#if defined(VK_NV_external_memory_capabilities) +extern PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV; +#endif /* defined(VK_NV_external_memory_capabilities) */ +#if 
defined(VK_NV_external_memory_rdma) +extern PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV; +#endif /* defined(VK_NV_external_memory_rdma) */ +#if defined(VK_NV_external_memory_win32) +extern PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV; +#endif /* defined(VK_NV_external_memory_win32) */ +#if defined(VK_NV_fragment_shading_rate_enums) +extern PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV; +#endif /* defined(VK_NV_fragment_shading_rate_enums) */ +#if defined(VK_NV_low_latency2) +extern PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV; +extern PFN_vkLatencySleepNV vkLatencySleepNV; +extern PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV; +extern PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV; +extern PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV; +#endif /* defined(VK_NV_low_latency2) */ +#if defined(VK_NV_memory_decompression) +extern PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV; +extern PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV; +#endif /* defined(VK_NV_memory_decompression) */ +#if defined(VK_NV_mesh_shader) +extern PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV; +extern PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV; +extern PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV; +#endif /* defined(VK_NV_mesh_shader) */ +#if defined(VK_NV_optical_flow) +extern PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV; +extern PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV; +extern PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV; +extern PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV; +extern PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV; +#endif /* defined(VK_NV_optical_flow) */ +#if defined(VK_NV_ray_tracing) +extern PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV; +extern PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV; +extern PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV; +extern PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV; +extern PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV; +extern PFN_vkCompileDeferredNV vkCompileDeferredNV; +extern PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV; +extern PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV; +extern PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV; +extern PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV; +extern PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV; +extern PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV; +#endif /* defined(VK_NV_ray_tracing) */ +#if defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 +extern PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV; +#endif /* defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 */ +#if defined(VK_NV_scissor_exclusive) +extern PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV; +#endif /* defined(VK_NV_scissor_exclusive) */ +#if defined(VK_NV_shading_rate_image) +extern PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV; +extern PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV; +extern PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV; +#endif /* 
defined(VK_NV_shading_rate_image) */ +#if defined(VK_QCOM_tile_properties) +extern PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM; +extern PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM; +#endif /* defined(VK_QCOM_tile_properties) */ +#if defined(VK_QNX_external_memory_screen_buffer) +extern PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX; +#endif /* defined(VK_QNX_external_memory_screen_buffer) */ +#if defined(VK_QNX_screen_surface) +extern PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX; +extern PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX; +#endif /* defined(VK_QNX_screen_surface) */ +#if defined(VK_VALVE_descriptor_set_host_mapping) +extern PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE; +extern PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE; +#endif /* defined(VK_VALVE_descriptor_set_host_mapping) */ +#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) +extern PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT; +extern PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT; +extern PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT; +extern PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT; +extern PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT; +extern PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT; +extern PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT; +extern PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT; +extern PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT; +extern PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT; +extern PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT; +extern PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT; +#endif /* (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) */ +#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) +extern PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT; +extern PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT; +extern PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT; +extern PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT; +extern PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT; +#endif /* (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) */ +#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) +extern PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT; +extern PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT; +extern PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT; +extern PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT; +extern PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT; +extern PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT; +extern PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT; +extern PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT; +extern PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT; +extern PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT; +extern PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT; +extern PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT; +extern 
PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT; +extern PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT; +extern PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT; +extern PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT; +extern PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT; +extern PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT; +extern PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT; +extern PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT; +extern PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT; +#endif /* (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) +extern PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV; +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) +extern PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV; +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) +extern PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV; +extern PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV; +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) +extern PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV; +extern PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV; +extern PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV; +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) +extern PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV; +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) +extern PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV; +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) */ +#if (defined(VK_EXT_extended_dynamic_state3) && 
defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) +extern PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV; +#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) */ +#if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) +extern PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT; +#endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */ +#if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) +extern PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT; +#endif /* (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) */ +#if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) +extern PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT; +#endif /* (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) */ +#if (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) +extern PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR; +#endif /* (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) */ +#if (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) +extern PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR; +extern PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR; +extern PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR; +#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ +#if (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) +extern PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR; +#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ +/* VOLK_GENERATE_PROTOTYPES_H */ + +#ifdef __cplusplus +} +#endif + +#endif + +#ifdef VOLK_IMPLEMENTATION +#undef VOLK_IMPLEMENTATION +/* Prevent tools like dependency checkers from detecting a cyclic dependency */ +#define VOLK_SOURCE "volk.c" +#include VOLK_SOURCE +#endif + +/** + * Copyright (c) 2018-2023 Arseny Kapoulkine + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or 
substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. +*/ +/* clang-format on */ diff --git a/MacroLibX/third_party/vulkan/vk_icd.h b/MacroLibX/third_party/vulkan/vk_icd.h new file mode 100644 index 0000000..59204a3 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vk_icd.h @@ -0,0 +1,244 @@ +/* + * Copyright 2015-2023 The Khronos Group Inc. + * Copyright 2015-2023 Valve Corporation + * Copyright 2015-2023 LunarG, Inc. + * + * SPDX-License-Identifier: Apache-2.0 + */ +#pragma once + +#include "vulkan.h" +#include <stdbool.h> + +// Loader-ICD version negotiation API. Versions add the following features: +// Version 0 - Initial. Doesn't support vk_icdGetInstanceProcAddr +// or vk_icdNegotiateLoaderICDInterfaceVersion. +// Version 1 - Add support for vk_icdGetInstanceProcAddr. +// Version 2 - Add Loader/ICD Interface version negotiation +// via vk_icdNegotiateLoaderICDInterfaceVersion. +// Version 3 - Add ICD creation/destruction of KHR_surface objects. +// Version 4 - Add unknown physical device extension querying via +// vk_icdGetPhysicalDeviceProcAddr. +// Version 5 - Tells ICDs that the loader is now paying attention to the +// application version of Vulkan passed into the ApplicationInfo +// structure during vkCreateInstance. This will tell the ICD +// that if the loader is older, it should automatically fail a +// call for any API version > 1.0. Otherwise, the loader will +// manually determine if it can support the expected version. +// Version 6 - Add support for vk_icdEnumerateAdapterPhysicalDevices. +// Version 7 - If an ICD supports any of the following functions, they must be +// queryable with vk_icdGetInstanceProcAddr: +// vk_icdNegotiateLoaderICDInterfaceVersion +// vk_icdGetPhysicalDeviceProcAddr +// vk_icdEnumerateAdapterPhysicalDevices (Windows only) +// In addition, these functions no longer need to be exported directly. +// This version allows drivers provided through the extension +// VK_LUNARG_direct_driver_loading be able to support the entire +// Driver-Loader interface. + +#define CURRENT_LOADER_ICD_INTERFACE_VERSION 7 +#define MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION 0 +#define MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION 4 + +// Old typedefs that don't follow a proper naming convention but are preserved for compatibility +typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion); +// This is defined in vk_layer.h which will be found by the loader, but if an ICD is building against this +// file directly, it won't be found. 
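+// Illustrative sketch (kept commented out; not part of the upstream Khronos
+// header): an ICD could implement the negotiation entry point typedef'd above
+// roughly as follows. The loader passes in the highest interface version it
+// supports; the ICD clamps *pVersion down to its own maximum and fails only
+// when no mutually supported version exists. The exact policy is driver-specific.
+//
+// VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pVersion) {
+//     if (*pVersion < MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION)
+//         return VK_ERROR_INCOMPATIBLE_DRIVER;               // no common version with the loader
+//     if (*pVersion > CURRENT_LOADER_ICD_INTERFACE_VERSION)
+//         *pVersion = CURRENT_LOADER_ICD_INTERFACE_VERSION;  // clamp to this ICD's maximum
+//     return VK_SUCCESS;
+// }
+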
+#ifndef PFN_GetPhysicalDeviceProcAddr +typedef PFN_vkVoidFunction(VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char *pName); +#endif + +// Typedefs for loader/ICD interface +typedef VkResult (VKAPI_PTR *PFN_vk_icdNegotiateLoaderICDInterfaceVersion)(uint32_t* pVersion); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetInstanceProcAddr)(VkInstance instance, const char* pName); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName); +#if defined(VK_USE_PLATFORM_WIN32_KHR) +typedef VkResult (VKAPI_PTR *PFN_vk_icdEnumerateAdapterPhysicalDevices)(VkInstance instance, LUID adapterLUID, + uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +#endif + +// Prototypes for loader/ICD interface +#if !defined(VK_NO_PROTOTYPES) +#ifdef __cplusplus +extern "C" { +#endif + VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pVersion); + VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName); + VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName); +#if defined(VK_USE_PLATFORM_WIN32_KHR) + VKAPI_ATTR VkResult VKAPI_CALL vk_icdEnumerateAdapterPhysicalDevices(VkInstance instance, LUID adapterLUID, + uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +#endif +#ifdef __cplusplus +} +#endif +#endif + +/* + * The ICD must reserve space for a pointer for the loader's dispatch + * table, at the start of <each object>. + * The ICD must initialize this variable using the SET_LOADER_MAGIC_VALUE macro. + */ + +#define ICD_LOADER_MAGIC 0x01CDC0DE + +typedef union { + uintptr_t loaderMagic; + void *loaderData; +} VK_LOADER_DATA; + +static inline void set_loader_magic_value(void *pNewObject) { + VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject; + loader_info->loaderMagic = ICD_LOADER_MAGIC; +} + +static inline bool valid_loader_magic_value(void *pNewObject) { + const VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject; + return (loader_info->loaderMagic & 0xffffffff) == ICD_LOADER_MAGIC; +} + +/* + * Windows and Linux ICDs will treat VkSurfaceKHR as a pointer to a struct that + * contains the platform-specific connection and surface information. 
+ */ +typedef enum { + VK_ICD_WSI_PLATFORM_MIR, + VK_ICD_WSI_PLATFORM_WAYLAND, + VK_ICD_WSI_PLATFORM_WIN32, + VK_ICD_WSI_PLATFORM_XCB, + VK_ICD_WSI_PLATFORM_XLIB, + VK_ICD_WSI_PLATFORM_ANDROID, + VK_ICD_WSI_PLATFORM_MACOS, + VK_ICD_WSI_PLATFORM_IOS, + VK_ICD_WSI_PLATFORM_DISPLAY, + VK_ICD_WSI_PLATFORM_HEADLESS, + VK_ICD_WSI_PLATFORM_METAL, + VK_ICD_WSI_PLATFORM_DIRECTFB, + VK_ICD_WSI_PLATFORM_VI, + VK_ICD_WSI_PLATFORM_GGP, + VK_ICD_WSI_PLATFORM_SCREEN, + VK_ICD_WSI_PLATFORM_FUCHSIA, +} VkIcdWsiPlatform; + +typedef struct { + VkIcdWsiPlatform platform; +} VkIcdSurfaceBase; + +#ifdef VK_USE_PLATFORM_MIR_KHR +typedef struct { + VkIcdSurfaceBase base; + MirConnection *connection; + MirSurface *mirSurface; +} VkIcdSurfaceMir; +#endif // VK_USE_PLATFORM_MIR_KHR + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR +typedef struct { + VkIcdSurfaceBase base; + struct wl_display *display; + struct wl_surface *surface; +} VkIcdSurfaceWayland; +#endif // VK_USE_PLATFORM_WAYLAND_KHR + +#ifdef VK_USE_PLATFORM_WIN32_KHR +typedef struct { + VkIcdSurfaceBase base; + HINSTANCE hinstance; + HWND hwnd; +} VkIcdSurfaceWin32; +#endif // VK_USE_PLATFORM_WIN32_KHR + +#ifdef VK_USE_PLATFORM_XCB_KHR +typedef struct { + VkIcdSurfaceBase base; + xcb_connection_t *connection; + xcb_window_t window; +} VkIcdSurfaceXcb; +#endif // VK_USE_PLATFORM_XCB_KHR + +#ifdef VK_USE_PLATFORM_XLIB_KHR +typedef struct { + VkIcdSurfaceBase base; + Display *dpy; + Window window; +} VkIcdSurfaceXlib; +#endif // VK_USE_PLATFORM_XLIB_KHR + +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT +typedef struct { + VkIcdSurfaceBase base; + IDirectFB *dfb; + IDirectFBSurface *surface; +} VkIcdSurfaceDirectFB; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT + +#ifdef VK_USE_PLATFORM_ANDROID_KHR +typedef struct { + VkIcdSurfaceBase base; + struct ANativeWindow *window; +} VkIcdSurfaceAndroid; +#endif // VK_USE_PLATFORM_ANDROID_KHR + +#ifdef VK_USE_PLATFORM_MACOS_MVK +typedef struct { + VkIcdSurfaceBase base; + const void *pView; +} VkIcdSurfaceMacOS; +#endif // VK_USE_PLATFORM_MACOS_MVK + +#ifdef VK_USE_PLATFORM_IOS_MVK +typedef struct { + VkIcdSurfaceBase base; + const void *pView; +} VkIcdSurfaceIOS; +#endif // VK_USE_PLATFORM_IOS_MVK + +#ifdef VK_USE_PLATFORM_GGP +typedef struct { + VkIcdSurfaceBase base; + GgpStreamDescriptor streamDescriptor; +} VkIcdSurfaceGgp; +#endif // VK_USE_PLATFORM_GGP + +typedef struct { + VkIcdSurfaceBase base; + VkDisplayModeKHR displayMode; + uint32_t planeIndex; + uint32_t planeStackIndex; + VkSurfaceTransformFlagBitsKHR transform; + float globalAlpha; + VkDisplayPlaneAlphaFlagBitsKHR alphaMode; + VkExtent2D imageExtent; +} VkIcdSurfaceDisplay; + +typedef struct { + VkIcdSurfaceBase base; +} VkIcdSurfaceHeadless; + +#ifdef VK_USE_PLATFORM_METAL_EXT +typedef struct { + VkIcdSurfaceBase base; + const CAMetalLayer *pLayer; +} VkIcdSurfaceMetal; +#endif // VK_USE_PLATFORM_METAL_EXT + +#ifdef VK_USE_PLATFORM_VI_NN +typedef struct { + VkIcdSurfaceBase base; + void *window; +} VkIcdSurfaceVi; +#endif // VK_USE_PLATFORM_VI_NN + +#ifdef VK_USE_PLATFORM_SCREEN_QNX +typedef struct { + VkIcdSurfaceBase base; + struct _screen_context *context; + struct _screen_window *window; +} VkIcdSurfaceScreen; +#endif // VK_USE_PLATFORM_SCREEN_QNX + +#ifdef VK_USE_PLATFORM_FUCHSIA +typedef struct { + VkIcdSurfaceBase base; +} VkIcdSurfaceImagePipe; +#endif // VK_USE_PLATFORM_FUCHSIA diff --git a/MacroLibX/third_party/vulkan/vk_layer.h b/MacroLibX/third_party/vulkan/vk_layer.h new file mode 100644 index 0000000..19d88fc --- /dev/null +++ 
b/MacroLibX/third_party/vulkan/vk_layer.h @@ -0,0 +1,189 @@ +/* + * Copyright 2015-2023 The Khronos Group Inc. + * Copyright 2015-2023 Valve Corporation + * Copyright 2015-2023 LunarG, Inc. + * + * SPDX-License-Identifier: Apache-2.0 + */ +#pragma once + +/* Need to define dispatch table + * Core struct can then have ptr to dispatch table at the top + * Along with object ptrs for current and next OBJ + */ + +#include "vulkan_core.h" + +#define MAX_NUM_UNKNOWN_EXTS 250 + + // Loader-Layer version negotiation API. Versions add the following features: + // Versions 0/1 - Initial. Doesn't support vk_layerGetPhysicalDeviceProcAddr + // or vk_icdNegotiateLoaderLayerInterfaceVersion. + // Version 2 - Add support for vk_layerGetPhysicalDeviceProcAddr and + // vk_icdNegotiateLoaderLayerInterfaceVersion. +#define CURRENT_LOADER_LAYER_INTERFACE_VERSION 2 +#define MIN_SUPPORTED_LOADER_LAYER_INTERFACE_VERSION 1 + +#define VK_CURRENT_CHAIN_VERSION 1 + +// Typedef for use in the interfaces below +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName); + +// Version negotiation values +typedef enum VkNegotiateLayerStructType { + LAYER_NEGOTIATE_UNINTIALIZED = 0, + LAYER_NEGOTIATE_INTERFACE_STRUCT = 1, +} VkNegotiateLayerStructType; + +// Version negotiation structures +typedef struct VkNegotiateLayerInterface { + VkNegotiateLayerStructType sType; + void *pNext; + uint32_t loaderLayerInterfaceVersion; + PFN_vkGetInstanceProcAddr pfnGetInstanceProcAddr; + PFN_vkGetDeviceProcAddr pfnGetDeviceProcAddr; + PFN_GetPhysicalDeviceProcAddr pfnGetPhysicalDeviceProcAddr; +} VkNegotiateLayerInterface; + +// Version negotiation functions +typedef VkResult (VKAPI_PTR *PFN_vkNegotiateLoaderLayerInterfaceVersion)(VkNegotiateLayerInterface *pVersionStruct); + +// Function prototype for unknown physical device extension command +typedef VkResult(VKAPI_PTR *PFN_PhysDevExt)(VkPhysicalDevice phys_device); + +// ------------------------------------------------------------------------------------------------ +// CreateInstance and CreateDevice support structures + +/* Sub type of structure for instance and device loader ext of CreateInfo. + * When sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO + * or sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO + * then VkLayerFunction indicates struct type pointed to by pNext + */ +typedef enum VkLayerFunction_ { + VK_LAYER_LINK_INFO = 0, + VK_LOADER_DATA_CALLBACK = 1, + VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK = 2, + VK_LOADER_FEATURES = 3, +} VkLayerFunction; + +typedef struct VkLayerInstanceLink_ { + struct VkLayerInstanceLink_ *pNext; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; + PFN_GetPhysicalDeviceProcAddr pfnNextGetPhysicalDeviceProcAddr; +} VkLayerInstanceLink; + +/* + * When creating the device chain the loader needs to pass + * down information about it's device structure needed at + * the end of the chain. Passing the data via the + * VkLayerDeviceInfo avoids issues with finding the + * exact instance being used. 
+ */ +typedef struct VkLayerDeviceInfo_ { + void *device_info; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; +} VkLayerDeviceInfo; + +typedef VkResult (VKAPI_PTR *PFN_vkSetInstanceLoaderData)(VkInstance instance, + void *object); +typedef VkResult (VKAPI_PTR *PFN_vkSetDeviceLoaderData)(VkDevice device, + void *object); +typedef VkResult (VKAPI_PTR *PFN_vkLayerCreateDevice)(VkInstance instance, VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, PFN_vkGetInstanceProcAddr layerGIPA, PFN_vkGetDeviceProcAddr *nextGDPA); +typedef void (VKAPI_PTR *PFN_vkLayerDestroyDevice)(VkDevice physicalDevice, const VkAllocationCallbacks *pAllocator, PFN_vkDestroyDevice destroyFunction); + +typedef enum VkLoaderFeastureFlagBits { + VK_LOADER_FEATURE_PHYSICAL_DEVICE_SORTING = 0x00000001, +} VkLoaderFlagBits; +typedef VkFlags VkLoaderFeatureFlags; + +typedef struct { + VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO + const void *pNext; + VkLayerFunction function; + union { + VkLayerInstanceLink *pLayerInfo; + PFN_vkSetInstanceLoaderData pfnSetInstanceLoaderData; + struct { + PFN_vkLayerCreateDevice pfnLayerCreateDevice; + PFN_vkLayerDestroyDevice pfnLayerDestroyDevice; + } layerDevice; + VkLoaderFeatureFlags loaderFeatures; + } u; +} VkLayerInstanceCreateInfo; + +typedef struct VkLayerDeviceLink_ { + struct VkLayerDeviceLink_ *pNext; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; + PFN_vkGetDeviceProcAddr pfnNextGetDeviceProcAddr; +} VkLayerDeviceLink; + +typedef struct { + VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO + const void *pNext; + VkLayerFunction function; + union { + VkLayerDeviceLink *pLayerInfo; + PFN_vkSetDeviceLoaderData pfnSetDeviceLoaderData; + } u; +} VkLayerDeviceCreateInfo; + +#ifdef __cplusplus +extern "C" { +#endif + +VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct); + +typedef enum VkChainType { + VK_CHAIN_TYPE_UNKNOWN = 0, + VK_CHAIN_TYPE_ENUMERATE_INSTANCE_EXTENSION_PROPERTIES = 1, + VK_CHAIN_TYPE_ENUMERATE_INSTANCE_LAYER_PROPERTIES = 2, + VK_CHAIN_TYPE_ENUMERATE_INSTANCE_VERSION = 3, +} VkChainType; + +typedef struct VkChainHeader { + VkChainType type; + uint32_t version; + uint32_t size; +} VkChainHeader; + +typedef struct VkEnumerateInstanceExtensionPropertiesChain { + VkChainHeader header; + VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceExtensionPropertiesChain *, const char *, uint32_t *, + VkExtensionProperties *); + const struct VkEnumerateInstanceExtensionPropertiesChain *pNextLink; + +#if defined(__cplusplus) + inline VkResult CallDown(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties) const { + return pfnNextLayer(pNextLink, pLayerName, pPropertyCount, pProperties); + } +#endif +} VkEnumerateInstanceExtensionPropertiesChain; + +typedef struct VkEnumerateInstanceLayerPropertiesChain { + VkChainHeader header; + VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceLayerPropertiesChain *, uint32_t *, VkLayerProperties *); + const struct VkEnumerateInstanceLayerPropertiesChain *pNextLink; + +#if defined(__cplusplus) + inline VkResult CallDown(uint32_t *pPropertyCount, VkLayerProperties *pProperties) const { + return pfnNextLayer(pNextLink, pPropertyCount, pProperties); + } +#endif +} VkEnumerateInstanceLayerPropertiesChain; + +typedef struct VkEnumerateInstanceVersionChain { + VkChainHeader header; + 
VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceVersionChain *, uint32_t *); + const struct VkEnumerateInstanceVersionChain *pNextLink; + +#if defined(__cplusplus) + inline VkResult CallDown(uint32_t *pApiVersion) const { + return pfnNextLayer(pNextLink, pApiVersion); + } +#endif +} VkEnumerateInstanceVersionChain; + +#ifdef __cplusplus +} +#endif diff --git a/MacroLibX/third_party/vulkan/vk_platform.h b/MacroLibX/third_party/vulkan/vk_platform.h new file mode 100644 index 0000000..ed67a60 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vk_platform.h @@ -0,0 +1,84 @@ +// +// File: vk_platform.h +// +/* +** Copyright 2014-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + + +#ifndef VK_PLATFORM_H_ +#define VK_PLATFORM_H_ + +#ifdef __cplusplus +extern "C" +{ +#endif // __cplusplus + +/* +*************************************************************************************************** +* Platform-specific directives and type declarations +*************************************************************************************************** +*/ + +/* Platform-specific calling convention macros. + * + * Platforms should define these so that Vulkan clients call Vulkan commands + * with the same calling conventions that the Vulkan implementation expects. + * + * VKAPI_ATTR - Placed before the return type in function declarations. + * Useful for C++11 and GCC/Clang-style function attribute syntax. + * VKAPI_CALL - Placed after the return type in function declarations. + * Useful for MSVC-style calling convention syntax. + * VKAPI_PTR - Placed between the '(' and '*' in function pointer types. + * + * Function declaration: VKAPI_ATTR void VKAPI_CALL vkCommand(void); + * Function pointer type: typedef void (VKAPI_PTR *PFN_vkCommand)(void); + */ +#if defined(_WIN32) + // On Windows, Vulkan commands use the stdcall convention + #define VKAPI_ATTR + #define VKAPI_CALL __stdcall + #define VKAPI_PTR VKAPI_CALL +#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH < 7 + #error "Vulkan is not supported for the 'armeabi' NDK ABI" +#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH >= 7 && defined(__ARM_32BIT_STATE) + // On Android 32-bit ARM targets, Vulkan functions use the "hardfloat" + // calling convention, i.e. float parameters are passed in registers. This + // is true even if the rest of the application passes floats on the stack, + // as it does by default when compiling for the armeabi-v7a NDK ABI. 
+ #define VKAPI_ATTR __attribute__((pcs("aapcs-vfp"))) + #define VKAPI_CALL + #define VKAPI_PTR VKAPI_ATTR +#else + // On other platforms, use the default calling convention + #define VKAPI_ATTR + #define VKAPI_CALL + #define VKAPI_PTR +#endif + +#if !defined(VK_NO_STDDEF_H) + #include <stddef.h> +#endif // !defined(VK_NO_STDDEF_H) + +#if !defined(VK_NO_STDINT_H) + #if defined(_MSC_VER) && (_MSC_VER < 1600) + typedef signed __int8 int8_t; + typedef unsigned __int8 uint8_t; + typedef signed __int16 int16_t; + typedef unsigned __int16 uint16_t; + typedef signed __int32 int32_t; + typedef unsigned __int32 uint32_t; + typedef signed __int64 int64_t; + typedef unsigned __int64 uint64_t; + #else + #include <stdint.h> + #endif +#endif // !defined(VK_NO_STDINT_H) + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan.cppm b/MacroLibX/third_party/vulkan/vulkan.cppm new file mode 100644 index 0000000..1ba2ff4 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan.cppm @@ -0,0 +1,4741 @@ +// Copyright 2015-2023 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +// Note: This module is still in an experimental state. +// Any feedback is welcome on https://github.com/KhronosGroup/Vulkan-Hpp/issues. + +module; + +#include <vulkan/vulkan.hpp> +#include <vulkan/vulkan_extension_inspection.hpp> +#include <vulkan/vulkan_format_traits.hpp> +#include <vulkan/vulkan_hash.hpp> +#include <vulkan/vulkan_raii.hpp> +#include <vulkan/vulkan_shared.hpp> + +export module vulkan_hpp; + +export namespace VULKAN_HPP_NAMESPACE +{ + //===================================== + //=== HARDCODED TYPEs AND FUNCTIONs === + //===================================== + using VULKAN_HPP_NAMESPACE::ArrayWrapper1D; + using VULKAN_HPP_NAMESPACE::ArrayWrapper2D; + using VULKAN_HPP_NAMESPACE::DispatchLoaderBase; + using VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic; + using VULKAN_HPP_NAMESPACE::Flags; + using VULKAN_HPP_NAMESPACE::FlagTraits; + +#if !defined( VK_NO_PROTOTYPES ) + using VULKAN_HPP_NAMESPACE::DispatchLoaderStatic; +#endif /*VK_NO_PROTOTYPES*/ + + using VULKAN_HPP_NAMESPACE::operator&; + using VULKAN_HPP_NAMESPACE::operator|; + using VULKAN_HPP_NAMESPACE::operator^; + using VULKAN_HPP_NAMESPACE::operator~; + using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE; + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + using VULKAN_HPP_NAMESPACE::ArrayProxy; + using VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries; + using VULKAN_HPP_NAMESPACE::Optional; + using VULKAN_HPP_NAMESPACE::StridedArrayProxy; + using VULKAN_HPP_NAMESPACE::StructureChain; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + using VULKAN_HPP_NAMESPACE::ObjectDestroy; + using VULKAN_HPP_NAMESPACE::ObjectDestroyShared; + using VULKAN_HPP_NAMESPACE::ObjectFree; + using VULKAN_HPP_NAMESPACE::ObjectFreeShared; + using VULKAN_HPP_NAMESPACE::ObjectRelease; + using VULKAN_HPP_NAMESPACE::ObjectReleaseShared; + using VULKAN_HPP_NAMESPACE::PoolFree; + using VULKAN_HPP_NAMESPACE::PoolFreeShared; + using VULKAN_HPP_NAMESPACE::SharedHandle; + using VULKAN_HPP_NAMESPACE::UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + //================== + //=== BASE TYPEs === + //================== + using VULKAN_HPP_NAMESPACE::Bool32; + using VULKAN_HPP_NAMESPACE::DeviceAddress; + using VULKAN_HPP_NAMESPACE::DeviceSize; + using VULKAN_HPP_NAMESPACE::RemoteAddressNV; + using VULKAN_HPP_NAMESPACE::SampleMask; + + //============= + //=== ENUMs === + //============= + using VULKAN_HPP_NAMESPACE::CppType; + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::AccessFlagBits; + 
using VULKAN_HPP_NAMESPACE::AccessFlags; + using VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlagBits; + using VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags; + using VULKAN_HPP_NAMESPACE::AttachmentLoadOp; + using VULKAN_HPP_NAMESPACE::AttachmentStoreOp; + using VULKAN_HPP_NAMESPACE::BlendFactor; + using VULKAN_HPP_NAMESPACE::BlendOp; + using VULKAN_HPP_NAMESPACE::BorderColor; + using VULKAN_HPP_NAMESPACE::BufferCreateFlagBits; + using VULKAN_HPP_NAMESPACE::BufferCreateFlags; + using VULKAN_HPP_NAMESPACE::BufferUsageFlagBits; + using VULKAN_HPP_NAMESPACE::BufferUsageFlags; + using VULKAN_HPP_NAMESPACE::BufferViewCreateFlagBits; + using VULKAN_HPP_NAMESPACE::BufferViewCreateFlags; + using VULKAN_HPP_NAMESPACE::ColorComponentFlagBits; + using VULKAN_HPP_NAMESPACE::ColorComponentFlags; + using VULKAN_HPP_NAMESPACE::CommandBufferLevel; + using VULKAN_HPP_NAMESPACE::CommandBufferResetFlagBits; + using VULKAN_HPP_NAMESPACE::CommandBufferResetFlags; + using VULKAN_HPP_NAMESPACE::CommandBufferUsageFlagBits; + using VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags; + using VULKAN_HPP_NAMESPACE::CommandPoolCreateFlagBits; + using VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags; + using VULKAN_HPP_NAMESPACE::CommandPoolResetFlagBits; + using VULKAN_HPP_NAMESPACE::CommandPoolResetFlags; + using VULKAN_HPP_NAMESPACE::CompareOp; + using VULKAN_HPP_NAMESPACE::ComponentSwizzle; + using VULKAN_HPP_NAMESPACE::CullModeFlagBits; + using VULKAN_HPP_NAMESPACE::CullModeFlags; + using VULKAN_HPP_NAMESPACE::DependencyFlagBits; + using VULKAN_HPP_NAMESPACE::DependencyFlags; + using VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlagBits; + using VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags; + using VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlagBits; + using VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlagBits; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags; + using VULKAN_HPP_NAMESPACE::DescriptorType; + using VULKAN_HPP_NAMESPACE::DeviceCreateFlagBits; + using VULKAN_HPP_NAMESPACE::DeviceCreateFlags; + using VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlagBits; + using VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags; + using VULKAN_HPP_NAMESPACE::DynamicState; + using VULKAN_HPP_NAMESPACE::EventCreateFlagBits; + using VULKAN_HPP_NAMESPACE::EventCreateFlags; + using VULKAN_HPP_NAMESPACE::FenceCreateFlagBits; + using VULKAN_HPP_NAMESPACE::FenceCreateFlags; + using VULKAN_HPP_NAMESPACE::Filter; + using VULKAN_HPP_NAMESPACE::Format; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlags; + using VULKAN_HPP_NAMESPACE::FramebufferCreateFlagBits; + using VULKAN_HPP_NAMESPACE::FramebufferCreateFlags; + using VULKAN_HPP_NAMESPACE::FrontFace; + using VULKAN_HPP_NAMESPACE::ImageAspectFlagBits; + using VULKAN_HPP_NAMESPACE::ImageAspectFlags; + using VULKAN_HPP_NAMESPACE::ImageCreateFlagBits; + using VULKAN_HPP_NAMESPACE::ImageCreateFlags; + using VULKAN_HPP_NAMESPACE::ImageLayout; + using VULKAN_HPP_NAMESPACE::ImageTiling; + using VULKAN_HPP_NAMESPACE::ImageType; + using VULKAN_HPP_NAMESPACE::ImageUsageFlagBits; + using VULKAN_HPP_NAMESPACE::ImageUsageFlags; + using VULKAN_HPP_NAMESPACE::ImageViewCreateFlagBits; + using VULKAN_HPP_NAMESPACE::ImageViewCreateFlags; + using VULKAN_HPP_NAMESPACE::ImageViewType; + using VULKAN_HPP_NAMESPACE::IndexType; + using VULKAN_HPP_NAMESPACE::InstanceCreateFlagBits; + using VULKAN_HPP_NAMESPACE::InstanceCreateFlags; + using VULKAN_HPP_NAMESPACE::InternalAllocationType; + using 
VULKAN_HPP_NAMESPACE::LogicOp; + using VULKAN_HPP_NAMESPACE::MemoryHeapFlagBits; + using VULKAN_HPP_NAMESPACE::MemoryHeapFlags; + using VULKAN_HPP_NAMESPACE::MemoryMapFlagBits; + using VULKAN_HPP_NAMESPACE::MemoryMapFlags; + using VULKAN_HPP_NAMESPACE::MemoryPropertyFlagBits; + using VULKAN_HPP_NAMESPACE::MemoryPropertyFlags; + using VULKAN_HPP_NAMESPACE::ObjectType; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceType; + using VULKAN_HPP_NAMESPACE::PipelineBindPoint; + using VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion; + using VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineStageFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineStageFlags; + using VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PolygonMode; + using VULKAN_HPP_NAMESPACE::PrimitiveTopology; + using VULKAN_HPP_NAMESPACE::QueryControlFlagBits; + using VULKAN_HPP_NAMESPACE::QueryControlFlags; + using VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlagBits; + using VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags; + using VULKAN_HPP_NAMESPACE::QueryPoolCreateFlagBits; + using VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags; + using VULKAN_HPP_NAMESPACE::QueryResultFlagBits; + using VULKAN_HPP_NAMESPACE::QueryResultFlags; + using VULKAN_HPP_NAMESPACE::QueryType; + using VULKAN_HPP_NAMESPACE::QueueFlagBits; + using VULKAN_HPP_NAMESPACE::QueueFlags; + using VULKAN_HPP_NAMESPACE::RenderPassCreateFlagBits; + using VULKAN_HPP_NAMESPACE::RenderPassCreateFlags; + using VULKAN_HPP_NAMESPACE::Result; + using VULKAN_HPP_NAMESPACE::SampleCountFlagBits; + using VULKAN_HPP_NAMESPACE::SampleCountFlags; + using VULKAN_HPP_NAMESPACE::SamplerAddressMode; + using VULKAN_HPP_NAMESPACE::SamplerCreateFlagBits; + using VULKAN_HPP_NAMESPACE::SamplerCreateFlags; + using VULKAN_HPP_NAMESPACE::SamplerMipmapMode; + using VULKAN_HPP_NAMESPACE::SemaphoreCreateFlagBits; + using VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags; + using 
VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlagBits; + using VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags; + using VULKAN_HPP_NAMESPACE::ShaderStageFlagBits; + using VULKAN_HPP_NAMESPACE::ShaderStageFlags; + using VULKAN_HPP_NAMESPACE::SharingMode; + using VULKAN_HPP_NAMESPACE::SparseImageFormatFlagBits; + using VULKAN_HPP_NAMESPACE::SparseImageFormatFlags; + using VULKAN_HPP_NAMESPACE::SparseMemoryBindFlagBits; + using VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags; + using VULKAN_HPP_NAMESPACE::StencilFaceFlagBits; + using VULKAN_HPP_NAMESPACE::StencilFaceFlags; + using VULKAN_HPP_NAMESPACE::StencilOp; + using VULKAN_HPP_NAMESPACE::StructureType; + using VULKAN_HPP_NAMESPACE::SubpassContents; + using VULKAN_HPP_NAMESPACE::SubpassDescriptionFlagBits; + using VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags; + using VULKAN_HPP_NAMESPACE::SystemAllocationScope; + using VULKAN_HPP_NAMESPACE::VendorId; + using VULKAN_HPP_NAMESPACE::VertexInputRate; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::ChromaLocation; + using VULKAN_HPP_NAMESPACE::ChromaLocationKHR; + using VULKAN_HPP_NAMESPACE::CommandPoolTrimFlagBits; + using VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateTypeKHR; + using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags; + using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags; + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags; + using VULKAN_HPP_NAMESPACE::FenceImportFlagBits; + using VULKAN_HPP_NAMESPACE::FenceImportFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::FenceImportFlags; + using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagBits; + using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::MemoryAllocateFlags; + using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags; + using VULKAN_HPP_NAMESPACE::PointClippingBehavior; + using VULKAN_HPP_NAMESPACE::PointClippingBehaviorKHR; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversionKHR; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrRange; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrRangeKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreImportFlagBits; + using VULKAN_HPP_NAMESPACE::SemaphoreImportFlagBitsKHR; + using 
VULKAN_HPP_NAMESPACE::SemaphoreImportFlags; + using VULKAN_HPP_NAMESPACE::SubgroupFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags; + using VULKAN_HPP_NAMESPACE::TessellationDomainOrigin; + using VULKAN_HPP_NAMESPACE::TessellationDomainOriginKHR; + + //=== VK_VERSION_1_2 === + using VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBits; + using VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DescriptorBindingFlags; + using VULKAN_HPP_NAMESPACE::DriverId; + using VULKAN_HPP_NAMESPACE::DriverIdKHR; + using VULKAN_HPP_NAMESPACE::ResolveModeFlagBits; + using VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ResolveModeFlags; + using VULKAN_HPP_NAMESPACE::SamplerReductionMode; + using VULKAN_HPP_NAMESPACE::SamplerReductionModeEXT; + using VULKAN_HPP_NAMESPACE::SemaphoreType; + using VULKAN_HPP_NAMESPACE::SemaphoreTypeKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagBits; + using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags; + using VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence; + using VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR; + + //=== VK_VERSION_1_3 === + using VULKAN_HPP_NAMESPACE::AccessFlagBits2; + using VULKAN_HPP_NAMESPACE::AccessFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::AccessFlags2; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits2; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlags2; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags; + using VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2; + using VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::PipelineStageFlags2; + using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags; + using VULKAN_HPP_NAMESPACE::RenderingFlagBits; + using VULKAN_HPP_NAMESPACE::RenderingFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::RenderingFlags; + using VULKAN_HPP_NAMESPACE::SubmitFlagBits; + using VULKAN_HPP_NAMESPACE::SubmitFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::SubmitFlags; + using VULKAN_HPP_NAMESPACE::ToolPurposeFlagBits; + using VULKAN_HPP_NAMESPACE::ToolPurposeFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ToolPurposeFlags; + + //=== VK_KHR_surface === + using VULKAN_HPP_NAMESPACE::ColorSpaceKHR; + using VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR; + using VULKAN_HPP_NAMESPACE::PresentModeKHR; + using VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR; + using VULKAN_HPP_NAMESPACE::SwapchainCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR; + + //=== VK_KHR_display === + using VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR; + using VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR; + using VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== 
VK_KHR_xlib_surface === + using VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + using VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + using VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + using VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + using VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::DebugReportFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT; + using VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT; + + //=== VK_AMD_rasterization_order === + using VULKAN_HPP_NAMESPACE::RasterizationOrderAMD; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_NAMESPACE::QueryResultStatusKHR; + using VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoCapabilityFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoCodingControlFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEndCodingFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR; + + //=== VK_KHR_video_decode_queue === + using VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR; + + //=== VK_EXT_transform_feedback === + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT; + + //=== VK_KHR_video_encode_h264 === + using VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagBitsKHR; + using 
VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagsKHR; + + //=== VK_KHR_video_encode_h265 === + using VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsKHR; + + //=== VK_KHR_video_decode_h264 === + using VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsKHR; + + //=== VK_AMD_shader_info === + using VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + using VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagBitsGGP; + using VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagBitsNV; + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBitsNV; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV; + + //=== VK_EXT_validation_flags === + using VULKAN_HPP_NAMESPACE::ValidationCheckEXT; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + using VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagBitsNN; + using VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_EXT_pipeline_robustness === + using VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT; + using VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT; + + //=== VK_EXT_conditional_rendering === + using VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT; + + //=== VK_EXT_display_surface_counter === + using VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT; + + //=== VK_EXT_display_control === + using VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT; + using VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT; + using VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT; + + //=== VK_NV_viewport_swizzle === + using VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV; + using VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV; + + //=== VK_EXT_discard_rectangles === + using VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT; + using VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT; + + //=== VK_EXT_conservative_rasterization === + using VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT; + + //=== VK_EXT_depth_clip_enable === + using VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT; + + //=== 
VK_KHR_performance_query === + using VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + using VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagBitsMVK; + using VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + using VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagBitsMVK; + using VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT; + + //=== VK_EXT_blend_operation_advanced === + using VULKAN_HPP_NAMESPACE::BlendOverlapEXT; + + //=== VK_NV_fragment_coverage_to_color === + using VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV; + using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagBitsNV; + using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR; + using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR; + using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR; + using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV; + using VULKAN_HPP_NAMESPACE::GeometryFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::GeometryFlagBitsNV; + using VULKAN_HPP_NAMESPACE::GeometryFlagsKHR; + using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagBitsNV; + using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR; + using VULKAN_HPP_NAMESPACE::GeometryTypeKHR; + using VULKAN_HPP_NAMESPACE::GeometryTypeNV; + + //=== VK_KHR_ray_tracing_pipeline === + using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR; + using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV; + using VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR; + + //=== VK_NV_framebuffer_mixed_samples === + using VULKAN_HPP_NAMESPACE::CoverageModulationModeNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV; + + //=== VK_EXT_validation_cache === + using 
VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT; + using VULKAN_HPP_NAMESPACE::ValidationCacheHeaderVersionEXT; + + //=== VK_NV_shading_rate_image === + using VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV; + using VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV; + + //=== VK_AMD_pipeline_compiler_control === + using VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagBitsAMD; + using VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD; + + //=== VK_KHR_global_priority === + using VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT; + using VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR; + + //=== VK_AMD_memory_overallocation_behavior === + using VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL; + using VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + using VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagBitsFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + using VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + using VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR; + + //=== VK_AMD_shader_core_properties2 === + using VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagBitsAMD; + using VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD; + + //=== VK_EXT_validation_features === + using VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT; + using VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT; + + //=== VK_NV_coverage_reduction_mode === + using VULKAN_HPP_NAMESPACE::CoverageReductionModeNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV; + + //=== VK_EXT_provoking_vertex === + using VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + using VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + using VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT; + + //=== VK_EXT_line_rasterization === + using VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT; + + //=== VK_KHR_pipeline_executable_properties === + using VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR; + + //=== VK_EXT_host_image_copy === + using VULKAN_HPP_NAMESPACE::HostImageCopyFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT; + + //=== VK_KHR_map_memory2 === + using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagsKHR; + + //=== VK_EXT_surface_maintenance1 === + using VULKAN_HPP_NAMESPACE::PresentGravityFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT; + using VULKAN_HPP_NAMESPACE::PresentScalingFlagBitsEXT; + using 
VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagBitsNV; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV; + using VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV; + using VULKAN_HPP_NAMESPACE::IndirectStateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV; + + //=== VK_EXT_depth_bias_control === + using VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT; + + //=== VK_EXT_device_memory_report === + using VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT; + using VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT; + + //=== VK_KHR_video_encode_queue === + using VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR; + + //=== VK_NV_device_diagnostics_config === + using VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagBitsNV; + using VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + using VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagsEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_graphics_pipeline_library === + using VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT; + + //=== VK_NV_fragment_shading_rate_enums === + using VULKAN_HPP_NAMESPACE::FragmentShadingRateNV; + using VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV; + + //=== VK_NV_ray_tracing_motion_blur === + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagBitsNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagBitsNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV; + + //=== VK_EXT_image_compression_control === + using VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT; + using VULKAN_HPP_NAMESPACE::ImageCompressionFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT; + + //=== VK_EXT_device_fault === + using VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT; + using VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + using VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== 
VK_EXT_device_address_binding_report === + using VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT; + using VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagBitsFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagBitsFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_frame_boundary === + using VULKAN_HPP_NAMESPACE::FrameBoundaryFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + using VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagBitsQNX; + using VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_NAMESPACE::BuildMicromapFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT; + using VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT; + using VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT; + using VULKAN_HPP_NAMESPACE::MicromapCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT; + using VULKAN_HPP_NAMESPACE::MicromapTypeEXT; + using VULKAN_HPP_NAMESPACE::OpacityMicromapFormatEXT; + using VULKAN_HPP_NAMESPACE::OpacityMicromapSpecialIndexEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + using VULKAN_HPP_NAMESPACE::DisplacementMicromapFormatNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_ARM_scheduling_controls === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagBitsARM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM; + + //=== VK_NV_memory_decompression === + using VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagBitsNV; + using VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV; + + //=== VK_EXT_subpass_merge_feedback === + using VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT; + + //=== VK_LUNARG_direct_driver_loading === + using VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagBitsLUNARG; + using VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG; + using VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagBitsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagBitsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagBitsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV; + + //=== VK_KHR_maintenance5 === + using VULKAN_HPP_NAMESPACE::BufferUsageFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT; + using VULKAN_HPP_NAMESPACE::ShaderCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT; + + //=== VK_NV_ray_tracing_invocation_reorder === + using 
VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV; + + //=== VK_EXT_layer_settings === + using VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT; + + //=== VK_NV_low_latency2 === + using VULKAN_HPP_NAMESPACE::LatencyMarkerNV; + using VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV; + + //=== VK_KHR_cooperative_matrix === + using VULKAN_HPP_NAMESPACE::ComponentTypeKHR; + using VULKAN_HPP_NAMESPACE::ComponentTypeNV; + using VULKAN_HPP_NAMESPACE::ScopeKHR; + using VULKAN_HPP_NAMESPACE::ScopeNV; + + //=== VK_QCOM_image_processing2 === + using VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM; + + //=== VK_QCOM_filter_cubic_weights === + using VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM; + + //=== VK_MSFT_layered_driver === + using VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT; + + //=== VK_KHR_calibrated_timestamps === + using VULKAN_HPP_NAMESPACE::TimeDomainEXT; + using VULKAN_HPP_NAMESPACE::TimeDomainKHR; + + //========================= + //=== Index Type Traits === + //========================= + using VULKAN_HPP_NAMESPACE::IndexTypeValue; + + //====================== + //=== ENUM to_string === + //====================== +#if !defined( VULKAN_HPP_NO_TO_STRING ) + using VULKAN_HPP_NAMESPACE::to_string; + using VULKAN_HPP_NAMESPACE::toHexString; +#endif /*VULKAN_HPP_NO_TO_STRING*/ + + //============================= + //=== EXCEPTIONs AND ERRORs === + //============================= +#if !defined( VULKAN_HPP_NO_EXCEPTIONS ) + using VULKAN_HPP_NAMESPACE::DeviceLostError; + using VULKAN_HPP_NAMESPACE::Error; + using VULKAN_HPP_NAMESPACE::errorCategory; + using VULKAN_HPP_NAMESPACE::ErrorCategoryImpl; + using VULKAN_HPP_NAMESPACE::ExtensionNotPresentError; + using VULKAN_HPP_NAMESPACE::FeatureNotPresentError; + using VULKAN_HPP_NAMESPACE::FormatNotSupportedError; + using VULKAN_HPP_NAMESPACE::FragmentationError; + using VULKAN_HPP_NAMESPACE::FragmentedPoolError; + using VULKAN_HPP_NAMESPACE::ImageUsageNotSupportedKHRError; + using VULKAN_HPP_NAMESPACE::IncompatibleDisplayKHRError; + using VULKAN_HPP_NAMESPACE::IncompatibleDriverError; + using VULKAN_HPP_NAMESPACE::InitializationFailedError; + using VULKAN_HPP_NAMESPACE::InvalidDrmFormatModifierPlaneLayoutEXTError; + using VULKAN_HPP_NAMESPACE::InvalidExternalHandleError; + using VULKAN_HPP_NAMESPACE::InvalidOpaqueCaptureAddressError; + using VULKAN_HPP_NAMESPACE::InvalidShaderNVError; + using VULKAN_HPP_NAMESPACE::LayerNotPresentError; + using VULKAN_HPP_NAMESPACE::LogicError; + using VULKAN_HPP_NAMESPACE::make_error_code; + using VULKAN_HPP_NAMESPACE::make_error_condition; + using VULKAN_HPP_NAMESPACE::MemoryMapFailedError; + using VULKAN_HPP_NAMESPACE::NativeWindowInUseKHRError; + using VULKAN_HPP_NAMESPACE::NotPermittedKHRError; + using VULKAN_HPP_NAMESPACE::OutOfDateKHRError; + using VULKAN_HPP_NAMESPACE::OutOfDeviceMemoryError; + using VULKAN_HPP_NAMESPACE::OutOfHostMemoryError; + using VULKAN_HPP_NAMESPACE::OutOfPoolMemoryError; + using VULKAN_HPP_NAMESPACE::SurfaceLostKHRError; + using VULKAN_HPP_NAMESPACE::SystemError; + using VULKAN_HPP_NAMESPACE::TooManyObjectsError; + using VULKAN_HPP_NAMESPACE::UnknownError; + using VULKAN_HPP_NAMESPACE::ValidationFailedEXTError; + using VULKAN_HPP_NAMESPACE::VideoPictureLayoutNotSupportedKHRError; + using VULKAN_HPP_NAMESPACE::VideoProfileCodecNotSupportedKHRError; + using VULKAN_HPP_NAMESPACE::VideoProfileFormatNotSupportedKHRError; + using VULKAN_HPP_NAMESPACE::VideoProfileOperationNotSupportedKHRError; + using VULKAN_HPP_NAMESPACE::VideoStdVersionNotSupportedKHRError; + +# if defined( 
VK_USE_PLATFORM_WIN32_KHR ) + using VULKAN_HPP_NAMESPACE::FullScreenExclusiveModeLostEXTError; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + using VULKAN_HPP_NAMESPACE::CompressionExhaustedEXTError; + using VULKAN_HPP_NAMESPACE::IncompatibleShaderBinaryEXTError; + using VULKAN_HPP_NAMESPACE::InvalidVideoStdParametersKHRError; +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + using VULKAN_HPP_NAMESPACE::createResultValueType; + using VULKAN_HPP_NAMESPACE::ignore; + using VULKAN_HPP_NAMESPACE::resultCheck; + using VULKAN_HPP_NAMESPACE::ResultValue; + using VULKAN_HPP_NAMESPACE::ResultValueType; + + //=========================== + //=== CONSTEXPR CONSTANTs === + //=========================== + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::AttachmentUnused; + using VULKAN_HPP_NAMESPACE::False; + using VULKAN_HPP_NAMESPACE::LodClampNone; + using VULKAN_HPP_NAMESPACE::MaxDescriptionSize; + using VULKAN_HPP_NAMESPACE::MaxExtensionNameSize; + using VULKAN_HPP_NAMESPACE::MaxMemoryHeaps; + using VULKAN_HPP_NAMESPACE::MaxMemoryTypes; + using VULKAN_HPP_NAMESPACE::MaxPhysicalDeviceNameSize; + using VULKAN_HPP_NAMESPACE::QueueFamilyIgnored; + using VULKAN_HPP_NAMESPACE::RemainingArrayLayers; + using VULKAN_HPP_NAMESPACE::RemainingMipLevels; + using VULKAN_HPP_NAMESPACE::SubpassExternal; + using VULKAN_HPP_NAMESPACE::True; + using VULKAN_HPP_NAMESPACE::UuidSize; + using VULKAN_HPP_NAMESPACE::WholeSize; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::LuidSize; + using VULKAN_HPP_NAMESPACE::MaxDeviceGroupSize; + using VULKAN_HPP_NAMESPACE::QueueFamilyExternal; + + //=== VK_VERSION_1_2 === + using VULKAN_HPP_NAMESPACE::MaxDriverInfoSize; + using VULKAN_HPP_NAMESPACE::MaxDriverNameSize; + + //=== VK_KHR_surface === + using VULKAN_HPP_NAMESPACE::KHRSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSurfaceSpecVersion; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_NAMESPACE::KHRSwapchainExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSwapchainSpecVersion; + + //=== VK_KHR_display === + using VULKAN_HPP_NAMESPACE::KHRDisplayExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDisplaySpecVersion; + + //=== VK_KHR_display_swapchain === + using VULKAN_HPP_NAMESPACE::KHRDisplaySwapchainExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDisplaySwapchainSpecVersion; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + using VULKAN_HPP_NAMESPACE::KHRXlibSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::KHRXlibSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + using VULKAN_HPP_NAMESPACE::KHRXcbSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::KHRXcbSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + using VULKAN_HPP_NAMESPACE::KHRWaylandSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::KHRWaylandSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + using VULKAN_HPP_NAMESPACE::KHRAndroidSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::KHRAndroidSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + using VULKAN_HPP_NAMESPACE::KHRWin32SurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::KHRWin32SurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::EXTDebugReportExtensionName; + 
using VULKAN_HPP_NAMESPACE::EXTDebugReportSpecVersion; + + //=== VK_NV_glsl_shader === + using VULKAN_HPP_NAMESPACE::NVGlslShaderExtensionName; + using VULKAN_HPP_NAMESPACE::NVGlslShaderSpecVersion; + + //=== VK_EXT_depth_range_unrestricted === + using VULKAN_HPP_NAMESPACE::EXTDepthRangeUnrestrictedExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDepthRangeUnrestrictedSpecVersion; + + //=== VK_KHR_sampler_mirror_clamp_to_edge === + using VULKAN_HPP_NAMESPACE::KHRSamplerMirrorClampToEdgeExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSamplerMirrorClampToEdgeSpecVersion; + + //=== VK_IMG_filter_cubic === + using VULKAN_HPP_NAMESPACE::IMGFilterCubicExtensionName; + using VULKAN_HPP_NAMESPACE::IMGFilterCubicSpecVersion; + + //=== VK_AMD_rasterization_order === + using VULKAN_HPP_NAMESPACE::AMDRasterizationOrderExtensionName; + using VULKAN_HPP_NAMESPACE::AMDRasterizationOrderSpecVersion; + + //=== VK_AMD_shader_trinary_minmax === + using VULKAN_HPP_NAMESPACE::AMDShaderTrinaryMinmaxExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderTrinaryMinmaxSpecVersion; + + //=== VK_AMD_shader_explicit_vertex_parameter === + using VULKAN_HPP_NAMESPACE::AMDShaderExplicitVertexParameterExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderExplicitVertexParameterSpecVersion; + + //=== VK_EXT_debug_marker === + using VULKAN_HPP_NAMESPACE::EXTDebugMarkerExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDebugMarkerSpecVersion; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_NAMESPACE::KHRVideoQueueExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoQueueSpecVersion; + + //=== VK_KHR_video_decode_queue === + using VULKAN_HPP_NAMESPACE::KHRVideoDecodeQueueExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoDecodeQueueSpecVersion; + + //=== VK_AMD_gcn_shader === + using VULKAN_HPP_NAMESPACE::AMDGcnShaderExtensionName; + using VULKAN_HPP_NAMESPACE::AMDGcnShaderSpecVersion; + + //=== VK_NV_dedicated_allocation === + using VULKAN_HPP_NAMESPACE::NVDedicatedAllocationExtensionName; + using VULKAN_HPP_NAMESPACE::NVDedicatedAllocationSpecVersion; + + //=== VK_EXT_transform_feedback === + using VULKAN_HPP_NAMESPACE::EXTTransformFeedbackExtensionName; + using VULKAN_HPP_NAMESPACE::EXTTransformFeedbackSpecVersion; + + //=== VK_NVX_binary_import === + using VULKAN_HPP_NAMESPACE::NVXBinaryImportExtensionName; + using VULKAN_HPP_NAMESPACE::NVXBinaryImportSpecVersion; + + //=== VK_NVX_image_view_handle === + using VULKAN_HPP_NAMESPACE::NVXImageViewHandleExtensionName; + using VULKAN_HPP_NAMESPACE::NVXImageViewHandleSpecVersion; + + //=== VK_AMD_draw_indirect_count === + using VULKAN_HPP_NAMESPACE::AMDDrawIndirectCountExtensionName; + using VULKAN_HPP_NAMESPACE::AMDDrawIndirectCountSpecVersion; + + //=== VK_AMD_negative_viewport_height === + using VULKAN_HPP_NAMESPACE::AMDNegativeViewportHeightExtensionName; + using VULKAN_HPP_NAMESPACE::AMDNegativeViewportHeightSpecVersion; + + //=== VK_AMD_gpu_shader_half_float === + using VULKAN_HPP_NAMESPACE::AMDGpuShaderHalfFloatExtensionName; + using VULKAN_HPP_NAMESPACE::AMDGpuShaderHalfFloatSpecVersion; + + //=== VK_AMD_shader_ballot === + using VULKAN_HPP_NAMESPACE::AMDShaderBallotExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderBallotSpecVersion; + + //=== VK_KHR_video_encode_h264 === + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeH264ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeH264SpecVersion; + + //=== VK_KHR_video_encode_h265 === + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeH265ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeH265SpecVersion; + + //=== 
VK_KHR_video_decode_h264 === + using VULKAN_HPP_NAMESPACE::KHRVideoDecodeH264ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoDecodeH264SpecVersion; + + //=== VK_AMD_texture_gather_bias_lod === + using VULKAN_HPP_NAMESPACE::AMDTextureGatherBiasLodExtensionName; + using VULKAN_HPP_NAMESPACE::AMDTextureGatherBiasLodSpecVersion; + + //=== VK_AMD_shader_info === + using VULKAN_HPP_NAMESPACE::AMDShaderInfoExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderInfoSpecVersion; + + //=== VK_KHR_dynamic_rendering === + using VULKAN_HPP_NAMESPACE::KHRDynamicRenderingExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDynamicRenderingSpecVersion; + + //=== VK_AMD_shader_image_load_store_lod === + using VULKAN_HPP_NAMESPACE::AMDShaderImageLoadStoreLodExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderImageLoadStoreLodSpecVersion; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + using VULKAN_HPP_NAMESPACE::GGPStreamDescriptorSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::GGPStreamDescriptorSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_corner_sampled_image === + using VULKAN_HPP_NAMESPACE::NVCornerSampledImageExtensionName; + using VULKAN_HPP_NAMESPACE::NVCornerSampledImageSpecVersion; + + //=== VK_KHR_multiview === + using VULKAN_HPP_NAMESPACE::KHRMultiviewExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMultiviewSpecVersion; + + //=== VK_IMG_format_pvrtc === + using VULKAN_HPP_NAMESPACE::IMGFormatPvrtcExtensionName; + using VULKAN_HPP_NAMESPACE::IMGFormatPvrtcSpecVersion; + + //=== VK_NV_external_memory_capabilities === + using VULKAN_HPP_NAMESPACE::NVExternalMemoryCapabilitiesExtensionName; + using VULKAN_HPP_NAMESPACE::NVExternalMemoryCapabilitiesSpecVersion; + + //=== VK_NV_external_memory === + using VULKAN_HPP_NAMESPACE::NVExternalMemoryExtensionName; + using VULKAN_HPP_NAMESPACE::NVExternalMemorySpecVersion; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + using VULKAN_HPP_NAMESPACE::NVExternalMemoryWin32ExtensionName; + using VULKAN_HPP_NAMESPACE::NVExternalMemoryWin32SpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_win32_keyed_mutex === + using VULKAN_HPP_NAMESPACE::NVWin32KeyedMutexExtensionName; + using VULKAN_HPP_NAMESPACE::NVWin32KeyedMutexSpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + using VULKAN_HPP_NAMESPACE::KHRGetPhysicalDeviceProperties2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRGetPhysicalDeviceProperties2SpecVersion; + + //=== VK_KHR_device_group === + using VULKAN_HPP_NAMESPACE::KHRDeviceGroupExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDeviceGroupSpecVersion; + + //=== VK_EXT_validation_flags === + using VULKAN_HPP_NAMESPACE::EXTValidationFlagsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTValidationFlagsSpecVersion; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + using VULKAN_HPP_NAMESPACE::NNViSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::NNViSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_shader_draw_parameters === + using VULKAN_HPP_NAMESPACE::KHRShaderDrawParametersExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderDrawParametersSpecVersion; + + //=== VK_EXT_shader_subgroup_ballot === + using VULKAN_HPP_NAMESPACE::EXTShaderSubgroupBallotExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderSubgroupBallotSpecVersion; + + //=== VK_EXT_shader_subgroup_vote === + using 
VULKAN_HPP_NAMESPACE::EXTShaderSubgroupVoteExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderSubgroupVoteSpecVersion; + + //=== VK_EXT_texture_compression_astc_hdr === + using VULKAN_HPP_NAMESPACE::EXTTextureCompressionAstcHdrExtensionName; + using VULKAN_HPP_NAMESPACE::EXTTextureCompressionAstcHdrSpecVersion; + + //=== VK_EXT_astc_decode_mode === + using VULKAN_HPP_NAMESPACE::EXTAstcDecodeModeExtensionName; + using VULKAN_HPP_NAMESPACE::EXTAstcDecodeModeSpecVersion; + + //=== VK_EXT_pipeline_robustness === + using VULKAN_HPP_NAMESPACE::EXTPipelineRobustnessExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPipelineRobustnessSpecVersion; + + //=== VK_KHR_maintenance1 === + using VULKAN_HPP_NAMESPACE::KHRMaintenance1ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMaintenance1SpecVersion; + + //=== VK_KHR_device_group_creation === + using VULKAN_HPP_NAMESPACE::KHRDeviceGroupCreationExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDeviceGroupCreationSpecVersion; + using VULKAN_HPP_NAMESPACE::MaxDeviceGroupSizeKHR; + + //=== VK_KHR_external_memory_capabilities === + using VULKAN_HPP_NAMESPACE::KHRExternalMemoryCapabilitiesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalMemoryCapabilitiesSpecVersion; + using VULKAN_HPP_NAMESPACE::LuidSizeKHR; + + //=== VK_KHR_external_memory === + using VULKAN_HPP_NAMESPACE::KHRExternalMemoryExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalMemorySpecVersion; + using VULKAN_HPP_NAMESPACE::QueueFamilyExternalKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + using VULKAN_HPP_NAMESPACE::KHRExternalMemoryWin32ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalMemoryWin32SpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + using VULKAN_HPP_NAMESPACE::KHRExternalMemoryFdExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalMemoryFdSpecVersion; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_keyed_mutex === + using VULKAN_HPP_NAMESPACE::KHRWin32KeyedMutexExtensionName; + using VULKAN_HPP_NAMESPACE::KHRWin32KeyedMutexSpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_capabilities === + using VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreCapabilitiesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreCapabilitiesSpecVersion; + + //=== VK_KHR_external_semaphore === + using VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreSpecVersion; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + using VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreWin32ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreWin32SpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + using VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreFdExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalSemaphoreFdSpecVersion; + + //=== VK_KHR_push_descriptor === + using VULKAN_HPP_NAMESPACE::KHRPushDescriptorExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPushDescriptorSpecVersion; + + //=== VK_EXT_conditional_rendering === + using VULKAN_HPP_NAMESPACE::EXTConditionalRenderingExtensionName; + using VULKAN_HPP_NAMESPACE::EXTConditionalRenderingSpecVersion; + + //=== VK_KHR_shader_float16_int8 === + using VULKAN_HPP_NAMESPACE::KHRShaderFloat16Int8ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderFloat16Int8SpecVersion; + + //=== VK_KHR_16bit_storage === + using VULKAN_HPP_NAMESPACE::KHR16BitStorageExtensionName; 
+ using VULKAN_HPP_NAMESPACE::KHR16BitStorageSpecVersion; + + //=== VK_KHR_incremental_present === + using VULKAN_HPP_NAMESPACE::KHRIncrementalPresentExtensionName; + using VULKAN_HPP_NAMESPACE::KHRIncrementalPresentSpecVersion; + + //=== VK_KHR_descriptor_update_template === + using VULKAN_HPP_NAMESPACE::KHRDescriptorUpdateTemplateExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDescriptorUpdateTemplateSpecVersion; + + //=== VK_NV_clip_space_w_scaling === + using VULKAN_HPP_NAMESPACE::NVClipSpaceWScalingExtensionName; + using VULKAN_HPP_NAMESPACE::NVClipSpaceWScalingSpecVersion; + + //=== VK_EXT_direct_mode_display === + using VULKAN_HPP_NAMESPACE::EXTDirectModeDisplayExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDirectModeDisplaySpecVersion; + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + using VULKAN_HPP_NAMESPACE::EXTAcquireXlibDisplayExtensionName; + using VULKAN_HPP_NAMESPACE::EXTAcquireXlibDisplaySpecVersion; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + using VULKAN_HPP_NAMESPACE::EXTDisplaySurfaceCounterExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDisplaySurfaceCounterSpecVersion; + + //=== VK_EXT_display_control === + using VULKAN_HPP_NAMESPACE::EXTDisplayControlExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDisplayControlSpecVersion; + + //=== VK_GOOGLE_display_timing === + using VULKAN_HPP_NAMESPACE::GOOGLEDisplayTimingExtensionName; + using VULKAN_HPP_NAMESPACE::GOOGLEDisplayTimingSpecVersion; + + //=== VK_NV_sample_mask_override_coverage === + using VULKAN_HPP_NAMESPACE::NVSampleMaskOverrideCoverageExtensionName; + using VULKAN_HPP_NAMESPACE::NVSampleMaskOverrideCoverageSpecVersion; + + //=== VK_NV_geometry_shader_passthrough === + using VULKAN_HPP_NAMESPACE::NVGeometryShaderPassthroughExtensionName; + using VULKAN_HPP_NAMESPACE::NVGeometryShaderPassthroughSpecVersion; + + //=== VK_NV_viewport_array2 === + using VULKAN_HPP_NAMESPACE::NVViewportArray2ExtensionName; + using VULKAN_HPP_NAMESPACE::NVViewportArray2SpecVersion; + + //=== VK_NVX_multiview_per_view_attributes === + using VULKAN_HPP_NAMESPACE::NVXMultiviewPerViewAttributesExtensionName; + using VULKAN_HPP_NAMESPACE::NVXMultiviewPerViewAttributesSpecVersion; + + //=== VK_NV_viewport_swizzle === + using VULKAN_HPP_NAMESPACE::NVViewportSwizzleExtensionName; + using VULKAN_HPP_NAMESPACE::NVViewportSwizzleSpecVersion; + + //=== VK_EXT_discard_rectangles === + using VULKAN_HPP_NAMESPACE::EXTDiscardRectanglesExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDiscardRectanglesSpecVersion; + + //=== VK_EXT_conservative_rasterization === + using VULKAN_HPP_NAMESPACE::EXTConservativeRasterizationExtensionName; + using VULKAN_HPP_NAMESPACE::EXTConservativeRasterizationSpecVersion; + + //=== VK_EXT_depth_clip_enable === + using VULKAN_HPP_NAMESPACE::EXTDepthClipEnableExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDepthClipEnableSpecVersion; + + //=== VK_EXT_swapchain_colorspace === + using VULKAN_HPP_NAMESPACE::EXTSwapchainColorSpaceExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSwapchainColorSpaceSpecVersion; + + //=== VK_EXT_hdr_metadata === + using VULKAN_HPP_NAMESPACE::EXTHdrMetadataExtensionName; + using VULKAN_HPP_NAMESPACE::EXTHdrMetadataSpecVersion; + + //=== VK_KHR_imageless_framebuffer === + using VULKAN_HPP_NAMESPACE::KHRImagelessFramebufferExtensionName; + using VULKAN_HPP_NAMESPACE::KHRImagelessFramebufferSpecVersion; + + //=== VK_KHR_create_renderpass2 === + using VULKAN_HPP_NAMESPACE::KHRCreateRenderpass2ExtensionName; + 
using VULKAN_HPP_NAMESPACE::KHRCreateRenderpass2SpecVersion; + + //=== VK_IMG_relaxed_line_rasterization === + using VULKAN_HPP_NAMESPACE::IMGRelaxedLineRasterizationExtensionName; + using VULKAN_HPP_NAMESPACE::IMGRelaxedLineRasterizationSpecVersion; + + //=== VK_KHR_shared_presentable_image === + using VULKAN_HPP_NAMESPACE::KHRSharedPresentableImageExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSharedPresentableImageSpecVersion; + + //=== VK_KHR_external_fence_capabilities === + using VULKAN_HPP_NAMESPACE::KHRExternalFenceCapabilitiesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalFenceCapabilitiesSpecVersion; + + //=== VK_KHR_external_fence === + using VULKAN_HPP_NAMESPACE::KHRExternalFenceExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalFenceSpecVersion; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + using VULKAN_HPP_NAMESPACE::KHRExternalFenceWin32ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalFenceWin32SpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + using VULKAN_HPP_NAMESPACE::KHRExternalFenceFdExtensionName; + using VULKAN_HPP_NAMESPACE::KHRExternalFenceFdSpecVersion; + + //=== VK_KHR_performance_query === + using VULKAN_HPP_NAMESPACE::KHRPerformanceQueryExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPerformanceQuerySpecVersion; + + //=== VK_KHR_maintenance2 === + using VULKAN_HPP_NAMESPACE::KHRMaintenance2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMaintenance2SpecVersion; + + //=== VK_KHR_get_surface_capabilities2 === + using VULKAN_HPP_NAMESPACE::KHRGetSurfaceCapabilities2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRGetSurfaceCapabilities2SpecVersion; + + //=== VK_KHR_variable_pointers === + using VULKAN_HPP_NAMESPACE::KHRVariablePointersExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVariablePointersSpecVersion; + + //=== VK_KHR_get_display_properties2 === + using VULKAN_HPP_NAMESPACE::KHRGetDisplayProperties2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRGetDisplayProperties2SpecVersion; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + using VULKAN_HPP_NAMESPACE::MVKIosSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::MVKIosSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + using VULKAN_HPP_NAMESPACE::MVKMacosSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::MVKMacosSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_external_memory_dma_buf === + using VULKAN_HPP_NAMESPACE::EXTExternalMemoryDmaBufExtensionName; + using VULKAN_HPP_NAMESPACE::EXTExternalMemoryDmaBufSpecVersion; + + //=== VK_EXT_queue_family_foreign === + using VULKAN_HPP_NAMESPACE::EXTQueueFamilyForeignExtensionName; + using VULKAN_HPP_NAMESPACE::EXTQueueFamilyForeignSpecVersion; + using VULKAN_HPP_NAMESPACE::QueueFamilyForeignEXT; + + //=== VK_KHR_dedicated_allocation === + using VULKAN_HPP_NAMESPACE::KHRDedicatedAllocationExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDedicatedAllocationSpecVersion; + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::EXTDebugUtilsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDebugUtilsSpecVersion; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + using VULKAN_HPP_NAMESPACE::ANDROIDExternalMemoryAndroidHardwareBufferExtensionName; + using VULKAN_HPP_NAMESPACE::ANDROIDExternalMemoryAndroidHardwareBufferSpecVersion; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + 
//=== VK_EXT_sampler_filter_minmax === + using VULKAN_HPP_NAMESPACE::EXTSamplerFilterMinmaxExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSamplerFilterMinmaxSpecVersion; + + //=== VK_KHR_storage_buffer_storage_class === + using VULKAN_HPP_NAMESPACE::KHRStorageBufferStorageClassExtensionName; + using VULKAN_HPP_NAMESPACE::KHRStorageBufferStorageClassSpecVersion; + + //=== VK_AMD_gpu_shader_int16 === + using VULKAN_HPP_NAMESPACE::AMDGpuShaderInt16ExtensionName; + using VULKAN_HPP_NAMESPACE::AMDGpuShaderInt16SpecVersion; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + using VULKAN_HPP_NAMESPACE::AMDXShaderEnqueueExtensionName; + using VULKAN_HPP_NAMESPACE::AMDXShaderEnqueueSpecVersion; + using VULKAN_HPP_NAMESPACE::ShaderIndexUnusedAMDX; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_AMD_mixed_attachment_samples === + using VULKAN_HPP_NAMESPACE::AMDMixedAttachmentSamplesExtensionName; + using VULKAN_HPP_NAMESPACE::AMDMixedAttachmentSamplesSpecVersion; + + //=== VK_AMD_shader_fragment_mask === + using VULKAN_HPP_NAMESPACE::AMDShaderFragmentMaskExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderFragmentMaskSpecVersion; + + //=== VK_EXT_inline_uniform_block === + using VULKAN_HPP_NAMESPACE::EXTInlineUniformBlockExtensionName; + using VULKAN_HPP_NAMESPACE::EXTInlineUniformBlockSpecVersion; + + //=== VK_EXT_shader_stencil_export === + using VULKAN_HPP_NAMESPACE::EXTShaderStencilExportExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderStencilExportSpecVersion; + + //=== VK_EXT_sample_locations === + using VULKAN_HPP_NAMESPACE::EXTSampleLocationsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSampleLocationsSpecVersion; + + //=== VK_KHR_relaxed_block_layout === + using VULKAN_HPP_NAMESPACE::KHRRelaxedBlockLayoutExtensionName; + using VULKAN_HPP_NAMESPACE::KHRRelaxedBlockLayoutSpecVersion; + + //=== VK_KHR_get_memory_requirements2 === + using VULKAN_HPP_NAMESPACE::KHRGetMemoryRequirements2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRGetMemoryRequirements2SpecVersion; + + //=== VK_KHR_image_format_list === + using VULKAN_HPP_NAMESPACE::KHRImageFormatListExtensionName; + using VULKAN_HPP_NAMESPACE::KHRImageFormatListSpecVersion; + + //=== VK_EXT_blend_operation_advanced === + using VULKAN_HPP_NAMESPACE::EXTBlendOperationAdvancedExtensionName; + using VULKAN_HPP_NAMESPACE::EXTBlendOperationAdvancedSpecVersion; + + //=== VK_NV_fragment_coverage_to_color === + using VULKAN_HPP_NAMESPACE::NVFragmentCoverageToColorExtensionName; + using VULKAN_HPP_NAMESPACE::NVFragmentCoverageToColorSpecVersion; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_NAMESPACE::KHRAccelerationStructureExtensionName; + using VULKAN_HPP_NAMESPACE::KHRAccelerationStructureSpecVersion; + + //=== VK_KHR_ray_tracing_pipeline === + using VULKAN_HPP_NAMESPACE::KHRRayTracingPipelineExtensionName; + using VULKAN_HPP_NAMESPACE::KHRRayTracingPipelineSpecVersion; + using VULKAN_HPP_NAMESPACE::ShaderUnusedKHR; + + //=== VK_KHR_ray_query === + using VULKAN_HPP_NAMESPACE::KHRRayQueryExtensionName; + using VULKAN_HPP_NAMESPACE::KHRRayQuerySpecVersion; + + //=== VK_NV_framebuffer_mixed_samples === + using VULKAN_HPP_NAMESPACE::NVFramebufferMixedSamplesExtensionName; + using VULKAN_HPP_NAMESPACE::NVFramebufferMixedSamplesSpecVersion; + + //=== VK_NV_fill_rectangle === + using VULKAN_HPP_NAMESPACE::NVFillRectangleExtensionName; + using VULKAN_HPP_NAMESPACE::NVFillRectangleSpecVersion; + + //=== VK_NV_shader_sm_builtins === + using VULKAN_HPP_NAMESPACE::NVShaderSmBuiltinsExtensionName; + 
using VULKAN_HPP_NAMESPACE::NVShaderSmBuiltinsSpecVersion; + + //=== VK_EXT_post_depth_coverage === + using VULKAN_HPP_NAMESPACE::EXTPostDepthCoverageExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPostDepthCoverageSpecVersion; + + //=== VK_KHR_sampler_ycbcr_conversion === + using VULKAN_HPP_NAMESPACE::KHRSamplerYcbcrConversionExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSamplerYcbcrConversionSpecVersion; + + //=== VK_KHR_bind_memory2 === + using VULKAN_HPP_NAMESPACE::KHRBindMemory2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRBindMemory2SpecVersion; + + //=== VK_EXT_image_drm_format_modifier === + using VULKAN_HPP_NAMESPACE::EXTImageDrmFormatModifierExtensionName; + using VULKAN_HPP_NAMESPACE::EXTImageDrmFormatModifierSpecVersion; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_NAMESPACE::EXTValidationCacheExtensionName; + using VULKAN_HPP_NAMESPACE::EXTValidationCacheSpecVersion; + + //=== VK_EXT_descriptor_indexing === + using VULKAN_HPP_NAMESPACE::EXTDescriptorIndexingExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDescriptorIndexingSpecVersion; + + //=== VK_EXT_shader_viewport_index_layer === + using VULKAN_HPP_NAMESPACE::EXTShaderViewportIndexLayerExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderViewportIndexLayerSpecVersion; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_portability_subset === + using VULKAN_HPP_NAMESPACE::KHRPortabilitySubsetExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPortabilitySubsetSpecVersion; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_shading_rate_image === + using VULKAN_HPP_NAMESPACE::NVShadingRateImageExtensionName; + using VULKAN_HPP_NAMESPACE::NVShadingRateImageSpecVersion; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::NVRayTracingExtensionName; + using VULKAN_HPP_NAMESPACE::NVRayTracingSpecVersion; + using VULKAN_HPP_NAMESPACE::ShaderUnusedNV; + + //=== VK_NV_representative_fragment_test === + using VULKAN_HPP_NAMESPACE::NVRepresentativeFragmentTestExtensionName; + using VULKAN_HPP_NAMESPACE::NVRepresentativeFragmentTestSpecVersion; + + //=== VK_KHR_maintenance3 === + using VULKAN_HPP_NAMESPACE::KHRMaintenance3ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMaintenance3SpecVersion; + + //=== VK_KHR_draw_indirect_count === + using VULKAN_HPP_NAMESPACE::KHRDrawIndirectCountExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDrawIndirectCountSpecVersion; + + //=== VK_EXT_filter_cubic === + using VULKAN_HPP_NAMESPACE::EXTFilterCubicExtensionName; + using VULKAN_HPP_NAMESPACE::EXTFilterCubicSpecVersion; + + //=== VK_QCOM_render_pass_shader_resolve === + using VULKAN_HPP_NAMESPACE::QCOMRenderPassShaderResolveExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMRenderPassShaderResolveSpecVersion; + + //=== VK_EXT_global_priority === + using VULKAN_HPP_NAMESPACE::EXTGlobalPriorityExtensionName; + using VULKAN_HPP_NAMESPACE::EXTGlobalPrioritySpecVersion; + + //=== VK_KHR_shader_subgroup_extended_types === + using VULKAN_HPP_NAMESPACE::KHRShaderSubgroupExtendedTypesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderSubgroupExtendedTypesSpecVersion; + + //=== VK_KHR_8bit_storage === + using VULKAN_HPP_NAMESPACE::KHR8BitStorageExtensionName; + using VULKAN_HPP_NAMESPACE::KHR8BitStorageSpecVersion; + + //=== VK_EXT_external_memory_host === + using VULKAN_HPP_NAMESPACE::EXTExternalMemoryHostExtensionName; + using VULKAN_HPP_NAMESPACE::EXTExternalMemoryHostSpecVersion; + + //=== VK_AMD_buffer_marker === + using VULKAN_HPP_NAMESPACE::AMDBufferMarkerExtensionName; + using 
VULKAN_HPP_NAMESPACE::AMDBufferMarkerSpecVersion; + + //=== VK_KHR_shader_atomic_int64 === + using VULKAN_HPP_NAMESPACE::KHRShaderAtomicInt64ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderAtomicInt64SpecVersion; + + //=== VK_KHR_shader_clock === + using VULKAN_HPP_NAMESPACE::KHRShaderClockExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderClockSpecVersion; + + //=== VK_AMD_pipeline_compiler_control === + using VULKAN_HPP_NAMESPACE::AMDPipelineCompilerControlExtensionName; + using VULKAN_HPP_NAMESPACE::AMDPipelineCompilerControlSpecVersion; + + //=== VK_EXT_calibrated_timestamps === + using VULKAN_HPP_NAMESPACE::EXTCalibratedTimestampsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTCalibratedTimestampsSpecVersion; + + //=== VK_AMD_shader_core_properties === + using VULKAN_HPP_NAMESPACE::AMDShaderCorePropertiesExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderCorePropertiesSpecVersion; + + //=== VK_KHR_video_decode_h265 === + using VULKAN_HPP_NAMESPACE::KHRVideoDecodeH265ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoDecodeH265SpecVersion; + + //=== VK_KHR_global_priority === + using VULKAN_HPP_NAMESPACE::KHRGlobalPriorityExtensionName; + using VULKAN_HPP_NAMESPACE::KHRGlobalPrioritySpecVersion; + using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeKHR; + + //=== VK_AMD_memory_overallocation_behavior === + using VULKAN_HPP_NAMESPACE::AMDMemoryOverallocationBehaviorExtensionName; + using VULKAN_HPP_NAMESPACE::AMDMemoryOverallocationBehaviorSpecVersion; + + //=== VK_EXT_vertex_attribute_divisor === + using VULKAN_HPP_NAMESPACE::EXTVertexAttributeDivisorExtensionName; + using VULKAN_HPP_NAMESPACE::EXTVertexAttributeDivisorSpecVersion; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_frame_token === + using VULKAN_HPP_NAMESPACE::GGPFrameTokenExtensionName; + using VULKAN_HPP_NAMESPACE::GGPFrameTokenSpecVersion; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_EXT_pipeline_creation_feedback === + using VULKAN_HPP_NAMESPACE::EXTPipelineCreationFeedbackExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPipelineCreationFeedbackSpecVersion; + + //=== VK_KHR_driver_properties === + using VULKAN_HPP_NAMESPACE::KHRDriverPropertiesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDriverPropertiesSpecVersion; + using VULKAN_HPP_NAMESPACE::MaxDriverInfoSizeKHR; + using VULKAN_HPP_NAMESPACE::MaxDriverNameSizeKHR; + + //=== VK_KHR_shader_float_controls === + using VULKAN_HPP_NAMESPACE::KHRShaderFloatControlsExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderFloatControlsSpecVersion; + + //=== VK_NV_shader_subgroup_partitioned === + using VULKAN_HPP_NAMESPACE::NVShaderSubgroupPartitionedExtensionName; + using VULKAN_HPP_NAMESPACE::NVShaderSubgroupPartitionedSpecVersion; + + //=== VK_KHR_depth_stencil_resolve === + using VULKAN_HPP_NAMESPACE::KHRDepthStencilResolveExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDepthStencilResolveSpecVersion; + + //=== VK_KHR_swapchain_mutable_format === + using VULKAN_HPP_NAMESPACE::KHRSwapchainMutableFormatExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSwapchainMutableFormatSpecVersion; + + //=== VK_NV_compute_shader_derivatives === + using VULKAN_HPP_NAMESPACE::NVComputeShaderDerivativesExtensionName; + using VULKAN_HPP_NAMESPACE::NVComputeShaderDerivativesSpecVersion; + + //=== VK_NV_mesh_shader === + using VULKAN_HPP_NAMESPACE::NVMeshShaderExtensionName; + using VULKAN_HPP_NAMESPACE::NVMeshShaderSpecVersion; + + //=== VK_NV_fragment_shader_barycentric === + using VULKAN_HPP_NAMESPACE::NVFragmentShaderBarycentricExtensionName; + using 
VULKAN_HPP_NAMESPACE::NVFragmentShaderBarycentricSpecVersion; + + //=== VK_NV_shader_image_footprint === + using VULKAN_HPP_NAMESPACE::NVShaderImageFootprintExtensionName; + using VULKAN_HPP_NAMESPACE::NVShaderImageFootprintSpecVersion; + + //=== VK_NV_scissor_exclusive === + using VULKAN_HPP_NAMESPACE::NVScissorExclusiveExtensionName; + using VULKAN_HPP_NAMESPACE::NVScissorExclusiveSpecVersion; + + //=== VK_NV_device_diagnostic_checkpoints === + using VULKAN_HPP_NAMESPACE::NVDeviceDiagnosticCheckpointsExtensionName; + using VULKAN_HPP_NAMESPACE::NVDeviceDiagnosticCheckpointsSpecVersion; + + //=== VK_KHR_timeline_semaphore === + using VULKAN_HPP_NAMESPACE::KHRTimelineSemaphoreExtensionName; + using VULKAN_HPP_NAMESPACE::KHRTimelineSemaphoreSpecVersion; + + //=== VK_INTEL_shader_integer_functions2 === + using VULKAN_HPP_NAMESPACE::INTELShaderIntegerFunctions2ExtensionName; + using VULKAN_HPP_NAMESPACE::INTELShaderIntegerFunctions2SpecVersion; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_NAMESPACE::INTELPerformanceQueryExtensionName; + using VULKAN_HPP_NAMESPACE::INTELPerformanceQuerySpecVersion; + + //=== VK_KHR_vulkan_memory_model === + using VULKAN_HPP_NAMESPACE::KHRVulkanMemoryModelExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVulkanMemoryModelSpecVersion; + + //=== VK_EXT_pci_bus_info === + using VULKAN_HPP_NAMESPACE::EXTPciBusInfoExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPciBusInfoSpecVersion; + + //=== VK_AMD_display_native_hdr === + using VULKAN_HPP_NAMESPACE::AMDDisplayNativeHdrExtensionName; + using VULKAN_HPP_NAMESPACE::AMDDisplayNativeHdrSpecVersion; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + using VULKAN_HPP_NAMESPACE::FUCHSIAImagepipeSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::FUCHSIAImagepipeSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_KHR_shader_terminate_invocation === + using VULKAN_HPP_NAMESPACE::KHRShaderTerminateInvocationExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderTerminateInvocationSpecVersion; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + using VULKAN_HPP_NAMESPACE::EXTMetalSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMetalSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map === + using VULKAN_HPP_NAMESPACE::EXTFragmentDensityMapExtensionName; + using VULKAN_HPP_NAMESPACE::EXTFragmentDensityMapSpecVersion; + + //=== VK_EXT_scalar_block_layout === + using VULKAN_HPP_NAMESPACE::EXTScalarBlockLayoutExtensionName; + using VULKAN_HPP_NAMESPACE::EXTScalarBlockLayoutSpecVersion; + + //=== VK_GOOGLE_hlsl_functionality1 === + using VULKAN_HPP_NAMESPACE::GOOGLEHlslFunctionality1ExtensionName; + using VULKAN_HPP_NAMESPACE::GOOGLEHlslFunctionality1SpecVersion; + + //=== VK_GOOGLE_decorate_string === + using VULKAN_HPP_NAMESPACE::GOOGLEDecorateStringExtensionName; + using VULKAN_HPP_NAMESPACE::GOOGLEDecorateStringSpecVersion; + + //=== VK_EXT_subgroup_size_control === + using VULKAN_HPP_NAMESPACE::EXTSubgroupSizeControlExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSubgroupSizeControlSpecVersion; + + //=== VK_KHR_fragment_shading_rate === + using VULKAN_HPP_NAMESPACE::KHRFragmentShadingRateExtensionName; + using VULKAN_HPP_NAMESPACE::KHRFragmentShadingRateSpecVersion; + + //=== VK_AMD_shader_core_properties2 === + using VULKAN_HPP_NAMESPACE::AMDShaderCoreProperties2ExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderCoreProperties2SpecVersion; + + //=== VK_AMD_device_coherent_memory === + 
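+ // Editor's sketch, not generated content: every *ExtensionName constant exported
+ // here is a constexpr C string equal to the matching VK_*_EXTENSION_NAME macro, and
+ // every *SpecVersion is that extension's registry revision number, so the names can
+ // be passed straight to device creation, e.g. (hypothetical `deviceCreateInfo`):
+ //   std::vector<const char*> extensions{ VULKAN_HPP_NAMESPACE::EXTSubgroupSizeControlExtensionName };
+ //   deviceCreateInfo.setPEnabledExtensionNames( extensions );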
using VULKAN_HPP_NAMESPACE::AMDDeviceCoherentMemoryExtensionName; + using VULKAN_HPP_NAMESPACE::AMDDeviceCoherentMemorySpecVersion; + + //=== VK_EXT_shader_image_atomic_int64 === + using VULKAN_HPP_NAMESPACE::EXTShaderImageAtomicInt64ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderImageAtomicInt64SpecVersion; + + //=== VK_KHR_spirv_1_4 === + using VULKAN_HPP_NAMESPACE::KHRSpirv14ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSpirv14SpecVersion; + + //=== VK_EXT_memory_budget === + using VULKAN_HPP_NAMESPACE::EXTMemoryBudgetExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMemoryBudgetSpecVersion; + + //=== VK_EXT_memory_priority === + using VULKAN_HPP_NAMESPACE::EXTMemoryPriorityExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMemoryPrioritySpecVersion; + + //=== VK_KHR_surface_protected_capabilities === + using VULKAN_HPP_NAMESPACE::KHRSurfaceProtectedCapabilitiesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSurfaceProtectedCapabilitiesSpecVersion; + + //=== VK_NV_dedicated_allocation_image_aliasing === + using VULKAN_HPP_NAMESPACE::NVDedicatedAllocationImageAliasingExtensionName; + using VULKAN_HPP_NAMESPACE::NVDedicatedAllocationImageAliasingSpecVersion; + + //=== VK_KHR_separate_depth_stencil_layouts === + using VULKAN_HPP_NAMESPACE::KHRSeparateDepthStencilLayoutsExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSeparateDepthStencilLayoutsSpecVersion; + + //=== VK_EXT_buffer_device_address === + using VULKAN_HPP_NAMESPACE::EXTBufferDeviceAddressExtensionName; + using VULKAN_HPP_NAMESPACE::EXTBufferDeviceAddressSpecVersion; + + //=== VK_EXT_tooling_info === + using VULKAN_HPP_NAMESPACE::EXTToolingInfoExtensionName; + using VULKAN_HPP_NAMESPACE::EXTToolingInfoSpecVersion; + + //=== VK_EXT_separate_stencil_usage === + using VULKAN_HPP_NAMESPACE::EXTSeparateStencilUsageExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSeparateStencilUsageSpecVersion; + + //=== VK_EXT_validation_features === + using VULKAN_HPP_NAMESPACE::EXTValidationFeaturesExtensionName; + using VULKAN_HPP_NAMESPACE::EXTValidationFeaturesSpecVersion; + + //=== VK_KHR_present_wait === + using VULKAN_HPP_NAMESPACE::KHRPresentWaitExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPresentWaitSpecVersion; + + //=== VK_NV_cooperative_matrix === + using VULKAN_HPP_NAMESPACE::NVCooperativeMatrixExtensionName; + using VULKAN_HPP_NAMESPACE::NVCooperativeMatrixSpecVersion; + + //=== VK_NV_coverage_reduction_mode === + using VULKAN_HPP_NAMESPACE::NVCoverageReductionModeExtensionName; + using VULKAN_HPP_NAMESPACE::NVCoverageReductionModeSpecVersion; + + //=== VK_EXT_fragment_shader_interlock === + using VULKAN_HPP_NAMESPACE::EXTFragmentShaderInterlockExtensionName; + using VULKAN_HPP_NAMESPACE::EXTFragmentShaderInterlockSpecVersion; + + //=== VK_EXT_ycbcr_image_arrays === + using VULKAN_HPP_NAMESPACE::EXTYcbcrImageArraysExtensionName; + using VULKAN_HPP_NAMESPACE::EXTYcbcrImageArraysSpecVersion; + + //=== VK_KHR_uniform_buffer_standard_layout === + using VULKAN_HPP_NAMESPACE::KHRUniformBufferStandardLayoutExtensionName; + using VULKAN_HPP_NAMESPACE::KHRUniformBufferStandardLayoutSpecVersion; + + //=== VK_EXT_provoking_vertex === + using VULKAN_HPP_NAMESPACE::EXTProvokingVertexExtensionName; + using VULKAN_HPP_NAMESPACE::EXTProvokingVertexSpecVersion; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + using VULKAN_HPP_NAMESPACE::EXTFullScreenExclusiveExtensionName; + using VULKAN_HPP_NAMESPACE::EXTFullScreenExclusiveSpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface 
=== + using VULKAN_HPP_NAMESPACE::EXTHeadlessSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::EXTHeadlessSurfaceSpecVersion; + + //=== VK_KHR_buffer_device_address === + using VULKAN_HPP_NAMESPACE::KHRBufferDeviceAddressExtensionName; + using VULKAN_HPP_NAMESPACE::KHRBufferDeviceAddressSpecVersion; + + //=== VK_EXT_line_rasterization === + using VULKAN_HPP_NAMESPACE::EXTLineRasterizationExtensionName; + using VULKAN_HPP_NAMESPACE::EXTLineRasterizationSpecVersion; + + //=== VK_EXT_shader_atomic_float === + using VULKAN_HPP_NAMESPACE::EXTShaderAtomicFloatExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderAtomicFloatSpecVersion; + + //=== VK_EXT_host_query_reset === + using VULKAN_HPP_NAMESPACE::EXTHostQueryResetExtensionName; + using VULKAN_HPP_NAMESPACE::EXTHostQueryResetSpecVersion; + + //=== VK_EXT_index_type_uint8 === + using VULKAN_HPP_NAMESPACE::EXTIndexTypeUint8ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTIndexTypeUint8SpecVersion; + + //=== VK_EXT_extended_dynamic_state === + using VULKAN_HPP_NAMESPACE::EXTExtendedDynamicStateExtensionName; + using VULKAN_HPP_NAMESPACE::EXTExtendedDynamicStateSpecVersion; + + //=== VK_KHR_deferred_host_operations === + using VULKAN_HPP_NAMESPACE::KHRDeferredHostOperationsExtensionName; + using VULKAN_HPP_NAMESPACE::KHRDeferredHostOperationsSpecVersion; + + //=== VK_KHR_pipeline_executable_properties === + using VULKAN_HPP_NAMESPACE::KHRPipelineExecutablePropertiesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPipelineExecutablePropertiesSpecVersion; + + //=== VK_EXT_host_image_copy === + using VULKAN_HPP_NAMESPACE::EXTHostImageCopyExtensionName; + using VULKAN_HPP_NAMESPACE::EXTHostImageCopySpecVersion; + + //=== VK_KHR_map_memory2 === + using VULKAN_HPP_NAMESPACE::KHRMapMemory2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMapMemory2SpecVersion; + + //=== VK_EXT_shader_atomic_float2 === + using VULKAN_HPP_NAMESPACE::EXTShaderAtomicFloat2ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderAtomicFloat2SpecVersion; + + //=== VK_EXT_surface_maintenance1 === + using VULKAN_HPP_NAMESPACE::EXTSurfaceMaintenance1ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSurfaceMaintenance1SpecVersion; + + //=== VK_EXT_swapchain_maintenance1 === + using VULKAN_HPP_NAMESPACE::EXTSwapchainMaintenance1ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSwapchainMaintenance1SpecVersion; + + //=== VK_EXT_shader_demote_to_helper_invocation === + using VULKAN_HPP_NAMESPACE::EXTShaderDemoteToHelperInvocationExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderDemoteToHelperInvocationSpecVersion; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_NAMESPACE::NVDeviceGeneratedCommandsExtensionName; + using VULKAN_HPP_NAMESPACE::NVDeviceGeneratedCommandsSpecVersion; + + //=== VK_NV_inherited_viewport_scissor === + using VULKAN_HPP_NAMESPACE::NVInheritedViewportScissorExtensionName; + using VULKAN_HPP_NAMESPACE::NVInheritedViewportScissorSpecVersion; + + //=== VK_KHR_shader_integer_dot_product === + using VULKAN_HPP_NAMESPACE::KHRShaderIntegerDotProductExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderIntegerDotProductSpecVersion; + + //=== VK_EXT_texel_buffer_alignment === + using VULKAN_HPP_NAMESPACE::EXTTexelBufferAlignmentExtensionName; + using VULKAN_HPP_NAMESPACE::EXTTexelBufferAlignmentSpecVersion; + + //=== VK_QCOM_render_pass_transform === + using VULKAN_HPP_NAMESPACE::QCOMRenderPassTransformExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMRenderPassTransformSpecVersion; + + //=== VK_EXT_depth_bias_control === + using 
VULKAN_HPP_NAMESPACE::EXTDepthBiasControlExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDepthBiasControlSpecVersion; + + //=== VK_EXT_device_memory_report === + using VULKAN_HPP_NAMESPACE::EXTDeviceMemoryReportExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDeviceMemoryReportSpecVersion; + + //=== VK_EXT_acquire_drm_display === + using VULKAN_HPP_NAMESPACE::EXTAcquireDrmDisplayExtensionName; + using VULKAN_HPP_NAMESPACE::EXTAcquireDrmDisplaySpecVersion; + + //=== VK_EXT_robustness2 === + using VULKAN_HPP_NAMESPACE::EXTRobustness2ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTRobustness2SpecVersion; + + //=== VK_EXT_custom_border_color === + using VULKAN_HPP_NAMESPACE::EXTCustomBorderColorExtensionName; + using VULKAN_HPP_NAMESPACE::EXTCustomBorderColorSpecVersion; + + //=== VK_GOOGLE_user_type === + using VULKAN_HPP_NAMESPACE::GOOGLEUserTypeExtensionName; + using VULKAN_HPP_NAMESPACE::GOOGLEUserTypeSpecVersion; + + //=== VK_KHR_pipeline_library === + using VULKAN_HPP_NAMESPACE::KHRPipelineLibraryExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPipelineLibrarySpecVersion; + + //=== VK_NV_present_barrier === + using VULKAN_HPP_NAMESPACE::NVPresentBarrierExtensionName; + using VULKAN_HPP_NAMESPACE::NVPresentBarrierSpecVersion; + + //=== VK_KHR_shader_non_semantic_info === + using VULKAN_HPP_NAMESPACE::KHRShaderNonSemanticInfoExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderNonSemanticInfoSpecVersion; + + //=== VK_KHR_present_id === + using VULKAN_HPP_NAMESPACE::KHRPresentIdExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPresentIdSpecVersion; + + //=== VK_EXT_private_data === + using VULKAN_HPP_NAMESPACE::EXTPrivateDataExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPrivateDataSpecVersion; + + //=== VK_EXT_pipeline_creation_cache_control === + using VULKAN_HPP_NAMESPACE::EXTPipelineCreationCacheControlExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPipelineCreationCacheControlSpecVersion; + + //=== VK_KHR_video_encode_queue === + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeQueueExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeQueueSpecVersion; + + //=== VK_NV_device_diagnostics_config === + using VULKAN_HPP_NAMESPACE::NVDeviceDiagnosticsConfigExtensionName; + using VULKAN_HPP_NAMESPACE::NVDeviceDiagnosticsConfigSpecVersion; + + //=== VK_QCOM_render_pass_store_ops === + using VULKAN_HPP_NAMESPACE::QCOMRenderPassStoreOpsExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMRenderPassStoreOpsSpecVersion; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + using VULKAN_HPP_NAMESPACE::NVCudaKernelLaunchExtensionName; + using VULKAN_HPP_NAMESPACE::NVCudaKernelLaunchSpecVersion; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_low_latency === + using VULKAN_HPP_NAMESPACE::NVLowLatencyExtensionName; + using VULKAN_HPP_NAMESPACE::NVLowLatencySpecVersion; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + using VULKAN_HPP_NAMESPACE::EXTMetalObjectsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMetalObjectsSpecVersion; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + using VULKAN_HPP_NAMESPACE::KHRSynchronization2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRSynchronization2SpecVersion; + + //=== VK_EXT_descriptor_buffer === + using VULKAN_HPP_NAMESPACE::EXTDescriptorBufferExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDescriptorBufferSpecVersion; + + //=== VK_EXT_graphics_pipeline_library === + using VULKAN_HPP_NAMESPACE::EXTGraphicsPipelineLibraryExtensionName; + using 
VULKAN_HPP_NAMESPACE::EXTGraphicsPipelineLibrarySpecVersion; + + //=== VK_AMD_shader_early_and_late_fragment_tests === + using VULKAN_HPP_NAMESPACE::AMDShaderEarlyAndLateFragmentTestsExtensionName; + using VULKAN_HPP_NAMESPACE::AMDShaderEarlyAndLateFragmentTestsSpecVersion; + + //=== VK_KHR_fragment_shader_barycentric === + using VULKAN_HPP_NAMESPACE::KHRFragmentShaderBarycentricExtensionName; + using VULKAN_HPP_NAMESPACE::KHRFragmentShaderBarycentricSpecVersion; + + //=== VK_KHR_shader_subgroup_uniform_control_flow === + using VULKAN_HPP_NAMESPACE::KHRShaderSubgroupUniformControlFlowExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderSubgroupUniformControlFlowSpecVersion; + + //=== VK_KHR_zero_initialize_workgroup_memory === + using VULKAN_HPP_NAMESPACE::KHRZeroInitializeWorkgroupMemoryExtensionName; + using VULKAN_HPP_NAMESPACE::KHRZeroInitializeWorkgroupMemorySpecVersion; + + //=== VK_NV_fragment_shading_rate_enums === + using VULKAN_HPP_NAMESPACE::NVFragmentShadingRateEnumsExtensionName; + using VULKAN_HPP_NAMESPACE::NVFragmentShadingRateEnumsSpecVersion; + + //=== VK_NV_ray_tracing_motion_blur === + using VULKAN_HPP_NAMESPACE::NVRayTracingMotionBlurExtensionName; + using VULKAN_HPP_NAMESPACE::NVRayTracingMotionBlurSpecVersion; + + //=== VK_EXT_mesh_shader === + using VULKAN_HPP_NAMESPACE::EXTMeshShaderExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMeshShaderSpecVersion; + + //=== VK_EXT_ycbcr_2plane_444_formats === + using VULKAN_HPP_NAMESPACE::EXTYcbcr2Plane444FormatsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTYcbcr2Plane444FormatsSpecVersion; + + //=== VK_EXT_fragment_density_map2 === + using VULKAN_HPP_NAMESPACE::EXTFragmentDensityMap2ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTFragmentDensityMap2SpecVersion; + + //=== VK_QCOM_rotated_copy_commands === + using VULKAN_HPP_NAMESPACE::QCOMRotatedCopyCommandsExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMRotatedCopyCommandsSpecVersion; + + //=== VK_EXT_image_robustness === + using VULKAN_HPP_NAMESPACE::EXTImageRobustnessExtensionName; + using VULKAN_HPP_NAMESPACE::EXTImageRobustnessSpecVersion; + + //=== VK_KHR_workgroup_memory_explicit_layout === + using VULKAN_HPP_NAMESPACE::KHRWorkgroupMemoryExplicitLayoutExtensionName; + using VULKAN_HPP_NAMESPACE::KHRWorkgroupMemoryExplicitLayoutSpecVersion; + + //=== VK_KHR_copy_commands2 === + using VULKAN_HPP_NAMESPACE::KHRCopyCommands2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRCopyCommands2SpecVersion; + + //=== VK_EXT_image_compression_control === + using VULKAN_HPP_NAMESPACE::EXTImageCompressionControlExtensionName; + using VULKAN_HPP_NAMESPACE::EXTImageCompressionControlSpecVersion; + + //=== VK_EXT_attachment_feedback_loop_layout === + using VULKAN_HPP_NAMESPACE::EXTAttachmentFeedbackLoopLayoutExtensionName; + using VULKAN_HPP_NAMESPACE::EXTAttachmentFeedbackLoopLayoutSpecVersion; + + //=== VK_EXT_4444_formats === + using VULKAN_HPP_NAMESPACE::EXT4444FormatsExtensionName; + using VULKAN_HPP_NAMESPACE::EXT4444FormatsSpecVersion; + + //=== VK_EXT_device_fault === + using VULKAN_HPP_NAMESPACE::EXTDeviceFaultExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDeviceFaultSpecVersion; + + //=== VK_ARM_rasterization_order_attachment_access === + using VULKAN_HPP_NAMESPACE::ARMRasterizationOrderAttachmentAccessExtensionName; + using VULKAN_HPP_NAMESPACE::ARMRasterizationOrderAttachmentAccessSpecVersion; + + //=== VK_EXT_rgba10x6_formats === + using VULKAN_HPP_NAMESPACE::EXTRgba10X6FormatsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTRgba10X6FormatsSpecVersion; + +#if defined( 
VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + using VULKAN_HPP_NAMESPACE::NVAcquireWinrtDisplayExtensionName; + using VULKAN_HPP_NAMESPACE::NVAcquireWinrtDisplaySpecVersion; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + using VULKAN_HPP_NAMESPACE::EXTDirectfbSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDirectfbSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_VALVE_mutable_descriptor_type === + using VULKAN_HPP_NAMESPACE::VALVEMutableDescriptorTypeExtensionName; + using VULKAN_HPP_NAMESPACE::VALVEMutableDescriptorTypeSpecVersion; + + //=== VK_EXT_vertex_input_dynamic_state === + using VULKAN_HPP_NAMESPACE::EXTVertexInputDynamicStateExtensionName; + using VULKAN_HPP_NAMESPACE::EXTVertexInputDynamicStateSpecVersion; + + //=== VK_EXT_physical_device_drm === + using VULKAN_HPP_NAMESPACE::EXTPhysicalDeviceDrmExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPhysicalDeviceDrmSpecVersion; + + //=== VK_EXT_device_address_binding_report === + using VULKAN_HPP_NAMESPACE::EXTDeviceAddressBindingReportExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDeviceAddressBindingReportSpecVersion; + + //=== VK_EXT_depth_clip_control === + using VULKAN_HPP_NAMESPACE::EXTDepthClipControlExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDepthClipControlSpecVersion; + + //=== VK_EXT_primitive_topology_list_restart === + using VULKAN_HPP_NAMESPACE::EXTPrimitiveTopologyListRestartExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPrimitiveTopologyListRestartSpecVersion; + + //=== VK_KHR_format_feature_flags2 === + using VULKAN_HPP_NAMESPACE::KHRFormatFeatureFlags2ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRFormatFeatureFlags2SpecVersion; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + using VULKAN_HPP_NAMESPACE::FUCHSIAExternalMemoryExtensionName; + using VULKAN_HPP_NAMESPACE::FUCHSIAExternalMemorySpecVersion; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + using VULKAN_HPP_NAMESPACE::FUCHSIAExternalSemaphoreExtensionName; + using VULKAN_HPP_NAMESPACE::FUCHSIAExternalSemaphoreSpecVersion; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_NAMESPACE::FUCHSIABufferCollectionExtensionName; + using VULKAN_HPP_NAMESPACE::FUCHSIABufferCollectionSpecVersion; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + using VULKAN_HPP_NAMESPACE::HUAWEISubpassShadingExtensionName; + using VULKAN_HPP_NAMESPACE::HUAWEISubpassShadingSpecVersion; + + //=== VK_HUAWEI_invocation_mask === + using VULKAN_HPP_NAMESPACE::HUAWEIInvocationMaskExtensionName; + using VULKAN_HPP_NAMESPACE::HUAWEIInvocationMaskSpecVersion; + + //=== VK_NV_external_memory_rdma === + using VULKAN_HPP_NAMESPACE::NVExternalMemoryRdmaExtensionName; + using VULKAN_HPP_NAMESPACE::NVExternalMemoryRdmaSpecVersion; + + //=== VK_EXT_pipeline_properties === + using VULKAN_HPP_NAMESPACE::EXTPipelinePropertiesExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPipelinePropertiesSpecVersion; + + //=== VK_EXT_frame_boundary === + using VULKAN_HPP_NAMESPACE::EXTFrameBoundaryExtensionName; + using VULKAN_HPP_NAMESPACE::EXTFrameBoundarySpecVersion; + + //=== VK_EXT_multisampled_render_to_single_sampled === + using VULKAN_HPP_NAMESPACE::EXTMultisampledRenderToSingleSampledExtensionName; + using 
VULKAN_HPP_NAMESPACE::EXTMultisampledRenderToSingleSampledSpecVersion; + + //=== VK_EXT_extended_dynamic_state2 === + using VULKAN_HPP_NAMESPACE::EXTExtendedDynamicState2ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTExtendedDynamicState2SpecVersion; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + using VULKAN_HPP_NAMESPACE::QNXScreenSurfaceExtensionName; + using VULKAN_HPP_NAMESPACE::QNXScreenSurfaceSpecVersion; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + using VULKAN_HPP_NAMESPACE::EXTColorWriteEnableExtensionName; + using VULKAN_HPP_NAMESPACE::EXTColorWriteEnableSpecVersion; + + //=== VK_EXT_primitives_generated_query === + using VULKAN_HPP_NAMESPACE::EXTPrimitivesGeneratedQueryExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPrimitivesGeneratedQuerySpecVersion; + + //=== VK_KHR_ray_tracing_maintenance1 === + using VULKAN_HPP_NAMESPACE::KHRRayTracingMaintenance1ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRRayTracingMaintenance1SpecVersion; + + //=== VK_EXT_global_priority_query === + using VULKAN_HPP_NAMESPACE::EXTGlobalPriorityQueryExtensionName; + using VULKAN_HPP_NAMESPACE::EXTGlobalPriorityQuerySpecVersion; + using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeEXT; + + //=== VK_EXT_image_view_min_lod === + using VULKAN_HPP_NAMESPACE::EXTImageViewMinLodExtensionName; + using VULKAN_HPP_NAMESPACE::EXTImageViewMinLodSpecVersion; + + //=== VK_EXT_multi_draw === + using VULKAN_HPP_NAMESPACE::EXTMultiDrawExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMultiDrawSpecVersion; + + //=== VK_EXT_image_2d_view_of_3d === + using VULKAN_HPP_NAMESPACE::EXTImage2DViewOf3DExtensionName; + using VULKAN_HPP_NAMESPACE::EXTImage2DViewOf3DSpecVersion; + + //=== VK_KHR_portability_enumeration === + using VULKAN_HPP_NAMESPACE::KHRPortabilityEnumerationExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPortabilityEnumerationSpecVersion; + + //=== VK_EXT_shader_tile_image === + using VULKAN_HPP_NAMESPACE::EXTShaderTileImageExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderTileImageSpecVersion; + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_NAMESPACE::EXTOpacityMicromapExtensionName; + using VULKAN_HPP_NAMESPACE::EXTOpacityMicromapSpecVersion; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + using VULKAN_HPP_NAMESPACE::NVDisplacementMicromapExtensionName; + using VULKAN_HPP_NAMESPACE::NVDisplacementMicromapSpecVersion; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_load_store_op_none === + using VULKAN_HPP_NAMESPACE::EXTLoadStoreOpNoneExtensionName; + using VULKAN_HPP_NAMESPACE::EXTLoadStoreOpNoneSpecVersion; + + //=== VK_HUAWEI_cluster_culling_shader === + using VULKAN_HPP_NAMESPACE::HUAWEIClusterCullingShaderExtensionName; + using VULKAN_HPP_NAMESPACE::HUAWEIClusterCullingShaderSpecVersion; + + //=== VK_EXT_border_color_swizzle === + using VULKAN_HPP_NAMESPACE::EXTBorderColorSwizzleExtensionName; + using VULKAN_HPP_NAMESPACE::EXTBorderColorSwizzleSpecVersion; + + //=== VK_EXT_pageable_device_local_memory === + using VULKAN_HPP_NAMESPACE::EXTPageableDeviceLocalMemoryExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPageableDeviceLocalMemorySpecVersion; + + //=== VK_KHR_maintenance4 === + using VULKAN_HPP_NAMESPACE::KHRMaintenance4ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMaintenance4SpecVersion; + + //=== VK_ARM_shader_core_properties === + using VULKAN_HPP_NAMESPACE::ARMShaderCorePropertiesExtensionName; + using VULKAN_HPP_NAMESPACE::ARMShaderCorePropertiesSpecVersion; + + //=== 
VK_ARM_scheduling_controls === + using VULKAN_HPP_NAMESPACE::ARMSchedulingControlsExtensionName; + using VULKAN_HPP_NAMESPACE::ARMSchedulingControlsSpecVersion; + + //=== VK_EXT_image_sliced_view_of_3d === + using VULKAN_HPP_NAMESPACE::EXTImageSlicedViewOf3DExtensionName; + using VULKAN_HPP_NAMESPACE::EXTImageSlicedViewOf3DSpecVersion; + using VULKAN_HPP_NAMESPACE::Remaining3DSlicesEXT; + + //=== VK_VALVE_descriptor_set_host_mapping === + using VULKAN_HPP_NAMESPACE::VALVEDescriptorSetHostMappingExtensionName; + using VULKAN_HPP_NAMESPACE::VALVEDescriptorSetHostMappingSpecVersion; + + //=== VK_EXT_depth_clamp_zero_one === + using VULKAN_HPP_NAMESPACE::EXTDepthClampZeroOneExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDepthClampZeroOneSpecVersion; + + //=== VK_EXT_non_seamless_cube_map === + using VULKAN_HPP_NAMESPACE::EXTNonSeamlessCubeMapExtensionName; + using VULKAN_HPP_NAMESPACE::EXTNonSeamlessCubeMapSpecVersion; + + //=== VK_ARM_render_pass_striped === + using VULKAN_HPP_NAMESPACE::ARMRenderPassStripedExtensionName; + using VULKAN_HPP_NAMESPACE::ARMRenderPassStripedSpecVersion; + + //=== VK_QCOM_fragment_density_map_offset === + using VULKAN_HPP_NAMESPACE::QCOMFragmentDensityMapOffsetExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMFragmentDensityMapOffsetSpecVersion; + + //=== VK_NV_copy_memory_indirect === + using VULKAN_HPP_NAMESPACE::NVCopyMemoryIndirectExtensionName; + using VULKAN_HPP_NAMESPACE::NVCopyMemoryIndirectSpecVersion; + + //=== VK_NV_memory_decompression === + using VULKAN_HPP_NAMESPACE::NVMemoryDecompressionExtensionName; + using VULKAN_HPP_NAMESPACE::NVMemoryDecompressionSpecVersion; + + //=== VK_NV_device_generated_commands_compute === + using VULKAN_HPP_NAMESPACE::NVDeviceGeneratedCommandsComputeExtensionName; + using VULKAN_HPP_NAMESPACE::NVDeviceGeneratedCommandsComputeSpecVersion; + + //=== VK_NV_linear_color_attachment === + using VULKAN_HPP_NAMESPACE::NVLinearColorAttachmentExtensionName; + using VULKAN_HPP_NAMESPACE::NVLinearColorAttachmentSpecVersion; + + //=== VK_GOOGLE_surfaceless_query === + using VULKAN_HPP_NAMESPACE::GOOGLESurfacelessQueryExtensionName; + using VULKAN_HPP_NAMESPACE::GOOGLESurfacelessQuerySpecVersion; + + //=== VK_EXT_image_compression_control_swapchain === + using VULKAN_HPP_NAMESPACE::EXTImageCompressionControlSwapchainExtensionName; + using VULKAN_HPP_NAMESPACE::EXTImageCompressionControlSwapchainSpecVersion; + + //=== VK_QCOM_image_processing === + using VULKAN_HPP_NAMESPACE::QCOMImageProcessingExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMImageProcessingSpecVersion; + + //=== VK_EXT_nested_command_buffer === + using VULKAN_HPP_NAMESPACE::EXTNestedCommandBufferExtensionName; + using VULKAN_HPP_NAMESPACE::EXTNestedCommandBufferSpecVersion; + + //=== VK_EXT_external_memory_acquire_unmodified === + using VULKAN_HPP_NAMESPACE::EXTExternalMemoryAcquireUnmodifiedExtensionName; + using VULKAN_HPP_NAMESPACE::EXTExternalMemoryAcquireUnmodifiedSpecVersion; + + //=== VK_EXT_extended_dynamic_state3 === + using VULKAN_HPP_NAMESPACE::EXTExtendedDynamicState3ExtensionName; + using VULKAN_HPP_NAMESPACE::EXTExtendedDynamicState3SpecVersion; + + //=== VK_EXT_subpass_merge_feedback === + using VULKAN_HPP_NAMESPACE::EXTSubpassMergeFeedbackExtensionName; + using VULKAN_HPP_NAMESPACE::EXTSubpassMergeFeedbackSpecVersion; + + //=== VK_LUNARG_direct_driver_loading === + using VULKAN_HPP_NAMESPACE::LUNARGDirectDriverLoadingExtensionName; + using VULKAN_HPP_NAMESPACE::LUNARGDirectDriverLoadingSpecVersion; + + //=== VK_EXT_shader_module_identifier === + using 
VULKAN_HPP_NAMESPACE::EXTShaderModuleIdentifierExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderModuleIdentifierSpecVersion; + using VULKAN_HPP_NAMESPACE::MaxShaderModuleIdentifierSizeEXT; + + //=== VK_EXT_rasterization_order_attachment_access === + using VULKAN_HPP_NAMESPACE::EXTRasterizationOrderAttachmentAccessExtensionName; + using VULKAN_HPP_NAMESPACE::EXTRasterizationOrderAttachmentAccessSpecVersion; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_NAMESPACE::NVOpticalFlowExtensionName; + using VULKAN_HPP_NAMESPACE::NVOpticalFlowSpecVersion; + + //=== VK_EXT_legacy_dithering === + using VULKAN_HPP_NAMESPACE::EXTLegacyDitheringExtensionName; + using VULKAN_HPP_NAMESPACE::EXTLegacyDitheringSpecVersion; + + //=== VK_EXT_pipeline_protected_access === + using VULKAN_HPP_NAMESPACE::EXTPipelineProtectedAccessExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPipelineProtectedAccessSpecVersion; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_format_resolve === + using VULKAN_HPP_NAMESPACE::ANDROIDExternalFormatResolveExtensionName; + using VULKAN_HPP_NAMESPACE::ANDROIDExternalFormatResolveSpecVersion; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_KHR_maintenance5 === + using VULKAN_HPP_NAMESPACE::KHRMaintenance5ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMaintenance5SpecVersion; + + //=== VK_KHR_ray_tracing_position_fetch === + using VULKAN_HPP_NAMESPACE::KHRRayTracingPositionFetchExtensionName; + using VULKAN_HPP_NAMESPACE::KHRRayTracingPositionFetchSpecVersion; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_NAMESPACE::EXTShaderObjectExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderObjectSpecVersion; + + //=== VK_QCOM_tile_properties === + using VULKAN_HPP_NAMESPACE::QCOMTilePropertiesExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMTilePropertiesSpecVersion; + + //=== VK_SEC_amigo_profiling === + using VULKAN_HPP_NAMESPACE::SECAmigoProfilingExtensionName; + using VULKAN_HPP_NAMESPACE::SECAmigoProfilingSpecVersion; + + //=== VK_QCOM_multiview_per_view_viewports === + using VULKAN_HPP_NAMESPACE::QCOMMultiviewPerViewViewportsExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMMultiviewPerViewViewportsSpecVersion; + + //=== VK_NV_ray_tracing_invocation_reorder === + using VULKAN_HPP_NAMESPACE::NVRayTracingInvocationReorderExtensionName; + using VULKAN_HPP_NAMESPACE::NVRayTracingInvocationReorderSpecVersion; + + //=== VK_NV_extended_sparse_address_space === + using VULKAN_HPP_NAMESPACE::NVExtendedSparseAddressSpaceExtensionName; + using VULKAN_HPP_NAMESPACE::NVExtendedSparseAddressSpaceSpecVersion; + + //=== VK_EXT_mutable_descriptor_type === + using VULKAN_HPP_NAMESPACE::EXTMutableDescriptorTypeExtensionName; + using VULKAN_HPP_NAMESPACE::EXTMutableDescriptorTypeSpecVersion; + + //=== VK_EXT_layer_settings === + using VULKAN_HPP_NAMESPACE::EXTLayerSettingsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTLayerSettingsSpecVersion; + + //=== VK_ARM_shader_core_builtins === + using VULKAN_HPP_NAMESPACE::ARMShaderCoreBuiltinsExtensionName; + using VULKAN_HPP_NAMESPACE::ARMShaderCoreBuiltinsSpecVersion; + + //=== VK_EXT_pipeline_library_group_handles === + using VULKAN_HPP_NAMESPACE::EXTPipelineLibraryGroupHandlesExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPipelineLibraryGroupHandlesSpecVersion; + + //=== VK_EXT_dynamic_rendering_unused_attachments === + using VULKAN_HPP_NAMESPACE::EXTDynamicRenderingUnusedAttachmentsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDynamicRenderingUnusedAttachmentsSpecVersion; + + //=== VK_NV_low_latency2 === + 
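+ // Editor's note, not generated content: blocks wrapped in #if defined( VK_USE_PLATFORM_* ),
+ // like the VK_ANDROID_external_format_resolve names above, are only exported when the
+ // corresponding platform macro is defined when this module is built, e.g. by compiling
+ // with -DVK_USE_PLATFORM_ANDROID_KHR.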
using VULKAN_HPP_NAMESPACE::NVLowLatency2ExtensionName; + using VULKAN_HPP_NAMESPACE::NVLowLatency2SpecVersion; + + //=== VK_KHR_cooperative_matrix === + using VULKAN_HPP_NAMESPACE::KHRCooperativeMatrixExtensionName; + using VULKAN_HPP_NAMESPACE::KHRCooperativeMatrixSpecVersion; + + //=== VK_QCOM_multiview_per_view_render_areas === + using VULKAN_HPP_NAMESPACE::QCOMMultiviewPerViewRenderAreasExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMMultiviewPerViewRenderAreasSpecVersion; + + //=== VK_KHR_video_maintenance1 === + using VULKAN_HPP_NAMESPACE::KHRVideoMaintenance1ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoMaintenance1SpecVersion; + + //=== VK_NV_per_stage_descriptor_set === + using VULKAN_HPP_NAMESPACE::NVPerStageDescriptorSetExtensionName; + using VULKAN_HPP_NAMESPACE::NVPerStageDescriptorSetSpecVersion; + + //=== VK_QCOM_image_processing2 === + using VULKAN_HPP_NAMESPACE::QCOMImageProcessing2ExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMImageProcessing2SpecVersion; + + //=== VK_QCOM_filter_cubic_weights === + using VULKAN_HPP_NAMESPACE::QCOMFilterCubicWeightsExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMFilterCubicWeightsSpecVersion; + + //=== VK_QCOM_ycbcr_degamma === + using VULKAN_HPP_NAMESPACE::QCOMYcbcrDegammaExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMYcbcrDegammaSpecVersion; + + //=== VK_QCOM_filter_cubic_clamp === + using VULKAN_HPP_NAMESPACE::QCOMFilterCubicClampExtensionName; + using VULKAN_HPP_NAMESPACE::QCOMFilterCubicClampSpecVersion; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + using VULKAN_HPP_NAMESPACE::EXTAttachmentFeedbackLoopDynamicStateExtensionName; + using VULKAN_HPP_NAMESPACE::EXTAttachmentFeedbackLoopDynamicStateSpecVersion; + + //=== VK_KHR_vertex_attribute_divisor === + using VULKAN_HPP_NAMESPACE::KHRVertexAttributeDivisorExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVertexAttributeDivisorSpecVersion; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + using VULKAN_HPP_NAMESPACE::QNXExternalMemoryScreenBufferExtensionName; + using VULKAN_HPP_NAMESPACE::QNXExternalMemoryScreenBufferSpecVersion; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_MSFT_layered_driver === + using VULKAN_HPP_NAMESPACE::MSFTLayeredDriverExtensionName; + using VULKAN_HPP_NAMESPACE::MSFTLayeredDriverSpecVersion; + + //=== VK_KHR_calibrated_timestamps === + using VULKAN_HPP_NAMESPACE::KHRCalibratedTimestampsExtensionName; + using VULKAN_HPP_NAMESPACE::KHRCalibratedTimestampsSpecVersion; + + //=== VK_KHR_maintenance6 === + using VULKAN_HPP_NAMESPACE::KHRMaintenance6ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMaintenance6SpecVersion; + + //=== VK_NV_descriptor_pool_overallocation === + using VULKAN_HPP_NAMESPACE::NVDescriptorPoolOverallocationExtensionName; + using VULKAN_HPP_NAMESPACE::NVDescriptorPoolOverallocationSpecVersion; + + //======================== + //=== CONSTEXPR VALUEs === + //======================== + using VULKAN_HPP_NAMESPACE::HeaderVersion; + + //========================= + //=== CONSTEXPR CALLEEs === + //========================= + using VULKAN_HPP_NAMESPACE::apiVersionMajor; + using VULKAN_HPP_NAMESPACE::apiVersionMinor; + using VULKAN_HPP_NAMESPACE::apiVersionPatch; + using VULKAN_HPP_NAMESPACE::apiVersionVariant; + using VULKAN_HPP_NAMESPACE::makeApiVersion; + using VULKAN_HPP_NAMESPACE::makeVersion; + using VULKAN_HPP_NAMESPACE::versionMajor; + using VULKAN_HPP_NAMESPACE::versionMinor; + using VULKAN_HPP_NAMESPACE::versionPatch; + + //========================== + //=== 
CONSTEXPR CALLERs === + //========================== + using VULKAN_HPP_NAMESPACE::ApiVersion; + using VULKAN_HPP_NAMESPACE::ApiVersion10; + using VULKAN_HPP_NAMESPACE::ApiVersion11; + using VULKAN_HPP_NAMESPACE::ApiVersion12; + using VULKAN_HPP_NAMESPACE::ApiVersion13; + using VULKAN_HPP_NAMESPACE::HeaderVersionComplete; + + //=============== + //=== STRUCTs === + //=============== + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::AllocationCallbacks; + using VULKAN_HPP_NAMESPACE::ApplicationInfo; + using VULKAN_HPP_NAMESPACE::AttachmentDescription; + using VULKAN_HPP_NAMESPACE::AttachmentReference; + using VULKAN_HPP_NAMESPACE::BaseInStructure; + using VULKAN_HPP_NAMESPACE::BaseOutStructure; + using VULKAN_HPP_NAMESPACE::BindSparseInfo; + using VULKAN_HPP_NAMESPACE::BufferCopy; + using VULKAN_HPP_NAMESPACE::BufferCreateInfo; + using VULKAN_HPP_NAMESPACE::BufferImageCopy; + using VULKAN_HPP_NAMESPACE::BufferMemoryBarrier; + using VULKAN_HPP_NAMESPACE::BufferViewCreateInfo; + using VULKAN_HPP_NAMESPACE::ClearAttachment; + using VULKAN_HPP_NAMESPACE::ClearColorValue; + using VULKAN_HPP_NAMESPACE::ClearDepthStencilValue; + using VULKAN_HPP_NAMESPACE::ClearRect; + using VULKAN_HPP_NAMESPACE::ClearValue; + using VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo; + using VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo; + using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo; + using VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo; + using VULKAN_HPP_NAMESPACE::ComponentMapping; + using VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo; + using VULKAN_HPP_NAMESPACE::CopyDescriptorSet; + using VULKAN_HPP_NAMESPACE::DescriptorBufferInfo; + using VULKAN_HPP_NAMESPACE::DescriptorImageInfo; + using VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo; + using VULKAN_HPP_NAMESPACE::DescriptorPoolSize; + using VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo; + using VULKAN_HPP_NAMESPACE::DeviceCreateInfo; + using VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo; + using VULKAN_HPP_NAMESPACE::DispatchIndirectCommand; + using VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand; + using VULKAN_HPP_NAMESPACE::DrawIndirectCommand; + using VULKAN_HPP_NAMESPACE::EventCreateInfo; + using VULKAN_HPP_NAMESPACE::ExtensionProperties; + using VULKAN_HPP_NAMESPACE::Extent2D; + using VULKAN_HPP_NAMESPACE::Extent3D; + using VULKAN_HPP_NAMESPACE::FenceCreateInfo; + using VULKAN_HPP_NAMESPACE::FormatProperties; + using VULKAN_HPP_NAMESPACE::FramebufferCreateInfo; + using VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo; + using VULKAN_HPP_NAMESPACE::ImageBlit; + using VULKAN_HPP_NAMESPACE::ImageCopy; + using VULKAN_HPP_NAMESPACE::ImageCreateInfo; + using VULKAN_HPP_NAMESPACE::ImageFormatProperties; + using VULKAN_HPP_NAMESPACE::ImageMemoryBarrier; + using VULKAN_HPP_NAMESPACE::ImageResolve; + using VULKAN_HPP_NAMESPACE::ImageSubresource; + using VULKAN_HPP_NAMESPACE::ImageSubresourceLayers; + using VULKAN_HPP_NAMESPACE::ImageSubresourceRange; + using VULKAN_HPP_NAMESPACE::ImageViewCreateInfo; + using VULKAN_HPP_NAMESPACE::InstanceCreateInfo; + using VULKAN_HPP_NAMESPACE::LayerProperties; + using VULKAN_HPP_NAMESPACE::MappedMemoryRange; + using VULKAN_HPP_NAMESPACE::MemoryAllocateInfo; + using VULKAN_HPP_NAMESPACE::MemoryBarrier; + using VULKAN_HPP_NAMESPACE::MemoryHeap; + using VULKAN_HPP_NAMESPACE::MemoryRequirements; + using VULKAN_HPP_NAMESPACE::MemoryType; + using VULKAN_HPP_NAMESPACE::Offset2D; + using 
VULKAN_HPP_NAMESPACE::Offset3D; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties; + using VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne; + using VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState; + using VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PushConstantRange; + using VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo; + using VULKAN_HPP_NAMESPACE::QueueFamilyProperties; + using VULKAN_HPP_NAMESPACE::Rect2D; + using VULKAN_HPP_NAMESPACE::RenderPassBeginInfo; + using VULKAN_HPP_NAMESPACE::RenderPassCreateInfo; + using VULKAN_HPP_NAMESPACE::SamplerCreateInfo; + using VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo; + using VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo; + using VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo; + using VULKAN_HPP_NAMESPACE::SparseImageFormatProperties; + using VULKAN_HPP_NAMESPACE::SparseImageMemoryBind; + using VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo; + using VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements; + using VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo; + using VULKAN_HPP_NAMESPACE::SparseMemoryBind; + using VULKAN_HPP_NAMESPACE::SpecializationInfo; + using VULKAN_HPP_NAMESPACE::SpecializationMapEntry; + using VULKAN_HPP_NAMESPACE::StencilOpState; + using VULKAN_HPP_NAMESPACE::SubmitInfo; + using VULKAN_HPP_NAMESPACE::SubpassDependency; + using VULKAN_HPP_NAMESPACE::SubpassDescription; + using VULKAN_HPP_NAMESPACE::SubresourceLayout; + using VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription; + using VULKAN_HPP_NAMESPACE::VertexInputBindingDescription; + using VULKAN_HPP_NAMESPACE::Viewport; + using VULKAN_HPP_NAMESPACE::WriteDescriptorSet; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo; + using VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfoKHR; + using VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo; + using VULKAN_HPP_NAMESPACE::BindBufferMemoryInfoKHR; + using VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo; + using VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfoKHR; + using VULKAN_HPP_NAMESPACE::BindImageMemoryInfo; + using VULKAN_HPP_NAMESPACE::BindImageMemoryInfoKHR; + using VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo; + using VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfoKHR; + using VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2; + using VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2KHR; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupportKHR; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo; + using 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntryKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo; + using VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo; + using VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo; + using VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo; + using VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo; + using VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceQueueInfo2; + using VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo; + using VULKAN_HPP_NAMESPACE::ExportFenceCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo; + using VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoKHR; + using VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo; + using VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ExternalBufferProperties; + using VULKAN_HPP_NAMESPACE::ExternalBufferPropertiesKHR; + using VULKAN_HPP_NAMESPACE::ExternalFenceProperties; + using VULKAN_HPP_NAMESPACE::ExternalFencePropertiesKHR; + using VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties; + using VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesKHR; + using VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo; + using VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo; + using VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ExternalMemoryProperties; + using VULKAN_HPP_NAMESPACE::ExternalMemoryPropertiesKHR; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties; + using VULKAN_HPP_NAMESPACE::ExternalSemaphorePropertiesKHR; + using VULKAN_HPP_NAMESPACE::FormatProperties2; + using VULKAN_HPP_NAMESPACE::FormatProperties2KHR; + using VULKAN_HPP_NAMESPACE::ImageFormatProperties2; + using VULKAN_HPP_NAMESPACE::ImageFormatProperties2KHR; + using VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2; + using VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2KHR; + using VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo; + using VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfoKHR; + using VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2; + using VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2KHR; + using VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo; + using VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference; + using VULKAN_HPP_NAMESPACE::InputAttachmentAspectReferenceKHR; + using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo; + using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfoKHR; + using VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo; + using VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfoKHR; + using VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements; + using VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirementsKHR; + using VULKAN_HPP_NAMESPACE::MemoryRequirements2; + using VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceIDPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3PropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParameterFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointerFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointerFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo; + using VULKAN_HPP_NAMESPACE::QueueFamilyProperties2; + using VULKAN_HPP_NAMESPACE::QueueFamilyProperties2KHR; + using VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo; + using VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo; + using VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatPropertiesKHR; + 
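+ // Editor's sketch, not generated content: the promoted VK_VERSION_1_1 structs above keep
+ // their KHR aliases exported, so either spelling compiles; feature structs are usually
+ // read through a pNext chain, e.g. (assuming a valid vk::PhysicalDevice `physicalDevice`):
+ //   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
+ //                                            VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>();
+ //   bool ok = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>().storageBuffer16BitAccess;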
using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfoKHR; + using VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2; + using VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2KHR; + using VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2; + using VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2KHR; + + //=== VK_VERSION_1_2 === + using VULKAN_HPP_NAMESPACE::AttachmentDescription2; + using VULKAN_HPP_NAMESPACE::AttachmentDescription2KHR; + using VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout; + using VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayoutKHR; + using VULKAN_HPP_NAMESPACE::AttachmentReference2; + using VULKAN_HPP_NAMESPACE::AttachmentReference2KHR; + using VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout; + using VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayoutKHR; + using VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo; + using VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoEXT; + using VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoKHR; + using VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo; + using VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ConformanceVersion; + using VULKAN_HPP_NAMESPACE::ConformanceVersionKHR; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo; + using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfoEXT; + using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport; + using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupportEXT; + using VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo; + using VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfoKHR; + using VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo; + using VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfoKHR; + using VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo; + using VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo; + using VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo; + using VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo; + using VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolvePropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFloat16Int8FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphorePropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeaturesKHR; + using VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo; + using VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfoKHR; + using VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2; + using VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2KHR; + using VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo; + using VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo; + using VULKAN_HPP_NAMESPACE::SemaphoreSignalInfoKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo; + using VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo; + using VULKAN_HPP_NAMESPACE::SemaphoreWaitInfoKHR; + using VULKAN_HPP_NAMESPACE::SubpassBeginInfo; + using VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR; + using VULKAN_HPP_NAMESPACE::SubpassDependency2; + using VULKAN_HPP_NAMESPACE::SubpassDependency2KHR; + using VULKAN_HPP_NAMESPACE::SubpassDescription2; + using VULKAN_HPP_NAMESPACE::SubpassDescription2KHR; + using VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve; + using VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolveKHR; + using VULKAN_HPP_NAMESPACE::SubpassEndInfo; + using VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR; + using VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo; + using VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfoKHR; + + //=== VK_VERSION_1_3 === + using VULKAN_HPP_NAMESPACE::BlitImageInfo2; + using 
VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR; + using VULKAN_HPP_NAMESPACE::BufferCopy2; + using VULKAN_HPP_NAMESPACE::BufferCopy2KHR; + using VULKAN_HPP_NAMESPACE::BufferImageCopy2; + using VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR; + using VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2; + using VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR; + using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo; + using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfoKHR; + using VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo; + using VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR; + using VULKAN_HPP_NAMESPACE::CopyBufferInfo2; + using VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR; + using VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2; + using VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR; + using VULKAN_HPP_NAMESPACE::CopyImageInfo2; + using VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR; + using VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2; + using VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR; + using VULKAN_HPP_NAMESPACE::DependencyInfo; + using VULKAN_HPP_NAMESPACE::DependencyInfoKHR; + using VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo; + using VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements; + using VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirementsKHR; + using VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements; + using VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirementsKHR; + using VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo; + using VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::FormatProperties3; + using VULKAN_HPP_NAMESPACE::FormatProperties3KHR; + using VULKAN_HPP_NAMESPACE::ImageBlit2; + using VULKAN_HPP_NAMESPACE::ImageBlit2KHR; + using VULKAN_HPP_NAMESPACE::ImageCopy2; + using VULKAN_HPP_NAMESPACE::ImageCopy2KHR; + using VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2; + using VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR; + using VULKAN_HPP_NAMESPACE::ImageResolve2; + using VULKAN_HPP_NAMESPACE::ImageResolve2KHR; + using VULKAN_HPP_NAMESPACE::MemoryBarrier2; + using VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4PropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedback; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT; + using VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo; + using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo; + using VULKAN_HPP_NAMESPACE::RenderingAttachmentInfoKHR; + using VULKAN_HPP_NAMESPACE::RenderingInfo; + using VULKAN_HPP_NAMESPACE::RenderingInfoKHR; + using VULKAN_HPP_NAMESPACE::ResolveImageInfo2; + using VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR; + using VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo; + using VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR; + using VULKAN_HPP_NAMESPACE::ShaderRequiredSubgroupSizeCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::SubmitInfo2; + using VULKAN_HPP_NAMESPACE::SubmitInfo2KHR; + using VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock; + using VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT; + + //=== VK_KHR_surface === + using VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::SurfaceFormatKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR; + using VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR; + using 
VULKAN_HPP_NAMESPACE::PresentInfoKHR; + using VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR; + + //=== VK_KHR_display === + using VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR; + using VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR; + using VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR; + using VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR; + using VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR; + + //=== VK_KHR_display_swapchain === + using VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + using VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + using VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + using VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + using VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + using VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT; + + //=== VK_AMD_rasterization_order === + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD; + + //=== VK_EXT_debug_marker === + using VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT; + using VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT; + using VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR; + using VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR; + using VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR; + + //=== VK_KHR_video_decode_queue === + using VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR; + + //=== VK_NV_dedicated_allocation === + using VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV; + using VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV; + using VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV; + + //=== VK_EXT_transform_feedback === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT; + + //=== VK_NVX_binary_import === + using VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX; + using VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX; + using VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX; + + //=== VK_NVX_image_view_handle === + using VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX; + using VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX; + + //=== VK_KHR_video_encode_h264 === + using VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoKHR; + + //=== VK_KHR_video_encode_h265 === + using VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoKHR; + + //=== VK_KHR_video_decode_h264 === + using VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR; + + //=== VK_AMD_texture_gather_bias_lod === + using VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD; + + //=== VK_AMD_shader_info === + using VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD; + using VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD; + + //=== VK_KHR_dynamic_rendering === + using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD; + using 
VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoNV; + using VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX; + using VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT; + using VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + using VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_corner_sampled_image === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV; + + //=== VK_NV_external_memory_capabilities === + using VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV; + + //=== VK_NV_external_memory === + using VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV; + using VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + using VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV; + using VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_win32_keyed_mutex === + using VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_validation_flags === + using VULKAN_HPP_NAMESPACE::ValidationFlagsEXT; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + using VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_EXT_astc_decode_mode === + using VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT; + + //=== VK_EXT_pipeline_robustness === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + using VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR; + using VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR; + using VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR; + using VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + using VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR; + using VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR; + using VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_keyed_mutex === + using VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + using VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR; + using VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR; + using VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + using VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR; + + //=== VK_KHR_push_descriptor === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR; + + //=== VK_EXT_conditional_rendering === + using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT; + using VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT; + + //=== VK_KHR_incremental_present === + using VULKAN_HPP_NAMESPACE::PresentRegionKHR; + using VULKAN_HPP_NAMESPACE::PresentRegionsKHR; + using VULKAN_HPP_NAMESPACE::RectLayerKHR; + + //=== VK_NV_clip_space_w_scaling === + using VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV; + using VULKAN_HPP_NAMESPACE::ViewportWScalingNV; + + //=== VK_EXT_display_surface_counter === + using VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT; + + //=== VK_EXT_display_control === + using VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT; + using VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT; + using VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT; + using VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT; + + //=== VK_GOOGLE_display_timing === + using VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE; + using VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE; + using VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE; + using VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE; + + //=== VK_NVX_multiview_per_view_attributes === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + //=== VK_NV_viewport_swizzle === + using VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV; + using VULKAN_HPP_NAMESPACE::ViewportSwizzleNV; + + //=== VK_EXT_discard_rectangles === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT; + + //=== VK_EXT_conservative_rasterization === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT; + + //=== VK_EXT_depth_clip_enable === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT; + + //=== VK_EXT_hdr_metadata === + using VULKAN_HPP_NAMESPACE::HdrMetadataEXT; + using VULKAN_HPP_NAMESPACE::XYColorEXT; + + //=== VK_IMG_relaxed_line_rasterization === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + + //=== VK_KHR_shared_presentable_image === + using VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + using VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR; + using VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR; + using VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + using VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR; + using VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR; + + //=== VK_KHR_performance_query === + using VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR; + using VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR; + using VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR; + + //=== VK_KHR_get_surface_capabilities2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR; + using VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR; + using VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR; + + //=== VK_KHR_get_display_properties2 === + using 
VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR; + using VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR; + using VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR; + using VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR; + using VULKAN_HPP_NAMESPACE::DisplayProperties2KHR; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + using VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + using VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID; + using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID; + using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID; + using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID; + using VULKAN_HPP_NAMESPACE::ExternalFormatANDROID; + using VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID; + using VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + using VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX; + using VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX; + using VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX; + using VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX; + using VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + using VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT; + using VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT; + using VULKAN_HPP_NAMESPACE::SampleLocationEXT; + using VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT; + using VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT; + + //=== VK_EXT_blend_operation_advanced === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT; + + //=== VK_NV_fragment_coverage_to_color === + using VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_NAMESPACE::AabbPositionsKHR; + using VULKAN_HPP_NAMESPACE::AabbPositionsNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR; + using 
VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR; + using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR; + using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR; + using VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR; + using VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR; + using VULKAN_HPP_NAMESPACE::TransformMatrixKHR; + using VULKAN_HPP_NAMESPACE::TransformMatrixNV; + using VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR; + + //=== VK_KHR_ray_tracing_pipeline === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR; + using VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR; + using VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR; + + //=== VK_KHR_ray_query === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR; + + //=== VK_NV_framebuffer_mixed_samples === + using VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV; + + //=== VK_NV_shader_sm_builtins === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV; + + //=== VK_EXT_image_drm_format_modifier === + using VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT; + using VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT; + using VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT; + using VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT; + using VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_portability_subset === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_shading_rate_image === + using VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV; + using VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV; + using VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV; + using VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV; + using VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV; + using VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV; + using VULKAN_HPP_NAMESPACE::GeometryAABBNV; + using VULKAN_HPP_NAMESPACE::GeometryDataNV; + using VULKAN_HPP_NAMESPACE::GeometryNV; + using VULKAN_HPP_NAMESPACE::GeometryTrianglesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV; + using VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV; + using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV; + using VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV; + + //=== VK_NV_representative_fragment_test === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV; + using VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV; + + //=== VK_EXT_filter_cubic === + using VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT; + + //=== VK_EXT_external_memory_host === + using VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT; + using VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT; + + //=== VK_KHR_shader_clock === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR; + + //=== VK_AMD_pipeline_compiler_control === + using VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD; + + //=== VK_AMD_shader_core_properties === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD; + + //=== VK_KHR_video_decode_h265 === + using VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR; + + //=== VK_KHR_global_priority === + using VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR; + using VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesEXT; + using VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR; + + //=== VK_AMD_memory_overallocation_behavior === + using VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD; + + //=== VK_EXT_vertex_attribute_divisor === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_frame_token === + using VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_compute_shader_derivatives === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV; + + //=== VK_NV_mesh_shader === + using VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV; + + //=== VK_NV_shader_image_footprint === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV; + + //=== VK_NV_scissor_exclusive === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV; + using VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV; + + //=== VK_NV_device_diagnostic_checkpoints === + using VULKAN_HPP_NAMESPACE::CheckpointDataNV; + using VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV; + + //=== VK_INTEL_shader_integer_functions2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceValueINTEL; + using VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL; + using VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL; + + //=== VK_EXT_pci_bus_info === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT; + + //=== VK_AMD_display_native_hdr === + using VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD; + using VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + using VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + using VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT; + using VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT; + + //=== VK_KHR_fragment_shading_rate === + using VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR; + using VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR; + + //=== VK_AMD_shader_core_properties2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD; + + //=== VK_AMD_device_coherent_memory === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD; + + //=== VK_EXT_shader_image_atomic_int64 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + //=== VK_EXT_memory_budget === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT; + + //=== VK_EXT_memory_priority === + using VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT; + + //=== VK_KHR_surface_protected_capabilities === + using VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR; + + //=== VK_NV_dedicated_allocation_image_aliasing === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + + //=== 
VK_EXT_buffer_device_address === + using VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferAddressFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT; + + //=== VK_EXT_validation_features === + using VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT; + + //=== VK_KHR_present_wait === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR; + + //=== VK_NV_cooperative_matrix === + using VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV; + + //=== VK_NV_coverage_reduction_mode === + using VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV; + + //=== VK_EXT_fragment_shader_interlock === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT; + + //=== VK_EXT_ycbcr_image_arrays === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT; + + //=== VK_EXT_provoking_vertex === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + using VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT; + using VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT; + using VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + using VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT; + + //=== VK_EXT_line_rasterization === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT; + + //=== VK_EXT_shader_atomic_float === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT; + + //=== VK_EXT_index_type_uint8 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT; + + //=== VK_EXT_extended_dynamic_state === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT; + + //=== VK_KHR_pipeline_executable_properties === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR; + using VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR; + using VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR; + using VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR; + using VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR; + using VULKAN_HPP_NAMESPACE::PipelineInfoEXT; + using VULKAN_HPP_NAMESPACE::PipelineInfoKHR; + + //=== VK_EXT_host_image_copy === + using VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT; + using VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT; + using VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT; + using VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT; + using VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT; + using VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT; + using VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT; + using VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT; + + //=== VK_KHR_map_memory2 === + using VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR; + using VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR; + + //=== VK_EXT_shader_atomic_float2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT; + + //=== VK_EXT_surface_maintenance1 === + using VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT; + using VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT; + using VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT; + + //=== VK_EXT_swapchain_maintenance1 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT; + using VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT; + using VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT; + using VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT; + using VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV; + using VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV; + using VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV; + using VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV; + using VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV; + using VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV; + using VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV; + using VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + using VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV; + + //=== VK_NV_inherited_viewport_scissor === + using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV; + + //=== VK_EXT_texel_buffer_alignment === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT; + + //=== VK_QCOM_render_pass_transform === + using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM; + using VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM; + + //=== VK_EXT_depth_bias_control === + using VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT; + using VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT; + + //=== VK_EXT_device_memory_report === + using VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT; + + //=== VK_EXT_robustness2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT; + + //=== VK_EXT_custom_border_color === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT; + using VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT; + + //=== VK_KHR_pipeline_library === + using VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR; + + //=== 
VK_NV_present_barrier === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV; + using VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV; + using VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV; + + //=== VK_KHR_present_id === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PresentIdKHR; + + //=== VK_KHR_video_encode_queue === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR; + using VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR; + + //=== VK_NV_device_diagnostics_config === + using VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + using VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV; + using VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV; + using VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_low_latency === + using VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + using VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT; + using VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT; + using VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT; + using VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT; + using VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + using VULKAN_HPP_NAMESPACE::CheckpointData2NV; + using VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV; + + //=== VK_EXT_descriptor_buffer === + using VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT; + using VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT; + using VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT; + using VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT; + using VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT; + using VULKAN_HPP_NAMESPACE::DescriptorDataEXT; + using VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT; + using VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT; + using VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT; + using VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT; + using VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT; + + //=== VK_EXT_graphics_pipeline_library === + using VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + + //=== VK_AMD_shader_early_and_late_fragment_tests === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + + //=== VK_KHR_fragment_shader_barycentric === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + + //=== VK_KHR_shader_subgroup_uniform_control_flow === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + + //=== VK_NV_fragment_shading_rate_enums === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + using VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV; + + //=== VK_NV_ray_tracing_motion_blur === + using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV; + using VULKAN_HPP_NAMESPACE::SRTDataNV; + + //=== VK_EXT_mesh_shader === + using VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT; + + //=== VK_EXT_ycbcr_2plane_444_formats === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + + //=== VK_EXT_fragment_density_map2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT; + + //=== VK_QCOM_rotated_copy_commands === + using VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM; + + //=== VK_KHR_workgroup_memory_explicit_layout === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + + //=== VK_EXT_image_compression_control === + using VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT; + using VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT; + + //=== VK_EXT_attachment_feedback_loop_layout === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + + //=== VK_EXT_4444_formats === + using VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT; + + //=== VK_EXT_device_fault === + using VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT; + using VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT; + using VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT; + using 
VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT; + using VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT; + + //=== VK_EXT_rgba10x6_formats === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + using VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT; + using VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT; + using VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT; + + //=== VK_EXT_physical_device_drm === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT; + + //=== VK_EXT_device_address_binding_report === + using VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT; + + //=== VK_EXT_depth_clip_control === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT; + + //=== VK_EXT_primitive_topology_list_restart === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + using VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + using VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA; + using VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA; + using VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI; + using VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI; + + //=== VK_HUAWEI_invocation_mask === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI; + + //=== VK_NV_external_memory_rdma === + using VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV; + + //=== VK_EXT_pipeline_properties === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT; + + //=== VK_EXT_frame_boundary === + using VULKAN_HPP_NAMESPACE::FrameBoundaryEXT; + using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT; + + //=== VK_EXT_multisampled_render_to_single_sampled === + using VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + using VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT; + + //=== VK_EXT_extended_dynamic_state2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + using VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT; + + //=== VK_EXT_primitives_generated_query === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + + //=== VK_KHR_ray_tracing_maintenance1 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR; + using VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR; + + //=== VK_EXT_image_view_min_lod === + using VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT; + + //=== VK_EXT_multi_draw === + using VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT; + using VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT; + + //=== VK_EXT_image_2d_view_of_3d === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT; + + //=== VK_EXT_shader_tile_image === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT; + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT; + using VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT; + using VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT; + using VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT; + using VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT; + using VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT; + using VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::MicromapTriangleEXT; + using VULKAN_HPP_NAMESPACE::MicromapUsageEXT; + using VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + using VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_HUAWEI_cluster_culling_shader === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + + //=== VK_EXT_border_color_swizzle === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT; + using VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT; + + //=== VK_EXT_pageable_device_local_memory === + using 
VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + + //=== VK_ARM_shader_core_properties === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM; + + //=== VK_ARM_scheduling_controls === + using VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM; + + //=== VK_EXT_image_sliced_view_of_3d === + using VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + + //=== VK_VALVE_descriptor_set_host_mapping === + using VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + + //=== VK_EXT_depth_clamp_zero_one === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT; + + //=== VK_EXT_non_seamless_cube_map === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + + //=== VK_ARM_render_pass_striped === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedFeaturesARM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedPropertiesARM; + using VULKAN_HPP_NAMESPACE::RenderPassStripeBeginInfoARM; + using VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM; + using VULKAN_HPP_NAMESPACE::RenderPassStripeSubmitInfoARM; + + //=== VK_QCOM_fragment_density_map_offset === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + using VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM; + + //=== VK_NV_copy_memory_indirect === + using VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV; + using VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV; + + //=== VK_NV_memory_decompression === + using VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV; + + //=== VK_NV_device_generated_commands_compute === + using VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV; + using VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + using VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV; + + //=== VK_NV_linear_color_attachment === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV; + + //=== VK_EXT_image_compression_control_swapchain === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + + //=== VK_QCOM_image_processing === + using VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM; + + //=== VK_EXT_nested_command_buffer === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferPropertiesEXT; + + //=== VK_EXT_external_memory_acquire_unmodified === + using VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT; + + //=== 
VK_EXT_extended_dynamic_state3 === + using VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT; + using VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT; + + //=== VK_EXT_subpass_merge_feedback === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + using VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT; + using VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT; + using VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT; + + //=== VK_LUNARG_direct_driver_loading === + using VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG; + using VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG; + + //=== VK_EXT_shader_module_identifier === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT; + + //=== VK_EXT_rasterization_order_attachment_access === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV; + + //=== VK_EXT_legacy_dithering === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT; + + //=== VK_EXT_pipeline_protected_access === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_format_resolve === + using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatResolvePropertiesANDROID; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_KHR_maintenance5 === + using VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR; + using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT; + using VULKAN_HPP_NAMESPACE::ImageSubresource2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR; + using VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR; + using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT; + using VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR; + + //=== VK_KHR_ray_tracing_position_fetch === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT; + using 
VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT; + + //=== VK_QCOM_tile_properties === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM; + using VULKAN_HPP_NAMESPACE::TilePropertiesQCOM; + + //=== VK_SEC_amigo_profiling === + using VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC; + + //=== VK_QCOM_multiview_per_view_viewports === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + + //=== VK_NV_ray_tracing_invocation_reorder === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV; + + //=== VK_NV_extended_sparse_address_space === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV; + + //=== VK_EXT_mutable_descriptor_type === + using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE; + using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT; + using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + + //=== VK_EXT_layer_settings === + using VULKAN_HPP_NAMESPACE::LayerSettingEXT; + using VULKAN_HPP_NAMESPACE::LayerSettingsCreateInfoEXT; + + //=== VK_ARM_shader_core_builtins === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM; + + //=== VK_EXT_pipeline_library_group_handles === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + + //=== VK_EXT_dynamic_rendering_unused_attachments === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + + //=== VK_NV_low_latency2 === + using VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV; + using VULKAN_HPP_NAMESPACE::LatencySleepInfoNV; + using VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV; + using VULKAN_HPP_NAMESPACE::LatencySubmissionPresentIdNV; + using VULKAN_HPP_NAMESPACE::LatencySurfaceCapabilitiesNV; + using VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV; + using VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV; + using VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV; + using VULKAN_HPP_NAMESPACE::SwapchainLatencyCreateInfoNV; + + //=== VK_KHR_cooperative_matrix === + using VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesKHR; + + //=== VK_QCOM_multiview_per_view_render_areas === + using VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + + //=== VK_KHR_video_maintenance1 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR; + using VULKAN_HPP_NAMESPACE::VideoInlineQueryInfoKHR; + + //=== VK_NV_per_stage_descriptor_set === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePerStageDescriptorSetFeaturesNV; + + //=== VK_QCOM_image_processing2 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2FeaturesQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM; + using 
VULKAN_HPP_NAMESPACE::SamplerBlockMatchWindowCreateInfoQCOM; + + //=== VK_QCOM_filter_cubic_weights === + using VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicWeightsFeaturesQCOM; + using VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM; + + //=== VK_QCOM_ycbcr_degamma === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; + + //=== VK_QCOM_filter_cubic_clamp === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + + //=== VK_KHR_vertex_attribute_divisor === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT; + using VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + using VULKAN_HPP_NAMESPACE::ExternalFormatQNX; + using VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + using VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX; + using VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_MSFT_layered_driver === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT; + + //=== VK_KHR_calibrated_timestamps === + using VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT; + using VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR; + + //=== VK_KHR_maintenance6 === + using VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT; + using VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR; + using VULKAN_HPP_NAMESPACE::BindMemoryStatusKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6PropertiesKHR; + using VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR; + using VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR; + using VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR; + using VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT; + + //=== VK_NV_descriptor_pool_overallocation === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + + //=============== + //=== HANDLEs === + //=============== + + using VULKAN_HPP_NAMESPACE::isVulkanHandleType; + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::Buffer; + using VULKAN_HPP_NAMESPACE::BufferView; + using VULKAN_HPP_NAMESPACE::CommandBuffer; + using VULKAN_HPP_NAMESPACE::CommandPool; + using VULKAN_HPP_NAMESPACE::DescriptorPool; + using VULKAN_HPP_NAMESPACE::DescriptorSet; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + using VULKAN_HPP_NAMESPACE::Device; + using VULKAN_HPP_NAMESPACE::DeviceMemory; + using VULKAN_HPP_NAMESPACE::Event; + using VULKAN_HPP_NAMESPACE::Fence; + using VULKAN_HPP_NAMESPACE::Framebuffer; + using VULKAN_HPP_NAMESPACE::Image; + using VULKAN_HPP_NAMESPACE::ImageView; + using VULKAN_HPP_NAMESPACE::Instance; + using 
VULKAN_HPP_NAMESPACE::PhysicalDevice; + using VULKAN_HPP_NAMESPACE::Pipeline; + using VULKAN_HPP_NAMESPACE::PipelineCache; + using VULKAN_HPP_NAMESPACE::PipelineLayout; + using VULKAN_HPP_NAMESPACE::QueryPool; + using VULKAN_HPP_NAMESPACE::Queue; + using VULKAN_HPP_NAMESPACE::RenderPass; + using VULKAN_HPP_NAMESPACE::Sampler; + using VULKAN_HPP_NAMESPACE::Semaphore; + using VULKAN_HPP_NAMESPACE::ShaderModule; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + + //=== VK_VERSION_1_3 === + using VULKAN_HPP_NAMESPACE::PrivateDataSlot; + + //=== VK_KHR_surface === + using VULKAN_HPP_NAMESPACE::SurfaceKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_NAMESPACE::SwapchainKHR; + + //=== VK_KHR_display === + using VULKAN_HPP_NAMESPACE::DisplayKHR; + using VULKAN_HPP_NAMESPACE::DisplayModeKHR; + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_NAMESPACE::VideoSessionKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + + //=== VK_NVX_binary_import === + using VULKAN_HPP_NAMESPACE::CuFunctionNVX; + using VULKAN_HPP_NAMESPACE::CuModuleNVX; + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; + + //=== VK_KHR_deferred_host_operations === + using VULKAN_HPP_NAMESPACE::DeferredOperationKHR; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + using VULKAN_HPP_NAMESPACE::CudaFunctionNV; + using VULKAN_HPP_NAMESPACE::CudaModuleNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_NAMESPACE::MicromapEXT; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_NAMESPACE::ShaderEXT; + + //====================== + //=== UNIQUE HANDLEs === + //====================== + +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::UniqueBuffer; + using VULKAN_HPP_NAMESPACE::UniqueBufferView; + using VULKAN_HPP_NAMESPACE::UniqueCommandBuffer; + using VULKAN_HPP_NAMESPACE::UniqueCommandPool; + using VULKAN_HPP_NAMESPACE::UniqueDescriptorPool; + using VULKAN_HPP_NAMESPACE::UniqueDescriptorSet; + using VULKAN_HPP_NAMESPACE::UniqueDescriptorSetLayout; + using VULKAN_HPP_NAMESPACE::UniqueDevice; + using VULKAN_HPP_NAMESPACE::UniqueDeviceMemory; + using VULKAN_HPP_NAMESPACE::UniqueEvent; + using VULKAN_HPP_NAMESPACE::UniqueFence; + using VULKAN_HPP_NAMESPACE::UniqueFramebuffer; + using VULKAN_HPP_NAMESPACE::UniqueImage; + using VULKAN_HPP_NAMESPACE::UniqueImageView; + using VULKAN_HPP_NAMESPACE::UniqueInstance; + using VULKAN_HPP_NAMESPACE::UniquePipeline; + using VULKAN_HPP_NAMESPACE::UniquePipelineCache; + using VULKAN_HPP_NAMESPACE::UniquePipelineLayout; + 
using VULKAN_HPP_NAMESPACE::UniqueQueryPool; + using VULKAN_HPP_NAMESPACE::UniqueRenderPass; + using VULKAN_HPP_NAMESPACE::UniqueSampler; + using VULKAN_HPP_NAMESPACE::UniqueSemaphore; + using VULKAN_HPP_NAMESPACE::UniqueShaderModule; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::UniqueDescriptorUpdateTemplate; + using VULKAN_HPP_NAMESPACE::UniqueSamplerYcbcrConversion; + + //=== VK_VERSION_1_3 === + using VULKAN_HPP_NAMESPACE::UniquePrivateDataSlot; + + //=== VK_KHR_surface === + using VULKAN_HPP_NAMESPACE::UniqueSurfaceKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_NAMESPACE::UniqueSwapchainKHR; + + //=== VK_KHR_display === + using VULKAN_HPP_NAMESPACE::UniqueDisplayKHR; + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::UniqueDebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_NAMESPACE::UniqueVideoSessionKHR; + using VULKAN_HPP_NAMESPACE::UniqueVideoSessionParametersKHR; + + //=== VK_NVX_binary_import === + using VULKAN_HPP_NAMESPACE::UniqueCuFunctionNVX; + using VULKAN_HPP_NAMESPACE::UniqueCuModuleNVX; + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::UniqueDebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_NAMESPACE::UniqueAccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_NAMESPACE::UniqueValidationCacheEXT; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::UniqueAccelerationStructureNV; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_NAMESPACE::UniquePerformanceConfigurationINTEL; + + //=== VK_KHR_deferred_host_operations === + using VULKAN_HPP_NAMESPACE::UniqueDeferredOperationKHR; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_NAMESPACE::UniqueIndirectCommandsLayoutNV; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + using VULKAN_HPP_NAMESPACE::UniqueCudaFunctionNV; + using VULKAN_HPP_NAMESPACE::UniqueCudaModuleNV; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_NAMESPACE::UniqueBufferCollectionFUCHSIA; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_NAMESPACE::UniqueMicromapEXT; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_NAMESPACE::UniqueOpticalFlowSessionNV; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_NAMESPACE::UniqueHandleTraits; + using VULKAN_HPP_NAMESPACE::UniqueShaderEXT; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + //====================== + //=== SHARED HANDLEs === + //====================== + +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::SharedBuffer; + using VULKAN_HPP_NAMESPACE::SharedBufferView; + using VULKAN_HPP_NAMESPACE::SharedCommandBuffer; + using VULKAN_HPP_NAMESPACE::SharedCommandPool; + using VULKAN_HPP_NAMESPACE::SharedDescriptorPool; + using VULKAN_HPP_NAMESPACE::SharedDescriptorSet; + using VULKAN_HPP_NAMESPACE::SharedDescriptorSetLayout; + using VULKAN_HPP_NAMESPACE::SharedDevice; + using VULKAN_HPP_NAMESPACE::SharedDeviceMemory; + using VULKAN_HPP_NAMESPACE::SharedEvent; + using VULKAN_HPP_NAMESPACE::SharedFence; + using VULKAN_HPP_NAMESPACE::SharedFramebuffer; + using VULKAN_HPP_NAMESPACE::SharedImage; + using VULKAN_HPP_NAMESPACE::SharedImageView; + using VULKAN_HPP_NAMESPACE::SharedInstance; + using VULKAN_HPP_NAMESPACE::SharedPhysicalDevice; + using VULKAN_HPP_NAMESPACE::SharedPipeline; + using 
VULKAN_HPP_NAMESPACE::SharedPipelineCache; + using VULKAN_HPP_NAMESPACE::SharedPipelineLayout; + using VULKAN_HPP_NAMESPACE::SharedQueryPool; + using VULKAN_HPP_NAMESPACE::SharedQueue; + using VULKAN_HPP_NAMESPACE::SharedRenderPass; + using VULKAN_HPP_NAMESPACE::SharedSampler; + using VULKAN_HPP_NAMESPACE::SharedSemaphore; + using VULKAN_HPP_NAMESPACE::SharedShaderModule; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::SharedDescriptorUpdateTemplate; + using VULKAN_HPP_NAMESPACE::SharedSamplerYcbcrConversion; + + //=== VK_VERSION_1_3 === + using VULKAN_HPP_NAMESPACE::SharedPrivateDataSlot; + + //=== VK_KHR_surface === + using VULKAN_HPP_NAMESPACE::SharedSurfaceKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_NAMESPACE::SharedSwapchainKHR; + + //=== VK_KHR_display === + using VULKAN_HPP_NAMESPACE::SharedDisplayKHR; + using VULKAN_HPP_NAMESPACE::SharedDisplayModeKHR; + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::SharedDebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_NAMESPACE::SharedVideoSessionKHR; + using VULKAN_HPP_NAMESPACE::SharedVideoSessionParametersKHR; + + //=== VK_NVX_binary_import === + using VULKAN_HPP_NAMESPACE::SharedCuFunctionNVX; + using VULKAN_HPP_NAMESPACE::SharedCuModuleNVX; + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::SharedDebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_NAMESPACE::SharedAccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_NAMESPACE::SharedValidationCacheEXT; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::SharedAccelerationStructureNV; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_NAMESPACE::SharedPerformanceConfigurationINTEL; + + //=== VK_KHR_deferred_host_operations === + using VULKAN_HPP_NAMESPACE::SharedDeferredOperationKHR; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_NAMESPACE::SharedIndirectCommandsLayoutNV; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + using VULKAN_HPP_NAMESPACE::SharedCudaFunctionNV; + using VULKAN_HPP_NAMESPACE::SharedCudaModuleNV; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_NAMESPACE::SharedBufferCollectionFUCHSIA; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_NAMESPACE::SharedMicromapEXT; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_NAMESPACE::SharedOpticalFlowSessionNV; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_NAMESPACE::SharedHandleTraits; + using VULKAN_HPP_NAMESPACE::SharedShaderEXT; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + //=========================== + //=== COMMAND Definitions === + //=========================== + using VULKAN_HPP_NAMESPACE::createInstance; + using VULKAN_HPP_NAMESPACE::enumerateInstanceExtensionProperties; + using VULKAN_HPP_NAMESPACE::enumerateInstanceLayerProperties; + using VULKAN_HPP_NAMESPACE::enumerateInstanceVersion; + +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + using VULKAN_HPP_NAMESPACE::createInstanceUnique; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + using VULKAN_HPP_NAMESPACE::StructExtends; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL ) + using VULKAN_HPP_NAMESPACE::DynamicLoader; +#endif /*VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL*/ + + //===================== + //=== Format Traits === + 
//===================== + using VULKAN_HPP_NAMESPACE::blockExtent; + using VULKAN_HPP_NAMESPACE::blockSize; + using VULKAN_HPP_NAMESPACE::compatibilityClass; + using VULKAN_HPP_NAMESPACE::componentBits; + using VULKAN_HPP_NAMESPACE::componentCount; + using VULKAN_HPP_NAMESPACE::componentName; + using VULKAN_HPP_NAMESPACE::componentNumericFormat; + using VULKAN_HPP_NAMESPACE::componentPlaneIndex; + using VULKAN_HPP_NAMESPACE::componentsAreCompressed; + using VULKAN_HPP_NAMESPACE::compressionScheme; + using VULKAN_HPP_NAMESPACE::isCompressed; + using VULKAN_HPP_NAMESPACE::packed; + using VULKAN_HPP_NAMESPACE::planeCompatibleFormat; + using VULKAN_HPP_NAMESPACE::planeCount; + using VULKAN_HPP_NAMESPACE::planeHeightDivisor; + using VULKAN_HPP_NAMESPACE::planeWidthDivisor; + using VULKAN_HPP_NAMESPACE::texelsPerBlock; + + //====================================== + //=== Extension inspection functions === + //====================================== + using VULKAN_HPP_NAMESPACE::getDeprecatedExtensions; + using VULKAN_HPP_NAMESPACE::getDeviceExtensions; + using VULKAN_HPP_NAMESPACE::getExtensionDepends; + using VULKAN_HPP_NAMESPACE::getExtensionDeprecatedBy; + using VULKAN_HPP_NAMESPACE::getExtensionObsoletedBy; + using VULKAN_HPP_NAMESPACE::getExtensionPromotedTo; + using VULKAN_HPP_NAMESPACE::getInstanceExtensions; + using VULKAN_HPP_NAMESPACE::getObsoletedExtensions; + using VULKAN_HPP_NAMESPACE::getPromotedExtensions; + using VULKAN_HPP_NAMESPACE::isDeprecatedExtension; + using VULKAN_HPP_NAMESPACE::isDeviceExtension; + using VULKAN_HPP_NAMESPACE::isInstanceExtension; + using VULKAN_HPP_NAMESPACE::isObsoletedExtension; + using VULKAN_HPP_NAMESPACE::isPromotedExtension; + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) && !defined( VULKAN_HPP_NO_EXCEPTIONS ) + namespace VULKAN_HPP_RAII_NAMESPACE + { + //====================== + //=== RAII HARDCODED === + //====================== + + using VULKAN_HPP_RAII_NAMESPACE::Context; + using VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher; + using VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher; + using VULKAN_HPP_RAII_NAMESPACE::exchange; + using VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher; + + //==================== + //=== RAII HANDLEs === + //==================== + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_RAII_NAMESPACE::Buffer; + using VULKAN_HPP_RAII_NAMESPACE::BufferView; + using VULKAN_HPP_RAII_NAMESPACE::CommandBuffer; + using VULKAN_HPP_RAII_NAMESPACE::CommandBuffers; + using VULKAN_HPP_RAII_NAMESPACE::CommandPool; + using VULKAN_HPP_RAII_NAMESPACE::DescriptorPool; + using VULKAN_HPP_RAII_NAMESPACE::DescriptorSet; + using VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout; + using VULKAN_HPP_RAII_NAMESPACE::DescriptorSets; + using VULKAN_HPP_RAII_NAMESPACE::Device; + using VULKAN_HPP_RAII_NAMESPACE::DeviceMemory; + using VULKAN_HPP_RAII_NAMESPACE::Event; + using VULKAN_HPP_RAII_NAMESPACE::Fence; + using VULKAN_HPP_RAII_NAMESPACE::Framebuffer; + using VULKAN_HPP_RAII_NAMESPACE::Image; + using VULKAN_HPP_RAII_NAMESPACE::ImageView; + using VULKAN_HPP_RAII_NAMESPACE::Instance; + using VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice; + using VULKAN_HPP_RAII_NAMESPACE::PhysicalDevices; + using VULKAN_HPP_RAII_NAMESPACE::Pipeline; + using VULKAN_HPP_RAII_NAMESPACE::PipelineCache; + using VULKAN_HPP_RAII_NAMESPACE::PipelineLayout; + using VULKAN_HPP_RAII_NAMESPACE::Pipelines; + using VULKAN_HPP_RAII_NAMESPACE::QueryPool; + using VULKAN_HPP_RAII_NAMESPACE::Queue; + using VULKAN_HPP_RAII_NAMESPACE::RenderPass; + using VULKAN_HPP_RAII_NAMESPACE::Sampler; + using 
VULKAN_HPP_RAII_NAMESPACE::Semaphore; + using VULKAN_HPP_RAII_NAMESPACE::ShaderModule; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate; + using VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion; + + //=== VK_VERSION_1_3 === + using VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot; + + //=== VK_KHR_surface === + using VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR; + using VULKAN_HPP_RAII_NAMESPACE::SwapchainKHRs; + + //=== VK_KHR_display === + using VULKAN_HPP_RAII_NAMESPACE::DisplayKHR; + using VULKAN_HPP_RAII_NAMESPACE::DisplayKHRs; + using VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR; + + //=== VK_EXT_debug_report === + using VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR; + using VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR; + + //=== VK_NVX_binary_import === + using VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX; + using VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX; + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL; + + //=== VK_KHR_deferred_host_operations === + using VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + using VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV; + using VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_RAII_NAMESPACE::MicromapEXT; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_RAII_NAMESPACE::ShaderEXT; + using VULKAN_HPP_RAII_NAMESPACE::ShaderEXTs; + + } // namespace VULKAN_HPP_RAII_NAMESPACE +#endif +} // namespace VULKAN_HPP_NAMESPACE diff --git a/MacroLibX/third_party/vulkan/vulkan.h b/MacroLibX/third_party/vulkan/vulkan.h new file mode 100644 index 0000000..426cff5 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan.h @@ -0,0 +1,99 @@ +#ifndef VULKAN_H_ +#define VULKAN_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. 
+
+** 
+** SPDX-License-Identifier: Apache-2.0 
+*/ 
+ 
+#include "vk_platform.h" 
+#include "vulkan_core.h" 
+ 
+#ifdef VK_USE_PLATFORM_ANDROID_KHR 
+#include "vulkan_android.h" 
+#endif 
+ 
+#ifdef VK_USE_PLATFORM_FUCHSIA 
+#include <zircon/types.h> 
+#include "vulkan_fuchsia.h" 
+#endif 
+ 
+#ifdef VK_USE_PLATFORM_IOS_MVK 
+#include "vulkan_ios.h" 
+#endif 
+ 
+ 
+#ifdef VK_USE_PLATFORM_MACOS_MVK 
+#include "vulkan_macos.h" 
+#endif 
+ 
+#ifdef VK_USE_PLATFORM_METAL_EXT 
+#include "vulkan_metal.h" 
+#endif 
+ 
+#ifdef VK_USE_PLATFORM_VI_NN 
+#include "vulkan_vi.h" 
+#endif 
+ 
+ 
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR 
+#include "vulkan_wayland.h" 
+#endif 
+ 
+ 
+#ifdef VK_USE_PLATFORM_WIN32_KHR 
+#include <windows.h> 
+#include "vulkan_win32.h" 
+#endif 
+ 
+ 
+#ifdef VK_USE_PLATFORM_XCB_KHR 
+#include <xcb/xcb.h> 
+#include "vulkan_xcb.h" 
+#endif 
+ 
+ 
+#ifdef VK_USE_PLATFORM_XLIB_KHR 
+#include <X11/Xlib.h> 
+#include "vulkan_xlib.h" 
+#endif 
+ 
+ 
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT 
+#include <directfb.h> 
+#include "vulkan_directfb.h" 
+#endif 
+ 
+ 
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT 
+#include <X11/Xlib.h> 
+#include <X11/extensions/Xrandr.h> 
+#include "vulkan_xlib_xrandr.h" 
+#endif 
+ 
+ 
+#ifdef VK_USE_PLATFORM_GGP 
+#include <ggp_c/vulkan_types.h> 
+#include "vulkan_ggp.h" 
+#endif 
+ 
+ 
+#ifdef VK_USE_PLATFORM_SCREEN_QNX 
+#include <screen/screen.h> 
+#include "vulkan_screen.h" 
+#endif 
+ 
+ 
+#ifdef VK_USE_PLATFORM_SCI 
+#include <nvscisync.h> 
+#include <nvscibuf.h> 
+#include "vulkan_sci.h" 
+#endif 
+ 
+ 
+#ifdef VK_ENABLE_BETA_EXTENSIONS 
+#include "vulkan_beta.h" 
+#endif 
+ 
+#endif // VULKAN_H_ 
diff --git a/MacroLibX/third_party/vulkan/vulkan.hpp b/MacroLibX/third_party/vulkan/vulkan.hpp 
new file mode 100644 
index 0000000..0dc5dc3 
--- /dev/null 
+++ b/MacroLibX/third_party/vulkan/vulkan.hpp 
@@ -0,0 +1,19951 @@ 
+// Copyright 2015-2023 The Khronos Group Inc. 
+// 
+// SPDX-License-Identifier: Apache-2.0 OR MIT 
+// 
+ 
+// This header is generated from the Khronos Vulkan XML API Registry. 
+ 
+#ifndef VULKAN_HPP 
+#define VULKAN_HPP 
+ 
+#include <algorithm> 
+#include <array>   // ArrayWrapperND 
+#include <cstring> // strnlen 
+#include <string>  // std::string 
+#include <vulkan/vulkan.h> 
+#include <vulkan/vulkan_hpp_macros.hpp> 
+ 
+#if 17 <= VULKAN_HPP_CPP_VERSION 
+# include <string_view> // std::string_view 
+#endif 
+ 
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) 
+# include <tuple>  // std::tie 
+# include <vector> // std::vector 
+#endif 
+ 
+#if !defined( VULKAN_HPP_NO_EXCEPTIONS ) 
+# include <system_error> // std::is_error_code_enum 
+#endif 
+ 
+#if ( VULKAN_HPP_ASSERT == assert ) 
+# include <cassert> 
+#endif 
+ 
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1 
+# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) 
+# include <dlfcn.h> 
+# elif defined( _WIN32 ) 
+typedef struct HINSTANCE__ * HINSTANCE; 
+# if defined( _WIN64 ) 
+typedef int64_t( __stdcall * FARPROC )(); 
+# else 
+typedef int( __stdcall * FARPROC )(); 
+# endif 
+extern "C" __declspec( dllimport ) HINSTANCE __stdcall LoadLibraryA( char const * lpLibFileName ); 
+extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( HINSTANCE hLibModule ); 
+extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE hModule, const char * lpProcName ); 
+# endif 
+#endif 
+ 
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) 
+# include <compare> 
+#endif 
+ 
+#if defined( VULKAN_HPP_SUPPORT_SPAN ) 
+# include <span> 
+#endif 
+ 
+static_assert( VK_HEADER_VERSION == 274, "Wrong VK_HEADER_VERSION!" ); 
+ 
+// <tuple> includes <sys/sysmacros.h> through some other header 
+// this results in major(x) being resolved to gnu_dev_major(x) 
+// which is an expression in a constructor initializer list. 
+#if defined( major ) 
+# undef major 
+#endif 
+#if defined( minor ) 
+# undef minor 
+#endif 
+ 
+// Windows defines MemoryBarrier which is deprecated and collides 
+// with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct. 
+#if defined( MemoryBarrier ) 
+# undef MemoryBarrier 
+#endif 
+ 
+// XLib.h defines True/False, which collides with our vk::True/vk::False 
+// -> undef them and provide some namespace-secure constexpr 
+#if defined( True ) 
+# undef True 
+constexpr int True = 1; 
+#endif 
+#if defined( False ) 
+# undef False 
+constexpr int False = 0; 
+#endif 
+ 
+namespace VULKAN_HPP_NAMESPACE 
+{ 
+  template <typename T, size_t N> 
+  class ArrayWrapper1D : public std::array<T, N> 
+  { 
+  public: 
+    VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT : std::array<T, N>() {} 
+ 
+    VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT : std::array<T, N>( data ) {} 
+ 
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0> 
+    VULKAN_HPP_CONSTEXPR_14 ArrayWrapper1D( std::string const & data ) VULKAN_HPP_NOEXCEPT 
+    { 
+      copy( data.data(), data.length() ); 
+    } 
+ 
+#if 17 <= VULKAN_HPP_CPP_VERSION 
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0> 
+    VULKAN_HPP_CONSTEXPR_14 ArrayWrapper1D( std::string_view data ) VULKAN_HPP_NOEXCEPT 
+    { 
+      copy( data.data(), data.length() ); 
+    } 
+#endif 
+ 
+#if ( VK_USE_64_BIT_PTR_DEFINES == 0 ) 
+    // on 32 bit compiles, needs overloads on index type int to resolve ambiguities 
+    VULKAN_HPP_CONSTEXPR T const & operator[]( int index ) const VULKAN_HPP_NOEXCEPT 
+    { 
+      return std::array<T, N>::operator[]( index ); 
+    } 
+ 
+    T & operator[]( int index ) VULKAN_HPP_NOEXCEPT 
+    { 
+      return std::array<T, N>::operator[]( index ); 
+    } 
+#endif 
+ 
+    operator T const *() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return this->data(); 
+    } 
+ 
+    operator T *() VULKAN_HPP_NOEXCEPT 
+    { 
+      return this->data(); 
+    } 
+ 
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0> 
+    operator std::string() const 
+    { 
+      return std::string( this->data(), strnlen( this->data(), N ) ); 
+    } 
+ 
+#if 17 <= VULKAN_HPP_CPP_VERSION 
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0> 
+    operator std::string_view() const 
+    { 
+      return std::string_view( this->data(), strnlen( this->data(), N ) ); 
+    } 
+#endif 
+ 
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) 
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0> 
+    std::strong_ordering operator<=>( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT 
+    { 
+      return *static_cast<std::array<char, N> const *>( this ) <=> *static_cast<std::array<char, N> const *>( &rhs ); 
+    } 
+#else 
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0> 
+    bool operator<( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT 
+    { 
+      return *static_cast<std::array<char, N> const *>( this ) < *static_cast<std::array<char, N> const *>( &rhs ); 
+    } 
+ 
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0> 
+    bool operator<=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT 
+    { 
+      return *static_cast<std::array<char, N> const *>( this ) <= *static_cast<std::array<char, N> const *>( &rhs ); 
+    } 
+ 
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0> 
+    bool operator>( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT 
+    { 
+      return *static_cast<std::array<char, N> const *>( this ) > *static_cast<std::array<char, N> const *>( &rhs ); 
+    } 
+ 
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0> 
+    bool operator>=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT 
+    { 
+      return *static_cast<std::array<char, N> const *>( this ) >= *static_cast<std::array<char, N> const *>( &rhs ); 
+    } 
+#endif 
+ 
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0> 
+    bool operator==( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT 
+    { 
+      return *static_cast<std::array<char, N> const *>( this ) == *static_cast<std::array<char, N> const *>( &rhs ); 
+    } 
+ 
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0> 
+    bool operator!=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT 
+    { 
+      return *static_cast<std::array<char, N> const *>( this ) != *static_cast<std::array<char, N> const *>( &rhs ); 
+    } 
+ 
+  private: 
+    VULKAN_HPP_CONSTEXPR_14 void copy( char const * data, size_t len ) VULKAN_HPP_NOEXCEPT 
+    { 
+      size_t n = std::min( N, len ); 
+      for ( size_t i = 0; i < n; ++i ) 
+      { 
+        ( *this )[i] = data[i]; 
+      } 
+      for ( size_t i = n; i < N; ++i ) 
+      { 
+        ( *this )[i] = 0; 
+      } 
+    } 
+  }; 
+ 
+  // specialization of relational operators between std::string and arrays of chars 
+  template <size_t N> 
+  bool operator<( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT 
+  { 
+    return lhs < rhs.data(); 
+  } 
+ 
+  template <size_t N> 
+  bool operator<=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT 
+  { 
+    return lhs <= rhs.data(); 
+  } 
+ 
+  template <size_t N> 
+  bool operator>( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT 
+  { 
+    return lhs > rhs.data(); 
+  } 
+ 
+  template <size_t N> 
+  bool operator>=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT 
+  { 
+    return lhs >= rhs.data(); 
+  } 
+ 
+  template <size_t N> 
+  bool operator==( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT 
+  { 
+    return lhs == rhs.data(); 
+  } 
+ 
+  template <size_t N> 
+  bool operator!=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT 
+  { 
+    return lhs != rhs.data(); 
+  } 
+ 
+  template <typename T, size_t N, size_t M> 
+  class ArrayWrapper2D : public std::array<ArrayWrapper1D<T, M>, N> 
+  { 
+  public: 
+    VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT : std::array<ArrayWrapper1D<T, M>, N>() {} 
+ 
+    VULKAN_HPP_CONSTEXPR ArrayWrapper2D( std::array<std::array<T, M>, N> const & data ) VULKAN_HPP_NOEXCEPT 
+      : std::array<ArrayWrapper1D<T, M>, N>( *reinterpret_cast<std::array<ArrayWrapper1D<T, M>, N> const *>( &data ) ) 
+    { 
+    } 
+  }; 
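+ 
+  // Illustrative sketch (not part of the generated API): ArrayWrapper1D is the 
+  // type behind fixed-size char arrays such as PhysicalDeviceProperties::deviceName, 
+  // which is why they convert to and compare with std::string directly. Assuming 
+  // a valid vk::PhysicalDevice named gpu: 
+  // 
+  //   vk::PhysicalDeviceProperties props = gpu.getProperties(); 
+  //   std::string name = props.deviceName;  // copies up to the first '\0' via strnlen 
+  //   bool isLavapipe  = ( name == std::string( "llvmpipe" ) ); 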
+ 
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) 
+  template <typename T> 
+  class ArrayProxy 
+  { 
+  public: 
+    VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT 
+      : m_count( 0 ) 
+      , m_ptr( nullptr ) 
+    { 
+    } 
+ 
+    VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT 
+      : m_count( 0 ) 
+      , m_ptr( nullptr ) 
+    { 
+    } 
+ 
+    ArrayProxy( T const & value ) VULKAN_HPP_NOEXCEPT 
+      : m_count( 1 ) 
+      , m_ptr( &value ) 
+    { 
+    } 
+ 
+    ArrayProxy( uint32_t count, T const * ptr ) VULKAN_HPP_NOEXCEPT 
+      : m_count( count ) 
+      , m_ptr( ptr ) 
+    { 
+    } 
+ 
+    template <std::size_t C> 
+    ArrayProxy( T const ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT 
+      : m_count( C ) 
+      , m_ptr( ptr ) 
+    { 
+    } 
+ 
+# if __GNUC__ >= 9 
+#  pragma GCC diagnostic push 
+#  pragma GCC diagnostic ignored "-Winit-list-lifetime" 
+# endif 
+ 
+    ArrayProxy( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT 
+      : m_count( static_cast<uint32_t>( list.size() ) ) 
+      , m_ptr( list.begin() ) 
+    { 
+    } 
+ 
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0> 
+    ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT 
+      : m_count( static_cast<uint32_t>( list.size() ) ) 
+      , m_ptr( list.begin() ) 
+    { 
+    } 
+ 
+# if __GNUC__ >= 9 
+#  pragma GCC diagnostic pop 
+# endif 
+ 
+    // Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly 
+    // convertible to size_t. The const version can capture temporaries, with lifetime ending at end of statement. 
+    template <typename V, 
+              typename std::enable_if<std::is_convertible<decltype( std::declval<V>().data() ), T *>::value && 
+                                      std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value>::type * = nullptr> 
+    ArrayProxy( V const & v ) VULKAN_HPP_NOEXCEPT 
+      : m_count( static_cast<uint32_t>( v.size() ) ) 
+      , m_ptr( v.data() ) 
+    { 
+    } 
+ 
+    const T * begin() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return m_ptr; 
+    } 
+ 
+    const T * end() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return m_ptr + m_count; 
+    } 
+ 
+    const T & front() const VULKAN_HPP_NOEXCEPT 
+    { 
+      VULKAN_HPP_ASSERT( m_count && m_ptr ); 
+      return *m_ptr; 
+    } 
+ 
+    const T & back() const VULKAN_HPP_NOEXCEPT 
+    { 
+      VULKAN_HPP_ASSERT( m_count && m_ptr ); 
+      return *( m_ptr + m_count - 1 ); 
+    } 
+ 
+    bool empty() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return ( m_count == 0 ); 
+    } 
+ 
+    uint32_t size() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return m_count; 
+    } 
+ 
+    T const * data() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return m_ptr; 
+    } 
+ 
+  private: 
+    uint32_t  m_count; 
+    T const * m_ptr; 
+  }; 
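+ 
+  // Illustrative sketch (not part of the generated API): ArrayProxy collapses a 
+  // single element, a C array, an initializer list, or any contiguous container 
+  // into the (count, pointer) pair the C API expects. For a hypothetical helper 
+  // 
+  //   void wait( vk::Device d, vk::ArrayProxy<const vk::Fence> const & fences ); 
+  // 
+  // all of these calls are valid: 
+  // 
+  //   wait( device, fence );               // one element 
+  //   wait( device, { fenceA, fenceB } );  // initializer list; a temporary is fine here 
+  //   wait( device, fenceVector );         // std::vector<vk::Fence> 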
+ 
+  template <typename T> 
+  class ArrayProxyNoTemporaries 
+  { 
+  public: 
+    VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT 
+      : m_count( 0 ) 
+      , m_ptr( nullptr ) 
+    { 
+    } 
+ 
+    VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries( std::nullptr_t ) VULKAN_HPP_NOEXCEPT 
+      : m_count( 0 ) 
+      , m_ptr( nullptr ) 
+    { 
+    } 
+ 
+    ArrayProxyNoTemporaries( T & value ) VULKAN_HPP_NOEXCEPT 
+      : m_count( 1 ) 
+      , m_ptr( &value ) 
+    { 
+    } 
+ 
+    template <typename V> 
+    ArrayProxyNoTemporaries( V && value ) = delete; 
+ 
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0> 
+    ArrayProxyNoTemporaries( typename std::remove_const<T>::type & value ) VULKAN_HPP_NOEXCEPT 
+      : m_count( 1 ) 
+      , m_ptr( &value ) 
+    { 
+    } 
+ 
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0> 
+    ArrayProxyNoTemporaries( typename std::remove_const<T>::type && value ) = delete; 
+ 
+    ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT 
+      : m_count( count ) 
+      , m_ptr( ptr ) 
+    { 
+    } 
+ 
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0> 
+    ArrayProxyNoTemporaries( uint32_t count, typename std::remove_const<T>::type * ptr ) VULKAN_HPP_NOEXCEPT 
+      : m_count( count ) 
+      , m_ptr( ptr ) 
+    { 
+    } 
+ 
+    template <std::size_t C> 
+    ArrayProxyNoTemporaries( T ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT 
+      : m_count( C ) 
+      , m_ptr( ptr ) 
+    { 
+    } 
+ 
+    template <std::size_t C> 
+    ArrayProxyNoTemporaries( T ( &&ptr )[C] ) = delete; 
+ 
+    template <std::size_t C, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0> 
+    ArrayProxyNoTemporaries( typename std::remove_const<T>::type ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT 
+      : m_count( C ) 
+      , m_ptr( ptr ) 
+    { 
+    } 
+ 
+    template <std::size_t C, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0> 
+    ArrayProxyNoTemporaries( typename std::remove_const<T>::type ( &&ptr )[C] ) = delete; 
+ 
+    ArrayProxyNoTemporaries( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT 
+      : m_count( static_cast<uint32_t>( list.size() ) ) 
+      , m_ptr( list.begin() ) 
+    { 
+    } 
+ 
+    ArrayProxyNoTemporaries( std::initializer_list<T> const && list ) = delete; 
+ 
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0> 
+    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT 
+      : m_count( static_cast<uint32_t>( list.size() ) ) 
+      , m_ptr( list.begin() ) 
+    { 
+    } 
+ 
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0> 
+    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const && list ) = delete; 
+ 
+    ArrayProxyNoTemporaries( std::initializer_list<T> & list ) VULKAN_HPP_NOEXCEPT 
+      : m_count( static_cast<uint32_t>( list.size() ) ) 
+      , m_ptr( list.begin() ) 
+    { 
+    } 
+ 
+    ArrayProxyNoTemporaries( std::initializer_list<T> && list ) = delete; 
+ 
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0> 
+    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> & list ) VULKAN_HPP_NOEXCEPT 
+      : m_count( static_cast<uint32_t>( list.size() ) ) 
+      , m_ptr( list.begin() ) 
+    { 
+    } 
+ 
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0> 
+    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> && list ) = delete; 
+ 
+    // Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly convertible to size_t. 
+    template <typename V, 
+              typename std::enable_if<std::is_convertible<decltype( std::declval<V>().data() ), T *>::value && 
+                                      std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value>::type * = nullptr> 
+    ArrayProxyNoTemporaries( V & v ) VULKAN_HPP_NOEXCEPT 
+      : m_count( static_cast<uint32_t>( v.size() ) ) 
+      , m_ptr( v.data() ) 
+    { 
+    } 
+ 
+    const T * begin() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return m_ptr; 
+    } 
+ 
+    const T * end() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return m_ptr + m_count; 
+    } 
+ 
+    const T & front() const VULKAN_HPP_NOEXCEPT 
+    { 
+      VULKAN_HPP_ASSERT( m_count && m_ptr ); 
+      return *m_ptr; 
+    } 
+ 
+    const T & back() const VULKAN_HPP_NOEXCEPT 
+    { 
+      VULKAN_HPP_ASSERT( m_count && m_ptr ); 
+      return *( m_ptr + m_count - 1 ); 
+    } 
+ 
+    bool empty() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return ( m_count == 0 ); 
+    } 
+ 
+    uint32_t size() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return m_count; 
+    } 
+ 
+    T * data() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return m_ptr; 
+    } 
+ 
+  private: 
+    uint32_t m_count; 
+    T *      m_ptr; 
+  }; 
+ 
+  template <typename T> 
+  class StridedArrayProxy : protected ArrayProxy<T> 
+  { 
+  public: 
+    using ArrayProxy<T>::ArrayProxy; 
+ 
+    StridedArrayProxy( uint32_t count, T const * ptr, uint32_t stride ) VULKAN_HPP_NOEXCEPT 
+      : ArrayProxy<T>( count, ptr ) 
+      , m_stride( stride ) 
+    { 
+      VULKAN_HPP_ASSERT( sizeof( T ) <= stride ); 
+    } 
+ 
+    using ArrayProxy<T>::begin; 
+ 
+    const T * end() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return reinterpret_cast<T const *>( reinterpret_cast<uint8_t const *>( begin() ) + size() * m_stride ); 
+    } 
+ 
+    using ArrayProxy<T>::front; 
+ 
+    const T & back() const VULKAN_HPP_NOEXCEPT 
+    { 
+      VULKAN_HPP_ASSERT( begin() && size() ); 
+      return *reinterpret_cast<T const *>( reinterpret_cast<uint8_t const *>( begin() ) + ( size() - 1 ) * m_stride ); 
+    } 
+ 
+    using ArrayProxy<T>::empty; 
+    using ArrayProxy<T>::size; 
+    using ArrayProxy<T>::data; 
+ 
+    uint32_t stride() const 
+    { 
+      return m_stride; 
+    } 
+ 
+  private: 
+    uint32_t m_stride = sizeof( T ); 
+  }; 
+ 
+  template <typename RefType> 
+  class Optional 
+  { 
+  public: 
+    Optional( RefType & reference ) VULKAN_HPP_NOEXCEPT 
+    { 
+      m_ptr = &reference; 
+    } 
+ 
+    Optional( RefType * ptr ) VULKAN_HPP_NOEXCEPT 
+    { 
+      m_ptr = ptr; 
+    } 
+ 
+    Optional( std::nullptr_t ) VULKAN_HPP_NOEXCEPT 
+    { 
+      m_ptr = nullptr; 
+    } 
+ 
+    operator RefType *() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return m_ptr; 
+    } 
+ 
+    RefType const * operator->() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return m_ptr; 
+    } 
+ 
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return !!m_ptr; 
+    } 
+ 
+  private: 
+    RefType * m_ptr; 
+  }; 
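+ 
+  // Illustrative note (not part of the generated API): Optional<RefType> models 
+  // parameters that may legally be absent. It converts to RefType * and accepts 
+  // nullptr; the allocator argument of the enhanced-mode create/destroy calls, 
+  // declared as Optional<const AllocationCallbacks> allocator = nullptr, is the 
+  // typical use. 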
+ 
+  template <typename X, typename Y> 
+  struct StructExtends 
+  { 
+    enum 
+    { 
+      value = false 
+    }; 
+  }; 
+ 
+  template <typename Type, class...> 
+  struct IsPartOfStructureChain 
+  { 
+    static const bool valid = false; 
+  }; 
+ 
+  template <typename Type, typename Head, typename... Tail> 
+  struct IsPartOfStructureChain<Type, Head, Tail...> 
+  { 
+    static const bool valid = std::is_same<Type, Head>::value || IsPartOfStructureChain<Type, Tail...>::valid; 
+  }; 
+ 
+  template <size_t Index, typename T, typename... ChainElements> 
+  struct StructureChainContains 
+  { 
+    static const bool value = std::is_same<T, typename std::tuple_element<Index, std::tuple<ChainElements...>>::type>::value || 
+                              StructureChainContains<Index - 1, T, ChainElements...>::value; 
+  }; 
+ 
+  template <typename T, typename... ChainElements> 
+  struct StructureChainContains<0, T, ChainElements...> 
+  { 
+    static const bool value = std::is_same<T, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value; 
+  }; 
+ 
+  template <size_t Index, typename... ChainElements> 
+  struct StructureChainValidation 
+  { 
+    using TestType = typename std::tuple_element<Index, std::tuple<ChainElements...>>::type; 
+    static const bool valid = StructExtends<TestType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value && 
+                              ( TestType::allowDuplicate || !StructureChainContains<Index - 1, TestType, ChainElements...>::value ) && 
+                              StructureChainValidation<Index - 1, ChainElements...>::valid; 
+  }; 
+ 
+  template <typename... ChainElements> 
+  struct StructureChainValidation<0, ChainElements...> 
+  { 
+    static const bool valid = true; 
+  }; 
+ 
+  template <typename... ChainElements> 
+  class StructureChain : public std::tuple<ChainElements...> 
+  { 
+  public: 
+    StructureChain() VULKAN_HPP_NOEXCEPT 
+    { 
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid, "The structure chain is not valid!" ); 
+      link<sizeof...( ChainElements ) - 1>(); 
+    } 
+ 
+    StructureChain( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( rhs ) 
+    { 
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid, "The structure chain is not valid!" ); 
+      link( &std::get<0>( *this ), 
+            &std::get<0>( rhs ), 
+            reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ), 
+            reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( rhs ) ) ); 
+    } 
+ 
+    StructureChain( StructureChain && rhs ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( std::forward<std::tuple<ChainElements...>>( rhs ) ) 
+    { 
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid, "The structure chain is not valid!" ); 
+      link( &std::get<0>( *this ), 
+            &std::get<0>( rhs ), 
+            reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ), 
+            reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( rhs ) ) ); 
+    } 
+ 
+    StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( elems... ) 
+    { 
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid, "The structure chain is not valid!" ); 
+      link<sizeof...( ChainElements ) - 1>(); 
+    } 
+ 
+    StructureChain & operator=( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT 
+    { 
+      std::tuple<ChainElements...>::operator=( rhs ); 
+      link( &std::get<0>( *this ), 
+            &std::get<0>( rhs ), 
+            reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ), 
+            reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( rhs ) ) ); 
+      return *this; 
+    } 
+ 
+    StructureChain & operator=( StructureChain && rhs ) = delete; 
+ 
+    template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0> 
+    T & get() VULKAN_HPP_NOEXCEPT 
+    { 
+      return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>( static_cast<std::tuple<ChainElements...> &>( *this ) ); 
+    } 
+ 
+    template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0> 
+    T const & get() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>( static_cast<std::tuple<ChainElements...> const &>( *this ) ); 
+    } 
+ 
+    template <typename T0, typename T1, typename... Ts> 
+    std::tuple<T0 &, T1 &, Ts &...> get() VULKAN_HPP_NOEXCEPT 
+    { 
+      return std::tie( get<T0>(), get<T1>(), get<Ts>()... ); 
+    } 
+ 
+    template <typename T0, typename T1, typename... Ts> 
+    std::tuple<T0 const &, T1 const &, Ts const &...> get() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return std::tie( get<T0>(), get<T1>(), get<Ts>()... ); 
+    } 
+ 
+    // assign a complete structure to the StructureChain without modifying the chaining 
+    template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0> 
+    StructureChain & assign( const T & rhs ) VULKAN_HPP_NOEXCEPT 
+    { 
+      T &    lhs   = get<T, Which>(); 
+      void * pNext = lhs.pNext; 
+      lhs          = rhs; 
+      lhs.pNext    = pNext; 
+      return *this; 
+    } 
+ 
+    template <typename ClassType, size_t Which = 0> 
+    typename std::enable_if<std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value && ( Which == 0 ), bool>::type 
+      isLinked() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return true; 
+    } 
+ 
+    template <typename ClassType, size_t Which = 0> 
+    typename std::enable_if<!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || ( Which != 0 ), bool>::type 
+      isLinked() const VULKAN_HPP_NOEXCEPT 
+    { 
+      static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid, "Can't unlink Structure that's not part of this StructureChain!" ); 
+      return isLinked( reinterpret_cast<VkBaseInStructure const *>( &get<ClassType, Which>() ) ); 
+    } 
+ 
+    template <typename ClassType, size_t Which = 0> 
+    typename std::enable_if<!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || ( Which != 0 ), void>::type 
+      relink() VULKAN_HPP_NOEXCEPT 
+    { 
+      static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid, "Can't relink Structure that's not part of this StructureChain!" ); 
+      auto pNext = reinterpret_cast<VkBaseInStructure *>( &get<ClassType, Which>() ); 
+      VULKAN_HPP_ASSERT( !isLinked( pNext ) ); 
+      auto & headElement = std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) ); 
+      pNext->pNext       = reinterpret_cast<VkBaseInStructure const *>( headElement.pNext ); 
+      headElement.pNext  = pNext; 
+    } 
+ 
+    template <typename ClassType, size_t Which = 0> 
+    typename std::enable_if<!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || ( Which != 0 ), void>::type 
+      unlink() VULKAN_HPP_NOEXCEPT 
+    { 
+      static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid, "Can't unlink Structure that's not part of this StructureChain!" ); 
+      unlink( reinterpret_cast<VkBaseOutStructure const *>( &get<ClassType, Which>() ) ); 
+    } 
+ 
+  private: 
+    template <int Index, typename T, int Which, typename, class First, class... Types> 
+    struct ChainElementIndex : ChainElementIndex<Index + 1, T, Which, void, Types...> 
+    { 
+    }; 
+ 
+    template <int Index, typename T, int Which, class First, class... Types> 
+    struct ChainElementIndex<Index, T, Which, typename std::enable_if<!std::is_same<T, First>::value, void>::type, First, Types...> 
+      : ChainElementIndex<Index + 1, T, Which, void, Types...> 
+    { 
+    }; 
+ 
+    template <int Index, typename T, int Which, class First, class... Types> 
+    struct ChainElementIndex<Index, T, Which, typename std::enable_if<std::is_same<T, First>::value && ( Which != 0 ), void>::type, First, Types...> 
+      : ChainElementIndex<Index + 1, T, Which - 1, void, Types...> 
+    { 
+    }; 
+ 
+    template <int Index, typename T, int Which, class First, class... Types> 
+    struct ChainElementIndex<Index, T, Which, typename std::enable_if<std::is_same<T, First>::value && ( Which == 0 ), void>::type, First, Types...> 
+      : std::integral_constant<int, Index> 
+    { 
+    }; 
+ 
+    bool isLinked( VkBaseInStructure const * pNext ) const VULKAN_HPP_NOEXCEPT 
+    { 
+      VkBaseInStructure const * elementPtr = 
+        reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( static_cast<std::tuple<ChainElements...> const &>( *this ) ) ); 
+      while ( elementPtr ) 
+      { 
+        if ( elementPtr->pNext == pNext ) 
+        { 
+          return true; 
+        } 
+        elementPtr = elementPtr->pNext; 
+      } 
+      return false; 
+    } 
+ 
+    template <size_t Index> 
+    typename std::enable_if<Index != 0, void>::type link() VULKAN_HPP_NOEXCEPT 
+    { 
+      auto & x = std::get<Index - 1>( static_cast<std::tuple<ChainElements...> &>( *this ) ); 
+      x.pNext  = &std::get<Index>( static_cast<std::tuple<ChainElements...> &>( *this ) ); 
+      link<Index - 1>(); 
+    } 
+ 
+    template <size_t Index> 
+    typename std::enable_if<Index == 0, void>::type link() VULKAN_HPP_NOEXCEPT 
+    { 
+    } 
+ 
+    void link( void * dstBase, void const * srcBase, VkBaseOutStructure * dst, VkBaseInStructure const * src ) 
+    { 
+      while ( src->pNext ) 
+      { 
+        std::ptrdiff_t offset = reinterpret_cast<char const *>( src->pNext ) - reinterpret_cast<char const *>( srcBase ); 
+        dst->pNext            = reinterpret_cast<VkBaseOutStructure *>( reinterpret_cast<char *>( dstBase ) + offset ); 
+        dst                   = dst->pNext; 
+        src                   = src->pNext; 
+      } 
+      dst->pNext = nullptr; 
+    } 
+ 
+    void unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT 
+    { 
+      VkBaseOutStructure * elementPtr = reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) ) ); 
+      while ( elementPtr && ( elementPtr->pNext != pNext ) ) 
+      { 
+        elementPtr = elementPtr->pNext; 
+      } 
+      if ( elementPtr ) 
+      { 
+        elementPtr->pNext = pNext->pNext; 
+      } 
+      else 
+      { 
+        VULKAN_HPP_ASSERT( false );  // fires, if the ClassType member has already been unlinked ! 
+      } 
+    } 
+  }; 
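+ 
+  // Illustrative sketch (not part of the generated API): StructureChain wires up 
+  // the pNext members of its elements at construction time. Assuming a valid 
+  // vk::PhysicalDevice gpu whose driver knows the queried structs: 
+  // 
+  //   auto chain = gpu.getFeatures2<vk::PhysicalDeviceFeatures2, 
+  //                                 vk::PhysicalDeviceVulkan12Features>(); 
+  //   bool bda = chain.get<vk::PhysicalDeviceVulkan12Features>().bufferDeviceAddress; 
+  // 
+  // unlink()/relink() temporarily drop an element from the chain without 
+  // destroying its storage. 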
+ 
+  // interrupt the VULKAN_HPP_NAMESPACE for a moment to add specializations of std::tuple_size and std::tuple_element for the StructureChain! 
+} 
+ 
+namespace std 
+{ 
+  template <typename... Elements> 
+  struct tuple_size<VULKAN_HPP_NAMESPACE::StructureChain<Elements...>> 
+  { 
+    static constexpr size_t value = std::tuple_size<std::tuple<Elements...>>::value; 
+  }; 
+ 
+  template <std::size_t Index, typename... Elements> 
+  struct tuple_element<Index, VULKAN_HPP_NAMESPACE::StructureChain<Elements...>> 
+  { 
+    using type = typename std::tuple_element<Index, std::tuple<Elements...>>::type; 
+  }; 
+}  // namespace std 
+ 
+namespace VULKAN_HPP_NAMESPACE 
+{ 
+ 
+# if !defined( VULKAN_HPP_NO_SMART_HANDLE ) 
+  template <typename Type, typename Dispatch> 
+  class UniqueHandleTraits; 
+ 
+  template <typename Type, typename Dispatch> 
+  class UniqueHandle : public UniqueHandleTraits<Type, Dispatch>::deleter 
+  { 
+  private: 
+    using Deleter = typename UniqueHandleTraits<Type, Dispatch>::deleter; 
+ 
+  public: 
+    using element_type = Type; 
+ 
+    UniqueHandle() : Deleter(), m_value() {} 
+ 
+    explicit UniqueHandle( Type const & value, Deleter const & deleter = Deleter() ) VULKAN_HPP_NOEXCEPT 
+      : Deleter( deleter ) 
+      , m_value( value ) 
+    { 
+    } 
+ 
+    UniqueHandle( UniqueHandle const & ) = delete; 
+ 
+    UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT 
+      : Deleter( std::move( static_cast<Deleter &>( other ) ) ) 
+      , m_value( other.release() ) 
+    { 
+    } 
+ 
+    ~UniqueHandle() VULKAN_HPP_NOEXCEPT 
+    { 
+      if ( m_value ) 
+      { 
+        this->destroy( m_value ); 
+      } 
+    } 
+ 
+    UniqueHandle & operator=( UniqueHandle const & ) = delete; 
+ 
+    UniqueHandle & operator=( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT 
+    { 
+      reset( other.release() ); 
+      *static_cast<Deleter *>( this ) = std::move( static_cast<Deleter &>( other ) ); 
+      return *this; 
+    } 
+ 
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return m_value.operator bool(); 
+    } 
+ 
+    Type const * operator->() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return &m_value; 
+    } 
+ 
+    Type * operator->() VULKAN_HPP_NOEXCEPT 
+    { 
+      return &m_value; 
+    } 
+ 
+    Type const & operator*() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return m_value; 
+    } 
+ 
+    Type & operator*() VULKAN_HPP_NOEXCEPT 
+    { 
+      return m_value; 
+    } 
+ 
+    const Type & get() const VULKAN_HPP_NOEXCEPT 
+    { 
+      return m_value; 
+    } 
+ 
+    Type & get() VULKAN_HPP_NOEXCEPT 
+    { 
+      return m_value; 
+    } 
+ 
+    void reset( Type const & value = Type() ) VULKAN_HPP_NOEXCEPT 
+    { 
+      if ( m_value != value ) 
+      { 
+        if ( m_value ) 
+        { 
+          this->destroy( m_value ); 
+        } 
+        m_value = value; 
+      } 
+    } 
+ 
+    Type release() VULKAN_HPP_NOEXCEPT 
+    { 
+      Type value = m_value; 
+      m_value    = nullptr; 
+      return value; 
+    } 
+ 
+    void swap( UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT 
+    { 
+      std::swap( m_value, rhs.m_value ); 
+      std::swap( static_cast<Deleter &>( *this ), static_cast<Deleter &>( rhs ) ); 
+    } 
+ 
+  private: 
+    Type m_value; 
+  }; 
+ 
+  template <typename UniqueType> 
+  VULKAN_HPP_INLINE std::vector<typename UniqueType::element_type> uniqueToRaw( std::vector<UniqueType> const & handles ) 
+  { 
+    std::vector<typename UniqueType::element_type> newBuffer( handles.size() ); 
+    std::transform( handles.begin(), handles.end(), newBuffer.begin(), []( UniqueType const & handle ) { return handle.get(); } ); 
+    return newBuffer; 
+  } 
+ 
+  template <typename Type, typename Dispatch> 
+  VULKAN_HPP_INLINE void swap( UniqueHandle<Type, Dispatch> & lhs, UniqueHandle<Type, Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT 
+  { 
+    lhs.swap( rhs ); 
+  } 
+# endif 
+#endif // VULKAN_HPP_DISABLE_ENHANCED_MODE 
+ 
+  class DispatchLoaderBase 
+  { 
+  public: 
+    DispatchLoaderBase() = default; 
+    DispatchLoaderBase( std::nullptr_t ) 
+#if !defined( NDEBUG ) 
+      : m_valid( false ) 
+#endif 
+    { 
+    } 
+ 
+#if !defined( NDEBUG ) 
+    size_t getVkHeaderVersion() const 
+    { 
+      VULKAN_HPP_ASSERT( m_valid ); 
+      return vkHeaderVersion; 
+    } 
+ 
+  private: 
+    size_t vkHeaderVersion = VK_HEADER_VERSION; 
+    bool   m_valid         = true; 
+#endif 
+  }; 
+ 
+#if !defined( VK_NO_PROTOTYPES ) 
+  class DispatchLoaderStatic : public DispatchLoaderBase 
+  { 
+  public: 
+    //=== VK_VERSION_1_0 === 
+ 
+    VkResult 
+      vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT 
+    { 
+      return ::vkCreateInstance( pCreateInfo, pAllocator, 
pInstance ); + } + + void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyInstance( instance, pAllocator ); + } + + VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t * pPhysicalDeviceCount, VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); + } + + void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures ); + } + + void + vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties ); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); + } + + PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetInstanceProcAddr( instance, pName ); + } + + PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceProcAddr( device, pName ); + } + + VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); + } + + void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDevice( device, pAllocator ); + } + + VkResult vkEnumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); + } + + VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, + const char * pLayerName, + uint32_t * pPropertyCount, + VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); 
+ } + + VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); + } + + VkResult + vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); + } + + void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue ); + } + + VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); + } + + VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueWaitIdle( queue ); + } + + VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeviceWaitIdle( device ); + } + + VkResult vkAllocateMemory( VkDevice device, + const VkMemoryAllocateInfo * pAllocateInfo, + const VkAllocationCallbacks * pAllocator, + VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); + } + + void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeMemory( device, memory, pAllocator ); + } + + VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void ** ppData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory( device, memory, offset, size, flags, ppData ); + } + + void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnmapMemory( device, memory ); + } + + VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + } + + VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + } + + void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes ); + } + + VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); + } + + VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory( device, image, memory, memoryOffset ); + } + + void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements ); + } + + void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements * 
pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements ); + } + + void vkGetImageSparseMemoryRequirements( VkDevice device, + VkImage image, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); + } + + VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo * pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); + } + + VkResult vkCreateFence( VkDevice device, + const VkFenceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence ); + } + + void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyFence( device, fence, pAllocator ); + } + + VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetFences( device, fenceCount, pFences ); + } + + VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceStatus( device, fence ); + } + + VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); + } + + VkResult vkCreateSemaphore( VkDevice device, + const VkSemaphoreCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore ); + } + + void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySemaphore( device, semaphore, pAllocator ); + } + + VkResult vkCreateEvent( VkDevice device, + const VkEventCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkEvent * pEvent ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); + } + + void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyEvent( device, event, pAllocator ); + } + + VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetEventStatus( device, event ); + } + + VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetEvent( device, event ); + } + + VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetEvent( device, event ); + } + + VkResult 
vkCreateQueryPool( VkDevice device, + const VkQueryPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); + } + + void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyQueryPool( device, queryPool, pAllocator ); + } + + VkResult vkGetQueryPoolResults( VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); + } + + VkResult vkCreateBuffer( VkDevice device, + const VkBufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); + } + + void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBuffer( device, buffer, pAllocator ); + } + + VkResult vkCreateBufferView( VkDevice device, + const VkBufferViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView ); + } + + void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBufferView( device, bufferView, pAllocator ); + } + + VkResult vkCreateImage( VkDevice device, + const VkImageCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImage * pImage ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); + } + + void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyImage( device, image, pAllocator ); + } + + void vkGetImageSubresourceLayout( VkDevice device, + VkImage image, + const VkImageSubresource * pSubresource, + VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); + } + + VkResult vkCreateImageView( VkDevice device, + const VkImageViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImageView * pView ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); + } + + void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyImageView( device, imageView, pAllocator ); + } + + VkResult vkCreateShaderModule( VkDevice device, + const VkShaderModuleCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule ); + } + + void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); + } + + VkResult vkCreatePipelineCache( VkDevice device, + const 
VkPipelineCacheCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); + } + + void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); + } + + VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); + } + + VkResult + vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); + } + + VkResult vkCreateGraphicsPipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkCreateComputePipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipeline( device, pipeline, pAllocator ); + } + + VkResult vkCreatePipelineLayout( VkDevice device, + const VkPipelineLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); + } + + void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); + } + + VkResult vkCreateSampler( VkDevice device, + const VkSamplerCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); + } + + void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySampler( device, sampler, pAllocator ); + } + + VkResult vkCreateDescriptorSetLayout( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); + } + + void vkDestroyDescriptorSetLayout( VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); + } + + 
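+
+    // --- Editorial sketch (illustrative, not part of the upstream header) ---
+    // Every member above forwards verbatim to the identically named global
+    // entry point, so an instance of this dispatcher can stand in wherever
+    // vulkan.hpp expects a dispatch object. A minimal use of the
+    // VK_VERSION_1_0 wrappers, assuming a valid VkInstance named `instance`
+    // and the stock vulkan.hpp class name DispatchLoaderStatic:
+    //
+    //   VULKAN_HPP_NAMESPACE::DispatchLoaderStatic d;
+    //   uint32_t count = 0;
+    //   d.vkEnumeratePhysicalDevices( instance, &count, nullptr );     // 1st call: query count
+    //   std::vector<VkPhysicalDevice> gpus( count );
+    //   d.vkEnumeratePhysicalDevices( instance, &count, gpus.data() ); // 2nd call: fill array
+    //
+    // This two-call idiom (query the count, then fill the array) applies to
+    // most of the enumeration-style functions wrapped in this class.
+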
VkResult vkCreateDescriptorPool( VkDevice device, + const VkDescriptorPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); + } + + void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); + } + + VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetDescriptorPool( device, descriptorPool, flags ); + } + + VkResult vkAllocateDescriptorSets( VkDevice device, + const VkDescriptorSetAllocateInfo * pAllocateInfo, + VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); + } + + VkResult vkFreeDescriptorSets( VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); + } + + void vkUpdateDescriptorSets( VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); + } + + VkResult vkCreateFramebuffer( VkDevice device, + const VkFramebufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); + } + + void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); + } + + VkResult vkCreateRenderPass( VkDevice device, + const VkRenderPassCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyRenderPass( device, renderPass, pAllocator ); + } + + void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); + } + + VkResult vkCreateCommandPool( VkDevice device, + const VkCommandPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); + } + + void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCommandPool( device, commandPool, pAllocator ); + } + + VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkResetCommandPool( device, commandPool, flags ); + } + + VkResult vkAllocateCommandBuffers( VkDevice device, + const VkCommandBufferAllocateInfo * pAllocateInfo, + VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); + } + + void vkFreeCommandBuffers( VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers ); + } + + VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); + } + + VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEndCommandBuffer( commandBuffer ); + } + + VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetCommandBuffer( commandBuffer, flags ); + } + + void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); + } + + void + vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); + } + + void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); + } + + void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); + } + + void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } + + void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); + } + + void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); + } + + void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); + } + + void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); + } + + void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); + } + + void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, + 
VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets( + commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); + } + + void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); + } + + void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); + } + + void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + } + + void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } + + void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); + } + + void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy * pRegions ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); + } + + void vkCmdCopyImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdBlitImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit * pRegions, + VkFilter filter ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, 
regionCount, pRegions, filter ); + } + + void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); + } + + void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void * pData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); + } + + void + vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); + } + + void vkCmdClearColorImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue * pColor, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); + } + + void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); + } + + void vkCmdClearAttachments( VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment * pAttachments, + uint32_t rectCount, + const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); + } + + void vkCmdResolveImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent( commandBuffer, event, stageMask ); + } + + void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent( commandBuffer, event, stageMask ); + } + + void vkCmdWaitEvents( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCmdWaitEvents( commandBuffer, + eventCount, + pEvents, + srcStageMask, + dstStageMask, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } + + void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier( commandBuffer, + srcStageMask, + dstStageMask, + dependencyFlags, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } + + void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); + } + + void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndQuery( commandBuffer, queryPool, query ); + } + + void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount ); + } + + void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); + } + + void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); + } + + void vkCmdPushConstants( VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues ); + } + + void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); + } + + void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass( commandBuffer, contents ); + } + + void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass( commandBuffer ); + } + + void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); + } + + //=== VK_VERSION_1_1 === + + VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkEnumerateInstanceVersion( pApiVersion ); + } + + VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); + } + + void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); + } + + void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDeviceMask( commandBuffer, deviceMask ); + } + + void vkCmdDispatchBase( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + } + + void vkGetImageMemoryRequirements2( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); + } + + void vkGetBufferMemoryRequirements2( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); + } + + void vkGetImageSparseMemoryRequirements2( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); + } + + void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties ); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + } + + void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTrimCommandPool( device, commandPool, flags ); + } + + void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); + } + + VkResult vkCreateSamplerYcbcrConversion( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); + } + + void vkDestroySamplerYcbcrConversion( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); + } + + VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); + } + + void vkDestroyDescriptorUpdateTemplate( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); + } + + void vkUpdateDescriptorSetWithTemplate( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); + } + + void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + } + + void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, + const 
VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + } + + void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + } + + void vkGetDescriptorSetLayoutSupport( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); + } + + //=== VK_VERSION_1_2 === + + void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + VkResult vkCreateRenderPass2( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); + } + + void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); + } + + void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo ); + } + + void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); + } + + VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); + } + + VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphores( device, pWaitInfo, timeout ); + } + + VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSignalSemaphore( device, pSignalInfo ); + } + + 
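+
+    // --- Editorial sketch (illustrative, not part of the upstream header) ---
+    // vkGetSemaphoreCounterValue / vkWaitSemaphores / vkSignalSemaphore above
+    // are the VK_VERSION_1_2 timeline-semaphore entry points. A minimal
+    // host-side wait, assuming `device` and a timeline VkSemaphore `timeline`
+    // created elsewhere, with `d` an instance of this dispatcher:
+    //
+    //   uint64_t value = 5;  // block until the semaphore payload reaches 5
+    //   VkSemaphoreWaitInfo waitInfo{};
+    //   waitInfo.sType          = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO;
+    //   waitInfo.semaphoreCount = 1;
+    //   waitInfo.pSemaphores    = &timeline;
+    //   waitInfo.pValues        = &value;
+    //   d.vkWaitSemaphores( device, &waitInfo, UINT64_MAX );
+    //
+    // The call forwards to ::vkWaitSemaphores exactly as wrapped above.
+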
VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddress( device, pInfo ); + } + + uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureAddress( device, pInfo ); + } + + uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo ); + } + + //=== VK_VERSION_1_3 === + + VkResult vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, + uint32_t * pToolCount, + VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties ); + } + + VkResult vkCreatePrivateDataSlot( VkDevice device, + const VkPrivateDataSlotCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot ); + } + + void vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator ); + } + + VkResult vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data ); + } + + void vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData ); + } + + void vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo ); + } + + void vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent2( commandBuffer, event, stageMask ); + } + + void vkCmdWaitEvents2( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos ); + } + + void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo ); + } + + void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query ); + } + + VkResult vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit2( queue, submitCount, pSubmits, fence ); + } + + void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer2( 
commandBuffer, pCopyBufferInfo ); + } + + void vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo ); + } + + void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo ); + } + + void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo ); + } + + void vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo ); + } + + void vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo ); + } + + void vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo ); + } + + void vkCmdEndRendering( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRendering( commandBuffer ); + } + + void vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCullMode( commandBuffer, cullMode ); + } + + void vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFrontFace( commandBuffer, frontFace ); + } + + void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology ); + } + + void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports ); + } + + void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors ); + } + + void vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes, + const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers2( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); + } + + void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthTestEnable( commandBuffer, depthTestEnable ); + } + + void vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthWriteEnable( commandBuffer, depthWriteEnable ); + } + + void vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp ); + } + + void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 
depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable ); + } + + void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable ); + } + + void vkCmdSetStencilOp( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); + } + + void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable ); + } + + void vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable ); + } + + void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable ); + } + + void vkGetDeviceBufferMemoryRequirements( VkDevice device, + const VkDeviceBufferMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageMemoryRequirements( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageMemoryRequirements( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageSparseMemoryRequirements( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSparseMemoryRequirements( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + //=== VK_KHR_surface === + + void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); + } + + VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32 * pSupported ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); + } + + VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); + } + + VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); + } + + VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + 
uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); + } + + //=== VK_KHR_swapchain === + + VkResult vkCreateSwapchainKHR( VkDevice device, + const VkSwapchainCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); + } + + void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); + } + + VkResult vkGetSwapchainImagesKHR( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); + } + + VkResult vkAcquireNextImageKHR( + VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); + } + + VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueuePresentKHR( queue, pPresentInfo ); + } + + VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, + VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); + } + + VkResult + vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); + } + + VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pRectCount, + VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); + } + + VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); + } + + //=== VK_KHR_display === + + VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t * pDisplayCount, + VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); + } + + VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, + 
VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); + } + + VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); + } + + VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); + } + + VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + //=== VK_KHR_display_swapchain === + + VkResult vkCreateSharedSwapchainsKHR( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); + } + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + VkResult vkCreateXlibSurfaceKHR( VkInstance instance, + const VkXlibSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + Display * dpy, + VisualID visualID ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); + } +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + VkResult vkCreateXcbSurfaceKHR( VkInstance instance, + const VkXcbSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); + } +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + 
struct wl_display * display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); + } +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, + const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + VkResult vkCreateWin32SurfaceKHR( VkInstance instance, + const VkWin32SurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); + } + + void vkDestroyDebugReportCallbackEXT( VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); + } + + void vkDebugReportMessageEXT( VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); + } + + //=== VK_EXT_debug_marker === + + VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); + } + + VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); + } + + void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo ); + } + + void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerEndEXT( commandBuffer ); + } + + void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo ); + } + + //=== VK_KHR_video_queue === + + VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice, + const VkVideoProfileInfoKHR * pVideoProfile, + VkVideoCapabilitiesKHR 
* pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities ); + } + + VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties ); + } + + VkResult vkCreateVideoSessionKHR( VkDevice device, + const VkVideoSessionCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkVideoSessionKHR * pVideoSession ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession ); + } + + void vkDestroyVideoSessionKHR( VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator ); + } + + VkResult vkGetVideoSessionMemoryRequirementsKHR( VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t * pMemoryRequirementsCount, + VkVideoSessionMemoryRequirementsKHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetVideoSessionMemoryRequirementsKHR( device, videoSession, pMemoryRequirementsCount, pMemoryRequirements ); + } + + VkResult vkBindVideoSessionMemoryKHR( VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const VkBindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindVideoSessionMemoryKHR( device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos ); + } + + VkResult vkCreateVideoSessionParametersKHR( VkDevice device, + const VkVideoSessionParametersCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkVideoSessionParametersKHR * pVideoSessionParameters ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters ); + } + + VkResult vkUpdateVideoSessionParametersKHR( VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateVideoSessionParametersKHR( device, videoSessionParameters, pUpdateInfo ); + } + + void vkDestroyVideoSessionParametersKHR( VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator ); + } + + void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo ); + } + + void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo ); + } + + void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo ); + } + + //=== VK_KHR_video_decode_queue === + + void vkCmdDecodeVideoKHR( VkCommandBuffer 
commandBuffer, const VkVideoDecodeInfoKHR * pDecodeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecodeVideoKHR( commandBuffer, pDecodeInfo ); + } + + //=== VK_EXT_transform_feedback === + + void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); + } + + void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer * pCounterBuffers, + const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + } + + void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer * pCounterBuffers, + const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + } + + void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); + } + + void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index ); + } + + void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); + } + + //=== VK_NVX_binary_import === + + VkResult vkCreateCuModuleNVX( VkDevice device, + const VkCuModuleCreateInfoNVX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCuModuleNVX * pModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule ); + } + + VkResult vkCreateCuFunctionNVX( VkDevice device, + const VkCuFunctionCreateInfoNVX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCuFunctionNVX * pFunction ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction ); + } + + void vkDestroyCuModuleNVX( VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCuModuleNVX( device, module, pAllocator ); + } + + void vkDestroyCuFunctionNVX( VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCuFunctionNVX( device, function, pAllocator ); + } + + void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo ); + } + + //=== 
VK_NVX_image_view_handle === + + uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewHandleNVX( device, pInfo ); + } + + VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); + } + + //=== VK_AMD_draw_indirect_count === + + void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_AMD_shader_info === + + VkResult vkGetShaderInfoAMD( VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); + } + + //=== VK_KHR_dynamic_rendering === + + void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderingKHR( commandBuffer, pRenderingInfo ); + } + + void vkCmdEndRenderingKHR( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderingKHR( commandBuffer ); + } + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, + const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + VkResult vkGetMemoryWin32HandleNV( VkDevice device, + VkDeviceMemory memory, + VkExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + + void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, 
VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures ); + } + + void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + } + + //=== VK_KHR_device_group === + + void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); + } + + void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask ); + } + + void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + VkResult vkCreateViSurfaceNN( VkInstance instance, + const VkViSurfaceCreateInfoNN * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + + void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTrimCommandPoolKHR( device, 
commandPool, flags ); + } + + //=== VK_KHR_device_group_creation === + + VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + } + + //=== VK_KHR_external_memory_capabilities === + + void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } + + VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd ); + } + + VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties ); + } + + //=== VK_KHR_external_semaphore_capabilities === + + void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, + const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); + } + + VkResult + vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); + } + + VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { 
+ return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); + } + + //=== VK_KHR_push_descriptor === + + void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); + } + + void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); + } + + //=== VK_EXT_conditional_rendering === + + void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); + } + + void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndConditionalRenderingEXT( commandBuffer ); + } + + //=== VK_KHR_descriptor_update_template === + + VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); + } + + void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); + } + + void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); + } + + //=== VK_NV_clip_space_w_scaling === + + void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); + } + + //=== VK_EXT_direct_mode_display === + + VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseDisplayEXT( physicalDevice, display ); + } + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); + } + + VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, RROutput rrOutput, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); + } +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== 
VK_EXT_display_surface_counter === + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); + } + + //=== VK_EXT_display_control === + + VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); + } + + VkResult vkRegisterDeviceEventEXT( VkDevice device, + const VkDeviceEventInfoEXT * pDeviceEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); + } + + VkResult vkRegisterDisplayEventEXT( VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT * pDisplayEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); + } + + VkResult vkGetSwapchainCounterEXT( VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); + } + + //=== VK_GOOGLE_display_timing === + + VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); + } + + VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VkPastPresentationTimingGOOGLE * pPresentationTimings ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings ); + } + + //=== VK_EXT_discard_rectangles === + + void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles ); + } + + void vkCmdSetDiscardRectangleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleEnableEXT( commandBuffer, discardRectangleEnable ); + } + + void vkCmdSetDiscardRectangleModeEXT( VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleModeEXT( commandBuffer, discardRectangleMode ); + } + + //=== VK_EXT_hdr_metadata === + + void vkSetHdrMetadataEXT( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR * pSwapchains, + const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); + } + + //=== VK_KHR_create_renderpass2 === + + VkResult vkCreateRenderPass2KHR( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); + } + + void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); + } + + void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); + } + + //=== VK_KHR_shared_presentable_image === + + VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainStatusKHR( device, swapchain ); + } + + //=== VK_KHR_external_fence_capabilities === + + void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); + } + + VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); + } + + VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd ); + } + + //=== VK_KHR_performance_query === + + VkResult + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VkPerformanceCounterKHR * pCounters, + VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); + } + + void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); + } + + VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkAcquireProfilingLockKHR( device, pInfo ); + } + + void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseProfilingLockKHR( device ); + } + + //=== VK_KHR_get_surface_capabilities2 === + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); + } + + VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); + } + + //=== VK_KHR_get_display_properties2 === + + VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlaneProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); + } + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + VkResult vkCreateIOSSurfaceMVK( VkInstance instance, + const VkIOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); + } + + VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); + } + + void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const 
VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); + } + + void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueEndDebugUtilsLabelEXT( queue ); + } + + void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); + } + + void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } + + void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer ); + } + + void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } + + VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); + } + + void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); + } + + void vkSubmitDebugUtilsMessageEXT( VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); + } + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, + const struct AHardwareBuffer * buffer, + VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties ); + } + + VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, + const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + VkResult vkCreateExecutionGraphPipelinesAMDX( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateExecutionGraphPipelinesAMDX( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetExecutionGraphPipelineScratchSizeAMDX( VkDevice device, + VkPipeline executionGraph, + VkExecutionGraphPipelineScratchSizeAMDX * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetExecutionGraphPipelineScratchSizeAMDX( device, executionGraph, pSizeInfo ); + } + + VkResult 
vkGetExecutionGraphPipelineNodeIndexAMDX( VkDevice device, + VkPipeline executionGraph, + const VkPipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, + uint32_t * pNodeIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetExecutionGraphPipelineNodeIndexAMDX( device, executionGraph, pNodeInfo, pNodeIndex ); + } + + void vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdInitializeGraphScratchMemoryAMDX( commandBuffer, scratch ); + } + + void vkCmdDispatchGraphAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphAMDX( commandBuffer, scratch, pCountInfo ); + } + + void vkCmdDispatchGraphIndirectAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphIndirectAMDX( commandBuffer, scratch, pCountInfo ); + } + + void vkCmdDispatchGraphIndirectCountAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphIndirectCountAMDX( commandBuffer, scratch, countInfo ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + + void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); + } + + void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT * pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); + } + + //=== VK_KHR_get_memory_requirements2 === + + void vkGetImageMemoryRequirements2KHR( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetBufferMemoryRequirements2KHR( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + //=== VK_KHR_acceleration_structure === + + VkResult vkCreateAccelerationStructureKHR( VkDevice device, + const VkAccelerationStructureCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } + + void vkDestroyAccelerationStructureKHR( VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureKHR( device, 
accelerationStructure, pAllocator ); + } + + void vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos ); + } + + void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkDeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructuresIndirectKHR( + commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts ); + } + + VkResult vkBuildAccelerationStructuresKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos ); + } + + VkResult vkCopyAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo ); + } + + VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo ); + } + + VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo ); + } + + VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void * pData, + size_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); + } + + void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); + } + + void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); + } + + void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); + } + + VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, + const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + 
{ + return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); + } + + void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesKHR( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } + + void vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, + const VkAccelerationStructureVersionInfoKHR * pVersionInfo, + VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility ); + } + + void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo ); + } + + //=== VK_KHR_ray_tracing_pipeline === + + void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysKHR( + commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth ); + } + + VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesKHR( device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetRayTracingShaderGroupHandlesKHR( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysIndirectKHR( + commandBuffer, pRaygenShaderBindingTable, 
pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress ); + } + + VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, + VkPipeline pipeline, + uint32_t group, + VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader ); + } + + void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize ); + } + + //=== VK_KHR_sampler_ycbcr_conversion === + + VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); + } + + void vkDestroySamplerYcbcrConversionKHR( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); + } + + //=== VK_KHR_bind_memory2 === + + VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); + } + + //=== VK_EXT_image_drm_format_modifier === + + VkResult + vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); + } + + //=== VK_EXT_validation_cache === + + VkResult vkCreateValidationCacheEXT( VkDevice device, + const VkValidationCacheCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); + } + + void + vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); + } + + VkResult vkMergeValidationCachesEXT( VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); + } + + VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); + } + + //=== VK_NV_shading_rate_image === + + void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); + } + + void vkCmdSetViewportShadingRatePaletteNV( 
VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkShadingRatePaletteNV * pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); + } + + void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); + } + + //=== VK_NV_ray_tracing === + + VkResult vkCreateAccelerationStructureNV( VkDevice device, + const VkAccelerationStructureCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } + + void vkDestroyAccelerationStructureNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); + } + + void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2KHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } + + VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); + } + + void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV * pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); + } + + void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); + } + + void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysNV( commandBuffer, + raygenShaderBindingTableBuffer, + raygenShaderBindingOffset, + missShaderBindingTableBuffer, + missShaderBindingOffset, + missShaderBindingStride, + hitShaderBindingTableBuffer, + 
hitShaderBindingOffset, + hitShaderBindingStride, + callableShaderBindingTableBuffer, + callableShaderBindingOffset, + callableShaderBindingStride, + width, + height, + depth ); + } + + VkResult vkCreateRayTracingPipelinesNV( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetRayTracingShaderGroupHandlesNV( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + VkResult vkGetAccelerationStructureHandleNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); + } + + void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesNV( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } + + VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCompileDeferredNV( device, pipeline, shader ); + } + + //=== VK_KHR_maintenance3 === + + void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); + } + + //=== VK_KHR_draw_indirect_count === + + void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_EXT_external_memory_host === + + VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); + } + + //=== VK_AMD_buffer_marker === + + void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); + } + + //=== VK_EXT_calibrated_timestamps === + + VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, + uint32_t * pTimeDomainCount, + VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); + } + + VkResult vkGetCalibratedTimestampsEXT( VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); + } + + //=== VK_NV_mesh_shader === + + void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); + } + + void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_NV_scissor_exclusive === + + void vkCmdSetExclusiveScissorEnableNV( VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkBool32 * pExclusiveScissorEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExclusiveScissorEnableNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissorEnables ); + } + + void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); + } + + //=== VK_NV_device_diagnostic_checkpoints === + + void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker ); + } + + void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); + } + + //=== VK_KHR_timeline_semaphore === + + VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); + } + + VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); + } + + VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSignalSemaphoreKHR( 
device, pSignalInfo ); + } + + //=== VK_INTEL_performance_query === + + VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); + } + + void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUninitializePerformanceApiINTEL( device ); + } + + VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo ); + } + + VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, + const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); + } + + VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo ); + } + + VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, + const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VkPerformanceConfigurationINTEL * pConfiguration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); + } + + VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); + } + + VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); + } + + VkResult + vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); + } + + //=== VK_AMD_display_native_hdr === + + void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, + const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + VkResult vkCreateMetalSurfaceEXT( VkInstance instance, + const VkMetalSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) 
const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates ); + } + + void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, + const VkExtent2D * pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps ); + } + + //=== VK_EXT_buffer_device_address === + + VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddressEXT( device, pInfo ); + } + + //=== VK_EXT_tooling_info === + + VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, + uint32_t * pToolCount, + VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties ); + } + + //=== VK_KHR_present_wait === + + VkResult vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitForPresentKHR( device, swapchain, presentId, timeout ); + } + + //=== VK_NV_cooperative_matrix === + + VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeMatrixPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); + } + + //=== VK_NV_coverage_reduction_mode === + + VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, uint32_t * pCombinationCount, VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); + } + + VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); + } + + VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); + } + + VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, + const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, 
pSurface ); + } + + //=== VK_KHR_buffer_device_address === + + VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddressKHR( device, pInfo ); + } + + uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo ); + } + + uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo ); + } + + //=== VK_EXT_line_rasterization === + + void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern ); + } + + //=== VK_EXT_host_query_reset === + + void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); + } + + //=== VK_EXT_extended_dynamic_state === + + void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCullModeEXT( commandBuffer, cullMode ); + } + + void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace ); + } + + void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology ); + } + + void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports ); + } + + void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors ); + } + + void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes, + const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); + } + + void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable ); + } + + void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable ); + } + + void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp ); + } + + void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBoundsTestEnableEXT( 
commandBuffer, depthBoundsTestEnable ); + } + + void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable ); + } + + void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); + } + + //=== VK_KHR_deferred_host_operations === + + VkResult vkCreateDeferredOperationKHR( VkDevice device, + const VkAllocationCallbacks * pAllocator, + VkDeferredOperationKHR * pDeferredOperation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); + } + + void vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); + } + + uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); + } + + VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationResultKHR( device, operation ); + } + + VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeferredOperationJoinKHR( device, operation ); + } + + //=== VK_KHR_pipeline_executable_properties === + + VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, + const VkPipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); + } + + VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); + } + + VkResult + vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); + } + + //=== VK_EXT_host_image_copy === + + VkResult vkCopyMemoryToImageEXT( VkDevice device, const VkCopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToImageEXT( device, pCopyMemoryToImageInfo ); + } + + VkResult vkCopyImageToMemoryEXT( VkDevice device, const VkCopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToMemoryEXT( device, pCopyImageToMemoryInfo ); + } + + VkResult vkCopyImageToImageEXT( VkDevice device, const VkCopyImageToImageInfoEXT * pCopyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToImageEXT( device, 
pCopyImageToImageInfo ); + } + + VkResult + vkTransitionImageLayoutEXT( VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT * pTransitions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTransitionImageLayoutEXT( device, transitionCount, pTransitions ); + } + + void vkGetImageSubresourceLayout2EXT( VkDevice device, + VkImage image, + const VkImageSubresource2KHR * pSubresource, + VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout ); + } + + //=== VK_KHR_map_memory2 === + + VkResult vkMapMemory2KHR( VkDevice device, const VkMemoryMapInfoKHR * pMemoryMapInfo, void ** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory2KHR( device, pMemoryMapInfo, ppData ); + } + + VkResult vkUnmapMemory2KHR( VkDevice device, const VkMemoryUnmapInfoKHR * pMemoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnmapMemory2KHR( device, pMemoryUnmapInfo ); + } + + //=== VK_EXT_swapchain_maintenance1 === + + VkResult vkReleaseSwapchainImagesEXT( VkDevice device, const VkReleaseSwapchainImagesInfoEXT * pReleaseInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseSwapchainImagesEXT( device, pReleaseInfo ); + } + + //=== VK_NV_device_generated_commands === + + void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } + + void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); + } + + void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); + } + + void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex ); + } + + VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); + } + + void vkDestroyIndirectCommandsLayoutNV( VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); + } + + //=== VK_EXT_depth_bias_control === + + void vkCmdSetDepthBias2EXT( VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT * pDepthBiasInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBias2EXT( commandBuffer, pDepthBiasInfo ); + } + + //=== VK_EXT_acquire_drm_display === + + VkResult vkAcquireDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkAcquireDrmDisplayEXT( physicalDevice, drmFd, display ); + } + + VkResult vkGetDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR * display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDrmDisplayEXT( physicalDevice, drmFd, connectorId, display ); + } + + //=== VK_EXT_private_data === + + VkResult vkCreatePrivateDataSlotEXT( VkDevice device, + const VkPrivateDataSlotCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot ); + } + + void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator ); + } + + VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data ); + } + + void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData ); + } + + //=== VK_KHR_video_encode_queue === + + VkResult + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, + VkVideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( physicalDevice, pQualityLevelInfo, pQualityLevelProperties ); + } + + VkResult vkGetEncodedVideoSessionParametersKHR( VkDevice device, + const VkVideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, + VkVideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, + size_t * pDataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetEncodedVideoSessionParametersKHR( device, pVideoSessionParametersInfo, pFeedbackInfo, pDataSize, pData ); + } + + void vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo ); + } + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + VkResult vkCreateCudaModuleNV( VkDevice device, + const VkCudaModuleCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCudaModuleNV * pModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCudaModuleNV( device, pCreateInfo, pAllocator, pModule ); + } + + VkResult vkGetCudaModuleCacheNV( VkDevice device, VkCudaModuleNV module, size_t * pCacheSize, void * pCacheData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCudaModuleCacheNV( device, module, pCacheSize, pCacheData ); + } + + VkResult vkCreateCudaFunctionNV( VkDevice device, + const VkCudaFunctionCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCudaFunctionNV * pFunction ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCudaFunctionNV( device, pCreateInfo, pAllocator, pFunction ); + } + + void vkDestroyCudaModuleNV( VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + 
{ + return ::vkDestroyCudaModuleNV( device, module, pAllocator ); + } + + void vkDestroyCudaFunctionNV( VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCudaFunctionNV( device, function, pAllocator ); + } + + void vkCmdCudaLaunchKernelNV( VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCudaLaunchKernelNV( commandBuffer, pLaunchInfo ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + void vkExportMetalObjectsEXT( VkDevice device, VkExportMetalObjectsInfoEXT * pMetalObjectsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkExportMetalObjectsEXT( device, pMetalObjectsInfo ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + + void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo ); + } + + void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask ); + } + + void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos ); + } + + void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo ); + } + + void vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query ); + } + + VkResult vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence ); + } + + void vkCmdWriteBufferMarker2AMD( + VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker ); + } + + void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData ); + } + + //=== VK_EXT_descriptor_buffer === + + void vkGetDescriptorSetLayoutSizeEXT( VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize * pLayoutSizeInBytes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSizeEXT( device, layout, pLayoutSizeInBytes ); + } + + void vkGetDescriptorSetLayoutBindingOffsetEXT( VkDevice device, + VkDescriptorSetLayout layout, + uint32_t binding, + VkDeviceSize * pOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutBindingOffsetEXT( device, layout, binding, pOffset ); + } + + void vkGetDescriptorEXT( VkDevice device, const VkDescriptorGetInfoEXT * pDescriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkGetDescriptorEXT( device, pDescriptorInfo, dataSize, pDescriptor ); + } + + void vkCmdBindDescriptorBuffersEXT( VkCommandBuffer commandBuffer, + uint32_t bufferCount, + const VkDescriptorBufferBindingInfoEXT * pBindingInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorBuffersEXT( commandBuffer, bufferCount, pBindingInfos ); + } + + void vkCmdSetDescriptorBufferOffsetsEXT( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t * pBufferIndices, + const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDescriptorBufferOffsetsEXT( commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets ); + } + + void vkCmdBindDescriptorBufferEmbeddedSamplersEXT( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorBufferEmbeddedSamplersEXT( commandBuffer, pipelineBindPoint, layout, set ); + } + + VkResult + vkGetBufferOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + VkResult + vkGetImageOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + VkResult vkGetImageViewOpaqueCaptureDescriptorDataEXT( VkDevice device, + const VkImageViewCaptureDescriptorDataInfoEXT * pInfo, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + VkResult + vkGetSamplerOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkSamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSamplerOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + VkResult vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( VkDevice device, + const VkAccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + //=== VK_NV_fragment_shading_rate_enums === + + void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps ); + } + + //=== VK_EXT_mesh_shader === + + void vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksEXT( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectEXT( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectCountEXT( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_KHR_copy_commands2 === + + void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo ); + } + + void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo ); + } + + void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo ); + } + + void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo ); + } + + void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo ); + } + + void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo ); + } + + //=== VK_EXT_device_fault === + + VkResult vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT * pFaultCounts, VkDeviceFaultInfoEXT * pFaultInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceFaultInfoEXT( device, pFaultCounts, pFaultInfo ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + + VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireWinrtDisplayNV( physicalDevice, display ); + } + + VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 + vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB * dfb ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb ); + } +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + + void vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetVertexInputEXT( + commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, 
pVertexAttributeDescriptions ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); + } + + VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( device, handleType, zirconHandle, pMemoryZirconHandleProperties ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + VkResult vkImportSemaphoreZirconHandleFUCHSIA( VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo ); + } + + VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + VkResult vkCreateBufferCollectionFUCHSIA( VkDevice device, + const VkBufferCollectionCreateInfoFUCHSIA * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBufferCollectionFUCHSIA * pCollection ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBufferCollectionFUCHSIA( device, pCreateInfo, pAllocator, pCollection ); + } + + VkResult vkSetBufferCollectionImageConstraintsFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkImageConstraintsInfoFUCHSIA * pImageConstraintsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetBufferCollectionImageConstraintsFUCHSIA( device, collection, pImageConstraintsInfo ); + } + + VkResult vkSetBufferCollectionBufferConstraintsFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetBufferCollectionBufferConstraintsFUCHSIA( device, collection, pBufferConstraintsInfo ); + } + + void vkDestroyBufferCollectionFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBufferCollectionFUCHSIA( device, collection, pAllocator ); + } + + VkResult vkGetBufferCollectionPropertiesFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + VkBufferCollectionPropertiesFUCHSIA * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferCollectionPropertiesFUCHSIA( device, collection, pProperties ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + + VkResult + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device, VkRenderPass renderpass, VkExtent2D * pMaxWorkgroupSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( device, renderpass, pMaxWorkgroupSize ); + } + + void vkCmdSubpassShadingHUAWEI( VkCommandBuffer commandBuffer ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSubpassShadingHUAWEI( commandBuffer ); + } + + //=== VK_HUAWEI_invocation_mask === + + void vkCmdBindInvocationMaskHUAWEI( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindInvocationMaskHUAWEI( commandBuffer, imageView, imageLayout ); + } + + //=== VK_NV_external_memory_rdma === + + VkResult vkGetMemoryRemoteAddressNV( VkDevice device, + const VkMemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, + VkRemoteAddressNV * pAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryRemoteAddressNV( device, pMemoryGetRemoteAddressInfo, pAddress ); + } + + //=== VK_EXT_pipeline_properties === + + VkResult + vkGetPipelinePropertiesEXT( VkDevice device, const VkPipelineInfoEXT * pPipelineInfo, VkBaseOutStructure * pPipelineProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelinePropertiesEXT( device, pPipelineInfo, pPipelineProperties ); + } + + //=== VK_EXT_extended_dynamic_state2 === + + void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints ); + } + + void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable ); + } + + void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable ); + } + + void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp ); + } + + void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable ); + } + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + VkResult vkCreateScreenSurfaceQNX( VkInstance instance, + const VkScreenSurfaceCreateInfoQNX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateScreenSurfaceQNX( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct _screen_window * window ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( physicalDevice, queueFamilyIndex, window ); + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + + void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables ); + } + + //=== VK_KHR_ray_tracing_maintenance1 === + + void vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysIndirect2KHR( commandBuffer, indirectDeviceAddress ); + } + + //=== VK_EXT_multi_draw === + + void vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawInfoEXT * pVertexInfo, + uint32_t instanceCount, 
+ uint32_t firstInstance, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMultiEXT( commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride ); + } + + void vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawIndexedInfoEXT * pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t * pVertexOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMultiIndexedEXT( commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset ); + } + + //=== VK_EXT_opacity_micromap === + + VkResult vkCreateMicromapEXT( VkDevice device, + const VkMicromapCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkMicromapEXT * pMicromap ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMicromapEXT( device, pCreateInfo, pAllocator, pMicromap ); + } + + void vkDestroyMicromapEXT( VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyMicromapEXT( device, micromap, pAllocator ); + } + + void vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildMicromapsEXT( commandBuffer, infoCount, pInfos ); + } + + VkResult vkBuildMicromapsEXT( VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBuildMicromapsEXT( device, deferredOperation, infoCount, pInfos ); + } + + VkResult vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMicromapEXT( device, deferredOperation, pInfo ); + } + + VkResult vkCopyMicromapToMemoryEXT( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMicromapToMemoryEXT( device, deferredOperation, pInfo ); + } + + VkResult vkCopyMemoryToMicromapEXT( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToMicromapEXT( device, deferredOperation, pInfo ); + } + + VkResult vkWriteMicromapsPropertiesEXT( VkDevice device, + uint32_t micromapCount, + const VkMicromapEXT * pMicromaps, + VkQueryType queryType, + size_t dataSize, + void * pData, + size_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteMicromapsPropertiesEXT( device, micromapCount, pMicromaps, queryType, dataSize, pData, stride ); + } + + void vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMicromapEXT( commandBuffer, pInfo ); + } + + void vkCmdCopyMicromapToMemoryEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMicromapToMemoryEXT( commandBuffer, pInfo ); + } + + void vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToMicromapEXT( commandBuffer, pInfo ); + } + + void vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer, + uint32_t micromapCount, + const VkMicromapEXT * pMicromaps, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery 
) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteMicromapsPropertiesEXT( commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery ); + } + + void vkGetDeviceMicromapCompatibilityEXT( VkDevice device, + const VkMicromapVersionInfoEXT * pVersionInfo, + VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMicromapCompatibilityEXT( device, pVersionInfo, pCompatibility ); + } + + void vkGetMicromapBuildSizesEXT( VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkMicromapBuildInfoEXT * pBuildInfo, + VkMicromapBuildSizesInfoEXT * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMicromapBuildSizesEXT( device, buildType, pBuildInfo, pSizeInfo ); + } + + //=== VK_HUAWEI_cluster_culling_shader === + + void vkCmdDrawClusterHUAWEI( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawClusterHUAWEI( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDrawClusterIndirectHUAWEI( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawClusterIndirectHUAWEI( commandBuffer, buffer, offset ); + } + + //=== VK_EXT_pageable_device_local_memory === + + void vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDeviceMemoryPriorityEXT( device, memory, priority ); + } + + //=== VK_KHR_maintenance4 === + + void vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device, + const VkDeviceBufferMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceBufferMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageMemoryRequirementsKHR( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSparseMemoryRequirementsKHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + //=== VK_VALVE_descriptor_set_host_mapping === + + void vkGetDescriptorSetLayoutHostMappingInfoVALVE( VkDevice device, + const VkDescriptorSetBindingReferenceVALVE * pBindingReference, + VkDescriptorSetLayoutHostMappingInfoVALVE * pHostMapping ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutHostMappingInfoVALVE( device, pBindingReference, pHostMapping ); + } + + void vkGetDescriptorSetHostMappingVALVE( VkDevice device, VkDescriptorSet descriptorSet, void ** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetHostMappingVALVE( device, descriptorSet, ppData ); + } + + //=== VK_NV_copy_memory_indirect === + + void vkCmdCopyMemoryIndirectNV( VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride ); + } + + void vkCmdCopyMemoryToImageIndirectNV( VkCommandBuffer commandBuffer, + VkDeviceAddress 
copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VkImage dstImage, + VkImageLayout dstImageLayout, + const VkImageSubresourceLayers * pImageSubresources ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToImageIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride, dstImage, dstImageLayout, pImageSubresources ); + } + + //=== VK_NV_memory_decompression === + + void vkCmdDecompressMemoryNV( VkCommandBuffer commandBuffer, + uint32_t decompressRegionCount, + const VkDecompressMemoryRegionNV * pDecompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecompressMemoryNV( commandBuffer, decompressRegionCount, pDecompressMemoryRegions ); + } + + void vkCmdDecompressMemoryIndirectCountNV( VkCommandBuffer commandBuffer, + VkDeviceAddress indirectCommandsAddress, + VkDeviceAddress indirectCommandsCountAddress, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecompressMemoryIndirectCountNV( commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride ); + } + + //=== VK_NV_device_generated_commands_compute === + + void vkGetPipelineIndirectMemoryRequirementsNV( VkDevice device, + const VkComputePipelineCreateInfo * pCreateInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineIndirectMemoryRequirementsNV( device, pCreateInfo, pMemoryRequirements ); + } + + void + vkCmdUpdatePipelineIndirectBufferNV( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdUpdatePipelineIndirectBufferNV( commandBuffer, pipelineBindPoint, pipeline ); + } + + VkDeviceAddress vkGetPipelineIndirectDeviceAddressNV( VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineIndirectDeviceAddressNV( device, pInfo ); + } + + //=== VK_EXT_extended_dynamic_state3 === + + void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetTessellationDomainOriginEXT( commandBuffer, domainOrigin ); + } + + void vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClampEnableEXT( commandBuffer, depthClampEnable ); + } + + void vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, VkPolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPolygonModeEXT( commandBuffer, polygonMode ); + } + + void vkCmdSetRasterizationSamplesEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizationSamplesEXT( commandBuffer, rasterizationSamples ); + } + + void vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask * pSampleMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleMaskEXT( commandBuffer, samples, pSampleMask ); + } + + void vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetAlphaToCoverageEnableEXT( commandBuffer, alphaToCoverageEnable ); + } + + void vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetAlphaToOneEnableEXT( commandBuffer, alphaToOneEnable ); + } + + void vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, VkBool32 
logicOpEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLogicOpEnableEXT( commandBuffer, logicOpEnable ); + } + + void vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkBool32 * pColorBlendEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorBlendEnableEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables ); + } + + void vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendEquationEXT * pColorBlendEquations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorBlendEquationEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations ); + } + + void vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorComponentFlags * pColorWriteMasks ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorWriteMaskEXT( commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks ); + } + + void vkCmdSetRasterizationStreamEXT( VkCommandBuffer commandBuffer, uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizationStreamEXT( commandBuffer, rasterizationStream ); + } + + void vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer, + VkConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetConservativeRasterizationModeEXT( commandBuffer, conservativeRasterizationMode ); + } + + void vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExtraPrimitiveOverestimationSizeEXT( commandBuffer, extraPrimitiveOverestimationSize ); + } + + void vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClipEnableEXT( commandBuffer, depthClipEnable ); + } + + void vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleLocationsEnableEXT( commandBuffer, sampleLocationsEnable ); + } + + void vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendAdvancedEXT * pColorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorBlendAdvancedEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced ); + } + + void vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetProvokingVertexModeEXT( commandBuffer, provokingVertexMode ); + } + + void vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineRasterizationModeEXT( commandBuffer, lineRasterizationMode ); + } + + void vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleEnableEXT( commandBuffer, stippledLineEnable ); + } + + void vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClipNegativeOneToOneEXT( commandBuffer, negativeOneToOne ); + } + + void 
vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWScalingEnableNV( commandBuffer, viewportWScalingEnable ); + } + + void vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportSwizzleNV * pViewportSwizzles ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportSwizzleNV( commandBuffer, firstViewport, viewportCount, pViewportSwizzles ); + } + + void vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageToColorEnableNV( commandBuffer, coverageToColorEnable ); + } + + void vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageToColorLocationNV( commandBuffer, coverageToColorLocation ); + } + + void vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageModulationModeNV( commandBuffer, coverageModulationMode ); + } + + void vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageModulationTableEnableNV( commandBuffer, coverageModulationTableEnable ); + } + + void vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer, + uint32_t coverageModulationTableCount, + const float * pCoverageModulationTable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageModulationTableNV( commandBuffer, coverageModulationTableCount, pCoverageModulationTable ); + } + + void vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetShadingRateImageEnableNV( commandBuffer, shadingRateImageEnable ); + } + + void vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRepresentativeFragmentTestEnableNV( commandBuffer, representativeFragmentTestEnable ); + } + + void vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageReductionModeNV( commandBuffer, coverageReductionMode ); + } + + //=== VK_EXT_shader_module_identifier === + + void vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderModuleIdentifierEXT( device, shaderModule, pIdentifier ); + } + + void vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device, + const VkShaderModuleCreateInfo * pCreateInfo, + VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderModuleCreateInfoIdentifierEXT( device, pCreateInfo, pIdentifier ); + } + + //=== VK_NV_optical_flow === + + VkResult vkGetPhysicalDeviceOpticalFlowImageFormatsNV( VkPhysicalDevice physicalDevice, + const VkOpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, + uint32_t * pFormatCount, + VkOpticalFlowImageFormatPropertiesNV * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceOpticalFlowImageFormatsNV( physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, 
pImageFormatProperties ); + } + + VkResult vkCreateOpticalFlowSessionNV( VkDevice device, + const VkOpticalFlowSessionCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkOpticalFlowSessionNV * pSession ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateOpticalFlowSessionNV( device, pCreateInfo, pAllocator, pSession ); + } + + void vkDestroyOpticalFlowSessionNV( VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyOpticalFlowSessionNV( device, session, pAllocator ); + } + + VkResult vkBindOpticalFlowSessionImageNV( VkDevice device, + VkOpticalFlowSessionNV session, + VkOpticalFlowSessionBindingPointNV bindingPoint, + VkImageView view, + VkImageLayout layout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindOpticalFlowSessionImageNV( device, session, bindingPoint, view, layout ); + } + + void vkCmdOpticalFlowExecuteNV( VkCommandBuffer commandBuffer, + VkOpticalFlowSessionNV session, + const VkOpticalFlowExecuteInfoNV * pExecuteInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo ); + } + + //=== VK_KHR_maintenance5 === + + void vkCmdBindIndexBuffer2KHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer2KHR( commandBuffer, buffer, offset, size, indexType ); + } + + void vkGetRenderingAreaGranularityKHR( VkDevice device, + const VkRenderingAreaInfoKHR * pRenderingAreaInfo, + VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderingAreaGranularityKHR( device, pRenderingAreaInfo, pGranularity ); + } + + void vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, + const VkDeviceImageSubresourceInfoKHR * pInfo, + VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSubresourceLayoutKHR( device, pInfo, pLayout ); + } + + void vkGetImageSubresourceLayout2KHR( VkDevice device, + VkImage image, + const VkImageSubresource2KHR * pSubresource, + VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout2KHR( device, image, pSubresource, pLayout ); + } + + //=== VK_EXT_shader_object === + + VkResult vkCreateShadersEXT( VkDevice device, + uint32_t createInfoCount, + const VkShaderCreateInfoEXT * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateShadersEXT( device, createInfoCount, pCreateInfos, pAllocator, pShaders ); + } + + void vkDestroyShaderEXT( VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyShaderEXT( device, shader, pAllocator ); + } + + VkResult vkGetShaderBinaryDataEXT( VkDevice device, VkShaderEXT shader, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderBinaryDataEXT( device, shader, pDataSize, pData ); + } + + void vkCmdBindShadersEXT( VkCommandBuffer commandBuffer, + uint32_t stageCount, + const VkShaderStageFlagBits * pStages, + const VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindShadersEXT( commandBuffer, stageCount, pStages, pShaders ); + } + + //=== VK_QCOM_tile_properties === + + VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device, + VkFramebuffer framebuffer, + uint32_t * pPropertiesCount, + VkTilePropertiesQCOM * pProperties ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkGetFramebufferTilePropertiesQCOM( device, framebuffer, pPropertiesCount, pProperties ); + } + + VkResult vkGetDynamicRenderingTilePropertiesQCOM( VkDevice device, + const VkRenderingInfo * pRenderingInfo, + VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDynamicRenderingTilePropertiesQCOM( device, pRenderingInfo, pProperties ); + } + + //=== VK_NV_low_latency2 === + + VkResult vkSetLatencySleepModeNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepModeInfoNV * pSleepModeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLatencySleepModeNV( device, swapchain, pSleepModeInfo ); + } + + VkResult vkLatencySleepNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepInfoNV * pSleepInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkLatencySleepNV( device, swapchain, pSleepInfo ); + } + + void vkSetLatencyMarkerNV( VkDevice device, VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLatencyMarkerNV( device, swapchain, pLatencyMarkerInfo ); + } + + void vkGetLatencyTimingsNV( VkDevice device, VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetLatencyTimingsNV( device, swapchain, pLatencyMarkerInfo ); + } + + void vkQueueNotifyOutOfBandNV( VkQueue queue, const VkOutOfBandQueueTypeInfoNV * pQueueTypeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueNotifyOutOfBandNV( queue, pQueueTypeInfo ); + } + + //=== VK_KHR_cooperative_matrix === + + VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeMatrixPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + + void vkCmdSetAttachmentFeedbackLoopEnableEXT( VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetAttachmentFeedbackLoopEnableEXT( commandBuffer, aspectMask ); + } + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + + VkResult vkGetScreenBufferPropertiesQNX( VkDevice device, + const struct _screen_buffer * buffer, + VkScreenBufferPropertiesQNX * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetScreenBufferPropertiesQNX( device, buffer, pProperties ); + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_calibrated_timestamps === + + VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( VkPhysicalDevice physicalDevice, + uint32_t * pTimeDomainCount, + VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( physicalDevice, pTimeDomainCount, pTimeDomains ); + } + + VkResult vkGetCalibratedTimestampsKHR( VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCalibratedTimestampsKHR( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); + } + + //=== VK_KHR_maintenance6 === + + void vkCmdBindDescriptorSets2KHR( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets2KHR( commandBuffer, 
pBindDescriptorSetsInfo );
+ }
+
+ void vkCmdPushConstants2KHR( VkCommandBuffer commandBuffer, const VkPushConstantsInfoKHR * pPushConstantsInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdPushConstants2KHR( commandBuffer, pPushConstantsInfo );
+ }
+
+ void vkCmdPushDescriptorSet2KHR( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfoKHR * pPushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdPushDescriptorSet2KHR( commandBuffer, pPushDescriptorSetInfo );
+ }
+
+ void vkCmdPushDescriptorSetWithTemplate2KHR( VkCommandBuffer commandBuffer,
+ const VkPushDescriptorSetWithTemplateInfoKHR * pPushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdPushDescriptorSetWithTemplate2KHR( commandBuffer, pPushDescriptorSetWithTemplateInfo );
+ }
+
+ void vkCmdSetDescriptorBufferOffsets2EXT( VkCommandBuffer commandBuffer,
+ const VkSetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdSetDescriptorBufferOffsets2EXT( commandBuffer, pSetDescriptorBufferOffsetsInfo );
+ }
+
+ void vkCmdBindDescriptorBufferEmbeddedSamplers2EXT(
+ VkCommandBuffer commandBuffer, const VkBindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( commandBuffer, pBindDescriptorBufferEmbeddedSamplersInfo );
+ }
+ };
+
+ inline ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic & getDispatchLoaderStatic()
+ {
+ static ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic dls;
+ return dls;
+ }
+#endif
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+ struct AllocationCallbacks;
+
+ template <typename OwnerType, typename Dispatch>
+ class ObjectDestroy
+ {
+ public:
+ ObjectDestroy() = default;
+
+ ObjectDestroy( OwnerType owner,
+ Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+ : m_owner( owner )
+ , m_allocationCallbacks( allocationCallbacks )
+ , m_dispatch( &dispatch )
+ {
+ }
+
+ OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_owner;
+ }
+
+ Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_allocationCallbacks;
+ }
+
+ Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT
+ {
+ return *m_dispatch;
+ }
+
+ protected:
+ template <typename T>
+ void destroy( T t ) VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+ m_owner.destroy( t, m_allocationCallbacks, *m_dispatch );
+ }
+
+ private:
+ OwnerType m_owner = {};
+ Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+ Dispatch const * m_dispatch = nullptr;
+ };
+
+ class NoParent;
+
+ template <typename Dispatch>
+ class ObjectDestroy<NoParent, Dispatch>
+ {
+ public:
+ ObjectDestroy() = default;
+
+ ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks,
+ Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+ : m_allocationCallbacks( allocationCallbacks )
+ , m_dispatch( &dispatch )
+ {
+ }
+
+ Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_allocationCallbacks;
+ }
+
+ Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT
+ {
+ return *m_dispatch;
+ }
+
+ protected:
+ template <typename T>
+ void destroy( T t ) VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( m_dispatch );
+ t.destroy( m_allocationCallbacks, *m_dispatch );
+ }
+
+ private:
+ Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+ Dispatch const * m_dispatch = nullptr;
+ };
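+ // Annotation for this vendored copy (not part of upstream vulkan.hpp): ObjectDestroy and the
+ // ObjectFree / ObjectRelease / PoolFree deleters below are the policies that UniqueHandle
+ // instantiates to reclaim a handle on scope exit. A minimal sketch of the flow, assuming an
+ // already-created vk::Device named `device` and the default vk namespace:
+ //
+ // {
+ //   vk::UniqueBuffer buffer = device.createBufferUnique( bufferCreateInfo );
+ // } // ~UniqueHandle invokes ObjectDestroy<Device, Dispatch>::destroy(), which forwards to
+ //   // device.destroy( buffer, allocationCallbacks, dispatch )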
+
+ template <typename OwnerType, typename Dispatch>
+ class ObjectFree
+ {
+ public:
+ ObjectFree() = default;
+
+ ObjectFree( OwnerType owner,
+ Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+ : m_owner( owner )
+ , m_allocationCallbacks( allocationCallbacks )
+ , m_dispatch( &dispatch )
+ {
+ }
+
+ OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_owner;
+ }
+
+ Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_allocationCallbacks;
+ }
+
+ Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT
+ {
+ return *m_dispatch;
+ }
+
+ protected:
+ template <typename T>
+ void destroy( T t ) VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+ ( m_owner.free )( t, m_allocationCallbacks, *m_dispatch );
+ }
+
+ private:
+ OwnerType m_owner = {};
+ Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+ Dispatch const * m_dispatch = nullptr;
+ };
+
+ template <typename OwnerType, typename Dispatch>
+ class ObjectRelease
+ {
+ public:
+ ObjectRelease() = default;
+
+ ObjectRelease( OwnerType owner, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+ : m_owner( owner )
+ , m_dispatch( &dispatch )
+ {
+ }
+
+ OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_owner;
+ }
+
+ Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT
+ {
+ return *m_dispatch;
+ }
+
+ protected:
+ template <typename T>
+ void destroy( T t ) VULKAN_HPP_NOEXCEPT
+ {
+ VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+ m_owner.release( t, *m_dispatch );
+ }
+
+ private:
+ OwnerType m_owner = {};
+ Dispatch const * m_dispatch = nullptr;
+ };
+
+ template <typename OwnerType, typename PoolType, typename Dispatch>
+ class PoolFree
+ {
+ public:
+ PoolFree() = default;
+
+ PoolFree( OwnerType owner, PoolType pool, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+ : m_owner( owner )
+ , m_pool( pool )
+ , m_dispatch( &dispatch )
+ {
+ }
+
+ OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_owner;
+ }
+
+ PoolType getPool() const VULKAN_HPP_NOEXCEPT
+ {
+ return m_pool;
+ }
+
+ Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT
+ {
+ return *m_dispatch;
+ }
+
+ protected:
+ template <typename T>
+ void destroy( T t ) VULKAN_HPP_NOEXCEPT
+ {
+ ( m_owner.free )( m_pool, t, *m_dispatch );
+ }
+
+ private:
+ OwnerType m_owner = OwnerType();
+ PoolType m_pool = PoolType();
+ Dispatch const * m_dispatch = nullptr;
+ };
+
+#endif // !VULKAN_HPP_NO_SMART_HANDLE
+
+ //==================
+ //=== BASE TYPEs ===
+ //==================
+
+ using Bool32 = uint32_t;
+ using DeviceAddress = uint64_t;
+ using DeviceSize = uint64_t;
+ using RemoteAddressNV = void *;
+ using SampleMask = uint32_t;
+
+} // namespace VULKAN_HPP_NAMESPACE
+
+#include <vulkan/vulkan_enums.hpp>
+#if !defined( VULKAN_HPP_NO_TO_STRING )
+# include <vulkan/vulkan_to_string.hpp>
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+namespace std
+{
+ template <>
+ struct is_error_code_enum<VULKAN_HPP_NAMESPACE::Result> : public true_type
+ {
+ };
+} // namespace std
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+ class ErrorCategoryImpl : public std::error_category
+ {
+ public:
+ virtual const char * name() const VULKAN_HPP_NOEXCEPT override
+ {
+ return VULKAN_HPP_NAMESPACE_STRING "::Result";
+ }
+
+ virtual std::string message( int ev ) const override
+ {
+# if defined( VULKAN_HPP_NO_TO_STRING )
+ return std::to_string( ev );
+# else
+ return VULKAN_HPP_NAMESPACE::to_string( static_cast<VULKAN_HPP_NAMESPACE::Result>( ev ) );
+# endif
+ }
+ };
+
+ class Error
+ {
+ public:
+ Error() VULKAN_HPP_NOEXCEPT = default;
+ Error( const Error & ) VULKAN_HPP_NOEXCEPT = default;
+ virtual ~Error() VULKAN_HPP_NOEXCEPT = default;
+
+ virtual const char * what() const VULKAN_HPP_NOEXCEPT = 0;
+ };
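+ // Annotation for this vendored copy (not part of upstream vulkan.hpp): the
+ // std::is_error_code_enum<vk::Result> specialization above, together with make_error_code()
+ // below, wires vk::Result into <system_error>. A minimal sketch, assuming the default vk
+ // namespace:
+ //
+ // std::error_code ec = vk::Result::eErrorDeviceLost; // resolved via ADL make_error_code
+ // // ec.category().name() == "vk::Result", ec.message() == "ErrorDeviceLost"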
+
+ class LogicError
+ : public Error
+ , public std::logic_error
+ {
+ public:
+ explicit LogicError( const std::string & what ) : Error(), std::logic_error( what ) {}
+
+ explicit LogicError( char const * what ) : Error(), std::logic_error( what ) {}
+
+ virtual const char * what() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::logic_error::what();
+ }
+ };
+
+ class SystemError
+ : public Error
+ , public std::system_error
+ {
+ public:
+ SystemError( std::error_code ec ) : Error(), std::system_error( ec ) {}
+
+ SystemError( std::error_code ec, std::string const & what ) : Error(), std::system_error( ec, what ) {}
+
+ SystemError( std::error_code ec, char const * what ) : Error(), std::system_error( ec, what ) {}
+
+ SystemError( int ev, std::error_category const & ecat ) : Error(), std::system_error( ev, ecat ) {}
+
+ SystemError( int ev, std::error_category const & ecat, std::string const & what ) : Error(), std::system_error( ev, ecat, what ) {}
+
+ SystemError( int ev, std::error_category const & ecat, char const * what ) : Error(), std::system_error( ev, ecat, what ) {}
+
+ virtual const char * what() const VULKAN_HPP_NOEXCEPT
+ {
+ return std::system_error::what();
+ }
+ };
+
+ VULKAN_HPP_INLINE const std::error_category & errorCategory() VULKAN_HPP_NOEXCEPT
+ {
+ static ErrorCategoryImpl instance;
+ return instance;
+ }
+
+ VULKAN_HPP_INLINE std::error_code make_error_code( Result e ) VULKAN_HPP_NOEXCEPT
+ {
+ return std::error_code( static_cast<int>( e ), errorCategory() );
+ }
+
+ VULKAN_HPP_INLINE std::error_condition make_error_condition( Result e ) VULKAN_HPP_NOEXCEPT
+ {
+ return std::error_condition( static_cast<int>( e ), errorCategory() );
+ }
+
+ class OutOfHostMemoryError : public SystemError
+ {
+ public:
+ OutOfHostMemoryError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+
+ OutOfHostMemoryError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+ };
+
+ class OutOfDeviceMemoryError : public SystemError
+ {
+ public:
+ OutOfDeviceMemoryError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+
+ OutOfDeviceMemoryError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+ };
+
+ class InitializationFailedError : public SystemError
+ {
+ public:
+ InitializationFailedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+
+ InitializationFailedError( char const * message ) : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+ };
+
+ class DeviceLostError : public SystemError
+ {
+ public:
+ DeviceLostError( std::string const & message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+
+ DeviceLostError( char const * message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+ };
+
+ class MemoryMapFailedError : public SystemError
+ {
+ public:
+ MemoryMapFailedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+
+ MemoryMapFailedError( char const * message ) : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+ };
+
+ class LayerNotPresentError : public SystemError
+ {
+ public:
+ LayerNotPresentError( std::string const & message ) : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+
+ LayerNotPresentError( char const * message )
: SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {} + }; + + class ExtensionNotPresentError : public SystemError + { + public: + ExtensionNotPresentError( std::string const & message ) : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {} + + ExtensionNotPresentError( char const * message ) : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {} + }; + + class FeatureNotPresentError : public SystemError + { + public: + FeatureNotPresentError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {} + + FeatureNotPresentError( char const * message ) : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {} + }; + + class IncompatibleDriverError : public SystemError + { + public: + IncompatibleDriverError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} + + IncompatibleDriverError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} + }; + + class TooManyObjectsError : public SystemError + { + public: + TooManyObjectsError( std::string const & message ) : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} + + TooManyObjectsError( char const * message ) : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} + }; + + class FormatNotSupportedError : public SystemError + { + public: + FormatNotSupportedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + + FormatNotSupportedError( char const * message ) : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + }; + + class FragmentedPoolError : public SystemError + { + public: + FragmentedPoolError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + + FragmentedPoolError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + }; + + class UnknownError : public SystemError + { + public: + UnknownError( std::string const & message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} + + UnknownError( char const * message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} + }; + + class OutOfPoolMemoryError : public SystemError + { + public: + OutOfPoolMemoryError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + + OutOfPoolMemoryError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + }; + + class InvalidExternalHandleError : public SystemError + { + public: + InvalidExternalHandleError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + + InvalidExternalHandleError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + }; + + class FragmentationError : public SystemError + { + public: + FragmentationError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {} + + FragmentationError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {} + }; + + class InvalidOpaqueCaptureAddressError : public SystemError + { + public: + InvalidOpaqueCaptureAddressError( std::string const & message 
) : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {} + + InvalidOpaqueCaptureAddressError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {} + }; + + class SurfaceLostKHRError : public SystemError + { + public: + SurfaceLostKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + + SurfaceLostKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + }; + + class NativeWindowInUseKHRError : public SystemError + { + public: + NativeWindowInUseKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + + NativeWindowInUseKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + }; + + class OutOfDateKHRError : public SystemError + { + public: + OutOfDateKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + + OutOfDateKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + }; + + class IncompatibleDisplayKHRError : public SystemError + { + public: + IncompatibleDisplayKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + + IncompatibleDisplayKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + }; + + class ValidationFailedEXTError : public SystemError + { + public: + ValidationFailedEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + + ValidationFailedEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + }; + + class InvalidShaderNVError : public SystemError + { + public: + InvalidShaderNVError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + + InvalidShaderNVError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + }; + + class ImageUsageNotSupportedKHRError : public SystemError + { + public: + ImageUsageNotSupportedKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorImageUsageNotSupportedKHR ), message ) {} + + ImageUsageNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorImageUsageNotSupportedKHR ), message ) {} + }; + + class VideoPictureLayoutNotSupportedKHRError : public SystemError + { + public: + VideoPictureLayoutNotSupportedKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorVideoPictureLayoutNotSupportedKHR ), message ) + { + } + + VideoPictureLayoutNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoPictureLayoutNotSupportedKHR ), message ) + { + } + }; + + class VideoProfileOperationNotSupportedKHRError : public SystemError + { + public: + VideoProfileOperationNotSupportedKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorVideoProfileOperationNotSupportedKHR ), message ) + { + } + + VideoProfileOperationNotSupportedKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorVideoProfileOperationNotSupportedKHR ), message ) + { + } + }; + + 
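+ // Annotation for this vendored copy (not part of upstream vulkan.hpp): each class here maps one
+ // negative VkResult onto a SystemError subtype, so a caller can catch a specific failure or the
+ // common base. A minimal sketch, assuming a created vk::Device `device` plus swapchain objects:
+ //
+ // try
+ // {
+ //   vk::ResultValue<uint32_t> next = device.acquireNextImageKHR( swapchain, UINT64_MAX, semaphore, nullptr );
+ // }
+ // catch ( vk::OutOfDateKHRError const & ) { /* recreate the swapchain */ }
+ // catch ( vk::SystemError const & e ) { /* e.code() carries the underlying vk::Result */ }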
class VideoProfileFormatNotSupportedKHRError : public SystemError + { + public: + VideoProfileFormatNotSupportedKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorVideoProfileFormatNotSupportedKHR ), message ) + { + } + + VideoProfileFormatNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoProfileFormatNotSupportedKHR ), message ) + { + } + }; + + class VideoProfileCodecNotSupportedKHRError : public SystemError + { + public: + VideoProfileCodecNotSupportedKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorVideoProfileCodecNotSupportedKHR ), message ) + { + } + + VideoProfileCodecNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoProfileCodecNotSupportedKHR ), message ) {} + }; + + class VideoStdVersionNotSupportedKHRError : public SystemError + { + public: + VideoStdVersionNotSupportedKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorVideoStdVersionNotSupportedKHR ), message ) + { + } + + VideoStdVersionNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoStdVersionNotSupportedKHR ), message ) {} + }; + + class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError + { + public: + InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) + { + } + + InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) + { + } + }; + + class NotPermittedKHRError : public SystemError + { + public: + NotPermittedKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorNotPermittedKHR ), message ) {} + + NotPermittedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorNotPermittedKHR ), message ) {} + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + class FullScreenExclusiveModeLostEXTError : public SystemError + { + public: + FullScreenExclusiveModeLostEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) + { + } + + FullScreenExclusiveModeLostEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {} + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + class InvalidVideoStdParametersKHRError : public SystemError + { + public: + InvalidVideoStdParametersKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidVideoStdParametersKHR ), message ) {} + + InvalidVideoStdParametersKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidVideoStdParametersKHR ), message ) {} + }; + + class CompressionExhaustedEXTError : public SystemError + { + public: + CompressionExhaustedEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {} + + CompressionExhaustedEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {} + }; + + class IncompatibleShaderBinaryEXTError : public SystemError + { + public: + IncompatibleShaderBinaryEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleShaderBinaryEXT ), message ) {} + + 
IncompatibleShaderBinaryEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleShaderBinaryEXT ), message ) {} + }; + + namespace detail + { + [[noreturn]] VULKAN_HPP_INLINE void throwResultException( Result result, char const * message ) + { + switch ( result ) + { + case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError( message ); + case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError( message ); + case Result::eErrorInitializationFailed: throw InitializationFailedError( message ); + case Result::eErrorDeviceLost: throw DeviceLostError( message ); + case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError( message ); + case Result::eErrorLayerNotPresent: throw LayerNotPresentError( message ); + case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError( message ); + case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError( message ); + case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError( message ); + case Result::eErrorTooManyObjects: throw TooManyObjectsError( message ); + case Result::eErrorFormatNotSupported: throw FormatNotSupportedError( message ); + case Result::eErrorFragmentedPool: throw FragmentedPoolError( message ); + case Result::eErrorUnknown: throw UnknownError( message ); + case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError( message ); + case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError( message ); + case Result::eErrorFragmentation: throw FragmentationError( message ); + case Result::eErrorInvalidOpaqueCaptureAddress: throw InvalidOpaqueCaptureAddressError( message ); + case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError( message ); + case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError( message ); + case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError( message ); + case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message ); + case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message ); + case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message ); + case Result::eErrorImageUsageNotSupportedKHR: throw ImageUsageNotSupportedKHRError( message ); + case Result::eErrorVideoPictureLayoutNotSupportedKHR: throw VideoPictureLayoutNotSupportedKHRError( message ); + case Result::eErrorVideoProfileOperationNotSupportedKHR: throw VideoProfileOperationNotSupportedKHRError( message ); + case Result::eErrorVideoProfileFormatNotSupportedKHR: throw VideoProfileFormatNotSupportedKHRError( message ); + case Result::eErrorVideoProfileCodecNotSupportedKHR: throw VideoProfileCodecNotSupportedKHRError( message ); + case Result::eErrorVideoStdVersionNotSupportedKHR: throw VideoStdVersionNotSupportedKHRError( message ); + case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message ); + case Result::eErrorNotPermittedKHR: throw NotPermittedKHRError( message ); +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case Result::eErrorInvalidVideoStdParametersKHR: throw InvalidVideoStdParametersKHRError( message ); + case Result::eErrorCompressionExhaustedEXT: throw CompressionExhaustedEXTError( message ); + case Result::eErrorIncompatibleShaderBinaryEXT: throw IncompatibleShaderBinaryEXTError( message ); + default: throw SystemError( make_error_code( result ), message ); + 
}
+ }
+ } // namespace detail
+#endif
+
+ template <typename T>
+ void ignore( T const & ) VULKAN_HPP_NOEXCEPT
+ {
+ }
+
+ template <typename T>
+ struct ResultValue
+ {
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+ ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( v ) ) )
+#else
+ ResultValue( Result r, T & v )
+#endif
+ : result( r ), value( v )
+ {
+ }
+
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+ ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( std::move( v ) ) ) )
+#else
+ ResultValue( Result r, T && v )
+#endif
+ : result( r ), value( std::move( v ) )
+ {
+ }
+
+ Result result;
+ T value;
+
+ operator std::tuple<Result &, T &>() VULKAN_HPP_NOEXCEPT
+ {
+ return std::tuple<Result &, T &>( result, value );
+ }
+ };
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+ template <typename Type, typename Dispatch>
+ struct ResultValue<UniqueHandle<Type, Dispatch>>
+ {
+# ifdef VULKAN_HPP_HAS_NOEXCEPT
+ ResultValue( Result r, UniqueHandle<Type, Dispatch> && v ) VULKAN_HPP_NOEXCEPT
+# else
+ ResultValue( Result r, UniqueHandle<Type, Dispatch> && v )
+# endif
+ : result( r )
+ , value( std::move( v ) )
+ {
+ }
+
+ VULKAN_HPP_DEPRECATED(
+ "asTuple() on an l-value is deprecated, as it implicitly moves the UniqueHandle out of the ResultValue. Use asTuple() on an r-value instead, requiring to explicitly move the UniqueHandle." )
+
+ std::tuple<Result, UniqueHandle<Type, Dispatch>> asTuple() &
+ {
+ return std::make_tuple( result, std::move( value ) );
+ }
+
+ std::tuple<Result, UniqueHandle<Type, Dispatch>> asTuple() &&
+ {
+ return std::make_tuple( result, std::move( value ) );
+ }
+
+ Result result;
+ UniqueHandle<Type, Dispatch> value;
+ };
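+ // Annotation for this vendored copy (not part of upstream vulkan.hpp): asTuple() is
+ // rvalue-qualified so the contained UniqueHandle can only be moved out explicitly. A minimal
+ // sketch, assuming a created vk::Device `device` and a pipeline create-info `createInfo`:
+ //
+ // auto [result, pipeline] = device.createGraphicsPipelineUnique( nullptr, createInfo ).asTuple();
+ // // `result` may be a non-throwing success code such as vk::Result::ePipelineCompileRequiredEXT;
+ // // `pipeline` is the vk::UniquePipeline moved out of the temporary ResultValue.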
+
+ template <typename Type, typename Dispatch>
+ struct ResultValue<std::vector<UniqueHandle<Type, Dispatch>>>
+ {
+# ifdef VULKAN_HPP_HAS_NOEXCEPT
+ ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v ) VULKAN_HPP_NOEXCEPT
+# else
+ ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v )
+# endif
+ : result( r )
+ , value( std::move( v ) )
+ {
+ }
+
+ VULKAN_HPP_DEPRECATED(
+ "asTuple() on an l-value is deprecated, as it implicitly moves the UniqueHandle out of the ResultValue. Use asTuple() on an r-value instead, requiring to explicitly move the UniqueHandle." )
+
+ std::tuple<Result, std::vector<UniqueHandle<Type, Dispatch>>> asTuple() &
+ {
+ return std::make_tuple( result, std::move( value ) );
+ }
+
+ std::tuple<Result, std::vector<UniqueHandle<Type, Dispatch>>> asTuple() &&
+ {
+ return std::make_tuple( result, std::move( value ) );
+ }
+
+ Result result;
+ std::vector<UniqueHandle<Type, Dispatch>> value;
+ };
+#endif
+
+ template <typename T>
+ struct ResultValueType
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ typedef ResultValue<T> type;
+#else
+ typedef T type;
+#endif
+ };
+
+ template <>
+ struct ResultValueType<void>
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ typedef Result type;
+#else
+ typedef void type;
+#endif
+ };
+
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type createResultValueType( Result result )
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ return result;
+#else
+ ignore( result );
+#endif
+ }
+
+ template <typename T>
+ VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValueType( Result result, T & data )
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ return ResultValue<T>( result, data );
+#else
+ ignore( result );
+ return data;
+#endif
+ }
+
+ template <typename T>
+ VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValueType( Result result, T && data )
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ return ResultValue<T>( result, std::move( data ) );
+#else
+ ignore( result );
+ return std::move( data );
+#endif
+ }
+
+ VULKAN_HPP_INLINE void resultCheck( Result result, char const * message )
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ ignore( result ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+ ignore( message );
+ VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
+#else
+ if ( result != Result::eSuccess )
+ {
+ detail::throwResultException( result, message );
+ }
+#endif
+ }
+
+ VULKAN_HPP_INLINE void resultCheck( Result result, char const * message, std::initializer_list<Result> successCodes )
+ {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+ ignore( result ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+ ignore( message );
+ ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+ VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else
+ if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+ {
+ detail::throwResultException( result, message );
+ }
+#endif
+ }
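+ // Annotation for this vendored copy (not part of upstream vulkan.hpp): resultCheck() is the
+ // funnel between the two error models: with exceptions enabled it calls
+ // detail::throwResultException(), while with VULKAN_HPP_NO_EXCEPTIONS it degrades to
+ // VULKAN_HPP_ASSERT_ON_RESULT. A minimal sketch:
+ //
+ // vk::Result result = static_cast<vk::Result>( vkEndCommandBuffer( commandBuffer ) );
+ // vk::resultCheck( result, "vkEndCommandBuffer" ); // throws e.g. vk::OutOfDeviceMemoryError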
+
+ //===========================
+ //=== CONSTEXPR CONSTANTs ===
+ //===========================
+
+ //=== VK_VERSION_1_0 ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t AttachmentUnused = VK_ATTACHMENT_UNUSED;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t False = VK_FALSE;
+ VULKAN_HPP_CONSTEXPR_INLINE float LodClampNone = VK_LOD_CLAMP_NONE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyIgnored = VK_QUEUE_FAMILY_IGNORED;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingArrayLayers = VK_REMAINING_ARRAY_LAYERS;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingMipLevels = VK_REMAINING_MIP_LEVELS;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t SubpassExternal = VK_SUBPASS_EXTERNAL;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t True = VK_TRUE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint64_t WholeSize = VK_WHOLE_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryTypes = VK_MAX_MEMORY_TYPES;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxPhysicalDeviceNameSize = VK_MAX_PHYSICAL_DEVICE_NAME_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t UuidSize = VK_UUID_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxExtensionNameSize = VK_MAX_EXTENSION_NAME_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDescriptionSize = VK_MAX_DESCRIPTION_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryHeaps = VK_MAX_MEMORY_HEAPS;
+
+ //=== VK_VERSION_1_1 ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSize = VK_MAX_DEVICE_GROUP_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSize = VK_LUID_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternal = VK_QUEUE_FAMILY_EXTERNAL;
+
+ //=== VK_VERSION_1_2 ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSize = VK_MAX_DRIVER_NAME_SIZE;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSize = VK_MAX_DRIVER_INFO_SIZE;
+
+ //=== VK_KHR_device_group_creation ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSizeKHR = VK_MAX_DEVICE_GROUP_SIZE_KHR;
+
+ //=== VK_KHR_external_memory_capabilities ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSizeKHR = VK_LUID_SIZE_KHR;
+
+ //=== VK_KHR_external_memory ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternalKHR = VK_QUEUE_FAMILY_EXTERNAL_KHR;
+
+ //=== VK_EXT_queue_family_foreign ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyForeignEXT = VK_QUEUE_FAMILY_FOREIGN_EXT;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ //=== VK_AMDX_shader_enqueue ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderIndexUnusedAMDX = VK_SHADER_INDEX_UNUSED_AMDX;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+ //=== VK_KHR_ray_tracing_pipeline ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedKHR = VK_SHADER_UNUSED_KHR;
+
+ //=== VK_NV_ray_tracing ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedNV = VK_SHADER_UNUSED_NV;
+
+ //=== VK_KHR_global_priority ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeKHR = VK_MAX_GLOBAL_PRIORITY_SIZE_KHR;
+
+ //=== VK_KHR_driver_properties ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSizeKHR = VK_MAX_DRIVER_NAME_SIZE_KHR;
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSizeKHR = VK_MAX_DRIVER_INFO_SIZE_KHR;
+
+ //=== VK_EXT_global_priority_query ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeEXT = VK_MAX_GLOBAL_PRIORITY_SIZE_EXT;
+
+ //=== VK_EXT_image_sliced_view_of_3d ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t Remaining3DSlicesEXT = VK_REMAINING_3D_SLICES_EXT;
+
+ //=== VK_EXT_shader_module_identifier ===
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxShaderModuleIdentifierSizeEXT = VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT;
+
+ //========================
+ //=== CONSTEXPR VALUEs ===
+ //========================
+ VULKAN_HPP_CONSTEXPR_INLINE uint32_t HeaderVersion = VK_HEADER_VERSION;
+
+ //=========================
+ //=== CONSTEXPR CALLEEs ===
+ //=========================
+ template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+ VULKAN_HPP_CONSTEXPR uint32_t apiVersionMajor( T const version )
+ {
+ return ( ( (uint32_t)( version ) >> 22U ) & 0x7FU );
+ }
+
+ template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+ VULKAN_HPP_CONSTEXPR uint32_t apiVersionMinor( T const version )
+ {
+ return ( ( (uint32_t)( version ) >> 12U ) & 0x3FFU );
+ }
+
+ template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+ VULKAN_HPP_CONSTEXPR uint32_t apiVersionPatch( T const version )
+ {
+ return ( (uint32_t)(version)&0xFFFU );
+ }
+
+ template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+ VULKAN_HPP_CONSTEXPR uint32_t apiVersionVariant( T const version )
+ {
+ return ( (uint32_t)( version ) >> 29U );
+ }
+
+ template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+ VULKAN_HPP_CONSTEXPR uint32_t makeApiVersion( T const variant, T const major, T const minor, T const patch )
+ {
+ return ( ( ( (uint32_t)( variant ) ) << 29U ) | ( ( (uint32_t)( major ) ) << 22U ) | ( ( (uint32_t)( minor ) ) << 12U ) | ( (uint32_t)( patch ) ) );
+ }
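+ // Annotation for this vendored copy (not part of upstream vulkan.hpp): a packed Vulkan version
+ // is laid out variant:3 | major:7 | minor:10 | patch:12 from the most significant bit, matching
+ // VK_MAKE_API_VERSION. Worked example:
+ //
+ // constexpr uint32_t v = vk::makeApiVersion( 0, 1, 3, 275 ); // 0<<29 | 1<<22 | 3<<12 | 275
+ // static_assert( vk::apiVersionMajor( v ) == 1 && vk::apiVersionMinor( v ) == 3 && vk::apiVersionPatch( v ) == 275, "layout" );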
+
+ template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+ VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_MAKE_API_VERSION should be used instead." )
+ VULKAN_HPP_CONSTEXPR uint32_t makeVersion( T const major, T const minor, T const patch )
+ {
+ return ( ( ( (uint32_t)( major ) ) << 22U ) | ( ( (uint32_t)( minor ) ) << 12U ) | ( (uint32_t)( patch ) ) );
+ }
+
+ template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+ VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_API_VERSION_MAJOR should be used instead." )
+ VULKAN_HPP_CONSTEXPR uint32_t versionMajor( T const version )
+ {
+ return ( (uint32_t)( version ) >> 22U );
+ }
+
+ template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+ VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_API_VERSION_MINOR should be used instead." )
+ VULKAN_HPP_CONSTEXPR uint32_t versionMinor( T const version )
+ {
+ return ( ( (uint32_t)( version ) >> 12U ) & 0x3FFU );
+ }
+
+ template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type>
+ VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_API_VERSION_PATCH should be used instead." )
+ VULKAN_HPP_CONSTEXPR uint32_t versionPatch( T const version )
+ {
+ return ( (uint32_t)(version)&0xFFFU );
+ }
+
+ //=========================
+ //=== CONSTEXPR CALLERs ===
+ //=========================
+ VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion = makeApiVersion( 0, 1, 0, 0 );
+ VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion10 = makeApiVersion( 0, 1, 0, 0 );
+ VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion11 = makeApiVersion( 0, 1, 1, 0 );
+ VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion12 = makeApiVersion( 0, 1, 2, 0 );
+ VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion13 = makeApiVersion( 0, 1, 3, 0 );
+ VULKAN_HPP_CONSTEXPR_INLINE auto HeaderVersionComplete = makeApiVersion( 0, 1, 3, VK_HEADER_VERSION );
+
+ //=================================
+ //=== CONSTEXPR EXTENSION NAMEs ===
+ //=================================
+
+ //=== VK_KHR_surface ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRSurfaceExtensionName = VK_KHR_SURFACE_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRSurfaceSpecVersion = VK_KHR_SURFACE_SPEC_VERSION;
+
+ //=== VK_KHR_swapchain ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRSwapchainExtensionName = VK_KHR_SWAPCHAIN_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRSwapchainSpecVersion = VK_KHR_SWAPCHAIN_SPEC_VERSION;
+
+ //=== VK_KHR_display ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRDisplayExtensionName = VK_KHR_DISPLAY_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRDisplaySpecVersion = VK_KHR_DISPLAY_SPEC_VERSION;
+
+ //=== VK_KHR_display_swapchain ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRDisplaySwapchainExtensionName = VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRDisplaySwapchainSpecVersion = VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION;
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+ //=== VK_KHR_xlib_surface ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRXlibSurfaceExtensionName = VK_KHR_XLIB_SURFACE_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRXlibSurfaceSpecVersion = VK_KHR_XLIB_SURFACE_SPEC_VERSION;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+ //=== VK_KHR_xcb_surface ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRXcbSurfaceExtensionName = VK_KHR_XCB_SURFACE_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRXcbSurfaceSpecVersion = VK_KHR_XCB_SURFACE_SPEC_VERSION;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+ //=== VK_KHR_wayland_surface ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRWaylandSurfaceExtensionName = VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRWaylandSurfaceSpecVersion = VK_KHR_WAYLAND_SURFACE_SPEC_VERSION;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+ //=== VK_KHR_android_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRAndroidSurfaceExtensionName = VK_KHR_ANDROID_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRAndroidSurfaceSpecVersion = VK_KHR_ANDROID_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWin32SurfaceExtensionName = VK_KHR_WIN32_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWin32SurfaceSpecVersion = VK_KHR_WIN32_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + VULKAN_HPP_DEPRECATED( "The VK_EXT_debug_report extension has been deprecated by VK_EXT_debug_utils." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugReportExtensionName = VK_EXT_DEBUG_REPORT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_debug_report extension has been deprecated by VK_EXT_debug_utils." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugReportSpecVersion = VK_EXT_DEBUG_REPORT_SPEC_VERSION; + + //=== VK_NV_glsl_shader === + VULKAN_HPP_DEPRECATED( "The VK_NV_glsl_shader extension has been deprecated." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVGlslShaderExtensionName = VK_NV_GLSL_SHADER_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_glsl_shader extension has been deprecated." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVGlslShaderSpecVersion = VK_NV_GLSL_SHADER_SPEC_VERSION; + + //=== VK_EXT_depth_range_unrestricted === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthRangeUnrestrictedExtensionName = VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthRangeUnrestrictedSpecVersion = VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION; + + //=== VK_KHR_sampler_mirror_clamp_to_edge === + VULKAN_HPP_DEPRECATED( "The VK_KHR_sampler_mirror_clamp_to_edge extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerMirrorClampToEdgeExtensionName = VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_sampler_mirror_clamp_to_edge extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerMirrorClampToEdgeSpecVersion = VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION; + + //=== VK_IMG_filter_cubic === + VULKAN_HPP_CONSTEXPR_INLINE auto IMGFilterCubicExtensionName = VK_IMG_FILTER_CUBIC_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto IMGFilterCubicSpecVersion = VK_IMG_FILTER_CUBIC_SPEC_VERSION; + + //=== VK_AMD_rasterization_order === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDRasterizationOrderExtensionName = VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDRasterizationOrderSpecVersion = VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION; + + //=== VK_AMD_shader_trinary_minmax === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderTrinaryMinmaxExtensionName = VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderTrinaryMinmaxSpecVersion = VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION; + + //=== VK_AMD_shader_explicit_vertex_parameter === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderExplicitVertexParameterExtensionName = VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderExplicitVertexParameterSpecVersion = VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION; + + //=== VK_EXT_debug_marker === + VULKAN_HPP_DEPRECATED( "The VK_EXT_debug_marker extension has been promoted to VK_EXT_debug_utils." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugMarkerExtensionName = VK_EXT_DEBUG_MARKER_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_debug_marker extension has been promoted to VK_EXT_debug_utils." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugMarkerSpecVersion = VK_EXT_DEBUG_MARKER_SPEC_VERSION; + + //=== VK_KHR_video_queue === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoQueueExtensionName = VK_KHR_VIDEO_QUEUE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoQueueSpecVersion = VK_KHR_VIDEO_QUEUE_SPEC_VERSION; + + //=== VK_KHR_video_decode_queue === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeQueueExtensionName = VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeQueueSpecVersion = VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION; + + //=== VK_AMD_gcn_shader === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGcnShaderExtensionName = VK_AMD_GCN_SHADER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGcnShaderSpecVersion = VK_AMD_GCN_SHADER_SPEC_VERSION; + + //=== VK_NV_dedicated_allocation === + VULKAN_HPP_DEPRECATED( "The VK_NV_dedicated_allocation extension has been deprecated by VK_KHR_dedicated_allocation." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVDedicatedAllocationExtensionName = VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_dedicated_allocation extension has been deprecated by VK_KHR_dedicated_allocation." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVDedicatedAllocationSpecVersion = VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION; + + //=== VK_EXT_transform_feedback === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTransformFeedbackExtensionName = VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTransformFeedbackSpecVersion = VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION; + + //=== VK_NVX_binary_import === + VULKAN_HPP_CONSTEXPR_INLINE auto NVXBinaryImportExtensionName = VK_NVX_BINARY_IMPORT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVXBinaryImportSpecVersion = VK_NVX_BINARY_IMPORT_SPEC_VERSION; + + //=== VK_NVX_image_view_handle === + VULKAN_HPP_CONSTEXPR_INLINE auto NVXImageViewHandleExtensionName = VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVXImageViewHandleSpecVersion = VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION; + + //=== VK_AMD_draw_indirect_count === + VULKAN_HPP_DEPRECATED( "The VK_AMD_draw_indirect_count extension has been promoted to VK_KHR_draw_indirect_count." ) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDrawIndirectCountExtensionName = VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_AMD_draw_indirect_count extension has been promoted to VK_KHR_draw_indirect_count." ) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDrawIndirectCountSpecVersion = VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION; + + //=== VK_AMD_negative_viewport_height === + VULKAN_HPP_DEPRECATED( "The VK_AMD_negative_viewport_height extension has been obsoleted by VK_KHR_maintenance1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDNegativeViewportHeightExtensionName = VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_AMD_negative_viewport_height extension has been obsoleted by VK_KHR_maintenance1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDNegativeViewportHeightSpecVersion = VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION; + + //=== VK_AMD_gpu_shader_half_float === + VULKAN_HPP_DEPRECATED( "The VK_AMD_gpu_shader_half_float extension has been deprecated by VK_KHR_shader_float16_int8." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGpuShaderHalfFloatExtensionName = VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_AMD_gpu_shader_half_float extension has been deprecated by VK_KHR_shader_float16_int8." ) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGpuShaderHalfFloatSpecVersion = VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION; + + //=== VK_AMD_shader_ballot === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderBallotExtensionName = VK_AMD_SHADER_BALLOT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderBallotSpecVersion = VK_AMD_SHADER_BALLOT_SPEC_VERSION; + + //=== VK_KHR_video_encode_h264 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeH264ExtensionName = VK_KHR_VIDEO_ENCODE_H264_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeH264SpecVersion = VK_KHR_VIDEO_ENCODE_H264_SPEC_VERSION; + + //=== VK_KHR_video_encode_h265 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeH265ExtensionName = VK_KHR_VIDEO_ENCODE_H265_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeH265SpecVersion = VK_KHR_VIDEO_ENCODE_H265_SPEC_VERSION; + + //=== VK_KHR_video_decode_h264 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeH264ExtensionName = VK_KHR_VIDEO_DECODE_H264_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeH264SpecVersion = VK_KHR_VIDEO_DECODE_H264_SPEC_VERSION; + + //=== VK_AMD_texture_gather_bias_lod === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDTextureGatherBiasLodExtensionName = VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDTextureGatherBiasLodSpecVersion = VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION; + + //=== VK_AMD_shader_info === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderInfoExtensionName = VK_AMD_SHADER_INFO_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderInfoSpecVersion = VK_AMD_SHADER_INFO_SPEC_VERSION; + + //=== VK_KHR_dynamic_rendering === + VULKAN_HPP_DEPRECATED( "The VK_KHR_dynamic_rendering extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDynamicRenderingExtensionName = VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_dynamic_rendering extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDynamicRenderingSpecVersion = VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION; + + //=== VK_AMD_shader_image_load_store_lod === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderImageLoadStoreLodExtensionName = VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderImageLoadStoreLodSpecVersion = VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto GGPStreamDescriptorSurfaceExtensionName = VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GGPStreamDescriptorSurfaceSpecVersion = VK_GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_corner_sampled_image === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCornerSampledImageExtensionName = VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCornerSampledImageSpecVersion = VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION; + + //=== VK_KHR_multiview === + VULKAN_HPP_DEPRECATED( "The VK_KHR_multiview extension has been promoted to core in version 1.1." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMultiviewExtensionName = VK_KHR_MULTIVIEW_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_multiview extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMultiviewSpecVersion = VK_KHR_MULTIVIEW_SPEC_VERSION; + + //=== VK_IMG_format_pvrtc === + VULKAN_HPP_DEPRECATED( "The VK_IMG_format_pvrtc extension has been deprecated." ) + VULKAN_HPP_CONSTEXPR_INLINE auto IMGFormatPvrtcExtensionName = VK_IMG_FORMAT_PVRTC_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_IMG_format_pvrtc extension has been deprecated." ) + VULKAN_HPP_CONSTEXPR_INLINE auto IMGFormatPvrtcSpecVersion = VK_IMG_FORMAT_PVRTC_SPEC_VERSION; + + //=== VK_NV_external_memory_capabilities === + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory_capabilities extension has been deprecated by VK_KHR_external_memory_capabilities." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryCapabilitiesExtensionName = VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory_capabilities extension has been deprecated by VK_KHR_external_memory_capabilities." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryCapabilitiesSpecVersion = VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION; + + //=== VK_NV_external_memory === + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory extension has been deprecated by VK_KHR_external_memory." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryExtensionName = VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory extension has been deprecated by VK_KHR_external_memory." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemorySpecVersion = VK_NV_EXTERNAL_MEMORY_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory_win32 extension has been deprecated by VK_KHR_external_memory_win32." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryWin32ExtensionName = VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory_win32 extension has been deprecated by VK_KHR_external_memory_win32." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryWin32SpecVersion = VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_win32_keyed_mutex === + VULKAN_HPP_DEPRECATED( "The VK_NV_win32_keyed_mutex extension has been promoted to VK_KHR_win32_keyed_mutex." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVWin32KeyedMutexExtensionName = VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_win32_keyed_mutex extension has been promoted to VK_KHR_win32_keyed_mutex." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVWin32KeyedMutexSpecVersion = VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + VULKAN_HPP_DEPRECATED( "The VK_KHR_get_physical_device_properties2 extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetPhysicalDeviceProperties2ExtensionName = VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_get_physical_device_properties2 extension has been promoted to core in version 1.1." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetPhysicalDeviceProperties2SpecVersion = VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION; + + //=== VK_KHR_device_group === + VULKAN_HPP_DEPRECATED( "The VK_KHR_device_group extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupExtensionName = VK_KHR_DEVICE_GROUP_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_device_group extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupSpecVersion = VK_KHR_DEVICE_GROUP_SPEC_VERSION; + + //=== VK_EXT_validation_flags === + VULKAN_HPP_DEPRECATED( "The VK_EXT_validation_flags extension has been deprecated by VK_EXT_layer_settings." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationFlagsExtensionName = VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_validation_flags extension has been deprecated by VK_EXT_layer_settings." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationFlagsSpecVersion = VK_EXT_VALIDATION_FLAGS_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto NNViSurfaceExtensionName = VK_NN_VI_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NNViSurfaceSpecVersion = VK_NN_VI_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_shader_draw_parameters === + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_draw_parameters extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderDrawParametersExtensionName = VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_draw_parameters extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderDrawParametersSpecVersion = VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION; + + //=== VK_EXT_shader_subgroup_ballot === + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_subgroup_ballot extension has been deprecated by VK_VERSION_1_2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderSubgroupBallotExtensionName = VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_subgroup_ballot extension has been deprecated by VK_VERSION_1_2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderSubgroupBallotSpecVersion = VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION; + + //=== VK_EXT_shader_subgroup_vote === + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_subgroup_vote extension has been deprecated by VK_VERSION_1_1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderSubgroupVoteExtensionName = VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_subgroup_vote extension has been deprecated by VK_VERSION_1_1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderSubgroupVoteSpecVersion = VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION; + + //=== VK_EXT_texture_compression_astc_hdr === + VULKAN_HPP_DEPRECATED( "The VK_EXT_texture_compression_astc_hdr extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTextureCompressionAstcHdrExtensionName = VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_texture_compression_astc_hdr extension has been promoted to core in version 1.3." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTextureCompressionAstcHdrSpecVersion = VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION; + + //=== VK_EXT_astc_decode_mode === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAstcDecodeModeExtensionName = VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAstcDecodeModeSpecVersion = VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION; + + //=== VK_EXT_pipeline_robustness === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineRobustnessExtensionName = VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineRobustnessSpecVersion = VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION; + + //=== VK_KHR_maintenance1 === + VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance1 extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance1ExtensionName = VK_KHR_MAINTENANCE_1_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance1 extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance1SpecVersion = VK_KHR_MAINTENANCE_1_SPEC_VERSION; + + //=== VK_KHR_device_group_creation === + VULKAN_HPP_DEPRECATED( "The VK_KHR_device_group_creation extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupCreationExtensionName = VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_device_group_creation extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupCreationSpecVersion = VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION; + + //=== VK_KHR_external_memory_capabilities === + VULKAN_HPP_DEPRECATED( "The VK_KHR_external_memory_capabilities extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryCapabilitiesExtensionName = VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_external_memory_capabilities extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryCapabilitiesSpecVersion = VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION; + + //=== VK_KHR_external_memory === + VULKAN_HPP_DEPRECATED( "The VK_KHR_external_memory extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryExtensionName = VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_external_memory extension has been promoted to core in version 1.1." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemorySpecVersion = VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryWin32ExtensionName = VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryWin32SpecVersion = VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryFdExtensionName = VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryFdSpecVersion = VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_keyed_mutex === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWin32KeyedMutexExtensionName = VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWin32KeyedMutexSpecVersion = VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_capabilities === + VULKAN_HPP_DEPRECATED( "The VK_KHR_external_semaphore_capabilities extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreCapabilitiesExtensionName = VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_external_semaphore_capabilities extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreCapabilitiesSpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION; + + //=== VK_KHR_external_semaphore === + VULKAN_HPP_DEPRECATED( "The VK_KHR_external_semaphore extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreExtensionName = VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_external_semaphore extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreSpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreWin32ExtensionName = VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreWin32SpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreFdExtensionName = VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreFdSpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION; + + //=== VK_KHR_push_descriptor === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPushDescriptorExtensionName = VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPushDescriptorSpecVersion = VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION; + + //=== VK_EXT_conditional_rendering === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTConditionalRenderingExtensionName = VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTConditionalRenderingSpecVersion = VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION; + + //=== VK_KHR_shader_float16_int8 === + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_float16_int8 extension has been promoted to core in version 1.2." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloat16Int8ExtensionName = VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_float16_int8 extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloat16Int8SpecVersion = VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION; + + //=== VK_KHR_16bit_storage === + VULKAN_HPP_DEPRECATED( "The VK_KHR_16bit_storage extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHR16BitStorageExtensionName = VK_KHR_16BIT_STORAGE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_16bit_storage extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHR16BitStorageSpecVersion = VK_KHR_16BIT_STORAGE_SPEC_VERSION; + + //=== VK_KHR_incremental_present === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRIncrementalPresentExtensionName = VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRIncrementalPresentSpecVersion = VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION; + + //=== VK_KHR_descriptor_update_template === + VULKAN_HPP_DEPRECATED( "The VK_KHR_descriptor_update_template extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDescriptorUpdateTemplateExtensionName = VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_descriptor_update_template extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDescriptorUpdateTemplateSpecVersion = VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION; + + //=== VK_NV_clip_space_w_scaling === + VULKAN_HPP_CONSTEXPR_INLINE auto NVClipSpaceWScalingExtensionName = VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVClipSpaceWScalingSpecVersion = VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION; + + //=== VK_EXT_direct_mode_display === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDirectModeDisplayExtensionName = VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDirectModeDisplaySpecVersion = VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAcquireXlibDisplayExtensionName = VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAcquireXlibDisplaySpecVersion = VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDisplaySurfaceCounterExtensionName = VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDisplaySurfaceCounterSpecVersion = VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION; + + //=== VK_EXT_display_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDisplayControlExtensionName = VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDisplayControlSpecVersion = VK_EXT_DISPLAY_CONTROL_SPEC_VERSION; + + //=== VK_GOOGLE_display_timing === + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEDisplayTimingExtensionName = VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEDisplayTimingSpecVersion = VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION; + + //=== VK_NV_sample_mask_override_coverage === + VULKAN_HPP_CONSTEXPR_INLINE auto NVSampleMaskOverrideCoverageExtensionName = VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVSampleMaskOverrideCoverageSpecVersion = 
VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION; + + //=== VK_NV_geometry_shader_passthrough === + VULKAN_HPP_CONSTEXPR_INLINE auto NVGeometryShaderPassthroughExtensionName = VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVGeometryShaderPassthroughSpecVersion = VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION; + + //=== VK_NV_viewport_array2 === + VULKAN_HPP_CONSTEXPR_INLINE auto NVViewportArray2ExtensionName = VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVViewportArray2SpecVersion = VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION; + + //=== VK_NVX_multiview_per_view_attributes === + VULKAN_HPP_CONSTEXPR_INLINE auto NVXMultiviewPerViewAttributesExtensionName = VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVXMultiviewPerViewAttributesSpecVersion = VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION; + + //=== VK_NV_viewport_swizzle === + VULKAN_HPP_CONSTEXPR_INLINE auto NVViewportSwizzleExtensionName = VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVViewportSwizzleSpecVersion = VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION; + + //=== VK_EXT_discard_rectangles === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDiscardRectanglesExtensionName = VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDiscardRectanglesSpecVersion = VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION; + + //=== VK_EXT_conservative_rasterization === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTConservativeRasterizationExtensionName = VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTConservativeRasterizationSpecVersion = VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION; + + //=== VK_EXT_depth_clip_enable === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClipEnableExtensionName = VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClipEnableSpecVersion = VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION; + + //=== VK_EXT_swapchain_colorspace === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSwapchainColorSpaceExtensionName = VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSwapchainColorSpaceSpecVersion = VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION; + + //=== VK_EXT_hdr_metadata === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHdrMetadataExtensionName = VK_EXT_HDR_METADATA_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHdrMetadataSpecVersion = VK_EXT_HDR_METADATA_SPEC_VERSION; + + //=== VK_KHR_imageless_framebuffer === + VULKAN_HPP_DEPRECATED( "The VK_KHR_imageless_framebuffer extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRImagelessFramebufferExtensionName = VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_imageless_framebuffer extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRImagelessFramebufferSpecVersion = VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION; + + //=== VK_KHR_create_renderpass2 === + VULKAN_HPP_DEPRECATED( "The VK_KHR_create_renderpass2 extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCreateRenderpass2ExtensionName = VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_create_renderpass2 extension has been promoted to core in version 1.2." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCreateRenderpass2SpecVersion = VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION; + + //=== VK_IMG_relaxed_line_rasterization === + VULKAN_HPP_CONSTEXPR_INLINE auto IMGRelaxedLineRasterizationExtensionName = VK_IMG_RELAXED_LINE_RASTERIZATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto IMGRelaxedLineRasterizationSpecVersion = VK_IMG_RELAXED_LINE_RASTERIZATION_SPEC_VERSION; + + //=== VK_KHR_shared_presentable_image === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSharedPresentableImageExtensionName = VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSharedPresentableImageSpecVersion = VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION; + + //=== VK_KHR_external_fence_capabilities === + VULKAN_HPP_DEPRECATED( "The VK_KHR_external_fence_capabilities extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceCapabilitiesExtensionName = VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_external_fence_capabilities extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceCapabilitiesSpecVersion = VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION; + + //=== VK_KHR_external_fence === + VULKAN_HPP_DEPRECATED( "The VK_KHR_external_fence extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceExtensionName = VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_external_fence extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceSpecVersion = VK_KHR_EXTERNAL_FENCE_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceWin32ExtensionName = VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceWin32SpecVersion = VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceFdExtensionName = VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceFdSpecVersion = VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION; + + //=== VK_KHR_performance_query === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPerformanceQueryExtensionName = VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPerformanceQuerySpecVersion = VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION; + + //=== VK_KHR_maintenance2 === + VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance2 extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance2ExtensionName = VK_KHR_MAINTENANCE_2_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance2 extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance2SpecVersion = VK_KHR_MAINTENANCE_2_SPEC_VERSION; + + //=== VK_KHR_get_surface_capabilities2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetSurfaceCapabilities2ExtensionName = VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetSurfaceCapabilities2SpecVersion = VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION; + + //=== VK_KHR_variable_pointers === + VULKAN_HPP_DEPRECATED( "The VK_KHR_variable_pointers extension has been promoted to core in version 1.1." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVariablePointersExtensionName = VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_variable_pointers extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVariablePointersSpecVersion = VK_KHR_VARIABLE_POINTERS_SPEC_VERSION; + + //=== VK_KHR_get_display_properties2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetDisplayProperties2ExtensionName = VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetDisplayProperties2SpecVersion = VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + VULKAN_HPP_DEPRECATED( "The VK_MVK_ios_surface extension has been deprecated by VK_EXT_metal_surface." ) + VULKAN_HPP_CONSTEXPR_INLINE auto MVKIosSurfaceExtensionName = VK_MVK_IOS_SURFACE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_MVK_ios_surface extension has been deprecated by VK_EXT_metal_surface." ) + VULKAN_HPP_CONSTEXPR_INLINE auto MVKIosSurfaceSpecVersion = VK_MVK_IOS_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + VULKAN_HPP_DEPRECATED( "The VK_MVK_macos_surface extension has been deprecated by VK_EXT_metal_surface." ) + VULKAN_HPP_CONSTEXPR_INLINE auto MVKMacosSurfaceExtensionName = VK_MVK_MACOS_SURFACE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_MVK_macos_surface extension has been deprecated by VK_EXT_metal_surface." ) + VULKAN_HPP_CONSTEXPR_INLINE auto MVKMacosSurfaceSpecVersion = VK_MVK_MACOS_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_external_memory_dma_buf === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryDmaBufExtensionName = VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryDmaBufSpecVersion = VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION; + + //=== VK_EXT_queue_family_foreign === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTQueueFamilyForeignExtensionName = VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTQueueFamilyForeignSpecVersion = VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION; + + //=== VK_KHR_dedicated_allocation === + VULKAN_HPP_DEPRECATED( "The VK_KHR_dedicated_allocation extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDedicatedAllocationExtensionName = VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_dedicated_allocation extension has been promoted to core in version 1.1." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDedicatedAllocationSpecVersion = VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION; + + //=== VK_EXT_debug_utils === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugUtilsExtensionName = VK_EXT_DEBUG_UTILS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugUtilsSpecVersion = VK_EXT_DEBUG_UTILS_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + VULKAN_HPP_CONSTEXPR_INLINE auto ANDROIDExternalMemoryAndroidHardwareBufferExtensionName = VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ANDROIDExternalMemoryAndroidHardwareBufferSpecVersion = VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_EXT_sampler_filter_minmax === + VULKAN_HPP_DEPRECATED( "The VK_EXT_sampler_filter_minmax extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSamplerFilterMinmaxExtensionName = VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_sampler_filter_minmax extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSamplerFilterMinmaxSpecVersion = VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION; + + //=== VK_KHR_storage_buffer_storage_class === + VULKAN_HPP_DEPRECATED( "The VK_KHR_storage_buffer_storage_class extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRStorageBufferStorageClassExtensionName = VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_storage_buffer_storage_class extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRStorageBufferStorageClassSpecVersion = VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION; + + //=== VK_AMD_gpu_shader_int16 === + VULKAN_HPP_DEPRECATED( "The VK_AMD_gpu_shader_int16 extension has been deprecated by VK_KHR_shader_float16_int8." ) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGpuShaderInt16ExtensionName = VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_AMD_gpu_shader_int16 extension has been deprecated by VK_KHR_shader_float16_int8." ) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGpuShaderInt16SpecVersion = VK_AMD_GPU_SHADER_INT16_SPEC_VERSION; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDXShaderEnqueueExtensionName = VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDXShaderEnqueueSpecVersion = VK_AMDX_SHADER_ENQUEUE_SPEC_VERSION; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_AMD_mixed_attachment_samples === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDMixedAttachmentSamplesExtensionName = VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDMixedAttachmentSamplesSpecVersion = VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION; + + //=== VK_AMD_shader_fragment_mask === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderFragmentMaskExtensionName = VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderFragmentMaskSpecVersion = VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION; + + //=== VK_EXT_inline_uniform_block === + VULKAN_HPP_DEPRECATED( "The VK_EXT_inline_uniform_block extension has been promoted to core in version 1.3." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTInlineUniformBlockExtensionName = VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_inline_uniform_block extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTInlineUniformBlockSpecVersion = VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION; + + //=== VK_EXT_shader_stencil_export === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderStencilExportExtensionName = VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderStencilExportSpecVersion = VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION; + + //=== VK_EXT_sample_locations === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSampleLocationsExtensionName = VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSampleLocationsSpecVersion = VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION; + + //=== VK_KHR_relaxed_block_layout === + VULKAN_HPP_DEPRECATED( "The VK_KHR_relaxed_block_layout extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRelaxedBlockLayoutExtensionName = VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_relaxed_block_layout extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRelaxedBlockLayoutSpecVersion = VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION; + + //=== VK_KHR_get_memory_requirements2 === + VULKAN_HPP_DEPRECATED( "The VK_KHR_get_memory_requirements2 extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetMemoryRequirements2ExtensionName = VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_get_memory_requirements2 extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetMemoryRequirements2SpecVersion = VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION; + + //=== VK_KHR_image_format_list === + VULKAN_HPP_DEPRECATED( "The VK_KHR_image_format_list extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRImageFormatListExtensionName = VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_image_format_list extension has been promoted to core in version 1.2." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRImageFormatListSpecVersion = VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION; + + //=== VK_EXT_blend_operation_advanced === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBlendOperationAdvancedExtensionName = VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBlendOperationAdvancedSpecVersion = VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION; + + //=== VK_NV_fragment_coverage_to_color === + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentCoverageToColorExtensionName = VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentCoverageToColorSpecVersion = VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION; + + //=== VK_KHR_acceleration_structure === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRAccelerationStructureExtensionName = VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRAccelerationStructureSpecVersion = VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION; + + //=== VK_KHR_ray_tracing_pipeline === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingPipelineExtensionName = VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingPipelineSpecVersion = VK_KHR_RAY_TRACING_PIPELINE_SPEC_VERSION; + + //=== VK_KHR_ray_query === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayQueryExtensionName = VK_KHR_RAY_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayQuerySpecVersion = VK_KHR_RAY_QUERY_SPEC_VERSION; + + //=== VK_NV_framebuffer_mixed_samples === + VULKAN_HPP_CONSTEXPR_INLINE auto NVFramebufferMixedSamplesExtensionName = VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVFramebufferMixedSamplesSpecVersion = VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION; + + //=== VK_NV_fill_rectangle === + VULKAN_HPP_CONSTEXPR_INLINE auto NVFillRectangleExtensionName = VK_NV_FILL_RECTANGLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVFillRectangleSpecVersion = VK_NV_FILL_RECTANGLE_SPEC_VERSION; + + //=== VK_NV_shader_sm_builtins === + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderSmBuiltinsExtensionName = VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderSmBuiltinsSpecVersion = VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION; + + //=== VK_EXT_post_depth_coverage === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPostDepthCoverageExtensionName = VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPostDepthCoverageSpecVersion = VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION; + + //=== VK_KHR_sampler_ycbcr_conversion === + VULKAN_HPP_DEPRECATED( "The VK_KHR_sampler_ycbcr_conversion extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerYcbcrConversionExtensionName = VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_sampler_ycbcr_conversion extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerYcbcrConversionSpecVersion = VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION; + + //=== VK_KHR_bind_memory2 === + VULKAN_HPP_DEPRECATED( "The VK_KHR_bind_memory2 extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRBindMemory2ExtensionName = VK_KHR_BIND_MEMORY_2_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_bind_memory2 extension has been promoted to core in version 1.1." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRBindMemory2SpecVersion = VK_KHR_BIND_MEMORY_2_SPEC_VERSION; + + //=== VK_EXT_image_drm_format_modifier === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageDrmFormatModifierExtensionName = VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageDrmFormatModifierSpecVersion = VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION; + + //=== VK_EXT_validation_cache === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationCacheExtensionName = VK_EXT_VALIDATION_CACHE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationCacheSpecVersion = VK_EXT_VALIDATION_CACHE_SPEC_VERSION; + + //=== VK_EXT_descriptor_indexing === + VULKAN_HPP_DEPRECATED( "The VK_EXT_descriptor_indexing extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorIndexingExtensionName = VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_descriptor_indexing extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorIndexingSpecVersion = VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION; + + //=== VK_EXT_shader_viewport_index_layer === + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_viewport_index_layer extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderViewportIndexLayerExtensionName = VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_viewport_index_layer extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderViewportIndexLayerSpecVersion = VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_portability_subset === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPortabilitySubsetExtensionName = VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPortabilitySubsetSpecVersion = VK_KHR_PORTABILITY_SUBSET_SPEC_VERSION; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_shading_rate_image === + VULKAN_HPP_CONSTEXPR_INLINE auto NVShadingRateImageExtensionName = VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVShadingRateImageSpecVersion = VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION; + + //=== VK_NV_ray_tracing === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingExtensionName = VK_NV_RAY_TRACING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingSpecVersion = VK_NV_RAY_TRACING_SPEC_VERSION; + + //=== VK_NV_representative_fragment_test === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRepresentativeFragmentTestExtensionName = VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVRepresentativeFragmentTestSpecVersion = VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION; + + //=== VK_KHR_maintenance3 === + VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance3 extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance3ExtensionName = VK_KHR_MAINTENANCE_3_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance3 extension has been promoted to core in version 1.1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance3SpecVersion = VK_KHR_MAINTENANCE_3_SPEC_VERSION; + + //=== VK_KHR_draw_indirect_count === + VULKAN_HPP_DEPRECATED( "The VK_KHR_draw_indirect_count extension has been promoted to core in version 1.2." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDrawIndirectCountExtensionName = VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_draw_indirect_count extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDrawIndirectCountSpecVersion = VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION; + + //=== VK_EXT_filter_cubic === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFilterCubicExtensionName = VK_EXT_FILTER_CUBIC_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFilterCubicSpecVersion = VK_EXT_FILTER_CUBIC_SPEC_VERSION; + + //=== VK_QCOM_render_pass_shader_resolve === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassShaderResolveExtensionName = VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassShaderResolveSpecVersion = VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION; + + //=== VK_EXT_global_priority === + VULKAN_HPP_DEPRECATED( "The VK_EXT_global_priority extension has been promoted to VK_KHR_global_priority." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPriorityExtensionName = VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_global_priority extension has been promoted to VK_KHR_global_priority." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPrioritySpecVersion = VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION; + + //=== VK_KHR_shader_subgroup_extended_types === + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_subgroup_extended_types extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupExtendedTypesExtensionName = VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_subgroup_extended_types extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupExtendedTypesSpecVersion = VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION; + + //=== VK_KHR_8bit_storage === + VULKAN_HPP_DEPRECATED( "The VK_KHR_8bit_storage extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHR8BitStorageExtensionName = VK_KHR_8BIT_STORAGE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_8bit_storage extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHR8BitStorageSpecVersion = VK_KHR_8BIT_STORAGE_SPEC_VERSION; + + //=== VK_EXT_external_memory_host === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryHostExtensionName = VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryHostSpecVersion = VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION; + + //=== VK_AMD_buffer_marker === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDBufferMarkerExtensionName = VK_AMD_BUFFER_MARKER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDBufferMarkerSpecVersion = VK_AMD_BUFFER_MARKER_SPEC_VERSION; + + //=== VK_KHR_shader_atomic_int64 === + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_atomic_int64 extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderAtomicInt64ExtensionName = VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_atomic_int64 extension has been promoted to core in version 1.2." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderAtomicInt64SpecVersion = VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION; + + //=== VK_KHR_shader_clock === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderClockExtensionName = VK_KHR_SHADER_CLOCK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderClockSpecVersion = VK_KHR_SHADER_CLOCK_SPEC_VERSION; + + //=== VK_AMD_pipeline_compiler_control === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDPipelineCompilerControlExtensionName = VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDPipelineCompilerControlSpecVersion = VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION; + + //=== VK_EXT_calibrated_timestamps === + VULKAN_HPP_DEPRECATED( "The VK_EXT_calibrated_timestamps extension has been promoted to VK_KHR_calibrated_timestamps." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTCalibratedTimestampsExtensionName = VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_calibrated_timestamps extension has been promoted to VK_KHR_calibrated_timestamps." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTCalibratedTimestampsSpecVersion = VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION; + + //=== VK_AMD_shader_core_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderCorePropertiesExtensionName = VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderCorePropertiesSpecVersion = VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION; + + //=== VK_KHR_video_decode_h265 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeH265ExtensionName = VK_KHR_VIDEO_DECODE_H265_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeH265SpecVersion = VK_KHR_VIDEO_DECODE_H265_SPEC_VERSION; + + //=== VK_KHR_global_priority === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGlobalPriorityExtensionName = VK_KHR_GLOBAL_PRIORITY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGlobalPrioritySpecVersion = VK_KHR_GLOBAL_PRIORITY_SPEC_VERSION; + + //=== VK_AMD_memory_overallocation_behavior === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDMemoryOverallocationBehaviorExtensionName = VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDMemoryOverallocationBehaviorSpecVersion = VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION; + + //=== VK_EXT_vertex_attribute_divisor === + VULKAN_HPP_DEPRECATED( "The VK_EXT_vertex_attribute_divisor extension has been promoted to VK_KHR_vertex_attribute_divisor." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeDivisorExtensionName = VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_vertex_attribute_divisor extension has been promoted to VK_KHR_vertex_attribute_divisor." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeDivisorSpecVersion = VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_frame_token === + VULKAN_HPP_CONSTEXPR_INLINE auto GGPFrameTokenExtensionName = VK_GGP_FRAME_TOKEN_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GGPFrameTokenSpecVersion = VK_GGP_FRAME_TOKEN_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_EXT_pipeline_creation_feedback === + VULKAN_HPP_DEPRECATED( "The VK_EXT_pipeline_creation_feedback extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationFeedbackExtensionName = VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_pipeline_creation_feedback extension has been promoted to core in version 1.3." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationFeedbackSpecVersion = VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION; + + //=== VK_KHR_driver_properties === + VULKAN_HPP_DEPRECATED( "The VK_KHR_driver_properties extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDriverPropertiesExtensionName = VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_driver_properties extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDriverPropertiesSpecVersion = VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION; + + //=== VK_KHR_shader_float_controls === + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_float_controls extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloatControlsExtensionName = VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_float_controls extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloatControlsSpecVersion = VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION; + + //=== VK_NV_shader_subgroup_partitioned === + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderSubgroupPartitionedExtensionName = VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderSubgroupPartitionedSpecVersion = VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION; + + //=== VK_KHR_depth_stencil_resolve === + VULKAN_HPP_DEPRECATED( "The VK_KHR_depth_stencil_resolve extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDepthStencilResolveExtensionName = VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_depth_stencil_resolve extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDepthStencilResolveSpecVersion = VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION; + + //=== VK_KHR_swapchain_mutable_format === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSwapchainMutableFormatExtensionName = VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSwapchainMutableFormatSpecVersion = VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION; + + //=== VK_NV_compute_shader_derivatives === + VULKAN_HPP_CONSTEXPR_INLINE auto NVComputeShaderDerivativesExtensionName = VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVComputeShaderDerivativesSpecVersion = VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION; + + //=== VK_NV_mesh_shader === + VULKAN_HPP_CONSTEXPR_INLINE auto NVMeshShaderExtensionName = VK_NV_MESH_SHADER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVMeshShaderSpecVersion = VK_NV_MESH_SHADER_SPEC_VERSION; + + //=== VK_NV_fragment_shader_barycentric === + VULKAN_HPP_DEPRECATED( "The VK_NV_fragment_shader_barycentric extension has been promoted to VK_KHR_fragment_shader_barycentric." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShaderBarycentricExtensionName = VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_fragment_shader_barycentric extension has been promoted to VK_KHR_fragment_shader_barycentric." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShaderBarycentricSpecVersion = VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION; + + //=== VK_NV_shader_image_footprint === + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderImageFootprintExtensionName = VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderImageFootprintSpecVersion = VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION; + + //=== VK_NV_scissor_exclusive === + VULKAN_HPP_CONSTEXPR_INLINE auto NVScissorExclusiveExtensionName = VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVScissorExclusiveSpecVersion = VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION; + + //=== VK_NV_device_diagnostic_checkpoints === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceDiagnosticCheckpointsExtensionName = VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceDiagnosticCheckpointsSpecVersion = VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION; + + //=== VK_KHR_timeline_semaphore === + VULKAN_HPP_DEPRECATED( "The VK_KHR_timeline_semaphore extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRTimelineSemaphoreExtensionName = VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_timeline_semaphore extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRTimelineSemaphoreSpecVersion = VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION; + + //=== VK_INTEL_shader_integer_functions2 === + VULKAN_HPP_CONSTEXPR_INLINE auto INTELShaderIntegerFunctions2ExtensionName = VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto INTELShaderIntegerFunctions2SpecVersion = VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION; + + //=== VK_INTEL_performance_query === + VULKAN_HPP_CONSTEXPR_INLINE auto INTELPerformanceQueryExtensionName = VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto INTELPerformanceQuerySpecVersion = VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION; + + //=== VK_KHR_vulkan_memory_model === + VULKAN_HPP_DEPRECATED( "The VK_KHR_vulkan_memory_model extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVulkanMemoryModelExtensionName = VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_vulkan_memory_model extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVulkanMemoryModelSpecVersion = VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION; + + //=== VK_EXT_pci_bus_info === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPciBusInfoExtensionName = VK_EXT_PCI_BUS_INFO_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPciBusInfoSpecVersion = VK_EXT_PCI_BUS_INFO_SPEC_VERSION; + + //=== VK_AMD_display_native_hdr === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDisplayNativeHdrExtensionName = VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDisplayNativeHdrSpecVersion = VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAImagepipeSurfaceExtensionName = VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAImagepipeSurfaceSpecVersion = VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_KHR_shader_terminate_invocation === + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_terminate_invocation extension has been promoted to core in version 1.3." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderTerminateInvocationExtensionName = VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_terminate_invocation extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderTerminateInvocationSpecVersion = VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMetalSurfaceExtensionName = VK_EXT_METAL_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMetalSurfaceSpecVersion = VK_EXT_METAL_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMapExtensionName = VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMapSpecVersion = VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION; + + //=== VK_EXT_scalar_block_layout === + VULKAN_HPP_DEPRECATED( "The VK_EXT_scalar_block_layout extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTScalarBlockLayoutExtensionName = VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_scalar_block_layout extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTScalarBlockLayoutSpecVersion = VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION; + + //=== VK_GOOGLE_hlsl_functionality1 === + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEHlslFunctionality1ExtensionName = VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEHlslFunctionality1SpecVersion = VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION; + + //=== VK_GOOGLE_decorate_string === + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEDecorateStringExtensionName = VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEDecorateStringSpecVersion = VK_GOOGLE_DECORATE_STRING_SPEC_VERSION; + + //=== VK_EXT_subgroup_size_control === + VULKAN_HPP_DEPRECATED( "The VK_EXT_subgroup_size_control extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubgroupSizeControlExtensionName = VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_subgroup_size_control extension has been promoted to core in version 1.3." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubgroupSizeControlSpecVersion = VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION; + + //=== VK_KHR_fragment_shading_rate === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFragmentShadingRateExtensionName = VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFragmentShadingRateSpecVersion = VK_KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION; + + //=== VK_AMD_shader_core_properties2 === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderCoreProperties2ExtensionName = VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderCoreProperties2SpecVersion = VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION; + + //=== VK_AMD_device_coherent_memory === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDeviceCoherentMemoryExtensionName = VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDeviceCoherentMemorySpecVersion = VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION; + + //=== VK_EXT_shader_image_atomic_int64 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderImageAtomicInt64ExtensionName = VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderImageAtomicInt64SpecVersion = VK_EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION; + + //=== VK_KHR_spirv_1_4 === + VULKAN_HPP_DEPRECATED( "The VK_KHR_spirv_1_4 extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSpirv14ExtensionName = VK_KHR_SPIRV_1_4_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_spirv_1_4 extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSpirv14SpecVersion = VK_KHR_SPIRV_1_4_SPEC_VERSION; + + //=== VK_EXT_memory_budget === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMemoryBudgetExtensionName = VK_EXT_MEMORY_BUDGET_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMemoryBudgetSpecVersion = VK_EXT_MEMORY_BUDGET_SPEC_VERSION; + + //=== VK_EXT_memory_priority === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMemoryPriorityExtensionName = VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMemoryPrioritySpecVersion = VK_EXT_MEMORY_PRIORITY_SPEC_VERSION; + + //=== VK_KHR_surface_protected_capabilities === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSurfaceProtectedCapabilitiesExtensionName = VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSurfaceProtectedCapabilitiesSpecVersion = VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION; + + //=== VK_NV_dedicated_allocation_image_aliasing === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDedicatedAllocationImageAliasingExtensionName = VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDedicatedAllocationImageAliasingSpecVersion = VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION; + + //=== VK_KHR_separate_depth_stencil_layouts === + VULKAN_HPP_DEPRECATED( "The VK_KHR_separate_depth_stencil_layouts extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSeparateDepthStencilLayoutsExtensionName = VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_separate_depth_stencil_layouts extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSeparateDepthStencilLayoutsSpecVersion = VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION; + + //=== VK_EXT_buffer_device_address === + VULKAN_HPP_DEPRECATED( "The VK_EXT_buffer_device_address extension has been deprecated by VK_KHR_buffer_device_address." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBufferDeviceAddressExtensionName = VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_buffer_device_address extension has been deprecated by VK_KHR_buffer_device_address." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBufferDeviceAddressSpecVersion = VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION; + + //=== VK_EXT_tooling_info === + VULKAN_HPP_DEPRECATED( "The VK_EXT_tooling_info extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTToolingInfoExtensionName = VK_EXT_TOOLING_INFO_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_tooling_info extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTToolingInfoSpecVersion = VK_EXT_TOOLING_INFO_SPEC_VERSION; + + //=== VK_EXT_separate_stencil_usage === + VULKAN_HPP_DEPRECATED( "The VK_EXT_separate_stencil_usage extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSeparateStencilUsageExtensionName = VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_separate_stencil_usage extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSeparateStencilUsageSpecVersion = VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION; + + //=== VK_EXT_validation_features === + VULKAN_HPP_DEPRECATED( "The VK_EXT_validation_features extension has been deprecated by VK_EXT_layer_settings." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationFeaturesExtensionName = VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_validation_features extension has been deprecated by VK_EXT_layer_settings." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationFeaturesSpecVersion = VK_EXT_VALIDATION_FEATURES_SPEC_VERSION; + + //=== VK_KHR_present_wait === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentWaitExtensionName = VK_KHR_PRESENT_WAIT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentWaitSpecVersion = VK_KHR_PRESENT_WAIT_SPEC_VERSION; + + //=== VK_NV_cooperative_matrix === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrixExtensionName = VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrixSpecVersion = VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION; + + //=== VK_NV_coverage_reduction_mode === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCoverageReductionModeExtensionName = VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCoverageReductionModeSpecVersion = VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION; + + //=== VK_EXT_fragment_shader_interlock === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentShaderInterlockExtensionName = VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentShaderInterlockSpecVersion = VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION; + + //=== VK_EXT_ycbcr_image_arrays === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcrImageArraysExtensionName = VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcrImageArraysSpecVersion = VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION; + + //=== VK_KHR_uniform_buffer_standard_layout === + VULKAN_HPP_DEPRECATED( "The VK_KHR_uniform_buffer_standard_layout extension has been promoted to core in version 1.2." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRUniformBufferStandardLayoutExtensionName = VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_uniform_buffer_standard_layout extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRUniformBufferStandardLayoutSpecVersion = VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION; + + //=== VK_EXT_provoking_vertex === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTProvokingVertexExtensionName = VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTProvokingVertexSpecVersion = VK_EXT_PROVOKING_VERTEX_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFullScreenExclusiveExtensionName = VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFullScreenExclusiveSpecVersion = VK_EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHeadlessSurfaceExtensionName = VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHeadlessSurfaceSpecVersion = VK_EXT_HEADLESS_SURFACE_SPEC_VERSION; + + //=== VK_KHR_buffer_device_address === + VULKAN_HPP_DEPRECATED( "The VK_KHR_buffer_device_address extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRBufferDeviceAddressExtensionName = VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_buffer_device_address extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRBufferDeviceAddressSpecVersion = VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION; + + //=== VK_EXT_line_rasterization === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLineRasterizationExtensionName = VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLineRasterizationSpecVersion = VK_EXT_LINE_RASTERIZATION_SPEC_VERSION; + + //=== VK_EXT_shader_atomic_float === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloatExtensionName = VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloatSpecVersion = VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION; + + //=== VK_EXT_host_query_reset === + VULKAN_HPP_DEPRECATED( "The VK_EXT_host_query_reset extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHostQueryResetExtensionName = VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_host_query_reset extension has been promoted to core in version 1.2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHostQueryResetSpecVersion = VK_EXT_HOST_QUERY_RESET_SPEC_VERSION; + + //=== VK_EXT_index_type_uint8 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTIndexTypeUint8ExtensionName = VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTIndexTypeUint8SpecVersion = VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION; + + //=== VK_EXT_extended_dynamic_state === + VULKAN_HPP_DEPRECATED( "The VK_EXT_extended_dynamic_state extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicStateExtensionName = VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_extended_dynamic_state extension has been promoted to core in version 1.3." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicStateSpecVersion = VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION; + + //=== VK_KHR_deferred_host_operations === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeferredHostOperationsExtensionName = VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeferredHostOperationsSpecVersion = VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION; + + //=== VK_KHR_pipeline_executable_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineExecutablePropertiesExtensionName = VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineExecutablePropertiesSpecVersion = VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION; + + //=== VK_EXT_host_image_copy === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHostImageCopyExtensionName = VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHostImageCopySpecVersion = VK_EXT_HOST_IMAGE_COPY_SPEC_VERSION; + + //=== VK_KHR_map_memory2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMapMemory2ExtensionName = VK_KHR_MAP_MEMORY_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMapMemory2SpecVersion = VK_KHR_MAP_MEMORY_2_SPEC_VERSION; + + //=== VK_EXT_shader_atomic_float2 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloat2ExtensionName = VK_EXT_SHADER_ATOMIC_FLOAT_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloat2SpecVersion = VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION; + + //=== VK_EXT_surface_maintenance1 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSurfaceMaintenance1ExtensionName = VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSurfaceMaintenance1SpecVersion = VK_EXT_SURFACE_MAINTENANCE_1_SPEC_VERSION; + + //=== VK_EXT_swapchain_maintenance1 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSwapchainMaintenance1ExtensionName = VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSwapchainMaintenance1SpecVersion = VK_EXT_SWAPCHAIN_MAINTENANCE_1_SPEC_VERSION; + + //=== VK_EXT_shader_demote_to_helper_invocation === + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_demote_to_helper_invocation extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderDemoteToHelperInvocationExtensionName = VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_demote_to_helper_invocation extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderDemoteToHelperInvocationSpecVersion = VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION; + + //=== VK_NV_device_generated_commands === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceGeneratedCommandsExtensionName = VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceGeneratedCommandsSpecVersion = VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION; + + //=== VK_NV_inherited_viewport_scissor === + VULKAN_HPP_CONSTEXPR_INLINE auto NVInheritedViewportScissorExtensionName = VK_NV_INHERITED_VIEWPORT_SCISSOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVInheritedViewportScissorSpecVersion = VK_NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION; + + //=== VK_KHR_shader_integer_dot_product === + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_integer_dot_product extension has been promoted to core in version 1.3." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderIntegerDotProductExtensionName = VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_integer_dot_product extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderIntegerDotProductSpecVersion = VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION; + + //=== VK_EXT_texel_buffer_alignment === + VULKAN_HPP_DEPRECATED( "The VK_EXT_texel_buffer_alignment extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTexelBufferAlignmentExtensionName = VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_texel_buffer_alignment extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTexelBufferAlignmentSpecVersion = VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION; + + //=== VK_QCOM_render_pass_transform === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassTransformExtensionName = VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassTransformSpecVersion = VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION; + + //=== VK_EXT_depth_bias_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthBiasControlExtensionName = VK_EXT_DEPTH_BIAS_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthBiasControlSpecVersion = VK_EXT_DEPTH_BIAS_CONTROL_SPEC_VERSION; + + //=== VK_EXT_device_memory_report === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceMemoryReportExtensionName = VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceMemoryReportSpecVersion = VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION; + + //=== VK_EXT_acquire_drm_display === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAcquireDrmDisplayExtensionName = VK_EXT_ACQUIRE_DRM_DISPLAY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAcquireDrmDisplaySpecVersion = VK_EXT_ACQUIRE_DRM_DISPLAY_SPEC_VERSION; + + //=== VK_EXT_robustness2 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRobustness2ExtensionName = VK_EXT_ROBUSTNESS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRobustness2SpecVersion = VK_EXT_ROBUSTNESS_2_SPEC_VERSION; + + //=== VK_EXT_custom_border_color === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTCustomBorderColorExtensionName = VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTCustomBorderColorSpecVersion = VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION; + + //=== VK_GOOGLE_user_type === + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEUserTypeExtensionName = VK_GOOGLE_USER_TYPE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEUserTypeSpecVersion = VK_GOOGLE_USER_TYPE_SPEC_VERSION; + + //=== VK_KHR_pipeline_library === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineLibraryExtensionName = VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineLibrarySpecVersion = VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION; + + //=== VK_NV_present_barrier === + VULKAN_HPP_CONSTEXPR_INLINE auto NVPresentBarrierExtensionName = VK_NV_PRESENT_BARRIER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVPresentBarrierSpecVersion = VK_NV_PRESENT_BARRIER_SPEC_VERSION; + + //=== VK_KHR_shader_non_semantic_info === + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_non_semantic_info extension has been promoted to core in version 1.3." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderNonSemanticInfoExtensionName = VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_non_semantic_info extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderNonSemanticInfoSpecVersion = VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION; + + //=== VK_KHR_present_id === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentIdExtensionName = VK_KHR_PRESENT_ID_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentIdSpecVersion = VK_KHR_PRESENT_ID_SPEC_VERSION; + + //=== VK_EXT_private_data === + VULKAN_HPP_DEPRECATED( "The VK_EXT_private_data extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrivateDataExtensionName = VK_EXT_PRIVATE_DATA_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_private_data extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrivateDataSpecVersion = VK_EXT_PRIVATE_DATA_SPEC_VERSION; + + //=== VK_EXT_pipeline_creation_cache_control === + VULKAN_HPP_DEPRECATED( "The VK_EXT_pipeline_creation_cache_control extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationCacheControlExtensionName = VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_pipeline_creation_cache_control extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationCacheControlSpecVersion = VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION; + + //=== VK_KHR_video_encode_queue === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeQueueExtensionName = VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeQueueSpecVersion = VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION; + + //=== VK_NV_device_diagnostics_config === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceDiagnosticsConfigExtensionName = VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceDiagnosticsConfigSpecVersion = VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION; + + //=== VK_QCOM_render_pass_store_ops === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassStoreOpsExtensionName = VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassStoreOpsSpecVersion = VK_QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCudaKernelLaunchExtensionName = VK_NV_CUDA_KERNEL_LAUNCH_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCudaKernelLaunchSpecVersion = VK_NV_CUDA_KERNEL_LAUNCH_SPEC_VERSION; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_low_latency === + VULKAN_HPP_CONSTEXPR_INLINE auto NVLowLatencyExtensionName = VK_NV_LOW_LATENCY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVLowLatencySpecVersion = VK_NV_LOW_LATENCY_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMetalObjectsExtensionName = VK_EXT_METAL_OBJECTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMetalObjectsSpecVersion = VK_EXT_METAL_OBJECTS_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + VULKAN_HPP_DEPRECATED( "The VK_KHR_synchronization2 extension has been promoted to core in version 1.3." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSynchronization2ExtensionName = VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_synchronization2 extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSynchronization2SpecVersion = VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION; + + //=== VK_EXT_descriptor_buffer === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorBufferExtensionName = VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorBufferSpecVersion = VK_EXT_DESCRIPTOR_BUFFER_SPEC_VERSION; + + //=== VK_EXT_graphics_pipeline_library === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGraphicsPipelineLibraryExtensionName = VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGraphicsPipelineLibrarySpecVersion = VK_EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION; + + //=== VK_AMD_shader_early_and_late_fragment_tests === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderEarlyAndLateFragmentTestsExtensionName = VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderEarlyAndLateFragmentTestsSpecVersion = VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION; + + //=== VK_KHR_fragment_shader_barycentric === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFragmentShaderBarycentricExtensionName = VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFragmentShaderBarycentricSpecVersion = VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION; + + //=== VK_KHR_shader_subgroup_uniform_control_flow === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupUniformControlFlowExtensionName = VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupUniformControlFlowSpecVersion = VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION; + + //=== VK_KHR_zero_initialize_workgroup_memory === + VULKAN_HPP_DEPRECATED( "The VK_KHR_zero_initialize_workgroup_memory extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRZeroInitializeWorkgroupMemoryExtensionName = VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_zero_initialize_workgroup_memory extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRZeroInitializeWorkgroupMemorySpecVersion = VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION; + + //=== VK_NV_fragment_shading_rate_enums === + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShadingRateEnumsExtensionName = VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShadingRateEnumsSpecVersion = VK_NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION; + + //=== VK_NV_ray_tracing_motion_blur === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingMotionBlurExtensionName = VK_NV_RAY_TRACING_MOTION_BLUR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingMotionBlurSpecVersion = VK_NV_RAY_TRACING_MOTION_BLUR_SPEC_VERSION; + + //=== VK_EXT_mesh_shader === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMeshShaderExtensionName = VK_EXT_MESH_SHADER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMeshShaderSpecVersion = VK_EXT_MESH_SHADER_SPEC_VERSION; + + //=== VK_EXT_ycbcr_2plane_444_formats === + VULKAN_HPP_DEPRECATED( "The VK_EXT_ycbcr_2plane_444_formats extension has been promoted to core in version 1.3." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcr2Plane444FormatsExtensionName = VK_EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_ycbcr_2plane_444_formats extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcr2Plane444FormatsSpecVersion = VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION; + + //=== VK_EXT_fragment_density_map2 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMap2ExtensionName = VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMap2SpecVersion = VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION; + + //=== VK_QCOM_rotated_copy_commands === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRotatedCopyCommandsExtensionName = VK_QCOM_ROTATED_COPY_COMMANDS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRotatedCopyCommandsSpecVersion = VK_QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION; + + //=== VK_EXT_image_robustness === + VULKAN_HPP_DEPRECATED( "The VK_EXT_image_robustness extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageRobustnessExtensionName = VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_image_robustness extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageRobustnessSpecVersion = VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION; + + //=== VK_KHR_workgroup_memory_explicit_layout === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWorkgroupMemoryExplicitLayoutExtensionName = VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWorkgroupMemoryExplicitLayoutSpecVersion = VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION; + + //=== VK_KHR_copy_commands2 === + VULKAN_HPP_DEPRECATED( "The VK_KHR_copy_commands2 extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCopyCommands2ExtensionName = VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_copy_commands2 extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCopyCommands2SpecVersion = VK_KHR_COPY_COMMANDS_2_SPEC_VERSION; + + //=== VK_EXT_image_compression_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageCompressionControlExtensionName = VK_EXT_IMAGE_COMPRESSION_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageCompressionControlSpecVersion = VK_EXT_IMAGE_COMPRESSION_CONTROL_SPEC_VERSION; + + //=== VK_EXT_attachment_feedback_loop_layout === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAttachmentFeedbackLoopLayoutExtensionName = VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAttachmentFeedbackLoopLayoutSpecVersion = VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION; + + //=== VK_EXT_4444_formats === + VULKAN_HPP_DEPRECATED( "The VK_EXT_4444_formats extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXT4444FormatsExtensionName = VK_EXT_4444_FORMATS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_4444_formats extension has been promoted to core in version 1.3." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXT4444FormatsSpecVersion = VK_EXT_4444_FORMATS_SPEC_VERSION; + + //=== VK_EXT_device_fault === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceFaultExtensionName = VK_EXT_DEVICE_FAULT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceFaultSpecVersion = VK_EXT_DEVICE_FAULT_SPEC_VERSION; + + //=== VK_ARM_rasterization_order_attachment_access === + VULKAN_HPP_DEPRECATED( "The VK_ARM_rasterization_order_attachment_access extension has been promoted to VK_EXT_rasterization_order_attachment_access." ) + VULKAN_HPP_CONSTEXPR_INLINE auto ARMRasterizationOrderAttachmentAccessExtensionName = VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_ARM_rasterization_order_attachment_access extension has been promoted to VK_EXT_rasterization_order_attachment_access." ) + VULKAN_HPP_CONSTEXPR_INLINE auto ARMRasterizationOrderAttachmentAccessSpecVersion = VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION; + + //=== VK_EXT_rgba10x6_formats === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRgba10X6FormatsExtensionName = VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRgba10X6FormatsSpecVersion = VK_EXT_RGBA10X6_FORMATS_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + VULKAN_HPP_CONSTEXPR_INLINE auto NVAcquireWinrtDisplayExtensionName = VK_NV_ACQUIRE_WINRT_DISPLAY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVAcquireWinrtDisplaySpecVersion = VK_NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDirectfbSurfaceExtensionName = VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDirectfbSurfaceSpecVersion = VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_VALVE_mutable_descriptor_type === + VULKAN_HPP_DEPRECATED( "The VK_VALVE_mutable_descriptor_type extension has been promoted to VK_EXT_mutable_descriptor_type." ) + VULKAN_HPP_CONSTEXPR_INLINE auto VALVEMutableDescriptorTypeExtensionName = VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_VALVE_mutable_descriptor_type extension has been promoted to VK_EXT_mutable_descriptor_type." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto VALVEMutableDescriptorTypeSpecVersion = VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION; + + //=== VK_EXT_vertex_input_dynamic_state === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexInputDynamicStateExtensionName = VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexInputDynamicStateSpecVersion = VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION; + + //=== VK_EXT_physical_device_drm === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPhysicalDeviceDrmExtensionName = VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPhysicalDeviceDrmSpecVersion = VK_EXT_PHYSICAL_DEVICE_DRM_SPEC_VERSION; + + //=== VK_EXT_device_address_binding_report === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceAddressBindingReportExtensionName = VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceAddressBindingReportSpecVersion = VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_SPEC_VERSION; + + //=== VK_EXT_depth_clip_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClipControlExtensionName = VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClipControlSpecVersion = VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION; + + //=== VK_EXT_primitive_topology_list_restart === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrimitiveTopologyListRestartExtensionName = VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrimitiveTopologyListRestartSpecVersion = VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION; + + //=== VK_KHR_format_feature_flags2 === + VULKAN_HPP_DEPRECATED( "The VK_KHR_format_feature_flags2 extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFormatFeatureFlags2ExtensionName = VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_format_feature_flags2 extension has been promoted to core in version 1.3." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFormatFeatureFlags2SpecVersion = VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAExternalMemoryExtensionName = VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAExternalMemorySpecVersion = VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAExternalSemaphoreExtensionName = VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAExternalSemaphoreSpecVersion = VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIABufferCollectionExtensionName = VK_FUCHSIA_BUFFER_COLLECTION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIABufferCollectionSpecVersion = VK_FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEISubpassShadingExtensionName = VK_HUAWEI_SUBPASS_SHADING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEISubpassShadingSpecVersion = VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION; + + //=== VK_HUAWEI_invocation_mask === + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIInvocationMaskExtensionName = VK_HUAWEI_INVOCATION_MASK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIInvocationMaskSpecVersion = VK_HUAWEI_INVOCATION_MASK_SPEC_VERSION; + + //=== VK_NV_external_memory_rdma === + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryRdmaExtensionName = VK_NV_EXTERNAL_MEMORY_RDMA_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryRdmaSpecVersion = VK_NV_EXTERNAL_MEMORY_RDMA_SPEC_VERSION; + + //=== VK_EXT_pipeline_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelinePropertiesExtensionName = VK_EXT_PIPELINE_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelinePropertiesSpecVersion = VK_EXT_PIPELINE_PROPERTIES_SPEC_VERSION; + + //=== VK_EXT_frame_boundary === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFrameBoundaryExtensionName = VK_EXT_FRAME_BOUNDARY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFrameBoundarySpecVersion = VK_EXT_FRAME_BOUNDARY_SPEC_VERSION; + + //=== VK_EXT_multisampled_render_to_single_sampled === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMultisampledRenderToSingleSampledExtensionName = VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMultisampledRenderToSingleSampledSpecVersion = VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION; + + //=== VK_EXT_extended_dynamic_state2 === + VULKAN_HPP_DEPRECATED( "The VK_EXT_extended_dynamic_state2 extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicState2ExtensionName = VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_extended_dynamic_state2 extension has been promoted to core in version 1.3." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicState2SpecVersion = VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto QNXScreenSurfaceExtensionName = VK_QNX_SCREEN_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QNXScreenSurfaceSpecVersion = VK_QNX_SCREEN_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTColorWriteEnableExtensionName = VK_EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTColorWriteEnableSpecVersion = VK_EXT_COLOR_WRITE_ENABLE_SPEC_VERSION; + + //=== VK_EXT_primitives_generated_query === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrimitivesGeneratedQueryExtensionName = VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrimitivesGeneratedQuerySpecVersion = VK_EXT_PRIMITIVES_GENERATED_QUERY_SPEC_VERSION; + + //=== VK_KHR_ray_tracing_maintenance1 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingMaintenance1ExtensionName = VK_KHR_RAY_TRACING_MAINTENANCE_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingMaintenance1SpecVersion = VK_KHR_RAY_TRACING_MAINTENANCE_1_SPEC_VERSION; + + //=== VK_EXT_global_priority_query === + VULKAN_HPP_DEPRECATED( "The VK_EXT_global_priority_query extension has been promoted to VK_KHR_global_priority." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPriorityQueryExtensionName = VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_global_priority_query extension has been promoted to VK_KHR_global_priority." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPriorityQuerySpecVersion = VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION; + + //=== VK_EXT_image_view_min_lod === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageViewMinLodExtensionName = VK_EXT_IMAGE_VIEW_MIN_LOD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageViewMinLodSpecVersion = VK_EXT_IMAGE_VIEW_MIN_LOD_SPEC_VERSION; + + //=== VK_EXT_multi_draw === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMultiDrawExtensionName = VK_EXT_MULTI_DRAW_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMultiDrawSpecVersion = VK_EXT_MULTI_DRAW_SPEC_VERSION; + + //=== VK_EXT_image_2d_view_of_3d === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImage2DViewOf3DExtensionName = VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImage2DViewOf3DSpecVersion = VK_EXT_IMAGE_2D_VIEW_OF_3D_SPEC_VERSION; + + //=== VK_KHR_portability_enumeration === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPortabilityEnumerationExtensionName = VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPortabilityEnumerationSpecVersion = VK_KHR_PORTABILITY_ENUMERATION_SPEC_VERSION; + + //=== VK_EXT_shader_tile_image === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderTileImageExtensionName = VK_EXT_SHADER_TILE_IMAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderTileImageSpecVersion = VK_EXT_SHADER_TILE_IMAGE_SPEC_VERSION; + + //=== VK_EXT_opacity_micromap === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTOpacityMicromapExtensionName = VK_EXT_OPACITY_MICROMAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTOpacityMicromapSpecVersion = VK_EXT_OPACITY_MICROMAP_SPEC_VERSION; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDisplacementMicromapExtensionName = VK_NV_DISPLACEMENT_MICROMAP_EXTENSION_NAME; + 
VULKAN_HPP_CONSTEXPR_INLINE auto NVDisplacementMicromapSpecVersion = VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_load_store_op_none === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLoadStoreOpNoneExtensionName = VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLoadStoreOpNoneSpecVersion = VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION; + + //=== VK_HUAWEI_cluster_culling_shader === + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIClusterCullingShaderExtensionName = VK_HUAWEI_CLUSTER_CULLING_SHADER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIClusterCullingShaderSpecVersion = VK_HUAWEI_CLUSTER_CULLING_SHADER_SPEC_VERSION; + + //=== VK_EXT_border_color_swizzle === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBorderColorSwizzleExtensionName = VK_EXT_BORDER_COLOR_SWIZZLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBorderColorSwizzleSpecVersion = VK_EXT_BORDER_COLOR_SWIZZLE_SPEC_VERSION; + + //=== VK_EXT_pageable_device_local_memory === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPageableDeviceLocalMemoryExtensionName = VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPageableDeviceLocalMemorySpecVersion = VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION; + + //=== VK_KHR_maintenance4 === + VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance4 extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance4ExtensionName = VK_KHR_MAINTENANCE_4_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance4 extension has been promoted to core in version 1.3." ) + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance4SpecVersion = VK_KHR_MAINTENANCE_4_SPEC_VERSION; + + //=== VK_ARM_shader_core_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMShaderCorePropertiesExtensionName = VK_ARM_SHADER_CORE_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMShaderCorePropertiesSpecVersion = VK_ARM_SHADER_CORE_PROPERTIES_SPEC_VERSION; + + //=== VK_ARM_scheduling_controls === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMSchedulingControlsExtensionName = VK_ARM_SCHEDULING_CONTROLS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMSchedulingControlsSpecVersion = VK_ARM_SCHEDULING_CONTROLS_SPEC_VERSION; + + //=== VK_EXT_image_sliced_view_of_3d === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageSlicedViewOf3DExtensionName = VK_EXT_IMAGE_SLICED_VIEW_OF_3D_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageSlicedViewOf3DSpecVersion = VK_EXT_IMAGE_SLICED_VIEW_OF_3D_SPEC_VERSION; + + //=== VK_VALVE_descriptor_set_host_mapping === + VULKAN_HPP_CONSTEXPR_INLINE auto VALVEDescriptorSetHostMappingExtensionName = VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto VALVEDescriptorSetHostMappingSpecVersion = VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_SPEC_VERSION; + + //=== VK_EXT_depth_clamp_zero_one === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampZeroOneExtensionName = VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampZeroOneSpecVersion = VK_EXT_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION; + + //=== VK_EXT_non_seamless_cube_map === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTNonSeamlessCubeMapExtensionName = VK_EXT_NON_SEAMLESS_CUBE_MAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTNonSeamlessCubeMapSpecVersion = VK_EXT_NON_SEAMLESS_CUBE_MAP_SPEC_VERSION; + + //=== VK_ARM_render_pass_striped === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMRenderPassStripedExtensionName = 
VK_ARM_RENDER_PASS_STRIPED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMRenderPassStripedSpecVersion = VK_ARM_RENDER_PASS_STRIPED_SPEC_VERSION; + + //=== VK_QCOM_fragment_density_map_offset === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFragmentDensityMapOffsetExtensionName = VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFragmentDensityMapOffsetSpecVersion = VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION; + + //=== VK_NV_copy_memory_indirect === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCopyMemoryIndirectExtensionName = VK_NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCopyMemoryIndirectSpecVersion = VK_NV_COPY_MEMORY_INDIRECT_SPEC_VERSION; + + //=== VK_NV_memory_decompression === + VULKAN_HPP_CONSTEXPR_INLINE auto NVMemoryDecompressionExtensionName = VK_NV_MEMORY_DECOMPRESSION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVMemoryDecompressionSpecVersion = VK_NV_MEMORY_DECOMPRESSION_SPEC_VERSION; + + //=== VK_NV_device_generated_commands_compute === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceGeneratedCommandsComputeExtensionName = VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceGeneratedCommandsComputeSpecVersion = VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_SPEC_VERSION; + + //=== VK_NV_linear_color_attachment === + VULKAN_HPP_CONSTEXPR_INLINE auto NVLinearColorAttachmentExtensionName = VK_NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVLinearColorAttachmentSpecVersion = VK_NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION; + + //=== VK_GOOGLE_surfaceless_query === + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLESurfacelessQueryExtensionName = VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLESurfacelessQuerySpecVersion = VK_GOOGLE_SURFACELESS_QUERY_SPEC_VERSION; + + //=== VK_EXT_image_compression_control_swapchain === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageCompressionControlSwapchainExtensionName = VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageCompressionControlSwapchainSpecVersion = VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_SPEC_VERSION; + + //=== VK_QCOM_image_processing === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMImageProcessingExtensionName = VK_QCOM_IMAGE_PROCESSING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMImageProcessingSpecVersion = VK_QCOM_IMAGE_PROCESSING_SPEC_VERSION; + + //=== VK_EXT_nested_command_buffer === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTNestedCommandBufferExtensionName = VK_EXT_NESTED_COMMAND_BUFFER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTNestedCommandBufferSpecVersion = VK_EXT_NESTED_COMMAND_BUFFER_SPEC_VERSION; + + //=== VK_EXT_external_memory_acquire_unmodified === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryAcquireUnmodifiedExtensionName = VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryAcquireUnmodifiedSpecVersion = VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_SPEC_VERSION; + + //=== VK_EXT_extended_dynamic_state3 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicState3ExtensionName = VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicState3SpecVersion = VK_EXT_EXTENDED_DYNAMIC_STATE_3_SPEC_VERSION; + + //=== VK_EXT_subpass_merge_feedback === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubpassMergeFeedbackExtensionName = VK_EXT_SUBPASS_MERGE_FEEDBACK_EXTENSION_NAME; + 
VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubpassMergeFeedbackSpecVersion = VK_EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION; + + //=== VK_LUNARG_direct_driver_loading === + VULKAN_HPP_CONSTEXPR_INLINE auto LUNARGDirectDriverLoadingExtensionName = VK_LUNARG_DIRECT_DRIVER_LOADING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto LUNARGDirectDriverLoadingSpecVersion = VK_LUNARG_DIRECT_DRIVER_LOADING_SPEC_VERSION; + + //=== VK_EXT_shader_module_identifier === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderModuleIdentifierExtensionName = VK_EXT_SHADER_MODULE_IDENTIFIER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderModuleIdentifierSpecVersion = VK_EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION; + + //=== VK_EXT_rasterization_order_attachment_access === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRasterizationOrderAttachmentAccessExtensionName = VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRasterizationOrderAttachmentAccessSpecVersion = VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION; + + //=== VK_NV_optical_flow === + VULKAN_HPP_CONSTEXPR_INLINE auto NVOpticalFlowExtensionName = VK_NV_OPTICAL_FLOW_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVOpticalFlowSpecVersion = VK_NV_OPTICAL_FLOW_SPEC_VERSION; + + //=== VK_EXT_legacy_dithering === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLegacyDitheringExtensionName = VK_EXT_LEGACY_DITHERING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLegacyDitheringSpecVersion = VK_EXT_LEGACY_DITHERING_SPEC_VERSION; + + //=== VK_EXT_pipeline_protected_access === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineProtectedAccessExtensionName = VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineProtectedAccessSpecVersion = VK_EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_format_resolve === + VULKAN_HPP_CONSTEXPR_INLINE auto ANDROIDExternalFormatResolveExtensionName = VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ANDROIDExternalFormatResolveSpecVersion = VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_KHR_maintenance5 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance5ExtensionName = VK_KHR_MAINTENANCE_5_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance5SpecVersion = VK_KHR_MAINTENANCE_5_SPEC_VERSION; + + //=== VK_KHR_ray_tracing_position_fetch === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingPositionFetchExtensionName = VK_KHR_RAY_TRACING_POSITION_FETCH_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingPositionFetchSpecVersion = VK_KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION; + + //=== VK_EXT_shader_object === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderObjectExtensionName = VK_EXT_SHADER_OBJECT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderObjectSpecVersion = VK_EXT_SHADER_OBJECT_SPEC_VERSION; + + //=== VK_QCOM_tile_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMTilePropertiesExtensionName = VK_QCOM_TILE_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMTilePropertiesSpecVersion = VK_QCOM_TILE_PROPERTIES_SPEC_VERSION; + + //=== VK_SEC_amigo_profiling === + VULKAN_HPP_CONSTEXPR_INLINE auto SECAmigoProfilingExtensionName = VK_SEC_AMIGO_PROFILING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto SECAmigoProfilingSpecVersion = VK_SEC_AMIGO_PROFILING_SPEC_VERSION; + + //=== VK_QCOM_multiview_per_view_viewports === + 
VULKAN_HPP_CONSTEXPR_INLINE auto QCOMMultiviewPerViewViewportsExtensionName = VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMMultiviewPerViewViewportsSpecVersion = VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_SPEC_VERSION; + + //=== VK_NV_ray_tracing_invocation_reorder === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingInvocationReorderExtensionName = VK_NV_RAY_TRACING_INVOCATION_REORDER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingInvocationReorderSpecVersion = VK_NV_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION; + + //=== VK_NV_extended_sparse_address_space === + VULKAN_HPP_CONSTEXPR_INLINE auto NVExtendedSparseAddressSpaceExtensionName = VK_NV_EXTENDED_SPARSE_ADDRESS_SPACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVExtendedSparseAddressSpaceSpecVersion = VK_NV_EXTENDED_SPARSE_ADDRESS_SPACE_SPEC_VERSION; + + //=== VK_EXT_mutable_descriptor_type === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMutableDescriptorTypeExtensionName = VK_EXT_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMutableDescriptorTypeSpecVersion = VK_EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION; + + //=== VK_EXT_layer_settings === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLayerSettingsExtensionName = VK_EXT_LAYER_SETTINGS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLayerSettingsSpecVersion = VK_EXT_LAYER_SETTINGS_SPEC_VERSION; + + //=== VK_ARM_shader_core_builtins === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMShaderCoreBuiltinsExtensionName = VK_ARM_SHADER_CORE_BUILTINS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMShaderCoreBuiltinsSpecVersion = VK_ARM_SHADER_CORE_BUILTINS_SPEC_VERSION; + + //=== VK_EXT_pipeline_library_group_handles === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineLibraryGroupHandlesExtensionName = VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineLibraryGroupHandlesSpecVersion = VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_SPEC_VERSION; + + //=== VK_EXT_dynamic_rendering_unused_attachments === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDynamicRenderingUnusedAttachmentsExtensionName = VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDynamicRenderingUnusedAttachmentsSpecVersion = VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_SPEC_VERSION; + + //=== VK_NV_low_latency2 === + VULKAN_HPP_CONSTEXPR_INLINE auto NVLowLatency2ExtensionName = VK_NV_LOW_LATENCY_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVLowLatency2SpecVersion = VK_NV_LOW_LATENCY_2_SPEC_VERSION; + + //=== VK_KHR_cooperative_matrix === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCooperativeMatrixExtensionName = VK_KHR_COOPERATIVE_MATRIX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCooperativeMatrixSpecVersion = VK_KHR_COOPERATIVE_MATRIX_SPEC_VERSION; + + //=== VK_QCOM_multiview_per_view_render_areas === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMMultiviewPerViewRenderAreasExtensionName = VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMMultiviewPerViewRenderAreasSpecVersion = VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_SPEC_VERSION; + + //=== VK_KHR_video_maintenance1 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance1ExtensionName = VK_KHR_VIDEO_MAINTENANCE_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance1SpecVersion = VK_KHR_VIDEO_MAINTENANCE_1_SPEC_VERSION; + + //=== VK_NV_per_stage_descriptor_set === + VULKAN_HPP_CONSTEXPR_INLINE auto NVPerStageDescriptorSetExtensionName = 
VK_NV_PER_STAGE_DESCRIPTOR_SET_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto NVPerStageDescriptorSetSpecVersion = VK_NV_PER_STAGE_DESCRIPTOR_SET_SPEC_VERSION;
+
+ //=== VK_QCOM_image_processing2 ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto QCOMImageProcessing2ExtensionName = VK_QCOM_IMAGE_PROCESSING_2_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto QCOMImageProcessing2SpecVersion = VK_QCOM_IMAGE_PROCESSING_2_SPEC_VERSION;
+
+ //=== VK_QCOM_filter_cubic_weights ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFilterCubicWeightsExtensionName = VK_QCOM_FILTER_CUBIC_WEIGHTS_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFilterCubicWeightsSpecVersion = VK_QCOM_FILTER_CUBIC_WEIGHTS_SPEC_VERSION;
+
+ //=== VK_QCOM_ycbcr_degamma ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto QCOMYcbcrDegammaExtensionName = VK_QCOM_YCBCR_DEGAMMA_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto QCOMYcbcrDegammaSpecVersion = VK_QCOM_YCBCR_DEGAMMA_SPEC_VERSION;
+
+ //=== VK_QCOM_filter_cubic_clamp ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFilterCubicClampExtensionName = VK_QCOM_FILTER_CUBIC_CLAMP_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFilterCubicClampSpecVersion = VK_QCOM_FILTER_CUBIC_CLAMP_SPEC_VERSION;
+
+ //=== VK_EXT_attachment_feedback_loop_dynamic_state ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto EXTAttachmentFeedbackLoopDynamicStateExtensionName = VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto EXTAttachmentFeedbackLoopDynamicStateSpecVersion = VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_SPEC_VERSION;
+
+ //=== VK_KHR_vertex_attribute_divisor ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRVertexAttributeDivisorExtensionName = VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRVertexAttributeDivisorSpecVersion = VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION;
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+ //=== VK_QNX_external_memory_screen_buffer ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto QNXExternalMemoryScreenBufferExtensionName = VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto QNXExternalMemoryScreenBufferSpecVersion = VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_SPEC_VERSION;
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+ //=== VK_MSFT_layered_driver ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto MSFTLayeredDriverExtensionName = VK_MSFT_LAYERED_DRIVER_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto MSFTLayeredDriverSpecVersion = VK_MSFT_LAYERED_DRIVER_SPEC_VERSION;
+
+ //=== VK_KHR_calibrated_timestamps ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRCalibratedTimestampsExtensionName = VK_KHR_CALIBRATED_TIMESTAMPS_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRCalibratedTimestampsSpecVersion = VK_KHR_CALIBRATED_TIMESTAMPS_SPEC_VERSION;
+
+ //=== VK_KHR_maintenance6 ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance6ExtensionName = VK_KHR_MAINTENANCE_6_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance6SpecVersion = VK_KHR_MAINTENANCE_6_SPEC_VERSION;
+
+ //=== VK_NV_descriptor_pool_overallocation ===
+ VULKAN_HPP_CONSTEXPR_INLINE auto NVDescriptorPoolOverallocationExtensionName = VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_EXTENSION_NAME;
+ VULKAN_HPP_CONSTEXPR_INLINE auto NVDescriptorPoolOverallocationSpecVersion = VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_SPEC_VERSION;
+
+} // namespace VULKAN_HPP_NAMESPACE
+
+// clang-format off
+#include <vulkan/vulkan_handles.hpp>
+#include <vulkan/vulkan_structs.hpp>
+#include <vulkan/vulkan_funcs.hpp>
+
+// clang-format on
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+
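+ // A usage sketch (illustrative only, assuming the default namespace alias
+ // `vk` for VULKAN_HPP_NAMESPACE): the StructExtends specializations below
+ // are what allow vk::StructureChain to verify at compile time that every
+ // structure in a pNext chain is a legal extension of the structure it is
+ // chained to, e.g.
+ //
+ //   vk::StructureChain<vk::PhysicalDeviceFeatures2,
+ //                      vk::PhysicalDeviceVulkan12Features> chain;
+ //   auto & vk12Features = chain.get<vk::PhysicalDeviceVulkan12Features>();
+ //
+ // Chaining a structure for which no matching StructExtends specialization
+ // exists fails to compile. Likewise, the *ExtensionName constants defined
+ // above can stand in for the raw VK_*_EXTENSION_NAME string macros when
+ // filling an enabled-extension list.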
//======================= + //=== STRUCTS EXTENDS === + //======================= + + //=== VK_VERSION_1_0 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_VERSION_1_1 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + 
struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_VERSION_1_2 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true 
+    };
+  };
+
+  // [Several hundred further StructExtends specializations followed here, covering
+  //  //=== VK_VERSION_1_3 === and the extension sections from //=== VK_KHR_swapchain ===
+  //  through //=== VK_NV_descriptor_pool_overallocation ===, each of the same
+  //  "template <> struct StructExtends<Extending, Base> { enum { value = true }; };"
+  //  form; the template-argument lists were lost in extraction, leaving only the
+  //  //=== section === comments and the platform / beta-extension #if guards
+  //  (VK_USE_PLATFORM_WIN32_KHR, VK_USE_PLATFORM_ANDROID_KHR, VK_USE_PLATFORM_METAL_EXT,
+  //  VK_USE_PLATFORM_FUCHSIA, VK_USE_PLATFORM_GGP, VK_USE_PLATFORM_SCREEN_QNX,
+  //  VK_ENABLE_BETA_EXTENSIONS).]
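For reference, what each of those stripped entries encodes: `StructExtends<Extending, Base>::value` is the compile-time predicate that `vk::StructureChain` consults to decide whether a structure may legally appear in another structure's `pNext` chain. Below is a minimal self-contained sketch of the pattern; the primary template matches the vulkan.hpp convention, but the concrete struct pair is a representative stand-in, not recovered from the stripped lines:

```cpp
// Generic trait: by default, Extending does not extend Base.
template <typename Extending, typename Base>
struct StructExtends
{
  enum
  {
    value = false
  };
};

// Stand-in declarations for two vulkan.hpp structs (illustrative pair).
struct PhysicalDeviceFeatures2;
struct PhysicalDeviceVulkan13Features;

// One generated specialization per legal pNext pairing flips the value to true.
template <>
struct StructExtends<PhysicalDeviceVulkan13Features, PhysicalDeviceFeatures2>
{
  enum
  {
    value = true
  };
};

// StructureChain can then reject invalid chains at compile time:
static_assert( StructExtends<PhysicalDeviceVulkan13Features, PhysicalDeviceFeatures2>::value,
               "may be chained into PhysicalDeviceFeatures2::pNext" );
```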
+  template <> struct StructExtends<PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX, PhysicalDeviceFeatures2> { enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX, DeviceCreateInfo> { enum { value = true }; };
+# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_MSFT_layered_driver ===
+  template <> struct StructExtends<PhysicalDeviceLayeredDriverPropertiesMSFT, PhysicalDeviceProperties2> { enum { value = true }; };
+
+  //=== VK_KHR_maintenance6 ===
+  template <> struct StructExtends<PhysicalDeviceMaintenance6FeaturesKHR, PhysicalDeviceFeatures2> { enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMaintenance6FeaturesKHR, DeviceCreateInfo> { enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMaintenance6PropertiesKHR, PhysicalDeviceProperties2> { enum { value = true }; };
+  template <> struct StructExtends<BindMemoryStatusKHR, BindBufferMemoryInfo> { enum { value = true }; };
+  template <> struct StructExtends<BindMemoryStatusKHR, BindImageMemoryInfo> { enum { value = true }; };
+
+  //=== VK_NV_descriptor_pool_overallocation ===
+  template <> struct StructExtends<PhysicalDeviceDescriptorPoolOverallocationFeaturesNV, PhysicalDeviceFeatures2> { enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorPoolOverallocationFeaturesNV, DeviceCreateInfo> { enum { value = true }; };
+
+#endif  // VULKAN_HPP_DISABLE_ENHANCED_MODE
+
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+  class DynamicLoader
+  {
+  public:
+#  ifdef VULKAN_HPP_NO_EXCEPTIONS
+    DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT
+#  else
+    DynamicLoader( std::string const & vulkanLibraryName = {} )
+#  endif
+    {
+      if ( !vulkanLibraryName.empty() )
+      {
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ )
+        m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL );
+#  elif defined( _WIN32 )
+        m_library = ::LoadLibraryA( vulkanLibraryName.c_str() );
+#  else
+#    error unsupported platform
+#  endif
+      }
+      else
+      {
+#  if defined( __unix__ ) || defined( __QNX__ ) || defined( __Fuchsia__ )
+        m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL );
+        if ( m_library == nullptr )
+        {
+          m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL );
+        }
+#  elif defined( __APPLE__ )
+        m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
+#  elif defined( _WIN32 )
+        m_library = ::LoadLibraryA( "vulkan-1.dll" );
+#  else
+#    error unsupported platform
+#  endif
+      }
+
+#  ifndef VULKAN_HPP_NO_EXCEPTIONS
+      if ( m_library == nullptr )
+      {
+        // NOTE there should be an InitializationFailedError, but msvc insists that the symbol does not exist within the scope of this function.
+        throw std::runtime_error( "Failed to load vulkan library!" );
+      }
+#  endif
+    }
+
+    DynamicLoader( DynamicLoader const & ) = delete;
+
+    DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library( other.m_library )
+    {
+      other.m_library = nullptr;
+    }
+
+    DynamicLoader & operator=( DynamicLoader const & ) = delete;
+
+    DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap( m_library, other.m_library );
+      return *this;
+    }
+
+    ~DynamicLoader() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_library )
+      {
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ )
+        dlclose( m_library );
+#  elif defined( _WIN32 )
+        ::FreeLibrary( m_library );
+#  else
+#    error unsupported platform
+#  endif
+      }
+    }
+
+    template <typename T>
+    T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ )
+      return (T)dlsym( m_library, function );
+#  elif defined( _WIN32 )
+      return ( T )::GetProcAddress( m_library, function );
+#  else
+#    error unsupported platform
+#  endif
+    }
+
+    bool success() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_library != nullptr;
+    }
+
+  private:
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ )
+    void * m_library;
+#  elif defined( _WIN32 )
+    ::HINSTANCE m_library;
+#  else
+#    error unsupported platform
+#  endif
+  };
+#endif
+
+  using PFN_dummy = void ( * )();
+
+  class DispatchLoaderDynamic : public DispatchLoaderBase
+  {
+  public:
+    //=== VK_VERSION_1_0 ===
+    PFN_vkCreateInstance vkCreateInstance = 0;
+    PFN_vkDestroyInstance vkDestroyInstance = 0;
+    PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
+    PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0;
+    PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
+    PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
+    PFN_vkCreateDevice vkCreateDevice = 0;
+    PFN_vkDestroyDevice vkDestroyDevice = 0;
+    PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
+    PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
+    PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
+    PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
+    PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
+    PFN_vkQueueSubmit vkQueueSubmit = 0;
+    PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
+    PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
+    PFN_vkAllocateMemory vkAllocateMemory = 0;
+    PFN_vkFreeMemory vkFreeMemory = 0;
+    PFN_vkMapMemory vkMapMemory = 0;
+    PFN_vkUnmapMemory vkUnmapMemory = 0;
+    PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
+    PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
+    PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
+    PFN_vkBindBufferMemory vkBindBufferMemory = 0;
+    PFN_vkBindImageMemory vkBindImageMemory = 0;
+    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
+    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0;
+    PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0;
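The `StructExtends` specializations above are the compile-time registry behind Vulkan-Hpp's `vk::StructureChain`: a chain only compiles when every extension struct is registered as a valid `pNext` extension of its base. A minimal illustrative sketch of the effect (not part of this diff; it uses only standard Vulkan-Hpp names and assumes the header is included and a dispatcher is available):

```cpp
#include <vulkan/vulkan.hpp>

// StructExtends<X, Y>::value == true is what lets X join a StructureChain
// rooted at Y; an unregistered pairing is rejected at compile time instead
// of producing a dangling pNext chain at runtime.
vk::PhysicalDeviceFeatures2 queryFeatures( vk::PhysicalDevice physicalDevice )
{
  // PhysicalDeviceVulkan12Features is a registered extension of
  // PhysicalDeviceFeatures2, so this chain compiles and its constructor
  // wires the pNext pointers together.
  vk::StructureChain<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features> chain;
  physicalDevice.getFeatures2( &chain.get<vk::PhysicalDeviceFeatures2>() );
  return chain.get<vk::PhysicalDeviceFeatures2>();
}
```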
PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + PFN_vkQueueBindSparse vkQueueBindSparse = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkDestroyFence vkDestroyFence = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkGetFenceStatus vkGetFenceStatus = 0; + PFN_vkWaitForFences vkWaitForFences = 0; + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkCreateImage vkCreateImage = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkDestroyImageView vkDestroyImageView = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilWriteMask 
vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; + + //=== VK_VERSION_1_1 === + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + 
PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; + + //=== VK_VERSION_1_2 === + PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; + PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; + PFN_vkResetQueryPool vkResetQueryPool = 0; + PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; + PFN_vkWaitSemaphores vkWaitSemaphores = 0; + PFN_vkSignalSemaphore vkSignalSemaphore = 0; + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; + PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; + + //=== VK_VERSION_1_3 === + PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; + PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; + PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; + PFN_vkSetPrivateData vkSetPrivateData = 0; + PFN_vkGetPrivateData vkGetPrivateData = 0; + PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; + PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; + PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; + PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; + PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; + PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; + PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; + PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; + PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; + PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; + PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; + PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; + PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; + PFN_vkCmdEndRendering vkCmdEndRendering = 0; + PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; + PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; + PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; + PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; + PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; + PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; + PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; + PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; + PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; + PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; + PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; + PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; + PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; + PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; + PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; + PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; + PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; + PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; + + //=== VK_KHR_surface === + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR 
vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; + + //=== VK_KHR_swapchain === + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; + PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; + + //=== VK_KHR_display === + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; + + //=== VK_KHR_display_swapchain === + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; +#else + PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; +#else + PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; +#else + PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; +#else + PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +#else + PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + 
PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; + + //=== VK_EXT_debug_marker === + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; + + //=== VK_KHR_video_queue === + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; + PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; + PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; + PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; + PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; + PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; + PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; + PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; + PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; + PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; + PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; + + //=== VK_KHR_video_decode_queue === + PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; + + //=== VK_EXT_transform_feedback === + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; + PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; + + //=== VK_NVX_binary_import === + PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; + PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; + PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; + PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; + PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; + + //=== VK_NVX_image_view_handle === + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + + //=== VK_AMD_draw_indirect_count === + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; + + //=== VK_AMD_shader_info === + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; + + //=== VK_KHR_dynamic_rendering === + PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; + PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; +#else + PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +#else + PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== 
VK_KHR_get_physical_device_properties2 === + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; + + //=== VK_KHR_device_group === + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; +#else + PFN_dummy vkCreateViSurfaceNN_placeholder = 0; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; + + //=== VK_KHR_device_group_creation === + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; + + //=== VK_KHR_external_memory_capabilities === + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +#else + PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; + + //=== VK_KHR_external_semaphore_capabilities === + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +#else + PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; + + //=== VK_KHR_push_descriptor === + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; + + //=== VK_EXT_conditional_rendering === + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; + + //=== VK_KHR_descriptor_update_template === + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; + + //=== VK_NV_clip_space_w_scaling === + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; + + 
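Each PFN member in this dispatch table is zero-initialized and only overwritten when `init()` can actually resolve the symbol, so after initialization a null pointer doubles as an "extension not available" check. A small hedged sketch of that pattern (`tryBeginRendering` is a placeholder name; the `dispatcher` parameter is assumed to be an already-initialized `vk::DispatchLoaderDynamic`):

```cpp
#include <vulkan/vulkan.hpp>

// Prefer the core Vulkan 1.3 entry point, fall back to the
// VK_KHR_dynamic_rendering alias, and report failure if neither resolved.
bool tryBeginRendering( vk::DispatchLoaderDynamic const & dispatcher,
                        VkCommandBuffer commandBuffer,
                        VkRenderingInfo const & renderingInfo )
{
  if ( dispatcher.vkCmdBeginRendering )
  {
    dispatcher.vkCmdBeginRendering( commandBuffer, &renderingInfo );
    return true;
  }
  if ( dispatcher.vkCmdBeginRenderingKHR )
  {
    dispatcher.vkCmdBeginRenderingKHR( commandBuffer, &renderingInfo );
    return true;
  }
  return false;  // dynamic rendering unavailable: use classic render passes
}
```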
//=== VK_EXT_direct_mode_display === + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; + PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; +#else + PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; + PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; + + //=== VK_EXT_display_control === + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; + + //=== VK_GOOGLE_display_timing === + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; + + //=== VK_EXT_discard_rectangles === + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; + PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; + + //=== VK_EXT_hdr_metadata === + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; + + //=== VK_KHR_create_renderpass2 === + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; + + //=== VK_KHR_shared_presentable_image === + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; + + //=== VK_KHR_external_fence_capabilities === + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +#else + PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; + + //=== VK_KHR_performance_query === + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; + PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; + + //=== VK_KHR_get_surface_capabilities2 === + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; + + //=== VK_KHR_get_display_properties2 === + PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; + PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; + PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== 
VK_MVK_ios_surface === + PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; +#else + PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; +#else + PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +#else + PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; + PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; + PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; + PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; + PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; + PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; +#else + PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; + PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; + + //=== VK_KHR_get_memory_requirements2 === + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; + + //=== VK_KHR_acceleration_structure === + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; + 
PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; + PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; + PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; + PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; + PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; + + //=== VK_KHR_ray_tracing_pipeline === + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; + PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; + PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; + PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; + PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; + + //=== VK_KHR_sampler_ycbcr_conversion === + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; + + //=== VK_KHR_bind_memory2 === + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; + + //=== VK_EXT_image_drm_format_modifier === + PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; + + //=== VK_EXT_validation_cache === + PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; + PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; + PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; + + //=== VK_NV_shading_rate_image === + PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; + PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; + + //=== VK_NV_ray_tracing === + PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; + PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; + PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; + PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; + PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; + PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; + PFN_vkCreateRayTracingPipelinesNV 
vkCreateRayTracingPipelinesNV = 0; + PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; + PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; + PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; + + //=== VK_KHR_maintenance3 === + PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; + + //=== VK_KHR_draw_indirect_count === + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; + + //=== VK_EXT_external_memory_host === + PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; + + //=== VK_AMD_buffer_marker === + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + + //=== VK_EXT_calibrated_timestamps === + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; + PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; + + //=== VK_NV_mesh_shader === + PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; + PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; + PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; + + //=== VK_NV_scissor_exclusive === + PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0; + PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; + + //=== VK_NV_device_diagnostic_checkpoints === + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; + + //=== VK_KHR_timeline_semaphore === + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; + PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; + + //=== VK_INTEL_performance_query === + PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; + PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; + PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; + PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; + PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; + PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; + PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; + PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; + PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; + + //=== VK_AMD_display_native_hdr === + PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; +#else + PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; +#else + PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; + PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; + + //=== VK_EXT_buffer_device_address === + PFN_vkGetBufferDeviceAddressEXT 
vkGetBufferDeviceAddressEXT = 0; + + //=== VK_EXT_tooling_info === + PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; + + //=== VK_KHR_present_wait === + PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; + + //=== VK_NV_cooperative_matrix === + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; + + //=== VK_NV_coverage_reduction_mode === + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; + PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; + PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; + PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; +#else + PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; + PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; + + //=== VK_KHR_buffer_device_address === + PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; + PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; + + //=== VK_EXT_line_rasterization === + PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; + + //=== VK_EXT_host_query_reset === + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; + + //=== VK_EXT_extended_dynamic_state === + PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; + PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; + PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; + PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; + PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; + PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; + PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; + PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; + PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; + PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; + PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; + PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; + + //=== VK_KHR_deferred_host_operations === + PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; + PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; + PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; + PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; + PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; + + //=== VK_KHR_pipeline_executable_properties === + PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; + PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; + PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; + + //=== VK_EXT_host_image_copy === + PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 
0; + PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; + PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; + PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; + PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; + + //=== VK_KHR_map_memory2 === + PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; + PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; + + //=== VK_EXT_swapchain_maintenance1 === + PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0; + + //=== VK_NV_device_generated_commands === + PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; + PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; + PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; + + //=== VK_EXT_depth_bias_control === + PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0; + + //=== VK_EXT_acquire_drm_display === + PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; + PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; + + //=== VK_EXT_private_data === + PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; + PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; + PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; + PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; + + //=== VK_KHR_video_encode_queue === + PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0; + PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0; + PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0; + PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0; + PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0; + PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0; + PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0; + PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0; +#else + PFN_dummy vkCreateCudaModuleNV_placeholder = 0; + PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0; + PFN_dummy vkCreateCudaFunctionNV_placeholder = 0; + PFN_dummy vkDestroyCudaModuleNV_placeholder = 0; + PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0; + PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0; +#else + PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; + PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; + PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; + PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; + PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; + PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; + PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; + PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; + + //=== VK_EXT_descriptor_buffer === + PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0; + 
PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0; + PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0; + PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0; + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0; + + //=== VK_NV_fragment_shading_rate_enums === + PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; + + //=== VK_EXT_mesh_shader === + PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; + + //=== VK_KHR_copy_commands2 === + PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; + PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; + PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; + PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; + PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; + PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; + + //=== VK_EXT_device_fault === + PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; + PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; +#else + PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; + PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; +#else + PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; +#else + PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; + PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; +#else + PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; + 
PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; + PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; + PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; +#else + PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; + PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; + + //=== VK_HUAWEI_invocation_mask === + PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; + + //=== VK_NV_external_memory_rdma === + PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; + + //=== VK_EXT_pipeline_properties === + PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; + + //=== VK_EXT_extended_dynamic_state2 === + PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; + PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; + PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; + PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; + PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; +#else + PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; + + //=== VK_KHR_ray_tracing_maintenance1 === + PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; + + //=== VK_EXT_multi_draw === + PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; + PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; + + //=== VK_EXT_opacity_micromap === + PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0; + PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0; + PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0; + PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0; + PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0; + PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0; + PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0; + PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0; + PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0; + PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0; + PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0; + PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0; + PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0; + PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0; + + //=== VK_HUAWEI_cluster_culling_shader === + PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0; + PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0; + + //=== 
VK_EXT_pageable_device_local_memory === + PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; + + //=== VK_KHR_maintenance4 === + PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; + + //=== VK_VALVE_descriptor_set_host_mapping === + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; + PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; + + //=== VK_NV_copy_memory_indirect === + PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; + PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; + + //=== VK_NV_memory_decompression === + PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; + PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; + + //=== VK_NV_device_generated_commands_compute === + PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; + PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; + PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; + + //=== VK_EXT_extended_dynamic_state3 === + PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; + PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; + PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0; + PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0; + PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0; + PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0; + PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0; + PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0; + PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0; + PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0; + PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0; + PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0; + PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0; + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0; + PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0; + PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0; + PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0; + PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0; + PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0; + PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0; + PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0; + PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0; + PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0; + PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0; + PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0; + PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0; + PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0; + PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0; + PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0; + 
PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0; + PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0; + + //=== VK_EXT_shader_module_identifier === + PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; + PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; + + //=== VK_NV_optical_flow === + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0; + PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0; + PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0; + PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; + PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; + + //=== VK_KHR_maintenance5 === + PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0; + PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; + PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; + PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; + + //=== VK_EXT_shader_object === + PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; + PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; + PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; + PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; + + //=== VK_QCOM_tile_properties === + PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; + PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; + + //=== VK_NV_low_latency2 === + PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0; + PFN_vkLatencySleepNV vkLatencySleepNV = 0; + PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0; + PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0; + PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0; + + //=== VK_KHR_cooperative_matrix === + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0; +#else + PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_calibrated_timestamps === + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0; + PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0; + + //=== VK_KHR_maintenance6 === + PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0; + PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0; + PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0; + PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0; + + public: + DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default; + DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT + { + init( getInstanceProcAddr ); + } + + // This interface does not require a linked vulkan library. 
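+ // A minimal usage sketch (illustrative only, assuming the DynamicLoader helper declared
+ // elsewhere in this header; `dispatcher`, `instance` and `device` are placeholder names):
+ //   VULKAN_HPP_NAMESPACE::DynamicLoader dl;
+ //   PFN_vkGetInstanceProcAddr getInstanceProcAddr =
+ //     dl.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
+ //   VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic dispatcher( getInstanceProcAddr );
+ //   // ... after creating a VkInstance (and optionally a VkDevice):
+ //   dispatcher.init( instance, getInstanceProcAddr, device );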
+ DispatchLoaderDynamic( VkInstance instance, + PFN_vkGetInstanceProcAddr getInstanceProcAddr, + VkDevice device = {}, + PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT + { + init( instance, getInstanceProcAddr, device, getDeviceProcAddr ); + } + + template <typename DynamicLoader> + void init() + { + static DynamicLoader dl; + init( dl ); + } + + template <typename DynamicLoader> + void init( DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT + { + PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" ); + init( getInstanceProcAddr ); + } + + void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getInstanceProcAddr ); + + vkGetInstanceProcAddr = getInstanceProcAddr; + + //=== VK_VERSION_1_0 === + vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) ); + vkEnumerateInstanceExtensionProperties = + PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ); + vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ); + + //=== VK_VERSION_1_1 === + vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) ); + } + + // This interface does not require a linked vulkan library. + void init( VkInstance instance, + PFN_vkGetInstanceProcAddr getInstanceProcAddr, + VkDevice device = {}, + PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( instance && getInstanceProcAddr ); + vkGetInstanceProcAddr = getInstanceProcAddr; + init( VULKAN_HPP_NAMESPACE::Instance( instance ) ); + if ( device ) + { + init( VULKAN_HPP_NAMESPACE::Device( device ) ); + } + } + + void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT + { + VkInstance instance = static_cast<VkInstance>( instanceCpp ); + + //=== VK_VERSION_1_0 === + vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); + vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); + vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); + vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); + vkGetPhysicalDeviceImageFormatProperties = + PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); + vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); + vkGetPhysicalDeviceQueueFamilyProperties = + PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); + vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); + vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); + vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) ); + vkEnumerateDeviceExtensionProperties = + PFN_vkEnumerateDeviceExtensionProperties( 
vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); + vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) ); + vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) ); + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) ); + 
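+ // (Every entry point in this overload is resolved through vkGetInstanceProcAddr; for
+ // device-level commands that yields loader trampolines, which a later init( device )
+ // replaces with direct per-device pointers.)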
vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( 
vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( 
vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); + + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) ); + vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( 
vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); + vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + vkGetPhysicalDeviceFormatProperties2 = + PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = + PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2 = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + vkGetPhysicalDeviceMemoryProperties2 = + PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetPhysicalDeviceExternalBufferProperties = + PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + vkGetPhysicalDeviceExternalFenceProperties = + PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); + + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) 
); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = + PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + + //=== VK_VERSION_1_3 === + vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); + vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetInstanceProcAddr( instance, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetInstanceProcAddr( instance, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetInstanceProcAddr( instance, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetInstanceProcAddr( instance, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetInstanceProcAddr( instance, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetInstanceProcAddr( instance, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( 
vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = + PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirements" ) ); + + //=== VK_KHR_surface === + vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); + vkGetPhysicalDeviceSurfaceSupportKHR = + PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); + vkGetPhysicalDeviceSurfaceFormatsKHR = + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); + vkGetPhysicalDeviceSurfacePresentModesKHR = + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = + 
PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = + PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkGetPhysicalDevicePresentRectanglesKHR = + PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) ); + + //=== VK_KHR_display === + vkGetPhysicalDeviceDisplayPropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); + vkGetDisplayPlaneSupportedDisplaysKHR = + PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); + vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); + vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); + vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) ); + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); + vkGetPhysicalDeviceXlibPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); + vkGetPhysicalDeviceXcbPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); + vkGetPhysicalDeviceWaylandPresentationSupportKHR = + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, 
"vkCreateWin32SurfaceKHR" ) ); + vkGetPhysicalDeviceWin32PresentationSupportKHR = + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); + vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); + vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); + + //=== VK_KHR_video_queue === + vkGetPhysicalDeviceVideoCapabilitiesKHR = + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); + vkGetPhysicalDeviceVideoFormatPropertiesKHR = + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = + PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) ); + + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) ); + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = + PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = 
PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) 
); + if ( !vkGetPhysicalDeviceFeatures2 ) + vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; + vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceProperties2 ) + vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; + vkGetPhysicalDeviceFormatProperties2KHR = + PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceFormatProperties2 ) + vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; + vkGetPhysicalDeviceImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceImageFormatProperties2 ) + vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; + vkGetPhysicalDeviceQueueFamilyProperties2KHR = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) + vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; + vkGetPhysicalDeviceMemoryProperties2KHR = + PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceMemoryProperties2 ) + vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) + vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + + //=== VK_KHR_device_group_creation === + vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + if ( !vkEnumeratePhysicalDeviceGroups ) + vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + + //=== VK_KHR_external_memory_capabilities === + vkGetPhysicalDeviceExternalBufferPropertiesKHR = + 
PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalBufferProperties ) + vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) ); + + //=== VK_KHR_external_semaphore_capabilities === + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) + vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = + PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = + PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = + PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + 
vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_direct_mode_display === + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); + vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + vkGetPhysicalDeviceSurfaceCapabilities2EXT = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEnableEXT" ) ); + vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleModeEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) ); + + //=== VK_KHR_external_fence_capabilities === + 
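+ // (As with the other promoted extensions above, the core pointer is back-filled from
+ // the KHR entry point when the implementation only exposes the extension name.)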
vkGetPhysicalDeviceExternalFencePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalFenceProperties ) + vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_KHR_get_surface_capabilities2 === + vkGetPhysicalDeviceSurfaceCapabilities2KHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); + vkGetPhysicalDeviceSurfaceFormats2KHR = + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + + //=== VK_KHR_get_display_properties2 === + vkGetPhysicalDeviceDisplayProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); + vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); + vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, 
"vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); + vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); + vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); + vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = + PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = + PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetInstanceProcAddr( instance, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetInstanceProcAddr( instance, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectCountAMDX" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); + vkGetPhysicalDeviceMultisamplePropertiesEXT = + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) ); + if ( 
!vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = + PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = + PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = + PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = + PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = + PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = + PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = + PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = + PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = + PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) ); + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) ); + 
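// Note: every entry point is fetched unconditionally; for an extension the instance or driver does not expose, vkGetInstanceProcAddr may simply return a null pointer, so callers must still check availability before use. +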
vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = + PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = + PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = + PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = + PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = + PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); + 
vkGetAccelerationStructureMemoryRequirementsNV = + PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = + PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) ); + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); + if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR ) + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) ); + if ( !vkGetCalibratedTimestampsKHR ) + vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, 
"vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorEnableNV" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = + PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = + PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = + PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = + PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( 
vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + vkGetPhysicalDeviceFragmentShadingRatesKHR = + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_EXT_tooling_info === + vkGetPhysicalDeviceToolPropertiesEXT = + PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); + if ( !vkGetPhysicalDeviceToolProperties ) + vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetInstanceProcAddr( instance, "vkWaitForPresentKHR" ) ); + + //=== VK_NV_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + + //=== VK_NV_coverage_reduction_mode === + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkGetPhysicalDeviceSurfacePresentModes2EXT = + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = + PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); + + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + + //=== 
VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); + + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) ); + if ( !vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = + PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = 
PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) ); + + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = + PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = + PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = + PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToImageEXT" ) ); + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToMemoryEXT" ) ); + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToImageEXT" ) ); + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetInstanceProcAddr( instance, "vkTransitionImageLayoutEXT" ) ); + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2KHR ) + vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; + + //=== VK_KHR_map_memory2 === + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetInstanceProcAddr( instance, "vkMapMemory2KHR" ) ); + vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetInstanceProcAddr( instance, "vkUnmapMemory2KHR" ) ); + + //=== VK_EXT_swapchain_maintenance1 === + vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetInstanceProcAddr( instance, "vkReleaseSwapchainImagesEXT" ) ); + + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = + PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) ); + + //=== VK_EXT_depth_bias_control === + vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias2EXT" ) ); + + //=== VK_EXT_acquire_drm_display === + vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); + vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) ); + if 
( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + + //=== VK_KHR_video_encode_queue === + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) ); + vkGetEncodedVideoSessionParametersKHR = + PFN_vkGetEncodedVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkGetEncodedVideoSessionParametersKHR" ) ); + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) ); + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetInstanceProcAddr( instance, "vkCreateCudaModuleNV" ) ); + vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetInstanceProcAddr( instance, "vkGetCudaModuleCacheNV" ) ); + vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkCreateCudaFunctionNV" ) ); + vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaModuleNV" ) ); + vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaFunctionNV" ) ); + vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetInstanceProcAddr( instance, "vkCmdCudaLaunchKernelNV" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetInstanceProcAddr( instance, "vkExportMetalObjectsEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) ); + 
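// Note: the "if ( !vkX ) vkX = vkXKHR;" assignments used throughout this initializer let entry points from extensions promoted to core (synchronization2, private_data, copy_commands2, ...) backfill the corresponding core pointers when a driver exposes only the suffixed names. +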
vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_EXT_descriptor_buffer === + vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSizeEXT" ) ); + vkGetDescriptorSetLayoutBindingOffsetEXT = + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); + vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorEXT" ) ); + vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBuffersEXT" ) ); + vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); + vkGetBufferOpaqueCaptureDescriptorDataEXT = + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageViewOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); + vkGetSamplerOpaqueCaptureDescriptorDataEXT = + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); + vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) ); + + //=== VK_EXT_mesh_shader === + vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksEXT" ) ); + vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectEXT" ) ); + vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( 
instance, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; + + //=== VK_EXT_device_fault === + vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceFaultInfoEXT" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); + vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) ); + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = + PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + vkSetBufferCollectionBufferConstraintsFUCHSIA = + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = + PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetInstanceProcAddr( instance, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( 
vkGetInstanceProcAddr( instance, "vkCmdSubpassShadingHUAWEI" ) ); + + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdBindInvocationMaskHUAWEI" ) ); + + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetInstanceProcAddr( instance, "vkGetMemoryRemoteAddressNV" ) ); + + //=== VK_EXT_pipeline_properties === + vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPipelinePropertiesEXT" ) ); + + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); + vkGetPhysicalDeviceScreenPresentationSupportQNX = + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) ); + + //=== VK_KHR_ray_tracing_maintenance1 === + vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirect2KHR" ) ); + + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) ); + + //=== VK_EXT_opacity_micromap === + vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetInstanceProcAddr( instance, "vkCreateMicromapEXT" ) ); + vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetInstanceProcAddr( instance, "vkDestroyMicromapEXT" ) ); + vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkCmdBuildMicromapsEXT" ) ); + vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkBuildMicromapsEXT" ) ); + vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapEXT" ) ); + vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapToMemoryEXT" ) ); + vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToMicromapEXT" ) ); + vkWriteMicromapsPropertiesEXT = 
PFN_vkWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkWriteMicromapsPropertiesEXT" ) ); + vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapEXT" ) ); + vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapToMemoryEXT" ) ); + vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToMicromapEXT" ) ); + vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkCmdWriteMicromapsPropertiesEXT" ) ); + vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceMicromapCompatibilityEXT" ) ); + vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetInstanceProcAddr( instance, "vkGetMicromapBuildSizesEXT" ) ); + + //=== VK_HUAWEI_cluster_culling_shader === + vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterHUAWEI" ) ); + vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterIndirectHUAWEI" ) ); + + //=== VK_EXT_pageable_device_local_memory === + vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetInstanceProcAddr( instance, "vkSetDeviceMemoryPriorityEXT" ) ); + + //=== VK_KHR_maintenance4 === + vkGetDeviceBufferMemoryRequirementsKHR = + PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceBufferMemoryRequirements ) + vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; + vkGetDeviceImageMemoryRequirementsKHR = + PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageMemoryRequirements ) + vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; + vkGetDeviceImageSparseMemoryRequirementsKHR = + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageSparseMemoryRequirements ) + vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + + //=== VK_VALVE_descriptor_set_host_mapping === + vkGetDescriptorSetLayoutHostMappingInfoVALVE = + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); + vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetHostMappingVALVE" ) ); + + //=== VK_NV_copy_memory_indirect === + vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryIndirectNV" ) ); + vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToImageIndirectNV" ) ); + + //=== VK_NV_memory_decompression === + vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryNV" ) ); + vkCmdDecompressMemoryIndirectCountNV = + PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryIndirectCountNV" ) ); + + //=== VK_NV_device_generated_commands_compute === + vkGetPipelineIndirectMemoryRequirementsNV = + PFN_vkGetPipelineIndirectMemoryRequirementsNV( 
vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); + vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetInstanceProcAddr( instance, "vkCmdUpdatePipelineIndirectBufferNV" ) ); + vkGetPipelineIndirectDeviceAddressNV = + PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectDeviceAddressNV" ) ); + + //=== VK_EXT_extended_dynamic_state3 === + vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetInstanceProcAddr( instance, "vkCmdSetTessellationDomainOriginEXT" ) ); + vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampEnableEXT" ) ); + vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPolygonModeEXT" ) ); + vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationSamplesEXT" ) ); + vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleMaskEXT" ) ); + vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToCoverageEnableEXT" ) ); + vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToOneEnableEXT" ) ); + vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEnableEXT" ) ); + vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEnableEXT" ) ); + vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEquationEXT" ) ); + vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteMaskEXT" ) ); + vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationStreamEXT" ) ); + vkCmdSetConservativeRasterizationModeEXT = + PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetConservativeRasterizationModeEXT" ) ); + vkCmdSetExtraPrimitiveOverestimationSizeEXT = + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); + vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipEnableEXT" ) ); + vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEnableEXT" ) ); + vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendAdvancedEXT" ) ); + vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetProvokingVertexModeEXT" ) ); + vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineRasterizationModeEXT" ) ); + vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEnableEXT" ) ); + vkCmdSetDepthClipNegativeOneToOneEXT = + PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); + vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetInstanceProcAddr( instance, 
"vkCmdSetViewportWScalingEnableNV" ) ); + vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportSwizzleNV" ) ); + vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorEnableNV" ) ); + vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorLocationNV" ) ); + vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationModeNV" ) ); + vkCmdSetCoverageModulationTableEnableNV = + PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableEnableNV" ) ); + vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableNV" ) ); + vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetShadingRateImageEnableNV" ) ); + vkCmdSetRepresentativeFragmentTestEnableNV = + PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); + vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageReductionModeNV" ) ); + + //=== VK_EXT_shader_module_identifier === + vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleIdentifierEXT" ) ); + vkGetShaderModuleCreateInfoIdentifierEXT = + PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + + //=== VK_NV_optical_flow === + vkGetPhysicalDeviceOpticalFlowImageFormatsNV = + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) ); + vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkCreateOpticalFlowSessionNV" ) ); + vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkDestroyOpticalFlowSessionNV" ) ); + vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) ); + vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) ); + + //=== VK_KHR_maintenance5 === + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer2KHR" ) ); + vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetInstanceProcAddr( instance, "vkGetRenderingAreaGranularityKHR" ) ); + vkGetDeviceImageSubresourceLayoutKHR = + PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2KHR" ) ); + + //=== VK_EXT_shader_object === + vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetInstanceProcAddr( instance, "vkCreateShadersEXT" ) ); + vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetInstanceProcAddr( instance, "vkDestroyShaderEXT" ) ); + vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetInstanceProcAddr( instance, "vkGetShaderBinaryDataEXT" ) ); + vkCmdBindShadersEXT = 
PFN_vkCmdBindShadersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindShadersEXT" ) ); + + //=== VK_QCOM_tile_properties === + vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) ); + vkGetDynamicRenderingTilePropertiesQCOM = + PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); + + //=== VK_NV_low_latency2 === + vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetInstanceProcAddr( instance, "vkSetLatencySleepModeNV" ) ); + vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetInstanceProcAddr( instance, "vkLatencySleepNV" ) ); + vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetInstanceProcAddr( instance, "vkSetLatencyMarkerNV" ) ); + vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetInstanceProcAddr( instance, "vkGetLatencyTimingsNV" ) ); + vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetInstanceProcAddr( instance, "vkQueueNotifyOutOfBandNV" ) ); + + //=== VK_KHR_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) ); + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + vkCmdSetAttachmentFeedbackLoopEnableEXT = + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetInstanceProcAddr( instance, "vkGetScreenBufferPropertiesQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) ); + vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsKHR" ) ); + + //=== VK_KHR_maintenance6 === + vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets2KHR" ) ); + vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushConstants2KHR" ) ); + vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSet2KHR" ) ); + vkCmdPushDescriptorSetWithTemplate2KHR = + PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); + vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); + } + + // Device-level entry points below are re-resolved through vkGetDeviceProcAddr, so calls dispatch straight to the driver and skip the loader's instance-level trampolines. + void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT + { + VkDevice device = static_cast<VkDevice>( deviceCpp ); + + //=== VK_VERSION_1_0 === + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); + vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); + 
vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); + vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); + vkCreateImage = 
PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = 
PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
+      vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
+      vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
+      vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
+      vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
+      vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
+      vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
+      vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
+      vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
+      vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
+      vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
+      vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
+      vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
+      vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
+      vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
+      vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
+      vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
+      vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
+      vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
+      vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
+      vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
+      vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
+      vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
+      vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
+      vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
+      vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
+      vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
+      vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
+      vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
+      vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
+      vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
+      vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
+      vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
+      vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
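Every statement in this generated block follows one pattern: resolve a device-level entry point once through vkGetDeviceProcAddr, cache the result in the dispatch table, and call through the cached pointer from then on, so each call goes straight to the driver instead of through the loader's per-call trampoline. A minimal standalone sketch of that idiom, assuming only <vulkan/vulkan.h>; the DeviceTable name is illustrative and not part of the vendored file:

    #include <vulkan/vulkan.h>

    // Sketch of the dispatch-table idiom used by the generated loader above.
    struct DeviceTable
    {
      PFN_vkQueueSubmit vkQueueSubmit = nullptr;
      PFN_vkCmdDraw     vkCmdDraw     = nullptr;

      void init( VkDevice device, PFN_vkGetDeviceProcAddr getDeviceProcAddr )
      {
        // Fetched once per device; every later call dispatches straight to the driver.
        vkQueueSubmit = PFN_vkQueueSubmit( getDeviceProcAddr( device, "vkQueueSubmit" ) );
        vkCmdDraw     = PFN_vkCmdDraw( getDeviceProcAddr( device, "vkCmdDraw" ) );
      }
    };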
+      vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
+      vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
+      vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
+      vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
+      vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
+      vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
+      vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
+      vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
+      vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
+      vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
+      vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
+      vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
+      vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
+      vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
+      vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
+      vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
+      vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
+
+      //=== VK_VERSION_1_1 ===
+      vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
+      vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
+      vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+      vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
+      vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
+      vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
+      vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
+      vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
+      vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
+      vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
+      vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
+      vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
+      vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
+      vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
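From Vulkan 1.1 on, these loads can legitimately come back null: vkGetDeviceProcAddr returns NULL for any command the implementation does not expose, for example the 1.1 entry points above on a 1.0 driver. That is why the generated code further down backfills each core pointer from its promoted extension twin (for instance, if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR; under the VK_KHR_bind_memory2 section). A small sketch of the same fallback, assuming only <vulkan/vulkan.h>; the helper name loadBindBufferMemory2 is illustrative:

    #include <vulkan/vulkan.h>

    // Prefer the core 1.1 symbol, then fall back to the VK_KHR_bind_memory2
    // symbol, which has an identical signature after promotion to core.
    static void loadBindBufferMemory2( VkDevice device, PFN_vkGetDeviceProcAddr gdpa, PFN_vkBindBufferMemory2 & fn )
    {
      fn = PFN_vkBindBufferMemory2( gdpa( device, "vkBindBufferMemory2" ) );
      if ( !fn )  // null on pre-1.1 devices that only ship the KHR extension
        fn = PFN_vkBindBufferMemory2( gdpa( device, "vkBindBufferMemory2KHR" ) );
    }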
vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = + PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + + //=== VK_VERSION_1_3 === + vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( 
vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = + PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); + + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = + PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = + PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( 
device, "vkCreateSharedSwapchainsKHR" ) ); + + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + + //=== VK_KHR_video_queue === + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = + PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); + + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = 
PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, 
"vkGetSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = + PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); + vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = 
PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = + PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = + PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( 
vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = + PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = + PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = + PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = + PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = + PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = 
PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = + PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = + PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = + PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = + PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = + PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = + PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = + PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = 
PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = + PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = + PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( 
!vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + if ( !vkGetCalibratedTimestampsKHR ) + vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = + PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = + PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = + 
PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + + //=== VK_KHR_fragment_shading_rate === + vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = + PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); + if ( 
!vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = + PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = + PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); + vkTransitionImageLayoutEXT = 
PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2KHR ) + vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; + + //=== VK_KHR_map_memory2 === + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); + vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); + + //=== VK_EXT_swapchain_maintenance1 === + vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); + + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = + PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + + //=== VK_EXT_depth_bias_control === + vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + + //=== VK_KHR_video_encode_queue === + vkGetEncodedVideoSessionParametersKHR = + PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) ); + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) ); + vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) ); + vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) ); + vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); + vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( 
vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); + vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_EXT_descriptor_buffer === + vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); + vkGetDescriptorSetLayoutBindingOffsetEXT = + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); + vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); + vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); + vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); + vkGetBufferOpaqueCaptureDescriptorDataEXT = + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageViewOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); + vkGetSamplerOpaqueCaptureDescriptorDataEXT = + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); + vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 
PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + + //=== VK_EXT_mesh_shader === + vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); + vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); + vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; + + //=== VK_EXT_device_fault === + vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); + + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + 
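The VK_USE_PLATFORM_FUCHSIA guards here (like the Win32, Android, and Metal ones earlier) mirror the Vulkan headers themselves: the guarded prototypes and PFN typedefs only exist when the matching platform macro is defined before vulkan.h is included, so the corresponding loads must be compiled out everywhere else. A trimmed sketch of the same guard, with the PlatformTable name illustrative:

    #include <vulkan/vulkan.h>

    struct PlatformTable
    {
    #if defined( VK_USE_PLATFORM_WIN32_KHR )
      PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = nullptr;
    #endif

      void init( VkDevice device, PFN_vkGetDeviceProcAddr gdpa )
      {
    #if defined( VK_USE_PLATFORM_WIN32_KHR )
        // Referenced only on Windows builds; other platforms never see the type or the name.
        vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( gdpa( device, "vkGetMemoryWin32HandleKHR" ) );
    #endif
      }
    };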
vkSetBufferCollectionBufferConstraintsFUCHSIA = + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = + PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); + + //=== VK_EXT_pipeline_properties === + vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); + + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + + //=== VK_KHR_ray_tracing_maintenance1 === + vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); + + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); + + //=== VK_EXT_opacity_micromap === + vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) ); + vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) ); + vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) ); + vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) ); + vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, 
"vkCopyMicromapEXT" ) ); + vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) ); + vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) ); + vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) ); + vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) ); + vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) ); + vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) ); + vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) ); + vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) ); + vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) ); + + //=== VK_HUAWEI_cluster_culling_shader === + vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) ); + vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) ); + + //=== VK_EXT_pageable_device_local_memory === + vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); + + //=== VK_KHR_maintenance4 === + vkGetDeviceBufferMemoryRequirementsKHR = + PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceBufferMemoryRequirements ) + vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; + vkGetDeviceImageMemoryRequirementsKHR = + PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageMemoryRequirements ) + vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; + vkGetDeviceImageSparseMemoryRequirementsKHR = + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageSparseMemoryRequirements ) + vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + + //=== VK_VALVE_descriptor_set_host_mapping === + vkGetDescriptorSetLayoutHostMappingInfoVALVE = + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); + vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); + + //=== VK_NV_copy_memory_indirect === + vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); + vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); + + //=== VK_NV_memory_decompression === + vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); + vkCmdDecompressMemoryIndirectCountNV = PFN_vkCmdDecompressMemoryIndirectCountNV( 
vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); + + //=== VK_NV_device_generated_commands_compute === + vkGetPipelineIndirectMemoryRequirementsNV = + PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); + vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); + vkGetPipelineIndirectDeviceAddressNV = PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); + + //=== VK_EXT_extended_dynamic_state3 === + vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); + vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); + vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) ); + vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) ); + vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) ); + vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) ); + vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) ); + vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) ); + vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) ); + vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) ); + vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) ); + vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) ); + vkCmdSetConservativeRasterizationModeEXT = + PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) ); + vkCmdSetExtraPrimitiveOverestimationSizeEXT = + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); + vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) ); + vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) ); + vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) ); + vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) ); + vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) ); + vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) ); + vkCmdSetDepthClipNegativeOneToOneEXT = PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); + 
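+      // A note on the aliasing pattern used throughout this initializer (a sketch,
+      // mirroring the VK_KHR_synchronization2 block above): entry points that were
+      // later promoted to core are also installed under their core name whenever the
+      // driver does not export the core symbol itself, e.g.
+      //
+      //   vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) );
+      //   if ( !vkCmdSetEvent2 )
+      //     vkCmdSetEvent2 = vkCmdSetEvent2KHR;
+      //
+      // so callers can always use the core name, even on pre-1.3 drivers that only
+      // expose the extension.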
vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) ); + vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) ); + vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) ); + vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) ); + vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) ); + vkCmdSetCoverageModulationTableEnableNV = + PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) ); + vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) ); + vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) ); + vkCmdSetRepresentativeFragmentTestEnableNV = + PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); + vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) ); + + //=== VK_EXT_shader_module_identifier === + vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); + vkGetShaderModuleCreateInfoIdentifierEXT = + PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + + //=== VK_NV_optical_flow === + vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) ); + vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) ); + vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); + vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); + + //=== VK_KHR_maintenance5 === + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); + vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); + vkGetDeviceImageSubresourceLayoutKHR = PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + + //=== VK_EXT_shader_object === + vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); + vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); + vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); + vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); + + //=== VK_QCOM_tile_properties === + vkGetFramebufferTilePropertiesQCOM = 
PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); + vkGetDynamicRenderingTilePropertiesQCOM = + PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); + + //=== VK_NV_low_latency2 === + vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) ); + vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) ); + vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) ); + vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) ); + vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) ); + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + vkCmdSetAttachmentFeedbackLoopEnableEXT = + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_calibrated_timestamps === + vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); + + //=== VK_KHR_maintenance6 === + vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); + vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); + vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); + vkCmdPushDescriptorSetWithTemplate2KHR = + PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); + vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); + } + + template <typename DynamicLoader> + void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device, DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT + { + PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" ); + PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>( "vkGetDeviceProcAddr" ); + init( static_cast<VkInstance>( instance ), getInstanceProcAddr, static_cast<VkDevice>( device ), device ? getDeviceProcAddr : nullptr ); + } + + template <typename DynamicLoader = VULKAN_HPP_NAMESPACE::DynamicLoader> + void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT + { + static DynamicLoader dl; + init( instance, device, dl ); + } + }; +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_android.h b/MacroLibX/third_party/vulkan/vulkan_android.h new file mode 100644 index 0000000..40b3c67 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_android.h @@ -0,0 +1,153 @@ +#ifndef VULKAN_ANDROID_H_ +#define VULKAN_ANDROID_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc.
+** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_android_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_android_surface 1 +struct ANativeWindow; +#define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6 +#define VK_KHR_ANDROID_SURFACE_EXTENSION_NAME "VK_KHR_android_surface" +typedef VkFlags VkAndroidSurfaceCreateFlagsKHR; +typedef struct VkAndroidSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkAndroidSurfaceCreateFlagsKHR flags; + struct ANativeWindow* window; +} VkAndroidSurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAndroidSurfaceKHR)(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR( + VkInstance instance, + const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_ANDROID_external_memory_android_hardware_buffer is a preprocessor guard. Do not pass it to API calls. +#define VK_ANDROID_external_memory_android_hardware_buffer 1 +struct AHardwareBuffer; +#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 5 +#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME "VK_ANDROID_external_memory_android_hardware_buffer" +typedef struct VkAndroidHardwareBufferUsageANDROID { + VkStructureType sType; + void* pNext; + uint64_t androidHardwareBufferUsage; +} VkAndroidHardwareBufferUsageANDROID; + +typedef struct VkAndroidHardwareBufferPropertiesANDROID { + VkStructureType sType; + void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeBits; +} VkAndroidHardwareBufferPropertiesANDROID; + +typedef struct VkAndroidHardwareBufferFormatPropertiesANDROID { + VkStructureType sType; + void* pNext; + VkFormat format; + uint64_t externalFormat; + VkFormatFeatureFlags formatFeatures; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkAndroidHardwareBufferFormatPropertiesANDROID; + +typedef struct VkImportAndroidHardwareBufferInfoANDROID { + VkStructureType sType; + const void* pNext; + struct AHardwareBuffer* buffer; +} VkImportAndroidHardwareBufferInfoANDROID; + +typedef struct VkMemoryGetAndroidHardwareBufferInfoANDROID { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; +} VkMemoryGetAndroidHardwareBufferInfoANDROID; + +typedef struct VkExternalFormatANDROID { + VkStructureType sType; + void* pNext; + uint64_t externalFormat; +} VkExternalFormatANDROID; + +typedef struct VkAndroidHardwareBufferFormatProperties2ANDROID { + VkStructureType sType; + void* pNext; + VkFormat format; + uint64_t externalFormat; + VkFormatFeatureFlags2 formatFeatures; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkAndroidHardwareBufferFormatProperties2ANDROID; + +typedef VkResult (VKAPI_PTR *PFN_vkGetAndroidHardwareBufferPropertiesANDROID)(VkDevice device, const struct AHardwareBuffer* buffer, 
VkAndroidHardwareBufferPropertiesANDROID* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryAndroidHardwareBufferANDROID)(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID( + VkDevice device, + const struct AHardwareBuffer* buffer, + VkAndroidHardwareBufferPropertiesANDROID* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID( + VkDevice device, + const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, + struct AHardwareBuffer** pBuffer); +#endif + + +// VK_ANDROID_external_format_resolve is a preprocessor guard. Do not pass it to API calls. +#define VK_ANDROID_external_format_resolve 1 +#define VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_SPEC_VERSION 1 +#define VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_EXTENSION_NAME "VK_ANDROID_external_format_resolve" +typedef struct VkPhysicalDeviceExternalFormatResolveFeaturesANDROID { + VkStructureType sType; + void* pNext; + VkBool32 externalFormatResolve; +} VkPhysicalDeviceExternalFormatResolveFeaturesANDROID; + +typedef struct VkPhysicalDeviceExternalFormatResolvePropertiesANDROID { + VkStructureType sType; + void* pNext; + VkBool32 nullColorAttachmentWithExternalFormatResolve; + VkChromaLocation externalFormatResolveChromaOffsetX; + VkChromaLocation externalFormatResolveChromaOffsetY; +} VkPhysicalDeviceExternalFormatResolvePropertiesANDROID; + +typedef struct VkAndroidHardwareBufferFormatResolvePropertiesANDROID { + VkStructureType sType; + void* pNext; + VkFormat colorAttachmentFormat; +} VkAndroidHardwareBufferFormatResolvePropertiesANDROID; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_beta.h b/MacroLibX/third_party/vulkan/vulkan_beta.h new file mode 100644 index 0000000..a6e5a46 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_beta.h @@ -0,0 +1,216 @@ +#ifndef VULKAN_BETA_H_ +#define VULKAN_BETA_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_portability_subset is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_portability_subset 1 +#define VK_KHR_PORTABILITY_SUBSET_SPEC_VERSION 1 +#define VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME "VK_KHR_portability_subset" +typedef struct VkPhysicalDevicePortabilitySubsetFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 constantAlphaColorBlendFactors; + VkBool32 events; + VkBool32 imageViewFormatReinterpretation; + VkBool32 imageViewFormatSwizzle; + VkBool32 imageView2DOn3DImage; + VkBool32 multisampleArrayImage; + VkBool32 mutableComparisonSamplers; + VkBool32 pointPolygons; + VkBool32 samplerMipLodBias; + VkBool32 separateStencilMaskRef; + VkBool32 shaderSampleRateInterpolationFunctions; + VkBool32 tessellationIsolines; + VkBool32 tessellationPointMode; + VkBool32 triangleFans; + VkBool32 vertexAttributeAccessBeyondStride; +} VkPhysicalDevicePortabilitySubsetFeaturesKHR; + +typedef struct VkPhysicalDevicePortabilitySubsetPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t minVertexInputBindingStrideAlignment; +} VkPhysicalDevicePortabilitySubsetPropertiesKHR; + + + +// VK_AMDX_shader_enqueue is a preprocessor guard. Do not pass it to API calls. 
+#define VK_AMDX_shader_enqueue 1 +#define VK_AMDX_SHADER_ENQUEUE_SPEC_VERSION 1 +#define VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME "VK_AMDX_shader_enqueue" +#define VK_SHADER_INDEX_UNUSED_AMDX (~0U) +typedef struct VkPhysicalDeviceShaderEnqueueFeaturesAMDX { + VkStructureType sType; + void* pNext; + VkBool32 shaderEnqueue; +} VkPhysicalDeviceShaderEnqueueFeaturesAMDX; + +typedef struct VkPhysicalDeviceShaderEnqueuePropertiesAMDX { + VkStructureType sType; + void* pNext; + uint32_t maxExecutionGraphDepth; + uint32_t maxExecutionGraphShaderOutputNodes; + uint32_t maxExecutionGraphShaderPayloadSize; + uint32_t maxExecutionGraphShaderPayloadCount; + uint32_t executionGraphDispatchAddressAlignment; +} VkPhysicalDeviceShaderEnqueuePropertiesAMDX; + +typedef struct VkExecutionGraphPipelineScratchSizeAMDX { + VkStructureType sType; + void* pNext; + VkDeviceSize size; +} VkExecutionGraphPipelineScratchSizeAMDX; + +typedef struct VkExecutionGraphPipelineCreateInfoAMDX { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineLibraryCreateInfoKHR* pLibraryInfo; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkExecutionGraphPipelineCreateInfoAMDX; + +typedef union VkDeviceOrHostAddressConstAMDX { + VkDeviceAddress deviceAddress; + const void* hostAddress; +} VkDeviceOrHostAddressConstAMDX; + +typedef struct VkDispatchGraphInfoAMDX { + uint32_t nodeIndex; + uint32_t payloadCount; + VkDeviceOrHostAddressConstAMDX payloads; + uint64_t payloadStride; +} VkDispatchGraphInfoAMDX; + +typedef struct VkDispatchGraphCountInfoAMDX { + uint32_t count; + VkDeviceOrHostAddressConstAMDX infos; + uint64_t stride; +} VkDispatchGraphCountInfoAMDX; + +typedef struct VkPipelineShaderStageNodeCreateInfoAMDX { + VkStructureType sType; + const void* pNext; + const char* pName; + uint32_t index; +} VkPipelineShaderStageNodeCreateInfoAMDX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateExecutionGraphPipelinesAMDX)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineScratchSizeAMDX)(VkDevice device, VkPipeline executionGraph, VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineNodeIndexAMDX)(VkDevice device, VkPipeline executionGraph, const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, uint32_t* pNodeIndex); +typedef void (VKAPI_PTR *PFN_vkCmdInitializeGraphScratchMemoryAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectCountAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceAddress countInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateExecutionGraphPipelinesAMDX( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + 
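+/*
+** Illustrative call sequence for the execution-graph entry points declared in this
+** block -- a sketch only; `device`, `cache`, `info`, `cmd`, `scratch`, and `countInfo`
+** are assumed to be valid handles/values created elsewhere:
+**
+**   VkPipeline graph;
+**   vkCreateExecutionGraphPipelinesAMDX( device, cache, 1, &info, NULL, &graph );
+**   VkExecutionGraphPipelineScratchSizeAMDX size = { VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX };
+**   vkGetExecutionGraphPipelineScratchSizeAMDX( device, graph, &size );
+**   // ... allocate a buffer of size.size bytes and take its device address as `scratch` ...
+**   vkCmdInitializeGraphScratchMemoryAMDX( cmd, scratch );
+**   vkCmdDispatchGraphAMDX( cmd, scratch, &countInfo );
+*/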
+VKAPI_ATTR VkResult VKAPI_CALL vkGetExecutionGraphPipelineScratchSizeAMDX( + VkDevice device, + VkPipeline executionGraph, + VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetExecutionGraphPipelineNodeIndexAMDX( + VkDevice device, + VkPipeline executionGraph, + const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, + uint32_t* pNodeIndex); + +VKAPI_ATTR void VKAPI_CALL vkCmdInitializeGraphScratchMemoryAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + const VkDispatchGraphCountInfoAMDX* pCountInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + const VkDispatchGraphCountInfoAMDX* pCountInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectCountAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceAddress countInfo); +#endif + + +// VK_NV_displacement_micromap is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_displacement_micromap 1 +#define VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION 2 +#define VK_NV_DISPLACEMENT_MICROMAP_EXTENSION_NAME "VK_NV_displacement_micromap" + +typedef enum VkDisplacementMicromapFormatNV { + VK_DISPLACEMENT_MICROMAP_FORMAT_64_TRIANGLES_64_BYTES_NV = 1, + VK_DISPLACEMENT_MICROMAP_FORMAT_256_TRIANGLES_128_BYTES_NV = 2, + VK_DISPLACEMENT_MICROMAP_FORMAT_1024_TRIANGLES_128_BYTES_NV = 3, + VK_DISPLACEMENT_MICROMAP_FORMAT_MAX_ENUM_NV = 0x7FFFFFFF +} VkDisplacementMicromapFormatNV; +typedef struct VkPhysicalDeviceDisplacementMicromapFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 displacementMicromap; +} VkPhysicalDeviceDisplacementMicromapFeaturesNV; + +typedef struct VkPhysicalDeviceDisplacementMicromapPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxDisplacementMicromapSubdivisionLevel; +} VkPhysicalDeviceDisplacementMicromapPropertiesNV; + +typedef struct VkAccelerationStructureTrianglesDisplacementMicromapNV { + VkStructureType sType; + void* pNext; + VkFormat displacementBiasAndScaleFormat; + VkFormat displacementVectorFormat; + VkDeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer; + VkDeviceSize displacementBiasAndScaleStride; + VkDeviceOrHostAddressConstKHR displacementVectorBuffer; + VkDeviceSize displacementVectorStride; + VkDeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags; + VkDeviceSize displacedMicromapPrimitiveFlagsStride; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexBuffer; + VkDeviceSize indexStride; + uint32_t baseTriangle; + uint32_t usageCountsCount; + const VkMicromapUsageEXT* pUsageCounts; + const VkMicromapUsageEXT* const* ppUsageCounts; + VkMicromapEXT micromap; +} VkAccelerationStructureTrianglesDisplacementMicromapNV; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_core.h b/MacroLibX/third_party/vulkan/vulkan_core.h new file mode 100644 index 0000000..9a870dd --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_core.h @@ -0,0 +1,19371 @@ +#ifndef VULKAN_CORE_H_ +#define VULKAN_CORE_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_VERSION_1_0 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_VERSION_1_0 1 +#include "vk_platform.h" + +#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object; + + +#ifndef VK_USE_64_BIT_PTR_DEFINES + #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) || (defined(__riscv) && __riscv_xlen == 64) + #define VK_USE_64_BIT_PTR_DEFINES 1 + #else + #define VK_USE_64_BIT_PTR_DEFINES 0 + #endif +#endif + + +#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE + #if (VK_USE_64_BIT_PTR_DEFINES==1) + #if (defined(__cplusplus) && (__cplusplus >= 201103L)) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201103L)) + #define VK_NULL_HANDLE nullptr + #else + #define VK_NULL_HANDLE ((void*)0) + #endif + #else + #define VK_NULL_HANDLE 0ULL + #endif +#endif +#ifndef VK_NULL_HANDLE + #define VK_NULL_HANDLE 0 +#endif + + +#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE + #if (VK_USE_64_BIT_PTR_DEFINES==1) + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object; + #else + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object; + #endif +#endif + +#define VK_MAKE_API_VERSION(variant, major, minor, patch) \ + ((((uint32_t)(variant)) << 29U) | (((uint32_t)(major)) << 22U) | (((uint32_t)(minor)) << 12U) | ((uint32_t)(patch))) + +// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead. +//#define VK_API_VERSION VK_MAKE_API_VERSION(0, 1, 0, 0) // Patch version should always be set to 0 + +// Vulkan 1.0 version number +#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 + +// Version of this file +#define VK_HEADER_VERSION 274 + +// Complete version of this file +#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) + +// DEPRECATED: This define is deprecated. VK_MAKE_API_VERSION should be used instead. +#define VK_MAKE_VERSION(major, minor, patch) \ + ((((uint32_t)(major)) << 22U) | (((uint32_t)(minor)) << 12U) | ((uint32_t)(patch))) + +// DEPRECATED: This define is deprecated. VK_API_VERSION_MAJOR should be used instead. +#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22U) + +// DEPRECATED: This define is deprecated. VK_API_VERSION_MINOR should be used instead. +#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12U) & 0x3FFU) + +// DEPRECATED: This define is deprecated. VK_API_VERSION_PATCH should be used instead. 
+#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) + +#define VK_API_VERSION_VARIANT(version) ((uint32_t)(version) >> 29U) +#define VK_API_VERSION_MAJOR(version) (((uint32_t)(version) >> 22U) & 0x7FU) +#define VK_API_VERSION_MINOR(version) (((uint32_t)(version) >> 12U) & 0x3FFU) +#define VK_API_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) +typedef uint32_t VkBool32; +typedef uint64_t VkDeviceAddress; +typedef uint64_t VkDeviceSize; +typedef uint32_t VkFlags; +typedef uint32_t VkSampleMask; +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) +VK_DEFINE_HANDLE(VkInstance) +VK_DEFINE_HANDLE(VkPhysicalDevice) +VK_DEFINE_HANDLE(VkDevice) +VK_DEFINE_HANDLE(VkQueue) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore) +VK_DEFINE_HANDLE(VkCommandBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) +#define VK_ATTACHMENT_UNUSED (~0U) +#define VK_FALSE 0U +#define VK_LOD_CLAMP_NONE 1000.0F +#define VK_QUEUE_FAMILY_IGNORED (~0U) +#define VK_REMAINING_ARRAY_LAYERS (~0U) +#define VK_REMAINING_MIP_LEVELS (~0U) +#define VK_SUBPASS_EXTERNAL (~0U) +#define VK_TRUE 1U +#define VK_WHOLE_SIZE (~0ULL) +#define VK_MAX_MEMORY_TYPES 32U +#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256U +#define VK_UUID_SIZE 16U +#define VK_MAX_EXTENSION_NAME_SIZE 256U +#define VK_MAX_DESCRIPTION_SIZE 256U +#define VK_MAX_MEMORY_HEAPS 16U + +typedef enum VkResult { + VK_SUCCESS = 0, + VK_NOT_READY = 1, + VK_TIMEOUT = 2, + VK_EVENT_SET = 3, + VK_EVENT_RESET = 4, + VK_INCOMPLETE = 5, + VK_ERROR_OUT_OF_HOST_MEMORY = -1, + VK_ERROR_OUT_OF_DEVICE_MEMORY = -2, + VK_ERROR_INITIALIZATION_FAILED = -3, + VK_ERROR_DEVICE_LOST = -4, + VK_ERROR_MEMORY_MAP_FAILED = -5, + VK_ERROR_LAYER_NOT_PRESENT = -6, + VK_ERROR_EXTENSION_NOT_PRESENT = -7, + VK_ERROR_FEATURE_NOT_PRESENT = -8, + VK_ERROR_INCOMPATIBLE_DRIVER = -9, + VK_ERROR_TOO_MANY_OBJECTS = -10, + VK_ERROR_FORMAT_NOT_SUPPORTED = -11, + VK_ERROR_FRAGMENTED_POOL = -12, + VK_ERROR_UNKNOWN = -13, + VK_ERROR_OUT_OF_POOL_MEMORY = -1000069000, + VK_ERROR_INVALID_EXTERNAL_HANDLE = -1000072003, + VK_ERROR_FRAGMENTATION = -1000161000, + VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS = -1000257000, + VK_PIPELINE_COMPILE_REQUIRED = 1000297000, + VK_ERROR_SURFACE_LOST_KHR = -1000000000, + VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001, + VK_SUBOPTIMAL_KHR = 1000001003, + VK_ERROR_OUT_OF_DATE_KHR = -1000001004, + VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001, + VK_ERROR_VALIDATION_FAILED_EXT = -1000011001, + VK_ERROR_INVALID_SHADER_NV = -1000012000, + VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR = -1000023000, + VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR = -1000023001, + VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR = -1000023002, + 
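+    // Note: extension-provided VkResult values follow the registry encoding
+    // |value| = 1000000000 + 1000 * (extension_number - 1) + offset; the
+    // surrounding -10000230xx video codes, for example, belong to extension
+    // number 24 (VK_KHR_video_queue).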
VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR = -1000023003, + VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR = -1000023004, + VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR = -1000023005, + VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000, + VK_ERROR_NOT_PERMITTED_KHR = -1000174001, + VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000, + VK_THREAD_IDLE_KHR = 1000268000, + VK_THREAD_DONE_KHR = 1000268001, + VK_OPERATION_DEFERRED_KHR = 1000268002, + VK_OPERATION_NOT_DEFERRED_KHR = 1000268003, + VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR = -1000299000, + VK_ERROR_COMPRESSION_EXHAUSTED_EXT = -1000338000, + VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT = 1000482000, + VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY, + VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE, + VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION, + VK_ERROR_NOT_PERMITTED_EXT = VK_ERROR_NOT_PERMITTED_KHR, + VK_ERROR_INVALID_DEVICE_ADDRESS_EXT = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + VK_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED, + VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED, + VK_RESULT_MAX_ENUM = 0x7FFFFFFF +} VkResult; + +typedef enum VkStructureType { + VK_STRUCTURE_TYPE_APPLICATION_INFO = 0, + VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO = 1, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO = 2, + VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 3, + VK_STRUCTURE_TYPE_SUBMIT_INFO = 4, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 5, + VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE = 6, + VK_STRUCTURE_TYPE_BIND_SPARSE_INFO = 7, + VK_STRUCTURE_TYPE_FENCE_CREATE_INFO = 8, + VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO = 9, + VK_STRUCTURE_TYPE_EVENT_CREATE_INFO = 10, + VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO = 11, + VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO = 12, + VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO = 13, + VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO = 14, + VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 15, + VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 16, + VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO = 17, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO = 18, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO = 19, + VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO = 20, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO = 21, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO = 22, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO = 23, + VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO = 24, + VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO = 25, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO = 26, + VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO = 27, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO = 28, + VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO = 29, + VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO = 30, + VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO = 31, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO = 32, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO = 33, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO = 34, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET = 35, + VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET = 36, + VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO = 37, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO = 38, + VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO = 39, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO = 40, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO = 41, + 
VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 42, + VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO = 43, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER = 44, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 45, + VK_STRUCTURE_TYPE_MEMORY_BARRIER = 46, + VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO = 47, + VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO = 48, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES = 1000094000, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO = 1000157000, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO = 1000157001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES = 1000083000, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS = 1000127000, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO = 1000127001, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO = 1000060000, + VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO = 1000060003, + VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO = 1000060004, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO = 1000060005, + VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO = 1000060006, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO = 1000060013, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO = 1000060014, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES = 1000070000, + VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO = 1000070001, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2 = 1000146000, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2 = 1000146001, + VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2 = 1000146002, + VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 = 1000146003, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2 = 1000146004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 = 1000059000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 = 1000059001, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2 = 1000059002, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2 = 1000059003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2 = 1000059004, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2 = 1000059005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2 = 1000059006, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2 = 1000059007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2 = 1000059008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES = 1000117000, + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO = 1000117001, + VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO = 1000117002, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO = 1000117003, + VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO = 1000053000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES = 1000053001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES = 1000053002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES = 1000120000, + VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO = 1000145000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES = 1000145001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES = 1000145002, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2 = 1000145003, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO = 1000156000, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO = 1000156001, + VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO = 1000156002, + VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO = 1000156003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES = 1000156004, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES = 1000156005, + 
VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO = 1000085000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO = 1000071000, + VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES = 1000071001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO = 1000071002, + VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES = 1000071003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES = 1000071004, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO = 1000072000, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO = 1000072001, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO = 1000072002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO = 1000112000, + VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES = 1000112001, + VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO = 1000113000, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO = 1000077000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO = 1000076000, + VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES = 1000076001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES = 1000168000, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT = 1000168001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES = 1000063000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES = 49, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES = 50, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES = 51, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES = 52, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO = 1000147000, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2 = 1000109000, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2 = 1000109001, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2 = 1000109002, + VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2 = 1000109003, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2 = 1000109004, + VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO = 1000109005, + VK_STRUCTURE_TYPE_SUBPASS_END_INFO = 1000109006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES = 1000177000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES = 1000196000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES = 1000180000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES = 1000082000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES = 1000197000, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO = 1000161000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES = 1000161001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES = 1000161002, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO = 1000161003, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT = 1000161004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES = 1000199000, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE = 1000199001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES = 1000221000, + VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO = 1000246000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES = 1000130000, + VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO = 1000130001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES = 1000211000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES = 1000108000, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO = 1000108001, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO = 1000108002, + VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO = 1000108003, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES = 1000253000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES = 1000175000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES = 1000241000, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT = 1000241001, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT = 1000241002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES = 1000261000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES = 1000207000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES = 1000207001, + VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO = 1000207002, + VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO = 1000207003, + VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO = 1000207004, + VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO = 1000207005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES = 1000257000, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO = 1000244001, + VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO = 1000257002, + VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO = 1000257003, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO = 1000257004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES = 53, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES = 54, + VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO = 1000192000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES = 1000215000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES = 1000245000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES = 1000276000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES = 1000295000, + VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO = 1000295001, + VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO = 1000295002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES = 1000297000, + VK_STRUCTURE_TYPE_MEMORY_BARRIER_2 = 1000314000, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2 = 1000314001, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2 = 1000314002, + VK_STRUCTURE_TYPE_DEPENDENCY_INFO = 1000314003, + VK_STRUCTURE_TYPE_SUBMIT_INFO_2 = 1000314004, + VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO = 1000314005, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO = 1000314006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES = 1000314007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES = 1000325000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES = 1000335000, + VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2 = 1000337000, + VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2 = 1000337001, + VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2 = 1000337002, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2 = 1000337003, + VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2 = 1000337004, + VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2 = 1000337005, + VK_STRUCTURE_TYPE_BUFFER_COPY_2 = 1000337006, + VK_STRUCTURE_TYPE_IMAGE_COPY_2 = 1000337007, + VK_STRUCTURE_TYPE_IMAGE_BLIT_2 = 1000337008, + VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2 = 1000337009, + VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2 = 1000337010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES = 1000225000, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO = 1000225001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES = 1000225002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES = 1000138000, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES = 1000138001, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK = 1000138002, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO = 1000138003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES = 1000066000, + VK_STRUCTURE_TYPE_RENDERING_INFO = 1000044000, + VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO = 1000044001, + VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO = 1000044002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES = 1000044003, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO = 1000044004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES = 1000280000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES = 1000280001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES = 1000281001, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3 = 1000360000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES = 1000413000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES = 1000413001, + VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS = 1000413002, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS = 1000413003, + VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000, + VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001, + VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR = 1000060007, + VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR = 1000060008, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR = 1000060009, + VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR = 1000060010, + VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR = 1000060011, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR = 1000060012, + VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000, + VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001, + VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR = 1000003000, + VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR = 1000004000, + VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR = 1000005000, + VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR = 1000006000, + VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR = 1000008000, + VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR = 1000009000, + VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT = 1000011000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD = 1000018000, + VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000, + VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001, + VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002, + VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR = 1000023000, + VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR = 1000023001, + VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR = 1000023002, + VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR = 1000023003, + VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR = 1000023004, + VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR = 1000023005, + VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000023006, + VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR = 1000023007, + VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR = 1000023008, + VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR = 1000023009, + VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR = 1000023010, + VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR = 1000023011, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR = 1000023012, + VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR = 1000023013, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR = 1000023014, + VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR = 1000023015, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR = 1000023016, + VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR = 1000024000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR = 1000024001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR = 1000024002, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT = 1000028000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT = 1000028001, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT = 1000028002, + VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX = 1000029000, + VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX = 1000029001, + VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX = 1000029002, + VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX = 1000030000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX = 1000030001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_KHR = 1000038000, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000038001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR = 1000038002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PICTURE_INFO_KHR = 1000038003, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_KHR = 1000038004, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_KHR = 1000038005, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_GOP_REMAINING_FRAME_INFO_KHR = 1000038006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_KHR = 1000038007, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR = 1000038008, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_KHR = 1000038009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_KHR = 1000038010, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUALITY_LEVEL_PROPERTIES_KHR = 1000038011, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR = 1000038012, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_FEEDBACK_INFO_KHR = 1000038013, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_KHR = 1000039000, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000039001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR = 1000039002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PICTURE_INFO_KHR = 1000039003, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_KHR = 1000039004, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR = 1000039005, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_GOP_REMAINING_FRAME_INFO_KHR = 1000039006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_KHR = 1000039007, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR = 1000039009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_KHR = 1000039010, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_KHR = 1000039011, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUALITY_LEVEL_PROPERTIES_KHR = 1000039012, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR = 1000039013, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_FEEDBACK_INFO_KHR = 1000039014, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR = 1000040000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_KHR = 1000040001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR 
= 1000040003, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000040004, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR = 1000040005, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR = 1000040006, + VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000044006, + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT = 1000044007, + VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD = 1000044008, + VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX = 1000044009, + VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP = 1000049000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV = 1000050000, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV = 1000056001, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057001, + VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000, + VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000, + VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT = 1000067000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT = 1000067001, + VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT = 1000068000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT = 1000068001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT = 1000068002, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001, + VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002, + VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR = 1000073003, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR = 1000074000, + VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR = 1000074001, + VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR = 1000074002, + VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR = 1000075000, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078000, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078001, + VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR = 1000078002, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = 1000080000, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT = 1000081000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT = 1000081001, + VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT = 1000081002, + VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000, + VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000, + VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT = 1000091001, + VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT = 1000091002, + VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003, + VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000, + 
VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000, + VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT = 1000101000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT = 1000101001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT = 1000102000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT = 1000102001, + VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RELAXED_LINE_RASTERIZATION_FEATURES_IMG = 1000110000, + VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000, + VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000, + VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001, + VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002, + VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000, + VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR = 1000116000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR = 1000116001, + VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR = 1000116002, + VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR = 1000116003, + VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR = 1000116004, + VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR = 1000116005, + VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR = 1000116006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001, + VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002, + VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR = 1000121000, + VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR = 1000121001, + VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR = 1000121002, + VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR = 1000121003, + VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR = 1000121004, + VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK = 1000122000, + VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK = 1000123000, + VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT = 1000128000, + VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT = 1000128001, + VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT = 1000128002, + VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT = 1000128003, + VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT = 1000128004, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID = 1000129000, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID = 1000129001, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID = 1000129002, + VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129003, + VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129004, + VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID = 1000129005, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID = 1000129006, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX = 1000134000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX = 1000134001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX = 1000134002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + 
VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX = 1000134003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX = 1000134004, +#endif + VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000, + VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001, + VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT = 1000143003, + VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT = 1000143004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR = 1000150007, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR = 1000150000, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR = 1000150002, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR = 1000150003, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR = 1000150004, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR = 1000150005, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR = 1000150006, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR = 1000150009, + VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR = 1000150010, + VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR = 1000150011, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR = 1000150012, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR = 1000150013, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR = 1000150014, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR = 1000150017, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR = 1000150020, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR = 1000347000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR = 1000347001, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR = 1000150015, + VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR = 1000150016, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR = 1000150018, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR = 1000348013, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV = 1000154000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV = 1000154001, + VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT = 1000158000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT = 1000158002, + VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT = 1000158003, + VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT = 1000158004, + VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158005, + VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT = 1000158006, + VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000, + VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR = 
1000163000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR = 1000163001, +#endif + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV = 1000164000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV = 1000164001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV = 1000164002, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV = 1000164005, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV = 1000165000, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV = 1000165001, + VK_STRUCTURE_TYPE_GEOMETRY_NV = 1000165003, + VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV = 1000165004, + VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV = 1000165005, + VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = 1000165006, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = 1000165007, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV = 1000165008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV = 1000165009, + VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV = 1000165011, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV = 1000165012, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV = 1000166000, + VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV = 1000166001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT = 1000170000, + VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT = 1000170001, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT = 1000178000, + VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT = 1000178001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT = 1000178002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR = 1000181000, + VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD = 1000183000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD = 1000185000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR = 1000187000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000187001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR = 1000187002, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR = 1000187003, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR = 1000187004, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR = 1000187005, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR = 1000174000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR = 1000388000, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR = 1000388001, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD = 1000189000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT = 1000190000, + VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP = 1000191000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = 1000201000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV = 1000202000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV = 1000202001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV = 1000204000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV = 1000205000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV = 1000205002, + VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV = 1000206000, + 
VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL = 1000209000, + VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL = 1000210000, + VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL = 1000210001, + VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL = 1000210002, + VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL = 1000210003, + VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL = 1000210004, + VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL = 1000210005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT = 1000212000, + VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD = 1000213000, + VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD = 1000213001, + VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA = 1000214000, + VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT = 1000217000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT = 1000218000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT = 1000218001, + VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT = 1000218002, + VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000226000, + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR = 1000226001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR = 1000226002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR = 1000226003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR = 1000226004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD = 1000227000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD = 1000229000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT = 1000234000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT = 1000237000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT = 1000238000, + VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT = 1000238001, + VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR = 1000239000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV = 1000240000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT = 1000244000, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT = 1000244002, + VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT = 1000247000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR = 1000248000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV = 1000249000, + VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV = 1000250000, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV = 1000250001, + VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV = 1000250002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT = 1000251000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT = 1000252000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT = 1000254000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT = 1000254001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT = 1000254002, + VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT = 1000255000, + 
VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT = 1000255002, + VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT = 1000255001, + VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT = 1000256000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = 1000259000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = 1000259001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = 1000259002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT = 1000260000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = 1000265000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT = 1000267000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR = 1000269000, + VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR = 1000269001, + VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR = 1000269002, + VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR = 1000269003, + VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR = 1000269004, + VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR = 1000269005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT = 1000270000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT = 1000270001, + VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT = 1000270002, + VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT = 1000270003, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT = 1000270004, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT = 1000270005, + VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT = 1000270006, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT = 1000270007, + VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT = 1000270008, + VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT = 1000270009, + VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR = 1000271000, + VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR = 1000271001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT = 1000273000, + VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT = 1000274000, + VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT = 1000274001, + VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT = 1000274002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT = 1000275000, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT = 1000275001, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT = 1000275002, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT = 1000275003, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT = 1000275004, + VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT = 1000275005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV = 1000277000, + VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV = 1000277001, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV = 1000277002, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV = 1000277003, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV = 1000277004, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV = 1000277005, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV = 1000277006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV = 1000277007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV = 1000278000, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV = 1000278001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT = 1000281000, 
+ VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM = 1000282000, + VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM = 1000282001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_BIAS_CONTROL_FEATURES_EXT = 1000283000, + VK_STRUCTURE_TYPE_DEPTH_BIAS_INFO_EXT = 1000283001, + VK_STRUCTURE_TYPE_DEPTH_BIAS_REPRESENTATION_INFO_EXT = 1000283002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT = 1000284000, + VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT = 1000284001, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT = 1000284002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT = 1000286000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT = 1000286001, + VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT = 1000287000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT = 1000287001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT = 1000287002, + VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR = 1000290000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV = 1000292000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV = 1000292001, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV = 1000292002, + VK_STRUCTURE_TYPE_PRESENT_ID_KHR = 1000294000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR = 1000294001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR = 1000299000, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR = 1000299001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR = 1000299002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR = 1000299003, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR = 1000299004, + VK_STRUCTURE_TYPE_QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR = 1000299005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR = 1000299006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_PROPERTIES_KHR = 1000299007, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR = 1000299008, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR = 1000299009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR = 1000299010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000, + VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001, + VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV = 1000307000, + VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV = 1000307001, + VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV = 1000307002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV = 1000307003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV = 1000307004, + VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV = 1000310000, + VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT = 1000311000, + VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT = 1000311001, + VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT = 1000311002, + VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT = 1000311003, + VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT = 1000311004, + VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT = 1000311005, + VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT = 1000311006, + VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT = 1000311007, + VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT = 1000311008, + VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT = 1000311009, + VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311010, + 
VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311011, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008, + VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT = 1000316000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT = 1000316001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT = 1000316002, + VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT = 1000316003, + VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT = 1000316004, + VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316005, + VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316006, + VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316007, + VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316008, + VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT = 1000316010, + VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT = 1000316011, + VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT = 1000316012, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316009, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT = 1000320000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT = 1000320001, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT = 1000320002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD = 1000321000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR = 1000203000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR = 1000322000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR = 1000323000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV = 1000326000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV = 1000326001, + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV = 1000326002, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV = 1000327000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV = 1000327001, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV = 1000327002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT = 1000328000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT = 1000328001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT = 1000330000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT = 1000332000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT = 1000332001, + VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM = 1000333000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR = 1000336000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT = 1000338000, + VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT = 1000338001, + VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT = 1000338004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT = 1000339000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT = 1000340000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT = 1000341000, + VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT = 1000341001, + VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT = 1000341002, + 
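/* Note: VkStructureType values are allocated per extension rather than sorted numerically, and an enum promoted from one extension to another keeps its original number (e.g. VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR retains the NV-era value 1000203000). */ +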
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT = 1000344000, + VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT = 1000346000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT = 1000352000, + VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT = 1000352001, + VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT = 1000352002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT = 1000353000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT = 1000354000, + VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT = 1000354001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT = 1000355000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT = 1000355001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT = 1000356000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364000, + VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA = 1000364001, + VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364002, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365000, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365001, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA = 1000366000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA = 1000366001, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA = 1000366002, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA = 1000366003, + VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA = 1000366004, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA = 1000366005, + VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA = 1000366006, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA = 1000366007, + VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA = 1000366008, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA = 1000366009, + VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI = 1000369000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI = 1000369001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI = 1000369002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI = 1000370000, + VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV = 1000371000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV = 1000371001, + VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT = 1000372000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT = 1000372001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAME_BOUNDARY_FEATURES_EXT = 1000375000, + VK_STRUCTURE_TYPE_FRAME_BOUNDARY_EXT = 1000375001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT = 1000376000, + VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT = 1000376001, + VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT = 1000376002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT = 1000377000, + VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX = 1000378000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT = 1000381000, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT = 1000381001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT = 1000382000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR = 1000386000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT = 1000391000, + 
VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT = 1000391001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT = 1000392000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT = 1000392001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT = 1000393000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT = 1000395000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT = 1000395001, + VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT = 1000396000, + VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT = 1000396001, + VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT = 1000396002, + VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT = 1000396003, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT = 1000396004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT = 1000396005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT = 1000396006, + VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT = 1000396007, + VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT = 1000396008, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT = 1000396009, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_FEATURES_NV = 1000397000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV = 1000397001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV = 1000397002, +#endif + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI = 1000404000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI = 1000404001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI = 1000404002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT = 1000411000, + VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT = 1000411001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT = 1000412000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM = 1000415000, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM = 1000417000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM = 1000417001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM = 1000417002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT = 1000418000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT = 1000418001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE = 1000420000, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE = 1000420001, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE = 1000420002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT = 1000421000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT = 1000422000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM = 1000424000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM = 1000424001, + VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_BEGIN_INFO_ARM = 1000424002, + VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_INFO_ARM = 1000424003, + VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_SUBMIT_INFO_ARM = 1000424004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM = 1000425000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM = 1000425001, + 
VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM = 1000425002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV = 1000426000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV = 1000426001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV = 1000427000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV = 1000427001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV = 1000428000, + VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV = 1000428001, + VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV = 1000428002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV = 1000430000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT = 1000437000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM = 1000440000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM = 1000440001, + VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM = 1000440002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT = 1000451000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT = 1000451001, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT = 1000453000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT = 1000455000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT = 1000455001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT = 1000458000, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT = 1000458001, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT = 1000458002, + VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT = 1000458003, + VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG = 1000459000, + VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG = 1000459001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT = 1000462000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT = 1000462001, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT = 1000462002, + VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT = 1000462003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT = 1000342000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV = 1000464000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV = 1000464001, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV = 1000464002, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV = 1000464003, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV = 1000464004, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV = 1000464005, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV = 1000464010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT = 1000465000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT = 1000466000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID = 1000468000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID = 1000468001, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID = 1000468002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR = 1000470000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR = 1000470001, + VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR = 
1000470003, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR = 1000470004, + VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR = 1000338002, + VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR = 1000338003, + VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR = 1000470005, + VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR = 1000470006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR = 1000481000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT = 1000482000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT = 1000482001, + VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT = 1000482002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM = 1000484000, + VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM = 1000484001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC = 1000485000, + VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC = 1000485001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM = 1000488000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV = 1000490000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV = 1000490001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV = 1000492000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV = 1000492001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT = 1000351000, + VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT = 1000351002, + VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT = 1000496000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM = 1000497000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM = 1000497001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT = 1000498000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT = 1000499000, + VK_STRUCTURE_TYPE_LATENCY_SLEEP_MODE_INFO_NV = 1000505000, + VK_STRUCTURE_TYPE_LATENCY_SLEEP_INFO_NV = 1000505001, + VK_STRUCTURE_TYPE_SET_LATENCY_MARKER_INFO_NV = 1000505002, + VK_STRUCTURE_TYPE_GET_LATENCY_MARKER_INFO_NV = 1000505003, + VK_STRUCTURE_TYPE_LATENCY_TIMINGS_FRAME_REPORT_NV = 1000505004, + VK_STRUCTURE_TYPE_LATENCY_SUBMISSION_PRESENT_ID_NV = 1000505005, + VK_STRUCTURE_TYPE_OUT_OF_BAND_QUEUE_TYPE_INFO_NV = 1000505006, + VK_STRUCTURE_TYPE_SWAPCHAIN_LATENCY_CREATE_INFO_NV = 1000505007, + VK_STRUCTURE_TYPE_LATENCY_SURFACE_CAPABILITIES_NV = 1000505008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR = 1000506000, + VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR = 1000506001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR = 1000506002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM = 1000510000, + VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM = 1000510001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR = 1000515000, + VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR = 1000515001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV = 1000516000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_FEATURES_QCOM = 1000518000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_PROPERTIES_QCOM = 1000518001, + VK_STRUCTURE_TYPE_SAMPLER_BLOCK_MATCH_WINDOW_CREATE_INFO_QCOM = 1000518002, + VK_STRUCTURE_TYPE_SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM = 1000519000, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_WEIGHTS_FEATURES_QCOM = 1000519001, + VK_STRUCTURE_TYPE_BLIT_IMAGE_CUBIC_WEIGHTS_INFO_QCOM = 1000519002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_DEGAMMA_FEATURES_QCOM = 1000520000, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM = 1000520001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM = 1000521000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT = 1000524000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR = 1000525000, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR = 1000190001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR = 1000190002, + VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX = 1000529000, + VK_STRUCTURE_TYPE_SCREEN_BUFFER_FORMAT_PROPERTIES_QNX = 1000529001, + VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX = 1000529002, + VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_QNX = 1000529003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX = 1000529004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT = 1000530000, + VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR = 1000184000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR = 1000545000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR = 1000545001, + VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS_KHR = 1000545002, + VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR = 1000545003, + VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR = 1000545004, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR = 1000545005, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR = 1000545006, + VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT = 1000545007, + VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT = 1000545008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV = 1000546000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, + VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + VK_STRUCTURE_TYPE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_INFO, + VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO, + VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO, + VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, + VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + 
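/* The NAME = OTHER_NAME entries in this region are backwards-compatibility aliases: each keeps a legacy extension-era identifier usable by mapping it onto the canonical value the enum received when it was promoted to core or adopted by a newer extension. */ +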
VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + 
VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO, + VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2, + VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2, + VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, + VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES, + VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + 
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
+    VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
+    VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
+    VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
+    VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
+    VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
+    VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
+    VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES,
+    VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR,
+    VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR,
+    VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES,
+    VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES,
+    VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
+    VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
+    VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
+    VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES,
+    VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES,
+    VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT,
+    VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT,
+    VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES,
+    VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES,
+    VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
+    VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO,
+    VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO,
+    VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES,
+    VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES,
+    VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2,
+    VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
+    VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2,
+    VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES,
+    VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2,
+    VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2,
+    VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2,
+    VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2,
+    VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2,
+    VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2,
+    VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2,
+    VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2,
+    VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2,
+    VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2,
+    VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2,
+    VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR,
+    VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT,
+    VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT,
+    VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3,
+    VK_STRUCTURE_TYPE_PIPELINE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES,
+    VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS,
+    VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS,
+    VK_STRUCTURE_TYPE_SHADER_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkStructureType;
+
+typedef enum VkPipelineCacheHeaderVersion {
+    VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1,
+    VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCacheHeaderVersion;
+
+typedef enum VkImageLayout {
+    VK_IMAGE_LAYOUT_UNDEFINED = 0,
+    VK_IMAGE_LAYOUT_GENERAL = 1,
+    VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2,
+    VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3,
+    VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4,
+    VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5,
+    VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6,
+    VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7,
+    VK_IMAGE_LAYOUT_PREINITIALIZED = 8,
+    VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000,
+    VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001,
+    VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL = 1000241000,
+    VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL = 1000241001,
+    VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL = 1000241002,
+    VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL = 1000241003,
+    VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL = 1000314000,
+    VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL = 1000314001,
+    VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002,
+    VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR = 1000024000,
+    VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR = 1000024001,
+    VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR = 1000024002,
+    VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000,
+    VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000,
+    VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR = 1000164003,
+    VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR = 1000299000,
+    VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR = 1000299001,
+    VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR = 1000299002,
+    VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT = 1000339000,
+    VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
+    VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+    VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR,
+    VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
+    VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL,
+    VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
+    VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL,
+    VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL,
+    VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL,
+    VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF
+} VkImageLayout;
+
+typedef enum VkObjectType {
+    VK_OBJECT_TYPE_UNKNOWN = 0,
+    VK_OBJECT_TYPE_INSTANCE = 1,
+    VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2,
+    VK_OBJECT_TYPE_DEVICE = 3,
+    VK_OBJECT_TYPE_QUEUE = 4,
+    VK_OBJECT_TYPE_SEMAPHORE = 5,
+    VK_OBJECT_TYPE_COMMAND_BUFFER = 6,
+    VK_OBJECT_TYPE_FENCE = 7,
+    VK_OBJECT_TYPE_DEVICE_MEMORY = 8,
+    VK_OBJECT_TYPE_BUFFER = 9,
+    VK_OBJECT_TYPE_IMAGE = 10,
+    VK_OBJECT_TYPE_EVENT = 11,
+    VK_OBJECT_TYPE_QUERY_POOL = 12,
+    VK_OBJECT_TYPE_BUFFER_VIEW = 13,
+    VK_OBJECT_TYPE_IMAGE_VIEW = 14,
+    VK_OBJECT_TYPE_SHADER_MODULE = 15,
+    VK_OBJECT_TYPE_PIPELINE_CACHE = 16,
+    VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17,
+    VK_OBJECT_TYPE_RENDER_PASS = 18,
+    VK_OBJECT_TYPE_PIPELINE = 19,
+    VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20,
+    VK_OBJECT_TYPE_SAMPLER = 21,
+    VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22,
+    VK_OBJECT_TYPE_DESCRIPTOR_SET = 23,
+    VK_OBJECT_TYPE_FRAMEBUFFER = 24,
+    VK_OBJECT_TYPE_COMMAND_POOL = 25,
+    VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000,
+    VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000,
+    VK_OBJECT_TYPE_PRIVATE_DATA_SLOT = 1000295000,
+    VK_OBJECT_TYPE_SURFACE_KHR = 1000000000,
+    VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000,
+    VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000,
+    VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001,
+    VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000,
+    VK_OBJECT_TYPE_VIDEO_SESSION_KHR = 1000023000,
+    VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR = 1000023001,
+    VK_OBJECT_TYPE_CU_MODULE_NVX = 1000029000,
+    VK_OBJECT_TYPE_CU_FUNCTION_NVX = 1000029001,
+    VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000,
+    VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000,
+    VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000,
+    VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000,
+    VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000,
+    VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000,
+    VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000,
+    VK_OBJECT_TYPE_CUDA_MODULE_NV = 1000307000,
+    VK_OBJECT_TYPE_CUDA_FUNCTION_NV = 1000307001,
+    VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA = 1000366000,
+    VK_OBJECT_TYPE_MICROMAP_EXT = 1000396000,
+    VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV = 1000464000,
+    VK_OBJECT_TYPE_SHADER_EXT = 1000482000,
+    VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
+    VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
+    VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT,
+    VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkObjectType;
+
+typedef enum VkVendorId {
+    VK_VENDOR_ID_VIV = 0x10001,
+    VK_VENDOR_ID_VSI = 0x10002,
+    VK_VENDOR_ID_KAZAN = 0x10003,
+    VK_VENDOR_ID_CODEPLAY = 0x10004,
+    VK_VENDOR_ID_MESA = 0x10005,
+    VK_VENDOR_ID_POCL = 0x10006,
+    VK_VENDOR_ID_MOBILEYE = 0x10007,
+    VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF
+} VkVendorId;
+
+typedef enum VkSystemAllocationScope {
+    VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0,
+    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1,
+    VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2,
+    VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3,
+    VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4,
+    VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF
+} VkSystemAllocationScope;
+
+typedef enum VkInternalAllocationType {
+    VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0,
+    VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkInternalAllocationType;
+
+typedef enum VkFormat {
+    VK_FORMAT_UNDEFINED = 0,
+    VK_FORMAT_R4G4_UNORM_PACK8 = 1,
+    VK_FORMAT_R4G4B4A4_UNORM_PACK16 = 2,
+    VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3,
+    VK_FORMAT_R5G6B5_UNORM_PACK16 = 4,
+    VK_FORMAT_B5G6R5_UNORM_PACK16 = 5,
+    VK_FORMAT_R5G5B5A1_UNORM_PACK16 = 6,
+    VK_FORMAT_B5G5R5A1_UNORM_PACK16 = 7,
+    VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8,
+    VK_FORMAT_R8_UNORM = 9,
+    VK_FORMAT_R8_SNORM = 10,
+    VK_FORMAT_R8_USCALED = 11,
+    VK_FORMAT_R8_SSCALED = 12,
+    VK_FORMAT_R8_UINT = 13,
+    VK_FORMAT_R8_SINT = 14,
+    VK_FORMAT_R8_SRGB = 15,
+    VK_FORMAT_R8G8_UNORM = 16,
+    VK_FORMAT_R8G8_SNORM = 17,
+    VK_FORMAT_R8G8_USCALED = 18,
+    VK_FORMAT_R8G8_SSCALED = 19,
+    VK_FORMAT_R8G8_UINT = 20,
+    VK_FORMAT_R8G8_SINT = 21,
+    VK_FORMAT_R8G8_SRGB = 22,
+    VK_FORMAT_R8G8B8_UNORM = 23,
+    VK_FORMAT_R8G8B8_SNORM = 24,
+    VK_FORMAT_R8G8B8_USCALED = 25,
+    VK_FORMAT_R8G8B8_SSCALED = 26,
+    VK_FORMAT_R8G8B8_UINT = 27,
+    VK_FORMAT_R8G8B8_SINT = 28,
+    VK_FORMAT_R8G8B8_SRGB = 29,
+    VK_FORMAT_B8G8R8_UNORM = 30,
+    VK_FORMAT_B8G8R8_SNORM = 31,
+    VK_FORMAT_B8G8R8_USCALED = 32,
+    VK_FORMAT_B8G8R8_SSCALED = 33,
+    VK_FORMAT_B8G8R8_UINT = 34,
+    VK_FORMAT_B8G8R8_SINT = 35,
+    VK_FORMAT_B8G8R8_SRGB = 36,
+    VK_FORMAT_R8G8B8A8_UNORM = 37,
+    VK_FORMAT_R8G8B8A8_SNORM = 38,
+    VK_FORMAT_R8G8B8A8_USCALED = 39,
+    VK_FORMAT_R8G8B8A8_SSCALED = 40,
+    VK_FORMAT_R8G8B8A8_UINT = 41,
+    VK_FORMAT_R8G8B8A8_SINT = 42,
+    VK_FORMAT_R8G8B8A8_SRGB = 43,
+    VK_FORMAT_B8G8R8A8_UNORM = 44,
+    VK_FORMAT_B8G8R8A8_SNORM = 45,
+    VK_FORMAT_B8G8R8A8_USCALED = 46,
+    VK_FORMAT_B8G8R8A8_SSCALED = 47,
+    VK_FORMAT_B8G8R8A8_UINT = 48,
+    VK_FORMAT_B8G8R8A8_SINT = 49,
+    VK_FORMAT_B8G8R8A8_SRGB = 50,
+    VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51,
+    VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52,
+    VK_FORMAT_A8B8G8R8_USCALED_PACK32 = 53,
+    VK_FORMAT_A8B8G8R8_SSCALED_PACK32 = 54,
+    VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55,
+    VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56,
+    VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57,
+    VK_FORMAT_A2R10G10B10_UNORM_PACK32 = 58,
+    VK_FORMAT_A2R10G10B10_SNORM_PACK32 = 59,
+    VK_FORMAT_A2R10G10B10_USCALED_PACK32 = 60,
+    VK_FORMAT_A2R10G10B10_SSCALED_PACK32 = 61,
+    VK_FORMAT_A2R10G10B10_UINT_PACK32 = 62,
+    VK_FORMAT_A2R10G10B10_SINT_PACK32 = 63,
+    VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64,
+    VK_FORMAT_A2B10G10R10_SNORM_PACK32 = 65,
+    VK_FORMAT_A2B10G10R10_USCALED_PACK32 = 66,
+    VK_FORMAT_A2B10G10R10_SSCALED_PACK32 = 67,
+    VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68,
+    VK_FORMAT_A2B10G10R10_SINT_PACK32 = 69,
+    VK_FORMAT_R16_UNORM = 70,
+    VK_FORMAT_R16_SNORM = 71,
+    VK_FORMAT_R16_USCALED = 72,
+    VK_FORMAT_R16_SSCALED = 73,
+    VK_FORMAT_R16_UINT = 74,
+    VK_FORMAT_R16_SINT = 75,
+    VK_FORMAT_R16_SFLOAT = 76,
+    VK_FORMAT_R16G16_UNORM = 77,
+    VK_FORMAT_R16G16_SNORM = 78,
+    VK_FORMAT_R16G16_USCALED = 79,
+    VK_FORMAT_R16G16_SSCALED = 80,
+    VK_FORMAT_R16G16_UINT = 81,
+    VK_FORMAT_R16G16_SINT = 82,
+    VK_FORMAT_R16G16_SFLOAT = 83,
+    VK_FORMAT_R16G16B16_UNORM = 84,
+    VK_FORMAT_R16G16B16_SNORM = 85,
+    VK_FORMAT_R16G16B16_USCALED = 86,
+    VK_FORMAT_R16G16B16_SSCALED = 87,
+    VK_FORMAT_R16G16B16_UINT = 88,
+    VK_FORMAT_R16G16B16_SINT = 89,
+    VK_FORMAT_R16G16B16_SFLOAT = 90,
+    VK_FORMAT_R16G16B16A16_UNORM = 91,
+    VK_FORMAT_R16G16B16A16_SNORM = 92,
+    VK_FORMAT_R16G16B16A16_USCALED = 93,
+    VK_FORMAT_R16G16B16A16_SSCALED = 94,
+    VK_FORMAT_R16G16B16A16_UINT = 95,
+    VK_FORMAT_R16G16B16A16_SINT = 96,
+    VK_FORMAT_R16G16B16A16_SFLOAT = 97,
+    VK_FORMAT_R32_UINT = 98,
+    VK_FORMAT_R32_SINT = 99,
+    VK_FORMAT_R32_SFLOAT = 100,
+    VK_FORMAT_R32G32_UINT = 101,
+    VK_FORMAT_R32G32_SINT = 102,
+    VK_FORMAT_R32G32_SFLOAT = 103,
+    VK_FORMAT_R32G32B32_UINT = 104,
+    VK_FORMAT_R32G32B32_SINT = 105,
+    VK_FORMAT_R32G32B32_SFLOAT = 106,
+    VK_FORMAT_R32G32B32A32_UINT = 107,
+    VK_FORMAT_R32G32B32A32_SINT = 108,
+    VK_FORMAT_R32G32B32A32_SFLOAT = 109,
+    VK_FORMAT_R64_UINT = 110,
+    VK_FORMAT_R64_SINT = 111,
+    VK_FORMAT_R64_SFLOAT = 112,
+    VK_FORMAT_R64G64_UINT = 113,
+    VK_FORMAT_R64G64_SINT = 114,
+    VK_FORMAT_R64G64_SFLOAT = 115,
+    VK_FORMAT_R64G64B64_UINT = 116,
+    VK_FORMAT_R64G64B64_SINT = 117,
+    VK_FORMAT_R64G64B64_SFLOAT = 118,
+    VK_FORMAT_R64G64B64A64_UINT = 119,
+    VK_FORMAT_R64G64B64A64_SINT = 120,
+    VK_FORMAT_R64G64B64A64_SFLOAT = 121,
+    VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122,
+    VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123,
+    VK_FORMAT_D16_UNORM = 124,
+    VK_FORMAT_X8_D24_UNORM_PACK32 = 125,
+    VK_FORMAT_D32_SFLOAT = 126,
+    VK_FORMAT_S8_UINT = 127,
+    VK_FORMAT_D16_UNORM_S8_UINT = 128,
+    VK_FORMAT_D24_UNORM_S8_UINT = 129,
+    VK_FORMAT_D32_SFLOAT_S8_UINT = 130,
+    VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131,
+    VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132,
+    VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133,
+    VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134,
+    VK_FORMAT_BC2_UNORM_BLOCK = 135,
+    VK_FORMAT_BC2_SRGB_BLOCK = 136,
+    VK_FORMAT_BC3_UNORM_BLOCK = 137,
+    VK_FORMAT_BC3_SRGB_BLOCK = 138,
+    VK_FORMAT_BC4_UNORM_BLOCK = 139,
+    VK_FORMAT_BC4_SNORM_BLOCK = 140,
+    VK_FORMAT_BC5_UNORM_BLOCK = 141,
+    VK_FORMAT_BC5_SNORM_BLOCK = 142,
+    VK_FORMAT_BC6H_UFLOAT_BLOCK = 143,
+    VK_FORMAT_BC6H_SFLOAT_BLOCK = 144,
+    VK_FORMAT_BC7_UNORM_BLOCK = 145,
+    VK_FORMAT_BC7_SRGB_BLOCK = 146,
+    VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147,
+    VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148,
+    VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149,
+    VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150,
+    VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151,
+    VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152,
+    VK_FORMAT_EAC_R11_UNORM_BLOCK = 153,
+    VK_FORMAT_EAC_R11_SNORM_BLOCK = 154,
+    VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155,
+    VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156,
+    VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157,
+    VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158,
+    VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159,
+    VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160,
+    VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161,
+    VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162,
+    VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163,
+    VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164,
+    VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165,
+    VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166,
+    VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167,
+    VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168,
+    VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169,
+    VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170,
+    VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171,
+    VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 172,
+    VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173,
+    VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174,
+    VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175,
+    VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176,
+    VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177,
+    VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178,
+    VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179,
+    VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180,
+    VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181,
+    VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182,
+    VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183,
+    VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184,
+    VK_FORMAT_G8B8G8R8_422_UNORM = 1000156000,
+    VK_FORMAT_B8G8R8G8_422_UNORM = 1000156001,
+    VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM = 1000156002,
+    VK_FORMAT_G8_B8R8_2PLANE_420_UNORM = 1000156003,
+    VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM = 1000156004,
+    VK_FORMAT_G8_B8R8_2PLANE_422_UNORM = 1000156005,
+    VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM = 1000156006,
+    VK_FORMAT_R10X6_UNORM_PACK16 = 1000156007,
+    VK_FORMAT_R10X6G10X6_UNORM_2PACK16 = 1000156008,
+    VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16 = 1000156009,
+    VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 = 1000156010,
+    VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 = 1000156011,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 = 1000156012,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 = 1000156013,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 = 1000156014,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 = 1000156015,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 = 1000156016,
+    VK_FORMAT_R12X4_UNORM_PACK16 = 1000156017,
+    VK_FORMAT_R12X4G12X4_UNORM_2PACK16 = 1000156018,
+    VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16 = 1000156019,
+    VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 = 1000156020,
+    VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 = 1000156021,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 = 1000156022,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 = 1000156023,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 = 1000156024,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 = 1000156025,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 = 1000156026,
+    VK_FORMAT_G16B16G16R16_422_UNORM = 1000156027,
+    VK_FORMAT_B16G16R16G16_422_UNORM = 1000156028,
+    VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM = 1000156029,
+    VK_FORMAT_G16_B16R16_2PLANE_420_UNORM = 1000156030,
+    VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM = 1000156031,
+    VK_FORMAT_G16_B16R16_2PLANE_422_UNORM = 1000156032,
+    VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM = 1000156033,
+    VK_FORMAT_G8_B8R8_2PLANE_444_UNORM = 1000330000,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16 = 1000330001,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16 = 1000330002,
+    VK_FORMAT_G16_B16R16_2PLANE_444_UNORM = 1000330003,
+    VK_FORMAT_A4R4G4B4_UNORM_PACK16 = 1000340000,
+    VK_FORMAT_A4B4G4R4_UNORM_PACK16 = 1000340001,
+    VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK = 1000066000,
+    VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK = 1000066001,
+    VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK = 1000066002,
+    VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK = 1000066003,
+    VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK = 1000066004,
+    VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK = 1000066005,
+    VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK = 1000066006,
+    VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK = 1000066007,
+    VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK = 1000066008,
+    VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK = 1000066009,
+    VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK = 1000066010,
+    VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK = 1000066011,
+    VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK = 1000066012,
+    VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK = 1000066013,
+    VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000,
+    VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001,
+    VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002,
+    VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG = 1000054003,
+    VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG = 1000054004,
+    VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005,
+    VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006,
+    VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007,
+    VK_FORMAT_R16G16_S10_5_NV = 1000464000,
+    VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR = 1000470000,
+    VK_FORMAT_A8_UNORM_KHR = 1000470001,
+    VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK,
+    VK_FORMAT_G8B8G8R8_422_UNORM_KHR = VK_FORMAT_G8B8G8R8_422_UNORM,
+    VK_FORMAT_B8G8R8G8_422_UNORM_KHR = VK_FORMAT_B8G8R8G8_422_UNORM,
+    VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
+    VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
+    VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
+    VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
+    VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
+    VK_FORMAT_R10X6_UNORM_PACK16_KHR = VK_FORMAT_R10X6_UNORM_PACK16,
+    VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
+    VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
+    VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
+    VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
+    VK_FORMAT_R12X4_UNORM_PACK16_KHR = VK_FORMAT_R12X4_UNORM_PACK16,
+    VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
+    VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
+    VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
+    VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
+    VK_FORMAT_G16B16G16R16_422_UNORM_KHR = VK_FORMAT_G16B16G16R16_422_UNORM,
+    VK_FORMAT_B16G16R16G16_422_UNORM_KHR = VK_FORMAT_B16G16R16G16_422_UNORM,
+    VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
+    VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
+    VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
+    VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
+    VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
+    VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16,
+    VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM,
+    VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16,
+    VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16,
+    VK_FORMAT_MAX_ENUM = 0x7FFFFFFF
+} VkFormat;
+
+typedef enum VkImageTiling {
+    VK_IMAGE_TILING_OPTIMAL = 0,
+    VK_IMAGE_TILING_LINEAR = 1,
+    VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT = 1000158000,
+    VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF
+} VkImageTiling;
+
+typedef enum VkImageType {
+    VK_IMAGE_TYPE_1D = 0,
+    VK_IMAGE_TYPE_2D = 1,
+    VK_IMAGE_TYPE_3D = 2,
+    VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkImageType;
+
+typedef enum VkPhysicalDeviceType {
+    VK_PHYSICAL_DEVICE_TYPE_OTHER = 0,
+    VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1,
+    VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2,
+    VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3,
+    VK_PHYSICAL_DEVICE_TYPE_CPU = 4,
+    VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkPhysicalDeviceType;
+
+typedef enum VkQueryType {
+    VK_QUERY_TYPE_OCCLUSION = 0,
+    VK_QUERY_TYPE_PIPELINE_STATISTICS = 1,
+    VK_QUERY_TYPE_TIMESTAMP = 2,
+    VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR = 1000023000,
+    VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT = 1000028004,
+    VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR = 1000116000,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR = 1000150000,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR = 1000150001,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = 1000165000,
+    VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL = 1000210000,
+    VK_QUERY_TYPE_VIDEO_ENCODE_FEEDBACK_KHR = 1000299000,
+    VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT = 1000328000,
+    VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT = 1000382000,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR = 1000386000,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR = 1000386001,
+    VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT = 1000396000,
+    VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT = 1000396001,
+    VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkQueryType;
+
+typedef enum VkSharingMode {
+    VK_SHARING_MODE_EXCLUSIVE = 0,
+    VK_SHARING_MODE_CONCURRENT = 1,
+    VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSharingMode;
+
+typedef enum VkComponentSwizzle {
+    VK_COMPONENT_SWIZZLE_IDENTITY = 0,
+    VK_COMPONENT_SWIZZLE_ZERO = 1,
+    VK_COMPONENT_SWIZZLE_ONE = 2,
+    VK_COMPONENT_SWIZZLE_R = 3,
+    VK_COMPONENT_SWIZZLE_G = 4,
+    VK_COMPONENT_SWIZZLE_B = 5,
+    VK_COMPONENT_SWIZZLE_A = 6,
+    VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF
+} VkComponentSwizzle;
+
+typedef enum VkImageViewType {
+    VK_IMAGE_VIEW_TYPE_1D = 0,
+    VK_IMAGE_VIEW_TYPE_2D = 1,
+    VK_IMAGE_VIEW_TYPE_3D = 2,
+    VK_IMAGE_VIEW_TYPE_CUBE = 3,
+    VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4,
+    VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5,
+    VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6,
+    VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkImageViewType;
+
+typedef enum VkBlendFactor {
+    VK_BLEND_FACTOR_ZERO = 0,
+    VK_BLEND_FACTOR_ONE = 1,
+    VK_BLEND_FACTOR_SRC_COLOR = 2,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR = 3,
+    VK_BLEND_FACTOR_DST_COLOR = 4,
+    VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR = 5,
+    VK_BLEND_FACTOR_SRC_ALPHA = 6,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA = 7,
+    VK_BLEND_FACTOR_DST_ALPHA = 8,
+    VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA = 9,
+    VK_BLEND_FACTOR_CONSTANT_COLOR = 10,
+    VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR = 11,
+    VK_BLEND_FACTOR_CONSTANT_ALPHA = 12,
+    VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA = 13,
+    VK_BLEND_FACTOR_SRC_ALPHA_SATURATE = 14,
+    VK_BLEND_FACTOR_SRC1_COLOR = 15,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16,
+    VK_BLEND_FACTOR_SRC1_ALPHA = 17,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18,
+    VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF
+} VkBlendFactor;
+
+typedef enum VkBlendOp {
+    VK_BLEND_OP_ADD = 0,
+    VK_BLEND_OP_SUBTRACT = 1,
+    VK_BLEND_OP_REVERSE_SUBTRACT = 2,
+    VK_BLEND_OP_MIN = 3,
+    VK_BLEND_OP_MAX = 4,
+    VK_BLEND_OP_ZERO_EXT = 1000148000,
+    VK_BLEND_OP_SRC_EXT = 1000148001,
+    VK_BLEND_OP_DST_EXT = 1000148002,
+    VK_BLEND_OP_SRC_OVER_EXT = 1000148003,
+    VK_BLEND_OP_DST_OVER_EXT = 1000148004,
+    VK_BLEND_OP_SRC_IN_EXT = 1000148005,
+    VK_BLEND_OP_DST_IN_EXT = 1000148006,
+    VK_BLEND_OP_SRC_OUT_EXT = 1000148007,
+    VK_BLEND_OP_DST_OUT_EXT = 1000148008,
+    VK_BLEND_OP_SRC_ATOP_EXT = 1000148009,
+    VK_BLEND_OP_DST_ATOP_EXT = 1000148010,
+    VK_BLEND_OP_XOR_EXT = 1000148011,
+    VK_BLEND_OP_MULTIPLY_EXT = 1000148012,
+    VK_BLEND_OP_SCREEN_EXT = 1000148013,
+    VK_BLEND_OP_OVERLAY_EXT = 1000148014,
+    VK_BLEND_OP_DARKEN_EXT = 1000148015,
+    VK_BLEND_OP_LIGHTEN_EXT = 1000148016,
+    VK_BLEND_OP_COLORDODGE_EXT = 1000148017,
+    VK_BLEND_OP_COLORBURN_EXT = 1000148018,
+    VK_BLEND_OP_HARDLIGHT_EXT = 1000148019,
+    VK_BLEND_OP_SOFTLIGHT_EXT = 1000148020,
+    VK_BLEND_OP_DIFFERENCE_EXT = 1000148021,
+    VK_BLEND_OP_EXCLUSION_EXT = 1000148022,
+    VK_BLEND_OP_INVERT_EXT = 1000148023,
+    VK_BLEND_OP_INVERT_RGB_EXT = 1000148024,
+    VK_BLEND_OP_LINEARDODGE_EXT = 1000148025,
+    VK_BLEND_OP_LINEARBURN_EXT = 1000148026,
+    VK_BLEND_OP_VIVIDLIGHT_EXT = 1000148027,
+    VK_BLEND_OP_LINEARLIGHT_EXT = 1000148028,
+    VK_BLEND_OP_PINLIGHT_EXT = 1000148029,
+    VK_BLEND_OP_HARDMIX_EXT = 1000148030,
+    VK_BLEND_OP_HSL_HUE_EXT = 1000148031,
+    VK_BLEND_OP_HSL_SATURATION_EXT = 1000148032,
+    VK_BLEND_OP_HSL_COLOR_EXT = 1000148033,
+    VK_BLEND_OP_HSL_LUMINOSITY_EXT = 1000148034,
+    VK_BLEND_OP_PLUS_EXT = 1000148035,
+    VK_BLEND_OP_PLUS_CLAMPED_EXT = 1000148036,
+    VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT = 1000148037,
+    VK_BLEND_OP_PLUS_DARKER_EXT = 1000148038,
+    VK_BLEND_OP_MINUS_EXT = 1000148039,
+    VK_BLEND_OP_MINUS_CLAMPED_EXT = 1000148040,
+    VK_BLEND_OP_CONTRAST_EXT = 1000148041,
+    VK_BLEND_OP_INVERT_OVG_EXT = 1000148042,
+    VK_BLEND_OP_RED_EXT = 1000148043,
+    VK_BLEND_OP_GREEN_EXT = 1000148044,
+    VK_BLEND_OP_BLUE_EXT = 1000148045,
+    VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF
+} VkBlendOp;
+
+typedef enum VkCompareOp {
+    VK_COMPARE_OP_NEVER = 0,
+    VK_COMPARE_OP_LESS = 1,
+    VK_COMPARE_OP_EQUAL = 2,
+    VK_COMPARE_OP_LESS_OR_EQUAL = 3,
+    VK_COMPARE_OP_GREATER = 4,
+    VK_COMPARE_OP_NOT_EQUAL = 5,
+    VK_COMPARE_OP_GREATER_OR_EQUAL = 6,
+    VK_COMPARE_OP_ALWAYS = 7,
+    VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF
+} VkCompareOp;
+
+typedef enum VkDynamicState {
+    VK_DYNAMIC_STATE_VIEWPORT = 0,
+    VK_DYNAMIC_STATE_SCISSOR = 1,
+    VK_DYNAMIC_STATE_LINE_WIDTH = 2,
+    VK_DYNAMIC_STATE_DEPTH_BIAS = 3,
+    VK_DYNAMIC_STATE_BLEND_CONSTANTS = 4,
+    VK_DYNAMIC_STATE_DEPTH_BOUNDS = 5,
+    VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6,
+    VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7,
+    VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8,
+    VK_DYNAMIC_STATE_CULL_MODE = 1000267000,
+    VK_DYNAMIC_STATE_FRONT_FACE = 1000267001,
+    VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY = 1000267002,
+    VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT = 1000267003,
+    VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT = 1000267004,
+    VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE = 1000267005,
+    VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE = 1000267006,
+    VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE = 1000267007,
+    VK_DYNAMIC_STATE_DEPTH_COMPARE_OP = 1000267008,
+    VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE = 1000267009,
+    VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE = 1000267010,
+    VK_DYNAMIC_STATE_STENCIL_OP = 1000267011,
+    VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE = 1000377001,
+    VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE = 1000377002,
+    VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE = 1000377004,
+    VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000,
+    VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000,
+    VK_DYNAMIC_STATE_DISCARD_RECTANGLE_ENABLE_EXT = 1000099001,
+    VK_DYNAMIC_STATE_DISCARD_RECTANGLE_MODE_EXT = 1000099002,
+    VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT = 1000143000,
+    VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR = 1000347000,
+    VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV = 1000164004,
+    VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV = 1000164006,
+    VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_ENABLE_NV = 1000205000,
+    VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV = 1000205001,
+    VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR = 1000226000,
+    VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = 1000259000,
+    VK_DYNAMIC_STATE_VERTEX_INPUT_EXT = 1000352000,
+    VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT = 1000377000,
+    VK_DYNAMIC_STATE_LOGIC_OP_EXT = 1000377003,
+    VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT = 1000381000,
+    VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT = 1000455002,
+    VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT = 1000455003,
+    VK_DYNAMIC_STATE_POLYGON_MODE_EXT = 1000455004,
+    VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT = 1000455005,
+    VK_DYNAMIC_STATE_SAMPLE_MASK_EXT = 1000455006,
+    VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT = 1000455007,
+    VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT = 1000455008,
+    VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT = 1000455009,
+    VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT = 1000455010,
+    VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT = 1000455011,
+    VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT = 1000455012,
+    VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT = 1000455013,
+    VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT = 1000455014,
+    VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT = 1000455015,
+    VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT = 1000455016,
+    VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT = 1000455017,
+    VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT = 1000455018,
+    VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT = 1000455019,
+    VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT = 1000455020,
+    VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT = 1000455021,
+    VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT = 1000455022,
+    VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV = 1000455023,
+    VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV = 1000455024,
+    VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV = 1000455025,
+    VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV = 1000455026,
+    VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV = 1000455027,
+    VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV = 1000455028,
+    VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV = 1000455029,
+    VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV = 1000455030,
+    VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV = 1000455031,
+    VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV = 1000455032,
+    VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT = 1000524000,
+    VK_DYNAMIC_STATE_CULL_MODE_EXT = VK_DYNAMIC_STATE_CULL_MODE,
+    VK_DYNAMIC_STATE_FRONT_FACE_EXT = VK_DYNAMIC_STATE_FRONT_FACE,
+    VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY,
+    VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT,
+    VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT,
+    VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE,
+    VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE,
+    VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE,
+    VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP,
+    VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE,
+    VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE,
+    VK_DYNAMIC_STATE_STENCIL_OP_EXT = VK_DYNAMIC_STATE_STENCIL_OP,
+    VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE,
+    VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE,
+    VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE,
+    VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF
+} VkDynamicState;
+
+typedef enum VkFrontFace {
+    VK_FRONT_FACE_COUNTER_CLOCKWISE = 0,
+    VK_FRONT_FACE_CLOCKWISE = 1,
+    VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF
+} VkFrontFace;
+
+typedef enum VkVertexInputRate {
+    VK_VERTEX_INPUT_RATE_VERTEX = 0,
+    VK_VERTEX_INPUT_RATE_INSTANCE = 1,
+    VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF
+} VkVertexInputRate;
+
+typedef enum VkPrimitiveTopology {
+    VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0,
+    VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1,
+    VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5,
+    VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6,
+    VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9,
+    VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10,
+    VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF
+} VkPrimitiveTopology;
+
+typedef enum VkPolygonMode {
+    VK_POLYGON_MODE_FILL = 0,
+    VK_POLYGON_MODE_LINE = 1,
+    VK_POLYGON_MODE_POINT = 2,
+    VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000,
+    VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkPolygonMode;
+
+typedef enum VkStencilOp {
+    VK_STENCIL_OP_KEEP = 0,
+    VK_STENCIL_OP_ZERO = 1,
+    VK_STENCIL_OP_REPLACE = 2,
+    VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3,
+    VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4,
+    VK_STENCIL_OP_INVERT = 5,
+    VK_STENCIL_OP_INCREMENT_AND_WRAP = 6,
+    VK_STENCIL_OP_DECREMENT_AND_WRAP = 7,
+    VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF
+} VkStencilOp;
+
+typedef enum VkLogicOp {
+    VK_LOGIC_OP_CLEAR = 0,
+    VK_LOGIC_OP_AND = 1,
+    VK_LOGIC_OP_AND_REVERSE = 2,
+    VK_LOGIC_OP_COPY = 3,
+    VK_LOGIC_OP_AND_INVERTED = 4,
+    VK_LOGIC_OP_NO_OP = 5,
+    VK_LOGIC_OP_XOR = 6,
+    VK_LOGIC_OP_OR = 7,
+    VK_LOGIC_OP_NOR = 8,
+    VK_LOGIC_OP_EQUIVALENT = 9,
+    VK_LOGIC_OP_INVERT = 10,
+    VK_LOGIC_OP_OR_REVERSE = 11,
+    VK_LOGIC_OP_COPY_INVERTED = 12,
+    VK_LOGIC_OP_OR_INVERTED = 13,
+    VK_LOGIC_OP_NAND = 14,
+    VK_LOGIC_OP_SET = 15,
+    VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF
+} VkLogicOp;
+
+typedef enum VkBorderColor {
+    VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0,
+    VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1,
+    VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2,
+    VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3,
+    VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4,
+    VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5,
+    VK_BORDER_COLOR_FLOAT_CUSTOM_EXT = 1000287003,
+    VK_BORDER_COLOR_INT_CUSTOM_EXT = 1000287004,
+    VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF
+} VkBorderColor;
+
+typedef enum VkFilter {
+    VK_FILTER_NEAREST = 0,
+    VK_FILTER_LINEAR = 1,
+    VK_FILTER_CUBIC_EXT = 1000015000,
+    VK_FILTER_CUBIC_IMG = VK_FILTER_CUBIC_EXT,
+    VK_FILTER_MAX_ENUM = 0x7FFFFFFF
+} VkFilter;
+
+typedef enum VkSamplerAddressMode {
+    VK_SAMPLER_ADDRESS_MODE_REPEAT = 0,
+    VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1,
+    VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2,
+    VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3,
+    VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4,
+    VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,
+    VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerAddressMode;
+
+typedef enum VkSamplerMipmapMode {
+    VK_SAMPLER_MIPMAP_MODE_NEAREST = 0,
+    VK_SAMPLER_MIPMAP_MODE_LINEAR = 1,
+    VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerMipmapMode;
+
+typedef enum VkDescriptorType {
+    VK_DESCRIPTOR_TYPE_SAMPLER = 0,
+    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1,
+    VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE = 2,
+    VK_DESCRIPTOR_TYPE_STORAGE_IMAGE = 3,
+    VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER = 4,
+    VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER = 5,
+    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER = 6,
+    VK_DESCRIPTOR_TYPE_STORAGE_BUFFER = 7,
+    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8,
+    VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9,
+    VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10,
+    VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK = 1000138000,
+    VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000,
+    VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000,
+    VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM = 1000440000,
+    VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM = 1000440001,
+    VK_DESCRIPTOR_TYPE_MUTABLE_EXT = 1000351000,
+    VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK,
+    VK_DESCRIPTOR_TYPE_MUTABLE_VALVE = VK_DESCRIPTOR_TYPE_MUTABLE_EXT,
+    VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorType;
+
+typedef enum VkAttachmentLoadOp {
+    VK_ATTACHMENT_LOAD_OP_LOAD = 0,
+    VK_ATTACHMENT_LOAD_OP_CLEAR = 1,
+    VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2,
+    VK_ATTACHMENT_LOAD_OP_NONE_EXT = 1000400000,
+    VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentLoadOp;
+
+typedef enum VkAttachmentStoreOp {
+    VK_ATTACHMENT_STORE_OP_STORE = 0,
+    VK_ATTACHMENT_STORE_OP_DONT_CARE = 1,
+    VK_ATTACHMENT_STORE_OP_NONE = 1000301000,
+    VK_ATTACHMENT_STORE_OP_NONE_KHR = VK_ATTACHMENT_STORE_OP_NONE,
+    VK_ATTACHMENT_STORE_OP_NONE_QCOM = VK_ATTACHMENT_STORE_OP_NONE,
+    VK_ATTACHMENT_STORE_OP_NONE_EXT = VK_ATTACHMENT_STORE_OP_NONE,
+    VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentStoreOp;
+
+typedef enum VkPipelineBindPoint {
+    VK_PIPELINE_BIND_POINT_GRAPHICS = 0,
+    VK_PIPELINE_BIND_POINT_COMPUTE = 1,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX = 1000134000,
+#endif
+    VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR = 1000165000,
+    VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI = 1000369003,
+    VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
+    VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineBindPoint;
+
+typedef enum VkCommandBufferLevel {
+    VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0,
+    VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1,
+    VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF
+} VkCommandBufferLevel;
+
+typedef enum VkIndexType {
+    VK_INDEX_TYPE_UINT16 = 0,
+    VK_INDEX_TYPE_UINT32 = 1,
+    VK_INDEX_TYPE_NONE_KHR = 1000165000,
+    VK_INDEX_TYPE_UINT8_EXT = 1000265000,
+    VK_INDEX_TYPE_NONE_NV = VK_INDEX_TYPE_NONE_KHR,
+    VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkIndexType;
+
+typedef enum VkSubpassContents {
+    VK_SUBPASS_CONTENTS_INLINE = 0,
+    VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1,
+    VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT = 1000451000,
+    VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF
+} VkSubpassContents;
+
+typedef enum VkAccessFlagBits {
+    VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001,
+    VK_ACCESS_INDEX_READ_BIT = 0x00000002,
+    VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004,
+    VK_ACCESS_UNIFORM_READ_BIT = 0x00000008,
+    VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010,
+    VK_ACCESS_SHADER_READ_BIT = 0x00000020,
+    VK_ACCESS_SHADER_WRITE_BIT = 0x00000040,
+    VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080,
+    VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100,
+    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200,
+    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400,
+    VK_ACCESS_TRANSFER_READ_BIT = 0x00000800,
+    VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000,
+    VK_ACCESS_HOST_READ_BIT = 0x00002000,
+    VK_ACCESS_HOST_WRITE_BIT = 0x00004000,
+    VK_ACCESS_MEMORY_READ_BIT = 0x00008000,
+    VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000,
+    VK_ACCESS_NONE = 0,
+    VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000,
+    VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000,
+    VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000,
+    VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000,
+    VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000,
+    VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000,
+    VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000,
+    VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000,
+    VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000,
+    VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000,
+    VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000,
+    VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR,
+    VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR,
+    VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
+    VK_ACCESS_NONE_KHR = VK_ACCESS_NONE,
+    VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkAccessFlagBits;
+typedef VkFlags VkAccessFlags;
+
+typedef enum VkImageAspectFlagBits {
+    VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001,
+    VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002,
+    VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004,
+    VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008,
+    VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010,
+    VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020,
+    VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040,
+    VK_IMAGE_ASPECT_NONE = 0,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT = 0x00000080,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT = 0x00000100,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT = 0x00000200,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT = 0x00000400,
+    VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT,
+    VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT,
+    VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT,
+    VK_IMAGE_ASPECT_NONE_KHR = VK_IMAGE_ASPECT_NONE,
+    VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageAspectFlagBits;
+typedef VkFlags VkImageAspectFlags;
+
+typedef enum VkFormatFeatureFlagBits {
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001,
+    VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002,
+    VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004,
+    VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008,
+    VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010,
+    VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020,
+    VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040,
+    VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080,
+    VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100,
+    VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200,
+    VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400,
+    VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000,
+    VK_FORMAT_FEATURE_TRANSFER_SRC_BIT = 0x00004000,
+    VK_FORMAT_FEATURE_TRANSFER_DST_BIT = 0x00008000,
+    VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000,
+    VK_FORMAT_FEATURE_DISJOINT_BIT = 0x00400000,
+    VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000,
+    VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000,
+    VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000,
+    VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000,
+    VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000,
+    VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000,
+    VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000,
+    VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
+    VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
+    VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
+    VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + VK_FORMAT_FEATURE_DISJOINT_BIT_KHR = VK_FORMAT_FEATURE_DISJOINT_BIT, + VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFormatFeatureFlagBits; +typedef VkFlags VkFormatFeatureFlags; + +typedef enum VkImageCreateFlagBits { + VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004, + VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000008, + VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000010, + VK_IMAGE_CREATE_ALIAS_BIT = 0x00000400, + VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT = 0x00000040, + VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT = 0x00000020, + VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT = 0x00000080, + VK_IMAGE_CREATE_EXTENDED_USAGE_BIT = 0x00000100, + VK_IMAGE_CREATE_PROTECTED_BIT = 0x00000800, + VK_IMAGE_CREATE_DISJOINT_BIT = 0x00000200, + VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV = 0x00002000, + VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = 0x00001000, + VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT = 0x00004000, + VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00010000, + VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT = 0x00040000, + VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT = 0x00020000, + VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM = 0x00008000, + VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR = 0x00100000, + VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + VK_IMAGE_CREATE_DISJOINT_BIT_KHR = VK_IMAGE_CREATE_DISJOINT_BIT, + VK_IMAGE_CREATE_ALIAS_BIT_KHR = VK_IMAGE_CREATE_ALIAS_BIT, + VK_IMAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageCreateFlagBits; +typedef VkFlags VkImageCreateFlags; + +typedef enum VkSampleCountFlagBits { + VK_SAMPLE_COUNT_1_BIT = 0x00000001, + VK_SAMPLE_COUNT_2_BIT = 0x00000002, + VK_SAMPLE_COUNT_4_BIT = 0x00000004, + VK_SAMPLE_COUNT_8_BIT = 0x00000008, + VK_SAMPLE_COUNT_16_BIT = 0x00000010, + VK_SAMPLE_COUNT_32_BIT = 0x00000020, + VK_SAMPLE_COUNT_64_BIT = 0x00000040, + VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSampleCountFlagBits; +typedef VkFlags VkSampleCountFlags; + +typedef enum VkImageUsageFlagBits { + VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004, + VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008, + VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010, + VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, + VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, + VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, + VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00000400, + VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00000800, + VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR = 0x00001000, + VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200, + VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00000100, + VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT = 0x00400000, + VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 
0x00002000, + VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00004000, + VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR = 0x00008000, + VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x00080000, + VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI = 0x00040000, + VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM = 0x00100000, + VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM = 0x00200000, + VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageUsageFlagBits; +typedef VkFlags VkImageUsageFlags; + +typedef enum VkInstanceCreateFlagBits { + VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR = 0x00000001, + VK_INSTANCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkInstanceCreateFlagBits; +typedef VkFlags VkInstanceCreateFlags; + +typedef enum VkMemoryHeapFlagBits { + VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 0x00000002, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryHeapFlagBits; +typedef VkFlags VkMemoryHeapFlags; + +typedef enum VkMemoryPropertyFlagBits { + VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000002, + VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = 0x00000004, + VK_MEMORY_PROPERTY_HOST_CACHED_BIT = 0x00000008, + VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010, + VK_MEMORY_PROPERTY_PROTECTED_BIT = 0x00000020, + VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD = 0x00000040, + VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD = 0x00000080, + VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV = 0x00000100, + VK_MEMORY_PROPERTY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryPropertyFlagBits; +typedef VkFlags VkMemoryPropertyFlags; + +typedef enum VkQueueFlagBits { + VK_QUEUE_GRAPHICS_BIT = 0x00000001, + VK_QUEUE_COMPUTE_BIT = 0x00000002, + VK_QUEUE_TRANSFER_BIT = 0x00000004, + VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, + VK_QUEUE_PROTECTED_BIT = 0x00000010, + VK_QUEUE_VIDEO_DECODE_BIT_KHR = 0x00000020, + VK_QUEUE_VIDEO_ENCODE_BIT_KHR = 0x00000040, + VK_QUEUE_OPTICAL_FLOW_BIT_NV = 0x00000100, + VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueueFlagBits; +typedef VkFlags VkQueueFlags; +typedef VkFlags VkDeviceCreateFlags; + +typedef enum VkDeviceQueueCreateFlagBits { + VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT = 0x00000001, + VK_DEVICE_QUEUE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDeviceQueueCreateFlagBits; +typedef VkFlags VkDeviceQueueCreateFlags; + +typedef enum VkPipelineStageFlagBits { + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001, + VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002, + VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004, + VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008, + VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010, + VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020, + VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040, + VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080, + VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100, + VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400, + VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800, + VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000, + VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT = 0x00002000, + VK_PIPELINE_STAGE_HOST_BIT = 0x00004000, + VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000, + VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000, + VK_PIPELINE_STAGE_NONE = 0, + VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT = 
0x01000000, + VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000, + VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000, + VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR = 0x00200000, + VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000, + VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000, + VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV = 0x00020000, + VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT = 0x00080000, + VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT = 0x00100000, + VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, + VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, + VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT, + VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT, + VK_PIPELINE_STAGE_NONE_KHR = VK_PIPELINE_STAGE_NONE, + VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineStageFlagBits; +typedef VkFlags VkPipelineStageFlags; +typedef VkFlags VkMemoryMapFlags; + +typedef enum VkSparseMemoryBindFlagBits { + VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001, + VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseMemoryBindFlagBits; +typedef VkFlags VkSparseMemoryBindFlags; + +typedef enum VkSparseImageFormatFlagBits { + VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001, + VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT = 0x00000002, + VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT = 0x00000004, + VK_SPARSE_IMAGE_FORMAT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseImageFormatFlagBits; +typedef VkFlags VkSparseImageFormatFlags; + +typedef enum VkFenceCreateFlagBits { + VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001, + VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFenceCreateFlagBits; +typedef VkFlags VkFenceCreateFlags; +typedef VkFlags VkSemaphoreCreateFlags; + +typedef enum VkEventCreateFlagBits { + VK_EVENT_CREATE_DEVICE_ONLY_BIT = 0x00000001, + VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT, + VK_EVENT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkEventCreateFlagBits; +typedef VkFlags VkEventCreateFlags; + +typedef enum VkQueryPipelineStatisticFlagBits { + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001, + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT = 0x00000002, + VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT = 0x00000004, + VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT = 0x00000008, + VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT = 0x00000010, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT = 0x00000020, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT = 0x00000040, + VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT = 0x00000080, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200, + VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400, + VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT = 0x00000800, + VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT = 0x00001000, + VK_QUERY_PIPELINE_STATISTIC_CLUSTER_CULLING_SHADER_INVOCATIONS_BIT_HUAWEI = 0x00002000, + VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryPipelineStatisticFlagBits; +typedef VkFlags 
VkQueryPipelineStatisticFlags; +typedef VkFlags VkQueryPoolCreateFlags; + +typedef enum VkQueryResultFlagBits { + VK_QUERY_RESULT_64_BIT = 0x00000001, + VK_QUERY_RESULT_WAIT_BIT = 0x00000002, + VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004, + VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008, + VK_QUERY_RESULT_WITH_STATUS_BIT_KHR = 0x00000010, + VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryResultFlagBits; +typedef VkFlags VkQueryResultFlags; + +typedef enum VkBufferCreateFlagBits { + VK_BUFFER_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004, + VK_BUFFER_CREATE_PROTECTED_BIT = 0x00000008, + VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000010, + VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000020, + VK_BUFFER_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR = 0x00000040, + VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkBufferCreateFlagBits; +typedef VkFlags VkBufferCreateFlags; + +typedef enum VkBufferUsageFlagBits { + VK_BUFFER_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_BUFFER_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004, + VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008, + VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010, + VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020, + VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040, + VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080, + VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100, + VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT = 0x00020000, + VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000, + VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00004000, + VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800, + VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000, + VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000, +#endif + VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000, + VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000, + VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400, + VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000, + VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000, + VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT = 0x00200000, + VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00400000, + VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT = 0x04000000, + VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000, + VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT = 0x01000000, + VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR, + VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, + VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, + VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkBufferUsageFlagBits; +typedef VkFlags VkBufferUsageFlags; +typedef VkFlags VkBufferViewCreateFlags; + +typedef enum VkImageViewCreateFlagBits { + VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT = 0x00000001, + VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000004, + 
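+ /* All VkImageViewCreateFlagBits values are extension-gated; a typical 2D
+  * color view leaves flags at 0. A minimal sketch, assuming `device` and
+  * `image` are valid handles created elsewhere and the format matches the
+  * image:
+  *
+  *   VkImageViewCreateInfo view_info = {0};
+  *   view_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+  *   view_info.image = image;
+  *   view_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
+  *   view_info.format = VK_FORMAT_R8G8B8A8_UNORM;
+  *   view_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+  *   view_info.subresourceRange.levelCount = 1;
+  *   view_info.subresourceRange.layerCount = 1;
+  *   VkImageView view;
+  *   vkCreateImageView(device, &view_info, NULL, &view); // check VK_SUCCESS
+  */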
VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT = 0x00000002, + VK_IMAGE_VIEW_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageViewCreateFlagBits; +typedef VkFlags VkImageViewCreateFlags; +typedef VkFlags VkShaderModuleCreateFlags; + +typedef enum VkPipelineCacheCreateFlagBits { + VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001, + VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT, + VK_PIPELINE_CACHE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCacheCreateFlagBits; +typedef VkFlags VkPipelineCacheCreateFlags; + +typedef enum VkColorComponentFlagBits { + VK_COLOR_COMPONENT_R_BIT = 0x00000001, + VK_COLOR_COMPONENT_G_BIT = 0x00000002, + VK_COLOR_COMPONENT_B_BIT = 0x00000004, + VK_COLOR_COMPONENT_A_BIT = 0x00000008, + VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkColorComponentFlagBits; +typedef VkFlags VkColorComponentFlags; + +typedef enum VkPipelineCreateFlagBits { + VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001, + VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002, + VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004, + VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008, + VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010, + VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT = 0x00000100, + VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT = 0x00000200, + VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000, + VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000, + VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000, + VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000, + VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000, + VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020, + VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040, + VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080, + VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00040000, + VK_PIPELINE_CREATE_LIBRARY_BIT_KHR = 0x00000800, + VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT = 0x20000000, + VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT = 0x00800000, + VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT = 0x00000400, + VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000, + VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x02000000, + VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x04000000, + VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT = 0x01000000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_PIPELINE_CREATE_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV = 0x10000000, +#endif + VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000, + VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000, + VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, + VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 
VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE, + VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT, + VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT, + VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCreateFlagBits; +typedef VkFlags VkPipelineCreateFlags; + +typedef enum VkPipelineShaderStageCreateFlagBits { + VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT = 0x00000001, + VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT = 0x00000002, + VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT, + VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT, + VK_PIPELINE_SHADER_STAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineShaderStageCreateFlagBits; +typedef VkFlags VkPipelineShaderStageCreateFlags; + +typedef enum VkShaderStageFlagBits { + VK_SHADER_STAGE_VERTEX_BIT = 0x00000001, + VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT = 0x00000002, + VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT = 0x00000004, + VK_SHADER_STAGE_GEOMETRY_BIT = 0x00000008, + VK_SHADER_STAGE_FRAGMENT_BIT = 0x00000010, + VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020, + VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F, + VK_SHADER_STAGE_ALL = 0x7FFFFFFF, + VK_SHADER_STAGE_RAYGEN_BIT_KHR = 0x00000100, + VK_SHADER_STAGE_ANY_HIT_BIT_KHR = 0x00000200, + VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR = 0x00000400, + VK_SHADER_STAGE_MISS_BIT_KHR = 0x00000800, + VK_SHADER_STAGE_INTERSECTION_BIT_KHR = 0x00001000, + VK_SHADER_STAGE_CALLABLE_BIT_KHR = 0x00002000, + VK_SHADER_STAGE_TASK_BIT_EXT = 0x00000040, + VK_SHADER_STAGE_MESH_BIT_EXT = 0x00000080, + VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI = 0x00004000, + VK_SHADER_STAGE_CLUSTER_CULLING_BIT_HUAWEI = 0x00080000, + VK_SHADER_STAGE_RAYGEN_BIT_NV = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + VK_SHADER_STAGE_ANY_HIT_BIT_NV = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + VK_SHADER_STAGE_MISS_BIT_NV = VK_SHADER_STAGE_MISS_BIT_KHR, + VK_SHADER_STAGE_INTERSECTION_BIT_NV = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + VK_SHADER_STAGE_CALLABLE_BIT_NV = VK_SHADER_STAGE_CALLABLE_BIT_KHR, + VK_SHADER_STAGE_TASK_BIT_NV = VK_SHADER_STAGE_TASK_BIT_EXT, + VK_SHADER_STAGE_MESH_BIT_NV = VK_SHADER_STAGE_MESH_BIT_EXT, + VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkShaderStageFlagBits; + +typedef enum VkCullModeFlagBits { + VK_CULL_MODE_NONE = 0, + VK_CULL_MODE_FRONT_BIT = 0x00000001, + VK_CULL_MODE_BACK_BIT = 0x00000002, + VK_CULL_MODE_FRONT_AND_BACK = 0x00000003, + VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCullModeFlagBits; +typedef VkFlags VkCullModeFlags; +typedef VkFlags VkPipelineVertexInputStateCreateFlags; +typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; +typedef VkFlags VkPipelineTessellationStateCreateFlags; +typedef VkFlags VkPipelineViewportStateCreateFlags; +typedef VkFlags VkPipelineRasterizationStateCreateFlags; +typedef VkFlags VkPipelineMultisampleStateCreateFlags; + +typedef enum VkPipelineDepthStencilStateCreateFlagBits { + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT = 
0x00000001, + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT = 0x00000002, + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT, + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT, + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineDepthStencilStateCreateFlagBits; +typedef VkFlags VkPipelineDepthStencilStateCreateFlags; + +typedef enum VkPipelineColorBlendStateCreateFlagBits { + VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT = 0x00000001, + VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT, + VK_PIPELINE_COLOR_BLEND_STATE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineColorBlendStateCreateFlagBits; +typedef VkFlags VkPipelineColorBlendStateCreateFlags; +typedef VkFlags VkPipelineDynamicStateCreateFlags; + +typedef enum VkPipelineLayoutCreateFlagBits { + VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT = 0x00000002, + VK_PIPELINE_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineLayoutCreateFlagBits; +typedef VkFlags VkPipelineLayoutCreateFlags; +typedef VkFlags VkShaderStageFlags; + +typedef enum VkSamplerCreateFlagBits { + VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT = 0x00000001, + VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT = 0x00000002, + VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000008, + VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT = 0x00000004, + VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM = 0x00000010, + VK_SAMPLER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSamplerCreateFlagBits; +typedef VkFlags VkSamplerCreateFlags; + +typedef enum VkDescriptorPoolCreateFlagBits { + VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001, + VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT = 0x00000002, + VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT = 0x00000004, + VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_SETS_BIT_NV = 0x00000008, + VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_POOLS_BIT_NV = 0x00000010, + VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, + VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT, + VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorPoolCreateFlagBits; +typedef VkFlags VkDescriptorPoolCreateFlags; +typedef VkFlags VkDescriptorPoolResetFlags; + +typedef enum VkDescriptorSetLayoutCreateFlagBits { + VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00000010, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT = 0x00000020, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00000080, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT = 0x00000004, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PER_STAGE_BIT_NV = 0x00000040, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE = 
VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorSetLayoutCreateFlagBits; +typedef VkFlags VkDescriptorSetLayoutCreateFlags; + +typedef enum VkAttachmentDescriptionFlagBits { + VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001, + VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentDescriptionFlagBits; +typedef VkFlags VkAttachmentDescriptionFlags; + +typedef enum VkDependencyFlagBits { + VK_DEPENDENCY_BY_REGION_BIT = 0x00000001, + VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004, + VK_DEPENDENCY_VIEW_LOCAL_BIT = 0x00000002, + VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT = 0x00000008, + VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR = VK_DEPENDENCY_VIEW_LOCAL_BIT, + VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR = VK_DEPENDENCY_DEVICE_GROUP_BIT, + VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDependencyFlagBits; +typedef VkFlags VkDependencyFlags; + +typedef enum VkFramebufferCreateFlagBits { + VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT = 0x00000001, + VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, + VK_FRAMEBUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFramebufferCreateFlagBits; +typedef VkFlags VkFramebufferCreateFlags; + +typedef enum VkRenderPassCreateFlagBits { + VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM = 0x00000002, + VK_RENDER_PASS_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkRenderPassCreateFlagBits; +typedef VkFlags VkRenderPassCreateFlags; + +typedef enum VkSubpassDescriptionFlagBits { + VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001, + VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002, + VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM = 0x00000004, + VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM = 0x00000008, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT = 0x00000010, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT = 0x00000020, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT = 0x00000040, + VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x00000080, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT, + VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubpassDescriptionFlagBits; +typedef VkFlags VkSubpassDescriptionFlags; + +typedef enum VkCommandPoolCreateFlagBits { + VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001, + VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002, + VK_COMMAND_POOL_CREATE_PROTECTED_BIT = 0x00000004, + VK_COMMAND_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandPoolCreateFlagBits; +typedef VkFlags VkCommandPoolCreateFlags; + +typedef enum VkCommandPoolResetFlagBits { + VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT = 0x00000001, + VK_COMMAND_POOL_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandPoolResetFlagBits; +typedef VkFlags VkCommandPoolResetFlags; + +typedef enum VkCommandBufferUsageFlagBits { + VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = 0x00000001, + VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = 0x00000002, + VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = 0x00000004, 
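+ /* In practice, short-lived transfer work is recorded with
+  * VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT so the implementation may
+  * discard the recording after execution. A minimal sketch, assuming `cmd`
+  * is a valid primary VkCommandBuffer allocated elsewhere:
+  *
+  *   VkCommandBufferBeginInfo begin_info = {0};
+  *   begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+  *   begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+  *   vkBeginCommandBuffer(cmd, &begin_info);
+  *   // ... record copies / draws here ...
+  *   vkEndCommandBuffer(cmd);
+  */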
+ VK_COMMAND_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferUsageFlagBits; +typedef VkFlags VkCommandBufferUsageFlags; + +typedef enum VkQueryControlFlagBits { + VK_QUERY_CONTROL_PRECISE_BIT = 0x00000001, + VK_QUERY_CONTROL_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryControlFlagBits; +typedef VkFlags VkQueryControlFlags; + +typedef enum VkCommandBufferResetFlagBits { + VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = 0x00000001, + VK_COMMAND_BUFFER_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferResetFlagBits; +typedef VkFlags VkCommandBufferResetFlags; + +typedef enum VkStencilFaceFlagBits { + VK_STENCIL_FACE_FRONT_BIT = 0x00000001, + VK_STENCIL_FACE_BACK_BIT = 0x00000002, + VK_STENCIL_FACE_FRONT_AND_BACK = 0x00000003, + VK_STENCIL_FRONT_AND_BACK = VK_STENCIL_FACE_FRONT_AND_BACK, + VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkStencilFaceFlagBits; +typedef VkFlags VkStencilFaceFlags; +typedef struct VkExtent2D { + uint32_t width; + uint32_t height; +} VkExtent2D; + +typedef struct VkExtent3D { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkExtent3D; + +typedef struct VkOffset2D { + int32_t x; + int32_t y; +} VkOffset2D; + +typedef struct VkOffset3D { + int32_t x; + int32_t y; + int32_t z; +} VkOffset3D; + +typedef struct VkRect2D { + VkOffset2D offset; + VkExtent2D extent; +} VkRect2D; + +typedef struct VkBaseInStructure { + VkStructureType sType; + const struct VkBaseInStructure* pNext; +} VkBaseInStructure; + +typedef struct VkBaseOutStructure { + VkStructureType sType; + struct VkBaseOutStructure* pNext; +} VkBaseOutStructure; + +typedef struct VkBufferMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; +} VkBufferMemoryBarrier; + +typedef struct VkDispatchIndirectCommand { + uint32_t x; + uint32_t y; + uint32_t z; +} VkDispatchIndirectCommand; + +typedef struct VkDrawIndexedIndirectCommand { + uint32_t indexCount; + uint32_t instanceCount; + uint32_t firstIndex; + int32_t vertexOffset; + uint32_t firstInstance; +} VkDrawIndexedIndirectCommand; + +typedef struct VkDrawIndirectCommand { + uint32_t vertexCount; + uint32_t instanceCount; + uint32_t firstVertex; + uint32_t firstInstance; +} VkDrawIndirectCommand; + +typedef struct VkImageSubresourceRange { + VkImageAspectFlags aspectMask; + uint32_t baseMipLevel; + uint32_t levelCount; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceRange; + +typedef struct VkImageMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier; + +typedef struct VkMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; +} VkMemoryBarrier; + +typedef struct VkPipelineCacheHeaderVersionOne { + uint32_t headerSize; + VkPipelineCacheHeaderVersion headerVersion; + uint32_t vendorID; + uint32_t deviceID; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; +} VkPipelineCacheHeaderVersionOne; + +typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)( + void* pUserData, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR 
*PFN_vkFreeFunction)( + void* pUserData, + void* pMemory); + +typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)( + void* pUserData, + void* pOriginal, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void); +typedef struct VkAllocationCallbacks { + void* pUserData; + PFN_vkAllocationFunction pfnAllocation; + PFN_vkReallocationFunction pfnReallocation; + PFN_vkFreeFunction pfnFree; + PFN_vkInternalAllocationNotification pfnInternalAllocation; + PFN_vkInternalFreeNotification pfnInternalFree; +} VkAllocationCallbacks; + +typedef struct VkApplicationInfo { + VkStructureType sType; + const void* pNext; + const char* pApplicationName; + uint32_t applicationVersion; + const char* pEngineName; + uint32_t engineVersion; + uint32_t apiVersion; +} VkApplicationInfo; + +typedef struct VkFormatProperties { + VkFormatFeatureFlags linearTilingFeatures; + VkFormatFeatureFlags optimalTilingFeatures; + VkFormatFeatureFlags bufferFeatures; +} VkFormatProperties; + +typedef struct VkImageFormatProperties { + VkExtent3D maxExtent; + uint32_t maxMipLevels; + uint32_t maxArrayLayers; + VkSampleCountFlags sampleCounts; + VkDeviceSize maxResourceSize; +} VkImageFormatProperties; + +typedef struct VkInstanceCreateInfo { + VkStructureType sType; + const void* pNext; + VkInstanceCreateFlags flags; + const VkApplicationInfo* pApplicationInfo; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; +} VkInstanceCreateInfo; + +typedef struct VkMemoryHeap { + VkDeviceSize size; + VkMemoryHeapFlags flags; +} VkMemoryHeap; + +typedef struct VkMemoryType { + VkMemoryPropertyFlags propertyFlags; + uint32_t heapIndex; +} VkMemoryType; + +typedef struct VkPhysicalDeviceFeatures { + VkBool32 robustBufferAccess; + VkBool32 fullDrawIndexUint32; + VkBool32 imageCubeArray; + VkBool32 independentBlend; + VkBool32 geometryShader; + VkBool32 tessellationShader; + VkBool32 sampleRateShading; + VkBool32 dualSrcBlend; + VkBool32 logicOp; + VkBool32 multiDrawIndirect; + VkBool32 drawIndirectFirstInstance; + VkBool32 depthClamp; + VkBool32 depthBiasClamp; + VkBool32 fillModeNonSolid; + VkBool32 depthBounds; + VkBool32 wideLines; + VkBool32 largePoints; + VkBool32 alphaToOne; + VkBool32 multiViewport; + VkBool32 samplerAnisotropy; + VkBool32 textureCompressionETC2; + VkBool32 textureCompressionASTC_LDR; + VkBool32 textureCompressionBC; + VkBool32 occlusionQueryPrecise; + VkBool32 pipelineStatisticsQuery; + VkBool32 vertexPipelineStoresAndAtomics; + VkBool32 fragmentStoresAndAtomics; + VkBool32 shaderTessellationAndGeometryPointSize; + VkBool32 shaderImageGatherExtended; + VkBool32 shaderStorageImageExtendedFormats; + VkBool32 shaderStorageImageMultisample; + VkBool32 shaderStorageImageReadWithoutFormat; + VkBool32 shaderStorageImageWriteWithoutFormat; + VkBool32 shaderUniformBufferArrayDynamicIndexing; + VkBool32 shaderSampledImageArrayDynamicIndexing; + VkBool32 shaderStorageBufferArrayDynamicIndexing; + VkBool32 shaderStorageImageArrayDynamicIndexing; + VkBool32 shaderClipDistance; + VkBool32 
shaderCullDistance; + VkBool32 shaderFloat64; + VkBool32 shaderInt64; + VkBool32 shaderInt16; + VkBool32 shaderResourceResidency; + VkBool32 shaderResourceMinLod; + VkBool32 sparseBinding; + VkBool32 sparseResidencyBuffer; + VkBool32 sparseResidencyImage2D; + VkBool32 sparseResidencyImage3D; + VkBool32 sparseResidency2Samples; + VkBool32 sparseResidency4Samples; + VkBool32 sparseResidency8Samples; + VkBool32 sparseResidency16Samples; + VkBool32 sparseResidencyAliased; + VkBool32 variableMultisampleRate; + VkBool32 inheritedQueries; +} VkPhysicalDeviceFeatures; + +typedef struct VkPhysicalDeviceLimits { + uint32_t maxImageDimension1D; + uint32_t maxImageDimension2D; + uint32_t maxImageDimension3D; + uint32_t maxImageDimensionCube; + uint32_t maxImageArrayLayers; + uint32_t maxTexelBufferElements; + uint32_t maxUniformBufferRange; + uint32_t maxStorageBufferRange; + uint32_t maxPushConstantsSize; + uint32_t maxMemoryAllocationCount; + uint32_t maxSamplerAllocationCount; + VkDeviceSize bufferImageGranularity; + VkDeviceSize sparseAddressSpaceSize; + uint32_t maxBoundDescriptorSets; + uint32_t maxPerStageDescriptorSamplers; + uint32_t maxPerStageDescriptorUniformBuffers; + uint32_t maxPerStageDescriptorStorageBuffers; + uint32_t maxPerStageDescriptorSampledImages; + uint32_t maxPerStageDescriptorStorageImages; + uint32_t maxPerStageDescriptorInputAttachments; + uint32_t maxPerStageResources; + uint32_t maxDescriptorSetSamplers; + uint32_t maxDescriptorSetUniformBuffers; + uint32_t maxDescriptorSetUniformBuffersDynamic; + uint32_t maxDescriptorSetStorageBuffers; + uint32_t maxDescriptorSetStorageBuffersDynamic; + uint32_t maxDescriptorSetSampledImages; + uint32_t maxDescriptorSetStorageImages; + uint32_t maxDescriptorSetInputAttachments; + uint32_t maxVertexInputAttributes; + uint32_t maxVertexInputBindings; + uint32_t maxVertexInputAttributeOffset; + uint32_t maxVertexInputBindingStride; + uint32_t maxVertexOutputComponents; + uint32_t maxTessellationGenerationLevel; + uint32_t maxTessellationPatchSize; + uint32_t maxTessellationControlPerVertexInputComponents; + uint32_t maxTessellationControlPerVertexOutputComponents; + uint32_t maxTessellationControlPerPatchOutputComponents; + uint32_t maxTessellationControlTotalOutputComponents; + uint32_t maxTessellationEvaluationInputComponents; + uint32_t maxTessellationEvaluationOutputComponents; + uint32_t maxGeometryShaderInvocations; + uint32_t maxGeometryInputComponents; + uint32_t maxGeometryOutputComponents; + uint32_t maxGeometryOutputVertices; + uint32_t maxGeometryTotalOutputComponents; + uint32_t maxFragmentInputComponents; + uint32_t maxFragmentOutputAttachments; + uint32_t maxFragmentDualSrcAttachments; + uint32_t maxFragmentCombinedOutputResources; + uint32_t maxComputeSharedMemorySize; + uint32_t maxComputeWorkGroupCount[3]; + uint32_t maxComputeWorkGroupInvocations; + uint32_t maxComputeWorkGroupSize[3]; + uint32_t subPixelPrecisionBits; + uint32_t subTexelPrecisionBits; + uint32_t mipmapPrecisionBits; + uint32_t maxDrawIndexedIndexValue; + uint32_t maxDrawIndirectCount; + float maxSamplerLodBias; + float maxSamplerAnisotropy; + uint32_t maxViewports; + uint32_t maxViewportDimensions[2]; + float viewportBoundsRange[2]; + uint32_t viewportSubPixelBits; + size_t minMemoryMapAlignment; + VkDeviceSize minTexelBufferOffsetAlignment; + VkDeviceSize minUniformBufferOffsetAlignment; + VkDeviceSize minStorageBufferOffsetAlignment; + int32_t minTexelOffset; + uint32_t maxTexelOffset; + int32_t minTexelGatherOffset; + uint32_t maxTexelGatherOffset; 
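+ /* These limits are filled in by vkGetPhysicalDeviceProperties (declared
+  * further below) as part of VkPhysicalDeviceProperties::limits. A minimal
+  * sketch, assuming `gpu` is a valid VkPhysicalDevice:
+  *
+  *   VkPhysicalDeviceProperties props;
+  *   vkGetPhysicalDeviceProperties(gpu, &props);
+  *   // Guaranteed to be at least 128 bytes by the specification:
+  *   uint32_t push_bytes = props.limits.maxPushConstantsSize;
+  */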
+ float minInterpolationOffset; + float maxInterpolationOffset; + uint32_t subPixelInterpolationOffsetBits; + uint32_t maxFramebufferWidth; + uint32_t maxFramebufferHeight; + uint32_t maxFramebufferLayers; + VkSampleCountFlags framebufferColorSampleCounts; + VkSampleCountFlags framebufferDepthSampleCounts; + VkSampleCountFlags framebufferStencilSampleCounts; + VkSampleCountFlags framebufferNoAttachmentsSampleCounts; + uint32_t maxColorAttachments; + VkSampleCountFlags sampledImageColorSampleCounts; + VkSampleCountFlags sampledImageIntegerSampleCounts; + VkSampleCountFlags sampledImageDepthSampleCounts; + VkSampleCountFlags sampledImageStencilSampleCounts; + VkSampleCountFlags storageImageSampleCounts; + uint32_t maxSampleMaskWords; + VkBool32 timestampComputeAndGraphics; + float timestampPeriod; + uint32_t maxClipDistances; + uint32_t maxCullDistances; + uint32_t maxCombinedClipAndCullDistances; + uint32_t discreteQueuePriorities; + float pointSizeRange[2]; + float lineWidthRange[2]; + float pointSizeGranularity; + float lineWidthGranularity; + VkBool32 strictLines; + VkBool32 standardSampleLocations; + VkDeviceSize optimalBufferCopyOffsetAlignment; + VkDeviceSize optimalBufferCopyRowPitchAlignment; + VkDeviceSize nonCoherentAtomSize; +} VkPhysicalDeviceLimits; + +typedef struct VkPhysicalDeviceMemoryProperties { + uint32_t memoryTypeCount; + VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; + uint32_t memoryHeapCount; + VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryProperties; + +typedef struct VkPhysicalDeviceSparseProperties { + VkBool32 residencyStandard2DBlockShape; + VkBool32 residencyStandard2DMultisampleBlockShape; + VkBool32 residencyStandard3DBlockShape; + VkBool32 residencyAlignedMipSize; + VkBool32 residencyNonResidentStrict; +} VkPhysicalDeviceSparseProperties; + +typedef struct VkPhysicalDeviceProperties { + uint32_t apiVersion; + uint32_t driverVersion; + uint32_t vendorID; + uint32_t deviceID; + VkPhysicalDeviceType deviceType; + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; + VkPhysicalDeviceLimits limits; + VkPhysicalDeviceSparseProperties sparseProperties; +} VkPhysicalDeviceProperties; + +typedef struct VkQueueFamilyProperties { + VkQueueFlags queueFlags; + uint32_t queueCount; + uint32_t timestampValidBits; + VkExtent3D minImageTransferGranularity; +} VkQueueFamilyProperties; + +typedef struct VkDeviceQueueCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueCount; + const float* pQueuePriorities; +} VkDeviceQueueCreateInfo; + +typedef struct VkDeviceCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceCreateFlags flags; + uint32_t queueCreateInfoCount; + const VkDeviceQueueCreateInfo* pQueueCreateInfos; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; + const VkPhysicalDeviceFeatures* pEnabledFeatures; +} VkDeviceCreateInfo; + +typedef struct VkExtensionProperties { + char extensionName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; +} VkExtensionProperties; + +typedef struct VkLayerProperties { + char layerName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; + uint32_t implementationVersion; + char description[VK_MAX_DESCRIPTION_SIZE]; +} VkLayerProperties; + +typedef struct VkSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* 
pWaitSemaphores; + const VkPipelineStageFlags* pWaitDstStageMask; + uint32_t commandBufferCount; + const VkCommandBuffer* pCommandBuffers; + uint32_t signalSemaphoreCount; + const VkSemaphore* pSignalSemaphores; +} VkSubmitInfo; + +typedef struct VkMappedMemoryRange { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkDeviceSize offset; + VkDeviceSize size; +} VkMappedMemoryRange; + +typedef struct VkMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeIndex; +} VkMemoryAllocateInfo; + +typedef struct VkMemoryRequirements { + VkDeviceSize size; + VkDeviceSize alignment; + uint32_t memoryTypeBits; +} VkMemoryRequirements; + +typedef struct VkSparseMemoryBind { + VkDeviceSize resourceOffset; + VkDeviceSize size; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseMemoryBind; + +typedef struct VkSparseBufferMemoryBindInfo { + VkBuffer buffer; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseBufferMemoryBindInfo; + +typedef struct VkSparseImageOpaqueMemoryBindInfo { + VkImage image; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseImageOpaqueMemoryBindInfo; + +typedef struct VkImageSubresource { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t arrayLayer; +} VkImageSubresource; + +typedef struct VkSparseImageMemoryBind { + VkImageSubresource subresource; + VkOffset3D offset; + VkExtent3D extent; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseImageMemoryBind; + +typedef struct VkSparseImageMemoryBindInfo { + VkImage image; + uint32_t bindCount; + const VkSparseImageMemoryBind* pBinds; +} VkSparseImageMemoryBindInfo; + +typedef struct VkBindSparseInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t bufferBindCount; + const VkSparseBufferMemoryBindInfo* pBufferBinds; + uint32_t imageOpaqueBindCount; + const VkSparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds; + uint32_t imageBindCount; + const VkSparseImageMemoryBindInfo* pImageBinds; + uint32_t signalSemaphoreCount; + const VkSemaphore* pSignalSemaphores; +} VkBindSparseInfo; + +typedef struct VkSparseImageFormatProperties { + VkImageAspectFlags aspectMask; + VkExtent3D imageGranularity; + VkSparseImageFormatFlags flags; +} VkSparseImageFormatProperties; + +typedef struct VkSparseImageMemoryRequirements { + VkSparseImageFormatProperties formatProperties; + uint32_t imageMipTailFirstLod; + VkDeviceSize imageMipTailSize; + VkDeviceSize imageMipTailOffset; + VkDeviceSize imageMipTailStride; +} VkSparseImageMemoryRequirements; + +typedef struct VkFenceCreateInfo { + VkStructureType sType; + const void* pNext; + VkFenceCreateFlags flags; +} VkFenceCreateInfo; + +typedef struct VkSemaphoreCreateInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreCreateFlags flags; +} VkSemaphoreCreateInfo; + +typedef struct VkEventCreateInfo { + VkStructureType sType; + const void* pNext; + VkEventCreateFlags flags; +} VkEventCreateInfo; + +typedef struct VkQueryPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkQueryPoolCreateFlags flags; + VkQueryType queryType; + uint32_t queryCount; + VkQueryPipelineStatisticFlags pipelineStatistics; +} VkQueryPoolCreateInfo; + +typedef struct VkBufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferCreateFlags flags; + VkDeviceSize size; + VkBufferUsageFlags 
usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; +} VkBufferCreateInfo; + +typedef struct VkBufferViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferViewCreateFlags flags; + VkBuffer buffer; + VkFormat format; + VkDeviceSize offset; + VkDeviceSize range; +} VkBufferViewCreateInfo; + +typedef struct VkImageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageCreateFlags flags; + VkImageType imageType; + VkFormat format; + VkExtent3D extent; + uint32_t mipLevels; + uint32_t arrayLayers; + VkSampleCountFlagBits samples; + VkImageTiling tiling; + VkImageUsageFlags usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkImageLayout initialLayout; +} VkImageCreateInfo; + +typedef struct VkSubresourceLayout { + VkDeviceSize offset; + VkDeviceSize size; + VkDeviceSize rowPitch; + VkDeviceSize arrayPitch; + VkDeviceSize depthPitch; +} VkSubresourceLayout; + +typedef struct VkComponentMapping { + VkComponentSwizzle r; + VkComponentSwizzle g; + VkComponentSwizzle b; + VkComponentSwizzle a; +} VkComponentMapping; + +typedef struct VkImageViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageViewCreateFlags flags; + VkImage image; + VkImageViewType viewType; + VkFormat format; + VkComponentMapping components; + VkImageSubresourceRange subresourceRange; +} VkImageViewCreateInfo; + +typedef struct VkShaderModuleCreateInfo { + VkStructureType sType; + const void* pNext; + VkShaderModuleCreateFlags flags; + size_t codeSize; + const uint32_t* pCode; +} VkShaderModuleCreateInfo; + +typedef struct VkPipelineCacheCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCacheCreateFlags flags; + size_t initialDataSize; + const void* pInitialData; +} VkPipelineCacheCreateInfo; + +typedef struct VkSpecializationMapEntry { + uint32_t constantID; + uint32_t offset; + size_t size; +} VkSpecializationMapEntry; + +typedef struct VkSpecializationInfo { + uint32_t mapEntryCount; + const VkSpecializationMapEntry* pMapEntries; + size_t dataSize; + const void* pData; +} VkSpecializationInfo; + +typedef struct VkPipelineShaderStageCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineShaderStageCreateFlags flags; + VkShaderStageFlagBits stage; + VkShaderModule module; + const char* pName; + const VkSpecializationInfo* pSpecializationInfo; +} VkPipelineShaderStageCreateInfo; + +typedef struct VkComputePipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + VkPipelineShaderStageCreateInfo stage; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkComputePipelineCreateInfo; + +typedef struct VkVertexInputBindingDescription { + uint32_t binding; + uint32_t stride; + VkVertexInputRate inputRate; +} VkVertexInputBindingDescription; + +typedef struct VkVertexInputAttributeDescription { + uint32_t location; + uint32_t binding; + VkFormat format; + uint32_t offset; +} VkVertexInputAttributeDescription; + +typedef struct VkPipelineVertexInputStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineVertexInputStateCreateFlags flags; + uint32_t vertexBindingDescriptionCount; + const VkVertexInputBindingDescription* pVertexBindingDescriptions; + uint32_t vertexAttributeDescriptionCount; + const VkVertexInputAttributeDescription* pVertexAttributeDescriptions; +} VkPipelineVertexInputStateCreateInfo; + +typedef struct 
VkPipelineInputAssemblyStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineInputAssemblyStateCreateFlags flags; + VkPrimitiveTopology topology; + VkBool32 primitiveRestartEnable; +} VkPipelineInputAssemblyStateCreateInfo; + +typedef struct VkPipelineTessellationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineTessellationStateCreateFlags flags; + uint32_t patchControlPoints; +} VkPipelineTessellationStateCreateInfo; + +typedef struct VkViewport { + float x; + float y; + float width; + float height; + float minDepth; + float maxDepth; +} VkViewport; + +typedef struct VkPipelineViewportStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineViewportStateCreateFlags flags; + uint32_t viewportCount; + const VkViewport* pViewports; + uint32_t scissorCount; + const VkRect2D* pScissors; +} VkPipelineViewportStateCreateInfo; + +typedef struct VkPipelineRasterizationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationStateCreateFlags flags; + VkBool32 depthClampEnable; + VkBool32 rasterizerDiscardEnable; + VkPolygonMode polygonMode; + VkCullModeFlags cullMode; + VkFrontFace frontFace; + VkBool32 depthBiasEnable; + float depthBiasConstantFactor; + float depthBiasClamp; + float depthBiasSlopeFactor; + float lineWidth; +} VkPipelineRasterizationStateCreateInfo; + +typedef struct VkPipelineMultisampleStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineMultisampleStateCreateFlags flags; + VkSampleCountFlagBits rasterizationSamples; + VkBool32 sampleShadingEnable; + float minSampleShading; + const VkSampleMask* pSampleMask; + VkBool32 alphaToCoverageEnable; + VkBool32 alphaToOneEnable; +} VkPipelineMultisampleStateCreateInfo; + +typedef struct VkStencilOpState { + VkStencilOp failOp; + VkStencilOp passOp; + VkStencilOp depthFailOp; + VkCompareOp compareOp; + uint32_t compareMask; + uint32_t writeMask; + uint32_t reference; +} VkStencilOpState; + +typedef struct VkPipelineDepthStencilStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDepthStencilStateCreateFlags flags; + VkBool32 depthTestEnable; + VkBool32 depthWriteEnable; + VkCompareOp depthCompareOp; + VkBool32 depthBoundsTestEnable; + VkBool32 stencilTestEnable; + VkStencilOpState front; + VkStencilOpState back; + float minDepthBounds; + float maxDepthBounds; +} VkPipelineDepthStencilStateCreateInfo; + +typedef struct VkPipelineColorBlendAttachmentState { + VkBool32 blendEnable; + VkBlendFactor srcColorBlendFactor; + VkBlendFactor dstColorBlendFactor; + VkBlendOp colorBlendOp; + VkBlendFactor srcAlphaBlendFactor; + VkBlendFactor dstAlphaBlendFactor; + VkBlendOp alphaBlendOp; + VkColorComponentFlags colorWriteMask; +} VkPipelineColorBlendAttachmentState; + +typedef struct VkPipelineColorBlendStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineColorBlendStateCreateFlags flags; + VkBool32 logicOpEnable; + VkLogicOp logicOp; + uint32_t attachmentCount; + const VkPipelineColorBlendAttachmentState* pAttachments; + float blendConstants[4]; +} VkPipelineColorBlendStateCreateInfo; + +typedef struct VkPipelineDynamicStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDynamicStateCreateFlags flags; + uint32_t dynamicStateCount; + const VkDynamicState* pDynamicStates; +} VkPipelineDynamicStateCreateInfo; + +typedef struct VkGraphicsPipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const 
VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineInputAssemblyStateCreateInfo* pInputAssemblyState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; + const VkPipelineViewportStateCreateInfo* pViewportState; + const VkPipelineRasterizationStateCreateInfo* pRasterizationState; + const VkPipelineMultisampleStateCreateInfo* pMultisampleState; + const VkPipelineDepthStencilStateCreateInfo* pDepthStencilState; + const VkPipelineColorBlendStateCreateInfo* pColorBlendState; + const VkPipelineDynamicStateCreateInfo* pDynamicState; + VkPipelineLayout layout; + VkRenderPass renderPass; + uint32_t subpass; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkGraphicsPipelineCreateInfo; + +typedef struct VkPushConstantRange { + VkShaderStageFlags stageFlags; + uint32_t offset; + uint32_t size; +} VkPushConstantRange; + +typedef struct VkPipelineLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineLayoutCreateFlags flags; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; +} VkPipelineLayoutCreateInfo; + +typedef struct VkSamplerCreateInfo { + VkStructureType sType; + const void* pNext; + VkSamplerCreateFlags flags; + VkFilter magFilter; + VkFilter minFilter; + VkSamplerMipmapMode mipmapMode; + VkSamplerAddressMode addressModeU; + VkSamplerAddressMode addressModeV; + VkSamplerAddressMode addressModeW; + float mipLodBias; + VkBool32 anisotropyEnable; + float maxAnisotropy; + VkBool32 compareEnable; + VkCompareOp compareOp; + float minLod; + float maxLod; + VkBorderColor borderColor; + VkBool32 unnormalizedCoordinates; +} VkSamplerCreateInfo; + +typedef struct VkCopyDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet srcSet; + uint32_t srcBinding; + uint32_t srcArrayElement; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; +} VkCopyDescriptorSet; + +typedef struct VkDescriptorBufferInfo { + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize range; +} VkDescriptorBufferInfo; + +typedef struct VkDescriptorImageInfo { + VkSampler sampler; + VkImageView imageView; + VkImageLayout imageLayout; +} VkDescriptorImageInfo; + +typedef struct VkDescriptorPoolSize { + VkDescriptorType type; + uint32_t descriptorCount; +} VkDescriptorPoolSize; + +typedef struct VkDescriptorPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPoolCreateFlags flags; + uint32_t maxSets; + uint32_t poolSizeCount; + const VkDescriptorPoolSize* pPoolSizes; +} VkDescriptorPoolCreateInfo; + +typedef struct VkDescriptorSetAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPool descriptorPool; + uint32_t descriptorSetCount; + const VkDescriptorSetLayout* pSetLayouts; +} VkDescriptorSetAllocateInfo; + +typedef struct VkDescriptorSetLayoutBinding { + uint32_t binding; + VkDescriptorType descriptorType; + uint32_t descriptorCount; + VkShaderStageFlags stageFlags; + const VkSampler* pImmutableSamplers; +} VkDescriptorSetLayoutBinding; + +typedef struct VkDescriptorSetLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorSetLayoutCreateFlags flags; + uint32_t bindingCount; + const VkDescriptorSetLayoutBinding* pBindings; +} VkDescriptorSetLayoutCreateInfo; + +typedef struct VkWriteDescriptorSet { + VkStructureType sType; + const void* pNext; + 
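+ /* A minimal sketch of filling this structure to point binding 0 of a
+  * descriptor set at a uniform buffer, assuming `device`, `set` and `buf`
+  * are valid handles created elsewhere:
+  *
+  *   VkDescriptorBufferInfo buf_info = { buf, 0, VK_WHOLE_SIZE };
+  *   VkWriteDescriptorSet write = {0};
+  *   write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+  *   write.dstSet = set;
+  *   write.dstBinding = 0;
+  *   write.descriptorCount = 1;
+  *   write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+  *   write.pBufferInfo = &buf_info;
+  *   vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
+  */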
VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + VkDescriptorType descriptorType; + const VkDescriptorImageInfo* pImageInfo; + const VkDescriptorBufferInfo* pBufferInfo; + const VkBufferView* pTexelBufferView; +} VkWriteDescriptorSet; + +typedef struct VkAttachmentDescription { + VkAttachmentDescriptionFlags flags; + VkFormat format; + VkSampleCountFlagBits samples; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkAttachmentLoadOp stencilLoadOp; + VkAttachmentStoreOp stencilStoreOp; + VkImageLayout initialLayout; + VkImageLayout finalLayout; +} VkAttachmentDescription; + +typedef struct VkAttachmentReference { + uint32_t attachment; + VkImageLayout layout; +} VkAttachmentReference; + +typedef struct VkFramebufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkFramebufferCreateFlags flags; + VkRenderPass renderPass; + uint32_t attachmentCount; + const VkImageView* pAttachments; + uint32_t width; + uint32_t height; + uint32_t layers; +} VkFramebufferCreateInfo; + +typedef struct VkSubpassDescription { + VkSubpassDescriptionFlags flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t inputAttachmentCount; + const VkAttachmentReference* pInputAttachments; + uint32_t colorAttachmentCount; + const VkAttachmentReference* pColorAttachments; + const VkAttachmentReference* pResolveAttachments; + const VkAttachmentReference* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; +} VkSubpassDescription; + +typedef struct VkSubpassDependency { + uint32_t srcSubpass; + uint32_t dstSubpass; + VkPipelineStageFlags srcStageMask; + VkPipelineStageFlags dstStageMask; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkDependencyFlags dependencyFlags; +} VkSubpassDependency; + +typedef struct VkRenderPassCreateInfo { + VkStructureType sType; + const void* pNext; + VkRenderPassCreateFlags flags; + uint32_t attachmentCount; + const VkAttachmentDescription* pAttachments; + uint32_t subpassCount; + const VkSubpassDescription* pSubpasses; + uint32_t dependencyCount; + const VkSubpassDependency* pDependencies; +} VkRenderPassCreateInfo; + +typedef struct VkCommandPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPoolCreateFlags flags; + uint32_t queueFamilyIndex; +} VkCommandPoolCreateInfo; + +typedef struct VkCommandBufferAllocateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPool commandPool; + VkCommandBufferLevel level; + uint32_t commandBufferCount; +} VkCommandBufferAllocateInfo; + +typedef struct VkCommandBufferInheritanceInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + uint32_t subpass; + VkFramebuffer framebuffer; + VkBool32 occlusionQueryEnable; + VkQueryControlFlags queryFlags; + VkQueryPipelineStatisticFlags pipelineStatistics; +} VkCommandBufferInheritanceInfo; + +typedef struct VkCommandBufferBeginInfo { + VkStructureType sType; + const void* pNext; + VkCommandBufferUsageFlags flags; + const VkCommandBufferInheritanceInfo* pInheritanceInfo; +} VkCommandBufferBeginInfo; + +typedef struct VkBufferCopy { + VkDeviceSize srcOffset; + VkDeviceSize dstOffset; + VkDeviceSize size; +} VkBufferCopy; + +typedef struct VkImageSubresourceLayers { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceLayers; + +typedef struct VkBufferImageCopy { + VkDeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t 
bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkBufferImageCopy; + +typedef union VkClearColorValue { + float float32[4]; + int32_t int32[4]; + uint32_t uint32[4]; +} VkClearColorValue; + +typedef struct VkClearDepthStencilValue { + float depth; + uint32_t stencil; +} VkClearDepthStencilValue; + +typedef union VkClearValue { + VkClearColorValue color; + VkClearDepthStencilValue depthStencil; +} VkClearValue; + +typedef struct VkClearAttachment { + VkImageAspectFlags aspectMask; + uint32_t colorAttachment; + VkClearValue clearValue; +} VkClearAttachment; + +typedef struct VkClearRect { + VkRect2D rect; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkClearRect; + +typedef struct VkImageBlit { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit; + +typedef struct VkImageCopy { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy; + +typedef struct VkImageResolve { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageResolve; + +typedef struct VkRenderPassBeginInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + VkFramebuffer framebuffer; + VkRect2D renderArea; + uint32_t clearValueCount; + const VkClearValue* pClearValues; +} VkRenderPassBeginInfo; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance); +typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char* pName); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char* pName); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice); +typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks* 
pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceExtensionProperties)(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceLayerProperties)(uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceLayerProperties)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue); +typedef VkResult (VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory); +typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData); +typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory); +typedef VkResult (VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef VkResult (VKAPI_PTR *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef void (VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes); +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR 
*PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore); +typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent); +typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkSetEvent)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool); +typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView); +typedef void (VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage); +typedef void (VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView); +typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule); +typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* 
pPipelineCache); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler); +typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets); +typedef VkResult (VKAPI_PTR *PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, 
const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool); +typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateCommandBuffers)(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers); +typedef void (VKAPI_PTR *PFN_vkFreeCommandBuffers)(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); +typedef VkResult (VKAPI_PTR *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo); +typedef VkResult (VKAPI_PTR *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float lineWidth); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor); +typedef void (VKAPI_PTR *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float blendConstants[4]); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets); +typedef void (VKAPI_PTR 
*PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdFillBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data); +typedef void (VKAPI_PTR *PFN_vkCmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); +typedef void (VKAPI_PTR *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); +typedef void (VKAPI_PTR *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* 
pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdBeginQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdResetQueryPool)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance( + const VkInstanceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkInstance* pInstance); + +VKAPI_ATTR void VKAPI_CALL vkDestroyInstance( + VkInstance instance, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices( + VkInstance instance, + uint32_t* pPhysicalDeviceCount, + VkPhysicalDevice* pPhysicalDevices); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties( + VkPhysicalDevice physicalDevice, + 
uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties* pMemoryProperties); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr( + VkInstance instance, + const char* pName); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr( + VkDevice device, + const char* pName); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice( + VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDevice* pDevice); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDevice( + VkDevice device, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties( + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties( + VkPhysicalDevice physicalDevice, + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties( + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue( + VkDevice device, + uint32_t queueFamilyIndex, + uint32_t queueIndex, + VkQueue* pQueue); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo* pSubmits, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle( + VkQueue queue); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle( + VkDevice device); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory( + VkDevice device, + const VkMemoryAllocateInfo* pAllocateInfo, + const VkAllocationCallbacks* pAllocator, + VkDeviceMemory* pMemory); + +VKAPI_ATTR void VKAPI_CALL vkFreeMemory( + VkDevice device, + VkDeviceMemory memory, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize offset, + VkDeviceSize size, + VkMemoryMapFlags flags, + void** ppData); + +VKAPI_ATTR void VKAPI_CALL vkUnmapMemory( + VkDevice device, + VkDeviceMemory memory); + +VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + +VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize* pCommittedMemoryInBytes); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory( + VkDevice device, + VkBuffer buffer, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory( + VkDevice device, + VkImage image, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements( + VkDevice device, + VkBuffer buffer, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements( + VkDevice device, + VkImage image, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements( + VkDevice 
device, + VkImage image, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements* pSparseMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse( + VkQueue queue, + uint32_t bindInfoCount, + const VkBindSparseInfo* pBindInfo, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence( + VkDevice device, + const VkFenceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR void VKAPI_CALL vkDestroyFence( + VkDevice device, + VkFence fence, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus( + VkDevice device, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences, + VkBool32 waitAll, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore( + VkDevice device, + const VkSemaphoreCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSemaphore* pSemaphore); + +VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore( + VkDevice device, + VkSemaphore semaphore, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent( + VkDevice device, + const VkEventCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkEvent* pEvent); + +VKAPI_ATTR void VKAPI_CALL vkDestroyEvent( + VkDevice device, + VkEvent event, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool( + VkDevice device, + const VkQueryPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkQueryPool* pQueryPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool( + VkDevice device, + VkQueryPool queryPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void* pData, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer( + VkDevice device, + const VkBufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBuffer* pBuffer); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer( + VkDevice device, + VkBuffer buffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView( + VkDevice device, + const VkBufferViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBufferView* pView); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView( + VkDevice device, + VkBufferView bufferView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage( + VkDevice device, + const VkImageCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImage* pImage); + +VKAPI_ATTR void VKAPI_CALL vkDestroyImage( + VkDevice 
device, + VkImage image, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout( + VkDevice device, + VkImage image, + const VkImageSubresource* pSubresource, + VkSubresourceLayout* pLayout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView( + VkDevice device, + const VkImageViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImageView* pView); + +VKAPI_ATTR void VKAPI_CALL vkDestroyImageView( + VkDevice device, + VkImageView imageView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule( + VkDevice device, + const VkShaderModuleCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkShaderModule* pShaderModule); + +VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule( + VkDevice device, + VkShaderModule shaderModule, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache( + VkDevice device, + const VkPipelineCacheCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineCache* pPipelineCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache( + VkDevice device, + VkPipelineCache pipelineCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData( + VkDevice device, + VkPipelineCache pipelineCache, + size_t* pDataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches( + VkDevice device, + VkPipelineCache dstCache, + uint32_t srcCacheCount, + const VkPipelineCache* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline( + VkDevice device, + VkPipeline pipeline, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout( + VkDevice device, + const VkPipelineLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineLayout* pPipelineLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout( + VkDevice device, + VkPipelineLayout pipelineLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler( + VkDevice device, + const VkSamplerCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSampler* pSampler); + +VKAPI_ATTR void VKAPI_CALL vkDestroySampler( + VkDevice device, + VkSampler sampler, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorSetLayout* pSetLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout( + VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool( + VkDevice device, + const VkDescriptorPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorPool* pDescriptorPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool( + 
VkDevice device, + VkDescriptorPool descriptorPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool( + VkDevice device, + VkDescriptorPool descriptorPool, + VkDescriptorPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets( + VkDevice device, + const VkDescriptorSetAllocateInfo* pAllocateInfo, + VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets( + VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets( + VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet* pDescriptorCopies); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer( + VkDevice device, + const VkFramebufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFramebuffer* pFramebuffer); + +VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer( + VkDevice device, + VkFramebuffer framebuffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass( + VkDevice device, + const VkRenderPassCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass( + VkDevice device, + VkRenderPass renderPass, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity( + VkDevice device, + VkRenderPass renderPass, + VkExtent2D* pGranularity); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool( + VkDevice device, + const VkCommandPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCommandPool* pCommandPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool( + VkDevice device, + VkCommandPool commandPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers( + VkDevice device, + const VkCommandBufferAllocateInfo* pAllocateInfo, + VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers( + VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer( + VkCommandBuffer commandBuffer, + const VkCommandBufferBeginInfo* pBeginInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer( + VkCommandBuffer commandBuffer, + VkCommandBufferResetFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor( + VkCommandBuffer commandBuffer, + uint32_t firstScissor, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth( + VkCommandBuffer commandBuffer, + float lineWidth); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias( + VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor); + 
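+// ---------------------------------------------------------------------------
+// Editorial sketch (not part of the Khronos header): the command-buffer
+// prototypes declared in this block are meant to be used together. Work is
+// recorded between vkBeginCommandBuffer() and vkEndCommandBuffer(), with the
+// vkCmd* calls in between. A minimal sketch follows; `cmd`, `renderPass`,
+// `framebuffer` and `pipeline` are hypothetical handles the application is
+// assumed to have created already:
+//
+//     VkCommandBufferBeginInfo begin = { 0 };
+//     begin.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+//     vkBeginCommandBuffer(cmd, &begin);
+//
+//     VkClearValue clear = { .color = { .float32 = { 0.0f, 0.0f, 0.0f, 1.0f } } };
+//     VkRenderPassBeginInfo rp = { 0 };
+//     rp.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+//     rp.renderPass = renderPass;
+//     rp.framebuffer = framebuffer;
+//     rp.renderArea.extent.width = 400;   /* hypothetical framebuffer size */
+//     rp.renderArea.extent.height = 400;
+//     rp.clearValueCount = 1;
+//     rp.pClearValues = &clear;
+//     vkCmdBeginRenderPass(cmd, &rp, VK_SUBPASS_CONTENTS_INLINE);
+//
+//     vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
+//     vkCmdDraw(cmd, 3, 1, 0, 0);         /* one triangle, one instance */
+//
+//     vkCmdEndRenderPass(cmd);
+//     vkEndCommandBuffer(cmd);
+//
+// When VK_NO_PROTOTYPES is defined, the same entry points are reached through
+// the PFN_vk* function-pointer typedefs above, resolved at runtime with
+// vkGetInstanceProcAddr() / vkGetDeviceProcAddr() rather than linked statically.
+// ---------------------------------------------------------------------------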
+VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants( + VkCommandBuffer commandBuffer, + const float blendConstants[4]); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds( + VkCommandBuffer commandBuffer, + float minDepthBounds, + float maxDepthBounds); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t compareMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t writeMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t reference); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t* pDynamicOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkIndexType indexType); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdDraw( + VkCommandBuffer commandBuffer, + uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed( + VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatch( + VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit* pRegions, + VkFilter filter); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer( + VkCommandBuffer 
commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize dataSize, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer( + VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize size, + uint32_t data); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue* pColor, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue* pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments( + VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment* pAttachments, + uint32_t rectCount, + const VkClearRect* pRects); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants( + VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void* pValues); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass( + VkCommandBuffer commandBuffer, + const 
VkRenderPassBeginInfo* pRenderPassBegin, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass( + VkCommandBuffer commandBuffer, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands( + VkCommandBuffer commandBuffer, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); +#endif + + +// VK_VERSION_1_1 is a preprocessor guard. Do not pass it to API calls. +#define VK_VERSION_1_1 1 +// Vulkan 1.1 version number +#define VK_API_VERSION_1_1 VK_MAKE_API_VERSION(0, 1, 1, 0)// Patch version should always be set to 0 + +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate) +#define VK_MAX_DEVICE_GROUP_SIZE 32U +#define VK_LUID_SIZE 8U +#define VK_QUEUE_FAMILY_EXTERNAL (~1U) + +typedef enum VkPointClippingBehavior { + VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES = 0, + VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY = 1, + VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, + VK_POINT_CLIPPING_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF +} VkPointClippingBehavior; + +typedef enum VkTessellationDomainOrigin { + VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT = 0, + VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT = 1, + VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_MAX_ENUM = 0x7FFFFFFF +} VkTessellationDomainOrigin; + +typedef enum VkSamplerYcbcrModelConversion { + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY = 0, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY = 1, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709 = 2, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601 = 3, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 = 4, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_MAX_ENUM = 0x7FFFFFFF +} VkSamplerYcbcrModelConversion; + +typedef enum VkSamplerYcbcrRange { + VK_SAMPLER_YCBCR_RANGE_ITU_FULL = 0, + VK_SAMPLER_YCBCR_RANGE_ITU_NARROW = 1, + VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, + VK_SAMPLER_YCBCR_RANGE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerYcbcrRange; + +typedef enum VkChromaLocation { + VK_CHROMA_LOCATION_COSITED_EVEN = 0, + VK_CHROMA_LOCATION_MIDPOINT = 1, + VK_CHROMA_LOCATION_COSITED_EVEN_KHR = VK_CHROMA_LOCATION_COSITED_EVEN, + VK_CHROMA_LOCATION_MIDPOINT_KHR = VK_CHROMA_LOCATION_MIDPOINT, + VK_CHROMA_LOCATION_MAX_ENUM = 0x7FFFFFFF +} VkChromaLocation; + +typedef enum VkDescriptorUpdateTemplateType { + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET = 0, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = 
VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorUpdateTemplateType; + +typedef enum VkSubgroupFeatureFlagBits { + VK_SUBGROUP_FEATURE_BASIC_BIT = 0x00000001, + VK_SUBGROUP_FEATURE_VOTE_BIT = 0x00000002, + VK_SUBGROUP_FEATURE_ARITHMETIC_BIT = 0x00000004, + VK_SUBGROUP_FEATURE_BALLOT_BIT = 0x00000008, + VK_SUBGROUP_FEATURE_SHUFFLE_BIT = 0x00000010, + VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT = 0x00000020, + VK_SUBGROUP_FEATURE_CLUSTERED_BIT = 0x00000040, + VK_SUBGROUP_FEATURE_QUAD_BIT = 0x00000080, + VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV = 0x00000100, + VK_SUBGROUP_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubgroupFeatureFlagBits; +typedef VkFlags VkSubgroupFeatureFlags; + +typedef enum VkPeerMemoryFeatureFlagBits { + VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT = 0x00000001, + VK_PEER_MEMORY_FEATURE_COPY_DST_BIT = 0x00000002, + VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT = 0x00000004, + VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT = 0x00000008, + VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT, + VK_PEER_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPeerMemoryFeatureFlagBits; +typedef VkFlags VkPeerMemoryFeatureFlags; + +typedef enum VkMemoryAllocateFlagBits { + VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT = 0x00000001, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT = 0x00000002, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000004, + VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryAllocateFlagBits; +typedef VkFlags VkMemoryAllocateFlags; +typedef VkFlags VkCommandPoolTrimFlags; +typedef VkFlags VkDescriptorUpdateTemplateCreateFlags; + +typedef enum VkExternalMemoryHandleTypeFlagBits { + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT = 0x00000010, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT = 0x00000020, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT = 0x00000040, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT = 0x00000200, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID = 0x00000400, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT = 0x00000080, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT = 0x00000100, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA = 0x00000800, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV = 0x00001000, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_SCREEN_BUFFER_BIT_QNX = 0x00004000, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + 
VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBits; +typedef VkFlags VkExternalMemoryHandleTypeFlags; + +typedef enum VkExternalMemoryFeatureFlagBits { + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT = 0x00000004, + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBits; +typedef VkFlags VkExternalMemoryFeatureFlags; + +typedef enum VkExternalFenceHandleTypeFlagBits { + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000008, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalFenceHandleTypeFlagBits; +typedef VkFlags VkExternalFenceHandleTypeFlags; + +typedef enum VkExternalFenceFeatureFlagBits { + VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT = 0x00000001, + VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_FENCE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalFenceFeatureFlagBits; +typedef VkFlags VkExternalFenceFeatureFlags; + +typedef enum VkFenceImportFlagBits { + VK_FENCE_IMPORT_TEMPORARY_BIT = 0x00000001, + VK_FENCE_IMPORT_TEMPORARY_BIT_KHR = VK_FENCE_IMPORT_TEMPORARY_BIT, + VK_FENCE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFenceImportFlagBits; +typedef VkFlags VkFenceImportFlags; + +typedef enum VkSemaphoreImportFlagBits { + VK_SEMAPHORE_IMPORT_TEMPORARY_BIT = 0x00000001, + VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT, + VK_SEMAPHORE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreImportFlagBits; +typedef VkFlags VkSemaphoreImportFlags; + +typedef enum VkExternalSemaphoreHandleTypeFlagBits { + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT = 0x00000008, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000010, + 
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA = 0x00000080, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalSemaphoreHandleTypeFlagBits; +typedef VkFlags VkExternalSemaphoreHandleTypeFlags; + +typedef enum VkExternalSemaphoreFeatureFlagBits { + VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT = 0x00000001, + VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_SEMAPHORE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalSemaphoreFeatureFlagBits; +typedef VkFlags VkExternalSemaphoreFeatureFlags; +typedef struct VkPhysicalDeviceSubgroupProperties { + VkStructureType sType; + void* pNext; + uint32_t subgroupSize; + VkShaderStageFlags supportedStages; + VkSubgroupFeatureFlags supportedOperations; + VkBool32 quadOperationsInAllStages; +} VkPhysicalDeviceSubgroupProperties; + +typedef struct VkBindBufferMemoryInfo { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; +} VkBindBufferMemoryInfo; + +typedef struct VkBindImageMemoryInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; +} VkBindImageMemoryInfo; + +typedef struct VkPhysicalDevice16BitStorageFeatures { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer16BitAccess; + VkBool32 uniformAndStorageBuffer16BitAccess; + VkBool32 storagePushConstant16; + VkBool32 storageInputOutput16; +} VkPhysicalDevice16BitStorageFeatures; + +typedef struct VkMemoryDedicatedRequirements { + VkStructureType sType; + void* pNext; + VkBool32 prefersDedicatedAllocation; + VkBool32 requiresDedicatedAllocation; +} VkMemoryDedicatedRequirements; + +typedef struct VkMemoryDedicatedAllocateInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + VkBuffer buffer; +} VkMemoryDedicatedAllocateInfo; + +typedef struct VkMemoryAllocateFlagsInfo { + VkStructureType sType; + const void* pNext; + VkMemoryAllocateFlags flags; + uint32_t deviceMask; +} VkMemoryAllocateFlagsInfo; + +typedef struct VkDeviceGroupRenderPassBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceMask; + uint32_t deviceRenderAreaCount; + const VkRect2D* pDeviceRenderAreas; +} VkDeviceGroupRenderPassBeginInfo; + +typedef struct VkDeviceGroupCommandBufferBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceMask; +} VkDeviceGroupCommandBufferBeginInfo; + +typedef struct VkDeviceGroupSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const uint32_t* pWaitSemaphoreDeviceIndices; + uint32_t commandBufferCount; + const uint32_t* pCommandBufferDeviceMasks; + uint32_t 
signalSemaphoreCount; + const uint32_t* pSignalSemaphoreDeviceIndices; +} VkDeviceGroupSubmitInfo; + +typedef struct VkDeviceGroupBindSparseInfo { + VkStructureType sType; + const void* pNext; + uint32_t resourceDeviceIndex; + uint32_t memoryDeviceIndex; +} VkDeviceGroupBindSparseInfo; + +typedef struct VkBindBufferMemoryDeviceGroupInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindBufferMemoryDeviceGroupInfo; + +typedef struct VkBindImageMemoryDeviceGroupInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; + uint32_t splitInstanceBindRegionCount; + const VkRect2D* pSplitInstanceBindRegions; +} VkBindImageMemoryDeviceGroupInfo; + +typedef struct VkPhysicalDeviceGroupProperties { + VkStructureType sType; + void* pNext; + uint32_t physicalDeviceCount; + VkPhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE]; + VkBool32 subsetAllocation; +} VkPhysicalDeviceGroupProperties; + +typedef struct VkDeviceGroupDeviceCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t physicalDeviceCount; + const VkPhysicalDevice* pPhysicalDevices; +} VkDeviceGroupDeviceCreateInfo; + +typedef struct VkBufferMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkBufferMemoryRequirementsInfo2; + +typedef struct VkImageMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageMemoryRequirementsInfo2; + +typedef struct VkImageSparseMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageSparseMemoryRequirementsInfo2; + +typedef struct VkMemoryRequirements2 { + VkStructureType sType; + void* pNext; + VkMemoryRequirements memoryRequirements; +} VkMemoryRequirements2; + +typedef struct VkSparseImageMemoryRequirements2 { + VkStructureType sType; + void* pNext; + VkSparseImageMemoryRequirements memoryRequirements; +} VkSparseImageMemoryRequirements2; + +typedef struct VkPhysicalDeviceFeatures2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceFeatures features; +} VkPhysicalDeviceFeatures2; + +typedef struct VkPhysicalDeviceProperties2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceProperties properties; +} VkPhysicalDeviceProperties2; + +typedef struct VkFormatProperties2 { + VkStructureType sType; + void* pNext; + VkFormatProperties formatProperties; +} VkFormatProperties2; + +typedef struct VkImageFormatProperties2 { + VkStructureType sType; + void* pNext; + VkImageFormatProperties imageFormatProperties; +} VkImageFormatProperties2; + +typedef struct VkPhysicalDeviceImageFormatInfo2 { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkImageType type; + VkImageTiling tiling; + VkImageUsageFlags usage; + VkImageCreateFlags flags; +} VkPhysicalDeviceImageFormatInfo2; + +typedef struct VkQueueFamilyProperties2 { + VkStructureType sType; + void* pNext; + VkQueueFamilyProperties queueFamilyProperties; +} VkQueueFamilyProperties2; + +typedef struct VkPhysicalDeviceMemoryProperties2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceMemoryProperties memoryProperties; +} VkPhysicalDeviceMemoryProperties2; + +typedef struct VkSparseImageFormatProperties2 { + VkStructureType sType; + void* pNext; + VkSparseImageFormatProperties properties; +} VkSparseImageFormatProperties2; + +typedef struct VkPhysicalDeviceSparseImageFormatInfo2 { + VkStructureType sType; + const void* pNext; + VkFormat format; + 
VkImageType type; + VkSampleCountFlagBits samples; + VkImageUsageFlags usage; + VkImageTiling tiling; +} VkPhysicalDeviceSparseImageFormatInfo2; + +typedef struct VkPhysicalDevicePointClippingProperties { + VkStructureType sType; + void* pNext; + VkPointClippingBehavior pointClippingBehavior; +} VkPhysicalDevicePointClippingProperties; + +typedef struct VkInputAttachmentAspectReference { + uint32_t subpass; + uint32_t inputAttachmentIndex; + VkImageAspectFlags aspectMask; +} VkInputAttachmentAspectReference; + +typedef struct VkRenderPassInputAttachmentAspectCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t aspectReferenceCount; + const VkInputAttachmentAspectReference* pAspectReferences; +} VkRenderPassInputAttachmentAspectCreateInfo; + +typedef struct VkImageViewUsageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags usage; +} VkImageViewUsageCreateInfo; + +typedef struct VkPipelineTessellationDomainOriginStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkTessellationDomainOrigin domainOrigin; +} VkPipelineTessellationDomainOriginStateCreateInfo; + +typedef struct VkRenderPassMultiviewCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t subpassCount; + const uint32_t* pViewMasks; + uint32_t dependencyCount; + const int32_t* pViewOffsets; + uint32_t correlationMaskCount; + const uint32_t* pCorrelationMasks; +} VkRenderPassMultiviewCreateInfo; + +typedef struct VkPhysicalDeviceMultiviewFeatures { + VkStructureType sType; + void* pNext; + VkBool32 multiview; + VkBool32 multiviewGeometryShader; + VkBool32 multiviewTessellationShader; +} VkPhysicalDeviceMultiviewFeatures; + +typedef struct VkPhysicalDeviceMultiviewProperties { + VkStructureType sType; + void* pNext; + uint32_t maxMultiviewViewCount; + uint32_t maxMultiviewInstanceIndex; +} VkPhysicalDeviceMultiviewProperties; + +typedef struct VkPhysicalDeviceVariablePointersFeatures { + VkStructureType sType; + void* pNext; + VkBool32 variablePointersStorageBuffer; + VkBool32 variablePointers; +} VkPhysicalDeviceVariablePointersFeatures; + +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeatures; + +typedef struct VkPhysicalDeviceProtectedMemoryFeatures { + VkStructureType sType; + void* pNext; + VkBool32 protectedMemory; +} VkPhysicalDeviceProtectedMemoryFeatures; + +typedef struct VkPhysicalDeviceProtectedMemoryProperties { + VkStructureType sType; + void* pNext; + VkBool32 protectedNoFault; +} VkPhysicalDeviceProtectedMemoryProperties; + +typedef struct VkDeviceQueueInfo2 { + VkStructureType sType; + const void* pNext; + VkDeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueIndex; +} VkDeviceQueueInfo2; + +typedef struct VkProtectedSubmitInfo { + VkStructureType sType; + const void* pNext; + VkBool32 protectedSubmit; +} VkProtectedSubmitInfo; + +typedef struct VkSamplerYcbcrConversionCreateInfo { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkSamplerYcbcrModelConversion ycbcrModel; + VkSamplerYcbcrRange ycbcrRange; + VkComponentMapping components; + VkChromaLocation xChromaOffset; + VkChromaLocation yChromaOffset; + VkFilter chromaFilter; + VkBool32 forceExplicitReconstruction; +} VkSamplerYcbcrConversionCreateInfo; + +typedef struct VkSamplerYcbcrConversionInfo { + VkStructureType sType; + const void* pNext; + VkSamplerYcbcrConversion conversion; +} VkSamplerYcbcrConversionInfo; + +typedef struct VkBindImagePlaneMemoryInfo { + VkStructureType sType; + const void* pNext; + 
VkImageAspectFlagBits planeAspect; +} VkBindImagePlaneMemoryInfo; + +typedef struct VkImagePlaneMemoryRequirementsInfo { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits planeAspect; +} VkImagePlaneMemoryRequirementsInfo; + +typedef struct VkPhysicalDeviceSamplerYcbcrConversionFeatures { + VkStructureType sType; + void* pNext; + VkBool32 samplerYcbcrConversion; +} VkPhysicalDeviceSamplerYcbcrConversionFeatures; + +typedef struct VkSamplerYcbcrConversionImageFormatProperties { + VkStructureType sType; + void* pNext; + uint32_t combinedImageSamplerDescriptorCount; +} VkSamplerYcbcrConversionImageFormatProperties; + +typedef struct VkDescriptorUpdateTemplateEntry { + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + VkDescriptorType descriptorType; + size_t offset; + size_t stride; +} VkDescriptorUpdateTemplateEntry; + +typedef struct VkDescriptorUpdateTemplateCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorUpdateTemplateCreateFlags flags; + uint32_t descriptorUpdateEntryCount; + const VkDescriptorUpdateTemplateEntry* pDescriptorUpdateEntries; + VkDescriptorUpdateTemplateType templateType; + VkDescriptorSetLayout descriptorSetLayout; + VkPipelineBindPoint pipelineBindPoint; + VkPipelineLayout pipelineLayout; + uint32_t set; +} VkDescriptorUpdateTemplateCreateInfo; + +typedef struct VkExternalMemoryProperties { + VkExternalMemoryFeatureFlags externalMemoryFeatures; + VkExternalMemoryHandleTypeFlags exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlags compatibleHandleTypes; +} VkExternalMemoryProperties; + +typedef struct VkPhysicalDeviceExternalImageFormatInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalImageFormatInfo; + +typedef struct VkExternalImageFormatProperties { + VkStructureType sType; + void* pNext; + VkExternalMemoryProperties externalMemoryProperties; +} VkExternalImageFormatProperties; + +typedef struct VkPhysicalDeviceExternalBufferInfo { + VkStructureType sType; + const void* pNext; + VkBufferCreateFlags flags; + VkBufferUsageFlags usage; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalBufferInfo; + +typedef struct VkExternalBufferProperties { + VkStructureType sType; + void* pNext; + VkExternalMemoryProperties externalMemoryProperties; +} VkExternalBufferProperties; + +typedef struct VkPhysicalDeviceIDProperties { + VkStructureType sType; + void* pNext; + uint8_t deviceUUID[VK_UUID_SIZE]; + uint8_t driverUUID[VK_UUID_SIZE]; + uint8_t deviceLUID[VK_LUID_SIZE]; + uint32_t deviceNodeMask; + VkBool32 deviceLUIDValid; +} VkPhysicalDeviceIDProperties; + +typedef struct VkExternalMemoryImageCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExternalMemoryImageCreateInfo; + +typedef struct VkExternalMemoryBufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExternalMemoryBufferCreateInfo; + +typedef struct VkExportMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExportMemoryAllocateInfo; + +typedef struct VkPhysicalDeviceExternalFenceInfo { + VkStructureType sType; + const void* pNext; + VkExternalFenceHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalFenceInfo; + +typedef struct VkExternalFenceProperties { + VkStructureType sType; + void* pNext; + VkExternalFenceHandleTypeFlags 
exportFromImportedHandleTypes; + VkExternalFenceHandleTypeFlags compatibleHandleTypes; + VkExternalFenceFeatureFlags externalFenceFeatures; +} VkExternalFenceProperties; + +typedef struct VkExportFenceCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalFenceHandleTypeFlags handleTypes; +} VkExportFenceCreateInfo; + +typedef struct VkExportSemaphoreCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalSemaphoreHandleTypeFlags handleTypes; +} VkExportSemaphoreCreateInfo; + +typedef struct VkPhysicalDeviceExternalSemaphoreInfo { + VkStructureType sType; + const void* pNext; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalSemaphoreInfo; + +typedef struct VkExternalSemaphoreProperties { + VkStructureType sType; + void* pNext; + VkExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes; + VkExternalSemaphoreHandleTypeFlags compatibleHandleTypes; + VkExternalSemaphoreFeatureFlags externalSemaphoreFeatures; +} VkExternalSemaphoreProperties; + +typedef struct VkPhysicalDeviceMaintenance3Properties { + VkStructureType sType; + void* pNext; + uint32_t maxPerSetDescriptors; + VkDeviceSize maxMemoryAllocationSize; +} VkPhysicalDeviceMaintenance3Properties; + +typedef struct VkDescriptorSetLayoutSupport { + VkStructureType sType; + void* pNext; + VkBool32 supported; +} VkDescriptorSetLayoutSupport; + +typedef struct VkPhysicalDeviceShaderDrawParametersFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderDrawParameters; +} VkPhysicalDeviceShaderDrawParametersFeatures; + +typedef VkPhysicalDeviceShaderDrawParametersFeatures VkPhysicalDeviceShaderDrawParameterFeatures; + +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceVersion)(uint32_t* pApiVersion); +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeatures)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMask)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchBase)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroups)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2)(VkPhysicalDevice physicalDevice, 
VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkTrimCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); +typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue2)(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversion)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversion)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplate)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplate)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplate)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFenceProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphoreProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupport)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion( + uint32_t* pApiVersion); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); + +VKAPI_ATTR void VKAPI_CALL 
vkGetDeviceGroupPeerMemoryFeatures( + VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase( + VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2( + VkDevice device, + const VkDeviceQueueInfo2* pQueueInfo, + VkQueue* pQueue); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); + +VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion( + VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + +VKAPI_ATTR void VKAPI_CALL 
vkDestroyDescriptorUpdateTemplate( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif + + +// VK_VERSION_1_2 is a preprocessor guard. Do not pass it to API calls. +#define VK_VERSION_1_2 1 +// Vulkan 1.2 version number +#define VK_API_VERSION_1_2 VK_MAKE_API_VERSION(0, 1, 2, 0)// Patch version should always be set to 0 + +#define VK_MAX_DRIVER_NAME_SIZE 256U +#define VK_MAX_DRIVER_INFO_SIZE 256U + +typedef enum VkDriverId { + VK_DRIVER_ID_AMD_PROPRIETARY = 1, + VK_DRIVER_ID_AMD_OPEN_SOURCE = 2, + VK_DRIVER_ID_MESA_RADV = 3, + VK_DRIVER_ID_NVIDIA_PROPRIETARY = 4, + VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS = 5, + VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA = 6, + VK_DRIVER_ID_IMAGINATION_PROPRIETARY = 7, + VK_DRIVER_ID_QUALCOMM_PROPRIETARY = 8, + VK_DRIVER_ID_ARM_PROPRIETARY = 9, + VK_DRIVER_ID_GOOGLE_SWIFTSHADER = 10, + VK_DRIVER_ID_GGP_PROPRIETARY = 11, + VK_DRIVER_ID_BROADCOM_PROPRIETARY = 12, + VK_DRIVER_ID_MESA_LLVMPIPE = 13, + VK_DRIVER_ID_MOLTENVK = 14, + VK_DRIVER_ID_COREAVI_PROPRIETARY = 15, + VK_DRIVER_ID_JUICE_PROPRIETARY = 16, + VK_DRIVER_ID_VERISILICON_PROPRIETARY = 17, + VK_DRIVER_ID_MESA_TURNIP = 18, + VK_DRIVER_ID_MESA_V3DV = 19, + VK_DRIVER_ID_MESA_PANVK = 20, + VK_DRIVER_ID_SAMSUNG_PROPRIETARY = 21, + VK_DRIVER_ID_MESA_VENUS = 22, + VK_DRIVER_ID_MESA_DOZEN = 23, + VK_DRIVER_ID_MESA_NVK = 24, + VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA = 25, + VK_DRIVER_ID_MESA_AGXV = 26, + VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY, + VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE, + VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV, + VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR = VK_DRIVER_ID_NVIDIA_PROPRIETARY, + VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS, + VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA, + VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR = VK_DRIVER_ID_IMAGINATION_PROPRIETARY, + VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR = VK_DRIVER_ID_QUALCOMM_PROPRIETARY, + VK_DRIVER_ID_ARM_PROPRIETARY_KHR = VK_DRIVER_ID_ARM_PROPRIETARY, + VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, + VK_DRIVER_ID_GGP_PROPRIETARY_KHR = VK_DRIVER_ID_GGP_PROPRIETARY, + VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR = VK_DRIVER_ID_BROADCOM_PROPRIETARY, + VK_DRIVER_ID_MAX_ENUM = 0x7FFFFFFF +} VkDriverId; + +typedef enum VkShaderFloatControlsIndependence { + 
VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY = 0, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL = 1, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE = 2, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_MAX_ENUM = 0x7FFFFFFF +} VkShaderFloatControlsIndependence; + +typedef enum VkSamplerReductionMode { + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE = 0, + VK_SAMPLER_REDUCTION_MODE_MIN = 1, + VK_SAMPLER_REDUCTION_MODE_MAX = 2, + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_RANGECLAMP_QCOM = 1000521000, + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE, + VK_SAMPLER_REDUCTION_MODE_MIN_EXT = VK_SAMPLER_REDUCTION_MODE_MIN, + VK_SAMPLER_REDUCTION_MODE_MAX_EXT = VK_SAMPLER_REDUCTION_MODE_MAX, + VK_SAMPLER_REDUCTION_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerReductionMode; + +typedef enum VkSemaphoreType { + VK_SEMAPHORE_TYPE_BINARY = 0, + VK_SEMAPHORE_TYPE_TIMELINE = 1, + VK_SEMAPHORE_TYPE_BINARY_KHR = VK_SEMAPHORE_TYPE_BINARY, + VK_SEMAPHORE_TYPE_TIMELINE_KHR = VK_SEMAPHORE_TYPE_TIMELINE, + VK_SEMAPHORE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreType; + +typedef enum VkResolveModeFlagBits { + VK_RESOLVE_MODE_NONE = 0, + VK_RESOLVE_MODE_SAMPLE_ZERO_BIT = 0x00000001, + VK_RESOLVE_MODE_AVERAGE_BIT = 0x00000002, + VK_RESOLVE_MODE_MIN_BIT = 0x00000004, + VK_RESOLVE_MODE_MAX_BIT = 0x00000008, + VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID = 0x00000010, + VK_RESOLVE_MODE_NONE_KHR = VK_RESOLVE_MODE_NONE, + VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT, + VK_RESOLVE_MODE_AVERAGE_BIT_KHR = VK_RESOLVE_MODE_AVERAGE_BIT, + VK_RESOLVE_MODE_MIN_BIT_KHR = VK_RESOLVE_MODE_MIN_BIT, + VK_RESOLVE_MODE_MAX_BIT_KHR = VK_RESOLVE_MODE_MAX_BIT, + VK_RESOLVE_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkResolveModeFlagBits; +typedef VkFlags VkResolveModeFlags; + +typedef enum VkDescriptorBindingFlagBits { + VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT = 0x00000001, + VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT = 0x00000002, + VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT = 0x00000004, + VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT = 0x00000008, + VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, + VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, + VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT, + VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT, + VK_DESCRIPTOR_BINDING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorBindingFlagBits; +typedef VkFlags VkDescriptorBindingFlags; + +typedef enum VkSemaphoreWaitFlagBits { + VK_SEMAPHORE_WAIT_ANY_BIT = 0x00000001, + VK_SEMAPHORE_WAIT_ANY_BIT_KHR = VK_SEMAPHORE_WAIT_ANY_BIT, + VK_SEMAPHORE_WAIT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreWaitFlagBits; +typedef VkFlags VkSemaphoreWaitFlags; +typedef struct VkPhysicalDeviceVulkan11Features { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer16BitAccess; + VkBool32 uniformAndStorageBuffer16BitAccess; + VkBool32 storagePushConstant16; + VkBool32 storageInputOutput16; + VkBool32 multiview; + VkBool32 multiviewGeometryShader; + VkBool32 
multiviewTessellationShader; + VkBool32 variablePointersStorageBuffer; + VkBool32 variablePointers; + VkBool32 protectedMemory; + VkBool32 samplerYcbcrConversion; + VkBool32 shaderDrawParameters; +} VkPhysicalDeviceVulkan11Features; + +typedef struct VkPhysicalDeviceVulkan11Properties { + VkStructureType sType; + void* pNext; + uint8_t deviceUUID[VK_UUID_SIZE]; + uint8_t driverUUID[VK_UUID_SIZE]; + uint8_t deviceLUID[VK_LUID_SIZE]; + uint32_t deviceNodeMask; + VkBool32 deviceLUIDValid; + uint32_t subgroupSize; + VkShaderStageFlags subgroupSupportedStages; + VkSubgroupFeatureFlags subgroupSupportedOperations; + VkBool32 subgroupQuadOperationsInAllStages; + VkPointClippingBehavior pointClippingBehavior; + uint32_t maxMultiviewViewCount; + uint32_t maxMultiviewInstanceIndex; + VkBool32 protectedNoFault; + uint32_t maxPerSetDescriptors; + VkDeviceSize maxMemoryAllocationSize; +} VkPhysicalDeviceVulkan11Properties; + +typedef struct VkPhysicalDeviceVulkan12Features { + VkStructureType sType; + void* pNext; + VkBool32 samplerMirrorClampToEdge; + VkBool32 drawIndirectCount; + VkBool32 storageBuffer8BitAccess; + VkBool32 uniformAndStorageBuffer8BitAccess; + VkBool32 storagePushConstant8; + VkBool32 shaderBufferInt64Atomics; + VkBool32 shaderSharedInt64Atomics; + VkBool32 shaderFloat16; + VkBool32 shaderInt8; + VkBool32 descriptorIndexing; + VkBool32 shaderInputAttachmentArrayDynamicIndexing; + VkBool32 shaderUniformTexelBufferArrayDynamicIndexing; + VkBool32 shaderStorageTexelBufferArrayDynamicIndexing; + VkBool32 shaderUniformBufferArrayNonUniformIndexing; + VkBool32 shaderSampledImageArrayNonUniformIndexing; + VkBool32 shaderStorageBufferArrayNonUniformIndexing; + VkBool32 shaderStorageImageArrayNonUniformIndexing; + VkBool32 shaderInputAttachmentArrayNonUniformIndexing; + VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing; + VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing; + VkBool32 descriptorBindingUniformBufferUpdateAfterBind; + VkBool32 descriptorBindingSampledImageUpdateAfterBind; + VkBool32 descriptorBindingStorageImageUpdateAfterBind; + VkBool32 descriptorBindingStorageBufferUpdateAfterBind; + VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingUpdateUnusedWhilePending; + VkBool32 descriptorBindingPartiallyBound; + VkBool32 descriptorBindingVariableDescriptorCount; + VkBool32 runtimeDescriptorArray; + VkBool32 samplerFilterMinmax; + VkBool32 scalarBlockLayout; + VkBool32 imagelessFramebuffer; + VkBool32 uniformBufferStandardLayout; + VkBool32 shaderSubgroupExtendedTypes; + VkBool32 separateDepthStencilLayouts; + VkBool32 hostQueryReset; + VkBool32 timelineSemaphore; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; + VkBool32 vulkanMemoryModel; + VkBool32 vulkanMemoryModelDeviceScope; + VkBool32 vulkanMemoryModelAvailabilityVisibilityChains; + VkBool32 shaderOutputViewportIndex; + VkBool32 shaderOutputLayer; + VkBool32 subgroupBroadcastDynamicId; +} VkPhysicalDeviceVulkan12Features; + +typedef struct VkConformanceVersion { + uint8_t major; + uint8_t minor; + uint8_t subminor; + uint8_t patch; +} VkConformanceVersion; + +typedef struct VkPhysicalDeviceVulkan12Properties { + VkStructureType sType; + void* pNext; + VkDriverId driverID; + char driverName[VK_MAX_DRIVER_NAME_SIZE]; + char driverInfo[VK_MAX_DRIVER_INFO_SIZE]; + VkConformanceVersion conformanceVersion; + VkShaderFloatControlsIndependence 
denormBehaviorIndependence; + VkShaderFloatControlsIndependence roundingModeIndependence; + VkBool32 shaderSignedZeroInfNanPreserveFloat16; + VkBool32 shaderSignedZeroInfNanPreserveFloat32; + VkBool32 shaderSignedZeroInfNanPreserveFloat64; + VkBool32 shaderDenormPreserveFloat16; + VkBool32 shaderDenormPreserveFloat32; + VkBool32 shaderDenormPreserveFloat64; + VkBool32 shaderDenormFlushToZeroFloat16; + VkBool32 shaderDenormFlushToZeroFloat32; + VkBool32 shaderDenormFlushToZeroFloat64; + VkBool32 shaderRoundingModeRTEFloat16; + VkBool32 shaderRoundingModeRTEFloat32; + VkBool32 shaderRoundingModeRTEFloat64; + VkBool32 shaderRoundingModeRTZFloat16; + VkBool32 shaderRoundingModeRTZFloat32; + VkBool32 shaderRoundingModeRTZFloat64; + uint32_t maxUpdateAfterBindDescriptorsInAllPools; + VkBool32 shaderUniformBufferArrayNonUniformIndexingNative; + VkBool32 shaderSampledImageArrayNonUniformIndexingNative; + VkBool32 shaderStorageBufferArrayNonUniformIndexingNative; + VkBool32 shaderStorageImageArrayNonUniformIndexingNative; + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative; + VkBool32 robustBufferAccessUpdateAfterBind; + VkBool32 quadDivergentImplicitLod; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; + uint32_t maxPerStageUpdateAfterBindResources; + uint32_t maxDescriptorSetUpdateAfterBindSamplers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; + VkResolveModeFlags supportedDepthResolveModes; + VkResolveModeFlags supportedStencilResolveModes; + VkBool32 independentResolveNone; + VkBool32 independentResolve; + VkBool32 filterMinmaxSingleComponentFormats; + VkBool32 filterMinmaxImageComponentMapping; + uint64_t maxTimelineSemaphoreValueDifference; + VkSampleCountFlags framebufferIntegerColorSampleCounts; +} VkPhysicalDeviceVulkan12Properties; + +typedef struct VkImageFormatListCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t viewFormatCount; + const VkFormat* pViewFormats; +} VkImageFormatListCreateInfo; + +typedef struct VkAttachmentDescription2 { + VkStructureType sType; + const void* pNext; + VkAttachmentDescriptionFlags flags; + VkFormat format; + VkSampleCountFlagBits samples; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkAttachmentLoadOp stencilLoadOp; + VkAttachmentStoreOp stencilStoreOp; + VkImageLayout initialLayout; + VkImageLayout finalLayout; +} VkAttachmentDescription2; + +typedef struct VkAttachmentReference2 { + VkStructureType sType; + const void* pNext; + uint32_t attachment; + VkImageLayout layout; + VkImageAspectFlags aspectMask; +} VkAttachmentReference2; + +typedef struct VkSubpassDescription2 { + VkStructureType sType; + const void* pNext; + VkSubpassDescriptionFlags flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t viewMask; + uint32_t inputAttachmentCount; + const VkAttachmentReference2* pInputAttachments; + uint32_t 
colorAttachmentCount; + const VkAttachmentReference2* pColorAttachments; + const VkAttachmentReference2* pResolveAttachments; + const VkAttachmentReference2* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; +} VkSubpassDescription2; + +typedef struct VkSubpassDependency2 { + VkStructureType sType; + const void* pNext; + uint32_t srcSubpass; + uint32_t dstSubpass; + VkPipelineStageFlags srcStageMask; + VkPipelineStageFlags dstStageMask; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkDependencyFlags dependencyFlags; + int32_t viewOffset; +} VkSubpassDependency2; + +typedef struct VkRenderPassCreateInfo2 { + VkStructureType sType; + const void* pNext; + VkRenderPassCreateFlags flags; + uint32_t attachmentCount; + const VkAttachmentDescription2* pAttachments; + uint32_t subpassCount; + const VkSubpassDescription2* pSubpasses; + uint32_t dependencyCount; + const VkSubpassDependency2* pDependencies; + uint32_t correlatedViewMaskCount; + const uint32_t* pCorrelatedViewMasks; +} VkRenderPassCreateInfo2; + +typedef struct VkSubpassBeginInfo { + VkStructureType sType; + const void* pNext; + VkSubpassContents contents; +} VkSubpassBeginInfo; + +typedef struct VkSubpassEndInfo { + VkStructureType sType; + const void* pNext; +} VkSubpassEndInfo; + +typedef struct VkPhysicalDevice8BitStorageFeatures { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer8BitAccess; + VkBool32 uniformAndStorageBuffer8BitAccess; + VkBool32 storagePushConstant8; +} VkPhysicalDevice8BitStorageFeatures; + +typedef struct VkPhysicalDeviceDriverProperties { + VkStructureType sType; + void* pNext; + VkDriverId driverID; + char driverName[VK_MAX_DRIVER_NAME_SIZE]; + char driverInfo[VK_MAX_DRIVER_INFO_SIZE]; + VkConformanceVersion conformanceVersion; +} VkPhysicalDeviceDriverProperties; + +typedef struct VkPhysicalDeviceShaderAtomicInt64Features { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferInt64Atomics; + VkBool32 shaderSharedInt64Atomics; +} VkPhysicalDeviceShaderAtomicInt64Features; + +typedef struct VkPhysicalDeviceShaderFloat16Int8Features { + VkStructureType sType; + void* pNext; + VkBool32 shaderFloat16; + VkBool32 shaderInt8; +} VkPhysicalDeviceShaderFloat16Int8Features; + +typedef struct VkPhysicalDeviceFloatControlsProperties { + VkStructureType sType; + void* pNext; + VkShaderFloatControlsIndependence denormBehaviorIndependence; + VkShaderFloatControlsIndependence roundingModeIndependence; + VkBool32 shaderSignedZeroInfNanPreserveFloat16; + VkBool32 shaderSignedZeroInfNanPreserveFloat32; + VkBool32 shaderSignedZeroInfNanPreserveFloat64; + VkBool32 shaderDenormPreserveFloat16; + VkBool32 shaderDenormPreserveFloat32; + VkBool32 shaderDenormPreserveFloat64; + VkBool32 shaderDenormFlushToZeroFloat16; + VkBool32 shaderDenormFlushToZeroFloat32; + VkBool32 shaderDenormFlushToZeroFloat64; + VkBool32 shaderRoundingModeRTEFloat16; + VkBool32 shaderRoundingModeRTEFloat32; + VkBool32 shaderRoundingModeRTEFloat64; + VkBool32 shaderRoundingModeRTZFloat16; + VkBool32 shaderRoundingModeRTZFloat32; + VkBool32 shaderRoundingModeRTZFloat64; +} VkPhysicalDeviceFloatControlsProperties; + +typedef struct VkDescriptorSetLayoutBindingFlagsCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t bindingCount; + const VkDescriptorBindingFlags* pBindingFlags; +} VkDescriptorSetLayoutBindingFlagsCreateInfo; + +typedef struct VkPhysicalDeviceDescriptorIndexingFeatures { + VkStructureType sType; + void* pNext; + VkBool32 
shaderInputAttachmentArrayDynamicIndexing; + VkBool32 shaderUniformTexelBufferArrayDynamicIndexing; + VkBool32 shaderStorageTexelBufferArrayDynamicIndexing; + VkBool32 shaderUniformBufferArrayNonUniformIndexing; + VkBool32 shaderSampledImageArrayNonUniformIndexing; + VkBool32 shaderStorageBufferArrayNonUniformIndexing; + VkBool32 shaderStorageImageArrayNonUniformIndexing; + VkBool32 shaderInputAttachmentArrayNonUniformIndexing; + VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing; + VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing; + VkBool32 descriptorBindingUniformBufferUpdateAfterBind; + VkBool32 descriptorBindingSampledImageUpdateAfterBind; + VkBool32 descriptorBindingStorageImageUpdateAfterBind; + VkBool32 descriptorBindingStorageBufferUpdateAfterBind; + VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingUpdateUnusedWhilePending; + VkBool32 descriptorBindingPartiallyBound; + VkBool32 descriptorBindingVariableDescriptorCount; + VkBool32 runtimeDescriptorArray; +} VkPhysicalDeviceDescriptorIndexingFeatures; + +typedef struct VkPhysicalDeviceDescriptorIndexingProperties { + VkStructureType sType; + void* pNext; + uint32_t maxUpdateAfterBindDescriptorsInAllPools; + VkBool32 shaderUniformBufferArrayNonUniformIndexingNative; + VkBool32 shaderSampledImageArrayNonUniformIndexingNative; + VkBool32 shaderStorageBufferArrayNonUniformIndexingNative; + VkBool32 shaderStorageImageArrayNonUniformIndexingNative; + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative; + VkBool32 robustBufferAccessUpdateAfterBind; + VkBool32 quadDivergentImplicitLod; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; + uint32_t maxPerStageUpdateAfterBindResources; + uint32_t maxDescriptorSetUpdateAfterBindSamplers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; +} VkPhysicalDeviceDescriptorIndexingProperties; + +typedef struct VkDescriptorSetVariableDescriptorCountAllocateInfo { + VkStructureType sType; + const void* pNext; + uint32_t descriptorSetCount; + const uint32_t* pDescriptorCounts; +} VkDescriptorSetVariableDescriptorCountAllocateInfo; + +typedef struct VkDescriptorSetVariableDescriptorCountLayoutSupport { + VkStructureType sType; + void* pNext; + uint32_t maxVariableDescriptorCount; +} VkDescriptorSetVariableDescriptorCountLayoutSupport; + +typedef struct VkSubpassDescriptionDepthStencilResolve { + VkStructureType sType; + const void* pNext; + VkResolveModeFlagBits depthResolveMode; + VkResolveModeFlagBits stencilResolveMode; + const VkAttachmentReference2* pDepthStencilResolveAttachment; +} VkSubpassDescriptionDepthStencilResolve; + +typedef struct VkPhysicalDeviceDepthStencilResolveProperties { + VkStructureType sType; + void* pNext; + VkResolveModeFlags supportedDepthResolveModes; + VkResolveModeFlags 
supportedStencilResolveModes; + VkBool32 independentResolveNone; + VkBool32 independentResolve; +} VkPhysicalDeviceDepthStencilResolveProperties; + +typedef struct VkPhysicalDeviceScalarBlockLayoutFeatures { + VkStructureType sType; + void* pNext; + VkBool32 scalarBlockLayout; +} VkPhysicalDeviceScalarBlockLayoutFeatures; + +typedef struct VkImageStencilUsageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags stencilUsage; +} VkImageStencilUsageCreateInfo; + +typedef struct VkSamplerReductionModeCreateInfo { + VkStructureType sType; + const void* pNext; + VkSamplerReductionMode reductionMode; +} VkSamplerReductionModeCreateInfo; + +typedef struct VkPhysicalDeviceSamplerFilterMinmaxProperties { + VkStructureType sType; + void* pNext; + VkBool32 filterMinmaxSingleComponentFormats; + VkBool32 filterMinmaxImageComponentMapping; +} VkPhysicalDeviceSamplerFilterMinmaxProperties; + +typedef struct VkPhysicalDeviceVulkanMemoryModelFeatures { + VkStructureType sType; + void* pNext; + VkBool32 vulkanMemoryModel; + VkBool32 vulkanMemoryModelDeviceScope; + VkBool32 vulkanMemoryModelAvailabilityVisibilityChains; +} VkPhysicalDeviceVulkanMemoryModelFeatures; + +typedef struct VkPhysicalDeviceImagelessFramebufferFeatures { + VkStructureType sType; + void* pNext; + VkBool32 imagelessFramebuffer; +} VkPhysicalDeviceImagelessFramebufferFeatures; + +typedef struct VkFramebufferAttachmentImageInfo { + VkStructureType sType; + const void* pNext; + VkImageCreateFlags flags; + VkImageUsageFlags usage; + uint32_t width; + uint32_t height; + uint32_t layerCount; + uint32_t viewFormatCount; + const VkFormat* pViewFormats; +} VkFramebufferAttachmentImageInfo; + +typedef struct VkFramebufferAttachmentsCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t attachmentImageInfoCount; + const VkFramebufferAttachmentImageInfo* pAttachmentImageInfos; +} VkFramebufferAttachmentsCreateInfo; + +typedef struct VkRenderPassAttachmentBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t attachmentCount; + const VkImageView* pAttachments; +} VkRenderPassAttachmentBeginInfo; + +typedef struct VkPhysicalDeviceUniformBufferStandardLayoutFeatures { + VkStructureType sType; + void* pNext; + VkBool32 uniformBufferStandardLayout; +} VkPhysicalDeviceUniformBufferStandardLayoutFeatures; + +typedef struct VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupExtendedTypes; +} VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures; + +typedef struct VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures { + VkStructureType sType; + void* pNext; + VkBool32 separateDepthStencilLayouts; +} VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures; + +typedef struct VkAttachmentReferenceStencilLayout { + VkStructureType sType; + void* pNext; + VkImageLayout stencilLayout; +} VkAttachmentReferenceStencilLayout; + +typedef struct VkAttachmentDescriptionStencilLayout { + VkStructureType sType; + void* pNext; + VkImageLayout stencilInitialLayout; + VkImageLayout stencilFinalLayout; +} VkAttachmentDescriptionStencilLayout; + +typedef struct VkPhysicalDeviceHostQueryResetFeatures { + VkStructureType sType; + void* pNext; + VkBool32 hostQueryReset; +} VkPhysicalDeviceHostQueryResetFeatures; + +typedef struct VkPhysicalDeviceTimelineSemaphoreFeatures { + VkStructureType sType; + void* pNext; + VkBool32 timelineSemaphore; +} VkPhysicalDeviceTimelineSemaphoreFeatures; + +typedef struct VkPhysicalDeviceTimelineSemaphoreProperties { + 
VkStructureType sType; + void* pNext; + uint64_t maxTimelineSemaphoreValueDifference; +} VkPhysicalDeviceTimelineSemaphoreProperties; + +typedef struct VkSemaphoreTypeCreateInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreType semaphoreType; + uint64_t initialValue; +} VkSemaphoreTypeCreateInfo; + +typedef struct VkTimelineSemaphoreSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreValueCount; + const uint64_t* pWaitSemaphoreValues; + uint32_t signalSemaphoreValueCount; + const uint64_t* pSignalSemaphoreValues; +} VkTimelineSemaphoreSubmitInfo; + +typedef struct VkSemaphoreWaitInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreWaitFlags flags; + uint32_t semaphoreCount; + const VkSemaphore* pSemaphores; + const uint64_t* pValues; +} VkSemaphoreWaitInfo; + +typedef struct VkSemaphoreSignalInfo { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + uint64_t value; +} VkSemaphoreSignalInfo; + +typedef struct VkPhysicalDeviceBufferDeviceAddressFeatures { + VkStructureType sType; + void* pNext; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; +} VkPhysicalDeviceBufferDeviceAddressFeatures; + +typedef struct VkBufferDeviceAddressInfo { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkBufferDeviceAddressInfo; + +typedef struct VkBufferOpaqueCaptureAddressCreateInfo { + VkStructureType sType; + const void* pNext; + uint64_t opaqueCaptureAddress; +} VkBufferOpaqueCaptureAddressCreateInfo; + +typedef struct VkMemoryOpaqueCaptureAddressAllocateInfo { + VkStructureType sType; + const void* pNext; + uint64_t opaqueCaptureAddress; +} VkMemoryOpaqueCaptureAddressAllocateInfo; + +typedef struct VkDeviceMemoryOpaqueCaptureAddressInfo { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; +} VkDeviceMemoryOpaqueCaptureAddressInfo; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkResetQueryPool)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValue)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue); +typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphores)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphore)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo); +typedef VkDeviceAddress 
(VKAPI_PTR *PFN_vkGetBufferDeviceAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddress)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCount( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCount( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2( + VkDevice device, + const VkRenderPassCreateInfo2* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + const VkSubpassBeginInfo* pSubpassBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2( + VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo* pSubpassBeginInfo, + const VkSubpassEndInfo* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2( + VkCommandBuffer commandBuffer, + const VkSubpassEndInfo* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkResetQueryPool( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValue( + VkDevice device, + VkSemaphore semaphore, + uint64_t* pValue); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphores( + VkDevice device, + const VkSemaphoreWaitInfo* pWaitInfo, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphore( + VkDevice device, + const VkSemaphoreSignalInfo* pSignalInfo); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddress( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddress( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress( + VkDevice device, + const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); +#endif + + +// VK_VERSION_1_3 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_VERSION_1_3 1 +// Vulkan 1.3 version number +#define VK_API_VERSION_1_3 VK_MAKE_API_VERSION(0, 1, 3, 0)// Patch version should always be set to 0 + +typedef uint64_t VkFlags64; +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPrivateDataSlot) + +typedef enum VkPipelineCreationFeedbackFlagBits { + VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT = 0x00000001, + VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT = 0x00000002, + VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT = 0x00000004, + VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT, + VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT, + VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT, + VK_PIPELINE_CREATION_FEEDBACK_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCreationFeedbackFlagBits; +typedef VkFlags VkPipelineCreationFeedbackFlags; + +typedef enum VkToolPurposeFlagBits { + VK_TOOL_PURPOSE_VALIDATION_BIT = 0x00000001, + VK_TOOL_PURPOSE_PROFILING_BIT = 0x00000002, + VK_TOOL_PURPOSE_TRACING_BIT = 0x00000004, + VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT = 0x00000008, + VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT = 0x00000010, + VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT = 0x00000020, + VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT = 0x00000040, + VK_TOOL_PURPOSE_VALIDATION_BIT_EXT = VK_TOOL_PURPOSE_VALIDATION_BIT, + VK_TOOL_PURPOSE_PROFILING_BIT_EXT = VK_TOOL_PURPOSE_PROFILING_BIT, + VK_TOOL_PURPOSE_TRACING_BIT_EXT = VK_TOOL_PURPOSE_TRACING_BIT, + VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT, + VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT, + VK_TOOL_PURPOSE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkToolPurposeFlagBits; +typedef VkFlags VkToolPurposeFlags; +typedef VkFlags VkPrivateDataSlotCreateFlags; +typedef VkFlags64 VkPipelineStageFlags2; + +// Flag bits for VkPipelineStageFlagBits2 +typedef VkFlags64 VkPipelineStageFlagBits2; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE = 0ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE_KHR = 0ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT = 0x00000001ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR = 0x00000001ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT = 0x00000002ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR = 0x00000002ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT = 0x00000004ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR = 0x00000004ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT = 0x00000008ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR = 0x00000008ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR = 0x00000010ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR = 0x00000020ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT = 0x00000040ULL; +static 
const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR = 0x00000040ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT = 0x00000080ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR = 0x00000080ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT = 0x00000100ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR = 0x00000100ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT = 0x00000200ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR = 0x00000200ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR = 0x00000400ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT = 0x00000800ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR = 0x00000800ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT = 0x00002000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR = 0x00002000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT = 0x00004000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT_KHR = 0x00004000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT = 0x00008000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR = 0x00008000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT = 0x00010000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR = 0x00010000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT = 0x100000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT_KHR = 0x100000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT = 0x200000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR = 0x200000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT = 0x400000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT_KHR = 0x400000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT = 0x800000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR = 0x800000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT = 0x1000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR = 0x1000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT = 0x2000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR = 0x2000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT = 0x4000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR = 
0x4000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR = 0x04000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR = 0x08000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV = 0x00020000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV = 0x00400000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR = 0x00200000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV = 0x00200000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV = 0x00080000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV = 0x00100000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT = 0x00080000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT = 0x00100000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI = 0x8000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI = 0x8000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI = 0x10000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR = 0x10000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT = 0x40000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLUSTER_CULLING_SHADER_BIT_HUAWEI = 0x20000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV = 0x20000000ULL; + +typedef VkFlags64 VkAccessFlags2; + +// Flag bits for VkAccessFlagBits2 +typedef VkFlags64 VkAccessFlagBits2; +static const VkAccessFlagBits2 VK_ACCESS_2_NONE = 0ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_NONE_KHR = 0ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT = 0x00000001ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR = 0x00000001ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT = 0x00000002ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT_KHR = 0x00000002ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR = 0x00000004ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT = 0x00000008ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT_KHR = 0x00000008ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT = 0x00000010ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR = 0x00000010ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT = 0x00000020ULL; 
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT_KHR = 0x00000020ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT = 0x00000040ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT_KHR = 0x00000040ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT = 0x00000080ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR = 0x00000080ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR = 0x00000100ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR = 0x00000200ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR = 0x00000400ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT = 0x00000800ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT_KHR = 0x00000800ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT = 0x00001000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR = 0x00001000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT = 0x00002000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT_KHR = 0x00002000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT = 0x00004000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT_KHR = 0x00004000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT = 0x00008000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT_KHR = 0x00008000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT = 0x00010000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT_KHR = 0x00010000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT = 0x100000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR = 0x100000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT = 0x200000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR = 0x200000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT = 0x400000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR = 0x400000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR = 0x800000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR = 0x1000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR = 0x2000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR = 0x4000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000ULL; +static const VkAccessFlagBits2 
VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT = 0x20000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI = 0x8000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR = 0x10000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_READ_BIT_EXT = 0x100000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT = 0x200000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV = 0x40000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV = 0x80000000000ULL; + + +typedef enum VkSubmitFlagBits { + VK_SUBMIT_PROTECTED_BIT = 0x00000001, + VK_SUBMIT_PROTECTED_BIT_KHR = VK_SUBMIT_PROTECTED_BIT, + VK_SUBMIT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubmitFlagBits; +typedef VkFlags VkSubmitFlags; + +typedef enum VkRenderingFlagBits { + VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT = 0x00000001, + VK_RENDERING_SUSPENDING_BIT = 0x00000002, + VK_RENDERING_RESUMING_BIT = 0x00000004, + VK_RENDERING_CONTENTS_INLINE_BIT_EXT = 0x00000010, + VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x00000008, + VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT_KHR = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT, + VK_RENDERING_SUSPENDING_BIT_KHR = VK_RENDERING_SUSPENDING_BIT, + VK_RENDERING_RESUMING_BIT_KHR = VK_RENDERING_RESUMING_BIT, + VK_RENDERING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkRenderingFlagBits; +typedef VkFlags VkRenderingFlags; +typedef VkFlags64 VkFormatFeatureFlags2; + +// Flag bits for VkFormatFeatureFlagBits2 +typedef VkFlags64 VkFormatFeatureFlagBits2; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT = 0x00000001ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT_KHR = 0x00000001ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT = 0x00000002ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT_KHR = 0x00000002ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT_KHR = 0x00000004ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT = 0x00000010ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000010ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT_KHR = 0x00000020ULL; +static const 
VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT = 0x00000040ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT_KHR = 0x00000040ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT = 0x00000080ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT_KHR = 0x00000080ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT_KHR = 0x00000100ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT_KHR = 0x00000200ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT = 0x00000400ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT_KHR = 0x00000400ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT = 0x00000800ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT_KHR = 0x00000800ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT_KHR = 0x00001000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT = 0x00002000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT = 0x00004000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT_KHR = 0x00004000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT = 0x00008000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT_KHR = 0x00008000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT_KHR = 0x00010000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = 0x00020000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = 0x00040000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = 0x00080000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = 0x00100000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = 0x00200000ULL; 
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT = 0x00400000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT_KHR = 0x00400000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT = 0x00800000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT_KHR = 0x00800000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT = 0x80000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT_KHR = 0x80000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT = 0x100000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT_KHR = 0x100000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT = 0x200000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT_KHR = 0x200000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT = 0x400000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV = 0x4000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM = 0x400000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM = 0x800000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM = 0x1000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM = 0x2000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV = 0x10000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV = 0x20000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV = 0x40000000000ULL; + +typedef struct VkPhysicalDeviceVulkan13Features { + VkStructureType sType; + void* pNext; + VkBool32 robustImageAccess; + VkBool32 inlineUniformBlock; + VkBool32 descriptorBindingInlineUniformBlockUpdateAfterBind; + VkBool32 pipelineCreationCacheControl; + VkBool32 privateData; + VkBool32 shaderDemoteToHelperInvocation; + VkBool32 shaderTerminateInvocation; + VkBool32 subgroupSizeControl; + VkBool32 computeFullSubgroups; + VkBool32 synchronization2; + VkBool32 textureCompressionASTC_HDR; + VkBool32 shaderZeroInitializeWorkgroupMemory; + VkBool32 dynamicRendering; + VkBool32 shaderIntegerDotProduct; + VkBool32 maintenance4; +} VkPhysicalDeviceVulkan13Features; + +typedef struct VkPhysicalDeviceVulkan13Properties { + VkStructureType sType; + void* pNext; + uint32_t 
minSubgroupSize; + uint32_t maxSubgroupSize; + uint32_t maxComputeWorkgroupSubgroups; + VkShaderStageFlags requiredSubgroupSizeStages; + uint32_t maxInlineUniformBlockSize; + uint32_t maxPerStageDescriptorInlineUniformBlocks; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks; + uint32_t maxDescriptorSetInlineUniformBlocks; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks; + uint32_t maxInlineUniformTotalSize; + VkBool32 integerDotProduct8BitUnsignedAccelerated; + VkBool32 integerDotProduct8BitSignedAccelerated; + VkBool32 integerDotProduct8BitMixedSignednessAccelerated; + VkBool32 integerDotProduct4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedSignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProduct16BitUnsignedAccelerated; + VkBool32 integerDotProduct16BitSignedAccelerated; + VkBool32 integerDotProduct16BitMixedSignednessAccelerated; + VkBool32 integerDotProduct32BitUnsignedAccelerated; + VkBool32 integerDotProduct32BitSignedAccelerated; + VkBool32 integerDotProduct32BitMixedSignednessAccelerated; + VkBool32 integerDotProduct64BitUnsignedAccelerated; + VkBool32 integerDotProduct64BitSignedAccelerated; + VkBool32 integerDotProduct64BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated; + VkDeviceSize storageTexelBufferOffsetAlignmentBytes; + VkBool32 storageTexelBufferOffsetSingleTexelAlignment; + VkDeviceSize uniformTexelBufferOffsetAlignmentBytes; + VkBool32 uniformTexelBufferOffsetSingleTexelAlignment; + VkDeviceSize maxBufferSize; +} VkPhysicalDeviceVulkan13Properties; + +typedef struct VkPipelineCreationFeedback { + VkPipelineCreationFeedbackFlags flags; + uint64_t duration; +} VkPipelineCreationFeedback; + +typedef struct VkPipelineCreationFeedbackCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreationFeedback* pPipelineCreationFeedback; + uint32_t pipelineStageCreationFeedbackCount; + VkPipelineCreationFeedback* pPipelineStageCreationFeedbacks; +} VkPipelineCreationFeedbackCreateInfo; + +typedef struct VkPhysicalDeviceShaderTerminateInvocationFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderTerminateInvocation; +} VkPhysicalDeviceShaderTerminateInvocationFeatures; + +typedef struct VkPhysicalDeviceToolProperties { + VkStructureType sType; + void* pNext; + char 
name[VK_MAX_EXTENSION_NAME_SIZE]; + char version[VK_MAX_EXTENSION_NAME_SIZE]; + VkToolPurposeFlags purposes; + char description[VK_MAX_DESCRIPTION_SIZE]; + char layer[VK_MAX_EXTENSION_NAME_SIZE]; +} VkPhysicalDeviceToolProperties; + +typedef struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderDemoteToHelperInvocation; +} VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures; + +typedef struct VkPhysicalDevicePrivateDataFeatures { + VkStructureType sType; + void* pNext; + VkBool32 privateData; +} VkPhysicalDevicePrivateDataFeatures; + +typedef struct VkDevicePrivateDataCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t privateDataSlotRequestCount; +} VkDevicePrivateDataCreateInfo; + +typedef struct VkPrivateDataSlotCreateInfo { + VkStructureType sType; + const void* pNext; + VkPrivateDataSlotCreateFlags flags; +} VkPrivateDataSlotCreateInfo; + +typedef struct VkPhysicalDevicePipelineCreationCacheControlFeatures { + VkStructureType sType; + void* pNext; + VkBool32 pipelineCreationCacheControl; +} VkPhysicalDevicePipelineCreationCacheControlFeatures; + +typedef struct VkMemoryBarrier2 { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2 srcStageMask; + VkAccessFlags2 srcAccessMask; + VkPipelineStageFlags2 dstStageMask; + VkAccessFlags2 dstAccessMask; +} VkMemoryBarrier2; + +typedef struct VkBufferMemoryBarrier2 { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2 srcStageMask; + VkAccessFlags2 srcAccessMask; + VkPipelineStageFlags2 dstStageMask; + VkAccessFlags2 dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; +} VkBufferMemoryBarrier2; + +typedef struct VkImageMemoryBarrier2 { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2 srcStageMask; + VkAccessFlags2 srcAccessMask; + VkPipelineStageFlags2 dstStageMask; + VkAccessFlags2 dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier2; + +typedef struct VkDependencyInfo { + VkStructureType sType; + const void* pNext; + VkDependencyFlags dependencyFlags; + uint32_t memoryBarrierCount; + const VkMemoryBarrier2* pMemoryBarriers; + uint32_t bufferMemoryBarrierCount; + const VkBufferMemoryBarrier2* pBufferMemoryBarriers; + uint32_t imageMemoryBarrierCount; + const VkImageMemoryBarrier2* pImageMemoryBarriers; +} VkDependencyInfo; + +typedef struct VkSemaphoreSubmitInfo { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + uint64_t value; + VkPipelineStageFlags2 stageMask; + uint32_t deviceIndex; +} VkSemaphoreSubmitInfo; + +typedef struct VkCommandBufferSubmitInfo { + VkStructureType sType; + const void* pNext; + VkCommandBuffer commandBuffer; + uint32_t deviceMask; +} VkCommandBufferSubmitInfo; + +typedef struct VkSubmitInfo2 { + VkStructureType sType; + const void* pNext; + VkSubmitFlags flags; + uint32_t waitSemaphoreInfoCount; + const VkSemaphoreSubmitInfo* pWaitSemaphoreInfos; + uint32_t commandBufferInfoCount; + const VkCommandBufferSubmitInfo* pCommandBufferInfos; + uint32_t signalSemaphoreInfoCount; + const VkSemaphoreSubmitInfo* pSignalSemaphoreInfos; +} VkSubmitInfo2; + +typedef struct VkPhysicalDeviceSynchronization2Features { + VkStructureType sType; + void* pNext; + VkBool32 synchronization2; +} 
VkPhysicalDeviceSynchronization2Features; + +typedef struct VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderZeroInitializeWorkgroupMemory; +} VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + +typedef struct VkPhysicalDeviceImageRobustnessFeatures { + VkStructureType sType; + void* pNext; + VkBool32 robustImageAccess; +} VkPhysicalDeviceImageRobustnessFeatures; + +typedef struct VkBufferCopy2 { + VkStructureType sType; + const void* pNext; + VkDeviceSize srcOffset; + VkDeviceSize dstOffset; + VkDeviceSize size; +} VkBufferCopy2; + +typedef struct VkCopyBufferInfo2 { + VkStructureType sType; + const void* pNext; + VkBuffer srcBuffer; + VkBuffer dstBuffer; + uint32_t regionCount; + const VkBufferCopy2* pRegions; +} VkCopyBufferInfo2; + +typedef struct VkImageCopy2 { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy2; + +typedef struct VkCopyImageInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageCopy2* pRegions; +} VkCopyImageInfo2; + +typedef struct VkBufferImageCopy2 { + VkStructureType sType; + const void* pNext; + VkDeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkBufferImageCopy2; + +typedef struct VkCopyBufferToImageInfo2 { + VkStructureType sType; + const void* pNext; + VkBuffer srcBuffer; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkBufferImageCopy2* pRegions; +} VkCopyBufferToImageInfo2; + +typedef struct VkCopyImageToBufferInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkBuffer dstBuffer; + uint32_t regionCount; + const VkBufferImageCopy2* pRegions; +} VkCopyImageToBufferInfo2; + +typedef struct VkImageBlit2 { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit2; + +typedef struct VkBlitImageInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageBlit2* pRegions; + VkFilter filter; +} VkBlitImageInfo2; + +typedef struct VkImageResolve2 { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageResolve2; + +typedef struct VkResolveImageInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageResolve2* pRegions; +} VkResolveImageInfo2; + +typedef struct VkPhysicalDeviceSubgroupSizeControlFeatures { + VkStructureType sType; + void* pNext; + VkBool32 subgroupSizeControl; + VkBool32 computeFullSubgroups; +} VkPhysicalDeviceSubgroupSizeControlFeatures; + +typedef struct VkPhysicalDeviceSubgroupSizeControlProperties { + VkStructureType sType; + void* pNext; + uint32_t minSubgroupSize; + uint32_t 
maxSubgroupSize; + uint32_t maxComputeWorkgroupSubgroups; + VkShaderStageFlags requiredSubgroupSizeStages; +} VkPhysicalDeviceSubgroupSizeControlProperties; + +typedef struct VkPipelineShaderStageRequiredSubgroupSizeCreateInfo { + VkStructureType sType; + void* pNext; + uint32_t requiredSubgroupSize; +} VkPipelineShaderStageRequiredSubgroupSizeCreateInfo; + +typedef struct VkPhysicalDeviceInlineUniformBlockFeatures { + VkStructureType sType; + void* pNext; + VkBool32 inlineUniformBlock; + VkBool32 descriptorBindingInlineUniformBlockUpdateAfterBind; +} VkPhysicalDeviceInlineUniformBlockFeatures; + +typedef struct VkPhysicalDeviceInlineUniformBlockProperties { + VkStructureType sType; + void* pNext; + uint32_t maxInlineUniformBlockSize; + uint32_t maxPerStageDescriptorInlineUniformBlocks; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks; + uint32_t maxDescriptorSetInlineUniformBlocks; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks; +} VkPhysicalDeviceInlineUniformBlockProperties; + +typedef struct VkWriteDescriptorSetInlineUniformBlock { + VkStructureType sType; + const void* pNext; + uint32_t dataSize; + const void* pData; +} VkWriteDescriptorSetInlineUniformBlock; + +typedef struct VkDescriptorPoolInlineUniformBlockCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t maxInlineUniformBlockBindings; +} VkDescriptorPoolInlineUniformBlockCreateInfo; + +typedef struct VkPhysicalDeviceTextureCompressionASTCHDRFeatures { + VkStructureType sType; + void* pNext; + VkBool32 textureCompressionASTC_HDR; +} VkPhysicalDeviceTextureCompressionASTCHDRFeatures; + +typedef struct VkRenderingAttachmentInfo { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; + VkResolveModeFlagBits resolveMode; + VkImageView resolveImageView; + VkImageLayout resolveImageLayout; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkClearValue clearValue; +} VkRenderingAttachmentInfo; + +typedef struct VkRenderingInfo { + VkStructureType sType; + const void* pNext; + VkRenderingFlags flags; + VkRect2D renderArea; + uint32_t layerCount; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkRenderingAttachmentInfo* pColorAttachments; + const VkRenderingAttachmentInfo* pDepthAttachment; + const VkRenderingAttachmentInfo* pStencilAttachment; +} VkRenderingInfo; + +typedef struct VkPipelineRenderingCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkFormat* pColorAttachmentFormats; + VkFormat depthAttachmentFormat; + VkFormat stencilAttachmentFormat; +} VkPipelineRenderingCreateInfo; + +typedef struct VkPhysicalDeviceDynamicRenderingFeatures { + VkStructureType sType; + void* pNext; + VkBool32 dynamicRendering; +} VkPhysicalDeviceDynamicRenderingFeatures; + +typedef struct VkCommandBufferInheritanceRenderingInfo { + VkStructureType sType; + const void* pNext; + VkRenderingFlags flags; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkFormat* pColorAttachmentFormats; + VkFormat depthAttachmentFormat; + VkFormat stencilAttachmentFormat; + VkSampleCountFlagBits rasterizationSamples; +} VkCommandBufferInheritanceRenderingInfo; + +typedef struct VkPhysicalDeviceShaderIntegerDotProductFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderIntegerDotProduct; +} VkPhysicalDeviceShaderIntegerDotProductFeatures; + +typedef struct VkPhysicalDeviceShaderIntegerDotProductProperties { + VkStructureType sType; + void* pNext; + 
VkBool32 integerDotProduct8BitUnsignedAccelerated; + VkBool32 integerDotProduct8BitSignedAccelerated; + VkBool32 integerDotProduct8BitMixedSignednessAccelerated; + VkBool32 integerDotProduct4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedSignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProduct16BitUnsignedAccelerated; + VkBool32 integerDotProduct16BitSignedAccelerated; + VkBool32 integerDotProduct16BitMixedSignednessAccelerated; + VkBool32 integerDotProduct32BitUnsignedAccelerated; + VkBool32 integerDotProduct32BitSignedAccelerated; + VkBool32 integerDotProduct32BitMixedSignednessAccelerated; + VkBool32 integerDotProduct64BitUnsignedAccelerated; + VkBool32 integerDotProduct64BitSignedAccelerated; + VkBool32 integerDotProduct64BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated; +} VkPhysicalDeviceShaderIntegerDotProductProperties; + +typedef struct VkPhysicalDeviceTexelBufferAlignmentProperties { + VkStructureType sType; + void* pNext; + VkDeviceSize storageTexelBufferOffsetAlignmentBytes; + VkBool32 storageTexelBufferOffsetSingleTexelAlignment; + VkDeviceSize uniformTexelBufferOffsetAlignmentBytes; + VkBool32 uniformTexelBufferOffsetSingleTexelAlignment; +} VkPhysicalDeviceTexelBufferAlignmentProperties; + +typedef struct VkFormatProperties3 { + VkStructureType sType; + void* pNext; + VkFormatFeatureFlags2 linearTilingFeatures; + VkFormatFeatureFlags2 optimalTilingFeatures; + VkFormatFeatureFlags2 bufferFeatures; +} VkFormatProperties3; + +typedef struct VkPhysicalDeviceMaintenance4Features { + VkStructureType sType; + void* pNext; + VkBool32 maintenance4; +} VkPhysicalDeviceMaintenance4Features; + +typedef struct VkPhysicalDeviceMaintenance4Properties { + VkStructureType sType; + void* pNext; + VkDeviceSize maxBufferSize; +} VkPhysicalDeviceMaintenance4Properties; + +typedef struct VkDeviceBufferMemoryRequirements { + VkStructureType sType; + const void* pNext; + const VkBufferCreateInfo* pCreateInfo; +} VkDeviceBufferMemoryRequirements; + +typedef struct VkDeviceImageMemoryRequirements { + VkStructureType sType; + const void* pNext; + const VkImageCreateInfo* pCreateInfo; + VkImageAspectFlagBits planeAspect; +} VkDeviceImageMemoryRequirements; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolProperties)(VkPhysicalDevice physicalDevice, uint32_t* 
pToolCount, VkPhysicalDeviceToolProperties* pToolProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlot)(VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot); +typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlot)(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateData)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data); +typedef void (VKAPI_PTR *PFN_vkGetPrivateData)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData); +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2)(VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRendering)(VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRendering)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdSetCullMode)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFace)(VkCommandBuffer commandBuffer, VkFrontFace frontFace); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopology)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCount)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCount)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnable)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable); +typedef 
void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnable)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOp)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnable)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnable)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOp)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnable)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnable)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnable)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable); +typedef void (VKAPI_PTR *PFN_vkGetDeviceBufferMemoryRequirements)(VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageMemoryRequirements)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSparseMemoryRequirements)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pToolCount, + VkPhysicalDeviceToolProperties* pToolProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlot( + VkDevice device, + const VkPrivateDataSlotCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPrivateDataSlot* pPrivateDataSlot); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlot( + VkDevice device, + VkPrivateDataSlot privateDataSlot, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateData( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t data); + +VKAPI_ATTR void VKAPI_CALL vkGetPrivateData( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2( + VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfo* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags2 stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + const VkDependencyInfo* pDependencyInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2( + VkCommandBuffer commandBuffer, + const VkDependencyInfo* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo2* pSubmits, + VkFence fence); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2( + VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2* pCopyBufferInfo); + 
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2( + VkCommandBuffer commandBuffer, + const VkCopyImageInfo2* pCopyImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2( + VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2( + VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2( + VkCommandBuffer commandBuffer, + const VkBlitImageInfo2* pBlitImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2( + VkCommandBuffer commandBuffer, + const VkResolveImageInfo2* pResolveImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRendering( + VkCommandBuffer commandBuffer, + const VkRenderingInfo* pRenderingInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRendering( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCullMode( + VkCommandBuffer commandBuffer, + VkCullModeFlags cullMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFace( + VkCommandBuffer commandBuffer, + VkFrontFace frontFace); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopology( + VkCommandBuffer commandBuffer, + VkPrimitiveTopology primitiveTopology); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCount( + VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCount( + VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes, + const VkDeviceSize* pStrides); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthWriteEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOp( + VkCommandBuffer commandBuffer, + VkCompareOp depthCompareOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthBoundsTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnable( + VkCommandBuffer commandBuffer, + VkBool32 stencilTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOp( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnable( + VkCommandBuffer commandBuffer, + VkBool32 rasterizerDiscardEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthBiasEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnable( + VkCommandBuffer commandBuffer, + VkBool32 primitiveRestartEnable); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirements( + VkDevice device, + const VkDeviceBufferMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirements( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirements( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + uint32_t* pSparseMemoryRequirementCount, + 
VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif + + +// VK_KHR_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_surface 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) +#define VK_KHR_SURFACE_SPEC_VERSION 25 +#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" + +typedef enum VkPresentModeKHR { + VK_PRESENT_MODE_IMMEDIATE_KHR = 0, + VK_PRESENT_MODE_MAILBOX_KHR = 1, + VK_PRESENT_MODE_FIFO_KHR = 2, + VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, + VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, + VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, + VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPresentModeKHR; + +typedef enum VkColorSpaceKHR { + VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0, + VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001, + VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002, + VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT = 1000104003, + VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004, + VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005, + VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, + VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, + VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, + VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009, + VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010, + VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011, + VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012, + VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013, + VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014, + VK_COLOR_SPACE_DISPLAY_NATIVE_AMD = 1000213000, + VK_COLORSPACE_SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, + VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkColorSpaceKHR; + +typedef enum VkSurfaceTransformFlagBitsKHR { + VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, + VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, + VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004, + VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080, + VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, + VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSurfaceTransformFlagBitsKHR; + +typedef enum VkCompositeAlphaFlagBitsKHR { + VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002, + VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004, + VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008, + VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkCompositeAlphaFlagBitsKHR; +typedef VkFlags VkCompositeAlphaFlagsKHR; +typedef VkFlags VkSurfaceTransformFlagsKHR; +typedef struct VkSurfaceCapabilitiesKHR { + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; +} VkSurfaceCapabilitiesKHR; + +typedef struct VkSurfaceFormatKHR { + VkFormat format; + VkColorSpaceKHR colorSpace; +} VkSurfaceFormatKHR; + +typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* 
pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR( + VkInstance instance, + VkSurfaceKHR surface, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32* pSupported); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormatKHR* pSurfaceFormats); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pPresentModeCount, + VkPresentModeKHR* pPresentModes); +#endif + + +// VK_KHR_swapchain is a preprocessor guard. Do not pass it to API calls. 
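+// Usage sketch (illustrative editorial example, not part of the header):
+// before creating a swapchain with the types declared below, an application
+// typically interrogates the surface through the VK_KHR_surface entry points
+// above. This assumes a valid `physicalDevice`, `queueFamilyIndex` and
+// `surface` already exist; error checking is omitted.
+//
+//     VkBool32 supported = VK_FALSE;
+//     vkGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex,
+//                                          surface, &supported);
+//
+//     VkSurfaceCapabilitiesKHR caps;
+//     vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, &caps);
+//
+//     uint32_t formatCount = 0;  /* two-call pattern: query count, then fill */
+//     vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface,
+//                                          &formatCount, NULL);
+//     VkSurfaceFormatKHR* formats =
+//         malloc(formatCount * sizeof(VkSurfaceFormatKHR));
+//     vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface,
+//                                          &formatCount, formats);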
+#define VK_KHR_swapchain 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) +#define VK_KHR_SWAPCHAIN_SPEC_VERSION 70 +#define VK_KHR_SWAPCHAIN_EXTENSION_NAME "VK_KHR_swapchain" + +typedef enum VkSwapchainCreateFlagBitsKHR { + VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = 0x00000001, + VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002, + VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR = 0x00000004, + VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT = 0x00000008, + VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSwapchainCreateFlagBitsKHR; +typedef VkFlags VkSwapchainCreateFlagsKHR; + +typedef enum VkDeviceGroupPresentModeFlagBitsKHR { + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR = 0x00000001, + VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR = 0x00000002, + VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR = 0x00000004, + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR = 0x00000008, + VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDeviceGroupPresentModeFlagBitsKHR; +typedef VkFlags VkDeviceGroupPresentModeFlagsKHR; +typedef struct VkSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainCreateFlagsKHR flags; + VkSurfaceKHR surface; + uint32_t minImageCount; + VkFormat imageFormat; + VkColorSpaceKHR imageColorSpace; + VkExtent2D imageExtent; + uint32_t imageArrayLayers; + VkImageUsageFlags imageUsage; + VkSharingMode imageSharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkSurfaceTransformFlagBitsKHR preTransform; + VkCompositeAlphaFlagBitsKHR compositeAlpha; + VkPresentModeKHR presentMode; + VkBool32 clipped; + VkSwapchainKHR oldSwapchain; +} VkSwapchainCreateInfoKHR; + +typedef struct VkPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t swapchainCount; + const VkSwapchainKHR* pSwapchains; + const uint32_t* pImageIndices; + VkResult* pResults; +} VkPresentInfoKHR; + +typedef struct VkImageSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; +} VkImageSwapchainCreateInfoKHR; + +typedef struct VkBindImageMemorySwapchainInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint32_t imageIndex; +} VkBindImageMemorySwapchainInfoKHR; + +typedef struct VkAcquireNextImageInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint64_t timeout; + VkSemaphore semaphore; + VkFence fence; + uint32_t deviceMask; +} VkAcquireNextImageInfoKHR; + +typedef struct VkDeviceGroupPresentCapabilitiesKHR { + VkStructureType sType; + void* pNext; + uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE]; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupPresentCapabilitiesKHR; + +typedef struct VkDeviceGroupPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const uint32_t* pDeviceMasks; + VkDeviceGroupPresentModeFlagBitsKHR mode; +} VkDeviceGroupPresentInfoKHR; + +typedef struct VkDeviceGroupSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupSwapchainCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain); +typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator); 
+typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex); +typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHR)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHR)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHR)(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR( + VkDevice device, + const VkSwapchainCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchain); + +VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR( + VkDevice device, + VkSwapchainKHR swapchain, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pSwapchainImageCount, + VkImage* pSwapchainImages); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint64_t timeout, + VkSemaphore semaphore, + VkFence fence, + uint32_t* pImageIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR( + VkQueue queue, + const VkPresentInfoKHR* pPresentInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR( + VkDevice device, + VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR( + VkDevice device, + VkSurfaceKHR surface, + VkDeviceGroupPresentModeFlagsKHR* pModes); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pRectCount, + VkRect2D* pRects); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR( + VkDevice device, + const VkAcquireNextImageInfoKHR* pAcquireInfo, + uint32_t* pImageIndex); +#endif + + +// VK_KHR_display is a preprocessor guard. Do not pass it to API calls. 
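+// Usage sketch (illustrative editorial example, not part of the header):
+// a minimal per-frame acquire/present sequence using the VK_KHR_swapchain
+// entry points declared above, assuming `device`, `queue`, `swapchain` and
+// both semaphores were created earlier. Real code must also handle
+// VK_ERROR_OUT_OF_DATE_KHR and VK_SUBOPTIMAL_KHR by recreating the swapchain.
+//
+//     uint32_t imageIndex;
+//     vkAcquireNextImageKHR(device, swapchain, UINT64_MAX,
+//                           imageAvailableSemaphore, VK_NULL_HANDLE,
+//                           &imageIndex);
+//
+//     /* ... record and submit work that renders to image `imageIndex`,
+//            signalling `renderFinishedSemaphore` on completion ... */
+//
+//     VkPresentInfoKHR presentInfo = {0};
+//     presentInfo.sType              = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
+//     presentInfo.waitSemaphoreCount = 1;
+//     presentInfo.pWaitSemaphores    = &renderFinishedSemaphore;
+//     presentInfo.swapchainCount     = 1;
+//     presentInfo.pSwapchains        = &swapchain;
+//     presentInfo.pImageIndices      = &imageIndex;
+//     vkQueuePresentKHR(queue, &presentInfo);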
+#define VK_KHR_display 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) +#define VK_KHR_DISPLAY_SPEC_VERSION 23 +#define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" +typedef VkFlags VkDisplayModeCreateFlagsKHR; + +typedef enum VkDisplayPlaneAlphaFlagBitsKHR { + VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008, + VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDisplayPlaneAlphaFlagBitsKHR; +typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; +typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; +typedef struct VkDisplayModeParametersKHR { + VkExtent2D visibleRegion; + uint32_t refreshRate; +} VkDisplayModeParametersKHR; + +typedef struct VkDisplayModeCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeCreateFlagsKHR flags; + VkDisplayModeParametersKHR parameters; +} VkDisplayModeCreateInfoKHR; + +typedef struct VkDisplayModePropertiesKHR { + VkDisplayModeKHR displayMode; + VkDisplayModeParametersKHR parameters; +} VkDisplayModePropertiesKHR; + +typedef struct VkDisplayPlaneCapabilitiesKHR { + VkDisplayPlaneAlphaFlagsKHR supportedAlpha; + VkOffset2D minSrcPosition; + VkOffset2D maxSrcPosition; + VkExtent2D minSrcExtent; + VkExtent2D maxSrcExtent; + VkOffset2D minDstPosition; + VkOffset2D maxDstPosition; + VkExtent2D minDstExtent; + VkExtent2D maxDstExtent; +} VkDisplayPlaneCapabilitiesKHR; + +typedef struct VkDisplayPlanePropertiesKHR { + VkDisplayKHR currentDisplay; + uint32_t currentStackIndex; +} VkDisplayPlanePropertiesKHR; + +typedef struct VkDisplayPropertiesKHR { + VkDisplayKHR display; + const char* displayName; + VkExtent2D physicalDimensions; + VkExtent2D physicalResolution; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkBool32 planeReorderPossible; + VkBool32 persistentContent; +} VkDisplayPropertiesKHR; + +typedef struct VkDisplaySurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplaySurfaceCreateFlagsKHR flags; + VkDisplayModeKHR displayMode; + uint32_t planeIndex; + uint32_t planeStackIndex; + VkSurfaceTransformFlagBitsKHR transform; + float globalAlpha; + VkDisplayPlaneAlphaFlagBitsKHR alphaMode; + VkExtent2D imageExtent; +} VkDisplaySurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities); 
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlanePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR( + VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t* pDisplayCount, + VkDisplayKHR* pDisplays); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDisplayModeKHR* pMode); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR* pCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( + VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_KHR_display_swapchain is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_display_swapchain 1 +#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 10 +#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" +typedef struct VkDisplayPresentInfoKHR { + VkStructureType sType; + const void* pNext; + VkRect2D srcRect; + VkRect2D dstRect; + VkBool32 persistent; +} VkDisplayPresentInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchains); +#endif + + +// VK_KHR_sampler_mirror_clamp_to_edge is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_sampler_mirror_clamp_to_edge 1 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 3 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge" + + +// VK_KHR_video_queue is a preprocessor guard. Do not pass it to API calls. 
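+// Usage sketch (illustrative editorial example, not part of the header):
+// the VK_KHR_display entry points above follow the same two-call enumeration
+// pattern as the rest of the API — query a count first, then fill a
+// caller-allocated array. Assuming a valid `physicalDevice`:
+//
+//     uint32_t displayCount = 0;
+//     vkGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice,
+//                                             &displayCount, NULL);
+//     VkDisplayPropertiesKHR* displays =
+//         malloc(displayCount * sizeof(VkDisplayPropertiesKHR));
+//     vkGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice,
+//                                             &displayCount, displays);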
+#define VK_KHR_video_queue 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionKHR) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionParametersKHR) +#define VK_KHR_VIDEO_QUEUE_SPEC_VERSION 8 +#define VK_KHR_VIDEO_QUEUE_EXTENSION_NAME "VK_KHR_video_queue" + +typedef enum VkQueryResultStatusKHR { + VK_QUERY_RESULT_STATUS_ERROR_KHR = -1, + VK_QUERY_RESULT_STATUS_NOT_READY_KHR = 0, + VK_QUERY_RESULT_STATUS_COMPLETE_KHR = 1, + VK_QUERY_RESULT_STATUS_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_KHR = -1000299000, + VK_QUERY_RESULT_STATUS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkQueryResultStatusKHR; + +typedef enum VkVideoCodecOperationFlagBitsKHR { + VK_VIDEO_CODEC_OPERATION_NONE_KHR = 0, + VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR = 0x00010000, + VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR = 0x00020000, + VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR = 0x00000001, + VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR = 0x00000002, + VK_VIDEO_CODEC_OPERATION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCodecOperationFlagBitsKHR; +typedef VkFlags VkVideoCodecOperationFlagsKHR; + +typedef enum VkVideoChromaSubsamplingFlagBitsKHR { + VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR = 0, + VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR = 0x00000001, + VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR = 0x00000002, + VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR = 0x00000004, + VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR = 0x00000008, + VK_VIDEO_CHROMA_SUBSAMPLING_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoChromaSubsamplingFlagBitsKHR; +typedef VkFlags VkVideoChromaSubsamplingFlagsKHR; + +typedef enum VkVideoComponentBitDepthFlagBitsKHR { + VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR = 0, + VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR = 0x00000001, + VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR = 0x00000004, + VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR = 0x00000010, + VK_VIDEO_COMPONENT_BIT_DEPTH_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoComponentBitDepthFlagBitsKHR; +typedef VkFlags VkVideoComponentBitDepthFlagsKHR; + +typedef enum VkVideoCapabilityFlagBitsKHR { + VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR = 0x00000001, + VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR = 0x00000002, + VK_VIDEO_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCapabilityFlagBitsKHR; +typedef VkFlags VkVideoCapabilityFlagsKHR; + +typedef enum VkVideoSessionCreateFlagBitsKHR { + VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR = 0x00000001, + VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS_BIT_KHR = 0x00000002, + VK_VIDEO_SESSION_CREATE_INLINE_QUERIES_BIT_KHR = 0x00000004, + VK_VIDEO_SESSION_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoSessionCreateFlagBitsKHR; +typedef VkFlags VkVideoSessionCreateFlagsKHR; +typedef VkFlags VkVideoSessionParametersCreateFlagsKHR; +typedef VkFlags VkVideoBeginCodingFlagsKHR; +typedef VkFlags VkVideoEndCodingFlagsKHR; + +typedef enum VkVideoCodingControlFlagBitsKHR { + VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR = 0x00000001, + VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR = 0x00000002, + VK_VIDEO_CODING_CONTROL_ENCODE_QUALITY_LEVEL_BIT_KHR = 0x00000004, + VK_VIDEO_CODING_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCodingControlFlagBitsKHR; +typedef VkFlags VkVideoCodingControlFlagsKHR; +typedef struct VkQueueFamilyQueryResultStatusPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 queryResultStatusSupport; +} VkQueueFamilyQueryResultStatusPropertiesKHR; + +typedef struct VkQueueFamilyVideoPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoCodecOperationFlagsKHR 
videoCodecOperations; +} VkQueueFamilyVideoPropertiesKHR; + +typedef struct VkVideoProfileInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoCodecOperationFlagBitsKHR videoCodecOperation; + VkVideoChromaSubsamplingFlagsKHR chromaSubsampling; + VkVideoComponentBitDepthFlagsKHR lumaBitDepth; + VkVideoComponentBitDepthFlagsKHR chromaBitDepth; +} VkVideoProfileInfoKHR; + +typedef struct VkVideoProfileListInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t profileCount; + const VkVideoProfileInfoKHR* pProfiles; +} VkVideoProfileListInfoKHR; + +typedef struct VkVideoCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoCapabilityFlagsKHR flags; + VkDeviceSize minBitstreamBufferOffsetAlignment; + VkDeviceSize minBitstreamBufferSizeAlignment; + VkExtent2D pictureAccessGranularity; + VkExtent2D minCodedExtent; + VkExtent2D maxCodedExtent; + uint32_t maxDpbSlots; + uint32_t maxActiveReferencePictures; + VkExtensionProperties stdHeaderVersion; +} VkVideoCapabilitiesKHR; + +typedef struct VkPhysicalDeviceVideoFormatInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags imageUsage; +} VkPhysicalDeviceVideoFormatInfoKHR; + +typedef struct VkVideoFormatPropertiesKHR { + VkStructureType sType; + void* pNext; + VkFormat format; + VkComponentMapping componentMapping; + VkImageCreateFlags imageCreateFlags; + VkImageType imageType; + VkImageTiling imageTiling; + VkImageUsageFlags imageUsageFlags; +} VkVideoFormatPropertiesKHR; + +typedef struct VkVideoPictureResourceInfoKHR { + VkStructureType sType; + const void* pNext; + VkOffset2D codedOffset; + VkExtent2D codedExtent; + uint32_t baseArrayLayer; + VkImageView imageViewBinding; +} VkVideoPictureResourceInfoKHR; + +typedef struct VkVideoReferenceSlotInfoKHR { + VkStructureType sType; + const void* pNext; + int32_t slotIndex; + const VkVideoPictureResourceInfoKHR* pPictureResource; +} VkVideoReferenceSlotInfoKHR; + +typedef struct VkVideoSessionMemoryRequirementsKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryBindIndex; + VkMemoryRequirements memoryRequirements; +} VkVideoSessionMemoryRequirementsKHR; + +typedef struct VkBindVideoSessionMemoryInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t memoryBindIndex; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkDeviceSize memorySize; +} VkBindVideoSessionMemoryInfoKHR; + +typedef struct VkVideoSessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t queueFamilyIndex; + VkVideoSessionCreateFlagsKHR flags; + const VkVideoProfileInfoKHR* pVideoProfile; + VkFormat pictureFormat; + VkExtent2D maxCodedExtent; + VkFormat referencePictureFormat; + uint32_t maxDpbSlots; + uint32_t maxActiveReferencePictures; + const VkExtensionProperties* pStdHeaderVersion; +} VkVideoSessionCreateInfoKHR; + +typedef struct VkVideoSessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoSessionParametersCreateFlagsKHR flags; + VkVideoSessionParametersKHR videoSessionParametersTemplate; + VkVideoSessionKHR videoSession; +} VkVideoSessionParametersCreateInfoKHR; + +typedef struct VkVideoSessionParametersUpdateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t updateSequenceCount; +} VkVideoSessionParametersUpdateInfoKHR; + +typedef struct VkVideoBeginCodingInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoBeginCodingFlagsKHR flags; + VkVideoSessionKHR videoSession; + VkVideoSessionParametersKHR videoSessionParameters; + uint32_t referenceSlotCount; + const 
VkVideoReferenceSlotInfoKHR* pReferenceSlots; +} VkVideoBeginCodingInfoKHR; + +typedef struct VkVideoEndCodingInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEndCodingFlagsKHR flags; +} VkVideoEndCodingInfoKHR; + +typedef struct VkVideoCodingControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoCodingControlFlagsKHR flags; +} VkVideoCodingControlInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR)(VkPhysicalDevice physicalDevice, const VkVideoProfileInfoKHR* pVideoProfile, VkVideoCapabilitiesKHR* pCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, uint32_t* pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR* pVideoFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionKHR)(VkDevice device, const VkVideoSessionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionKHR* pVideoSession); +typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionKHR)(VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetVideoSessionMemoryRequirementsKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t* pMemoryRequirementsCount, VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements); +typedef VkResult (VKAPI_PTR *PFN_vkBindVideoSessionMemoryKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos); +typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionParametersKHR)(VkDevice device, const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionParametersKHR* pVideoSessionParameters); +typedef VkResult (VKAPI_PTR *PFN_vkUpdateVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo); +typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdBeginVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR* pBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR* pEndCodingInfo); +typedef void (VKAPI_PTR *PFN_vkCmdControlVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR* pCodingControlInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + const VkVideoProfileInfoKHR* pVideoProfile, + VkVideoCapabilitiesKHR* pCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoFormatPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, + uint32_t* pVideoFormatPropertyCount, + VkVideoFormatPropertiesKHR* pVideoFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionKHR( + VkDevice device, + const VkVideoSessionCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkVideoSessionKHR* pVideoSession); + +VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL 
vkGetVideoSessionMemoryRequirementsKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t* pMemoryRequirementsCount, + VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindVideoSessionMemoryKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionParametersKHR( + VkDevice device, + const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkVideoSessionParametersKHR* pVideoSessionParameters); + +VKAPI_ATTR VkResult VKAPI_CALL vkUpdateVideoSessionParametersKHR( + VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo); + +VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionParametersKHR( + VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoBeginCodingInfoKHR* pBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoEndCodingInfoKHR* pEndCodingInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdControlVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoCodingControlInfoKHR* pCodingControlInfo); +#endif + + +// VK_KHR_video_decode_queue is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_decode_queue 1 +#define VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION 8 +#define VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME "VK_KHR_video_decode_queue" + +typedef enum VkVideoDecodeCapabilityFlagBitsKHR { + VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR = 0x00000001, + VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR = 0x00000002, + VK_VIDEO_DECODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoDecodeCapabilityFlagBitsKHR; +typedef VkFlags VkVideoDecodeCapabilityFlagsKHR; + +typedef enum VkVideoDecodeUsageFlagBitsKHR { + VK_VIDEO_DECODE_USAGE_DEFAULT_KHR = 0, + VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001, + VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR = 0x00000002, + VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR = 0x00000004, + VK_VIDEO_DECODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoDecodeUsageFlagBitsKHR; +typedef VkFlags VkVideoDecodeUsageFlagsKHR; +typedef VkFlags VkVideoDecodeFlagsKHR; +typedef struct VkVideoDecodeCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoDecodeCapabilityFlagsKHR flags; +} VkVideoDecodeCapabilitiesKHR; + +typedef struct VkVideoDecodeUsageInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoDecodeUsageFlagsKHR videoUsageHints; +} VkVideoDecodeUsageInfoKHR; + +typedef struct VkVideoDecodeInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoDecodeFlagsKHR flags; + VkBuffer srcBuffer; + VkDeviceSize srcBufferOffset; + VkDeviceSize srcBufferRange; + VkVideoPictureResourceInfoKHR dstPictureResource; + const VkVideoReferenceSlotInfoKHR* pSetupReferenceSlot; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotInfoKHR* pReferenceSlots; +} VkVideoDecodeInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdDecodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR* pDecodeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDecodeVideoKHR( + VkCommandBuffer commandBuffer, + const VkVideoDecodeInfoKHR* 
pDecodeInfo); +#endif + + +// VK_KHR_video_encode_h264 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_encode_h264 1 +#include "vk_video/vulkan_video_codec_h264std.h" +#include "vk_video/vulkan_video_codec_h264std_encode.h" +#define VK_KHR_VIDEO_ENCODE_H264_SPEC_VERSION 14 +#define VK_KHR_VIDEO_ENCODE_H264_EXTENSION_NAME "VK_KHR_video_encode_h264" + +typedef enum VkVideoEncodeH264CapabilityFlagBitsKHR { + VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H264_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H264_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H264_CAPABILITY_PER_SLICE_CONSTANT_QP_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H264_CAPABILITY_GENERATE_PREFIX_NALU_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H264_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH264CapabilityFlagBitsKHR; +typedef VkFlags VkVideoEncodeH264CapabilityFlagsKHR; + +typedef enum VkVideoEncodeH264StdFlagBitsKHR { + VK_VIDEO_ENCODE_H264_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H264_STD_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_FLAG_SET_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H264_STD_SCALING_MATRIX_PRESENT_FLAG_SET_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H264_STD_CHROMA_QP_INDEX_OFFSET_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H264_STD_SECOND_CHROMA_QP_INDEX_OFFSET_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H264_STD_PIC_INIT_QP_MINUS26_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H264_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_EXPLICIT_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_IMPLICIT_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H264_STD_TRANSFORM_8X8_MODE_FLAG_SET_BIT_KHR = 0x00000200, + VK_VIDEO_ENCODE_H264_STD_DIRECT_SPATIAL_MV_PRED_FLAG_UNSET_BIT_KHR = 0x00000400, + VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_UNSET_BIT_KHR = 0x00000800, + VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_SET_BIT_KHR = 0x00001000, + VK_VIDEO_ENCODE_H264_STD_DIRECT_8X8_INFERENCE_FLAG_UNSET_BIT_KHR = 0x00002000, + VK_VIDEO_ENCODE_H264_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR = 0x00004000, + VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_DISABLED_BIT_KHR = 0x00008000, + VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_ENABLED_BIT_KHR = 0x00010000, + VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_PARTIAL_BIT_KHR = 0x00020000, + VK_VIDEO_ENCODE_H264_STD_SLICE_QP_DELTA_BIT_KHR = 0x00080000, + VK_VIDEO_ENCODE_H264_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR = 0x00100000, + VK_VIDEO_ENCODE_H264_STD_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH264StdFlagBitsKHR; +typedef VkFlags VkVideoEncodeH264StdFlagsKHR; + +typedef enum VkVideoEncodeH264RateControlFlagBitsKHR { + VK_VIDEO_ENCODE_H264_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_REGULAR_GOP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR = 0x00000010, + 
VK_VIDEO_ENCODE_H264_RATE_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH264RateControlFlagBitsKHR; +typedef VkFlags VkVideoEncodeH264RateControlFlagsKHR; +typedef struct VkVideoEncodeH264CapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH264CapabilityFlagsKHR flags; + StdVideoH264LevelIdc maxLevelIdc; + uint32_t maxSliceCount; + uint32_t maxPPictureL0ReferenceCount; + uint32_t maxBPictureL0ReferenceCount; + uint32_t maxL1ReferenceCount; + uint32_t maxTemporalLayerCount; + VkBool32 expectDyadicTemporalLayerPattern; + int32_t minQp; + int32_t maxQp; + VkBool32 prefersGopRemainingFrames; + VkBool32 requiresGopRemainingFrames; + VkVideoEncodeH264StdFlagsKHR stdSyntaxFlags; +} VkVideoEncodeH264CapabilitiesKHR; + +typedef struct VkVideoEncodeH264QpKHR { + int32_t qpI; + int32_t qpP; + int32_t qpB; +} VkVideoEncodeH264QpKHR; + +typedef struct VkVideoEncodeH264QualityLevelPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH264RateControlFlagsKHR preferredRateControlFlags; + uint32_t preferredGopFrameCount; + uint32_t preferredIdrPeriod; + uint32_t preferredConsecutiveBFrameCount; + uint32_t preferredTemporalLayerCount; + VkVideoEncodeH264QpKHR preferredConstantQp; + uint32_t preferredMaxL0ReferenceCount; + uint32_t preferredMaxL1ReferenceCount; + VkBool32 preferredStdEntropyCodingModeFlag; +} VkVideoEncodeH264QualityLevelPropertiesKHR; + +typedef struct VkVideoEncodeH264SessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMaxLevelIdc; + StdVideoH264LevelIdc maxLevelIdc; +} VkVideoEncodeH264SessionCreateInfoKHR; + +typedef struct VkVideoEncodeH264SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdSPSCount; + const StdVideoH264SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH264PictureParameterSet* pStdPPSs; +} VkVideoEncodeH264SessionParametersAddInfoKHR; + +typedef struct VkVideoEncodeH264SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoEncodeH264SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoEncodeH264SessionParametersCreateInfoKHR; + +typedef struct VkVideoEncodeH264SessionParametersGetInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 writeStdSPS; + VkBool32 writeStdPPS; + uint32_t stdSPSId; + uint32_t stdPPSId; +} VkVideoEncodeH264SessionParametersGetInfoKHR; + +typedef struct VkVideoEncodeH264SessionParametersFeedbackInfoKHR { + VkStructureType sType; + void* pNext; + VkBool32 hasStdSPSOverrides; + VkBool32 hasStdPPSOverrides; +} VkVideoEncodeH264SessionParametersFeedbackInfoKHR; + +typedef struct VkVideoEncodeH264NaluSliceInfoKHR { + VkStructureType sType; + const void* pNext; + int32_t constantQp; + const StdVideoEncodeH264SliceHeader* pStdSliceHeader; +} VkVideoEncodeH264NaluSliceInfoKHR; + +typedef struct VkVideoEncodeH264PictureInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t naluSliceEntryCount; + const VkVideoEncodeH264NaluSliceInfoKHR* pNaluSliceEntries; + const StdVideoEncodeH264PictureInfo* pStdPictureInfo; + VkBool32 generatePrefixNalu; +} VkVideoEncodeH264PictureInfoKHR; + +typedef struct VkVideoEncodeH264DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoEncodeH264ReferenceInfo* pStdReferenceInfo; +} VkVideoEncodeH264DpbSlotInfoKHR; + +typedef struct VkVideoEncodeH264ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoH264ProfileIdc 
stdProfileIdc; +} VkVideoEncodeH264ProfileInfoKHR; + +typedef struct VkVideoEncodeH264RateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH264RateControlFlagsKHR flags; + uint32_t gopFrameCount; + uint32_t idrPeriod; + uint32_t consecutiveBFrameCount; + uint32_t temporalLayerCount; +} VkVideoEncodeH264RateControlInfoKHR; + +typedef struct VkVideoEncodeH264FrameSizeKHR { + uint32_t frameISize; + uint32_t framePSize; + uint32_t frameBSize; +} VkVideoEncodeH264FrameSizeKHR; + +typedef struct VkVideoEncodeH264RateControlLayerInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMinQp; + VkVideoEncodeH264QpKHR minQp; + VkBool32 useMaxQp; + VkVideoEncodeH264QpKHR maxQp; + VkBool32 useMaxFrameSize; + VkVideoEncodeH264FrameSizeKHR maxFrameSize; +} VkVideoEncodeH264RateControlLayerInfoKHR; + +typedef struct VkVideoEncodeH264GopRemainingFrameInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useGopRemainingFrames; + uint32_t gopRemainingI; + uint32_t gopRemainingP; + uint32_t gopRemainingB; +} VkVideoEncodeH264GopRemainingFrameInfoKHR; + + + +// VK_KHR_video_encode_h265 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_encode_h265 1 +#include "vk_video/vulkan_video_codec_h265std.h" +#include "vk_video/vulkan_video_codec_h265std_encode.h" +#define VK_KHR_VIDEO_ENCODE_H265_SPEC_VERSION 14 +#define VK_KHR_VIDEO_ENCODE_H265_EXTENSION_NAME "VK_KHR_video_encode_h265" + +typedef enum VkVideoEncodeH265CapabilityFlagBitsKHR { + VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_SEGMENT_TYPE_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H265_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H265_CAPABILITY_PER_SLICE_SEGMENT_CONSTANT_QP_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILES_PER_SLICE_SEGMENT_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_SEGMENTS_PER_TILE_BIT_KHR = 0x00000200, + VK_VIDEO_ENCODE_H265_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265CapabilityFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265CapabilityFlagsKHR; + +typedef enum VkVideoEncodeH265StdFlagBitsKHR { + VK_VIDEO_ENCODE_H265_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_STD_SAMPLE_ADAPTIVE_OFFSET_ENABLED_FLAG_SET_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_STD_SCALING_LIST_DATA_PRESENT_FLAG_SET_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_STD_PCM_ENABLED_FLAG_SET_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_STD_SPS_TEMPORAL_MVP_ENABLED_FLAG_SET_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H265_STD_INIT_QP_MINUS26_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H265_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H265_STD_WEIGHTED_BIPRED_FLAG_SET_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H265_STD_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H265_STD_SIGN_DATA_HIDING_ENABLED_FLAG_SET_BIT_KHR = 0x00000200, + VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_SET_BIT_KHR = 0x00000400, + VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_UNSET_BIT_KHR = 0x00000800, + 
VK_VIDEO_ENCODE_H265_STD_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_FLAG_SET_BIT_KHR = 0x00001000, + VK_VIDEO_ENCODE_H265_STD_TRANSQUANT_BYPASS_ENABLED_FLAG_SET_BIT_KHR = 0x00002000, + VK_VIDEO_ENCODE_H265_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR = 0x00004000, + VK_VIDEO_ENCODE_H265_STD_ENTROPY_CODING_SYNC_ENABLED_FLAG_SET_BIT_KHR = 0x00008000, + VK_VIDEO_ENCODE_H265_STD_DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_SET_BIT_KHR = 0x00010000, + VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENTS_ENABLED_FLAG_SET_BIT_KHR = 0x00020000, + VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENT_FLAG_SET_BIT_KHR = 0x00040000, + VK_VIDEO_ENCODE_H265_STD_SLICE_QP_DELTA_BIT_KHR = 0x00080000, + VK_VIDEO_ENCODE_H265_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR = 0x00100000, + VK_VIDEO_ENCODE_H265_STD_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265StdFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265StdFlagsKHR; + +typedef enum VkVideoEncodeH265CtbSizeFlagBitsKHR { + VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_CTB_SIZE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265CtbSizeFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265CtbSizeFlagsKHR; + +typedef enum VkVideoEncodeH265TransformBlockSizeFlagBitsKHR { + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265TransformBlockSizeFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265TransformBlockSizeFlagsKHR; + +typedef enum VkVideoEncodeH265RateControlFlagBitsKHR { + VK_VIDEO_ENCODE_H265_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_REGULAR_GOP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_TEMPORAL_SUB_LAYER_PATTERN_DYADIC_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265RateControlFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265RateControlFlagsKHR; +typedef struct VkVideoEncodeH265CapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH265CapabilityFlagsKHR flags; + StdVideoH265LevelIdc maxLevelIdc; + uint32_t maxSliceSegmentCount; + VkExtent2D maxTiles; + VkVideoEncodeH265CtbSizeFlagsKHR ctbSizes; + VkVideoEncodeH265TransformBlockSizeFlagsKHR transformBlockSizes; + uint32_t maxPPictureL0ReferenceCount; + uint32_t maxBPictureL0ReferenceCount; + uint32_t maxL1ReferenceCount; + uint32_t maxSubLayerCount; + VkBool32 expectDyadicTemporalSubLayerPattern; + int32_t minQp; + int32_t maxQp; + VkBool32 prefersGopRemainingFrames; + VkBool32 requiresGopRemainingFrames; + VkVideoEncodeH265StdFlagsKHR stdSyntaxFlags; +} VkVideoEncodeH265CapabilitiesKHR; + +typedef struct VkVideoEncodeH265SessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMaxLevelIdc; + StdVideoH265LevelIdc maxLevelIdc; +} VkVideoEncodeH265SessionCreateInfoKHR; + +typedef struct VkVideoEncodeH265QpKHR { + int32_t qpI; + int32_t qpP; + int32_t qpB; +} VkVideoEncodeH265QpKHR; + +typedef struct VkVideoEncodeH265QualityLevelPropertiesKHR 
{ + VkStructureType sType; + void* pNext; + VkVideoEncodeH265RateControlFlagsKHR preferredRateControlFlags; + uint32_t preferredGopFrameCount; + uint32_t preferredIdrPeriod; + uint32_t preferredConsecutiveBFrameCount; + uint32_t preferredSubLayerCount; + VkVideoEncodeH265QpKHR preferredConstantQp; + uint32_t preferredMaxL0ReferenceCount; + uint32_t preferredMaxL1ReferenceCount; +} VkVideoEncodeH265QualityLevelPropertiesKHR; + +typedef struct VkVideoEncodeH265SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdVPSCount; + const StdVideoH265VideoParameterSet* pStdVPSs; + uint32_t stdSPSCount; + const StdVideoH265SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH265PictureParameterSet* pStdPPSs; +} VkVideoEncodeH265SessionParametersAddInfoKHR; + +typedef struct VkVideoEncodeH265SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdVPSCount; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoEncodeH265SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoEncodeH265SessionParametersCreateInfoKHR; + +typedef struct VkVideoEncodeH265SessionParametersGetInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 writeStdVPS; + VkBool32 writeStdSPS; + VkBool32 writeStdPPS; + uint32_t stdVPSId; + uint32_t stdSPSId; + uint32_t stdPPSId; +} VkVideoEncodeH265SessionParametersGetInfoKHR; + +typedef struct VkVideoEncodeH265SessionParametersFeedbackInfoKHR { + VkStructureType sType; + void* pNext; + VkBool32 hasStdVPSOverrides; + VkBool32 hasStdSPSOverrides; + VkBool32 hasStdPPSOverrides; +} VkVideoEncodeH265SessionParametersFeedbackInfoKHR; + +typedef struct VkVideoEncodeH265NaluSliceSegmentInfoKHR { + VkStructureType sType; + const void* pNext; + int32_t constantQp; + const StdVideoEncodeH265SliceSegmentHeader* pStdSliceSegmentHeader; +} VkVideoEncodeH265NaluSliceSegmentInfoKHR; + +typedef struct VkVideoEncodeH265PictureInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t naluSliceSegmentEntryCount; + const VkVideoEncodeH265NaluSliceSegmentInfoKHR* pNaluSliceSegmentEntries; + const StdVideoEncodeH265PictureInfo* pStdPictureInfo; +} VkVideoEncodeH265PictureInfoKHR; + +typedef struct VkVideoEncodeH265DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoEncodeH265ReferenceInfo* pStdReferenceInfo; +} VkVideoEncodeH265DpbSlotInfoKHR; + +typedef struct VkVideoEncodeH265ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoH265ProfileIdc stdProfileIdc; +} VkVideoEncodeH265ProfileInfoKHR; + +typedef struct VkVideoEncodeH265RateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH265RateControlFlagsKHR flags; + uint32_t gopFrameCount; + uint32_t idrPeriod; + uint32_t consecutiveBFrameCount; + uint32_t subLayerCount; +} VkVideoEncodeH265RateControlInfoKHR; + +typedef struct VkVideoEncodeH265FrameSizeKHR { + uint32_t frameISize; + uint32_t framePSize; + uint32_t frameBSize; +} VkVideoEncodeH265FrameSizeKHR; + +typedef struct VkVideoEncodeH265RateControlLayerInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMinQp; + VkVideoEncodeH265QpKHR minQp; + VkBool32 useMaxQp; + VkVideoEncodeH265QpKHR maxQp; + VkBool32 useMaxFrameSize; + VkVideoEncodeH265FrameSizeKHR maxFrameSize; +} VkVideoEncodeH265RateControlLayerInfoKHR; + +typedef struct VkVideoEncodeH265GopRemainingFrameInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useGopRemainingFrames; + uint32_t gopRemainingI; + 
uint32_t gopRemainingP; + uint32_t gopRemainingB; +} VkVideoEncodeH265GopRemainingFrameInfoKHR; + + + +// VK_KHR_video_decode_h264 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_decode_h264 1 +#include "vk_video/vulkan_video_codec_h264std_decode.h" +#define VK_KHR_VIDEO_DECODE_H264_SPEC_VERSION 9 +#define VK_KHR_VIDEO_DECODE_H264_EXTENSION_NAME "VK_KHR_video_decode_h264" + +typedef enum VkVideoDecodeH264PictureLayoutFlagBitsKHR { + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR = 0, + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_KHR = 0x00000001, + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_KHR = 0x00000002, + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoDecodeH264PictureLayoutFlagBitsKHR; +typedef VkFlags VkVideoDecodeH264PictureLayoutFlagsKHR; +typedef struct VkVideoDecodeH264ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoH264ProfileIdc stdProfileIdc; + VkVideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout; +} VkVideoDecodeH264ProfileInfoKHR; + +typedef struct VkVideoDecodeH264CapabilitiesKHR { + VkStructureType sType; + void* pNext; + StdVideoH264LevelIdc maxLevelIdc; + VkOffset2D fieldOffsetGranularity; +} VkVideoDecodeH264CapabilitiesKHR; + +typedef struct VkVideoDecodeH264SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdSPSCount; + const StdVideoH264SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH264PictureParameterSet* pStdPPSs; +} VkVideoDecodeH264SessionParametersAddInfoKHR; + +typedef struct VkVideoDecodeH264SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoDecodeH264SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoDecodeH264SessionParametersCreateInfoKHR; + +typedef struct VkVideoDecodeH264PictureInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264PictureInfo* pStdPictureInfo; + uint32_t sliceCount; + const uint32_t* pSliceOffsets; +} VkVideoDecodeH264PictureInfoKHR; + +typedef struct VkVideoDecodeH264DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeH264DpbSlotInfoKHR; + + + +// VK_KHR_dynamic_rendering is a preprocessor guard. Do not pass it to API calls. 
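Usage sketch for the VK_KHR_video_queue / VK_KHR_video_decode_h264 declarations above (not part of the header): query the capabilities of an 8-bit 4:2:0 H.264 Main-profile decode profile. Note how the codec-specific profile struct is chained through pNext on the input side, and the decode-specific capability structs on the output side. The helper name and the choice of profile are illustrative assumptions.

    #include <vulkan/vulkan.h>

    VkResult query_h264_decode_caps(VkPhysicalDevice gpu,
                                    uint32_t* out_max_dpb_slots)
    {
        VkVideoDecodeH264ProfileInfoKHR h264_profile = {
            .sType = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR,
            .stdProfileIdc = STD_VIDEO_H264_PROFILE_IDC_MAIN,
            .pictureLayout = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR,
        };
        VkVideoProfileInfoKHR profile = {
            .sType = VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR,
            .pNext = &h264_profile, /* codec-specific profile goes on pNext */
            .videoCodecOperation = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR,
            .chromaSubsampling = VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR,
            .lumaBitDepth = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR,
            .chromaBitDepth = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR,
        };
        VkVideoDecodeH264CapabilitiesKHR h264_caps = {
            .sType = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR,
        };
        VkVideoDecodeCapabilitiesKHR decode_caps = {
            .sType = VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR,
            .pNext = &h264_caps,
        };
        VkVideoCapabilitiesKHR caps = {
            .sType = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR,
            .pNext = &decode_caps,
        };
        VkResult res = vkGetPhysicalDeviceVideoCapabilitiesKHR(gpu, &profile, &caps);
        if (res == VK_SUCCESS)
            *out_max_dpb_slots = caps.maxDpbSlots; /* this sketch keeps one field */
        return res;
    }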
+#define VK_KHR_dynamic_rendering 1 +#define VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION 1 +#define VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME "VK_KHR_dynamic_rendering" +typedef VkRenderingFlags VkRenderingFlagsKHR; + +typedef VkRenderingFlagBits VkRenderingFlagBitsKHR; + +typedef VkRenderingInfo VkRenderingInfoKHR; + +typedef VkRenderingAttachmentInfo VkRenderingAttachmentInfoKHR; + +typedef VkPipelineRenderingCreateInfo VkPipelineRenderingCreateInfoKHR; + +typedef VkPhysicalDeviceDynamicRenderingFeatures VkPhysicalDeviceDynamicRenderingFeaturesKHR; + +typedef VkCommandBufferInheritanceRenderingInfo VkCommandBufferInheritanceRenderingInfoKHR; + +typedef struct VkRenderingFragmentShadingRateAttachmentInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; + VkExtent2D shadingRateAttachmentTexelSize; +} VkRenderingFragmentShadingRateAttachmentInfoKHR; + +typedef struct VkRenderingFragmentDensityMapAttachmentInfoEXT { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; +} VkRenderingFragmentDensityMapAttachmentInfoEXT; + +typedef struct VkAttachmentSampleCountInfoAMD { + VkStructureType sType; + const void* pNext; + uint32_t colorAttachmentCount; + const VkSampleCountFlagBits* pColorAttachmentSamples; + VkSampleCountFlagBits depthStencilAttachmentSamples; +} VkAttachmentSampleCountInfoAMD; + +typedef VkAttachmentSampleCountInfoAMD VkAttachmentSampleCountInfoNV; + +typedef struct VkMultiviewPerViewAttributesInfoNVX { + VkStructureType sType; + const void* pNext; + VkBool32 perViewAttributes; + VkBool32 perViewAttributesPositionXOnly; +} VkMultiviewPerViewAttributesInfoNVX; + +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderingKHR)(VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderingKHR)(VkCommandBuffer commandBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderingKHR( + VkCommandBuffer commandBuffer, + const VkRenderingInfo* pRenderingInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderingKHR( + VkCommandBuffer commandBuffer); +#endif + + +// VK_KHR_multiview is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_multiview 1 +#define VK_KHR_MULTIVIEW_SPEC_VERSION 1 +#define VK_KHR_MULTIVIEW_EXTENSION_NAME "VK_KHR_multiview" +typedef VkRenderPassMultiviewCreateInfo VkRenderPassMultiviewCreateInfoKHR; + +typedef VkPhysicalDeviceMultiviewFeatures VkPhysicalDeviceMultiviewFeaturesKHR; + +typedef VkPhysicalDeviceMultiviewProperties VkPhysicalDeviceMultiviewPropertiesKHR; + + + +// VK_KHR_get_physical_device_properties2 is a preprocessor guard. Do not pass it to API calls. 
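Usage sketch for VK_KHR_dynamic_rendering above (not part of the header): record a pass with vkCmdBeginRenderingKHR/vkCmdEndRenderingKHR instead of a VkRenderPass/VkFramebuffer pair. The helper name is illustrative, and `view` is assumed to be a color-attachment image view already in VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL.

    #include <vulkan/vulkan.h>

    void record_dynamic_rendering(VkCommandBuffer cmd, VkImageView view,
                                  VkExtent2D extent)
    {
        VkRenderingAttachmentInfoKHR color = {
            .sType = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR,
            .imageView = view,
            .imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
            .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
            .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
            .clearValue = { .color = { .float32 = { 0.0f, 0.0f, 0.0f, 1.0f } } },
        };
        VkRenderingInfoKHR info = {
            .sType = VK_STRUCTURE_TYPE_RENDERING_INFO_KHR,
            .renderArea = { .offset = { 0, 0 }, .extent = extent },
            .layerCount = 1,
            .colorAttachmentCount = 1,
            .pColorAttachments = &color,
        };
        vkCmdBeginRenderingKHR(cmd, &info);
        /* ... vkCmdBindPipeline / vkCmdDraw calls go here ... */
        vkCmdEndRenderingKHR(cmd);
    }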
+#define VK_KHR_get_physical_device_properties2 1 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 2 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2" +typedef VkPhysicalDeviceFeatures2 VkPhysicalDeviceFeatures2KHR; + +typedef VkPhysicalDeviceProperties2 VkPhysicalDeviceProperties2KHR; + +typedef VkFormatProperties2 VkFormatProperties2KHR; + +typedef VkImageFormatProperties2 VkImageFormatProperties2KHR; + +typedef VkPhysicalDeviceImageFormatInfo2 VkPhysicalDeviceImageFormatInfo2KHR; + +typedef VkQueueFamilyProperties2 VkQueueFamilyProperties2KHR; + +typedef VkPhysicalDeviceMemoryProperties2 VkPhysicalDeviceMemoryProperties2KHR; + +typedef VkSparseImageFormatProperties2 VkSparseImageFormatProperties2KHR; + +typedef VkPhysicalDeviceSparseImageFormatInfo2 VkPhysicalDeviceSparseImageFormatInfo2KHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); +#endif + + +// VK_KHR_device_group is a preprocessor guard. Do not pass it to API calls. 
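Usage sketch for VK_KHR_get_physical_device_properties2 above (not part of the header): the point of the *2KHR queries is the pNext chain, which lets one call fill core features and extension feature structs together. The helper name is illustrative.

    #include <vulkan/vulkan.h>

    VkBool32 supports_multiview(VkPhysicalDevice gpu)
    {
        VkPhysicalDeviceMultiviewFeaturesKHR multiview = {
            .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
        };
        VkPhysicalDeviceFeatures2KHR features = {
            .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
            .pNext = &multiview, /* extension struct chained onto the core query */
        };
        vkGetPhysicalDeviceFeatures2KHR(gpu, &features);
        return multiview.multiview;
    }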
+#define VK_KHR_device_group 1 +#define VK_KHR_DEVICE_GROUP_SPEC_VERSION 4 +#define VK_KHR_DEVICE_GROUP_EXTENSION_NAME "VK_KHR_device_group" +typedef VkPeerMemoryFeatureFlags VkPeerMemoryFeatureFlagsKHR; + +typedef VkPeerMemoryFeatureFlagBits VkPeerMemoryFeatureFlagBitsKHR; + +typedef VkMemoryAllocateFlags VkMemoryAllocateFlagsKHR; + +typedef VkMemoryAllocateFlagBits VkMemoryAllocateFlagBitsKHR; + +typedef VkMemoryAllocateFlagsInfo VkMemoryAllocateFlagsInfoKHR; + +typedef VkDeviceGroupRenderPassBeginInfo VkDeviceGroupRenderPassBeginInfoKHR; + +typedef VkDeviceGroupCommandBufferBeginInfo VkDeviceGroupCommandBufferBeginInfoKHR; + +typedef VkDeviceGroupSubmitInfo VkDeviceGroupSubmitInfoKHR; + +typedef VkDeviceGroupBindSparseInfo VkDeviceGroupBindSparseInfoKHR; + +typedef VkBindBufferMemoryDeviceGroupInfo VkBindBufferMemoryDeviceGroupInfoKHR; + +typedef VkBindImageMemoryDeviceGroupInfo VkBindImageMemoryDeviceGroupInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHR)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHR)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHR( + VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHR( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR( + VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); +#endif + + +// VK_KHR_shader_draw_parameters is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_draw_parameters 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters" + + +// VK_KHR_maintenance1 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance1 1 +#define VK_KHR_MAINTENANCE_1_SPEC_VERSION 2 +#define VK_KHR_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_maintenance1" +#define VK_KHR_MAINTENANCE1_SPEC_VERSION VK_KHR_MAINTENANCE_1_SPEC_VERSION +#define VK_KHR_MAINTENANCE1_EXTENSION_NAME VK_KHR_MAINTENANCE_1_EXTENSION_NAME +typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR; + +typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); +#endif + + +// VK_KHR_device_group_creation is a preprocessor guard. Do not pass it to API calls. 
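Usage sketch for VK_KHR_device_group above (not part of the header): split one compute grid across two GPUs of a device group by switching the device mask and giving each device a different base workgroup. The helper name and the even split are illustrative; the bound compute pipeline is assumed to have been created with VK_PIPELINE_CREATE_DISPATCH_BASE_BIT.

    #include <vulkan/vulkan.h>

    void dispatch_on_two_gpus(VkCommandBuffer cmd, uint32_t groups_x)
    {
        vkCmdSetDeviceMaskKHR(cmd, 0x1); /* device 0 runs the first half */
        vkCmdDispatchBaseKHR(cmd, 0, 0, 0, groups_x / 2, 1, 1);

        vkCmdSetDeviceMaskKHR(cmd, 0x2); /* device 1 runs the second half */
        vkCmdDispatchBaseKHR(cmd, groups_x / 2, 0, 0,
                             groups_x - groups_x / 2, 1, 1);

        vkCmdSetDeviceMaskKHR(cmd, 0x3); /* restore: subsequent work on both */
    }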
+#define VK_KHR_device_group_creation 1 +#define VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION 1 +#define VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHR_device_group_creation" +#define VK_MAX_DEVICE_GROUP_SIZE_KHR VK_MAX_DEVICE_GROUP_SIZE +typedef VkPhysicalDeviceGroupProperties VkPhysicalDeviceGroupPropertiesKHR; + +typedef VkDeviceGroupDeviceCreateInfo VkDeviceGroupDeviceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHR)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHR( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +#endif + + +// VK_KHR_external_memory_capabilities is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_memory_capabilities 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities" +#define VK_LUID_SIZE_KHR VK_LUID_SIZE +typedef VkExternalMemoryHandleTypeFlags VkExternalMemoryHandleTypeFlagsKHR; + +typedef VkExternalMemoryHandleTypeFlagBits VkExternalMemoryHandleTypeFlagBitsKHR; + +typedef VkExternalMemoryFeatureFlags VkExternalMemoryFeatureFlagsKHR; + +typedef VkExternalMemoryFeatureFlagBits VkExternalMemoryFeatureFlagBitsKHR; + +typedef VkExternalMemoryProperties VkExternalMemoryPropertiesKHR; + +typedef VkPhysicalDeviceExternalImageFormatInfo VkPhysicalDeviceExternalImageFormatInfoKHR; + +typedef VkExternalImageFormatProperties VkExternalImageFormatPropertiesKHR; + +typedef VkPhysicalDeviceExternalBufferInfo VkPhysicalDeviceExternalBufferInfoKHR; + +typedef VkExternalBufferProperties VkExternalBufferPropertiesKHR; + +typedef VkPhysicalDeviceIDProperties VkPhysicalDeviceIDPropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); +#endif + + +// VK_KHR_external_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_memory 1 +#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory" +#define VK_QUEUE_FAMILY_EXTERNAL_KHR VK_QUEUE_FAMILY_EXTERNAL +typedef VkExternalMemoryImageCreateInfo VkExternalMemoryImageCreateInfoKHR; + +typedef VkExternalMemoryBufferCreateInfo VkExternalMemoryBufferCreateInfoKHR; + +typedef VkExportMemoryAllocateInfo VkExportMemoryAllocateInfoKHR; + + + +// VK_KHR_external_memory_fd is a preprocessor guard. Do not pass it to API calls. 
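Usage sketch for VK_KHR_device_group_creation above (not part of the header): the enumerate-twice pattern for vkEnumeratePhysicalDeviceGroupsKHR. Each output element must have its sType set before the second call. The helper name and malloc-based ownership are illustrative.

    #include <vulkan/vulkan.h>
    #include <stdlib.h>

    VkResult list_device_groups(VkInstance instance, uint32_t* out_count,
                                VkPhysicalDeviceGroupPropertiesKHR** out_groups)
    {
        /* First call: count only. */
        VkResult res = vkEnumeratePhysicalDeviceGroupsKHR(instance, out_count, NULL);
        if (res != VK_SUCCESS || *out_count == 0)
            return res;

        /* calloc also zeroes pNext, as the spec requires. */
        *out_groups = calloc(*out_count, sizeof(**out_groups));
        for (uint32_t i = 0; i < *out_count; ++i)
            (*out_groups)[i].sType =
                VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES;

        /* Second call: fill the array. */
        return vkEnumeratePhysicalDeviceGroupsKHR(instance, out_count, *out_groups);
    }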
+#define VK_KHR_external_memory_fd 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd" +typedef struct VkImportMemoryFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + int fd; +} VkImportMemoryFdInfoKHR; + +typedef struct VkMemoryFdPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryFdPropertiesKHR; + +typedef struct VkMemoryGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR( + VkDevice device, + const VkMemoryGetFdInfoKHR* pGetFdInfo, + int* pFd); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR* pMemoryFdProperties); +#endif + + +// VK_KHR_external_semaphore_capabilities is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_semaphore_capabilities 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities" +typedef VkExternalSemaphoreHandleTypeFlags VkExternalSemaphoreHandleTypeFlagsKHR; + +typedef VkExternalSemaphoreHandleTypeFlagBits VkExternalSemaphoreHandleTypeFlagBitsKHR; + +typedef VkExternalSemaphoreFeatureFlags VkExternalSemaphoreFeatureFlagsKHR; + +typedef VkExternalSemaphoreFeatureFlagBits VkExternalSemaphoreFeatureFlagBitsKHR; + +typedef VkPhysicalDeviceExternalSemaphoreInfo VkPhysicalDeviceExternalSemaphoreInfoKHR; + +typedef VkExternalSemaphoreProperties VkExternalSemaphorePropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +#endif + + +// VK_KHR_external_semaphore is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_semaphore 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore" +typedef VkSemaphoreImportFlags VkSemaphoreImportFlagsKHR; + +typedef VkSemaphoreImportFlagBits VkSemaphoreImportFlagBitsKHR; + +typedef VkExportSemaphoreCreateInfo VkExportSemaphoreCreateInfoKHR; + + + +// VK_KHR_external_semaphore_fd is a preprocessor guard. Do not pass it to API calls. 
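Usage sketch for VK_KHR_external_memory_fd above (not part of the header): export an opaque POSIX fd for a device allocation. The helper name is illustrative; the memory is assumed to have been allocated with VkExportMemoryAllocateInfoKHR naming this handle type, and ownership of the returned fd passes to the caller, who must close(2) it.

    #include <vulkan/vulkan.h>

    VkResult export_memory_fd(VkDevice device, VkDeviceMemory memory, int* out_fd)
    {
        VkMemoryGetFdInfoKHR info = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
            .memory = memory,
            .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
        };
        return vkGetMemoryFdKHR(device, &info, out_fd);
    }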
+#define VK_KHR_external_semaphore_fd 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd" +typedef struct VkImportSemaphoreFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + int fd; +} VkImportSemaphoreFdInfoKHR; + +typedef struct VkSemaphoreGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR( + VkDevice device, + const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( + VkDevice device, + const VkSemaphoreGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + + +// VK_KHR_push_descriptor is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_push_descriptor 1 +#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2 +#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" +typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t maxPushDescriptors; +} VkPhysicalDevicePushDescriptorPropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR( + VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void* pData); +#endif + + +// VK_KHR_shader_float16_int8 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_float16_int8 1 +#define VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION 1 +#define VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME "VK_KHR_shader_float16_int8" +typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceShaderFloat16Int8FeaturesKHR; + +typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceFloat16Int8FeaturesKHR; + + + +// VK_KHR_16bit_storage is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_16bit_storage 1 +#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage" +typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeaturesKHR; + + + +// VK_KHR_incremental_present is a preprocessor guard. Do not pass it to API calls. 
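Usage sketch for VK_KHR_push_descriptor above (not part of the header): write a uniform-buffer descriptor straight into the command buffer, with no VkDescriptorPool or vkAllocateDescriptorSets involved. The helper name is illustrative; set 0 of `layout` is assumed to use a layout created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR.

    #include <vulkan/vulkan.h>

    void push_ubo(VkCommandBuffer cmd, VkPipelineLayout layout, VkBuffer ubo)
    {
        VkDescriptorBufferInfo buffer_info = {
            .buffer = ubo,
            .offset = 0,
            .range = VK_WHOLE_SIZE,
        };
        VkWriteDescriptorSet write = {
            .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
            /* dstSet is ignored for push descriptors */
            .dstBinding = 0,
            .descriptorCount = 1,
            .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
            .pBufferInfo = &buffer_info,
        };
        vkCmdPushDescriptorSetKHR(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
                                  layout, /*set*/ 0, 1, &write);
    }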
+#define VK_KHR_incremental_present 1 +#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 2 +#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" +typedef struct VkRectLayerKHR { + VkOffset2D offset; + VkExtent2D extent; + uint32_t layer; +} VkRectLayerKHR; + +typedef struct VkPresentRegionKHR { + uint32_t rectangleCount; + const VkRectLayerKHR* pRectangles; +} VkPresentRegionKHR; + +typedef struct VkPresentRegionsKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentRegionKHR* pRegions; +} VkPresentRegionsKHR; + + + +// VK_KHR_descriptor_update_template is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_descriptor_update_template 1 +typedef VkDescriptorUpdateTemplate VkDescriptorUpdateTemplateKHR; + +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1 +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template" +typedef VkDescriptorUpdateTemplateType VkDescriptorUpdateTemplateTypeKHR; + +typedef VkDescriptorUpdateTemplateCreateFlags VkDescriptorUpdateTemplateCreateFlagsKHR; + +typedef VkDescriptorUpdateTemplateEntry VkDescriptorUpdateTemplateEntryKHR; + +typedef VkDescriptorUpdateTemplateCreateInfo VkDescriptorUpdateTemplateCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); +#endif + + +// VK_KHR_imageless_framebuffer is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_imageless_framebuffer 1 +#define VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION 1 +#define VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME "VK_KHR_imageless_framebuffer" +typedef VkPhysicalDeviceImagelessFramebufferFeatures VkPhysicalDeviceImagelessFramebufferFeaturesKHR; + +typedef VkFramebufferAttachmentsCreateInfo VkFramebufferAttachmentsCreateInfoKHR; + +typedef VkFramebufferAttachmentImageInfo VkFramebufferAttachmentImageInfoKHR; + +typedef VkRenderPassAttachmentBeginInfo VkRenderPassAttachmentBeginInfoKHR; + + + +// VK_KHR_create_renderpass2 is a preprocessor guard. Do not pass it to API calls. 
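Usage sketch for VK_KHR_incremental_present above (not part of the header): tell the presentation engine that only one rectangle changed this frame by chaining VkPresentRegionsKHR into VkPresentInfoKHR. The helper name and single-rectangle case are illustrative.

    #include <vulkan/vulkan.h>

    VkResult present_dirty_rect(VkQueue queue, VkSwapchainKHR swapchain,
                                uint32_t image_index, VkSemaphore wait_sem,
                                VkRectLayerKHR dirty)
    {
        VkPresentRegionKHR region = {
            .rectangleCount = 1,
            .pRectangles = &dirty,
        };
        VkPresentRegionsKHR regions = {
            .sType = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
            .swapchainCount = 1, /* must match VkPresentInfoKHR::swapchainCount */
            .pRegions = &region,
        };
        VkPresentInfoKHR present = {
            .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
            .pNext = &regions,
            .waitSemaphoreCount = 1,
            .pWaitSemaphores = &wait_sem,
            .swapchainCount = 1,
            .pSwapchains = &swapchain,
            .pImageIndices = &image_index,
        };
        return vkQueuePresentKHR(queue, &present);
    }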
+#define VK_KHR_create_renderpass2 1 +#define VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION 1 +#define VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME "VK_KHR_create_renderpass2" +typedef VkRenderPassCreateInfo2 VkRenderPassCreateInfo2KHR; + +typedef VkAttachmentDescription2 VkAttachmentDescription2KHR; + +typedef VkAttachmentReference2 VkAttachmentReference2KHR; + +typedef VkSubpassDescription2 VkSubpassDescription2KHR; + +typedef VkSubpassDependency2 VkSubpassDependency2KHR; + +typedef VkSubpassBeginInfo VkSubpassBeginInfoKHR; + +typedef VkSubpassEndInfo VkSubpassEndInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2KHR)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2KHR( + VkDevice device, + const VkRenderPassCreateInfo2* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2KHR( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + const VkSubpassBeginInfo* pSubpassBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo* pSubpassBeginInfo, + const VkSubpassEndInfo* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassEndInfo* pSubpassEndInfo); +#endif + + +// VK_KHR_shared_presentable_image is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shared_presentable_image 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image" +typedef struct VkSharedPresentSurfaceCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkImageUsageFlags sharedPresentSupportedUsageFlags; +} VkSharedPresentSurfaceCapabilitiesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR( + VkDevice device, + VkSwapchainKHR swapchain); +#endif + + +// VK_KHR_external_fence_capabilities is a preprocessor guard. Do not pass it to API calls. 
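Usage sketch for VK_KHR_create_renderpass2 above (not part of the header): the *2KHR commands mirror the original render-pass commands but take extensible structs, so per-subpass state can grow through pNext. The helper name is illustrative; `pass` and `fb` are assumed to be a compatible render pass and framebuffer.

    #include <vulkan/vulkan.h>

    void record_render_pass2(VkCommandBuffer cmd, VkRenderPass pass,
                             VkFramebuffer fb, VkExtent2D extent)
    {
        VkClearValue clear = { .color = { .float32 = { 0.0f, 0.0f, 0.0f, 1.0f } } };
        VkRenderPassBeginInfo begin = {
            .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
            .renderPass = pass,
            .framebuffer = fb,
            .renderArea = { .offset = { 0, 0 }, .extent = extent },
            .clearValueCount = 1,
            .pClearValues = &clear,
        };
        VkSubpassBeginInfoKHR subpass_begin = {
            .sType = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO,
            .contents = VK_SUBPASS_CONTENTS_INLINE,
        };
        VkSubpassEndInfoKHR subpass_end = {
            .sType = VK_STRUCTURE_TYPE_SUBPASS_END_INFO,
        };
        vkCmdBeginRenderPass2KHR(cmd, &begin, &subpass_begin);
        /* ... draws for subpass 0 ... */
        vkCmdEndRenderPass2KHR(cmd, &subpass_end);
    }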
+#define VK_KHR_external_fence_capabilities 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities" +typedef VkExternalFenceHandleTypeFlags VkExternalFenceHandleTypeFlagsKHR; + +typedef VkExternalFenceHandleTypeFlagBits VkExternalFenceHandleTypeFlagBitsKHR; + +typedef VkExternalFenceFeatureFlags VkExternalFenceFeatureFlagsKHR; + +typedef VkExternalFenceFeatureFlagBits VkExternalFenceFeatureFlagBitsKHR; + +typedef VkPhysicalDeviceExternalFenceInfo VkPhysicalDeviceExternalFenceInfoKHR; + +typedef VkExternalFenceProperties VkExternalFencePropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); +#endif + + +// VK_KHR_external_fence is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_fence 1 +#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence" +typedef VkFenceImportFlags VkFenceImportFlagsKHR; + +typedef VkFenceImportFlagBits VkFenceImportFlagBitsKHR; + +typedef VkExportFenceCreateInfo VkExportFenceCreateInfoKHR; + + + +// VK_KHR_external_fence_fd is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_fence_fd 1 +#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd" +typedef struct VkImportFenceFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkFenceImportFlags flags; + VkExternalFenceHandleTypeFlagBits handleType; + int fd; +} VkImportFenceFdInfoKHR; + +typedef struct VkFenceGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkExternalFenceHandleTypeFlagBits handleType; +} VkFenceGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR( + VkDevice device, + const VkImportFenceFdInfoKHR* pImportFenceFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR( + VkDevice device, + const VkFenceGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + + +// VK_KHR_performance_query is a preprocessor guard. Do not pass it to API calls. 
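Usage sketch for VK_KHR_external_fence_fd above (not part of the header): export a sync-file fd from a fence so code outside Vulkan can wait on the GPU work. The helper name is illustrative; the fence is assumed to have been created with VkExportFenceCreateInfoKHR naming this handle type, and sync-fd export has transfer semantics, so the fence's payload is reset by the export.

    #include <vulkan/vulkan.h>

    VkResult export_fence_sync_file(VkDevice device, VkFence fence, int* out_fd)
    {
        VkFenceGetFdInfoKHR info = {
            .sType = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
            .fence = fence,
            .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
        };
        return vkGetFenceFdKHR(device, &info, out_fd);
    }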
+#define VK_KHR_performance_query 1 +#define VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION 1 +#define VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME "VK_KHR_performance_query" + +typedef enum VkPerformanceCounterUnitKHR { + VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR = 0, + VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR = 1, + VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR = 2, + VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR = 3, + VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR = 4, + VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR = 5, + VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR = 6, + VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR = 7, + VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR = 8, + VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR = 9, + VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR = 10, + VK_PERFORMANCE_COUNTER_UNIT_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterUnitKHR; + +typedef enum VkPerformanceCounterScopeKHR { + VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR = 0, + VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR = 1, + VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR = 2, + VK_QUERY_SCOPE_COMMAND_BUFFER_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, + VK_QUERY_SCOPE_RENDER_PASS_KHR = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, + VK_QUERY_SCOPE_COMMAND_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, + VK_PERFORMANCE_COUNTER_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterScopeKHR; + +typedef enum VkPerformanceCounterStorageKHR { + VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR = 0, + VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR = 1, + VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR = 2, + VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR = 3, + VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR = 4, + VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR = 5, + VK_PERFORMANCE_COUNTER_STORAGE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterStorageKHR; + +typedef enum VkPerformanceCounterDescriptionFlagBitsKHR { + VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR = 0x00000001, + VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR = 0x00000002, + VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR, + VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR, + VK_PERFORMANCE_COUNTER_DESCRIPTION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterDescriptionFlagBitsKHR; +typedef VkFlags VkPerformanceCounterDescriptionFlagsKHR; + +typedef enum VkAcquireProfilingLockFlagBitsKHR { + VK_ACQUIRE_PROFILING_LOCK_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAcquireProfilingLockFlagBitsKHR; +typedef VkFlags VkAcquireProfilingLockFlagsKHR; +typedef struct VkPhysicalDevicePerformanceQueryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 performanceCounterQueryPools; + VkBool32 performanceCounterMultipleQueryPools; +} VkPhysicalDevicePerformanceQueryFeaturesKHR; + +typedef struct VkPhysicalDevicePerformanceQueryPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 allowCommandBufferQueryCopies; +} VkPhysicalDevicePerformanceQueryPropertiesKHR; + +typedef struct VkPerformanceCounterKHR { + VkStructureType sType; + void* pNext; + VkPerformanceCounterUnitKHR unit; + VkPerformanceCounterScopeKHR scope; + VkPerformanceCounterStorageKHR storage; + uint8_t uuid[VK_UUID_SIZE]; +} VkPerformanceCounterKHR; + +typedef struct VkPerformanceCounterDescriptionKHR { + VkStructureType sType; + void* pNext; + VkPerformanceCounterDescriptionFlagsKHR flags; + char name[VK_MAX_DESCRIPTION_SIZE]; + char 
category[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; +} VkPerformanceCounterDescriptionKHR; + +typedef struct VkQueryPoolPerformanceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t queueFamilyIndex; + uint32_t counterIndexCount; + const uint32_t* pCounterIndices; +} VkQueryPoolPerformanceCreateInfoKHR; + +typedef union VkPerformanceCounterResultKHR { + int32_t int32; + int64_t int64; + uint32_t uint32; + uint64_t uint64; + float float32; + double float64; +} VkPerformanceCounterResultKHR; + +typedef struct VkAcquireProfilingLockInfoKHR { + VkStructureType sType; + const void* pNext; + VkAcquireProfilingLockFlagsKHR flags; + uint64_t timeout; +} VkAcquireProfilingLockInfoKHR; + +typedef struct VkPerformanceQuerySubmitInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t counterPassIndex; +} VkPerformanceQuerySubmitInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireProfilingLockKHR)(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkReleaseProfilingLockKHR)(VkDevice device); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + uint32_t* pCounterCount, + VkPerformanceCounterKHR* pCounters, + VkPerformanceCounterDescriptionKHR* pCounterDescriptions); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, + uint32_t* pNumPasses); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireProfilingLockKHR( + VkDevice device, + const VkAcquireProfilingLockInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkReleaseProfilingLockKHR( + VkDevice device); +#endif + + +// VK_KHR_maintenance2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance2 1 +#define VK_KHR_MAINTENANCE_2_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_2_EXTENSION_NAME "VK_KHR_maintenance2" +#define VK_KHR_MAINTENANCE2_SPEC_VERSION VK_KHR_MAINTENANCE_2_SPEC_VERSION +#define VK_KHR_MAINTENANCE2_EXTENSION_NAME VK_KHR_MAINTENANCE_2_EXTENSION_NAME +typedef VkPointClippingBehavior VkPointClippingBehaviorKHR; + +typedef VkTessellationDomainOrigin VkTessellationDomainOriginKHR; + +typedef VkPhysicalDevicePointClippingProperties VkPhysicalDevicePointClippingPropertiesKHR; + +typedef VkRenderPassInputAttachmentAspectCreateInfo VkRenderPassInputAttachmentAspectCreateInfoKHR; + +typedef VkInputAttachmentAspectReference VkInputAttachmentAspectReferenceKHR; + +typedef VkImageViewUsageCreateInfo VkImageViewUsageCreateInfoKHR; + +typedef VkPipelineTessellationDomainOriginStateCreateInfo VkPipelineTessellationDomainOriginStateCreateInfoKHR; + + + +// VK_KHR_get_surface_capabilities2 is a preprocessor guard. Do not pass it to API calls. 
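+/*
+ * Usage sketch (annotation, not part of the upstream header): the standard
+ * two-call enumeration idiom applied to the VK_KHR_performance_query counters
+ * declared above. The output structs carry sType/pNext, so they must be
+ * initialized before the second call; calloc zeroes pNext here.
+ *
+ *     uint32_t count = 0;
+ *     vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ *         physical_device, queue_family, &count, NULL, NULL);
+ *     VkPerformanceCounterKHR* counters = calloc(count, sizeof *counters);
+ *     VkPerformanceCounterDescriptionKHR* descs = calloc(count, sizeof *descs);
+ *     for (uint32_t i = 0; i < count; ++i)
+ *     {
+ *         counters[i].sType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR;
+ *         descs[i].sType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR;
+ *     }
+ *     vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ *         physical_device, queue_family, &count, counters, descs);
+ */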
+#define VK_KHR_get_surface_capabilities2 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2" +typedef struct VkPhysicalDeviceSurfaceInfo2KHR { + VkStructureType sType; + const void* pNext; + VkSurfaceKHR surface; +} VkPhysicalDeviceSurfaceInfo2KHR; + +typedef struct VkSurfaceCapabilities2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceCapabilitiesKHR surfaceCapabilities; +} VkSurfaceCapabilities2KHR; + +typedef struct VkSurfaceFormat2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceFormatKHR surfaceFormat; +} VkSurfaceFormat2KHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkSurfaceCapabilities2KHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormat2KHR* pSurfaceFormats); +#endif + + +// VK_KHR_variable_pointers is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_variable_pointers 1 +#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1 +#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers" +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeaturesKHR; + +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointersFeaturesKHR; + + + +// VK_KHR_get_display_properties2 is a preprocessor guard. Do not pass it to API calls. 
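+/*
+ * Usage sketch (annotation, not part of the upstream header): querying surface
+ * capabilities through VK_KHR_get_surface_capabilities2, declared above. The
+ * point of the "2" variant is the pNext chain on the output; here it pulls in
+ * VkSharedPresentSurfaceCapabilitiesKHR from earlier in this section. Assumes
+ * `surface` is a valid VkSurfaceKHR.
+ *
+ *     VkPhysicalDeviceSurfaceInfo2KHR surface_info = {0};
+ *     surface_info.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR;
+ *     surface_info.surface = surface;
+ *     VkSharedPresentSurfaceCapabilitiesKHR shared_caps = {0};
+ *     shared_caps.sType = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR;
+ *     VkSurfaceCapabilities2KHR caps = {0};
+ *     caps.sType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR;
+ *     caps.pNext = &shared_caps;
+ *     vkGetPhysicalDeviceSurfaceCapabilities2KHR(physical_device, &surface_info, &caps);
+ */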
+#define VK_KHR_get_display_properties2 1 +#define VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_display_properties2" +typedef struct VkDisplayProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPropertiesKHR displayProperties; +} VkDisplayProperties2KHR; + +typedef struct VkDisplayPlaneProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPlanePropertiesKHR displayPlaneProperties; +} VkDisplayPlaneProperties2KHR; + +typedef struct VkDisplayModeProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayModePropertiesKHR displayModeProperties; +} VkDisplayModeProperties2KHR; + +typedef struct VkDisplayPlaneInfo2KHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeKHR mode; + uint32_t planeIndex; +} VkDisplayPlaneInfo2KHR; + +typedef struct VkDisplayPlaneCapabilities2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPlaneCapabilitiesKHR capabilities; +} VkDisplayPlaneCapabilities2KHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModeProperties2KHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayProperties2KHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlaneProperties2KHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModeProperties2KHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModeProperties2KHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilities2KHR( + VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR* pCapabilities); +#endif + + +// VK_KHR_dedicated_allocation is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_dedicated_allocation 1 +#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3 +#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation" +typedef VkMemoryDedicatedRequirements VkMemoryDedicatedRequirementsKHR; + +typedef VkMemoryDedicatedAllocateInfo VkMemoryDedicatedAllocateInfoKHR; + + + +// VK_KHR_storage_buffer_storage_class is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_storage_buffer_storage_class 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class" + + +// VK_KHR_relaxed_block_layout is a preprocessor guard. Do not pass it to API calls. 
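+/*
+ * Usage sketch (annotation, not part of the upstream header): enumerating
+ * displays via the VK_KHR_get_display_properties2 entry points above, again
+ * using the two-call idiom and initializing sType on each output element.
+ *
+ *     uint32_t count = 0;
+ *     vkGetPhysicalDeviceDisplayProperties2KHR(physical_device, &count, NULL);
+ *     VkDisplayProperties2KHR* props = calloc(count, sizeof *props);
+ *     for (uint32_t i = 0; i < count; ++i)
+ *         props[i].sType = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR;
+ *     vkGetPhysicalDeviceDisplayProperties2KHR(physical_device, &count, props);
+ */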
+#define VK_KHR_relaxed_block_layout 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout" + + +// VK_KHR_get_memory_requirements2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_get_memory_requirements2 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2" +typedef VkBufferMemoryRequirementsInfo2 VkBufferMemoryRequirementsInfo2KHR; + +typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR; + +typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR; + +typedef VkMemoryRequirements2 VkMemoryRequirements2KHR; + +typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR; + +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif + + +// VK_KHR_image_format_list is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_image_format_list 1 +#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1 +#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list" +typedef VkImageFormatListCreateInfo VkImageFormatListCreateInfoKHR; + + + +// VK_KHR_sampler_ycbcr_conversion is a preprocessor guard. Do not pass it to API calls. 
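+/*
+ * Usage sketch (annotation, not part of the upstream header): combining
+ * VK_KHR_get_memory_requirements2 (declared above) with the
+ * VkMemoryDedicatedRequirements struct from VK_KHR_dedicated_allocation to ask
+ * whether an image wants its own allocation. Assumes `image` is a valid VkImage.
+ *
+ *     VkImageMemoryRequirementsInfo2 info = {0};
+ *     info.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
+ *     info.image = image;
+ *     VkMemoryDedicatedRequirements dedicated = {0};
+ *     dedicated.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
+ *     VkMemoryRequirements2 reqs = {0};
+ *     reqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
+ *     reqs.pNext = &dedicated;
+ *     vkGetImageMemoryRequirements2KHR(device, &info, &reqs);
+ *     if (dedicated.prefersDedicatedAllocation)
+ *     {
+ *         // chain a VkMemoryDedicatedAllocateInfo into VkMemoryAllocateInfo
+ *     }
+ */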
+#define VK_KHR_sampler_ycbcr_conversion 1 +typedef VkSamplerYcbcrConversion VkSamplerYcbcrConversionKHR; + +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION 14 +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME "VK_KHR_sampler_ycbcr_conversion" +typedef VkSamplerYcbcrModelConversion VkSamplerYcbcrModelConversionKHR; + +typedef VkSamplerYcbcrRange VkSamplerYcbcrRangeKHR; + +typedef VkChromaLocation VkChromaLocationKHR; + +typedef VkSamplerYcbcrConversionCreateInfo VkSamplerYcbcrConversionCreateInfoKHR; + +typedef VkSamplerYcbcrConversionInfo VkSamplerYcbcrConversionInfoKHR; + +typedef VkBindImagePlaneMemoryInfo VkBindImagePlaneMemoryInfoKHR; + +typedef VkImagePlaneMemoryRequirementsInfo VkImagePlaneMemoryRequirementsInfoKHR; + +typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR; + +typedef VkSamplerYcbcrConversionImageFormatProperties VkSamplerYcbcrConversionImageFormatPropertiesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversionKHR( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); + +VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversionKHR( + VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); +#endif + + +// VK_KHR_bind_memory2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_bind_memory2 1 +#define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1 +#define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2" +typedef VkBindBufferMemoryInfo VkBindBufferMemoryInfoKHR; + +typedef VkBindImageMemoryInfo VkBindImageMemoryInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); +#endif + + +// VK_KHR_maintenance3 is a preprocessor guard. Do not pass it to API calls. 
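+/*
+ * Usage sketch (annotation, not part of the upstream header): binding several
+ * buffers to one VkDeviceMemory in a single VK_KHR_bind_memory2 call, using the
+ * declarations above. Assumes `vertex_buffer` and `index_buffer` fit in
+ * `memory` at the given offsets with correct alignment.
+ *
+ *     VkBindBufferMemoryInfo binds[2] = {{0}, {0}};
+ *     binds[0].sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO;
+ *     binds[0].buffer = vertex_buffer;
+ *     binds[0].memory = memory;
+ *     binds[0].memoryOffset = 0;
+ *     binds[1].sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO;
+ *     binds[1].buffer = index_buffer;
+ *     binds[1].memory = memory;
+ *     binds[1].memoryOffset = index_offset;
+ *     vkBindBufferMemory2KHR(device, 2, binds);
+ */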
+#define VK_KHR_maintenance3 1 +#define VK_KHR_MAINTENANCE_3_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_3_EXTENSION_NAME "VK_KHR_maintenance3" +#define VK_KHR_MAINTENANCE3_SPEC_VERSION VK_KHR_MAINTENANCE_3_SPEC_VERSION +#define VK_KHR_MAINTENANCE3_EXTENSION_NAME VK_KHR_MAINTENANCE_3_EXTENSION_NAME +typedef VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR; + +typedef VkDescriptorSetLayoutSupport VkDescriptorSetLayoutSupportKHR; + +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupportKHR)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupportKHR( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif + + +// VK_KHR_draw_indirect_count is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_draw_indirect_count 1 +#define VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 +#define VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_KHR_draw_indirect_count" +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountKHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountKHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + + +// VK_KHR_shader_subgroup_extended_types is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_subgroup_extended_types 1 +#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION 1 +#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME "VK_KHR_shader_subgroup_extended_types" +typedef VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR; + + + +// VK_KHR_8bit_storage is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_8bit_storage 1 +#define VK_KHR_8BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_8BIT_STORAGE_EXTENSION_NAME "VK_KHR_8bit_storage" +typedef VkPhysicalDevice8BitStorageFeatures VkPhysicalDevice8BitStorageFeaturesKHR; + + + +// VK_KHR_shader_atomic_int64 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_atomic_int64 1 +#define VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION 1 +#define VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME "VK_KHR_shader_atomic_int64" +typedef VkPhysicalDeviceShaderAtomicInt64Features VkPhysicalDeviceShaderAtomicInt64FeaturesKHR; + + + +// VK_KHR_shader_clock is a preprocessor guard. Do not pass it to API calls. 
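+/*
+ * Usage sketch (annotation, not part of the upstream header): a GPU-driven draw
+ * with VK_KHR_draw_indirect_count, declared above. Assumes `buffer` holds up to
+ * `max_draws` VkDrawIndexedIndirectCommand records and `count_buffer` holds a
+ * single uint32_t draw count written earlier (e.g. by a culling compute pass).
+ *
+ *     vkCmdDrawIndexedIndirectCountKHR(cmd,
+ *         buffer, 0,
+ *         count_buffer, 0,
+ *         max_draws, sizeof(VkDrawIndexedIndirectCommand));
+ */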
+#define VK_KHR_shader_clock 1 +#define VK_KHR_SHADER_CLOCK_SPEC_VERSION 1 +#define VK_KHR_SHADER_CLOCK_EXTENSION_NAME "VK_KHR_shader_clock" +typedef struct VkPhysicalDeviceShaderClockFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupClock; + VkBool32 shaderDeviceClock; +} VkPhysicalDeviceShaderClockFeaturesKHR; + + + +// VK_KHR_video_decode_h265 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_decode_h265 1 +#include "vk_video/vulkan_video_codec_h265std_decode.h" +#define VK_KHR_VIDEO_DECODE_H265_SPEC_VERSION 8 +#define VK_KHR_VIDEO_DECODE_H265_EXTENSION_NAME "VK_KHR_video_decode_h265" +typedef struct VkVideoDecodeH265ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoH265ProfileIdc stdProfileIdc; +} VkVideoDecodeH265ProfileInfoKHR; + +typedef struct VkVideoDecodeH265CapabilitiesKHR { + VkStructureType sType; + void* pNext; + StdVideoH265LevelIdc maxLevelIdc; +} VkVideoDecodeH265CapabilitiesKHR; + +typedef struct VkVideoDecodeH265SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdVPSCount; + const StdVideoH265VideoParameterSet* pStdVPSs; + uint32_t stdSPSCount; + const StdVideoH265SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH265PictureParameterSet* pStdPPSs; +} VkVideoDecodeH265SessionParametersAddInfoKHR; + +typedef struct VkVideoDecodeH265SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdVPSCount; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoDecodeH265SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoDecodeH265SessionParametersCreateInfoKHR; + +typedef struct VkVideoDecodeH265PictureInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH265PictureInfo* pStdPictureInfo; + uint32_t sliceSegmentCount; + const uint32_t* pSliceSegmentOffsets; +} VkVideoDecodeH265PictureInfoKHR; + +typedef struct VkVideoDecodeH265DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH265ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeH265DpbSlotInfoKHR; + + + +// VK_KHR_global_priority is a preprocessor guard. Do not pass it to API calls. 
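+/*
+ * Usage sketch (annotation, not part of the upstream header): probing the
+ * VK_KHR_shader_clock features declared above by chaining the feature struct
+ * into VkPhysicalDeviceFeatures2, the usual pattern for extension features.
+ *
+ *     VkPhysicalDeviceShaderClockFeaturesKHR clock_features = {0};
+ *     clock_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR;
+ *     VkPhysicalDeviceFeatures2 features2 = {0};
+ *     features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+ *     features2.pNext = &clock_features;
+ *     vkGetPhysicalDeviceFeatures2(physical_device, &features2);
+ *     // clock_features.shaderSubgroupClock / shaderDeviceClock now report support
+ */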
+#define VK_KHR_global_priority 1 +#define VK_MAX_GLOBAL_PRIORITY_SIZE_KHR 16U +#define VK_KHR_GLOBAL_PRIORITY_SPEC_VERSION 1 +#define VK_KHR_GLOBAL_PRIORITY_EXTENSION_NAME "VK_KHR_global_priority" + +typedef enum VkQueueGlobalPriorityKHR { + VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR = 128, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR = 256, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR = 512, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR = 1024, + VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR, + VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM_KHR = 0x7FFFFFFF +} VkQueueGlobalPriorityKHR; +typedef struct VkDeviceQueueGlobalPriorityCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkQueueGlobalPriorityKHR globalPriority; +} VkDeviceQueueGlobalPriorityCreateInfoKHR; + +typedef struct VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 globalPriorityQuery; +} VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR; + +typedef struct VkQueueFamilyGlobalPriorityPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t priorityCount; + VkQueueGlobalPriorityKHR priorities[VK_MAX_GLOBAL_PRIORITY_SIZE_KHR]; +} VkQueueFamilyGlobalPriorityPropertiesKHR; + + + +// VK_KHR_driver_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_driver_properties 1 +#define VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION 1 +#define VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME "VK_KHR_driver_properties" +#define VK_MAX_DRIVER_NAME_SIZE_KHR VK_MAX_DRIVER_NAME_SIZE +#define VK_MAX_DRIVER_INFO_SIZE_KHR VK_MAX_DRIVER_INFO_SIZE +typedef VkDriverId VkDriverIdKHR; + +typedef VkConformanceVersion VkConformanceVersionKHR; + +typedef VkPhysicalDeviceDriverProperties VkPhysicalDeviceDriverPropertiesKHR; + + + +// VK_KHR_shader_float_controls is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_float_controls 1 +#define VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION 4 +#define VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME "VK_KHR_shader_float_controls" +typedef VkShaderFloatControlsIndependence VkShaderFloatControlsIndependenceKHR; + +typedef VkPhysicalDeviceFloatControlsProperties VkPhysicalDeviceFloatControlsPropertiesKHR; + + + +// VK_KHR_depth_stencil_resolve is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_depth_stencil_resolve 1 +#define VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION 1 +#define VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME "VK_KHR_depth_stencil_resolve" +typedef VkResolveModeFlagBits VkResolveModeFlagBitsKHR; + +typedef VkResolveModeFlags VkResolveModeFlagsKHR; + +typedef VkSubpassDescriptionDepthStencilResolve VkSubpassDescriptionDepthStencilResolveKHR; + +typedef VkPhysicalDeviceDepthStencilResolveProperties VkPhysicalDeviceDepthStencilResolvePropertiesKHR; + + + +// VK_KHR_swapchain_mutable_format is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_swapchain_mutable_format 1 +#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION 1 +#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME "VK_KHR_swapchain_mutable_format" + + +// VK_KHR_timeline_semaphore is a preprocessor guard. Do not pass it to API calls. 
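+/*
+ * Usage sketch (annotation, not part of the upstream header): requesting a
+ * high-priority queue with VK_KHR_global_priority, declared above, by chaining
+ * the create-info into VkDeviceQueueCreateInfo at device creation time.
+ *
+ *     float one = 1.0f;
+ *     VkDeviceQueueGlobalPriorityCreateInfoKHR prio = {0};
+ *     prio.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR;
+ *     prio.globalPriority = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR;
+ *     VkDeviceQueueCreateInfo queue_info = {0};
+ *     queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+ *     queue_info.pNext = &prio;
+ *     queue_info.queueFamilyIndex = family;
+ *     queue_info.queueCount = 1;
+ *     queue_info.pQueuePriorities = &one;
+ */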
+#define VK_KHR_timeline_semaphore 1 +#define VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION 2 +#define VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME "VK_KHR_timeline_semaphore" +typedef VkSemaphoreType VkSemaphoreTypeKHR; + +typedef VkSemaphoreWaitFlagBits VkSemaphoreWaitFlagBitsKHR; + +typedef VkSemaphoreWaitFlags VkSemaphoreWaitFlagsKHR; + +typedef VkPhysicalDeviceTimelineSemaphoreFeatures VkPhysicalDeviceTimelineSemaphoreFeaturesKHR; + +typedef VkPhysicalDeviceTimelineSemaphoreProperties VkPhysicalDeviceTimelineSemaphorePropertiesKHR; + +typedef VkSemaphoreTypeCreateInfo VkSemaphoreTypeCreateInfoKHR; + +typedef VkTimelineSemaphoreSubmitInfo VkTimelineSemaphoreSubmitInfoKHR; + +typedef VkSemaphoreWaitInfo VkSemaphoreWaitInfoKHR; + +typedef VkSemaphoreSignalInfo VkSemaphoreSignalInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValueKHR)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue); +typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphoresKHR)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphoreKHR)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValueKHR( + VkDevice device, + VkSemaphore semaphore, + uint64_t* pValue); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphoresKHR( + VkDevice device, + const VkSemaphoreWaitInfo* pWaitInfo, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphoreKHR( + VkDevice device, + const VkSemaphoreSignalInfo* pSignalInfo); +#endif + + +// VK_KHR_vulkan_memory_model is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_vulkan_memory_model 1 +#define VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION 3 +#define VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME "VK_KHR_vulkan_memory_model" +typedef VkPhysicalDeviceVulkanMemoryModelFeatures VkPhysicalDeviceVulkanMemoryModelFeaturesKHR; + + + +// VK_KHR_shader_terminate_invocation is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_terminate_invocation 1 +#define VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION 1 +#define VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME "VK_KHR_shader_terminate_invocation" +typedef VkPhysicalDeviceShaderTerminateInvocationFeatures VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR; + + + +// VK_KHR_fragment_shading_rate is a preprocessor guard. Do not pass it to API calls. 
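+/*
+ * Usage sketch (annotation, not part of the upstream header): creating a
+ * timeline semaphore and host-waiting on a counter value with the
+ * VK_KHR_timeline_semaphore declarations above.
+ *
+ *     VkSemaphoreTypeCreateInfo type_info = {0};
+ *     type_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO;
+ *     type_info.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE;
+ *     type_info.initialValue = 0;
+ *     VkSemaphoreCreateInfo create_info = {0};
+ *     create_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
+ *     create_info.pNext = &type_info;
+ *     VkSemaphore timeline = VK_NULL_HANDLE;
+ *     vkCreateSemaphore(device, &create_info, NULL, &timeline);
+ *
+ *     uint64_t wait_value = 42;  // signaled by a queue submission elsewhere
+ *     VkSemaphoreWaitInfo wait_info = {0};
+ *     wait_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO;
+ *     wait_info.semaphoreCount = 1;
+ *     wait_info.pSemaphores = &timeline;
+ *     wait_info.pValues = &wait_value;
+ *     vkWaitSemaphoresKHR(device, &wait_info, UINT64_MAX);
+ */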
+#define VK_KHR_fragment_shading_rate 1 +#define VK_KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION 2 +#define VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME "VK_KHR_fragment_shading_rate" + +typedef enum VkFragmentShadingRateCombinerOpKHR { + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR = 0, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR = 1, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR = 2, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR = 3, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR = 4, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_ENUM_KHR = 0x7FFFFFFF +} VkFragmentShadingRateCombinerOpKHR; +typedef struct VkFragmentShadingRateAttachmentInfoKHR { + VkStructureType sType; + const void* pNext; + const VkAttachmentReference2* pFragmentShadingRateAttachment; + VkExtent2D shadingRateAttachmentTexelSize; +} VkFragmentShadingRateAttachmentInfoKHR; + +typedef struct VkPipelineFragmentShadingRateStateCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkExtent2D fragmentSize; + VkFragmentShadingRateCombinerOpKHR combinerOps[2]; +} VkPipelineFragmentShadingRateStateCreateInfoKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRateFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineFragmentShadingRate; + VkBool32 primitiveFragmentShadingRate; + VkBool32 attachmentFragmentShadingRate; +} VkPhysicalDeviceFragmentShadingRateFeaturesKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRatePropertiesKHR { + VkStructureType sType; + void* pNext; + VkExtent2D minFragmentShadingRateAttachmentTexelSize; + VkExtent2D maxFragmentShadingRateAttachmentTexelSize; + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio; + VkBool32 primitiveFragmentShadingRateWithMultipleViewports; + VkBool32 layeredShadingRateAttachments; + VkBool32 fragmentShadingRateNonTrivialCombinerOps; + VkExtent2D maxFragmentSize; + uint32_t maxFragmentSizeAspectRatio; + uint32_t maxFragmentShadingRateCoverageSamples; + VkSampleCountFlagBits maxFragmentShadingRateRasterizationSamples; + VkBool32 fragmentShadingRateWithShaderDepthStencilWrites; + VkBool32 fragmentShadingRateWithSampleMask; + VkBool32 fragmentShadingRateWithShaderSampleMask; + VkBool32 fragmentShadingRateWithConservativeRasterization; + VkBool32 fragmentShadingRateWithFragmentShaderInterlock; + VkBool32 fragmentShadingRateWithCustomSampleLocations; + VkBool32 fragmentShadingRateStrictMultiplyCombiner; +} VkPhysicalDeviceFragmentShadingRatePropertiesKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRateKHR { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleCounts; + VkExtent2D fragmentSize; +} VkPhysicalDeviceFragmentShadingRateKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); +typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateKHR)(VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceFragmentShadingRatesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateKHR( + VkCommandBuffer commandBuffer, + const VkExtent2D* pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); +#endif + + +// VK_KHR_spirv_1_4 is a preprocessor guard. 
Do not pass it to API calls. +#define VK_KHR_spirv_1_4 1 +#define VK_KHR_SPIRV_1_4_SPEC_VERSION 1 +#define VK_KHR_SPIRV_1_4_EXTENSION_NAME "VK_KHR_spirv_1_4" + + +// VK_KHR_surface_protected_capabilities is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_surface_protected_capabilities 1 +#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME "VK_KHR_surface_protected_capabilities" +typedef struct VkSurfaceProtectedCapabilitiesKHR { + VkStructureType sType; + const void* pNext; + VkBool32 supportsProtected; +} VkSurfaceProtectedCapabilitiesKHR; + + + +// VK_KHR_separate_depth_stencil_layouts is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_separate_depth_stencil_layouts 1 +#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION 1 +#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME "VK_KHR_separate_depth_stencil_layouts" +typedef VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR; + +typedef VkAttachmentReferenceStencilLayout VkAttachmentReferenceStencilLayoutKHR; + +typedef VkAttachmentDescriptionStencilLayout VkAttachmentDescriptionStencilLayoutKHR; + + + +// VK_KHR_present_wait is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_present_wait 1 +#define VK_KHR_PRESENT_WAIT_SPEC_VERSION 1 +#define VK_KHR_PRESENT_WAIT_EXTENSION_NAME "VK_KHR_present_wait" +typedef struct VkPhysicalDevicePresentWaitFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 presentWait; +} VkPhysicalDevicePresentWaitFeaturesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkWaitForPresentKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkWaitForPresentKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout); +#endif + + +// VK_KHR_uniform_buffer_standard_layout is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_uniform_buffer_standard_layout 1 +#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME "VK_KHR_uniform_buffer_standard_layout" +typedef VkPhysicalDeviceUniformBufferStandardLayoutFeatures VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR; + + + +// VK_KHR_buffer_device_address is a preprocessor guard. Do not pass it to API calls. 
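+/*
+ * Usage sketch (annotation, not part of the upstream header): blocking until a
+ * given presentation completes with VK_KHR_present_wait, declared above. The id
+ * is established by the companion VK_KHR_present_id extension (a VkPresentIdKHR
+ * chained into VkPresentInfoKHR at vkQueuePresentKHR time).
+ *
+ *     uint64_t present_id = 42;  // id previously passed with the present
+ *     vkWaitForPresentKHR(device, swapchain, present_id, 16000000ull);  // 16 ms timeout
+ */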
+#define VK_KHR_buffer_device_address 1 +#define VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 1 +#define VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_KHR_buffer_device_address" +typedef VkPhysicalDeviceBufferDeviceAddressFeatures VkPhysicalDeviceBufferDeviceAddressFeaturesKHR; + +typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoKHR; + +typedef VkBufferOpaqueCaptureAddressCreateInfo VkBufferOpaqueCaptureAddressCreateInfoKHR; + +typedef VkMemoryOpaqueCaptureAddressAllocateInfo VkMemoryOpaqueCaptureAddressAllocateInfoKHR; + +typedef VkDeviceMemoryOpaqueCaptureAddressInfo VkDeviceMemoryOpaqueCaptureAddressInfoKHR; + +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressKHR( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddressKHR( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddressKHR( + VkDevice device, + const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); +#endif + + +// VK_KHR_deferred_host_operations is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_deferred_host_operations 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR) +#define VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION 4 +#define VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME "VK_KHR_deferred_host_operations" +typedef VkResult (VKAPI_PTR *PFN_vkCreateDeferredOperationKHR)(VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation); +typedef void (VKAPI_PTR *PFN_vkDestroyDeferredOperationKHR)(VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator); +typedef uint32_t (VKAPI_PTR *PFN_vkGetDeferredOperationMaxConcurrencyKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeferredOperationResultKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkDeferredOperationJoinKHR)(VkDevice device, VkDeferredOperationKHR operation); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDeferredOperationKHR( + VkDevice device, + const VkAllocationCallbacks* pAllocator, + VkDeferredOperationKHR* pDeferredOperation); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDeferredOperationKHR( + VkDevice device, + VkDeferredOperationKHR operation, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR uint32_t VKAPI_CALL vkGetDeferredOperationMaxConcurrencyKHR( + VkDevice device, + VkDeferredOperationKHR operation); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeferredOperationResultKHR( + VkDevice device, + VkDeferredOperationKHR operation); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeferredOperationJoinKHR( + VkDevice device, + VkDeferredOperationKHR operation); +#endif + + +// VK_KHR_pipeline_executable_properties is a preprocessor guard. Do not pass it to API calls. 
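+/*
+ * Usage sketch (annotation, not part of the upstream header): fetching a GPU
+ * virtual address with VK_KHR_buffer_device_address, declared above. Assumes
+ * `buffer` was created with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT.
+ *
+ *     VkBufferDeviceAddressInfo addr_info = {0};
+ *     addr_info.sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO;
+ *     addr_info.buffer = buffer;
+ *     VkDeviceAddress gpu_ptr = vkGetBufferDeviceAddressKHR(device, &addr_info);
+ *     // gpu_ptr can be stored in another buffer and dereferenced in shaders
+ *     // (e.g. via GL_EXT_buffer_reference in GLSL)
+ */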
+#define VK_KHR_pipeline_executable_properties 1 +#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME "VK_KHR_pipeline_executable_properties" + +typedef enum VkPipelineExecutableStatisticFormatKHR { + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR = 0, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR = 1, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR = 2, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR = 3, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPipelineExecutableStatisticFormatKHR; +typedef struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineExecutableInfo; +} VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + +typedef struct VkPipelineInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipeline pipeline; +} VkPipelineInfoKHR; + +typedef struct VkPipelineExecutablePropertiesKHR { + VkStructureType sType; + void* pNext; + VkShaderStageFlags stages; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + uint32_t subgroupSize; +} VkPipelineExecutablePropertiesKHR; + +typedef struct VkPipelineExecutableInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipeline pipeline; + uint32_t executableIndex; +} VkPipelineExecutableInfoKHR; + +typedef union VkPipelineExecutableStatisticValueKHR { + VkBool32 b32; + int64_t i64; + uint64_t u64; + double f64; +} VkPipelineExecutableStatisticValueKHR; + +typedef struct VkPipelineExecutableStatisticKHR { + VkStructureType sType; + void* pNext; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkPipelineExecutableStatisticFormatKHR format; + VkPipelineExecutableStatisticValueKHR value; +} VkPipelineExecutableStatisticKHR; + +typedef struct VkPipelineExecutableInternalRepresentationKHR { + VkStructureType sType; + void* pNext; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkBool32 isText; + size_t dataSize; + void* pData; +} VkPipelineExecutableInternalRepresentationKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutablePropertiesKHR)(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableStatisticsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableInternalRepresentationsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutablePropertiesKHR( + VkDevice device, + const VkPipelineInfoKHR* pPipelineInfo, + uint32_t* pExecutableCount, + VkPipelineExecutablePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableStatisticsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR* pExecutableInfo, + uint32_t* pStatisticCount, + VkPipelineExecutableStatisticKHR* pStatistics); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR* pExecutableInfo, + uint32_t* pInternalRepresentationCount, + 
VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); +#endif + + +// VK_KHR_map_memory2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_map_memory2 1 +#define VK_KHR_MAP_MEMORY_2_SPEC_VERSION 1 +#define VK_KHR_MAP_MEMORY_2_EXTENSION_NAME "VK_KHR_map_memory2" +typedef VkFlags VkMemoryUnmapFlagsKHR; +typedef struct VkMemoryMapInfoKHR { + VkStructureType sType; + const void* pNext; + VkMemoryMapFlags flags; + VkDeviceMemory memory; + VkDeviceSize offset; + VkDeviceSize size; +} VkMemoryMapInfoKHR; + +typedef struct VkMemoryUnmapInfoKHR { + VkStructureType sType; + const void* pNext; + VkMemoryUnmapFlagsKHR flags; + VkDeviceMemory memory; +} VkMemoryUnmapInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory2KHR)(VkDevice device, const VkMemoryMapInfoKHR* pMemoryMapInfo, void** ppData); +typedef VkResult (VKAPI_PTR *PFN_vkUnmapMemory2KHR)(VkDevice device, const VkMemoryUnmapInfoKHR* pMemoryUnmapInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory2KHR( + VkDevice device, + const VkMemoryMapInfoKHR* pMemoryMapInfo, + void** ppData); + +VKAPI_ATTR VkResult VKAPI_CALL vkUnmapMemory2KHR( + VkDevice device, + const VkMemoryUnmapInfoKHR* pMemoryUnmapInfo); +#endif + + +// VK_KHR_shader_integer_dot_product is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_integer_dot_product 1 +#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION 1 +#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME "VK_KHR_shader_integer_dot_product" +typedef VkPhysicalDeviceShaderIntegerDotProductFeatures VkPhysicalDeviceShaderIntegerDotProductFeaturesKHR; + +typedef VkPhysicalDeviceShaderIntegerDotProductProperties VkPhysicalDeviceShaderIntegerDotProductPropertiesKHR; + + + +// VK_KHR_pipeline_library is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_pipeline_library 1 +#define VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME "VK_KHR_pipeline_library" +typedef struct VkPipelineLibraryCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t libraryCount; + const VkPipeline* pLibraries; +} VkPipelineLibraryCreateInfoKHR; + + + +// VK_KHR_shader_non_semantic_info is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_non_semantic_info 1 +#define VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION 1 +#define VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME "VK_KHR_shader_non_semantic_info" + + +// VK_KHR_present_id is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_present_id 1 +#define VK_KHR_PRESENT_ID_SPEC_VERSION 1 +#define VK_KHR_PRESENT_ID_EXTENSION_NAME "VK_KHR_present_id" +typedef struct VkPresentIdKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const uint64_t* pPresentIds; +} VkPresentIdKHR; + +typedef struct VkPhysicalDevicePresentIdFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 presentId; +} VkPhysicalDevicePresentIdFeaturesKHR; + + + +// VK_KHR_video_encode_queue is a preprocessor guard. Do not pass it to API calls. 
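+/*
+ * Usage sketch (annotation, not part of the upstream header): a map/unmap pair
+ * through the extensible VK_KHR_map_memory2 entry points declared above, which
+ * replace the positional arguments of vkMapMemory with sType-tagged structs.
+ *
+ *     VkMemoryMapInfoKHR map_info = {0};
+ *     map_info.sType = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR;
+ *     map_info.memory = memory;
+ *     map_info.offset = 0;
+ *     map_info.size = VK_WHOLE_SIZE;
+ *     void* data = NULL;
+ *     vkMapMemory2KHR(device, &map_info, &data);
+ *     // ... write through data ...
+ *     VkMemoryUnmapInfoKHR unmap_info = {0};
+ *     unmap_info.sType = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR;
+ *     unmap_info.memory = memory;
+ *     vkUnmapMemory2KHR(device, &unmap_info);
+ */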
+#define VK_KHR_video_encode_queue 1 +#define VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION 12 +#define VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME "VK_KHR_video_encode_queue" + +typedef enum VkVideoEncodeTuningModeKHR { + VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR = 1, + VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR = 2, + VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR = 3, + VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR = 4, + VK_VIDEO_ENCODE_TUNING_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeTuningModeKHR; +typedef VkFlags VkVideoEncodeFlagsKHR; + +typedef enum VkVideoEncodeCapabilityFlagBitsKHR { + VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeCapabilityFlagBitsKHR; +typedef VkFlags VkVideoEncodeCapabilityFlagsKHR; + +typedef enum VkVideoEncodeRateControlModeFlagBitsKHR { + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DISABLED_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeRateControlModeFlagBitsKHR; +typedef VkFlags VkVideoEncodeRateControlModeFlagsKHR; + +typedef enum VkVideoEncodeFeedbackFlagBitsKHR { + VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_HAS_OVERRIDES_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_FEEDBACK_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeFeedbackFlagBitsKHR; +typedef VkFlags VkVideoEncodeFeedbackFlagsKHR; + +typedef enum VkVideoEncodeUsageFlagBitsKHR { + VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeUsageFlagBitsKHR; +typedef VkFlags VkVideoEncodeUsageFlagsKHR; + +typedef enum VkVideoEncodeContentFlagBitsKHR { + VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_CONTENT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeContentFlagBitsKHR; +typedef VkFlags VkVideoEncodeContentFlagsKHR; +typedef VkFlags VkVideoEncodeRateControlFlagsKHR; +typedef struct VkVideoEncodeInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeFlagsKHR flags; + VkBuffer dstBuffer; + VkDeviceSize dstBufferOffset; + VkDeviceSize dstBufferRange; + VkVideoPictureResourceInfoKHR srcPictureResource; + const VkVideoReferenceSlotInfoKHR* pSetupReferenceSlot; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotInfoKHR* pReferenceSlots; + uint32_t precedingExternallyEncodedBytes; +} VkVideoEncodeInfoKHR; + +typedef struct VkVideoEncodeCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeCapabilityFlagsKHR flags; + VkVideoEncodeRateControlModeFlagsKHR rateControlModes; + uint32_t maxRateControlLayers; + uint64_t maxBitrate; + uint32_t maxQualityLevels; + VkExtent2D 
encodeInputPictureGranularity; + VkVideoEncodeFeedbackFlagsKHR supportedEncodeFeedbackFlags; +} VkVideoEncodeCapabilitiesKHR; + +typedef struct VkQueryPoolVideoEncodeFeedbackCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeFeedbackFlagsKHR encodeFeedbackFlags; +} VkQueryPoolVideoEncodeFeedbackCreateInfoKHR; + +typedef struct VkVideoEncodeUsageInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeUsageFlagsKHR videoUsageHints; + VkVideoEncodeContentFlagsKHR videoContentHints; + VkVideoEncodeTuningModeKHR tuningMode; +} VkVideoEncodeUsageInfoKHR; + +typedef struct VkVideoEncodeRateControlLayerInfoKHR { + VkStructureType sType; + const void* pNext; + uint64_t averageBitrate; + uint64_t maxBitrate; + uint32_t frameRateNumerator; + uint32_t frameRateDenominator; +} VkVideoEncodeRateControlLayerInfoKHR; + +typedef struct VkVideoEncodeRateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeRateControlFlagsKHR flags; + VkVideoEncodeRateControlModeFlagBitsKHR rateControlMode; + uint32_t layerCount; + const VkVideoEncodeRateControlLayerInfoKHR* pLayers; + uint32_t virtualBufferSizeInMs; + uint32_t initialVirtualBufferSizeInMs; +} VkVideoEncodeRateControlInfoKHR; + +typedef struct VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR { + VkStructureType sType; + const void* pNext; + const VkVideoProfileInfoKHR* pVideoProfile; + uint32_t qualityLevel; +} VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR; + +typedef struct VkVideoEncodeQualityLevelPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeRateControlModeFlagBitsKHR preferredRateControlMode; + uint32_t preferredRateControlLayerCount; +} VkVideoEncodeQualityLevelPropertiesKHR; + +typedef struct VkVideoEncodeQualityLevelInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t qualityLevel; +} VkVideoEncodeQualityLevelInfoKHR; + +typedef struct VkVideoEncodeSessionParametersGetInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoSessionParametersKHR videoSessionParameters; +} VkVideoEncodeSessionParametersGetInfoKHR; + +typedef struct VkVideoEncodeSessionParametersFeedbackInfoKHR { + VkStructureType sType; + void* pNext; + VkBool32 hasOverrides; +} VkVideoEncodeSessionParametersFeedbackInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR* pQualityLevelInfo, VkVideoEncodeQualityLevelPropertiesKHR* pQualityLevelProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetEncodedVideoSessionParametersKHR)(VkDevice device, const VkVideoEncodeSessionParametersGetInfoKHR* pVideoSessionParametersInfo, VkVideoEncodeSessionParametersFeedbackInfoKHR* pFeedbackInfo, size_t* pDataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdEncodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR* pEncodeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR* pQualityLevelInfo, + VkVideoEncodeQualityLevelPropertiesKHR* pQualityLevelProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetEncodedVideoSessionParametersKHR( + VkDevice device, + const VkVideoEncodeSessionParametersGetInfoKHR* pVideoSessionParametersInfo, + VkVideoEncodeSessionParametersFeedbackInfoKHR* pFeedbackInfo, + size_t* pDataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdEncodeVideoKHR( + 
VkCommandBuffer commandBuffer, + const VkVideoEncodeInfoKHR* pEncodeInfo); +#endif + + +// VK_KHR_synchronization2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_synchronization2 1 +#define VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION 1 +#define VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME "VK_KHR_synchronization2" +typedef VkPipelineStageFlags2 VkPipelineStageFlags2KHR; + +typedef VkPipelineStageFlagBits2 VkPipelineStageFlagBits2KHR; + +typedef VkAccessFlags2 VkAccessFlags2KHR; + +typedef VkAccessFlagBits2 VkAccessFlagBits2KHR; + +typedef VkSubmitFlagBits VkSubmitFlagBitsKHR; + +typedef VkSubmitFlags VkSubmitFlagsKHR; + +typedef VkMemoryBarrier2 VkMemoryBarrier2KHR; + +typedef VkBufferMemoryBarrier2 VkBufferMemoryBarrier2KHR; + +typedef VkImageMemoryBarrier2 VkImageMemoryBarrier2KHR; + +typedef VkDependencyInfo VkDependencyInfoKHR; + +typedef VkSubmitInfo2 VkSubmitInfo2KHR; + +typedef VkSemaphoreSubmitInfo VkSemaphoreSubmitInfoKHR; + +typedef VkCommandBufferSubmitInfo VkCommandBufferSubmitInfoKHR; + +typedef VkPhysicalDeviceSynchronization2Features VkPhysicalDeviceSynchronization2FeaturesKHR; + +typedef struct VkQueueFamilyCheckpointProperties2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2 checkpointExecutionStageMask; +} VkQueueFamilyCheckpointProperties2NV; + +typedef struct VkCheckpointData2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2 stage; + void* pCheckpointMarker; +} VkCheckpointData2NV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2KHR)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2KHR)(VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2KHR)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2KHR( + VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfo* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2KHR( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags2 stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2KHR( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + const VkDependencyInfo* pDependencyInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2KHR( + VkCommandBuffer commandBuffer, + const VkDependencyInfo* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2KHR( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2KHR( + VkQueue queue, + uint32_t submitCount, + const 
VkSubmitInfo2* pSubmits, + VkFence fence); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); + +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointData2NV* pCheckpointData); +#endif + + +// VK_KHR_fragment_shader_barycentric is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_fragment_shader_barycentric 1 +#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 +#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_KHR_fragment_shader_barycentric" +typedef struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShaderBarycentric; +} VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + +typedef struct VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 triStripVertexOrderIndependentOfProvokingVertex; +} VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + + + +// VK_KHR_shader_subgroup_uniform_control_flow is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_subgroup_uniform_control_flow 1 +#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION 1 +#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME "VK_KHR_shader_subgroup_uniform_control_flow" +typedef struct VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupUniformControlFlow; +} VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + + + +// VK_KHR_zero_initialize_workgroup_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_zero_initialize_workgroup_memory 1 +#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION 1 +#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME "VK_KHR_zero_initialize_workgroup_memory" +typedef VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; + + + +// VK_KHR_workgroup_memory_explicit_layout is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_workgroup_memory_explicit_layout 1 +#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME "VK_KHR_workgroup_memory_explicit_layout" +typedef struct VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 workgroupMemoryExplicitLayout; + VkBool32 workgroupMemoryExplicitLayoutScalarBlockLayout; + VkBool32 workgroupMemoryExplicitLayout8BitAccess; + VkBool32 workgroupMemoryExplicitLayout16BitAccess; +} VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + + + +// VK_KHR_copy_commands2 is a preprocessor guard. Do not pass it to API calls. 
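+/*
+ * Usage sketch (annotation, not part of the upstream header): an image layout
+ * transition with the VK_KHR_synchronization2 declarations above, where stage
+ * and access masks live in the barrier itself rather than in the command.
+ *
+ *     VkImageMemoryBarrier2 barrier = {0};
+ *     barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2;
+ *     barrier.srcStageMask = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT;
+ *     barrier.srcAccessMask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT;
+ *     barrier.dstStageMask = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT;
+ *     barrier.dstAccessMask = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT;
+ *     barrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+ *     barrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ *     barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ *     barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ *     barrier.image = image;
+ *     barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ *     barrier.subresourceRange.levelCount = 1;
+ *     barrier.subresourceRange.layerCount = 1;
+ *     VkDependencyInfo dep = {0};
+ *     dep.sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO;
+ *     dep.imageMemoryBarrierCount = 1;
+ *     dep.pImageMemoryBarriers = &barrier;
+ *     vkCmdPipelineBarrier2KHR(cmd, &dep);
+ */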
+#define VK_KHR_copy_commands2 1 +#define VK_KHR_COPY_COMMANDS_2_SPEC_VERSION 1 +#define VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME "VK_KHR_copy_commands2" +typedef VkCopyBufferInfo2 VkCopyBufferInfo2KHR; + +typedef VkCopyImageInfo2 VkCopyImageInfo2KHR; + +typedef VkCopyBufferToImageInfo2 VkCopyBufferToImageInfo2KHR; + +typedef VkCopyImageToBufferInfo2 VkCopyImageToBufferInfo2KHR; + +typedef VkBlitImageInfo2 VkBlitImageInfo2KHR; + +typedef VkResolveImageInfo2 VkResolveImageInfo2KHR; + +typedef VkBufferCopy2 VkBufferCopy2KHR; + +typedef VkImageCopy2 VkImageCopy2KHR; + +typedef VkImageBlit2 VkImageBlit2KHR; + +typedef VkBufferImageCopy2 VkBufferImageCopy2KHR; + +typedef VkImageResolve2 VkImageResolve2KHR; + +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2KHR)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2KHR)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2* pCopyBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageInfo2* pCopyImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2KHR( + VkCommandBuffer commandBuffer, + const VkBlitImageInfo2* pBlitImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2KHR( + VkCommandBuffer commandBuffer, + const VkResolveImageInfo2* pResolveImageInfo); +#endif + + +// VK_KHR_format_feature_flags2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_format_feature_flags2 1 +#define VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION 2 +#define VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME "VK_KHR_format_feature_flags2" +typedef VkFormatFeatureFlags2 VkFormatFeatureFlags2KHR; + +typedef VkFormatFeatureFlagBits2 VkFormatFeatureFlagBits2KHR; + +typedef VkFormatProperties3 VkFormatProperties3KHR; + + + +// VK_KHR_ray_tracing_maintenance1 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_ray_tracing_maintenance1 1 +#define VK_KHR_RAY_TRACING_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_KHR_RAY_TRACING_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_ray_tracing_maintenance1" +typedef struct VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingMaintenance1; + VkBool32 rayTracingPipelineTraceRaysIndirect2; +} VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR; + +typedef struct VkTraceRaysIndirectCommand2KHR { + VkDeviceAddress raygenShaderRecordAddress; + VkDeviceSize raygenShaderRecordSize; + VkDeviceAddress missShaderBindingTableAddress; + VkDeviceSize missShaderBindingTableSize; + VkDeviceSize missShaderBindingTableStride; + VkDeviceAddress hitShaderBindingTableAddress; + VkDeviceSize hitShaderBindingTableSize; + VkDeviceSize hitShaderBindingTableStride; + VkDeviceAddress callableShaderBindingTableAddress; + VkDeviceSize callableShaderBindingTableSize; + VkDeviceSize callableShaderBindingTableStride; + uint32_t width; + uint32_t height; + uint32_t depth; +} VkTraceRaysIndirectCommand2KHR; + +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirect2KHR)(VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirect2KHR( + VkCommandBuffer commandBuffer, + VkDeviceAddress indirectDeviceAddress); +#endif + + +// VK_KHR_portability_enumeration is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_portability_enumeration 1 +#define VK_KHR_PORTABILITY_ENUMERATION_SPEC_VERSION 1 +#define VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME "VK_KHR_portability_enumeration" + + +// VK_KHR_maintenance4 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance4 1 +#define VK_KHR_MAINTENANCE_4_SPEC_VERSION 2 +#define VK_KHR_MAINTENANCE_4_EXTENSION_NAME "VK_KHR_maintenance4" +typedef VkPhysicalDeviceMaintenance4Features VkPhysicalDeviceMaintenance4FeaturesKHR; + +typedef VkPhysicalDeviceMaintenance4Properties VkPhysicalDeviceMaintenance4PropertiesKHR; + +typedef VkDeviceBufferMemoryRequirements VkDeviceBufferMemoryRequirementsKHR; + +typedef VkDeviceImageMemoryRequirements VkDeviceImageMemoryRequirementsKHR; + +typedef void (VKAPI_PTR *PFN_vkGetDeviceBufferMemoryRequirementsKHR)(VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSparseMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirementsKHR( + VkDevice device, + const VkDeviceBufferMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirementsKHR( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirementsKHR( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif + + +// VK_KHR_maintenance5 is a preprocessor guard. Do not pass it to API calls. 
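+// A minimal sketch (not part of the header) for VK_KHR_maintenance4 above:
+// buffer memory requirements queried from a create-info alone, without having
+// to create the buffer first. Assumes the extension (or Vulkan 1.3) is enabled.
+static void exampleBufferMemoryRequirements(VkDevice device,
+                                            const VkBufferCreateInfo* pBufferInfo,
+                                            VkMemoryRequirements2* pMemReq)
+{
+    VkDeviceBufferMemoryRequirementsKHR info = {0};
+    info.sType = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS;
+    info.pCreateInfo = pBufferInfo;           // describes the buffer to be created
+    pMemReq->sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
+    vkGetDeviceBufferMemoryRequirementsKHR(device, &info, pMemReq);
+}
+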
+#define VK_KHR_maintenance5 1 +#define VK_KHR_MAINTENANCE_5_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_5_EXTENSION_NAME "VK_KHR_maintenance5" +typedef VkFlags64 VkPipelineCreateFlags2KHR; + +// Flag bits for VkPipelineCreateFlagBits2KHR +typedef VkFlags64 VkPipelineCreateFlagBits2KHR; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR = 0x00000001ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR = 0x00000002ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR = 0x00000004ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = 0x00000008ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR = 0x00000010ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV = 0x00000020ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR = 0x00000040ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR = 0x00000100ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR = 0x00000200ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT = 0x00000400ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT = 0x00800000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR = 0x00000800ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV = 0x00040000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT = 0x01000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x02000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x04000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000ULL; 
+static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV = 0x10000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT = 0x20000000ULL; + +typedef VkFlags64 VkBufferUsageFlags2KHR; + +// Flag bits for VkBufferUsageFlagBits2KHR +typedef VkFlags64 VkBufferUsageFlagBits2KHR; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR = 0x00000001ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR = 0x00000002ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000004ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR = 0x00000010ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR = 0x00000020ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR = 0x00000040ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR = 0x00000080ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR = 0x00000100ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV = 0x00000400ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR = 0x00004000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000ULL; +#endif +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR = 0x00020000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT = 0x00200000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00400000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT = 0x04000000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT = 0x01000000ULL; + +typedef struct VkPhysicalDeviceMaintenance5FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 maintenance5; +} VkPhysicalDeviceMaintenance5FeaturesKHR; + 
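+// Unlike the 32-bit VkFlags enums, the *2KHR flag types above are plain 64-bit
+// integers, so values combine with ordinary bitwise OR; e.g. (illustrative):
+static const VkBufferUsageFlags2KHR kExampleVertexAndTransferDstUsage =
+    VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR | VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR;
+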
+typedef struct VkPhysicalDeviceMaintenance5PropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 earlyFragmentMultisampleCoverageAfterSampleCounting; + VkBool32 earlyFragmentSampleMaskTestBeforeSampleCounting; + VkBool32 depthStencilSwizzleOneSupport; + VkBool32 polygonModePointSize; + VkBool32 nonStrictSinglePixelWideLinesUseParallelogram; + VkBool32 nonStrictWideLinesUseParallelogram; +} VkPhysicalDeviceMaintenance5PropertiesKHR; + +typedef struct VkRenderingAreaInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkFormat* pColorAttachmentFormats; + VkFormat depthAttachmentFormat; + VkFormat stencilAttachmentFormat; +} VkRenderingAreaInfoKHR; + +typedef struct VkImageSubresource2KHR { + VkStructureType sType; + void* pNext; + VkImageSubresource imageSubresource; +} VkImageSubresource2KHR; + +typedef struct VkDeviceImageSubresourceInfoKHR { + VkStructureType sType; + const void* pNext; + const VkImageCreateInfo* pCreateInfo; + const VkImageSubresource2KHR* pSubresource; +} VkDeviceImageSubresourceInfoKHR; + +typedef struct VkSubresourceLayout2KHR { + VkStructureType sType; + void* pNext; + VkSubresourceLayout subresourceLayout; +} VkSubresourceLayout2KHR; + +typedef struct VkPipelineCreateFlags2CreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags2KHR flags; +} VkPipelineCreateFlags2CreateInfoKHR; + +typedef struct VkBufferUsageFlags2CreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkBufferUsageFlags2KHR usage; +} VkBufferUsageFlags2CreateInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer2KHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkGetRenderingAreaGranularityKHR)(VkDevice device, const VkRenderingAreaInfoKHR* pRenderingAreaInfo, VkExtent2D* pGranularity); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSubresourceLayoutKHR)(VkDevice device, const VkDeviceImageSubresourceInfoKHR* pInfo, VkSubresourceLayout2KHR* pLayout); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2KHR)(VkDevice device, VkImage image, const VkImageSubresource2KHR* pSubresource, VkSubresourceLayout2KHR* pLayout); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer2KHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkDeviceSize size, + VkIndexType indexType); + +VKAPI_ATTR void VKAPI_CALL vkGetRenderingAreaGranularityKHR( + VkDevice device, + const VkRenderingAreaInfoKHR* pRenderingAreaInfo, + VkExtent2D* pGranularity); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSubresourceLayoutKHR( + VkDevice device, + const VkDeviceImageSubresourceInfoKHR* pInfo, + VkSubresourceLayout2KHR* pLayout); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2KHR( + VkDevice device, + VkImage image, + const VkImageSubresource2KHR* pSubresource, + VkSubresourceLayout2KHR* pLayout); +#endif + + +// VK_KHR_ray_tracing_position_fetch is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_ray_tracing_position_fetch 1 +#define VK_KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION 1 +#define VK_KHR_RAY_TRACING_POSITION_FETCH_EXTENSION_NAME "VK_KHR_ray_tracing_position_fetch" +typedef struct VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingPositionFetch; +} VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR; + + + +// VK_KHR_cooperative_matrix is a preprocessor guard. 
Do not pass it to API calls. +#define VK_KHR_cooperative_matrix 1 +#define VK_KHR_COOPERATIVE_MATRIX_SPEC_VERSION 2 +#define VK_KHR_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_KHR_cooperative_matrix" + +typedef enum VkComponentTypeKHR { + VK_COMPONENT_TYPE_FLOAT16_KHR = 0, + VK_COMPONENT_TYPE_FLOAT32_KHR = 1, + VK_COMPONENT_TYPE_FLOAT64_KHR = 2, + VK_COMPONENT_TYPE_SINT8_KHR = 3, + VK_COMPONENT_TYPE_SINT16_KHR = 4, + VK_COMPONENT_TYPE_SINT32_KHR = 5, + VK_COMPONENT_TYPE_SINT64_KHR = 6, + VK_COMPONENT_TYPE_UINT8_KHR = 7, + VK_COMPONENT_TYPE_UINT16_KHR = 8, + VK_COMPONENT_TYPE_UINT32_KHR = 9, + VK_COMPONENT_TYPE_UINT64_KHR = 10, + VK_COMPONENT_TYPE_FLOAT16_NV = VK_COMPONENT_TYPE_FLOAT16_KHR, + VK_COMPONENT_TYPE_FLOAT32_NV = VK_COMPONENT_TYPE_FLOAT32_KHR, + VK_COMPONENT_TYPE_FLOAT64_NV = VK_COMPONENT_TYPE_FLOAT64_KHR, + VK_COMPONENT_TYPE_SINT8_NV = VK_COMPONENT_TYPE_SINT8_KHR, + VK_COMPONENT_TYPE_SINT16_NV = VK_COMPONENT_TYPE_SINT16_KHR, + VK_COMPONENT_TYPE_SINT32_NV = VK_COMPONENT_TYPE_SINT32_KHR, + VK_COMPONENT_TYPE_SINT64_NV = VK_COMPONENT_TYPE_SINT64_KHR, + VK_COMPONENT_TYPE_UINT8_NV = VK_COMPONENT_TYPE_UINT8_KHR, + VK_COMPONENT_TYPE_UINT16_NV = VK_COMPONENT_TYPE_UINT16_KHR, + VK_COMPONENT_TYPE_UINT32_NV = VK_COMPONENT_TYPE_UINT32_KHR, + VK_COMPONENT_TYPE_UINT64_NV = VK_COMPONENT_TYPE_UINT64_KHR, + VK_COMPONENT_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkComponentTypeKHR; + +typedef enum VkScopeKHR { + VK_SCOPE_DEVICE_KHR = 1, + VK_SCOPE_WORKGROUP_KHR = 2, + VK_SCOPE_SUBGROUP_KHR = 3, + VK_SCOPE_QUEUE_FAMILY_KHR = 5, + VK_SCOPE_DEVICE_NV = VK_SCOPE_DEVICE_KHR, + VK_SCOPE_WORKGROUP_NV = VK_SCOPE_WORKGROUP_KHR, + VK_SCOPE_SUBGROUP_NV = VK_SCOPE_SUBGROUP_KHR, + VK_SCOPE_QUEUE_FAMILY_NV = VK_SCOPE_QUEUE_FAMILY_KHR, + VK_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkScopeKHR; +typedef struct VkCooperativeMatrixPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t MSize; + uint32_t NSize; + uint32_t KSize; + VkComponentTypeKHR AType; + VkComponentTypeKHR BType; + VkComponentTypeKHR CType; + VkComponentTypeKHR ResultType; + VkBool32 saturatingAccumulation; + VkScopeKHR scope; +} VkCooperativeMatrixPropertiesKHR; + +typedef struct VkPhysicalDeviceCooperativeMatrixFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 cooperativeMatrix; + VkBool32 cooperativeMatrixRobustBufferAccess; +} VkPhysicalDeviceCooperativeMatrixFeaturesKHR; + +typedef struct VkPhysicalDeviceCooperativeMatrixPropertiesKHR { + VkStructureType sType; + void* pNext; + VkShaderStageFlags cooperativeMatrixSupportedStages; +} VkPhysicalDeviceCooperativeMatrixPropertiesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesKHR* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixPropertiesKHR* pProperties); +#endif + + +// VK_KHR_video_maintenance1 is a preprocessor guard. Do not pass it to API calls. 
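+// A usage sketch (not part of the header) for VK_KHR_cooperative_matrix above,
+// using the standard two-call enumeration idiom; the first call with a NULL
+// array just returns the number of supported M/N/K configurations.
+static uint32_t exampleCooperativeMatrixConfigCount(VkPhysicalDevice physicalDevice)
+{
+    uint32_t count = 0;
+    vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(physicalDevice, &count, NULL);
+    return count;
+}
+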
+#define VK_KHR_video_maintenance1 1 +#define VK_KHR_VIDEO_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_KHR_VIDEO_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_video_maintenance1" +typedef struct VkPhysicalDeviceVideoMaintenance1FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 videoMaintenance1; +} VkPhysicalDeviceVideoMaintenance1FeaturesKHR; + +typedef struct VkVideoInlineQueryInfoKHR { + VkStructureType sType; + const void* pNext; + VkQueryPool queryPool; + uint32_t firstQuery; + uint32_t queryCount; +} VkVideoInlineQueryInfoKHR; + + + +// VK_KHR_vertex_attribute_divisor is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_vertex_attribute_divisor 1 +#define VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 1 +#define VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_KHR_vertex_attribute_divisor" +typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t maxVertexAttribDivisor; + VkBool32 supportsNonZeroFirstInstance; +} VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR; + +typedef struct VkVertexInputBindingDivisorDescriptionKHR { + uint32_t binding; + uint32_t divisor; +} VkVertexInputBindingDivisorDescriptionKHR; + +typedef struct VkPipelineVertexInputDivisorStateCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t vertexBindingDivisorCount; + const VkVertexInputBindingDivisorDescriptionKHR* pVertexBindingDivisors; +} VkPipelineVertexInputDivisorStateCreateInfoKHR; + +typedef struct VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 vertexAttributeInstanceRateDivisor; + VkBool32 vertexAttributeInstanceRateZeroDivisor; +} VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR; + + + +// VK_KHR_calibrated_timestamps is a preprocessor guard. Do not pass it to API calls. 
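+// A sketch (not part of the header) for VK_KHR_vertex_attribute_divisor above:
+// a divisor of 2 makes binding 1 advance once every two instances. The chained
+// structs must outlive pipeline creation; the names here are illustrative.
+static void exampleChainVertexDivisor(VkPipelineVertexInputStateCreateInfo* pVertexInput,
+                                      VkPipelineVertexInputDivisorStateCreateInfoKHR* pDivisorState,
+                                      VkVertexInputBindingDivisorDescriptionKHR* pDivisor)
+{
+    pDivisor->binding = 1;
+    pDivisor->divisor = 2;
+    pDivisorState->sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR;
+    pDivisorState->pNext = NULL;
+    pDivisorState->vertexBindingDivisorCount = 1;
+    pDivisorState->pVertexBindingDivisors = pDivisor;
+    pVertexInput->pNext = pDivisorState;      // hangs off the vertex input state
+}
+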
+#define VK_KHR_calibrated_timestamps 1 +#define VK_KHR_CALIBRATED_TIMESTAMPS_SPEC_VERSION 1 +#define VK_KHR_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_KHR_calibrated_timestamps" + +typedef enum VkTimeDomainKHR { + VK_TIME_DOMAIN_DEVICE_KHR = 0, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR = 1, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR = 2, + VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR = 3, + VK_TIME_DOMAIN_DEVICE_EXT = VK_TIME_DOMAIN_DEVICE_KHR, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT = VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR, + VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR, + VK_TIME_DOMAIN_MAX_ENUM_KHR = 0x7FFFFFFF +} VkTimeDomainKHR; +typedef struct VkCalibratedTimestampInfoKHR { + VkStructureType sType; + const void* pNext; + VkTimeDomainKHR timeDomain; +} VkCalibratedTimestampInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainKHR* pTimeDomains); +typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsKHR)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pTimeDomainCount, + VkTimeDomainKHR* pTimeDomains); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsKHR( + VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR* pTimestampInfos, + uint64_t* pTimestamps, + uint64_t* pMaxDeviation); +#endif + + +// VK_KHR_maintenance6 is a preprocessor guard. Do not pass it to API calls. 
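+// A sketch (not part of the header) for VK_KHR_calibrated_timestamps above:
+// one device and one host timestamp sampled together, with the bound on their
+// skew returned in *pMaxDeviation. CLOCK_MONOTONIC is a POSIX-ish domain; query
+// vkGetPhysicalDeviceCalibrateableTimeDomainsKHR first on other platforms.
+static VkResult exampleSampleCalibratedTimestamps(VkDevice device, uint64_t timestamps[2], uint64_t* pMaxDeviation)
+{
+    VkCalibratedTimestampInfoKHR infos[2];
+    infos[0].sType = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR;
+    infos[0].pNext = NULL;
+    infos[0].timeDomain = VK_TIME_DOMAIN_DEVICE_KHR;
+    infos[1].sType = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR;
+    infos[1].pNext = NULL;
+    infos[1].timeDomain = VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR;
+    return vkGetCalibratedTimestampsKHR(device, 2, infos, timestamps, pMaxDeviation);
+}
+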
+#define VK_KHR_maintenance6 1 +#define VK_KHR_MAINTENANCE_6_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_6_EXTENSION_NAME "VK_KHR_maintenance6" +typedef struct VkPhysicalDeviceMaintenance6FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 maintenance6; +} VkPhysicalDeviceMaintenance6FeaturesKHR; + +typedef struct VkPhysicalDeviceMaintenance6PropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 blockTexelViewCompatibleMultipleLayers; + uint32_t maxCombinedImageSamplerDescriptorCount; + VkBool32 fragmentShadingRateClampCombinerInputs; +} VkPhysicalDeviceMaintenance6PropertiesKHR; + +typedef struct VkBindMemoryStatusKHR { + VkStructureType sType; + const void* pNext; + VkResult* pResult; +} VkBindMemoryStatusKHR; + +typedef struct VkBindDescriptorSetsInfoKHR { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t firstSet; + uint32_t descriptorSetCount; + const VkDescriptorSet* pDescriptorSets; + uint32_t dynamicOffsetCount; + const uint32_t* pDynamicOffsets; +} VkBindDescriptorSetsInfoKHR; + +typedef struct VkPushConstantsInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipelineLayout layout; + VkShaderStageFlags stageFlags; + uint32_t offset; + uint32_t size; + const void* pValues; +} VkPushConstantsInfoKHR; + +typedef struct VkPushDescriptorSetInfoKHR { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t set; + uint32_t descriptorWriteCount; + const VkWriteDescriptorSet* pDescriptorWrites; +} VkPushDescriptorSetInfoKHR; + +typedef struct VkPushDescriptorSetWithTemplateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDescriptorUpdateTemplate descriptorUpdateTemplate; + VkPipelineLayout layout; + uint32_t set; + const void* pData; +} VkPushDescriptorSetWithTemplateInfoKHR; + +typedef struct VkSetDescriptorBufferOffsetsInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t firstSet; + uint32_t setCount; + const uint32_t* pBufferIndices; + const VkDeviceSize* pOffsets; +} VkSetDescriptorBufferOffsetsInfoEXT; + +typedef struct VkBindDescriptorBufferEmbeddedSamplersInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t set; +} VkBindDescriptorBufferEmbeddedSamplersInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets2KHR)(VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfoKHR* pBindDescriptorSetsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants2KHR)(VkCommandBuffer commandBuffer, const VkPushConstantsInfoKHR* pPushConstantsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSet2KHR)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfoKHR* pPushDescriptorSetInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplate2KHR)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfoKHR* pPushDescriptorSetWithTemplateInfo); +typedef void (VKAPI_PTR *PFN_vkCmdSetDescriptorBufferOffsets2EXT)(VkCommandBuffer commandBuffer, const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT)(VkCommandBuffer commandBuffer, const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets2KHR( + VkCommandBuffer commandBuffer, + const 
VkBindDescriptorSetsInfoKHR* pBindDescriptorSetsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants2KHR( + VkCommandBuffer commandBuffer, + const VkPushConstantsInfoKHR* pPushConstantsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSet2KHR( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetInfoKHR* pPushDescriptorSetInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplate2KHR( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfoKHR* pPushDescriptorSetWithTemplateInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDescriptorBufferOffsets2EXT( + VkCommandBuffer commandBuffer, + const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + VkCommandBuffer commandBuffer, + const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo); +#endif + + +// VK_EXT_debug_report is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_debug_report 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) +#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 10 +#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" + +typedef enum VkDebugReportObjectTypeEXT { + VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0, + VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1, + VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3, + VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4, + VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6, + VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10, + VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11, + VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14, + VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17, + VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23, + VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25, + VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26, + VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27, + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, + VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000, + VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT = 1000029000, + VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT = 1000029001, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000150000, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000, + VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV_EXT = 1000307000, + VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV_EXT = 1000307001, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT = 1000366000, 
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportObjectTypeEXT; + +typedef enum VkDebugReportFlagBitsEXT { + VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001, + VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002, + VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004, + VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008, + VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010, + VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportFlagBitsEXT; +typedef VkFlags VkDebugReportFlagsEXT; +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage, + void* pUserData); + +typedef struct VkDebugReportCallbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportFlagsEXT flags; + PFN_vkDebugReportCallbackEXT pfnCallback; + void* pUserData; +} VkDebugReportCallbackCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT( + VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugReportCallbackEXT* pCallback); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT( + VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( + VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage); +#endif + + +// VK_NV_glsl_shader is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_glsl_shader 1 +#define VK_NV_GLSL_SHADER_SPEC_VERSION 1 +#define VK_NV_GLSL_SHADER_EXTENSION_NAME "VK_NV_glsl_shader" + + +// VK_EXT_depth_range_unrestricted is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_range_unrestricted 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted" + + +// VK_IMG_filter_cubic is a preprocessor guard. Do not pass it to API calls. 
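+// A sketch (not part of the header) for VK_EXT_debug_report above. The callback
+// matches PFN_vkDebugReportCallbackEXT; returning VK_FALSE asks the layer not to
+// abort the call that triggered the report. vkCreateDebugReportCallbackEXT is an
+// instance-level command and must be fetched through vkGetInstanceProcAddr.
+static VKAPI_ATTR VkBool32 VKAPI_CALL exampleDebugReportCallback(
+    VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType,
+    uint64_t object, size_t location, int32_t messageCode,
+    const char* pLayerPrefix, const char* pMessage, void* pUserData)
+{
+    (void)flags; (void)objectType; (void)object; (void)location;
+    (void)messageCode; (void)pLayerPrefix; (void)pUserData;
+    (void)pMessage;                           // a real application would log this
+    return VK_FALSE;
+}
+
+static VkResult exampleInstallDebugReport(VkInstance instance, VkDebugReportCallbackEXT* pCallback)
+{
+    PFN_vkCreateDebugReportCallbackEXT pfnCreate = (PFN_vkCreateDebugReportCallbackEXT)
+        vkGetInstanceProcAddr(instance, "vkCreateDebugReportCallbackEXT");
+    VkDebugReportCallbackCreateInfoEXT createInfo = {0};
+    createInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;
+    createInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
+    createInfo.pfnCallback = exampleDebugReportCallback;
+    return pfnCreate ? pfnCreate(instance, &createInfo, NULL, pCallback)
+                     : VK_ERROR_EXTENSION_NOT_PRESENT;
+}
+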
+#define VK_IMG_filter_cubic 1 +#define VK_IMG_FILTER_CUBIC_SPEC_VERSION 1 +#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic" + + +// VK_AMD_rasterization_order is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_rasterization_order 1 +#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1 +#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order" + +typedef enum VkRasterizationOrderAMD { + VK_RASTERIZATION_ORDER_STRICT_AMD = 0, + VK_RASTERIZATION_ORDER_RELAXED_AMD = 1, + VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF +} VkRasterizationOrderAMD; +typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { + VkStructureType sType; + const void* pNext; + VkRasterizationOrderAMD rasterizationOrder; +} VkPipelineRasterizationStateRasterizationOrderAMD; + + + +// VK_AMD_shader_trinary_minmax is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_trinary_minmax 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax" + + +// VK_AMD_shader_explicit_vertex_parameter is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_explicit_vertex_parameter 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter" + + +// VK_EXT_debug_marker is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_debug_marker 1 +#define VK_EXT_DEBUG_MARKER_SPEC_VERSION 4 +#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker" +typedef struct VkDebugMarkerObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + const char* pObjectName; +} VkDebugMarkerObjectNameInfoEXT; + +typedef struct VkDebugMarkerObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugMarkerObjectTagInfoEXT; + +typedef struct VkDebugMarkerMarkerInfoEXT { + VkStructureType sType; + const void* pNext; + const char* pMarkerName; + float color[4]; +} VkDebugMarkerMarkerInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo); +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT( + VkDevice device, + const VkDebugMarkerObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT( + VkDevice device, + const VkDebugMarkerObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); 
+#endif + + +// VK_AMD_gcn_shader is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_gcn_shader 1 +#define VK_AMD_GCN_SHADER_SPEC_VERSION 1 +#define VK_AMD_GCN_SHADER_EXTENSION_NAME "VK_AMD_gcn_shader" + + +// VK_NV_dedicated_allocation is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_dedicated_allocation 1 +#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1 +#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation" +typedef struct VkDedicatedAllocationImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationImageCreateInfoNV; + +typedef struct VkDedicatedAllocationBufferCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationBufferCreateInfoNV; + +typedef struct VkDedicatedAllocationMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkImage image; + VkBuffer buffer; +} VkDedicatedAllocationMemoryAllocateInfoNV; + + + +// VK_EXT_transform_feedback is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_transform_feedback 1 +#define VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION 1 +#define VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME "VK_EXT_transform_feedback" +typedef VkFlags VkPipelineRasterizationStateStreamCreateFlagsEXT; +typedef struct VkPhysicalDeviceTransformFeedbackFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 transformFeedback; + VkBool32 geometryStreams; +} VkPhysicalDeviceTransformFeedbackFeaturesEXT; + +typedef struct VkPhysicalDeviceTransformFeedbackPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxTransformFeedbackStreams; + uint32_t maxTransformFeedbackBuffers; + VkDeviceSize maxTransformFeedbackBufferSize; + uint32_t maxTransformFeedbackStreamDataSize; + uint32_t maxTransformFeedbackBufferDataSize; + uint32_t maxTransformFeedbackBufferDataStride; + VkBool32 transformFeedbackQueries; + VkBool32 transformFeedbackStreamsLinesTriangles; + VkBool32 transformFeedbackRasterizationStreamSelect; + VkBool32 transformFeedbackDraw; +} VkPhysicalDeviceTransformFeedbackPropertiesEXT; + +typedef struct VkPipelineRasterizationStateStreamCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationStateStreamCreateFlagsEXT flags; + uint32_t rasterizationStream; +} VkPipelineRasterizationStateStreamCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdBindTransformFeedbackBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes); +typedef void (VKAPI_PTR *PFN_vkCmdBeginTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdEndTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBeginQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index); +typedef void (VKAPI_PTR *PFN_vkCmdEndQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectByteCountEXT)(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t 
firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindTransformFeedbackBuffersEXT( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginTransformFeedbackEXT( + VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndTransformFeedbackEXT( + VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginQueryIndexedEXT( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags, + uint32_t index); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndQueryIndexedEXT( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + uint32_t index); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT( + VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride); +#endif + + +// VK_NVX_binary_import is a preprocessor guard. Do not pass it to API calls. +#define VK_NVX_binary_import 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuModuleNVX) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuFunctionNVX) +#define VK_NVX_BINARY_IMPORT_SPEC_VERSION 1 +#define VK_NVX_BINARY_IMPORT_EXTENSION_NAME "VK_NVX_binary_import" +typedef struct VkCuModuleCreateInfoNVX { + VkStructureType sType; + const void* pNext; + size_t dataSize; + const void* pData; +} VkCuModuleCreateInfoNVX; + +typedef struct VkCuFunctionCreateInfoNVX { + VkStructureType sType; + const void* pNext; + VkCuModuleNVX module; + const char* pName; +} VkCuFunctionCreateInfoNVX; + +typedef struct VkCuLaunchInfoNVX { + VkStructureType sType; + const void* pNext; + VkCuFunctionNVX function; + uint32_t gridDimX; + uint32_t gridDimY; + uint32_t gridDimZ; + uint32_t blockDimX; + uint32_t blockDimY; + uint32_t blockDimZ; + uint32_t sharedMemBytes; + size_t paramCount; + const void* const * pParams; + size_t extraCount; + const void* const * pExtras; +} VkCuLaunchInfoNVX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateCuModuleNVX)(VkDevice device, const VkCuModuleCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuModuleNVX* pModule); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCuFunctionNVX)(VkDevice device, const VkCuFunctionCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuFunctionNVX* pFunction); +typedef void (VKAPI_PTR *PFN_vkDestroyCuModuleNVX)(VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDestroyCuFunctionNVX)(VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdCuLaunchKernelNVX)(VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX* pLaunchInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuModuleNVX( + VkDevice device, + const VkCuModuleCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCuModuleNVX* pModule); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuFunctionNVX( + 
VkDevice device, + const VkCuFunctionCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCuFunctionNVX* pFunction); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCuModuleNVX( + VkDevice device, + VkCuModuleNVX module, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCuFunctionNVX( + VkDevice device, + VkCuFunctionNVX function, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdCuLaunchKernelNVX( + VkCommandBuffer commandBuffer, + const VkCuLaunchInfoNVX* pLaunchInfo); +#endif + + +// VK_NVX_image_view_handle is a preprocessor guard. Do not pass it to API calls. +#define VK_NVX_image_view_handle 1 +#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 2 +#define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle" +typedef struct VkImageViewHandleInfoNVX { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkDescriptorType descriptorType; + VkSampler sampler; +} VkImageViewHandleInfoNVX; + +typedef struct VkImageViewAddressPropertiesNVX { + VkStructureType sType; + void* pNext; + VkDeviceAddress deviceAddress; + VkDeviceSize size; +} VkImageViewAddressPropertiesNVX; + +typedef uint32_t (VKAPI_PTR *PFN_vkGetImageViewHandleNVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewAddressNVX)(VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR uint32_t VKAPI_CALL vkGetImageViewHandleNVX( + VkDevice device, + const VkImageViewHandleInfoNVX* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewAddressNVX( + VkDevice device, + VkImageView imageView, + VkImageViewAddressPropertiesNVX* pProperties); +#endif + + +// VK_AMD_draw_indirect_count is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_draw_indirect_count 1 +#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 2 +#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count" +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + + +// VK_AMD_negative_viewport_height is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_negative_viewport_height 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height" + + +// VK_AMD_gpu_shader_half_float is a preprocessor guard. Do not pass it to API calls. 
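+// A sketch (not part of the header) for VK_AMD_draw_indirect_count above:
+// device-level extension commands are commonly loaded at runtime instead of
+// relying on the static prototypes guarded by VK_NO_PROTOTYPES.
+static PFN_vkCmdDrawIndirectCountAMD exampleLoadDrawIndirectCountAMD(VkDevice device)
+{
+    return (PFN_vkCmdDrawIndirectCountAMD)
+        vkGetDeviceProcAddr(device, "vkCmdDrawIndirectCountAMD");
+}
+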
+#define VK_AMD_gpu_shader_half_float 1 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 2 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float" + + +// VK_AMD_shader_ballot is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_ballot 1 +#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1 +#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot" + + +// VK_AMD_texture_gather_bias_lod is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_texture_gather_bias_lod 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod" +typedef struct VkTextureLODGatherFormatPropertiesAMD { + VkStructureType sType; + void* pNext; + VkBool32 supportsTextureGatherLODBiasAMD; +} VkTextureLODGatherFormatPropertiesAMD; + + + +// VK_AMD_shader_info is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_info 1 +#define VK_AMD_SHADER_INFO_SPEC_VERSION 1 +#define VK_AMD_SHADER_INFO_EXTENSION_NAME "VK_AMD_shader_info" + +typedef enum VkShaderInfoTypeAMD { + VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0, + VK_SHADER_INFO_TYPE_BINARY_AMD = 1, + VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2, + VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkShaderInfoTypeAMD; +typedef struct VkShaderResourceUsageAMD { + uint32_t numUsedVgprs; + uint32_t numUsedSgprs; + uint32_t ldsSizePerLocalWorkGroup; + size_t ldsUsageSizeInBytes; + size_t scratchMemUsageInBytes; +} VkShaderResourceUsageAMD; + +typedef struct VkShaderStatisticsInfoAMD { + VkShaderStageFlags shaderStageMask; + VkShaderResourceUsageAMD resourceUsage; + uint32_t numPhysicalVgprs; + uint32_t numPhysicalSgprs; + uint32_t numAvailableVgprs; + uint32_t numAvailableSgprs; + uint32_t computeWorkGroupSize[3]; +} VkShaderStatisticsInfoAMD; + +typedef VkResult (VKAPI_PTR *PFN_vkGetShaderInfoAMD)(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderInfoAMD( + VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t* pInfoSize, + void* pInfo); +#endif + + +// VK_AMD_shader_image_load_store_lod is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_image_load_store_lod 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME "VK_AMD_shader_image_load_store_lod" + + +// VK_NV_corner_sampled_image is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_corner_sampled_image 1 +#define VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION 2 +#define VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME "VK_NV_corner_sampled_image" +typedef struct VkPhysicalDeviceCornerSampledImageFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cornerSampledImage; +} VkPhysicalDeviceCornerSampledImageFeaturesNV; + + + +// VK_IMG_format_pvrtc is a preprocessor guard. Do not pass it to API calls. +#define VK_IMG_format_pvrtc 1 +#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION 1 +#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc" + + +// VK_NV_external_memory_capabilities is a preprocessor guard. Do not pass it to API calls. 
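+// A sketch (not part of the header) for VK_AMD_shader_info above, using the
+// two-call idiom: a NULL pInfo pointer makes the driver report the required
+// size, after which the caller allocates a buffer and calls again.
+static VkResult exampleShaderDisassemblySize(VkDevice device, VkPipeline pipeline, size_t* pInfoSize)
+{
+    return vkGetShaderInfoAMD(device, pipeline, VK_SHADER_STAGE_FRAGMENT_BIT,
+                              VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, pInfoSize, NULL);
+}
+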
+#define VK_NV_external_memory_capabilities 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities" + +typedef enum VkExternalMemoryHandleTypeFlagBitsNV { + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBitsNV; +typedef VkFlags VkExternalMemoryHandleTypeFlagsNV; + +typedef enum VkExternalMemoryFeatureFlagBitsNV { + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBitsNV; +typedef VkFlags VkExternalMemoryFeatureFlagsNV; +typedef struct VkExternalImageFormatPropertiesNV { + VkImageFormatProperties imageFormatProperties; + VkExternalMemoryFeatureFlagsNV externalMemoryFeatures; + VkExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; +} VkExternalImageFormatPropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); +#endif + + +// VK_NV_external_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_external_memory 1 +#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory" +typedef struct VkExternalMemoryImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExternalMemoryImageCreateInfoNV; + +typedef struct VkExportMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExportMemoryAllocateInfoNV; + + + +// VK_EXT_validation_flags is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_validation_flags 1 +#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 3 +#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags" + +typedef enum VkValidationCheckEXT { + VK_VALIDATION_CHECK_ALL_EXT = 0, + VK_VALIDATION_CHECK_SHADERS_EXT = 1, + VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCheckEXT; +typedef struct VkValidationFlagsEXT { + VkStructureType sType; + const void* pNext; + uint32_t disabledValidationCheckCount; + const VkValidationCheckEXT* pDisabledValidationChecks; +} VkValidationFlagsEXT; + + + +// VK_EXT_shader_subgroup_ballot is a preprocessor guard. Do not pass it to API calls. 
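+// A sketch (not part of the header) for VK_EXT_validation_flags above: chaining
+// VkValidationFlagsEXT into VkInstanceCreateInfo disables the listed checks.
+// Both chained structs must stay alive until vkCreateInstance returns.
+static void exampleDisableShaderValidation(VkInstanceCreateInfo* pCreateInfo,
+                                           VkValidationFlagsEXT* pFlags,
+                                           VkValidationCheckEXT* pCheck)
+{
+    *pCheck = VK_VALIDATION_CHECK_SHADERS_EXT;
+    pFlags->sType = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT;
+    pFlags->pNext = pCreateInfo->pNext;       // preserve any existing chain
+    pFlags->disabledValidationCheckCount = 1;
+    pFlags->pDisabledValidationChecks = pCheck;
+    pCreateInfo->pNext = pFlags;
+}
+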
+#define VK_EXT_shader_subgroup_ballot 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot" + + +// VK_EXT_shader_subgroup_vote is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_subgroup_vote 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote" + + +// VK_EXT_texture_compression_astc_hdr is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_texture_compression_astc_hdr 1 +#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION 1 +#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME "VK_EXT_texture_compression_astc_hdr" +typedef VkPhysicalDeviceTextureCompressionASTCHDRFeatures VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; + + + +// VK_EXT_astc_decode_mode is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_astc_decode_mode 1 +#define VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION 1 +#define VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME "VK_EXT_astc_decode_mode" +typedef struct VkImageViewASTCDecodeModeEXT { + VkStructureType sType; + const void* pNext; + VkFormat decodeMode; +} VkImageViewASTCDecodeModeEXT; + +typedef struct VkPhysicalDeviceASTCDecodeFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 decodeModeSharedExponent; +} VkPhysicalDeviceASTCDecodeFeaturesEXT; + + + +// VK_EXT_pipeline_robustness is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_robustness 1 +#define VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_pipeline_robustness" + +typedef enum VkPipelineRobustnessBufferBehaviorEXT { + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT = 0, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT = 1, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT = 2, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT = 3, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_MAX_ENUM_EXT = 0x7FFFFFFF +} VkPipelineRobustnessBufferBehaviorEXT; + +typedef enum VkPipelineRobustnessImageBehaviorEXT { + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT = 0, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT = 1, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT = 2, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT = 3, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_MAX_ENUM_EXT = 0x7FFFFFFF +} VkPipelineRobustnessImageBehaviorEXT; +typedef struct VkPhysicalDevicePipelineRobustnessFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pipelineRobustness; +} VkPhysicalDevicePipelineRobustnessFeaturesEXT; + +typedef struct VkPhysicalDevicePipelineRobustnessPropertiesEXT { + VkStructureType sType; + void* pNext; + VkPipelineRobustnessBufferBehaviorEXT defaultRobustnessStorageBuffers; + VkPipelineRobustnessBufferBehaviorEXT defaultRobustnessUniformBuffers; + VkPipelineRobustnessBufferBehaviorEXT defaultRobustnessVertexInputs; + VkPipelineRobustnessImageBehaviorEXT defaultRobustnessImages; +} VkPhysicalDevicePipelineRobustnessPropertiesEXT; + +typedef struct VkPipelineRobustnessCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRobustnessBufferBehaviorEXT storageBuffers; + VkPipelineRobustnessBufferBehaviorEXT uniformBuffers; + VkPipelineRobustnessBufferBehaviorEXT vertexInputs; + VkPipelineRobustnessImageBehaviorEXT images; +} VkPipelineRobustnessCreateInfoEXT; + + + +// 
VK_EXT_conditional_rendering is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_conditional_rendering 1 +#define VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION 2 +#define VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME "VK_EXT_conditional_rendering" + +typedef enum VkConditionalRenderingFlagBitsEXT { + VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT = 0x00000001, + VK_CONDITIONAL_RENDERING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkConditionalRenderingFlagBitsEXT; +typedef VkFlags VkConditionalRenderingFlagsEXT; +typedef struct VkConditionalRenderingBeginInfoEXT { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; + VkDeviceSize offset; + VkConditionalRenderingFlagsEXT flags; +} VkConditionalRenderingBeginInfoEXT; + +typedef struct VkPhysicalDeviceConditionalRenderingFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 conditionalRendering; + VkBool32 inheritedConditionalRendering; +} VkPhysicalDeviceConditionalRenderingFeaturesEXT; + +typedef struct VkCommandBufferInheritanceConditionalRenderingInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 conditionalRenderingEnable; +} VkCommandBufferInheritanceConditionalRenderingInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdBeginConditionalRenderingEXT)(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); +typedef void (VKAPI_PTR *PFN_vkCmdEndConditionalRenderingEXT)(VkCommandBuffer commandBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBeginConditionalRenderingEXT( + VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT( + VkCommandBuffer commandBuffer); +#endif + + +// VK_NV_clip_space_w_scaling is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_clip_space_w_scaling 1 +#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 +#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" +typedef struct VkViewportWScalingNV { + float xcoeff; + float ycoeff; +} VkViewportWScalingNV; + +typedef struct VkPipelineViewportWScalingStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportWScalingEnable; + uint32_t viewportCount; + const VkViewportWScalingNV* pViewportWScalings; +} VkPipelineViewportWScalingStateCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV* pViewportWScalings); +#endif + + +// VK_EXT_direct_mode_display is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_direct_mode_display 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display" +typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); +#endif + + +// VK_EXT_display_surface_counter is a preprocessor guard. Do not pass it to API calls. 
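+
+/*
+ * Illustrative sketch (not part of this header): predicating draws with
+ * VK_EXT_conditional_rendering, declared above. "cmd" and "predicateBuffer"
+ * are placeholders; the buffer must hold a 32-bit value at "offset" and must
+ * have been created with VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT.
+ *
+ *     VkConditionalRenderingBeginInfoEXT cond = { 0 };
+ *     cond.sType = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT;
+ *     cond.buffer = predicateBuffer;
+ *     cond.offset = 0;
+ *     cond.flags = 0;  // or VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT
+ *     vkCmdBeginConditionalRenderingEXT(cmd, &cond);
+ *     // ... draws recorded here execute only if the predicate is non-zero ...
+ *     vkCmdEndConditionalRenderingEXT(cmd);
+ */
+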
+#define VK_EXT_display_surface_counter 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" + +typedef enum VkSurfaceCounterFlagBitsEXT { + VK_SURFACE_COUNTER_VBLANK_BIT_EXT = 0x00000001, + VK_SURFACE_COUNTER_VBLANK_EXT = VK_SURFACE_COUNTER_VBLANK_BIT_EXT, + VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSurfaceCounterFlagBitsEXT; +typedef VkFlags VkSurfaceCounterFlagsEXT; +typedef struct VkSurfaceCapabilities2EXT { + VkStructureType sType; + void* pNext; + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; + VkSurfaceCounterFlagsEXT supportedSurfaceCounters; +} VkSurfaceCapabilities2EXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT* pSurfaceCapabilities); +#endif + + +// VK_EXT_display_control is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_display_control 1 +#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control" + +typedef enum VkDisplayPowerStateEXT { + VK_DISPLAY_POWER_STATE_OFF_EXT = 0, + VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1, + VK_DISPLAY_POWER_STATE_ON_EXT = 2, + VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayPowerStateEXT; + +typedef enum VkDeviceEventTypeEXT { + VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0, + VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceEventTypeEXT; + +typedef enum VkDisplayEventTypeEXT { + VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0, + VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayEventTypeEXT; +typedef struct VkDisplayPowerInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayPowerStateEXT powerState; +} VkDisplayPowerInfoEXT; + +typedef struct VkDeviceEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceEventTypeEXT deviceEvent; +} VkDeviceEventInfoEXT; + +typedef struct VkDisplayEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayEventTypeEXT displayEvent; +} VkDisplayEventInfoEXT; + +typedef struct VkSwapchainCounterCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkSurfaceCounterFlagsEXT surfaceCounters; +} VkSwapchainCounterCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue); + 
+#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayPowerInfoEXT* pDisplayPowerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT( + VkDevice device, + const VkDeviceEventInfoEXT* pDeviceEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT* pDisplayEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT( + VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t* pCounterValue); +#endif + + +// VK_GOOGLE_display_timing is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_display_timing 1 +#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1 +#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing" +typedef struct VkRefreshCycleDurationGOOGLE { + uint64_t refreshDuration; +} VkRefreshCycleDurationGOOGLE; + +typedef struct VkPastPresentationTimingGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; + uint64_t actualPresentTime; + uint64_t earliestPresentTime; + uint64_t presentMargin; +} VkPastPresentationTimingGOOGLE; + +typedef struct VkPresentTimeGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; +} VkPresentTimeGOOGLE; + +typedef struct VkPresentTimesInfoGOOGLE { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentTimeGOOGLE* pTimes; +} VkPresentTimesInfoGOOGLE; + +typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pPresentationTimingCount, + VkPastPresentationTimingGOOGLE* pPresentationTimings); +#endif + + +// VK_NV_sample_mask_override_coverage is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_sample_mask_override_coverage 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage" + + +// VK_NV_geometry_shader_passthrough is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_geometry_shader_passthrough 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough" + + +// VK_NV_viewport_array2 is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_viewport_array2 1 +#define VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME "VK_NV_viewport_array2" +#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION +#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME + + +// VK_NVX_multiview_per_view_attributes is a preprocessor guard. Do not pass it to API calls. 
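+
+/*
+ * Illustrative sketch (not part of this header): the standard Vulkan two-call
+ * enumeration pattern applied to vkGetPastPresentationTimingGOOGLE, declared
+ * above. "device" and "swapchain" are placeholder application handles.
+ *
+ *     uint32_t count = 0;
+ *     vkGetPastPresentationTimingGOOGLE(device, swapchain, &count, NULL);
+ *     VkPastPresentationTimingGOOGLE* timings =
+ *         malloc(count * sizeof(VkPastPresentationTimingGOOGLE));
+ *     vkGetPastPresentationTimingGOOGLE(device, swapchain, &count, timings);
+ *     // Comparing timings[i].actualPresentTime against desiredPresentTime
+ *     // reveals missed presents.
+ *     free(timings);
+ */
+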
+#define VK_NVX_multiview_per_view_attributes 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes" +typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { + VkStructureType sType; + void* pNext; + VkBool32 perViewPositionAllComponents; +} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + + +// VK_NV_viewport_swizzle is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_viewport_swizzle 1 +#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle" + +typedef enum VkViewportCoordinateSwizzleNV { + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7, + VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF +} VkViewportCoordinateSwizzleNV; +typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; +typedef struct VkViewportSwizzleNV { + VkViewportCoordinateSwizzleNV x; + VkViewportCoordinateSwizzleNV y; + VkViewportCoordinateSwizzleNV z; + VkViewportCoordinateSwizzleNV w; +} VkViewportSwizzleNV; + +typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineViewportSwizzleStateCreateFlagsNV flags; + uint32_t viewportCount; + const VkViewportSwizzleNV* pViewportSwizzles; +} VkPipelineViewportSwizzleStateCreateInfoNV; + + + +// VK_EXT_discard_rectangles is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_discard_rectangles 1 +#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 2 +#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles" + +typedef enum VkDiscardRectangleModeEXT { + VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0, + VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1, + VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDiscardRectangleModeEXT; +typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxDiscardRectangles; +} VkPhysicalDeviceDiscardRectanglePropertiesEXT; + +typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineDiscardRectangleStateCreateFlagsEXT flags; + VkDiscardRectangleModeEXT discardRectangleMode; + uint32_t discardRectangleCount; + const VkRect2D* pDiscardRectangles; +} VkPipelineDiscardRectangleStateCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles); +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleModeEXT)(VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT( + VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D* pDiscardRectangles); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 discardRectangleEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleModeEXT( + VkCommandBuffer commandBuffer, + VkDiscardRectangleModeEXT discardRectangleMode); +#endif + + +// VK_EXT_conservative_rasterization is a preprocessor guard. Do not pass it to API calls. 
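+
+/*
+ * Illustrative sketch (not part of this header): setting a discard rectangle
+ * dynamically with VK_EXT_discard_rectangles, declared above. This assumes
+ * the bound pipeline enables VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT; "cmd"
+ * is a placeholder command buffer.
+ *
+ *     VkRect2D rect = { { 0, 0 }, { 256, 256 } };
+ *     vkCmdSetDiscardRectangleEXT(cmd, 0, 1, &rect);
+ *     // With VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, fragments outside
+ *     // "rect" are discarded.
+ */
+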
+#define VK_EXT_conservative_rasterization 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME "VK_EXT_conservative_rasterization" + +typedef enum VkConservativeRasterizationModeEXT { + VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0, + VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1, + VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2, + VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkConservativeRasterizationModeEXT; +typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT { + VkStructureType sType; + void* pNext; + float primitiveOverestimationSize; + float maxExtraPrimitiveOverestimationSize; + float extraPrimitiveOverestimationSizeGranularity; + VkBool32 primitiveUnderestimation; + VkBool32 conservativePointAndLineRasterization; + VkBool32 degenerateTrianglesRasterized; + VkBool32 degenerateLinesRasterized; + VkBool32 fullyCoveredFragmentShaderInputVariable; + VkBool32 conservativeRasterizationPostDepthCoverage; +} VkPhysicalDeviceConservativeRasterizationPropertiesEXT; + +typedef struct VkPipelineRasterizationConservativeStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationConservativeStateCreateFlagsEXT flags; + VkConservativeRasterizationModeEXT conservativeRasterizationMode; + float extraPrimitiveOverestimationSize; +} VkPipelineRasterizationConservativeStateCreateInfoEXT; + + + +// VK_EXT_depth_clip_enable is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_clip_enable 1 +#define VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME "VK_EXT_depth_clip_enable" +typedef VkFlags VkPipelineRasterizationDepthClipStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceDepthClipEnableFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClipEnable; +} VkPhysicalDeviceDepthClipEnableFeaturesEXT; + +typedef struct VkPipelineRasterizationDepthClipStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationDepthClipStateCreateFlagsEXT flags; + VkBool32 depthClipEnable; +} VkPipelineRasterizationDepthClipStateCreateInfoEXT; + + + +// VK_EXT_swapchain_colorspace is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_swapchain_colorspace 1 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 4 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" + + +// VK_EXT_hdr_metadata is a preprocessor guard. Do not pass it to API calls. 
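+
+/*
+ * Illustrative sketch (not part of this header): enabling overestimating
+ * conservative rasterization by chaining the EXT struct declared above into
+ * a pipeline's VkPipelineRasterizationStateCreateInfo. "rasterInfo" is a
+ * placeholder the application fills as usual.
+ *
+ *     VkPipelineRasterizationConservativeStateCreateInfoEXT conservative = { 0 };
+ *     conservative.sType =
+ *         VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT;
+ *     conservative.conservativeRasterizationMode =
+ *         VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT;
+ *     conservative.extraPrimitiveOverestimationSize = 0.0f;
+ *     rasterInfo.pNext = &conservative;
+ */
+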
+#define VK_EXT_hdr_metadata 1 +#define VK_EXT_HDR_METADATA_SPEC_VERSION 2 +#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" +typedef struct VkXYColorEXT { + float x; + float y; +} VkXYColorEXT; + +typedef struct VkHdrMetadataEXT { + VkStructureType sType; + const void* pNext; + VkXYColorEXT displayPrimaryRed; + VkXYColorEXT displayPrimaryGreen; + VkXYColorEXT displayPrimaryBlue; + VkXYColorEXT whitePoint; + float maxLuminance; + float minLuminance; + float maxContentLightLevel; + float maxFrameAverageLightLevel; +} VkHdrMetadataEXT; + +typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR* pSwapchains, + const VkHdrMetadataEXT* pMetadata); +#endif + + +// VK_IMG_relaxed_line_rasterization is a preprocessor guard. Do not pass it to API calls. +#define VK_IMG_relaxed_line_rasterization 1 +#define VK_IMG_RELAXED_LINE_RASTERIZATION_SPEC_VERSION 1 +#define VK_IMG_RELAXED_LINE_RASTERIZATION_EXTENSION_NAME "VK_IMG_relaxed_line_rasterization" +typedef struct VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG { + VkStructureType sType; + void* pNext; + VkBool32 relaxedLineRasterization; +} VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + + + +// VK_EXT_external_memory_dma_buf is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_external_memory_dma_buf 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME "VK_EXT_external_memory_dma_buf" + + +// VK_EXT_queue_family_foreign is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_queue_family_foreign 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign" +#define VK_QUEUE_FAMILY_FOREIGN_EXT (~2U) + + +// VK_EXT_debug_utils is a preprocessor guard. Do not pass it to API calls. 
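+
+/*
+ * Illustrative sketch (not part of this header): tagging a swapchain with
+ * mastering metadata via vkSetHdrMetadataEXT, declared above. The primaries
+ * below are the standard BT.2020/D65 chromaticities; the luminance values
+ * are in nits and are placeholders for content-derived numbers.
+ *
+ *     VkHdrMetadataEXT hdr = { 0 };
+ *     hdr.sType = VK_STRUCTURE_TYPE_HDR_METADATA_EXT;
+ *     hdr.displayPrimaryRed   = (VkXYColorEXT){ 0.708f, 0.292f };
+ *     hdr.displayPrimaryGreen = (VkXYColorEXT){ 0.170f, 0.797f };
+ *     hdr.displayPrimaryBlue  = (VkXYColorEXT){ 0.131f, 0.046f };
+ *     hdr.whitePoint          = (VkXYColorEXT){ 0.3127f, 0.3290f };
+ *     hdr.maxLuminance = 1000.0f;
+ *     hdr.minLuminance = 0.001f;
+ *     hdr.maxContentLightLevel = 1000.0f;
+ *     hdr.maxFrameAverageLightLevel = 400.0f;
+ *     vkSetHdrMetadataEXT(device, 1, &swapchain, &hdr);
+ */
+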
+#define VK_EXT_debug_utils 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) +#define VK_EXT_DEBUG_UTILS_SPEC_VERSION 2 +#define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils" +typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT; + +typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT = 0x00000010, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT = 0x00000100, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageSeverityFlagBitsEXT; + +typedef enum VkDebugUtilsMessageTypeFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002, + VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004, + VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT = 0x00000008, + VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageTypeFlagBitsEXT; +typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT; +typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT; +typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT; +typedef struct VkDebugUtilsLabelEXT { + VkStructureType sType; + const void* pNext; + const char* pLabelName; + float color[4]; +} VkDebugUtilsLabelEXT; + +typedef struct VkDebugUtilsObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + const char* pObjectName; +} VkDebugUtilsObjectNameInfoEXT; + +typedef struct VkDebugUtilsMessengerCallbackDataEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCallbackDataFlagsEXT flags; + const char* pMessageIdName; + int32_t messageIdNumber; + const char* pMessage; + uint32_t queueLabelCount; + const VkDebugUtilsLabelEXT* pQueueLabels; + uint32_t cmdBufLabelCount; + const VkDebugUtilsLabelEXT* pCmdBufLabels; + uint32_t objectCount; + const VkDebugUtilsObjectNameInfoEXT* pObjects; +} VkDebugUtilsMessengerCallbackDataEXT; + +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugUtilsMessengerCallbackEXT)( + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, + void* pUserData); + +typedef struct VkDebugUtilsMessengerCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCreateFlagsEXT flags; + VkDebugUtilsMessageSeverityFlagsEXT messageSeverity; + VkDebugUtilsMessageTypeFlagsEXT messageType; + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback; + void* pUserData; +} VkDebugUtilsMessengerCreateInfoEXT; + +typedef struct VkDebugUtilsObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugUtilsObjectTagInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo); +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo); +typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkQueueEndDebugUtilsLabelEXT)(VkQueue queue); +typedef void (VKAPI_PTR *PFN_vkQueueInsertDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef 
void (VKAPI_PTR *PFN_vkCmdBeginDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdInsertDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugUtilsMessengerEXT)(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugUtilsMessengerEXT)(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkSubmitDebugUtilsMessageEXT)(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT( + VkDevice device, + const VkDebugUtilsObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT( + VkDevice device, + const VkDebugUtilsObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT( + VkQueue queue); + +VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT( + VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugUtilsMessengerEXT* pMessenger); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT( + VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT( + VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); +#endif + + +// VK_EXT_sampler_filter_minmax is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_sampler_filter_minmax 1 +#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 2 +#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax" +typedef VkSamplerReductionMode VkSamplerReductionModeEXT; + +typedef VkSamplerReductionModeCreateInfo VkSamplerReductionModeCreateInfoEXT; + +typedef VkPhysicalDeviceSamplerFilterMinmaxProperties VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT; + + + +// VK_AMD_gpu_shader_int16 is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_gpu_shader_int16 1 +#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 2 +#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" + + +// VK_AMD_mixed_attachment_samples is a preprocessor guard. Do not pass it to API calls. 
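+
+/*
+ * Illustrative sketch (not part of this header): installing a debug
+ * messenger with VK_EXT_debug_utils, declared above. Because this is an
+ * instance extension, vkCreateDebugUtilsMessengerEXT must be fetched through
+ * vkGetInstanceProcAddr; "instance" is a placeholder handle.
+ *
+ *     static VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback(
+ *         VkDebugUtilsMessageSeverityFlagBitsEXT severity,
+ *         VkDebugUtilsMessageTypeFlagsEXT types,
+ *         const VkDebugUtilsMessengerCallbackDataEXT* data, void* user)
+ *     {
+ *         fprintf(stderr, "[vulkan] %s\n", data->pMessage);
+ *         return VK_FALSE;  // never abort the triggering call
+ *     }
+ *
+ *     VkDebugUtilsMessengerCreateInfoEXT info = { 0 };
+ *     info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
+ *     info.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT
+ *                          | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
+ *     info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT
+ *                      | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT
+ *                      | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
+ *     info.pfnUserCallback = debugCallback;
+ *
+ *     PFN_vkCreateDebugUtilsMessengerEXT create =
+ *         (PFN_vkCreateDebugUtilsMessengerEXT)
+ *             vkGetInstanceProcAddr(instance, "vkCreateDebugUtilsMessengerEXT");
+ *     VkDebugUtilsMessengerEXT messenger;
+ *     if (create)
+ *         create(instance, &info, NULL, &messenger);
+ */
+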
+#define VK_AMD_mixed_attachment_samples 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" + + +// VK_AMD_shader_fragment_mask is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_fragment_mask 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask" + + +// VK_EXT_inline_uniform_block is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_inline_uniform_block 1 +#define VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION 1 +#define VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME "VK_EXT_inline_uniform_block" +typedef VkPhysicalDeviceInlineUniformBlockFeatures VkPhysicalDeviceInlineUniformBlockFeaturesEXT; + +typedef VkPhysicalDeviceInlineUniformBlockProperties VkPhysicalDeviceInlineUniformBlockPropertiesEXT; + +typedef VkWriteDescriptorSetInlineUniformBlock VkWriteDescriptorSetInlineUniformBlockEXT; + +typedef VkDescriptorPoolInlineUniformBlockCreateInfo VkDescriptorPoolInlineUniformBlockCreateInfoEXT; + + + +// VK_EXT_shader_stencil_export is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_stencil_export 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export" + + +// VK_EXT_sample_locations is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_sample_locations 1 +#define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1 +#define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations" +typedef struct VkSampleLocationEXT { + float x; + float y; +} VkSampleLocationEXT; + +typedef struct VkSampleLocationsInfoEXT { + VkStructureType sType; + const void* pNext; + VkSampleCountFlagBits sampleLocationsPerPixel; + VkExtent2D sampleLocationGridSize; + uint32_t sampleLocationsCount; + const VkSampleLocationEXT* pSampleLocations; +} VkSampleLocationsInfoEXT; + +typedef struct VkAttachmentSampleLocationsEXT { + uint32_t attachmentIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkAttachmentSampleLocationsEXT; + +typedef struct VkSubpassSampleLocationsEXT { + uint32_t subpassIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkSubpassSampleLocationsEXT; + +typedef struct VkRenderPassSampleLocationsBeginInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t attachmentInitialSampleLocationsCount; + const VkAttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations; + uint32_t postSubpassSampleLocationsCount; + const VkSubpassSampleLocationsEXT* pPostSubpassSampleLocations; +} VkRenderPassSampleLocationsBeginInfoEXT; + +typedef struct VkPipelineSampleLocationsStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 sampleLocationsEnable; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkPipelineSampleLocationsStateCreateInfoEXT; + +typedef struct VkPhysicalDeviceSampleLocationsPropertiesEXT { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleLocationSampleCounts; + VkExtent2D maxSampleLocationGridSize; + float sampleLocationCoordinateRange[2]; + uint32_t sampleLocationSubPixelBits; + VkBool32 variableSampleLocations; +} VkPhysicalDeviceSampleLocationsPropertiesEXT; + +typedef struct VkMultisamplePropertiesEXT { + VkStructureType sType; + void* pNext; + VkExtent2D maxSampleLocationGridSize; +} VkMultisamplePropertiesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const 
VkSampleLocationsInfoEXT* pSampleLocationsInfo); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEXT( + VkCommandBuffer commandBuffer, + const VkSampleLocationsInfoEXT* pSampleLocationsInfo); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT( + VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT* pMultisampleProperties); +#endif + + +// VK_EXT_blend_operation_advanced is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_blend_operation_advanced 1 +#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2 +#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced" + +typedef enum VkBlendOverlapEXT { + VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0, + VK_BLEND_OVERLAP_DISJOINT_EXT = 1, + VK_BLEND_OVERLAP_CONJOINT_EXT = 2, + VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBlendOverlapEXT; +typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 advancedBlendCoherentOperations; +} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; + +typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t advancedBlendMaxColorAttachments; + VkBool32 advancedBlendIndependentBlend; + VkBool32 advancedBlendNonPremultipliedSrcColor; + VkBool32 advancedBlendNonPremultipliedDstColor; + VkBool32 advancedBlendCorrelatedOverlap; + VkBool32 advancedBlendAllOperations; +} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; + +typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 srcPremultiplied; + VkBool32 dstPremultiplied; + VkBlendOverlapEXT blendOverlap; +} VkPipelineColorBlendAdvancedStateCreateInfoEXT; + + + +// VK_NV_fragment_coverage_to_color is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_fragment_coverage_to_color 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color" +typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV; +typedef struct VkPipelineCoverageToColorStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageToColorStateCreateFlagsNV flags; + VkBool32 coverageToColorEnable; + uint32_t coverageToColorLocation; +} VkPipelineCoverageToColorStateCreateInfoNV; + + + +// VK_NV_framebuffer_mixed_samples is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_framebuffer_mixed_samples 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples" + +typedef enum VkCoverageModulationModeNV { + VK_COVERAGE_MODULATION_MODE_NONE_NV = 0, + VK_COVERAGE_MODULATION_MODE_RGB_NV = 1, + VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2, + VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3, + VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoverageModulationModeNV; +typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; +typedef struct VkPipelineCoverageModulationStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageModulationStateCreateFlagsNV flags; + VkCoverageModulationModeNV coverageModulationMode; + VkBool32 coverageModulationTableEnable; + uint32_t coverageModulationTableCount; + const float* pCoverageModulationTable; +} VkPipelineCoverageModulationStateCreateInfoNV; + + + +// VK_NV_fill_rectangle is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_fill_rectangle 1 +#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1 +#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle" + + +// VK_NV_shader_sm_builtins is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shader_sm_builtins 1 +#define VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION 1 +#define VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME "VK_NV_shader_sm_builtins" +typedef struct VkPhysicalDeviceShaderSMBuiltinsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t shaderSMCount; + uint32_t shaderWarpsPerSM; +} VkPhysicalDeviceShaderSMBuiltinsPropertiesNV; + +typedef struct VkPhysicalDeviceShaderSMBuiltinsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shaderSMBuiltins; +} VkPhysicalDeviceShaderSMBuiltinsFeaturesNV; + + + +// VK_EXT_post_depth_coverage is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_post_depth_coverage 1 +#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1 +#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage" + + +// VK_EXT_image_drm_format_modifier is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_image_drm_format_modifier 1 +#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION 2 +#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME "VK_EXT_image_drm_format_modifier" +typedef struct VkDrmFormatModifierPropertiesEXT { + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + VkFormatFeatureFlags drmFormatModifierTilingFeatures; +} VkDrmFormatModifierPropertiesEXT; + +typedef struct VkDrmFormatModifierPropertiesListEXT { + VkStructureType sType; + void* pNext; + uint32_t drmFormatModifierCount; + VkDrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties; +} VkDrmFormatModifierPropertiesListEXT; + +typedef struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT { + VkStructureType sType; + const void* pNext; + uint64_t drmFormatModifier; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; +} VkPhysicalDeviceImageDrmFormatModifierInfoEXT; + +typedef struct VkImageDrmFormatModifierListCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t drmFormatModifierCount; + const uint64_t* pDrmFormatModifiers; +} VkImageDrmFormatModifierListCreateInfoEXT; + +typedef struct VkImageDrmFormatModifierExplicitCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + const VkSubresourceLayout* pPlaneLayouts; +} VkImageDrmFormatModifierExplicitCreateInfoEXT; + +typedef struct VkImageDrmFormatModifierPropertiesEXT { + VkStructureType sType; + void* pNext; + uint64_t drmFormatModifier; +} VkImageDrmFormatModifierPropertiesEXT; + +typedef struct VkDrmFormatModifierProperties2EXT { + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + VkFormatFeatureFlags2 drmFormatModifierTilingFeatures; +} VkDrmFormatModifierProperties2EXT; + +typedef struct VkDrmFormatModifierPropertiesList2EXT { + VkStructureType sType; + void* pNext; + uint32_t drmFormatModifierCount; + VkDrmFormatModifierProperties2EXT* pDrmFormatModifierProperties; +} VkDrmFormatModifierPropertiesList2EXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetImageDrmFormatModifierPropertiesEXT)(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageDrmFormatModifierPropertiesEXT( + VkDevice device, + VkImage image, + VkImageDrmFormatModifierPropertiesEXT* pProperties); +#endif + + +// VK_EXT_validation_cache is a preprocessor guard. Do not pass it to API calls. 
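+
+/*
+ * Illustrative sketch (not part of this header): after creating an image
+ * with VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, the modifier the driver
+ * actually chose can be read back with the query declared above. "device"
+ * and "image" are placeholder handles.
+ *
+ *     VkImageDrmFormatModifierPropertiesEXT props = { 0 };
+ *     props.sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT;
+ *     if (vkGetImageDrmFormatModifierPropertiesEXT(device, image, &props) == VK_SUCCESS)
+ *     {
+ *         // props.drmFormatModifier can now be handed to the consumer
+ *         // (e.g. a DRM/KMS or Wayland importer) together with the dma-buf fd.
+ *     }
+ */
+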
+#define VK_EXT_validation_cache 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) +#define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1 +#define VK_EXT_VALIDATION_CACHE_EXTENSION_NAME "VK_EXT_validation_cache" + +typedef enum VkValidationCacheHeaderVersionEXT { + VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1, + VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCacheHeaderVersionEXT; +typedef VkFlags VkValidationCacheCreateFlagsEXT; +typedef struct VkValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheCreateFlagsEXT flags; + size_t initialDataSize; + const void* pInitialData; +} VkValidationCacheCreateInfoEXT; + +typedef struct VkShaderModuleValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheEXT validationCache; +} VkShaderModuleValidationCacheCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateValidationCacheEXT)(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache); +typedef void (VKAPI_PTR *PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateValidationCacheEXT( + VkDevice device, + const VkValidationCacheCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkValidationCacheEXT* pValidationCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyValidationCacheEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergeValidationCachesEXT( + VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + size_t* pDataSize, + void* pData); +#endif + + +// VK_EXT_descriptor_indexing is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_descriptor_indexing 1 +#define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2 +#define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing" +typedef VkDescriptorBindingFlagBits VkDescriptorBindingFlagBitsEXT; + +typedef VkDescriptorBindingFlags VkDescriptorBindingFlagsEXT; + +typedef VkDescriptorSetLayoutBindingFlagsCreateInfo VkDescriptorSetLayoutBindingFlagsCreateInfoEXT; + +typedef VkPhysicalDeviceDescriptorIndexingFeatures VkPhysicalDeviceDescriptorIndexingFeaturesEXT; + +typedef VkPhysicalDeviceDescriptorIndexingProperties VkPhysicalDeviceDescriptorIndexingPropertiesEXT; + +typedef VkDescriptorSetVariableDescriptorCountAllocateInfo VkDescriptorSetVariableDescriptorCountAllocateInfoEXT; + +typedef VkDescriptorSetVariableDescriptorCountLayoutSupport VkDescriptorSetVariableDescriptorCountLayoutSupportEXT; + + + +// VK_EXT_shader_viewport_index_layer is a preprocessor guard. Do not pass it to API calls. 
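+
+/*
+ * Illustrative sketch (not part of this header): persisting a validation
+ * cache across runs, mirroring the usual VkPipelineCache pattern. "device"
+ * is a placeholder; loading previously saved bytes into pInitialData is
+ * omitted for brevity.
+ *
+ *     VkValidationCacheCreateInfoEXT info = { 0 };
+ *     info.sType = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT;
+ *     VkValidationCacheEXT cache;
+ *     vkCreateValidationCacheEXT(device, &info, NULL, &cache);
+ *
+ *     // ... create shader modules with a
+ *     // VkShaderModuleValidationCacheCreateInfoEXT chained in ...
+ *
+ *     size_t size = 0;
+ *     vkGetValidationCacheDataEXT(device, cache, &size, NULL);   // query size
+ *     void* blob = malloc(size);
+ *     vkGetValidationCacheDataEXT(device, cache, &size, blob);   // fetch bytes
+ *     // write "blob" to disk for the next run, then:
+ *     free(blob);
+ *     vkDestroyValidationCacheEXT(device, cache, NULL);
+ */
+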
+#define VK_EXT_shader_viewport_index_layer 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer" + + +// VK_NV_shading_rate_image is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shading_rate_image 1 +#define VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION 3 +#define VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME "VK_NV_shading_rate_image" + +typedef enum VkShadingRatePaletteEntryNV { + VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV = 0, + VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV = 1, + VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV = 2, + VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV = 3, + VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV = 4, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV = 5, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV = 6, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV = 7, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV = 8, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV = 9, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV = 10, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV = 11, + VK_SHADING_RATE_PALETTE_ENTRY_MAX_ENUM_NV = 0x7FFFFFFF +} VkShadingRatePaletteEntryNV; + +typedef enum VkCoarseSampleOrderTypeNV { + VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV = 0, + VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV = 1, + VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV = 2, + VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV = 3, + VK_COARSE_SAMPLE_ORDER_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoarseSampleOrderTypeNV; +typedef struct VkShadingRatePaletteNV { + uint32_t shadingRatePaletteEntryCount; + const VkShadingRatePaletteEntryNV* pShadingRatePaletteEntries; +} VkShadingRatePaletteNV; + +typedef struct VkPipelineViewportShadingRateImageStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 shadingRateImageEnable; + uint32_t viewportCount; + const VkShadingRatePaletteNV* pShadingRatePalettes; +} VkPipelineViewportShadingRateImageStateCreateInfoNV; + +typedef struct VkPhysicalDeviceShadingRateImageFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shadingRateImage; + VkBool32 shadingRateCoarseSampleOrder; +} VkPhysicalDeviceShadingRateImageFeaturesNV; + +typedef struct VkPhysicalDeviceShadingRateImagePropertiesNV { + VkStructureType sType; + void* pNext; + VkExtent2D shadingRateTexelSize; + uint32_t shadingRatePaletteSize; + uint32_t shadingRateMaxCoarseSamples; +} VkPhysicalDeviceShadingRateImagePropertiesNV; + +typedef struct VkCoarseSampleLocationNV { + uint32_t pixelX; + uint32_t pixelY; + uint32_t sample; +} VkCoarseSampleLocationNV; + +typedef struct VkCoarseSampleOrderCustomNV { + VkShadingRatePaletteEntryNV shadingRate; + uint32_t sampleCount; + uint32_t sampleLocationCount; + const VkCoarseSampleLocationNV* pSampleLocations; +} VkCoarseSampleOrderCustomNV; + +typedef struct VkPipelineViewportCoarseSampleOrderStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkCoarseSampleOrderTypeNV sampleOrderType; + uint32_t customSampleOrderCount; + const VkCoarseSampleOrderCustomNV* pCustomSampleOrders; +} VkPipelineViewportCoarseSampleOrderStateCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkCmdBindShadingRateImageNV)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportShadingRatePaletteNV)(VkCommandBuffer commandBuffer, 
uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoarseSampleOrderNV)(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindShadingRateImageNV( + VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportShadingRatePaletteNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkShadingRatePaletteNV* pShadingRatePalettes); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV( + VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); +#endif + + +// VK_NV_ray_tracing is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_ray_tracing 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV) +#define VK_NV_RAY_TRACING_SPEC_VERSION 3 +#define VK_NV_RAY_TRACING_EXTENSION_NAME "VK_NV_ray_tracing" +#define VK_SHADER_UNUSED_KHR (~0U) +#define VK_SHADER_UNUSED_NV VK_SHADER_UNUSED_KHR + +typedef enum VkRayTracingShaderGroupTypeKHR { + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR = 0, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR = 1, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR = 2, + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkRayTracingShaderGroupTypeKHR; +typedef VkRayTracingShaderGroupTypeKHR VkRayTracingShaderGroupTypeNV; + + +typedef enum VkGeometryTypeKHR { + VK_GEOMETRY_TYPE_TRIANGLES_KHR = 0, + VK_GEOMETRY_TYPE_AABBS_KHR = 1, + VK_GEOMETRY_TYPE_INSTANCES_KHR = 2, + VK_GEOMETRY_TYPE_TRIANGLES_NV = VK_GEOMETRY_TYPE_TRIANGLES_KHR, + VK_GEOMETRY_TYPE_AABBS_NV = VK_GEOMETRY_TYPE_AABBS_KHR, + VK_GEOMETRY_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryTypeKHR; +typedef VkGeometryTypeKHR VkGeometryTypeNV; + + +typedef enum VkAccelerationStructureTypeKHR { + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR = 0, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR = 1, + VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR = 2, + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureTypeKHR; +typedef VkAccelerationStructureTypeKHR VkAccelerationStructureTypeNV; + + +typedef enum VkCopyAccelerationStructureModeKHR { + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR = 0, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR = 1, + VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR = 2, + VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR = 3, + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} 
VkCopyAccelerationStructureModeKHR; +typedef VkCopyAccelerationStructureModeKHR VkCopyAccelerationStructureModeNV; + + +typedef enum VkAccelerationStructureMemoryRequirementsTypeNV { + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkAccelerationStructureMemoryRequirementsTypeNV; + +typedef enum VkGeometryFlagBitsKHR { + VK_GEOMETRY_OPAQUE_BIT_KHR = 0x00000001, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR = 0x00000002, + VK_GEOMETRY_OPAQUE_BIT_NV = VK_GEOMETRY_OPAQUE_BIT_KHR, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR, + VK_GEOMETRY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryFlagBitsKHR; +typedef VkFlags VkGeometryFlagsKHR; +typedef VkGeometryFlagsKHR VkGeometryFlagsNV; + +typedef VkGeometryFlagBitsKHR VkGeometryFlagBitsNV; + + +typedef enum VkGeometryInstanceFlagBitsKHR { + VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR = 0x00000001, + VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR = 0x00000002, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR = 0x00000004, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR = 0x00000008, + VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT = 0x00000010, + VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000020, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR, + VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryInstanceFlagBitsKHR; +typedef VkFlags VkGeometryInstanceFlagsKHR; +typedef VkGeometryInstanceFlagsKHR VkGeometryInstanceFlagsNV; + +typedef VkGeometryInstanceFlagBitsKHR VkGeometryInstanceFlagBitsNV; + + +typedef enum VkBuildAccelerationStructureFlagBitsKHR { + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR = 0x00000001, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR = 0x00000002, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR = 0x00000004, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR = 0x00000008, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR = 0x00000010, + VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV = 0x00000020, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT = 0x00000040, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000080, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT = 0x00000100, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV = 0x00000200, +#endif + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_KHR = 0x00000800, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = 
VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkBuildAccelerationStructureFlagBitsKHR; +typedef VkFlags VkBuildAccelerationStructureFlagsKHR; +typedef VkBuildAccelerationStructureFlagsKHR VkBuildAccelerationStructureFlagsNV; + +typedef VkBuildAccelerationStructureFlagBitsKHR VkBuildAccelerationStructureFlagBitsNV; + +typedef struct VkRayTracingShaderGroupCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkRayTracingShaderGroupTypeKHR type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; +} VkRayTracingShaderGroupCreateInfoNV; + +typedef struct VkRayTracingPipelineCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + uint32_t groupCount; + const VkRayTracingShaderGroupCreateInfoNV* pGroups; + uint32_t maxRecursionDepth; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkRayTracingPipelineCreateInfoNV; + +typedef struct VkGeometryTrianglesNV { + VkStructureType sType; + const void* pNext; + VkBuffer vertexData; + VkDeviceSize vertexOffset; + uint32_t vertexCount; + VkDeviceSize vertexStride; + VkFormat vertexFormat; + VkBuffer indexData; + VkDeviceSize indexOffset; + uint32_t indexCount; + VkIndexType indexType; + VkBuffer transformData; + VkDeviceSize transformOffset; +} VkGeometryTrianglesNV; + +typedef struct VkGeometryAABBNV { + VkStructureType sType; + const void* pNext; + VkBuffer aabbData; + uint32_t numAABBs; + uint32_t stride; + VkDeviceSize offset; +} VkGeometryAABBNV; + +typedef struct VkGeometryDataNV { + VkGeometryTrianglesNV triangles; + VkGeometryAABBNV aabbs; +} VkGeometryDataNV; + +typedef struct VkGeometryNV { + VkStructureType sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + VkGeometryDataNV geometry; + VkGeometryFlagsKHR flags; +} VkGeometryNV; + +typedef struct VkAccelerationStructureInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureTypeNV type; + VkBuildAccelerationStructureFlagsNV flags; + uint32_t instanceCount; + uint32_t geometryCount; + const VkGeometryNV* pGeometries; +} VkAccelerationStructureInfoNV; + +typedef struct VkAccelerationStructureCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceSize compactedSize; + VkAccelerationStructureInfoNV info; +} VkAccelerationStructureCreateInfoNV; + +typedef struct VkBindAccelerationStructureMemoryInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureNV accelerationStructure; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindAccelerationStructureMemoryInfoNV; + +typedef struct VkWriteDescriptorSetAccelerationStructureNV { + VkStructureType sType; + const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureNV* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureNV; + +typedef struct VkAccelerationStructureMemoryRequirementsInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureMemoryRequirementsTypeNV type; + VkAccelerationStructureNV 
accelerationStructure; +} VkAccelerationStructureMemoryRequirementsInfoNV; + +typedef struct VkPhysicalDeviceRayTracingPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t shaderGroupHandleSize; + uint32_t maxRecursionDepth; + uint32_t maxShaderGroupStride; + uint32_t shaderGroupBaseAlignment; + uint64_t maxGeometryCount; + uint64_t maxInstanceCount; + uint64_t maxTriangleCount; + uint32_t maxDescriptorSetAccelerationStructures; +} VkPhysicalDeviceRayTracingPropertiesNV; + +typedef struct VkTransformMatrixKHR { + float matrix[3][4]; +} VkTransformMatrixKHR; + +typedef VkTransformMatrixKHR VkTransformMatrixNV; + +typedef struct VkAabbPositionsKHR { + float minX; + float minY; + float minZ; + float maxX; + float maxY; + float maxZ; +} VkAabbPositionsKHR; + +typedef VkAabbPositionsKHR VkAabbPositionsNV; + +typedef struct VkAccelerationStructureInstanceKHR { + VkTransformMatrixKHR transform; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureInstanceKHR; + +typedef VkAccelerationStructureInstanceKHR VkAccelerationStructureInstanceNV; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); +typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeKHR mode); +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef VkResult 
(VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV( + VkDevice device, + const VkAccelerationStructureCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureNV* pAccelerationStructure); + +VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV( + VkDevice device, + VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV( + VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV( + VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV( + VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV* pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV( + VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeKHR mode); + +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV( + VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesNV( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV( + VkDevice device, + VkAccelerationStructureNV accelerationStructure, + size_t dataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL 
vkCmdWriteAccelerationStructuresPropertiesNV( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + +VKAPI_ATTR VkResult VKAPI_CALL vkCompileDeferredNV( + VkDevice device, + VkPipeline pipeline, + uint32_t shader); +#endif + + +// VK_NV_representative_fragment_test is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_representative_fragment_test 1 +#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION 2 +#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME "VK_NV_representative_fragment_test" +typedef struct VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 representativeFragmentTest; +} VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; + +typedef struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 representativeFragmentTestEnable; +} VkPipelineRepresentativeFragmentTestStateCreateInfoNV; + + + +// VK_EXT_filter_cubic is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_filter_cubic 1 +#define VK_EXT_FILTER_CUBIC_SPEC_VERSION 3 +#define VK_EXT_FILTER_CUBIC_EXTENSION_NAME "VK_EXT_filter_cubic" +typedef struct VkPhysicalDeviceImageViewImageFormatInfoEXT { + VkStructureType sType; + void* pNext; + VkImageViewType imageViewType; +} VkPhysicalDeviceImageViewImageFormatInfoEXT; + +typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 filterCubic; + VkBool32 filterCubicMinmax; +} VkFilterCubicImageViewImageFormatPropertiesEXT; + + + +// VK_QCOM_render_pass_shader_resolve is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_render_pass_shader_resolve 1 +#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION 4 +#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME "VK_QCOM_render_pass_shader_resolve" + + +// VK_EXT_global_priority is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_global_priority 1 +#define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2 +#define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority" +typedef VkQueueGlobalPriorityKHR VkQueueGlobalPriorityEXT; + +typedef VkDeviceQueueGlobalPriorityCreateInfoKHR VkDeviceQueueGlobalPriorityCreateInfoEXT; + + + +// VK_EXT_external_memory_host is a preprocessor guard. Do not pass it to API calls. 
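/*
 * Usage sketch (not part of vulkan_core.h): reading back the VK_NV_ray_tracing
 * limits declared earlier in this section. Assumes a Vulkan 1.1+ instance so
 * vkGetPhysicalDeviceProperties2 is available; all struct and enum names come
 * straight from this header.
 */
static void queryRayTracingPropertiesNV(VkPhysicalDevice physicalDevice)
{
    /* Chain the NV ray tracing properties into the pNext chain of the
     * core properties query. */
    VkPhysicalDeviceRayTracingPropertiesNV rtProps = {0};
    rtProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV;

    VkPhysicalDeviceProperties2 props2 = {0};
    props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
    props2.pNext = &rtProps;

    vkGetPhysicalDeviceProperties2(physicalDevice, &props2);
    /* rtProps.shaderGroupHandleSize, maxRecursionDepth, etc. are now valid
     * and are needed to size the shader binding table for vkCmdTraceRaysNV. */
}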
+#define VK_EXT_external_memory_host 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME "VK_EXT_external_memory_host" +typedef struct VkImportMemoryHostPointerInfoEXT { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + void* pHostPointer; +} VkImportMemoryHostPointerInfoEXT; + +typedef struct VkMemoryHostPointerPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryHostPointerPropertiesEXT; + +typedef struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize minImportedHostPointerAlignment; +} VkPhysicalDeviceExternalMemoryHostPropertiesEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryHostPointerPropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void* pHostPointer, + VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); +#endif + + +// VK_AMD_buffer_marker is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_buffer_marker 1 +#define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1 +#define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker" +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); +#endif + + +// VK_AMD_pipeline_compiler_control is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_pipeline_compiler_control 1 +#define VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION 1 +#define VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME "VK_AMD_pipeline_compiler_control" + +typedef enum VkPipelineCompilerControlFlagBitsAMD { + VK_PIPELINE_COMPILER_CONTROL_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF +} VkPipelineCompilerControlFlagBitsAMD; +typedef VkFlags VkPipelineCompilerControlFlagsAMD; +typedef struct VkPipelineCompilerControlCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkPipelineCompilerControlFlagsAMD compilerControlFlags; +} VkPipelineCompilerControlCreateInfoAMD; + + + +// VK_EXT_calibrated_timestamps is a preprocessor guard. Do not pass it to API calls. 
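/*
 * Usage sketch (not part of vulkan_core.h): importing a host allocation with
 * VK_EXT_external_memory_host. Assumes the extension was enabled at device
 * creation, that `ptr` and `size` are aligned to
 * VkPhysicalDeviceExternalMemoryHostPropertiesEXT::minImportedHostPointerAlignment,
 * and that `memoryTypeIndex` is chosen from the returned memoryTypeBits.
 */
static VkResult importHostAllocation(VkDevice device, void* ptr, VkDeviceSize size,
                                     uint32_t memoryTypeIndex, VkDeviceMemory* pMemory)
{
    /* Extension commands are not exported by the loader; fetch the entry point. */
    PFN_vkGetMemoryHostPointerPropertiesEXT pfnGetProps =
        (PFN_vkGetMemoryHostPointerPropertiesEXT)vkGetDeviceProcAddr(
            device, "vkGetMemoryHostPointerPropertiesEXT");

    /* Ask which memory types can back this host pointer. */
    VkMemoryHostPointerPropertiesEXT hostProps = {0};
    hostProps.sType = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT;
    VkResult res = pfnGetProps(device,
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, ptr, &hostProps);
    if (res != VK_SUCCESS)
        return res;

    /* Chain the import info onto a normal allocation. */
    VkImportMemoryHostPointerInfoEXT importInfo = {0};
    importInfo.sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT;
    importInfo.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT;
    importInfo.pHostPointer = ptr;

    VkMemoryAllocateInfo allocInfo = {0};
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.pNext = &importInfo;
    allocInfo.allocationSize = size;
    allocInfo.memoryTypeIndex = memoryTypeIndex;
    return vkAllocateMemory(device, &allocInfo, NULL, pMemory);
}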
+#define VK_EXT_calibrated_timestamps 1 +#define VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION 2 +#define VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_EXT_calibrated_timestamps" +typedef VkTimeDomainKHR VkTimeDomainEXT; + +typedef VkCalibratedTimestampInfoKHR VkCalibratedTimestampInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainKHR* pTimeDomains); +typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsEXT)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + VkPhysicalDevice physicalDevice, + uint32_t* pTimeDomainCount, + VkTimeDomainKHR* pTimeDomains); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsEXT( + VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR* pTimestampInfos, + uint64_t* pTimestamps, + uint64_t* pMaxDeviation); +#endif + + +// VK_AMD_shader_core_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_core_properties 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION 2 +#define VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_AMD_shader_core_properties" +typedef struct VkPhysicalDeviceShaderCorePropertiesAMD { + VkStructureType sType; + void* pNext; + uint32_t shaderEngineCount; + uint32_t shaderArraysPerEngineCount; + uint32_t computeUnitsPerShaderArray; + uint32_t simdPerComputeUnit; + uint32_t wavefrontsPerSimd; + uint32_t wavefrontSize; + uint32_t sgprsPerSimd; + uint32_t minSgprAllocation; + uint32_t maxSgprAllocation; + uint32_t sgprAllocationGranularity; + uint32_t vgprsPerSimd; + uint32_t minVgprAllocation; + uint32_t maxVgprAllocation; + uint32_t vgprAllocationGranularity; +} VkPhysicalDeviceShaderCorePropertiesAMD; + + + +// VK_AMD_memory_overallocation_behavior is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_memory_overallocation_behavior 1 +#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION 1 +#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME "VK_AMD_memory_overallocation_behavior" + +typedef enum VkMemoryOverallocationBehaviorAMD { + VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD = 0, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD = 1, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD = 2, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_MAX_ENUM_AMD = 0x7FFFFFFF +} VkMemoryOverallocationBehaviorAMD; +typedef struct VkDeviceMemoryOverallocationCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkMemoryOverallocationBehaviorAMD overallocationBehavior; +} VkDeviceMemoryOverallocationCreateInfoAMD; + + + +// VK_EXT_vertex_attribute_divisor is a preprocessor guard. Do not pass it to API calls. 
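/*
 * Usage sketch (not part of vulkan_core.h): correlating a GPU timestamp with
 * CLOCK_MONOTONIC via VK_EXT_calibrated_timestamps. Assumes the extension is
 * enabled and that both time domains were previously reported by
 * vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.
 */
static VkResult sampleCalibratedTimestamps(VkDevice device,
                                           uint64_t timestamps[2],
                                           uint64_t* pMaxDeviation)
{
    PFN_vkGetCalibratedTimestampsEXT pfn =
        (PFN_vkGetCalibratedTimestampsEXT)vkGetDeviceProcAddr(
            device, "vkGetCalibratedTimestampsEXT");

    VkCalibratedTimestampInfoEXT infos[2] = {{0}, {0}};
    infos[0].sType = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT;
    infos[0].timeDomain = VK_TIME_DOMAIN_DEVICE_EXT;
    infos[1].sType = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT;
    infos[1].timeDomain = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT;

    /* Both domains are sampled as close together as the implementation can
     * manage; *pMaxDeviation bounds the remaining uncertainty. */
    return pfn(device, 2, infos, timestamps, pMaxDeviation);
}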
+#define VK_EXT_vertex_attribute_divisor 1 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 3 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_EXT_vertex_attribute_divisor" +typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxVertexAttribDivisor; +} VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; + +typedef VkVertexInputBindingDivisorDescriptionKHR VkVertexInputBindingDivisorDescriptionEXT; + +typedef VkPipelineVertexInputDivisorStateCreateInfoKHR VkPipelineVertexInputDivisorStateCreateInfoEXT; + +typedef VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT; + + + +// VK_EXT_pipeline_creation_feedback is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_creation_feedback 1 +#define VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME "VK_EXT_pipeline_creation_feedback" +typedef VkPipelineCreationFeedbackFlagBits VkPipelineCreationFeedbackFlagBitsEXT; + +typedef VkPipelineCreationFeedbackFlags VkPipelineCreationFeedbackFlagsEXT; + +typedef VkPipelineCreationFeedbackCreateInfo VkPipelineCreationFeedbackCreateInfoEXT; + +typedef VkPipelineCreationFeedback VkPipelineCreationFeedbackEXT; + + + +// VK_NV_shader_subgroup_partitioned is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shader_subgroup_partitioned 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_NV_shader_subgroup_partitioned" + + +// VK_NV_compute_shader_derivatives is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_compute_shader_derivatives 1 +#define VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 +#define VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_NV_compute_shader_derivatives" +typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 computeDerivativeGroupQuads; + VkBool32 computeDerivativeGroupLinear; +} VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; + + + +// VK_NV_mesh_shader is a preprocessor guard. Do not pass it to API calls. 
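/*
 * Usage sketch (not part of vulkan_core.h): attaching a per-instance attribute
 * divisor with VK_EXT_vertex_attribute_divisor. Binding 1 and the divisor of 4
 * are illustrative choices; the vertexAttributeInstanceRateDivisor feature is
 * assumed to be enabled. Caller-owned out-parameters are used so the chained
 * structs outlive this helper.
 */
static void addInstanceDivisor(VkPipelineVertexInputStateCreateInfo* pVertexInput,
                               VkPipelineVertexInputDivisorStateCreateInfoEXT* pDivisorState,
                               VkVertexInputBindingDivisorDescriptionEXT* pDivisor)
{
    pDivisor->binding = 1;   /* the instanced vertex buffer binding */
    pDivisor->divisor = 4;   /* attribute advances once every 4 instances */

    pDivisorState->sType =
        VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
    pDivisorState->pNext = pVertexInput->pNext;  /* preserve any existing chain */
    pDivisorState->vertexBindingDivisorCount = 1;
    pDivisorState->pVertexBindingDivisors = pDivisor;

    /* Hook the divisor state into the pipeline's vertex input state. */
    pVertexInput->pNext = pDivisorState;
}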
+#define VK_NV_mesh_shader 1 +#define VK_NV_MESH_SHADER_SPEC_VERSION 1 +#define VK_NV_MESH_SHADER_EXTENSION_NAME "VK_NV_mesh_shader" +typedef struct VkPhysicalDeviceMeshShaderFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 taskShader; + VkBool32 meshShader; +} VkPhysicalDeviceMeshShaderFeaturesNV; + +typedef struct VkPhysicalDeviceMeshShaderPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxDrawMeshTasksCount; + uint32_t maxTaskWorkGroupInvocations; + uint32_t maxTaskWorkGroupSize[3]; + uint32_t maxTaskTotalMemorySize; + uint32_t maxTaskOutputCount; + uint32_t maxMeshWorkGroupInvocations; + uint32_t maxMeshWorkGroupSize[3]; + uint32_t maxMeshTotalMemorySize; + uint32_t maxMeshOutputVertices; + uint32_t maxMeshOutputPrimitives; + uint32_t maxMeshMultiviewViewCount; + uint32_t meshOutputPerVertexGranularity; + uint32_t meshOutputPerPrimitiveGranularity; +} VkPhysicalDeviceMeshShaderPropertiesNV; + +typedef struct VkDrawMeshTasksIndirectCommandNV { + uint32_t taskCount; + uint32_t firstTask; +} VkDrawMeshTasksIndirectCommandNV; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksNV)(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksNV( + VkCommandBuffer commandBuffer, + uint32_t taskCount, + uint32_t firstTask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectNV( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountNV( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + + +// VK_NV_fragment_shader_barycentric is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_fragment_shader_barycentric 1 +#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_NV_fragment_shader_barycentric" +typedef VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV; + + + +// VK_NV_shader_image_footprint is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shader_image_footprint 1 +#define VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION 2 +#define VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME "VK_NV_shader_image_footprint" +typedef struct VkPhysicalDeviceShaderImageFootprintFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 imageFootprint; +} VkPhysicalDeviceShaderImageFootprintFeaturesNV; + + + +// VK_NV_scissor_exclusive is a preprocessor guard. Do not pass it to API calls. 
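/*
 * Usage sketch (not part of vulkan_core.h): recording an NV mesh shading draw.
 * Assumes VK_NV_mesh_shader is enabled, `pipeline` was built with task/mesh
 * stages instead of a vertex stage, and the command buffer is inside a render
 * pass instance.
 */
static void recordMeshDraw(VkDevice device, VkCommandBuffer cmd,
                           VkPipeline pipeline, uint32_t taskCount)
{
    PFN_vkCmdDrawMeshTasksNV pfnDrawMeshTasks =
        (PFN_vkCmdDrawMeshTasksNV)vkGetDeviceProcAddr(device, "vkCmdDrawMeshTasksNV");

    vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
    /* Launch taskCount task workgroups, starting at firstTask = 0. */
    pfnDrawMeshTasks(cmd, taskCount, 0);
}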
+#define VK_NV_scissor_exclusive 1 +#define VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION 2 +#define VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME "VK_NV_scissor_exclusive" +typedef struct VkPipelineViewportExclusiveScissorStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t exclusiveScissorCount; + const VkRect2D* pExclusiveScissors; +} VkPipelineViewportExclusiveScissorStateCreateInfoNV; + +typedef struct VkPhysicalDeviceExclusiveScissorFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 exclusiveScissor; +} VkPhysicalDeviceExclusiveScissorFeaturesNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorEnableNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkBool32* pExclusiveScissorEnables); +typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorEnableNV( + VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkBool32* pExclusiveScissorEnables); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorNV( + VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D* pExclusiveScissors); +#endif + + +// VK_NV_device_diagnostic_checkpoints is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_device_diagnostic_checkpoints 1 +#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION 2 +#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME "VK_NV_device_diagnostic_checkpoints" +typedef struct VkQueueFamilyCheckpointPropertiesNV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags checkpointExecutionStageMask; +} VkQueueFamilyCheckpointPropertiesNV; + +typedef struct VkCheckpointDataNV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlagBits stage; + void* pCheckpointMarker; +} VkCheckpointDataNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetCheckpointNV)(VkCommandBuffer commandBuffer, const void* pCheckpointMarker); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointDataNV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetCheckpointNV( + VkCommandBuffer commandBuffer, + const void* pCheckpointMarker); + +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointDataNV* pCheckpointData); +#endif + + +// VK_INTEL_shader_integer_functions2 is a preprocessor guard. Do not pass it to API calls. +#define VK_INTEL_shader_integer_functions2 1 +#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION 1 +#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME "VK_INTEL_shader_integer_functions2" +typedef struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL { + VkStructureType sType; + void* pNext; + VkBool32 shaderIntegerFunctions2; +} VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + + + +// VK_INTEL_performance_query is a preprocessor guard. Do not pass it to API calls. 
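/*
 * Usage sketch (not part of vulkan_core.h): reading back diagnostic checkpoints
 * with VK_NV_device_diagnostic_checkpoints. Markers recorded with
 * vkCmdSetCheckpointNV are cheap; after a VK_ERROR_DEVICE_LOST this query tells
 * you the last checkpoints the GPU actually reached. The array bound of 16 is
 * an arbitrary choice for the sketch.
 */
static void dumpCheckpoints(VkDevice device, VkQueue queue)
{
    PFN_vkGetQueueCheckpointDataNV pfnGetCheckpoints =
        (PFN_vkGetQueueCheckpointDataNV)vkGetDeviceProcAddr(
            device, "vkGetQueueCheckpointDataNV");

    /* Standard two-call enumeration pattern: count, then fill. */
    uint32_t count = 0;
    pfnGetCheckpoints(queue, &count, NULL);

    VkCheckpointDataNV data[16];
    if (count > 16)
        count = 16;
    for (uint32_t i = 0; i < count; ++i) {
        data[i].sType = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV;
        data[i].pNext = NULL;
    }
    pfnGetCheckpoints(queue, &count, data);
    /* data[i].pCheckpointMarker is whatever pointer was passed to
     * vkCmdSetCheckpointNV for the last checkpoint reaching data[i].stage. */
}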
+#define VK_INTEL_performance_query 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL) +#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 2 +#define VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME "VK_INTEL_performance_query" + +typedef enum VkPerformanceConfigurationTypeINTEL { + VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL = 0, + VK_PERFORMANCE_CONFIGURATION_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceConfigurationTypeINTEL; + +typedef enum VkQueryPoolSamplingModeINTEL { + VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL = 0, + VK_QUERY_POOL_SAMPLING_MODE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkQueryPoolSamplingModeINTEL; + +typedef enum VkPerformanceOverrideTypeINTEL { + VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL = 0, + VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL = 1, + VK_PERFORMANCE_OVERRIDE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceOverrideTypeINTEL; + +typedef enum VkPerformanceParameterTypeINTEL { + VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL = 0, + VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL = 1, + VK_PERFORMANCE_PARAMETER_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceParameterTypeINTEL; + +typedef enum VkPerformanceValueTypeINTEL { + VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL = 0, + VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL = 1, + VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL = 2, + VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL = 3, + VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL = 4, + VK_PERFORMANCE_VALUE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceValueTypeINTEL; +typedef union VkPerformanceValueDataINTEL { + uint32_t value32; + uint64_t value64; + float valueFloat; + VkBool32 valueBool; + const char* valueString; +} VkPerformanceValueDataINTEL; + +typedef struct VkPerformanceValueINTEL { + VkPerformanceValueTypeINTEL type; + VkPerformanceValueDataINTEL data; +} VkPerformanceValueINTEL; + +typedef struct VkInitializePerformanceApiInfoINTEL { + VkStructureType sType; + const void* pNext; + void* pUserData; +} VkInitializePerformanceApiInfoINTEL; + +typedef struct VkQueryPoolPerformanceQueryCreateInfoINTEL { + VkStructureType sType; + const void* pNext; + VkQueryPoolSamplingModeINTEL performanceCountersSampling; +} VkQueryPoolPerformanceQueryCreateInfoINTEL; + +typedef VkQueryPoolPerformanceQueryCreateInfoINTEL VkQueryPoolCreateInfoINTEL; + +typedef struct VkPerformanceMarkerInfoINTEL { + VkStructureType sType; + const void* pNext; + uint64_t marker; +} VkPerformanceMarkerInfoINTEL; + +typedef struct VkPerformanceStreamMarkerInfoINTEL { + VkStructureType sType; + const void* pNext; + uint32_t marker; +} VkPerformanceStreamMarkerInfoINTEL; + +typedef struct VkPerformanceOverrideInfoINTEL { + VkStructureType sType; + const void* pNext; + VkPerformanceOverrideTypeINTEL type; + VkBool32 enable; + uint64_t parameter; +} VkPerformanceOverrideInfoINTEL; + +typedef struct VkPerformanceConfigurationAcquireInfoINTEL { + VkStructureType sType; + const void* pNext; + VkPerformanceConfigurationTypeINTEL type; +} VkPerformanceConfigurationAcquireInfoINTEL; + +typedef VkResult (VKAPI_PTR *PFN_vkInitializePerformanceApiINTEL)(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); +typedef void (VKAPI_PTR *PFN_vkUninitializePerformanceApiINTEL)(VkDevice device); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceStreamMarkerINTEL)(VkCommandBuffer commandBuffer, 
const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceOverrideINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo); +typedef VkResult (VKAPI_PTR *PFN_vkAcquirePerformanceConfigurationINTEL)(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration); +typedef VkResult (VKAPI_PTR *PFN_vkReleasePerformanceConfigurationINTEL)(VkDevice device, VkPerformanceConfigurationINTEL configuration); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSetPerformanceConfigurationINTEL)(VkQueue queue, VkPerformanceConfigurationINTEL configuration); +typedef VkResult (VKAPI_PTR *PFN_vkGetPerformanceParameterINTEL)(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkInitializePerformanceApiINTEL( + VkDevice device, + const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); + +VKAPI_ATTR void VKAPI_CALL vkUninitializePerformanceApiINTEL( + VkDevice device); + +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceMarkerINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceMarkerInfoINTEL* pMarkerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceStreamMarkerINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceOverrideINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceOverrideInfoINTEL* pOverrideInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquirePerformanceConfigurationINTEL( + VkDevice device, + const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, + VkPerformanceConfigurationINTEL* pConfiguration); + +VKAPI_ATTR VkResult VKAPI_CALL vkReleasePerformanceConfigurationINTEL( + VkDevice device, + VkPerformanceConfigurationINTEL configuration); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSetPerformanceConfigurationINTEL( + VkQueue queue, + VkPerformanceConfigurationINTEL configuration); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPerformanceParameterINTEL( + VkDevice device, + VkPerformanceParameterTypeINTEL parameter, + VkPerformanceValueINTEL* pValue); +#endif + + +// VK_EXT_pci_bus_info is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pci_bus_info 1 +#define VK_EXT_PCI_BUS_INFO_SPEC_VERSION 2 +#define VK_EXT_PCI_BUS_INFO_EXTENSION_NAME "VK_EXT_pci_bus_info" +typedef struct VkPhysicalDevicePCIBusInfoPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t pciDomain; + uint32_t pciBus; + uint32_t pciDevice; + uint32_t pciFunction; +} VkPhysicalDevicePCIBusInfoPropertiesEXT; + + + +// VK_AMD_display_native_hdr is a preprocessor guard. Do not pass it to API calls. 
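/*
 * Usage sketch (not part of vulkan_core.h): retrieving the VK_EXT_pci_bus_info
 * properties declared above through the pNext chain of
 * vkGetPhysicalDeviceProperties2 (Vulkan 1.1+ assumed).
 */
static void queryPciBusInfo(VkPhysicalDevice physicalDevice)
{
    VkPhysicalDevicePCIBusInfoPropertiesEXT pci = {0};
    pci.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT;

    VkPhysicalDeviceProperties2 props2 = {0};
    props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
    props2.pNext = &pci;

    vkGetPhysicalDeviceProperties2(physicalDevice, &props2);
    /* pci.pciDomain / pciBus / pciDevice / pciFunction identify the adapter,
     * which is useful for matching a VkPhysicalDevice to an OS-level GPU. */
}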
+#define VK_AMD_display_native_hdr 1 +#define VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION 1 +#define VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME "VK_AMD_display_native_hdr" +typedef struct VkDisplayNativeHdrSurfaceCapabilitiesAMD { + VkStructureType sType; + void* pNext; + VkBool32 localDimmingSupport; +} VkDisplayNativeHdrSurfaceCapabilitiesAMD; + +typedef struct VkSwapchainDisplayNativeHdrCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkBool32 localDimmingEnable; +} VkSwapchainDisplayNativeHdrCreateInfoAMD; + +typedef void (VKAPI_PTR *PFN_vkSetLocalDimmingAMD)(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetLocalDimmingAMD( + VkDevice device, + VkSwapchainKHR swapChain, + VkBool32 localDimmingEnable); +#endif + + +// VK_EXT_fragment_density_map is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_fragment_density_map 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION 2 +#define VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME "VK_EXT_fragment_density_map" +typedef struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMap; + VkBool32 fragmentDensityMapDynamic; + VkBool32 fragmentDensityMapNonSubsampledImages; +} VkPhysicalDeviceFragmentDensityMapFeaturesEXT; + +typedef struct VkPhysicalDeviceFragmentDensityMapPropertiesEXT { + VkStructureType sType; + void* pNext; + VkExtent2D minFragmentDensityTexelSize; + VkExtent2D maxFragmentDensityTexelSize; + VkBool32 fragmentDensityInvocations; +} VkPhysicalDeviceFragmentDensityMapPropertiesEXT; + +typedef struct VkRenderPassFragmentDensityMapCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkAttachmentReference fragmentDensityMapAttachment; +} VkRenderPassFragmentDensityMapCreateInfoEXT; + + + +// VK_EXT_scalar_block_layout is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_scalar_block_layout 1 +#define VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION 1 +#define VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME "VK_EXT_scalar_block_layout" +typedef VkPhysicalDeviceScalarBlockLayoutFeatures VkPhysicalDeviceScalarBlockLayoutFeaturesEXT; + + + +// VK_GOOGLE_hlsl_functionality1 is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_hlsl_functionality1 1 +#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION 1 +#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME "VK_GOOGLE_hlsl_functionality1" +#define VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION +#define VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME + + +// VK_GOOGLE_decorate_string is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_decorate_string 1 +#define VK_GOOGLE_DECORATE_STRING_SPEC_VERSION 1 +#define VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME "VK_GOOGLE_decorate_string" + + +// VK_EXT_subgroup_size_control is a preprocessor guard. Do not pass it to API calls. 
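/*
 * Usage sketch (not part of vulkan_core.h): checking for VK_EXT_fragment_density_map
 * support via the features2 chain (Vulkan 1.1+ assumed). To actually enable the
 * feature, the same struct would be chained into VkDeviceCreateInfo::pNext.
 */
static VkBool32 supportsFragmentDensityMap(VkPhysicalDevice physicalDevice)
{
    VkPhysicalDeviceFragmentDensityMapFeaturesEXT fdm = {0};
    fdm.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT;

    VkPhysicalDeviceFeatures2 features2 = {0};
    features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    features2.pNext = &fdm;

    vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
    return fdm.fragmentDensityMap;
}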
+#define VK_EXT_subgroup_size_control 1 +#define VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION 2 +#define VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME "VK_EXT_subgroup_size_control" +typedef VkPhysicalDeviceSubgroupSizeControlFeatures VkPhysicalDeviceSubgroupSizeControlFeaturesEXT; + +typedef VkPhysicalDeviceSubgroupSizeControlProperties VkPhysicalDeviceSubgroupSizeControlPropertiesEXT; + +typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfo VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + + + +// VK_AMD_shader_core_properties2 is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_core_properties2 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME "VK_AMD_shader_core_properties2" + +typedef enum VkShaderCorePropertiesFlagBitsAMD { + VK_SHADER_CORE_PROPERTIES_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF +} VkShaderCorePropertiesFlagBitsAMD; +typedef VkFlags VkShaderCorePropertiesFlagsAMD; +typedef struct VkPhysicalDeviceShaderCoreProperties2AMD { + VkStructureType sType; + void* pNext; + VkShaderCorePropertiesFlagsAMD shaderCoreFeatures; + uint32_t activeComputeUnitCount; +} VkPhysicalDeviceShaderCoreProperties2AMD; + + + +// VK_AMD_device_coherent_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_device_coherent_memory 1 +#define VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION 1 +#define VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME "VK_AMD_device_coherent_memory" +typedef struct VkPhysicalDeviceCoherentMemoryFeaturesAMD { + VkStructureType sType; + void* pNext; + VkBool32 deviceCoherentMemory; +} VkPhysicalDeviceCoherentMemoryFeaturesAMD; + + + +// VK_EXT_shader_image_atomic_int64 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_image_atomic_int64 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME "VK_EXT_shader_image_atomic_int64" +typedef struct VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderImageInt64Atomics; + VkBool32 sparseImageInt64Atomics; +} VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + + +// VK_EXT_memory_budget is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_memory_budget 1 +#define VK_EXT_MEMORY_BUDGET_SPEC_VERSION 1 +#define VK_EXT_MEMORY_BUDGET_EXTENSION_NAME "VK_EXT_memory_budget" +typedef struct VkPhysicalDeviceMemoryBudgetPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize heapBudget[VK_MAX_MEMORY_HEAPS]; + VkDeviceSize heapUsage[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryBudgetPropertiesEXT; + + + +// VK_EXT_memory_priority is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_memory_priority 1 +#define VK_EXT_MEMORY_PRIORITY_SPEC_VERSION 1 +#define VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME "VK_EXT_memory_priority" +typedef struct VkPhysicalDeviceMemoryPriorityFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 memoryPriority; +} VkPhysicalDeviceMemoryPriorityFeaturesEXT; + +typedef struct VkMemoryPriorityAllocateInfoEXT { + VkStructureType sType; + const void* pNext; + float priority; +} VkMemoryPriorityAllocateInfoEXT; + + + +// VK_NV_dedicated_allocation_image_aliasing is a preprocessor guard. Do not pass it to API calls. 
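/*
 * Usage sketch (not part of vulkan_core.h): polling per-heap budgets with
 * VK_EXT_memory_budget (Vulkan 1.1+ assumed for vkGetPhysicalDeviceMemoryProperties2).
 * The values are estimates and can change at any time, so allocators typically
 * re-query them periodically rather than caching them once.
 */
static void queryMemoryBudget(VkPhysicalDevice physicalDevice)
{
    VkPhysicalDeviceMemoryBudgetPropertiesEXT budget = {0};
    budget.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT;

    VkPhysicalDeviceMemoryProperties2 memProps2 = {0};
    memProps2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    memProps2.pNext = &budget;

    vkGetPhysicalDeviceMemoryProperties2(physicalDevice, &memProps2);
    /* budget.heapBudget[i] estimates how much of heap i this process may
     * allocate; budget.heapUsage[i] is what it currently uses. */
}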
+#define VK_NV_dedicated_allocation_image_aliasing 1 +#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION 1 +#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME "VK_NV_dedicated_allocation_image_aliasing" +typedef struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 dedicatedAllocationImageAliasing; +} VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + + + +// VK_EXT_buffer_device_address is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_buffer_device_address 1 +#define VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 2 +#define VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_EXT_buffer_device_address" +typedef struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; +} VkPhysicalDeviceBufferDeviceAddressFeaturesEXT; + +typedef VkPhysicalDeviceBufferDeviceAddressFeaturesEXT VkPhysicalDeviceBufferAddressFeaturesEXT; + +typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoEXT; + +typedef struct VkBufferDeviceAddressCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceAddress deviceAddress; +} VkBufferDeviceAddressCreateInfoEXT; + +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressEXT)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressEXT( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); +#endif + + +// VK_EXT_tooling_info is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_tooling_info 1 +#define VK_EXT_TOOLING_INFO_SPEC_VERSION 1 +#define VK_EXT_TOOLING_INFO_EXTENSION_NAME "VK_EXT_tooling_info" +typedef VkToolPurposeFlagBits VkToolPurposeFlagBitsEXT; + +typedef VkToolPurposeFlags VkToolPurposeFlagsEXT; + +typedef VkPhysicalDeviceToolProperties VkPhysicalDeviceToolPropertiesEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolPropertiesEXT)(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolPropertiesEXT( + VkPhysicalDevice physicalDevice, + uint32_t* pToolCount, + VkPhysicalDeviceToolProperties* pToolProperties); +#endif + + +// VK_EXT_separate_stencil_usage is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_separate_stencil_usage 1 +#define VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION 1 +#define VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME "VK_EXT_separate_stencil_usage" +typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT; + + + +// VK_EXT_validation_features is a preprocessor guard. Do not pass it to API calls. 
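/*
 * Usage sketch (not part of vulkan_core.h): fetching a GPU virtual address with
 * VK_EXT_buffer_device_address. Assumes the bufferDeviceAddress feature
 * (VkPhysicalDeviceBufferDeviceAddressFeaturesEXT) was enabled at device
 * creation and `buffer` was created with the shader-device-address usage flag.
 */
static VkDeviceAddress getBufferAddressEXT(VkDevice device, VkBuffer buffer)
{
    PFN_vkGetBufferDeviceAddressEXT pfn =
        (PFN_vkGetBufferDeviceAddressEXT)vkGetDeviceProcAddr(
            device, "vkGetBufferDeviceAddressEXT");

    VkBufferDeviceAddressInfo info = {0};
    info.sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO;
    info.buffer = buffer;
    /* The returned address can be written into a push constant or another
     * buffer and dereferenced from shaders via pointer-style access. */
    return pfn(device, &info);
}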
+#define VK_EXT_validation_features 1 +#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 6 +#define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features" + +typedef enum VkValidationFeatureEnableEXT { + VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT = 0, + VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT = 1, + VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT = 2, + VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT = 3, + VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT = 4, + VK_VALIDATION_FEATURE_ENABLE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationFeatureEnableEXT; + +typedef enum VkValidationFeatureDisableEXT { + VK_VALIDATION_FEATURE_DISABLE_ALL_EXT = 0, + VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT = 1, + VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT = 2, + VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT = 3, + VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT = 4, + VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT = 5, + VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT = 6, + VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT = 7, + VK_VALIDATION_FEATURE_DISABLE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationFeatureDisableEXT; +typedef struct VkValidationFeaturesEXT { + VkStructureType sType; + const void* pNext; + uint32_t enabledValidationFeatureCount; + const VkValidationFeatureEnableEXT* pEnabledValidationFeatures; + uint32_t disabledValidationFeatureCount; + const VkValidationFeatureDisableEXT* pDisabledValidationFeatures; +} VkValidationFeaturesEXT; + + + +// VK_NV_cooperative_matrix is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_cooperative_matrix 1 +#define VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION 1 +#define VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_NV_cooperative_matrix" +typedef VkComponentTypeKHR VkComponentTypeNV; + +typedef VkScopeKHR VkScopeNV; + +typedef struct VkCooperativeMatrixPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t MSize; + uint32_t NSize; + uint32_t KSize; + VkComponentTypeNV AType; + VkComponentTypeNV BType; + VkComponentTypeNV CType; + VkComponentTypeNV DType; + VkScopeNV scope; +} VkCooperativeMatrixPropertiesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrixFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cooperativeMatrix; + VkBool32 cooperativeMatrixRobustBufferAccess; +} VkPhysicalDeviceCooperativeMatrixFeaturesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrixPropertiesNV { + VkStructureType sType; + void* pNext; + VkShaderStageFlags cooperativeMatrixSupportedStages; +} VkPhysicalDeviceCooperativeMatrixPropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixPropertiesNV* pProperties); +#endif + + +// VK_NV_coverage_reduction_mode is a preprocessor guard. Do not pass it to API calls. 
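/*
 * Usage sketch (not part of vulkan_core.h): turning on debug printf through
 * VkValidationFeaturesEXT at instance creation. Assumes `pBase` already enables
 * the validation layer (e.g. VK_LAYER_KHRONOS_validation), since this struct is
 * consumed by the layer, not the driver.
 */
static VkResult createInstanceWithDebugPrintf(const VkInstanceCreateInfo* pBase,
                                              VkInstance* pInstance)
{
    static const VkValidationFeatureEnableEXT enables[] = {
        VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT,
    };

    VkValidationFeaturesEXT validation = {0};
    validation.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
    validation.pNext = pBase->pNext;  /* preserve the caller's chain */
    validation.enabledValidationFeatureCount = 1;
    validation.pEnabledValidationFeatures = enables;

    VkInstanceCreateInfo createInfo = *pBase;  /* copy, then extend the chain */
    createInfo.pNext = &validation;
    return vkCreateInstance(&createInfo, NULL, pInstance);
}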
+#define VK_NV_coverage_reduction_mode 1 +#define VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION 1 +#define VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME "VK_NV_coverage_reduction_mode" + +typedef enum VkCoverageReductionModeNV { + VK_COVERAGE_REDUCTION_MODE_MERGE_NV = 0, + VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV = 1, + VK_COVERAGE_REDUCTION_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoverageReductionModeNV; +typedef VkFlags VkPipelineCoverageReductionStateCreateFlagsNV; +typedef struct VkPhysicalDeviceCoverageReductionModeFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 coverageReductionMode; +} VkPhysicalDeviceCoverageReductionModeFeaturesNV; + +typedef struct VkPipelineCoverageReductionStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageReductionStateCreateFlagsNV flags; + VkCoverageReductionModeNV coverageReductionMode; +} VkPipelineCoverageReductionStateCreateInfoNV; + +typedef struct VkFramebufferMixedSamplesCombinationNV { + VkStructureType sType; + void* pNext; + VkCoverageReductionModeNV coverageReductionMode; + VkSampleCountFlagBits rasterizationSamples; + VkSampleCountFlags depthStencilSamples; + VkSampleCountFlags colorSamples; +} VkFramebufferMixedSamplesCombinationNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV)(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, + uint32_t* pCombinationCount, + VkFramebufferMixedSamplesCombinationNV* pCombinations); +#endif + + +// VK_EXT_fragment_shader_interlock is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_fragment_shader_interlock 1 +#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME "VK_EXT_fragment_shader_interlock" +typedef struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShaderSampleInterlock; + VkBool32 fragmentShaderPixelInterlock; + VkBool32 fragmentShaderShadingRateInterlock; +} VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT; + + + +// VK_EXT_ycbcr_image_arrays is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_ycbcr_image_arrays 1 +#define VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION 1 +#define VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME "VK_EXT_ycbcr_image_arrays" +typedef struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 ycbcrImageArrays; +} VkPhysicalDeviceYcbcrImageArraysFeaturesEXT; + + + +// VK_EXT_provoking_vertex is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_provoking_vertex 1 +#define VK_EXT_PROVOKING_VERTEX_SPEC_VERSION 1 +#define VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME "VK_EXT_provoking_vertex" + +typedef enum VkProvokingVertexModeEXT { + VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT = 0, + VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT = 1, + VK_PROVOKING_VERTEX_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkProvokingVertexModeEXT; +typedef struct VkPhysicalDeviceProvokingVertexFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 provokingVertexLast; + VkBool32 transformFeedbackPreservesProvokingVertex; +} VkPhysicalDeviceProvokingVertexFeaturesEXT; + +typedef struct VkPhysicalDeviceProvokingVertexPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 provokingVertexModePerPipeline; + VkBool32 transformFeedbackPreservesTriangleFanProvokingVertex; +} VkPhysicalDeviceProvokingVertexPropertiesEXT; + +typedef struct VkPipelineRasterizationProvokingVertexStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkProvokingVertexModeEXT provokingVertexMode; +} VkPipelineRasterizationProvokingVertexStateCreateInfoEXT; + + + +// VK_EXT_headless_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_headless_surface 1 +#define VK_EXT_HEADLESS_SURFACE_SPEC_VERSION 1 +#define VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME "VK_EXT_headless_surface" +typedef VkFlags VkHeadlessSurfaceCreateFlagsEXT; +typedef struct VkHeadlessSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkHeadlessSurfaceCreateFlagsEXT flags; +} VkHeadlessSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateHeadlessSurfaceEXT)(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT( + VkInstance instance, + const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_EXT_line_rasterization is a preprocessor guard. Do not pass it to API calls. 
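/*
 * Usage sketch (not part of vulkan_core.h): creating a window-system-free
 * surface with VK_EXT_headless_surface, which is handy for exercising the
 * swapchain path in CI. Assumes the extension was enabled on the instance.
 */
static VkResult createHeadlessSurface(VkInstance instance, VkSurfaceKHR* pSurface)
{
    PFN_vkCreateHeadlessSurfaceEXT pfn =
        (PFN_vkCreateHeadlessSurfaceEXT)vkGetInstanceProcAddr(
            instance, "vkCreateHeadlessSurfaceEXT");

    VkHeadlessSurfaceCreateInfoEXT info = {0};
    info.sType = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT;
    /* flags is reserved; the struct carries no other parameters. */
    return pfn(instance, &info, NULL, pSurface);
}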
+#define VK_EXT_line_rasterization 1 +#define VK_EXT_LINE_RASTERIZATION_SPEC_VERSION 1 +#define VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME "VK_EXT_line_rasterization" + +typedef enum VkLineRasterizationModeEXT { + VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT = 0, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT = 1, + VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT = 2, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT = 3, + VK_LINE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkLineRasterizationModeEXT; +typedef struct VkPhysicalDeviceLineRasterizationFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 rectangularLines; + VkBool32 bresenhamLines; + VkBool32 smoothLines; + VkBool32 stippledRectangularLines; + VkBool32 stippledBresenhamLines; + VkBool32 stippledSmoothLines; +} VkPhysicalDeviceLineRasterizationFeaturesEXT; + +typedef struct VkPhysicalDeviceLineRasterizationPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t lineSubPixelPrecisionBits; +} VkPhysicalDeviceLineRasterizationPropertiesEXT; + +typedef struct VkPipelineRasterizationLineStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkLineRasterizationModeEXT lineRasterizationMode; + VkBool32 stippledLineEnable; + uint32_t lineStippleFactor; + uint16_t lineStipplePattern; +} VkPipelineRasterizationLineStateCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEXT)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT( + VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern); +#endif + + +// VK_EXT_shader_atomic_float is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_atomic_float 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME "VK_EXT_shader_atomic_float" +typedef struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferFloat32Atomics; + VkBool32 shaderBufferFloat32AtomicAdd; + VkBool32 shaderBufferFloat64Atomics; + VkBool32 shaderBufferFloat64AtomicAdd; + VkBool32 shaderSharedFloat32Atomics; + VkBool32 shaderSharedFloat32AtomicAdd; + VkBool32 shaderSharedFloat64Atomics; + VkBool32 shaderSharedFloat64AtomicAdd; + VkBool32 shaderImageFloat32Atomics; + VkBool32 shaderImageFloat32AtomicAdd; + VkBool32 sparseImageFloat32Atomics; + VkBool32 sparseImageFloat32AtomicAdd; +} VkPhysicalDeviceShaderAtomicFloatFeaturesEXT; + + + +// VK_EXT_host_query_reset is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_host_query_reset 1 +#define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1 +#define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset" +typedef VkPhysicalDeviceHostQueryResetFeatures VkPhysicalDeviceHostQueryResetFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkResetQueryPoolEXT)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkResetQueryPoolEXT( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); +#endif + + +// VK_EXT_index_type_uint8 is a preprocessor guard. Do not pass it to API calls. 
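/*
 * Usage sketch (not part of vulkan_core.h): requesting stippled Bresenham lines
 * with VK_EXT_line_rasterization. Assumes the bresenhamLines and
 * stippledBresenhamLines features are enabled; the factor/pattern values are
 * illustrative. The caller owns `pLineState` so the chain stays valid.
 */
static void enableStippledLines(VkPipelineRasterizationStateCreateInfo* pRaster,
                                VkPipelineRasterizationLineStateCreateInfoEXT* pLineState)
{
    pLineState->sType =
        VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
    pLineState->pNext = pRaster->pNext;  /* preserve any existing chain */
    pLineState->lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT;
    pLineState->stippledLineEnable = VK_TRUE;
    pLineState->lineStippleFactor = 2;        /* each pattern bit covers 2 pixels */
    pLineState->lineStipplePattern = 0x00FF;  /* 8 pixels on, 8 pixels off */

    pRaster->pNext = pLineState;
}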
+#define VK_EXT_index_type_uint8 1 +#define VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION 1 +#define VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_EXT_index_type_uint8" +typedef struct VkPhysicalDeviceIndexTypeUint8FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 indexTypeUint8; +} VkPhysicalDeviceIndexTypeUint8FeaturesEXT; + + + +// VK_EXT_extended_dynamic_state is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_extended_dynamic_state 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_extended_dynamic_state" +typedef struct VkPhysicalDeviceExtendedDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState; +} VkPhysicalDeviceExtendedDynamicStateFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetCullModeEXT)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFaceEXT)(VkCommandBuffer commandBuffer, VkFrontFace frontFace); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopologyEXT)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2EXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOpEXT)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOpEXT)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetCullModeEXT( + VkCommandBuffer commandBuffer, + VkCullModeFlags cullMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFaceEXT( + VkCommandBuffer commandBuffer, + VkFrontFace frontFace); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopologyEXT( + VkCommandBuffer commandBuffer, + VkPrimitiveTopology primitiveTopology); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2EXT( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes, + const VkDeviceSize* pStrides); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthTestEnable); + 
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthWriteEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOpEXT( + VkCommandBuffer commandBuffer, + VkCompareOp depthCompareOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthBoundsTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 stencilTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOpEXT( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp); +#endif + + +// VK_EXT_host_image_copy is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_host_image_copy 1 +#define VK_EXT_HOST_IMAGE_COPY_SPEC_VERSION 1 +#define VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME "VK_EXT_host_image_copy" + +typedef enum VkHostImageCopyFlagBitsEXT { + VK_HOST_IMAGE_COPY_MEMCPY_EXT = 0x00000001, + VK_HOST_IMAGE_COPY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkHostImageCopyFlagBitsEXT; +typedef VkFlags VkHostImageCopyFlagsEXT; +typedef struct VkPhysicalDeviceHostImageCopyFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 hostImageCopy; +} VkPhysicalDeviceHostImageCopyFeaturesEXT; + +typedef struct VkPhysicalDeviceHostImageCopyPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t copySrcLayoutCount; + VkImageLayout* pCopySrcLayouts; + uint32_t copyDstLayoutCount; + VkImageLayout* pCopyDstLayouts; + uint8_t optimalTilingLayoutUUID[VK_UUID_SIZE]; + VkBool32 identicalMemoryTypeRequirements; +} VkPhysicalDeviceHostImageCopyPropertiesEXT; + +typedef struct VkMemoryToImageCopyEXT { + VkStructureType sType; + const void* pNext; + const void* pHostPointer; + uint32_t memoryRowLength; + uint32_t memoryImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkMemoryToImageCopyEXT; + +typedef struct VkImageToMemoryCopyEXT { + VkStructureType sType; + const void* pNext; + void* pHostPointer; + uint32_t memoryRowLength; + uint32_t memoryImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkImageToMemoryCopyEXT; + +typedef struct VkCopyMemoryToImageInfoEXT { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlagsEXT flags; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkMemoryToImageCopyEXT* pRegions; +} VkCopyMemoryToImageInfoEXT; + +typedef struct VkCopyImageToMemoryInfoEXT { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlagsEXT flags; + VkImage srcImage; + VkImageLayout srcImageLayout; + uint32_t regionCount; + const VkImageToMemoryCopyEXT* pRegions; +} VkCopyImageToMemoryInfoEXT; + +typedef struct VkCopyImageToImageInfoEXT { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlagsEXT flags; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageCopy2* pRegions; +} VkCopyImageToImageInfoEXT; + +typedef struct VkHostImageLayoutTransitionInfoEXT { + VkStructureType sType; + const void* pNext; + VkImage image; + VkImageLayout oldLayout; + VkImageLayout newLayout; + VkImageSubresourceRange subresourceRange; +} VkHostImageLayoutTransitionInfoEXT; + +typedef struct VkSubresourceHostMemcpySizeEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize size; +} 
VkSubresourceHostMemcpySizeEXT; + +typedef struct VkHostImageCopyDevicePerformanceQueryEXT { + VkStructureType sType; + void* pNext; + VkBool32 optimalDeviceAccess; + VkBool32 identicalMemoryLayout; +} VkHostImageCopyDevicePerformanceQueryEXT; + +typedef VkSubresourceLayout2KHR VkSubresourceLayout2EXT; + +typedef VkImageSubresource2KHR VkImageSubresource2EXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToImageEXT)(VkDevice device, const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToMemoryEXT)(VkDevice device, const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToImageEXT)(VkDevice device, const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkTransitionImageLayoutEXT)(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT* pTransitions); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2KHR* pSubresource, VkSubresourceLayout2KHR* pLayout); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToImageEXT( + VkDevice device, + const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToMemoryEXT( + VkDevice device, + const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToImageEXT( + VkDevice device, + const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkTransitionImageLayoutEXT( + VkDevice device, + uint32_t transitionCount, + const VkHostImageLayoutTransitionInfoEXT* pTransitions); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT( + VkDevice device, + VkImage image, + const VkImageSubresource2KHR* pSubresource, + VkSubresourceLayout2KHR* pLayout); +#endif + + +// VK_EXT_shader_atomic_float2 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_atomic_float2 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_2_EXTENSION_NAME "VK_EXT_shader_atomic_float2" +typedef struct VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferFloat16Atomics; + VkBool32 shaderBufferFloat16AtomicAdd; + VkBool32 shaderBufferFloat16AtomicMinMax; + VkBool32 shaderBufferFloat32AtomicMinMax; + VkBool32 shaderBufferFloat64AtomicMinMax; + VkBool32 shaderSharedFloat16Atomics; + VkBool32 shaderSharedFloat16AtomicAdd; + VkBool32 shaderSharedFloat16AtomicMinMax; + VkBool32 shaderSharedFloat32AtomicMinMax; + VkBool32 shaderSharedFloat64AtomicMinMax; + VkBool32 shaderImageFloat32AtomicMinMax; + VkBool32 sparseImageFloat32AtomicMinMax; +} VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT; + + + +// VK_EXT_surface_maintenance1 is a preprocessor guard. Do not pass it to API calls. 
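/*
 * Usage sketch (not part of vulkan_core.h): uploading pixel data without a
 * command buffer using VK_EXT_host_image_copy. Assumes the hostImageCopy
 * feature is enabled and `image` is a single-mip, single-layer color image
 * created with the host-transfer usage bit; `pixels` is tightly packed.
 */
static VkResult uploadWithHostCopy(VkDevice device, VkImage image,
                                   const void* pixels, VkExtent3D extent)
{
    PFN_vkTransitionImageLayoutEXT pfnTransition =
        (PFN_vkTransitionImageLayoutEXT)vkGetDeviceProcAddr(
            device, "vkTransitionImageLayoutEXT");
    PFN_vkCopyMemoryToImageEXT pfnCopy =
        (PFN_vkCopyMemoryToImageEXT)vkGetDeviceProcAddr(
            device, "vkCopyMemoryToImageEXT");

    /* Transition to GENERAL directly from the host, no queue submission. */
    VkHostImageLayoutTransitionInfoEXT transition = {0};
    transition.sType = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT;
    transition.image = image;
    transition.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    transition.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    transition.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    transition.subresourceRange.levelCount = 1;
    transition.subresourceRange.layerCount = 1;
    VkResult res = pfnTransition(device, 1, &transition);
    if (res != VK_SUCCESS)
        return res;

    VkMemoryToImageCopyEXT region = {0};
    region.sType = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT;
    region.pHostPointer = pixels;  /* rowLength/imageHeight of 0 = tightly packed */
    region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    region.imageSubresource.layerCount = 1;
    region.imageExtent = extent;

    VkCopyMemoryToImageInfoEXT copyInfo = {0};
    copyInfo.sType = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT;
    copyInfo.dstImage = image;
    copyInfo.dstImageLayout = VK_IMAGE_LAYOUT_GENERAL;
    copyInfo.regionCount = 1;
    copyInfo.pRegions = &region;
    return pfnCopy(device, &copyInfo);
}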
+#define VK_EXT_surface_maintenance1 1 +#define VK_EXT_SURFACE_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME "VK_EXT_surface_maintenance1" + +typedef enum VkPresentScalingFlagBitsEXT { + VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT = 0x00000001, + VK_PRESENT_SCALING_ASPECT_RATIO_STRETCH_BIT_EXT = 0x00000002, + VK_PRESENT_SCALING_STRETCH_BIT_EXT = 0x00000004, + VK_PRESENT_SCALING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkPresentScalingFlagBitsEXT; +typedef VkFlags VkPresentScalingFlagsEXT; + +typedef enum VkPresentGravityFlagBitsEXT { + VK_PRESENT_GRAVITY_MIN_BIT_EXT = 0x00000001, + VK_PRESENT_GRAVITY_MAX_BIT_EXT = 0x00000002, + VK_PRESENT_GRAVITY_CENTERED_BIT_EXT = 0x00000004, + VK_PRESENT_GRAVITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkPresentGravityFlagBitsEXT; +typedef VkFlags VkPresentGravityFlagsEXT; +typedef struct VkSurfacePresentModeEXT { + VkStructureType sType; + void* pNext; + VkPresentModeKHR presentMode; +} VkSurfacePresentModeEXT; + +typedef struct VkSurfacePresentScalingCapabilitiesEXT { + VkStructureType sType; + void* pNext; + VkPresentScalingFlagsEXT supportedPresentScaling; + VkPresentGravityFlagsEXT supportedPresentGravityX; + VkPresentGravityFlagsEXT supportedPresentGravityY; + VkExtent2D minScaledImageExtent; + VkExtent2D maxScaledImageExtent; +} VkSurfacePresentScalingCapabilitiesEXT; + +typedef struct VkSurfacePresentModeCompatibilityEXT { + VkStructureType sType; + void* pNext; + uint32_t presentModeCount; + VkPresentModeKHR* pPresentModes; +} VkSurfacePresentModeCompatibilityEXT; + + + +// VK_EXT_swapchain_maintenance1 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_swapchain_maintenance1 1 +#define VK_EXT_SWAPCHAIN_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME "VK_EXT_swapchain_maintenance1" +typedef struct VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 swapchainMaintenance1; +} VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT; + +typedef struct VkSwapchainPresentFenceInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkFence* pFences; +} VkSwapchainPresentFenceInfoEXT; + +typedef struct VkSwapchainPresentModesCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t presentModeCount; + const VkPresentModeKHR* pPresentModes; +} VkSwapchainPresentModesCreateInfoEXT; + +typedef struct VkSwapchainPresentModeInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentModeKHR* pPresentModes; +} VkSwapchainPresentModeInfoEXT; + +typedef struct VkSwapchainPresentScalingCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPresentScalingFlagsEXT scalingBehavior; + VkPresentGravityFlagsEXT presentGravityX; + VkPresentGravityFlagsEXT presentGravityY; +} VkSwapchainPresentScalingCreateInfoEXT; + +typedef struct VkReleaseSwapchainImagesInfoEXT { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint32_t imageIndexCount; + const uint32_t* pImageIndices; +} VkReleaseSwapchainImagesInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkReleaseSwapchainImagesEXT)(VkDevice device, const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseSwapchainImagesEXT( + VkDevice device, + const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo); +#endif + + +// VK_EXT_shader_demote_to_helper_invocation is a preprocessor guard. Do not pass it to API calls. 
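+/*
+ * Illustrative sketch, not part of the Khronos header (kept out of
+ * compilation with #if 0): using the VK_EXT_swapchain_maintenance1
+ * structures declared above to attach a fence to a present operation via
+ * the pNext chain of VkPresentInfoKHR. `presentFence`, `swapchain`,
+ * `imageIndex` and `queue` are assumed to exist and the extension is
+ * assumed to be enabled on the device.
+ */
+#if 0
+VkSwapchainPresentFenceInfoEXT fenceInfo = {
+    .sType = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT,
+    .swapchainCount = 1,
+    .pFences = &presentFence,
+};
+VkPresentInfoKHR presentInfo = {
+    .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
+    .pNext = &fenceInfo,
+    .swapchainCount = 1,
+    .pSwapchains = &swapchain,
+    .pImageIndices = &imageIndex,
+};
+vkQueuePresentKHR(queue, &presentInfo);
+#endif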
+#define VK_EXT_shader_demote_to_helper_invocation 1 +#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION 1 +#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME "VK_EXT_shader_demote_to_helper_invocation" +typedef VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + + + +// VK_NV_device_generated_commands is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_device_generated_commands 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV) +#define VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 +#define VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NV_device_generated_commands" + +typedef enum VkIndirectCommandsTokenTypeNV { + VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV = 0, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV = 1, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV = 2, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV = 3, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV = 4, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV = 5, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV = 1000328000, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV = 1000428003, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV = 1000428004, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectCommandsTokenTypeNV; + +typedef enum VkIndirectStateFlagBitsNV { + VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV = 0x00000001, + VK_INDIRECT_STATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectStateFlagBitsNV; +typedef VkFlags VkIndirectStateFlagsNV; + +typedef enum VkIndirectCommandsLayoutUsageFlagBitsNV { + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV = 0x00000001, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV = 0x00000002, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV = 0x00000004, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectCommandsLayoutUsageFlagBitsNV; +typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNV; +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxGraphicsShaderGroupCount; + uint32_t maxIndirectSequenceCount; + uint32_t maxIndirectCommandsTokenCount; + uint32_t maxIndirectCommandsStreamCount; + uint32_t maxIndirectCommandsTokenOffset; + uint32_t maxIndirectCommandsStreamStride; + uint32_t minSequencesCountBufferOffsetAlignment; + uint32_t minSequencesIndexBufferOffsetAlignment; + uint32_t minIndirectCommandsBufferOffsetAlignment; +} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCommands; +} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + +typedef struct VkGraphicsShaderGroupCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; +} VkGraphicsShaderGroupCreateInfoNV; + +typedef struct VkGraphicsPipelineShaderGroupsCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t groupCount; + const VkGraphicsShaderGroupCreateInfoNV* pGroups; + uint32_t pipelineCount; + const VkPipeline* pPipelines; +} VkGraphicsPipelineShaderGroupsCreateInfoNV; + +typedef 
struct VkBindShaderGroupIndirectCommandNV { + uint32_t groupIndex; +} VkBindShaderGroupIndirectCommandNV; + +typedef struct VkBindIndexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + VkIndexType indexType; +} VkBindIndexBufferIndirectCommandNV; + +typedef struct VkBindVertexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + uint32_t stride; +} VkBindVertexBufferIndirectCommandNV; + +typedef struct VkSetStateFlagsIndirectCommandNV { + uint32_t data; +} VkSetStateFlagsIndirectCommandNV; + +typedef struct VkIndirectCommandsStreamNV { + VkBuffer buffer; + VkDeviceSize offset; +} VkIndirectCommandsStreamNV; + +typedef struct VkIndirectCommandsLayoutTokenNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsTokenTypeNV tokenType; + uint32_t stream; + uint32_t offset; + uint32_t vertexBindingUnit; + VkBool32 vertexDynamicStride; + VkPipelineLayout pushconstantPipelineLayout; + VkShaderStageFlags pushconstantShaderStageFlags; + uint32_t pushconstantOffset; + uint32_t pushconstantSize; + VkIndirectStateFlagsNV indirectStateFlags; + uint32_t indexTypeCount; + const VkIndexType* pIndexTypes; + const uint32_t* pIndexTypeValues; +} VkIndirectCommandsLayoutTokenNV; + +typedef struct VkIndirectCommandsLayoutCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsLayoutUsageFlagsNV flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t tokenCount; + const VkIndirectCommandsLayoutTokenNV* pTokens; + uint32_t streamCount; + const uint32_t* pStreamStrides; +} VkIndirectCommandsLayoutCreateInfoNV; + +typedef struct VkGeneratedCommandsInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t streamCount; + const VkIndirectCommandsStreamNV* pStreams; + uint32_t sequencesCount; + VkBuffer preprocessBuffer; + VkDeviceSize preprocessOffset; + VkDeviceSize preprocessSize; + VkBuffer sequencesCountBuffer; + VkDeviceSize sequencesCountOffset; + VkBuffer sequencesIndexBuffer; + VkDeviceSize sequencesIndexOffset; +} VkGeneratedCommandsInfoNV; + +typedef struct VkGeneratedCommandsMemoryRequirementsInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t maxSequencesCount; +} VkGeneratedCommandsMemoryRequirementsInfoNV; + +typedef void (VKAPI_PTR *PFN_vkGetGeneratedCommandsMemoryRequirementsNV)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsNV)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsNV)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipelineShaderGroupNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNV)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNV)(VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, 
const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetGeneratedCommandsMemoryRequirementsNV( + VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipelineShaderGroupNV( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNV( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNV( + VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator); +#endif + + +// VK_NV_inherited_viewport_scissor is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_inherited_viewport_scissor 1 +#define VK_NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION 1 +#define VK_NV_INHERITED_VIEWPORT_SCISSOR_EXTENSION_NAME "VK_NV_inherited_viewport_scissor" +typedef struct VkPhysicalDeviceInheritedViewportScissorFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 inheritedViewportScissor2D; +} VkPhysicalDeviceInheritedViewportScissorFeaturesNV; + +typedef struct VkCommandBufferInheritanceViewportScissorInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportScissor2D; + uint32_t viewportDepthCount; + const VkViewport* pViewportDepths; +} VkCommandBufferInheritanceViewportScissorInfoNV; + + + +// VK_EXT_texel_buffer_alignment is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_texel_buffer_alignment 1 +#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1 +#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment" +typedef struct VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 texelBufferAlignment; +} VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT; + +typedef VkPhysicalDeviceTexelBufferAlignmentProperties VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT; + + + +// VK_QCOM_render_pass_transform is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_render_pass_transform 1 +#define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 3 +#define VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME "VK_QCOM_render_pass_transform" +typedef struct VkRenderPassTransformBeginInfoQCOM { + VkStructureType sType; + void* pNext; + VkSurfaceTransformFlagBitsKHR transform; +} VkRenderPassTransformBeginInfoQCOM; + +typedef struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM { + VkStructureType sType; + void* pNext; + VkSurfaceTransformFlagBitsKHR transform; + VkRect2D renderArea; +} VkCommandBufferInheritanceRenderPassTransformInfoQCOM; + + + +// VK_EXT_depth_bias_control is a preprocessor guard. Do not pass it to API calls. 
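+/*
+ * Illustrative sketch, not part of the Khronos header (kept out of
+ * compilation with #if 0): recording the extended depth-bias command
+ * declared below, selecting an explicit float representation through the
+ * pNext chain of VkDepthBiasInfoEXT. `commandBuffer` is assumed to be in
+ * the recording state; the factor values are arbitrary examples.
+ */
+#if 0
+VkDepthBiasRepresentationInfoEXT representation = {
+    .sType = VK_STRUCTURE_TYPE_DEPTH_BIAS_REPRESENTATION_INFO_EXT,
+    .depthBiasRepresentation = VK_DEPTH_BIAS_REPRESENTATION_FLOAT_EXT,
+    .depthBiasExact = VK_FALSE,
+};
+VkDepthBiasInfoEXT biasInfo = {
+    .sType = VK_STRUCTURE_TYPE_DEPTH_BIAS_INFO_EXT,
+    .pNext = &representation,
+    .depthBiasConstantFactor = 1.25f,
+    .depthBiasClamp = 0.0f,
+    .depthBiasSlopeFactor = 1.75f,
+};
+vkCmdSetDepthBias2EXT(commandBuffer, &biasInfo);
+#endif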
+#define VK_EXT_depth_bias_control 1 +#define VK_EXT_DEPTH_BIAS_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DEPTH_BIAS_CONTROL_EXTENSION_NAME "VK_EXT_depth_bias_control" + +typedef enum VkDepthBiasRepresentationEXT { + VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORMAT_EXT = 0, + VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORCE_UNORM_EXT = 1, + VK_DEPTH_BIAS_REPRESENTATION_FLOAT_EXT = 2, + VK_DEPTH_BIAS_REPRESENTATION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDepthBiasRepresentationEXT; +typedef struct VkPhysicalDeviceDepthBiasControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthBiasControl; + VkBool32 leastRepresentableValueForceUnormRepresentation; + VkBool32 floatRepresentation; + VkBool32 depthBiasExact; +} VkPhysicalDeviceDepthBiasControlFeaturesEXT; + +typedef struct VkDepthBiasInfoEXT { + VkStructureType sType; + const void* pNext; + float depthBiasConstantFactor; + float depthBiasClamp; + float depthBiasSlopeFactor; +} VkDepthBiasInfoEXT; + +typedef struct VkDepthBiasRepresentationInfoEXT { + VkStructureType sType; + const void* pNext; + VkDepthBiasRepresentationEXT depthBiasRepresentation; + VkBool32 depthBiasExact; +} VkDepthBiasRepresentationInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias2EXT)(VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT* pDepthBiasInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias2EXT( + VkCommandBuffer commandBuffer, + const VkDepthBiasInfoEXT* pDepthBiasInfo); +#endif + + +// VK_EXT_device_memory_report is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_device_memory_report 1 +#define VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION 2 +#define VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME "VK_EXT_device_memory_report" + +typedef enum VkDeviceMemoryReportEventTypeEXT { + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT = 0, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT = 1, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT = 2, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT = 3, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT = 4, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceMemoryReportEventTypeEXT; +typedef VkFlags VkDeviceMemoryReportFlagsEXT; +typedef struct VkPhysicalDeviceDeviceMemoryReportFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 deviceMemoryReport; +} VkPhysicalDeviceDeviceMemoryReportFeaturesEXT; + +typedef struct VkDeviceMemoryReportCallbackDataEXT { + VkStructureType sType; + void* pNext; + VkDeviceMemoryReportFlagsEXT flags; + VkDeviceMemoryReportEventTypeEXT type; + uint64_t memoryObjectId; + VkDeviceSize size; + VkObjectType objectType; + uint64_t objectHandle; + uint32_t heapIndex; +} VkDeviceMemoryReportCallbackDataEXT; + +typedef void (VKAPI_PTR *PFN_vkDeviceMemoryReportCallbackEXT)( + const VkDeviceMemoryReportCallbackDataEXT* pCallbackData, + void* pUserData); + +typedef struct VkDeviceDeviceMemoryReportCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceMemoryReportFlagsEXT flags; + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback; + void* pUserData; +} VkDeviceDeviceMemoryReportCreateInfoEXT; + + + +// VK_EXT_acquire_drm_display is a preprocessor guard. Do not pass it to API calls. 
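+/*
+ * Illustrative sketch, not part of the Khronos header (kept out of
+ * compilation with #if 0): resolving and acquiring a DRM display with the
+ * entry points declared below. The DRM file descriptor `drmFd` and the
+ * `connectorId` would come from libdrm (e.g. drmModeGetResources); both
+ * are assumptions here.
+ */
+#if 0
+VkDisplayKHR display = VK_NULL_HANDLE;
+VkResult res = vkGetDrmDisplayEXT(physicalDevice, drmFd, connectorId, &display);
+if (res == VK_SUCCESS)
+    res = vkAcquireDrmDisplayEXT(physicalDevice, drmFd, display);
+#endif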
+#define VK_EXT_acquire_drm_display 1 +#define VK_EXT_ACQUIRE_DRM_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_ACQUIRE_DRM_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_drm_display" +typedef VkResult (VKAPI_PTR *PFN_vkAcquireDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR* display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireDrmDisplayEXT( + VkPhysicalDevice physicalDevice, + int32_t drmFd, + VkDisplayKHR display); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDrmDisplayEXT( + VkPhysicalDevice physicalDevice, + int32_t drmFd, + uint32_t connectorId, + VkDisplayKHR* display); +#endif + + +// VK_EXT_robustness2 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_robustness2 1 +#define VK_EXT_ROBUSTNESS_2_SPEC_VERSION 1 +#define VK_EXT_ROBUSTNESS_2_EXTENSION_NAME "VK_EXT_robustness2" +typedef struct VkPhysicalDeviceRobustness2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 robustBufferAccess2; + VkBool32 robustImageAccess2; + VkBool32 nullDescriptor; +} VkPhysicalDeviceRobustness2FeaturesEXT; + +typedef struct VkPhysicalDeviceRobustness2PropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize robustStorageBufferAccessSizeAlignment; + VkDeviceSize robustUniformBufferAccessSizeAlignment; +} VkPhysicalDeviceRobustness2PropertiesEXT; + + + +// VK_EXT_custom_border_color is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_custom_border_color 1 +#define VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION 12 +#define VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME "VK_EXT_custom_border_color" +typedef struct VkSamplerCustomBorderColorCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkClearColorValue customBorderColor; + VkFormat format; +} VkSamplerCustomBorderColorCreateInfoEXT; + +typedef struct VkPhysicalDeviceCustomBorderColorPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxCustomBorderColorSamplers; +} VkPhysicalDeviceCustomBorderColorPropertiesEXT; + +typedef struct VkPhysicalDeviceCustomBorderColorFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 customBorderColors; + VkBool32 customBorderColorWithoutFormat; +} VkPhysicalDeviceCustomBorderColorFeaturesEXT; + + + +// VK_GOOGLE_user_type is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_user_type 1 +#define VK_GOOGLE_USER_TYPE_SPEC_VERSION 1 +#define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type" + + +// VK_NV_present_barrier is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_present_barrier 1 +#define VK_NV_PRESENT_BARRIER_SPEC_VERSION 1 +#define VK_NV_PRESENT_BARRIER_EXTENSION_NAME "VK_NV_present_barrier" +typedef struct VkPhysicalDevicePresentBarrierFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 presentBarrier; +} VkPhysicalDevicePresentBarrierFeaturesNV; + +typedef struct VkSurfaceCapabilitiesPresentBarrierNV { + VkStructureType sType; + void* pNext; + VkBool32 presentBarrierSupported; +} VkSurfaceCapabilitiesPresentBarrierNV; + +typedef struct VkSwapchainPresentBarrierCreateInfoNV { + VkStructureType sType; + void* pNext; + VkBool32 presentBarrierEnable; +} VkSwapchainPresentBarrierCreateInfoNV; + + + +// VK_EXT_private_data is a preprocessor guard. Do not pass it to API calls. 
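+/*
+ * Illustrative sketch, not part of the Khronos header (kept out of
+ * compilation with #if 0): attaching a user-defined 64-bit value to a
+ * VkImage through the private-data entry points declared below (this
+ * functionality was promoted to core in Vulkan 1.3). `device` and
+ * `image` are assumed valid handles; error handling is omitted.
+ */
+#if 0
+VkPrivateDataSlotCreateInfo slotInfo = {
+    .sType = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO,
+};
+VkPrivateDataSlot slot;
+vkCreatePrivateDataSlotEXT(device, &slotInfo, NULL, &slot);
+vkSetPrivateDataEXT(device, VK_OBJECT_TYPE_IMAGE, (uint64_t)image, slot, 42u);
+uint64_t value = 0;
+vkGetPrivateDataEXT(device, VK_OBJECT_TYPE_IMAGE, (uint64_t)image, slot, &value);
+vkDestroyPrivateDataSlotEXT(device, slot, NULL);
+#endif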
+#define VK_EXT_private_data 1 +typedef VkPrivateDataSlot VkPrivateDataSlotEXT; + +#define VK_EXT_PRIVATE_DATA_SPEC_VERSION 1 +#define VK_EXT_PRIVATE_DATA_EXTENSION_NAME "VK_EXT_private_data" +typedef VkPrivateDataSlotCreateFlags VkPrivateDataSlotCreateFlagsEXT; + +typedef VkPhysicalDevicePrivateDataFeatures VkPhysicalDevicePrivateDataFeaturesEXT; + +typedef VkDevicePrivateDataCreateInfo VkDevicePrivateDataCreateInfoEXT; + +typedef VkPrivateDataSlotCreateInfo VkPrivateDataSlotCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlotEXT)(VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot); +typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlotEXT)(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data); +typedef void (VKAPI_PTR *PFN_vkGetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlotEXT( + VkDevice device, + const VkPrivateDataSlotCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPrivateDataSlot* pPrivateDataSlot); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlotEXT( + VkDevice device, + VkPrivateDataSlot privateDataSlot, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t data); + +VKAPI_ATTR void VKAPI_CALL vkGetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t* pData); +#endif + + +// VK_EXT_pipeline_creation_cache_control is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_creation_cache_control 1 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION 3 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME "VK_EXT_pipeline_creation_cache_control" +typedef VkPhysicalDevicePipelineCreationCacheControlFeatures VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT; + + + +// VK_NV_device_diagnostics_config is a preprocessor guard. Do not pass it to API calls. 
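+/*
+ * Illustrative sketch, not part of the Khronos header (kept out of
+ * compilation with #if 0): opting into the device diagnostics declared
+ * below by chaining the create-info into VkDeviceCreateInfo at
+ * device-creation time. Only the pNext wiring is shown; the queue and
+ * extension setup in VkDeviceCreateInfo is elided, and the flag choice
+ * is an example.
+ */
+#if 0
+VkDeviceDiagnosticsConfigCreateInfoNV diagInfo = {
+    .sType = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV,
+    .flags = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV |
+             VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV,
+};
+VkDeviceCreateInfo deviceInfo = {
+    .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
+    .pNext = &diagInfo,
+    /* queue and extension setup elided */
+};
+#endif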
+#define VK_NV_device_diagnostics_config 1 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION 2 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME "VK_NV_device_diagnostics_config" + +typedef enum VkDeviceDiagnosticsConfigFlagBitsNV { + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV = 0x00000001, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV = 0x00000002, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV = 0x00000004, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV = 0x00000008, + VK_DEVICE_DIAGNOSTICS_CONFIG_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkDeviceDiagnosticsConfigFlagBitsNV; +typedef VkFlags VkDeviceDiagnosticsConfigFlagsNV; +typedef struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 diagnosticsConfig; +} VkPhysicalDeviceDiagnosticsConfigFeaturesNV; + +typedef struct VkDeviceDiagnosticsConfigCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceDiagnosticsConfigFlagsNV flags; +} VkDeviceDiagnosticsConfigCreateInfoNV; + + + +// VK_QCOM_render_pass_store_ops is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_render_pass_store_ops 1 +#define VK_QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION 2 +#define VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME "VK_QCOM_render_pass_store_ops" + + +// VK_NV_cuda_kernel_launch is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_cuda_kernel_launch 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCudaModuleNV) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCudaFunctionNV) +#define VK_NV_CUDA_KERNEL_LAUNCH_SPEC_VERSION 2 +#define VK_NV_CUDA_KERNEL_LAUNCH_EXTENSION_NAME "VK_NV_cuda_kernel_launch" +typedef struct VkCudaModuleCreateInfoNV { + VkStructureType sType; + const void* pNext; + size_t dataSize; + const void* pData; +} VkCudaModuleCreateInfoNV; + +typedef struct VkCudaFunctionCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkCudaModuleNV module; + const char* pName; +} VkCudaFunctionCreateInfoNV; + +typedef struct VkCudaLaunchInfoNV { + VkStructureType sType; + const void* pNext; + VkCudaFunctionNV function; + uint32_t gridDimX; + uint32_t gridDimY; + uint32_t gridDimZ; + uint32_t blockDimX; + uint32_t blockDimY; + uint32_t blockDimZ; + uint32_t sharedMemBytes; + size_t paramCount; + const void* const * pParams; + size_t extraCount; + const void* const * pExtras; +} VkCudaLaunchInfoNV; + +typedef struct VkPhysicalDeviceCudaKernelLaunchFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cudaKernelLaunchFeatures; +} VkPhysicalDeviceCudaKernelLaunchFeaturesNV; + +typedef struct VkPhysicalDeviceCudaKernelLaunchPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t computeCapabilityMinor; + uint32_t computeCapabilityMajor; +} VkPhysicalDeviceCudaKernelLaunchPropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateCudaModuleNV)(VkDevice device, const VkCudaModuleCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaModuleNV* pModule); +typedef VkResult (VKAPI_PTR *PFN_vkGetCudaModuleCacheNV)(VkDevice device, VkCudaModuleNV module, size_t* pCacheSize, void* pCacheData); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCudaFunctionNV)(VkDevice device, const VkCudaFunctionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaFunctionNV* pFunction); +typedef void (VKAPI_PTR *PFN_vkDestroyCudaModuleNV)(VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR 
*PFN_vkDestroyCudaFunctionNV)(VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdCudaLaunchKernelNV)(VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV* pLaunchInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCudaModuleNV( + VkDevice device, + const VkCudaModuleCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCudaModuleNV* pModule); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetCudaModuleCacheNV( + VkDevice device, + VkCudaModuleNV module, + size_t* pCacheSize, + void* pCacheData); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCudaFunctionNV( + VkDevice device, + const VkCudaFunctionCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCudaFunctionNV* pFunction); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCudaModuleNV( + VkDevice device, + VkCudaModuleNV module, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCudaFunctionNV( + VkDevice device, + VkCudaFunctionNV function, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdCudaLaunchKernelNV( + VkCommandBuffer commandBuffer, + const VkCudaLaunchInfoNV* pLaunchInfo); +#endif + + +// VK_NV_low_latency is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_low_latency 1 +#define VK_NV_LOW_LATENCY_SPEC_VERSION 1 +#define VK_NV_LOW_LATENCY_EXTENSION_NAME "VK_NV_low_latency" +typedef struct VkQueryLowLatencySupportNV { + VkStructureType sType; + const void* pNext; + void* pQueriedLowLatencyData; +} VkQueryLowLatencySupportNV; + + + +// VK_EXT_descriptor_buffer is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_descriptor_buffer 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR) +#define VK_EXT_DESCRIPTOR_BUFFER_SPEC_VERSION 1 +#define VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME "VK_EXT_descriptor_buffer" +typedef struct VkPhysicalDeviceDescriptorBufferPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 combinedImageSamplerDescriptorSingleArray; + VkBool32 bufferlessPushDescriptors; + VkBool32 allowSamplerImageViewPostSubmitCreation; + VkDeviceSize descriptorBufferOffsetAlignment; + uint32_t maxDescriptorBufferBindings; + uint32_t maxResourceDescriptorBufferBindings; + uint32_t maxSamplerDescriptorBufferBindings; + uint32_t maxEmbeddedImmutableSamplerBindings; + uint32_t maxEmbeddedImmutableSamplers; + size_t bufferCaptureReplayDescriptorDataSize; + size_t imageCaptureReplayDescriptorDataSize; + size_t imageViewCaptureReplayDescriptorDataSize; + size_t samplerCaptureReplayDescriptorDataSize; + size_t accelerationStructureCaptureReplayDescriptorDataSize; + size_t samplerDescriptorSize; + size_t combinedImageSamplerDescriptorSize; + size_t sampledImageDescriptorSize; + size_t storageImageDescriptorSize; + size_t uniformTexelBufferDescriptorSize; + size_t robustUniformTexelBufferDescriptorSize; + size_t storageTexelBufferDescriptorSize; + size_t robustStorageTexelBufferDescriptorSize; + size_t uniformBufferDescriptorSize; + size_t robustUniformBufferDescriptorSize; + size_t storageBufferDescriptorSize; + size_t robustStorageBufferDescriptorSize; + size_t inputAttachmentDescriptorSize; + size_t accelerationStructureDescriptorSize; + VkDeviceSize maxSamplerDescriptorBufferRange; + VkDeviceSize maxResourceDescriptorBufferRange; + VkDeviceSize samplerDescriptorBufferAddressSpaceSize; + VkDeviceSize resourceDescriptorBufferAddressSpaceSize; + VkDeviceSize descriptorBufferAddressSpaceSize; +} 
VkPhysicalDeviceDescriptorBufferPropertiesEXT; + +typedef struct VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT { + VkStructureType sType; + void* pNext; + size_t combinedImageSamplerDensityMapDescriptorSize; +} VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + +typedef struct VkPhysicalDeviceDescriptorBufferFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 descriptorBuffer; + VkBool32 descriptorBufferCaptureReplay; + VkBool32 descriptorBufferImageLayoutIgnored; + VkBool32 descriptorBufferPushDescriptors; +} VkPhysicalDeviceDescriptorBufferFeaturesEXT; + +typedef struct VkDescriptorAddressInfoEXT { + VkStructureType sType; + void* pNext; + VkDeviceAddress address; + VkDeviceSize range; + VkFormat format; +} VkDescriptorAddressInfoEXT; + +typedef struct VkDescriptorBufferBindingInfoEXT { + VkStructureType sType; + void* pNext; + VkDeviceAddress address; + VkBufferUsageFlags usage; +} VkDescriptorBufferBindingInfoEXT; + +typedef struct VkDescriptorBufferBindingPushDescriptorBufferHandleEXT { + VkStructureType sType; + void* pNext; + VkBuffer buffer; +} VkDescriptorBufferBindingPushDescriptorBufferHandleEXT; + +typedef union VkDescriptorDataEXT { + const VkSampler* pSampler; + const VkDescriptorImageInfo* pCombinedImageSampler; + const VkDescriptorImageInfo* pInputAttachmentImage; + const VkDescriptorImageInfo* pSampledImage; + const VkDescriptorImageInfo* pStorageImage; + const VkDescriptorAddressInfoEXT* pUniformTexelBuffer; + const VkDescriptorAddressInfoEXT* pStorageTexelBuffer; + const VkDescriptorAddressInfoEXT* pUniformBuffer; + const VkDescriptorAddressInfoEXT* pStorageBuffer; + VkDeviceAddress accelerationStructure; +} VkDescriptorDataEXT; + +typedef struct VkDescriptorGetInfoEXT { + VkStructureType sType; + const void* pNext; + VkDescriptorType type; + VkDescriptorDataEXT data; +} VkDescriptorGetInfoEXT; + +typedef struct VkBufferCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkBufferCaptureDescriptorDataInfoEXT; + +typedef struct VkImageCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageCaptureDescriptorDataInfoEXT; + +typedef struct VkImageViewCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkImageView imageView; +} VkImageViewCaptureDescriptorDataInfoEXT; + +typedef struct VkSamplerCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkSampler sampler; +} VkSamplerCaptureDescriptorDataInfoEXT; + +typedef struct VkOpaqueCaptureDescriptorDataCreateInfoEXT { + VkStructureType sType; + const void* pNext; + const void* opaqueCaptureDescriptorData; +} VkOpaqueCaptureDescriptorDataCreateInfoEXT; + +typedef struct VkAccelerationStructureCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR accelerationStructure; + VkAccelerationStructureNV accelerationStructureNV; +} VkAccelerationStructureCaptureDescriptorDataInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSizeEXT)(VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize* pLayoutSizeInBytes); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutBindingOffsetEXT)(VkDevice device, VkDescriptorSetLayout layout, uint32_t binding, VkDeviceSize* pOffset); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorEXT)(VkDevice device, const VkDescriptorGetInfoEXT* pDescriptorInfo, size_t dataSize, void* pDescriptor); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBuffersEXT)(VkCommandBuffer 
commandBuffer, uint32_t bufferCount, const VkDescriptorBufferBindingInfoEXT* pBindingInfos); +typedef void (VKAPI_PTR *PFN_vkCmdSetDescriptorBufferOffsetsEXT)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t* pBufferIndices, const VkDeviceSize* pOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set); +typedef VkResult (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkImageCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSizeEXT( + VkDevice device, + VkDescriptorSetLayout layout, + VkDeviceSize* pLayoutSizeInBytes); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutBindingOffsetEXT( + VkDevice device, + VkDescriptorSetLayout layout, + uint32_t binding, + VkDeviceSize* pOffset); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorEXT( + VkDevice device, + const VkDescriptorGetInfoEXT* pDescriptorInfo, + size_t dataSize, + void* pDescriptor); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBuffersEXT( + VkCommandBuffer commandBuffer, + uint32_t bufferCount, + const VkDescriptorBufferBindingInfoEXT* pBindingInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDescriptorBufferOffsetsEXT( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t* pBufferIndices, + const VkDeviceSize* pOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBufferEmbeddedSamplersEXT( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetBufferOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkBufferCaptureDescriptorDataInfoEXT* pInfo, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkImageCaptureDescriptorDataInfoEXT* pInfo, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSamplerOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, + void* pData); +#endif + + +// VK_EXT_graphics_pipeline_library is a preprocessor guard. Do not pass it to API calls. 
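+/*
+ * Illustrative sketch, not part of the Khronos header (kept out of
+ * compilation with #if 0): marking a pipeline as a vertex-input-interface
+ * library using the create-info declared below, combined with
+ * VK_PIPELINE_CREATE_LIBRARY_BIT_KHR from VK_KHR_pipeline_library. All
+ * other VkGraphicsPipelineCreateInfo state is elided.
+ */
+#if 0
+VkGraphicsPipelineLibraryCreateInfoEXT libraryInfo = {
+    .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT,
+    .flags = VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT,
+};
+VkGraphicsPipelineCreateInfo pipelineInfo = {
+    .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+    .pNext = &libraryInfo,
+    .flags = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR,
+    /* shader stages and fixed-function state elided */
+};
+#endif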
+#define VK_EXT_graphics_pipeline_library 1 +#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION 1 +#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME "VK_EXT_graphics_pipeline_library" + +typedef enum VkGraphicsPipelineLibraryFlagBitsEXT { + VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT = 0x00000001, + VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT = 0x00000002, + VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT = 0x00000004, + VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT = 0x00000008, + VK_GRAPHICS_PIPELINE_LIBRARY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkGraphicsPipelineLibraryFlagBitsEXT; +typedef VkFlags VkGraphicsPipelineLibraryFlagsEXT; +typedef struct VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 graphicsPipelineLibrary; +} VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + +typedef struct VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 graphicsPipelineLibraryFastLinking; + VkBool32 graphicsPipelineLibraryIndependentInterpolationDecoration; +} VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + +typedef struct VkGraphicsPipelineLibraryCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkGraphicsPipelineLibraryFlagsEXT flags; +} VkGraphicsPipelineLibraryCreateInfoEXT; + + + +// VK_AMD_shader_early_and_late_fragment_tests is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_early_and_late_fragment_tests 1 +#define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION 1 +#define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_EXTENSION_NAME "VK_AMD_shader_early_and_late_fragment_tests" +typedef struct VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD { + VkStructureType sType; + void* pNext; + VkBool32 shaderEarlyAndLateFragmentTests; +} VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + + + +// VK_NV_fragment_shading_rate_enums is a preprocessor guard. Do not pass it to API calls. 
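+/*
+ * Illustrative sketch, not part of the Khronos header (kept out of
+ * compilation with #if 0): selecting a coarse shading rate with the
+ * command declared below. The combiner ops come from
+ * VK_KHR_fragment_shading_rate; `commandBuffer` is assumed to be
+ * recording and the bound pipeline to use dynamic shading-rate state.
+ */
+#if 0
+const VkFragmentShadingRateCombinerOpKHR combinerOps[2] = {
+    VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
+    VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
+};
+vkCmdSetFragmentShadingRateEnumNV(commandBuffer,
+    VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV, combinerOps);
+#endif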
+#define VK_NV_fragment_shading_rate_enums 1 +#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME "VK_NV_fragment_shading_rate_enums" + +typedef enum VkFragmentShadingRateTypeNV { + VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV = 0, + VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV = 1, + VK_FRAGMENT_SHADING_RATE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkFragmentShadingRateTypeNV; + +typedef enum VkFragmentShadingRateNV { + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV = 0, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV = 1, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV = 4, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV = 5, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV = 6, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV = 9, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV = 10, + VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV = 11, + VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV = 12, + VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV = 13, + VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV = 14, + VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV = 15, + VK_FRAGMENT_SHADING_RATE_MAX_ENUM_NV = 0x7FFFFFFF +} VkFragmentShadingRateNV; +typedef struct VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShadingRateEnums; + VkBool32 supersampleFragmentShadingRates; + VkBool32 noInvocationFragmentShadingRates; +} VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + +typedef struct VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV { + VkStructureType sType; + void* pNext; + VkSampleCountFlagBits maxFragmentShadingRateInvocationCount; +} VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + +typedef struct VkPipelineFragmentShadingRateEnumStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkFragmentShadingRateTypeNV shadingRateType; + VkFragmentShadingRateNV shadingRate; + VkFragmentShadingRateCombinerOpKHR combinerOps[2]; +} VkPipelineFragmentShadingRateEnumStateCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateEnumNV)(VkCommandBuffer commandBuffer, VkFragmentShadingRateNV shadingRate, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateEnumNV( + VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); +#endif + + +// VK_NV_ray_tracing_motion_blur is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_ray_tracing_motion_blur 1 +#define VK_NV_RAY_TRACING_MOTION_BLUR_SPEC_VERSION 1 +#define VK_NV_RAY_TRACING_MOTION_BLUR_EXTENSION_NAME "VK_NV_ray_tracing_motion_blur" + +typedef enum VkAccelerationStructureMotionInstanceTypeNV { + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV = 0, + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV = 1, + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV = 2, + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkAccelerationStructureMotionInstanceTypeNV; +typedef VkFlags VkAccelerationStructureMotionInfoFlagsNV; +typedef VkFlags VkAccelerationStructureMotionInstanceFlagsNV; +typedef union VkDeviceOrHostAddressConstKHR { + VkDeviceAddress deviceAddress; + const void* hostAddress; +} VkDeviceOrHostAddressConstKHR; + +typedef struct VkAccelerationStructureGeometryMotionTrianglesDataNV { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR vertexData; +} VkAccelerationStructureGeometryMotionTrianglesDataNV; + +typedef struct VkAccelerationStructureMotionInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t maxInstances; + VkAccelerationStructureMotionInfoFlagsNV flags; +} VkAccelerationStructureMotionInfoNV; + +typedef struct VkAccelerationStructureMatrixMotionInstanceNV { + VkTransformMatrixKHR transformT0; + VkTransformMatrixKHR transformT1; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureMatrixMotionInstanceNV; + +typedef struct VkSRTDataNV { + float sx; + float a; + float b; + float pvx; + float sy; + float c; + float pvy; + float sz; + float pvz; + float qx; + float qy; + float qz; + float qw; + float tx; + float ty; + float tz; +} VkSRTDataNV; + +typedef struct VkAccelerationStructureSRTMotionInstanceNV { + VkSRTDataNV transformT0; + VkSRTDataNV transformT1; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureSRTMotionInstanceNV; + +typedef union VkAccelerationStructureMotionInstanceDataNV { + VkAccelerationStructureInstanceKHR staticInstance; + VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; + VkAccelerationStructureSRTMotionInstanceNV srtMotionInstance; +} VkAccelerationStructureMotionInstanceDataNV; + +typedef struct VkAccelerationStructureMotionInstanceNV { + VkAccelerationStructureMotionInstanceTypeNV type; + VkAccelerationStructureMotionInstanceFlagsNV flags; + VkAccelerationStructureMotionInstanceDataNV data; +} VkAccelerationStructureMotionInstanceNV; + +typedef struct VkPhysicalDeviceRayTracingMotionBlurFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingMotionBlur; + VkBool32 rayTracingMotionBlurPipelineTraceRaysIndirect; +} VkPhysicalDeviceRayTracingMotionBlurFeaturesNV; + + + +// VK_EXT_ycbcr_2plane_444_formats is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_ycbcr_2plane_444_formats 1 +#define VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION 1 +#define VK_EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME "VK_EXT_ycbcr_2plane_444_formats" +typedef struct VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 ycbcr2plane444Formats; +} VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + + + +// VK_EXT_fragment_density_map2 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_fragment_density_map2 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME "VK_EXT_fragment_density_map2" +typedef struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMapDeferred; +} VkPhysicalDeviceFragmentDensityMap2FeaturesEXT; + +typedef struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 subsampledLoads; + VkBool32 subsampledCoarseReconstructionEarlyAccess; + uint32_t maxSubsampledArrayLayers; + uint32_t maxDescriptorSetSubsampledSamplers; +} VkPhysicalDeviceFragmentDensityMap2PropertiesEXT; + + + +// VK_QCOM_rotated_copy_commands is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_rotated_copy_commands 1 +#define VK_QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION 1 +#define VK_QCOM_ROTATED_COPY_COMMANDS_EXTENSION_NAME "VK_QCOM_rotated_copy_commands" +typedef struct VkCopyCommandTransformInfoQCOM { + VkStructureType sType; + const void* pNext; + VkSurfaceTransformFlagBitsKHR transform; +} VkCopyCommandTransformInfoQCOM; + + + +// VK_EXT_image_robustness is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_robustness 1 +#define VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_image_robustness" +typedef VkPhysicalDeviceImageRobustnessFeatures VkPhysicalDeviceImageRobustnessFeaturesEXT; + + + +// VK_EXT_image_compression_control is a preprocessor guard. Do not pass it to API calls. 
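+/*
+ * Illustrative sketch, not part of the Khronos header (kept out of
+ * compilation with #if 0): requesting default fixed-rate compression for
+ * a new image by chaining the control struct declared below into
+ * VkImageCreateInfo. All other image parameters are elided.
+ */
+#if 0
+VkImageCompressionControlEXT compression = {
+    .sType = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT,
+    .flags = VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT,
+};
+VkImageCreateInfo imageInfo = {
+    .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+    .pNext = &compression,
+    /* format, extent, usage, etc. elided */
+};
+#endif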
+#define VK_EXT_image_compression_control 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SPEC_VERSION 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_EXTENSION_NAME "VK_EXT_image_compression_control" + +typedef enum VkImageCompressionFlagBitsEXT { + VK_IMAGE_COMPRESSION_DEFAULT_EXT = 0, + VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT = 0x00000001, + VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT = 0x00000002, + VK_IMAGE_COMPRESSION_DISABLED_EXT = 0x00000004, + VK_IMAGE_COMPRESSION_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkImageCompressionFlagBitsEXT; +typedef VkFlags VkImageCompressionFlagsEXT; + +typedef enum VkImageCompressionFixedRateFlagBitsEXT { + VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT = 0, + VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT = 0x00000001, + VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT = 0x00000002, + VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT = 0x00000004, + VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT = 0x00000008, + VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT = 0x00000010, + VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT = 0x00000020, + VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT = 0x00000040, + VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT = 0x00000080, + VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT = 0x00000100, + VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT = 0x00000200, + VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT = 0x00000400, + VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT = 0x00000800, + VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT = 0x00001000, + VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT = 0x00002000, + VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT = 0x00004000, + VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT = 0x00008000, + VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT = 0x00010000, + VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT = 0x00020000, + VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT = 0x00040000, + VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT = 0x00080000, + VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT = 0x00100000, + VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT = 0x00200000, + VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT = 0x00400000, + VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT = 0x00800000, + VK_IMAGE_COMPRESSION_FIXED_RATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkImageCompressionFixedRateFlagBitsEXT; +typedef VkFlags VkImageCompressionFixedRateFlagsEXT; +typedef struct VkPhysicalDeviceImageCompressionControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 imageCompressionControl; +} VkPhysicalDeviceImageCompressionControlFeaturesEXT; + +typedef struct VkImageCompressionControlEXT { + VkStructureType sType; + const void* pNext; + VkImageCompressionFlagsEXT flags; + uint32_t compressionControlPlaneCount; + VkImageCompressionFixedRateFlagsEXT* pFixedRateFlags; +} VkImageCompressionControlEXT; + +typedef struct VkImageCompressionPropertiesEXT { + VkStructureType sType; + void* pNext; + VkImageCompressionFlagsEXT imageCompressionFlags; + VkImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags; +} VkImageCompressionPropertiesEXT; + + + +// VK_EXT_attachment_feedback_loop_layout is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_attachment_feedback_loop_layout 1 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION 2 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_EXTENSION_NAME "VK_EXT_attachment_feedback_loop_layout" +typedef struct VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 attachmentFeedbackLoopLayout; +} VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + + + +// VK_EXT_4444_formats is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_4444_formats 1 +#define VK_EXT_4444_FORMATS_SPEC_VERSION 1 +#define VK_EXT_4444_FORMATS_EXTENSION_NAME "VK_EXT_4444_formats" +typedef struct VkPhysicalDevice4444FormatsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 formatA4R4G4B4; + VkBool32 formatA4B4G4R4; +} VkPhysicalDevice4444FormatsFeaturesEXT; + + + +// VK_EXT_device_fault is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_device_fault 1 +#define VK_EXT_DEVICE_FAULT_SPEC_VERSION 2 +#define VK_EXT_DEVICE_FAULT_EXTENSION_NAME "VK_EXT_device_fault" + +typedef enum VkDeviceFaultAddressTypeEXT { + VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT = 0, + VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT = 1, + VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT = 2, + VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT = 3, + VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT = 4, + VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT = 5, + VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT = 6, + VK_DEVICE_FAULT_ADDRESS_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceFaultAddressTypeEXT; + +typedef enum VkDeviceFaultVendorBinaryHeaderVersionEXT { + VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT = 1, + VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceFaultVendorBinaryHeaderVersionEXT; +typedef struct VkPhysicalDeviceFaultFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 deviceFault; + VkBool32 deviceFaultVendorBinary; +} VkPhysicalDeviceFaultFeaturesEXT; + +typedef struct VkDeviceFaultCountsEXT { + VkStructureType sType; + void* pNext; + uint32_t addressInfoCount; + uint32_t vendorInfoCount; + VkDeviceSize vendorBinarySize; +} VkDeviceFaultCountsEXT; + +typedef struct VkDeviceFaultAddressInfoEXT { + VkDeviceFaultAddressTypeEXT addressType; + VkDeviceAddress reportedAddress; + VkDeviceSize addressPrecision; +} VkDeviceFaultAddressInfoEXT; + +typedef struct VkDeviceFaultVendorInfoEXT { + char description[VK_MAX_DESCRIPTION_SIZE]; + uint64_t vendorFaultCode; + uint64_t vendorFaultData; +} VkDeviceFaultVendorInfoEXT; + +typedef struct VkDeviceFaultInfoEXT { + VkStructureType sType; + void* pNext; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkDeviceFaultAddressInfoEXT* pAddressInfos; + VkDeviceFaultVendorInfoEXT* pVendorInfos; + void* pVendorBinaryData; +} VkDeviceFaultInfoEXT; + +typedef struct VkDeviceFaultVendorBinaryHeaderVersionOneEXT { + uint32_t headerSize; + VkDeviceFaultVendorBinaryHeaderVersionEXT headerVersion; + uint32_t vendorID; + uint32_t deviceID; + uint32_t driverVersion; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; + uint32_t applicationNameOffset; + uint32_t applicationVersion; + uint32_t engineNameOffset; + uint32_t engineVersion; + uint32_t apiVersion; +} VkDeviceFaultVendorBinaryHeaderVersionOneEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceFaultInfoEXT)(VkDevice device, VkDeviceFaultCountsEXT* pFaultCounts, VkDeviceFaultInfoEXT* pFaultInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL 
vkGetDeviceFaultInfoEXT( + VkDevice device, + VkDeviceFaultCountsEXT* pFaultCounts, + VkDeviceFaultInfoEXT* pFaultInfo); +#endif + + +// VK_ARM_rasterization_order_attachment_access is a preprocessor guard. Do not pass it to API calls. +#define VK_ARM_rasterization_order_attachment_access 1 +#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1 +#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_ARM_rasterization_order_attachment_access" +typedef struct VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 rasterizationOrderColorAttachmentAccess; + VkBool32 rasterizationOrderDepthAttachmentAccess; + VkBool32 rasterizationOrderStencilAttachmentAccess; +} VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + +typedef VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; + + + +// VK_EXT_rgba10x6_formats is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_rgba10x6_formats 1 +#define VK_EXT_RGBA10X6_FORMATS_SPEC_VERSION 1 +#define VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME "VK_EXT_rgba10x6_formats" +typedef struct VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 formatRgba10x6WithoutYCbCrSampler; +} VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT; + + + +// VK_VALVE_mutable_descriptor_type is a preprocessor guard. Do not pass it to API calls. +#define VK_VALVE_mutable_descriptor_type 1 +#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1 +#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_VALVE_mutable_descriptor_type" +typedef struct VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 mutableDescriptorType; +} VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT; + +typedef VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + +typedef struct VkMutableDescriptorTypeListEXT { + uint32_t descriptorTypeCount; + const VkDescriptorType* pDescriptorTypes; +} VkMutableDescriptorTypeListEXT; + +typedef VkMutableDescriptorTypeListEXT VkMutableDescriptorTypeListVALVE; + +typedef struct VkMutableDescriptorTypeCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t mutableDescriptorTypeListCount; + const VkMutableDescriptorTypeListEXT* pMutableDescriptorTypeLists; +} VkMutableDescriptorTypeCreateInfoEXT; + +typedef VkMutableDescriptorTypeCreateInfoEXT VkMutableDescriptorTypeCreateInfoVALVE; + + + +// VK_EXT_vertex_input_dynamic_state is a preprocessor guard. Do not pass it to API calls. 
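+/*
+ * Illustrative sketch, not part of the Khronos header (kept out of
+ * compilation with #if 0): supplying vertex input state dynamically with
+ * the command declared below, in place of a static
+ * VkPipelineVertexInputStateCreateInfo. A single tightly packed
+ * position attribute is assumed.
+ */
+#if 0
+VkVertexInputBindingDescription2EXT binding = {
+    .sType = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT,
+    .binding = 0,
+    .stride = sizeof(float) * 3,
+    .inputRate = VK_VERTEX_INPUT_RATE_VERTEX,
+    .divisor = 1,
+};
+VkVertexInputAttributeDescription2EXT attribute = {
+    .sType = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT,
+    .location = 0,
+    .binding = 0,
+    .format = VK_FORMAT_R32G32B32_SFLOAT,
+    .offset = 0,
+};
+vkCmdSetVertexInputEXT(commandBuffer, 1, &binding, 1, &attribute);
+#endif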
+#define VK_EXT_vertex_input_dynamic_state 1 +#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION 2 +#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_vertex_input_dynamic_state" +typedef struct VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 vertexInputDynamicState; +} VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT; + +typedef struct VkVertexInputBindingDescription2EXT { + VkStructureType sType; + void* pNext; + uint32_t binding; + uint32_t stride; + VkVertexInputRate inputRate; + uint32_t divisor; +} VkVertexInputBindingDescription2EXT; + +typedef struct VkVertexInputAttributeDescription2EXT { + VkStructureType sType; + void* pNext; + uint32_t location; + uint32_t binding; + VkFormat format; + uint32_t offset; +} VkVertexInputAttributeDescription2EXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetVertexInputEXT)(VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetVertexInputEXT( + VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); +#endif + + +// VK_EXT_physical_device_drm is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_physical_device_drm 1 +#define VK_EXT_PHYSICAL_DEVICE_DRM_SPEC_VERSION 1 +#define VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME "VK_EXT_physical_device_drm" +typedef struct VkPhysicalDeviceDrmPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 hasPrimary; + VkBool32 hasRender; + int64_t primaryMajor; + int64_t primaryMinor; + int64_t renderMajor; + int64_t renderMinor; +} VkPhysicalDeviceDrmPropertiesEXT; + + + +// VK_EXT_device_address_binding_report is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_device_address_binding_report 1 +#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_SPEC_VERSION 1 +#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_EXTENSION_NAME "VK_EXT_device_address_binding_report" + +typedef enum VkDeviceAddressBindingTypeEXT { + VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT = 0, + VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT = 1, + VK_DEVICE_ADDRESS_BINDING_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceAddressBindingTypeEXT; + +typedef enum VkDeviceAddressBindingFlagBitsEXT { + VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT = 0x00000001, + VK_DEVICE_ADDRESS_BINDING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceAddressBindingFlagBitsEXT; +typedef VkFlags VkDeviceAddressBindingFlagsEXT; +typedef struct VkPhysicalDeviceAddressBindingReportFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 reportAddressBinding; +} VkPhysicalDeviceAddressBindingReportFeaturesEXT; + +typedef struct VkDeviceAddressBindingCallbackDataEXT { + VkStructureType sType; + void* pNext; + VkDeviceAddressBindingFlagsEXT flags; + VkDeviceAddress baseAddress; + VkDeviceSize size; + VkDeviceAddressBindingTypeEXT bindingType; +} VkDeviceAddressBindingCallbackDataEXT; + + + +// VK_EXT_depth_clip_control is a preprocessor guard. Do not pass it to API calls. 
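+/*
+ * Illustrative sketch, not part of the Khronos header (kept out of
+ * compilation with #if 0): opting into an OpenGL-style [-1, 1] clip-space
+ * depth range by chaining the struct declared below into
+ * VkPipelineViewportStateCreateInfo. Viewport and scissor setup are
+ * elided.
+ */
+#if 0
+VkPipelineViewportDepthClipControlCreateInfoEXT clipControl = {
+    .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT,
+    .negativeOneToOne = VK_TRUE,
+};
+VkPipelineViewportStateCreateInfo viewportState = {
+    .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+    .pNext = &clipControl,
+    .viewportCount = 1,
+    .scissorCount = 1,
+};
+#endif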
+#define VK_EXT_depth_clip_control 1 +#define VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME "VK_EXT_depth_clip_control" +typedef struct VkPhysicalDeviceDepthClipControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClipControl; +} VkPhysicalDeviceDepthClipControlFeaturesEXT; + +typedef struct VkPipelineViewportDepthClipControlCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 negativeOneToOne; +} VkPipelineViewportDepthClipControlCreateInfoEXT; + + + +// VK_EXT_primitive_topology_list_restart is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_primitive_topology_list_restart 1 +#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION 1 +#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME "VK_EXT_primitive_topology_list_restart" +typedef struct VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 primitiveTopologyListRestart; + VkBool32 primitiveTopologyPatchListRestart; +} VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + + + +// VK_HUAWEI_subpass_shading is a preprocessor guard. Do not pass it to API calls. +#define VK_HUAWEI_subpass_shading 1 +#define VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION 3 +#define VK_HUAWEI_SUBPASS_SHADING_EXTENSION_NAME "VK_HUAWEI_subpass_shading" +typedef struct VkSubpassShadingPipelineCreateInfoHUAWEI { + VkStructureType sType; + void* pNext; + VkRenderPass renderPass; + uint32_t subpass; +} VkSubpassShadingPipelineCreateInfoHUAWEI; + +typedef struct VkPhysicalDeviceSubpassShadingFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 subpassShading; +} VkPhysicalDeviceSubpassShadingFeaturesHUAWEI; + +typedef struct VkPhysicalDeviceSubpassShadingPropertiesHUAWEI { + VkStructureType sType; + void* pNext; + uint32_t maxSubpassShadingWorkgroupSizeAspectRatio; +} VkPhysicalDeviceSubpassShadingPropertiesHUAWEI; + +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI)(VkDevice device, VkRenderPass renderpass, VkExtent2D* pMaxWorkgroupSize); +typedef void (VKAPI_PTR *PFN_vkCmdSubpassShadingHUAWEI)(VkCommandBuffer commandBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + VkDevice device, + VkRenderPass renderpass, + VkExtent2D* pMaxWorkgroupSize); + +VKAPI_ATTR void VKAPI_CALL vkCmdSubpassShadingHUAWEI( + VkCommandBuffer commandBuffer); +#endif + + +// VK_HUAWEI_invocation_mask is a preprocessor guard. Do not pass it to API calls. +#define VK_HUAWEI_invocation_mask 1 +#define VK_HUAWEI_INVOCATION_MASK_SPEC_VERSION 1 +#define VK_HUAWEI_INVOCATION_MASK_EXTENSION_NAME "VK_HUAWEI_invocation_mask" +typedef struct VkPhysicalDeviceInvocationMaskFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 invocationMask; +} VkPhysicalDeviceInvocationMaskFeaturesHUAWEI; + +typedef void (VKAPI_PTR *PFN_vkCmdBindInvocationMaskHUAWEI)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindInvocationMaskHUAWEI( + VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout); +#endif + + +// VK_NV_external_memory_rdma is a preprocessor guard. Do not pass it to API calls. 
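+// Editor's note: illustrative sketch, not part of the upstream header. Assumes
+// `device` and `memory` are valid and the memory was allocated with an
+// RDMA-exportable external handle type; the handle-type flag is defined elsewhere
+// in this header.
+//
+//   VkMemoryGetRemoteAddressInfoNV info = {
+//       .sType = VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV,
+//       .memory = memory,
+//       .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV,
+//   };
+//   VkRemoteAddressNV remote_address = NULL;
+//   VkResult res = vkGetMemoryRemoteAddressNV(device, &info, &remote_address);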
+#define VK_NV_external_memory_rdma 1 +typedef void* VkRemoteAddressNV; +#define VK_NV_EXTERNAL_MEMORY_RDMA_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_RDMA_EXTENSION_NAME "VK_NV_external_memory_rdma" +typedef struct VkMemoryGetRemoteAddressInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetRemoteAddressInfoNV; + +typedef struct VkPhysicalDeviceExternalMemoryRDMAFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 externalMemoryRDMA; +} VkPhysicalDeviceExternalMemoryRDMAFeaturesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryRemoteAddressNV)(VkDevice device, const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, VkRemoteAddressNV* pAddress); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryRemoteAddressNV( + VkDevice device, + const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, + VkRemoteAddressNV* pAddress); +#endif + + +// VK_EXT_pipeline_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_properties 1 +#define VK_EXT_PIPELINE_PROPERTIES_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_PROPERTIES_EXTENSION_NAME "VK_EXT_pipeline_properties" +typedef VkPipelineInfoKHR VkPipelineInfoEXT; + +typedef struct VkPipelinePropertiesIdentifierEXT { + VkStructureType sType; + void* pNext; + uint8_t pipelineIdentifier[VK_UUID_SIZE]; +} VkPipelinePropertiesIdentifierEXT; + +typedef struct VkPhysicalDevicePipelinePropertiesFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pipelinePropertiesIdentifier; +} VkPhysicalDevicePipelinePropertiesFeaturesEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelinePropertiesEXT)(VkDevice device, const VkPipelineInfoEXT* pPipelineInfo, VkBaseOutStructure* pPipelineProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelinePropertiesEXT( + VkDevice device, + const VkPipelineInfoEXT* pPipelineInfo, + VkBaseOutStructure* pPipelineProperties); +#endif + + +// VK_EXT_frame_boundary is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_frame_boundary 1 +#define VK_EXT_FRAME_BOUNDARY_SPEC_VERSION 1 +#define VK_EXT_FRAME_BOUNDARY_EXTENSION_NAME "VK_EXT_frame_boundary" + +typedef enum VkFrameBoundaryFlagBitsEXT { + VK_FRAME_BOUNDARY_FRAME_END_BIT_EXT = 0x00000001, + VK_FRAME_BOUNDARY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkFrameBoundaryFlagBitsEXT; +typedef VkFlags VkFrameBoundaryFlagsEXT; +typedef struct VkPhysicalDeviceFrameBoundaryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 frameBoundary; +} VkPhysicalDeviceFrameBoundaryFeaturesEXT; + +typedef struct VkFrameBoundaryEXT { + VkStructureType sType; + const void* pNext; + VkFrameBoundaryFlagsEXT flags; + uint64_t frameID; + uint32_t imageCount; + const VkImage* pImages; + uint32_t bufferCount; + const VkBuffer* pBuffers; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkFrameBoundaryEXT; + + + +// VK_EXT_multisampled_render_to_single_sampled is a preprocessor guard. Do not pass it to API calls. 
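+// Editor's note: illustrative sketch, not part of the upstream header. The info
+// struct declared below is chained into VkRenderingInfo::pNext (or
+// VkSubpassDescription2::pNext) so single-sampled attachments are rendered as if
+// multisampled, with an implicit resolve at the end of the pass.
+//
+//   VkMultisampledRenderToSingleSampledInfoEXT msrtss = {
+//       .sType = VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT,
+//       .multisampledRenderToSingleSampledEnable = VK_TRUE,
+//       .rasterizationSamples = VK_SAMPLE_COUNT_4_BIT,
+//   };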
+#define VK_EXT_multisampled_render_to_single_sampled 1 +#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION 1 +#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME "VK_EXT_multisampled_render_to_single_sampled" +typedef struct VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 multisampledRenderToSingleSampled; +} VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + +typedef struct VkSubpassResolvePerformanceQueryEXT { + VkStructureType sType; + void* pNext; + VkBool32 optimal; +} VkSubpassResolvePerformanceQueryEXT; + +typedef struct VkMultisampledRenderToSingleSampledInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 multisampledRenderToSingleSampledEnable; + VkSampleCountFlagBits rasterizationSamples; +} VkMultisampledRenderToSingleSampledInfoEXT; + + + +// VK_EXT_extended_dynamic_state2 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_extended_dynamic_state2 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME "VK_EXT_extended_dynamic_state2" +typedef struct VkPhysicalDeviceExtendedDynamicState2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState2; + VkBool32 extendedDynamicState2LogicOp; + VkBool32 extendedDynamicState2PatchControlPoints; +} VkPhysicalDeviceExtendedDynamicState2FeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetPatchControlPointsEXT)(VkCommandBuffer commandBuffer, uint32_t patchControlPoints); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetLogicOpEXT)(VkCommandBuffer commandBuffer, VkLogicOp logicOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetPatchControlPointsEXT( + VkCommandBuffer commandBuffer, + uint32_t patchControlPoints); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 rasterizerDiscardEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthBiasEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEXT( + VkCommandBuffer commandBuffer, + VkLogicOp logicOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 primitiveRestartEnable); +#endif + + +// VK_EXT_color_write_enable is a preprocessor guard. Do not pass it to API calls. 
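+// Editor's note: illustrative sketch, not part of the upstream header. Assumes a
+// command buffer `cmd` in the recording state and a bound pipeline created with
+// VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT and two color attachments.
+//
+//   const VkBool32 write_enables[2] = { VK_TRUE, VK_FALSE };  // mute attachment 1
+//   vkCmdSetColorWriteEnableEXT(cmd, 2, write_enables);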
+#define VK_EXT_color_write_enable 1 +#define VK_EXT_COLOR_WRITE_ENABLE_SPEC_VERSION 1 +#define VK_EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME "VK_EXT_color_write_enable" +typedef struct VkPhysicalDeviceColorWriteEnableFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 colorWriteEnable; +} VkPhysicalDeviceColorWriteEnableFeaturesEXT; + +typedef struct VkPipelineColorWriteCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t attachmentCount; + const VkBool32* pColorWriteEnables; +} VkPipelineColorWriteCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteEnableEXT)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32* pColorWriteEnables); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteEnableEXT( + VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkBool32* pColorWriteEnables); +#endif + + +// VK_EXT_primitives_generated_query is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_primitives_generated_query 1 +#define VK_EXT_PRIMITIVES_GENERATED_QUERY_SPEC_VERSION 1 +#define VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME "VK_EXT_primitives_generated_query" +typedef struct VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 primitivesGeneratedQuery; + VkBool32 primitivesGeneratedQueryWithRasterizerDiscard; + VkBool32 primitivesGeneratedQueryWithNonZeroStreams; +} VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + + + +// VK_EXT_global_priority_query is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_global_priority_query 1 +#define VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION 1 +#define VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME "VK_EXT_global_priority_query" +#define VK_MAX_GLOBAL_PRIORITY_SIZE_EXT VK_MAX_GLOBAL_PRIORITY_SIZE_KHR +typedef VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT; + +typedef VkQueueFamilyGlobalPriorityPropertiesKHR VkQueueFamilyGlobalPriorityPropertiesEXT; + + + +// VK_EXT_image_view_min_lod is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_view_min_lod 1 +#define VK_EXT_IMAGE_VIEW_MIN_LOD_SPEC_VERSION 1 +#define VK_EXT_IMAGE_VIEW_MIN_LOD_EXTENSION_NAME "VK_EXT_image_view_min_lod" +typedef struct VkPhysicalDeviceImageViewMinLodFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 minLod; +} VkPhysicalDeviceImageViewMinLodFeaturesEXT; + +typedef struct VkImageViewMinLodCreateInfoEXT { + VkStructureType sType; + const void* pNext; + float minLod; +} VkImageViewMinLodCreateInfoEXT; + + + +// VK_EXT_multi_draw is a preprocessor guard. Do not pass it to API calls. 
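+// Editor's note: illustrative sketch, not part of the upstream header. Assumes a
+// command buffer `cmd` inside a render pass with a bound graphics pipeline; the
+// stride parameter lets draw records live inside larger application structs.
+//
+//   const VkMultiDrawInfoEXT draws[2] = {
+//       { .firstVertex = 0, .vertexCount = 3 },
+//       { .firstVertex = 3, .vertexCount = 6 },
+//   };
+//   vkCmdDrawMultiEXT(cmd, 2, draws, /*instanceCount*/ 1, /*firstInstance*/ 0,
+//                     sizeof(VkMultiDrawInfoEXT));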
+#define VK_EXT_multi_draw 1 +#define VK_EXT_MULTI_DRAW_SPEC_VERSION 1 +#define VK_EXT_MULTI_DRAW_EXTENSION_NAME "VK_EXT_multi_draw" +typedef struct VkPhysicalDeviceMultiDrawFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 multiDraw; +} VkPhysicalDeviceMultiDrawFeaturesEXT; + +typedef struct VkPhysicalDeviceMultiDrawPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxMultiDrawCount; +} VkPhysicalDeviceMultiDrawPropertiesEXT; + +typedef struct VkMultiDrawInfoEXT { + uint32_t firstVertex; + uint32_t vertexCount; +} VkMultiDrawInfoEXT; + +typedef struct VkMultiDrawIndexedInfoEXT { + uint32_t firstIndex; + uint32_t indexCount; + int32_t vertexOffset; +} VkMultiDrawIndexedInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT* pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiIndexedEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT* pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t* pVertexOffset); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiEXT( + VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawInfoEXT* pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiIndexedEXT( + VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawIndexedInfoEXT* pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t* pVertexOffset); +#endif + + +// VK_EXT_image_2d_view_of_3d is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_2d_view_of_3d 1 +#define VK_EXT_IMAGE_2D_VIEW_OF_3D_SPEC_VERSION 1 +#define VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME "VK_EXT_image_2d_view_of_3d" +typedef struct VkPhysicalDeviceImage2DViewOf3DFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 image2DViewOf3D; + VkBool32 sampler2DViewOf3D; +} VkPhysicalDeviceImage2DViewOf3DFeaturesEXT; + + + +// VK_EXT_shader_tile_image is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_tile_image 1 +#define VK_EXT_SHADER_TILE_IMAGE_SPEC_VERSION 1 +#define VK_EXT_SHADER_TILE_IMAGE_EXTENSION_NAME "VK_EXT_shader_tile_image" +typedef struct VkPhysicalDeviceShaderTileImageFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderTileImageColorReadAccess; + VkBool32 shaderTileImageDepthReadAccess; + VkBool32 shaderTileImageStencilReadAccess; +} VkPhysicalDeviceShaderTileImageFeaturesEXT; + +typedef struct VkPhysicalDeviceShaderTileImagePropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderTileImageCoherentReadAccelerated; + VkBool32 shaderTileImageReadSampleFromPixelRateInvocation; + VkBool32 shaderTileImageReadFromHelperInvocation; +} VkPhysicalDeviceShaderTileImagePropertiesEXT; + + + +// VK_EXT_opacity_micromap is a preprocessor guard. Do not pass it to API calls. 
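+// Editor's note: illustrative build flow for the micromap API declared below; it
+// is not part of the upstream header. Assumes `device`, `cmd`, and a usage-count
+// array are valid; input data addresses, buffer allocation, and barriers are
+// omitted for brevity.
+//
+//   VkMicromapBuildInfoEXT build = {
+//       .sType = VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT,
+//       .type = VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT,
+//       .mode = VK_BUILD_MICROMAP_MODE_BUILD_EXT,
+//       .usageCountsCount = usage_count,
+//       .pUsageCounts = usages,
+//   };
+//   VkMicromapBuildSizesInfoEXT sizes = {
+//       .sType = VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT };
+//   vkGetMicromapBuildSizesEXT(device,
+//       VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR, &build, &sizes);
+//   // ...allocate a VkBuffer of sizes.micromapSize plus scratch memory, then:
+//   VkMicromapCreateInfoEXT create = {
+//       .sType = VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT,
+//       .buffer = micromap_buffer,
+//       .size = sizes.micromapSize,
+//       .type = VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT,
+//   };
+//   VkMicromapEXT micromap;
+//   vkCreateMicromapEXT(device, &create, NULL, &micromap);
+//   build.dstMicromap = micromap;
+//   build.scratchData.deviceAddress = scratch_address;
+//   vkCmdBuildMicromapsEXT(cmd, 1, &build);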
+#define VK_EXT_opacity_micromap 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkMicromapEXT) +#define VK_EXT_OPACITY_MICROMAP_SPEC_VERSION 2 +#define VK_EXT_OPACITY_MICROMAP_EXTENSION_NAME "VK_EXT_opacity_micromap" + +typedef enum VkMicromapTypeEXT { + VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT = 0, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_MICROMAP_TYPE_DISPLACEMENT_MICROMAP_NV = 1000397000, +#endif + VK_MICROMAP_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkMicromapTypeEXT; + +typedef enum VkBuildMicromapModeEXT { + VK_BUILD_MICROMAP_MODE_BUILD_EXT = 0, + VK_BUILD_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBuildMicromapModeEXT; + +typedef enum VkCopyMicromapModeEXT { + VK_COPY_MICROMAP_MODE_CLONE_EXT = 0, + VK_COPY_MICROMAP_MODE_SERIALIZE_EXT = 1, + VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT = 2, + VK_COPY_MICROMAP_MODE_COMPACT_EXT = 3, + VK_COPY_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkCopyMicromapModeEXT; + +typedef enum VkOpacityMicromapFormatEXT { + VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT = 1, + VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT = 2, + VK_OPACITY_MICROMAP_FORMAT_MAX_ENUM_EXT = 0x7FFFFFFF +} VkOpacityMicromapFormatEXT; + +typedef enum VkOpacityMicromapSpecialIndexEXT { + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT = -1, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT = -2, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT = -3, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT = -4, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_MAX_ENUM_EXT = 0x7FFFFFFF +} VkOpacityMicromapSpecialIndexEXT; + +typedef enum VkAccelerationStructureCompatibilityKHR { + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR = 0, + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR = 1, + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureCompatibilityKHR; + +typedef enum VkAccelerationStructureBuildTypeKHR { + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureBuildTypeKHR; + +typedef enum VkBuildMicromapFlagBitsEXT { + VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT = 0x00000001, + VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT = 0x00000002, + VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT = 0x00000004, + VK_BUILD_MICROMAP_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBuildMicromapFlagBitsEXT; +typedef VkFlags VkBuildMicromapFlagsEXT; + +typedef enum VkMicromapCreateFlagBitsEXT { + VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = 0x00000001, + VK_MICROMAP_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkMicromapCreateFlagBitsEXT; +typedef VkFlags VkMicromapCreateFlagsEXT; +typedef struct VkMicromapUsageEXT { + uint32_t count; + uint32_t subdivisionLevel; + uint32_t format; +} VkMicromapUsageEXT; + +typedef union VkDeviceOrHostAddressKHR { + VkDeviceAddress deviceAddress; + void* hostAddress; +} VkDeviceOrHostAddressKHR; + +typedef struct VkMicromapBuildInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapTypeEXT type; + VkBuildMicromapFlagsEXT flags; + VkBuildMicromapModeEXT mode; + VkMicromapEXT dstMicromap; + uint32_t usageCountsCount; + const VkMicromapUsageEXT* pUsageCounts; + const VkMicromapUsageEXT* const* ppUsageCounts; + VkDeviceOrHostAddressConstKHR data; + VkDeviceOrHostAddressKHR scratchData; + VkDeviceOrHostAddressConstKHR triangleArray; + VkDeviceSize triangleArrayStride; +} VkMicromapBuildInfoEXT; + +typedef struct 
VkMicromapCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapCreateFlagsEXT createFlags; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; + VkMicromapTypeEXT type; + VkDeviceAddress deviceAddress; +} VkMicromapCreateInfoEXT; + +typedef struct VkPhysicalDeviceOpacityMicromapFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 micromap; + VkBool32 micromapCaptureReplay; + VkBool32 micromapHostCommands; +} VkPhysicalDeviceOpacityMicromapFeaturesEXT; + +typedef struct VkPhysicalDeviceOpacityMicromapPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxOpacity2StateSubdivisionLevel; + uint32_t maxOpacity4StateSubdivisionLevel; +} VkPhysicalDeviceOpacityMicromapPropertiesEXT; + +typedef struct VkMicromapVersionInfoEXT { + VkStructureType sType; + const void* pNext; + const uint8_t* pVersionData; +} VkMicromapVersionInfoEXT; + +typedef struct VkCopyMicromapToMemoryInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapEXT src; + VkDeviceOrHostAddressKHR dst; + VkCopyMicromapModeEXT mode; +} VkCopyMicromapToMemoryInfoEXT; + +typedef struct VkCopyMemoryToMicromapInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR src; + VkMicromapEXT dst; + VkCopyMicromapModeEXT mode; +} VkCopyMemoryToMicromapInfoEXT; + +typedef struct VkCopyMicromapInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapEXT src; + VkMicromapEXT dst; + VkCopyMicromapModeEXT mode; +} VkCopyMicromapInfoEXT; + +typedef struct VkMicromapBuildSizesInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceSize micromapSize; + VkDeviceSize buildScratchSize; + VkBool32 discardable; +} VkMicromapBuildSizesInfoEXT; + +typedef struct VkAccelerationStructureTrianglesOpacityMicromapEXT { + VkStructureType sType; + void* pNext; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexBuffer; + VkDeviceSize indexStride; + uint32_t baseTriangle; + uint32_t usageCountsCount; + const VkMicromapUsageEXT* pUsageCounts; + const VkMicromapUsageEXT* const* ppUsageCounts; + VkMicromapEXT micromap; +} VkAccelerationStructureTrianglesOpacityMicromapEXT; + +typedef struct VkMicromapTriangleEXT { + uint32_t dataOffset; + uint16_t subdivisionLevel; + uint16_t format; +} VkMicromapTriangleEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMicromapEXT)(VkDevice device, const VkMicromapCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkMicromapEXT* pMicromap); +typedef void (VKAPI_PTR *PFN_vkDestroyMicromapEXT)(VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdBuildMicromapsEXT)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBuildMicromapsEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapToMemoryEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapToMemoryInfoEXT* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToMicromapInfoEXT* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkWriteMicromapsPropertiesEXT)(VkDevice device, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, 
VkQueryType queryType, size_t dataSize, void* pData, size_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapToMemoryEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteMicromapsPropertiesEXT)(VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef void (VKAPI_PTR *PFN_vkGetDeviceMicromapCompatibilityEXT)(VkDevice device, const VkMicromapVersionInfoEXT* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility); +typedef void (VKAPI_PTR *PFN_vkGetMicromapBuildSizesEXT)(VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT* pBuildInfo, VkMicromapBuildSizesInfoEXT* pSizeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMicromapEXT( + VkDevice device, + const VkMicromapCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkMicromapEXT* pMicromap); + +VKAPI_ATTR void VKAPI_CALL vkDestroyMicromapEXT( + VkDevice device, + VkMicromapEXT micromap, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildMicromapsEXT( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkMicromapBuildInfoEXT* pInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBuildMicromapsEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkMicromapBuildInfoEXT* pInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapInfoEXT* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapToMemoryEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapToMemoryInfoEXT* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToMicromapEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToMicromapInfoEXT* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkWriteMicromapsPropertiesEXT( + VkDevice device, + uint32_t micromapCount, + const VkMicromapEXT* pMicromaps, + VkQueryType queryType, + size_t dataSize, + void* pData, + size_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapEXT( + VkCommandBuffer commandBuffer, + const VkCopyMicromapInfoEXT* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapToMemoryEXT( + VkCommandBuffer commandBuffer, + const VkCopyMicromapToMemoryInfoEXT* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToMicromapEXT( + VkCommandBuffer commandBuffer, + const VkCopyMemoryToMicromapInfoEXT* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteMicromapsPropertiesEXT( + VkCommandBuffer commandBuffer, + uint32_t micromapCount, + const VkMicromapEXT* pMicromaps, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceMicromapCompatibilityEXT( + VkDevice device, + const VkMicromapVersionInfoEXT* pVersionInfo, + VkAccelerationStructureCompatibilityKHR* pCompatibility); + +VKAPI_ATTR void VKAPI_CALL vkGetMicromapBuildSizesEXT( + VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkMicromapBuildInfoEXT* pBuildInfo, + VkMicromapBuildSizesInfoEXT* pSizeInfo); +#endif + + +// 
VK_EXT_load_store_op_none is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_load_store_op_none 1 +#define VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION 1 +#define VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_EXT_load_store_op_none" + + +// VK_HUAWEI_cluster_culling_shader is a preprocessor guard. Do not pass it to API calls. +#define VK_HUAWEI_cluster_culling_shader 1 +#define VK_HUAWEI_CLUSTER_CULLING_SHADER_SPEC_VERSION 3 +#define VK_HUAWEI_CLUSTER_CULLING_SHADER_EXTENSION_NAME "VK_HUAWEI_cluster_culling_shader" +typedef struct VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 clustercullingShader; + VkBool32 multiviewClusterCullingShader; +} VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + +typedef struct VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI { + VkStructureType sType; + void* pNext; + uint32_t maxWorkGroupCount[3]; + uint32_t maxWorkGroupSize[3]; + uint32_t maxOutputClusterCount; + VkDeviceSize indirectBufferOffsetAlignment; +} VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + +typedef struct VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 clusterShadingRate; +} VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawClusterHUAWEI)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef void (VKAPI_PTR *PFN_vkCmdDrawClusterIndirectHUAWEI)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawClusterHUAWEI( + VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawClusterIndirectHUAWEI( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset); +#endif + + +// VK_EXT_border_color_swizzle is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_border_color_swizzle 1 +#define VK_EXT_BORDER_COLOR_SWIZZLE_SPEC_VERSION 1 +#define VK_EXT_BORDER_COLOR_SWIZZLE_EXTENSION_NAME "VK_EXT_border_color_swizzle" +typedef struct VkPhysicalDeviceBorderColorSwizzleFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 borderColorSwizzle; + VkBool32 borderColorSwizzleFromImage; +} VkPhysicalDeviceBorderColorSwizzleFeaturesEXT; + +typedef struct VkSamplerBorderColorComponentMappingCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkComponentMapping components; + VkBool32 srgb; +} VkSamplerBorderColorComponentMappingCreateInfoEXT; + + + +// VK_EXT_pageable_device_local_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pageable_device_local_memory 1 +#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION 1 +#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME "VK_EXT_pageable_device_local_memory" +typedef struct VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pageableDeviceLocalMemory; +} VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkSetDeviceMemoryPriorityEXT)(VkDevice device, VkDeviceMemory memory, float priority); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetDeviceMemoryPriorityEXT( + VkDevice device, + VkDeviceMemory memory, + float priority); +#endif + + +// VK_ARM_shader_core_properties is a preprocessor guard. Do not pass it to API calls. 
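+// Editor's note: illustrative sketch, not part of the upstream header. Properties
+// structs like the one declared below are queried by chaining them into
+// VkPhysicalDeviceProperties2 and calling vkGetPhysicalDeviceProperties2.
+//
+//   VkPhysicalDeviceShaderCorePropertiesARM core_props = {
+//       .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM };
+//   VkPhysicalDeviceProperties2 props2 = {
+//       .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
+//       .pNext = &core_props,
+//   };
+//   vkGetPhysicalDeviceProperties2(physical_device, &props2);
+//   // core_props.pixelRate / texelRate / fmaRate are now populated.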
+#define VK_ARM_shader_core_properties 1 +#define VK_ARM_SHADER_CORE_PROPERTIES_SPEC_VERSION 1 +#define VK_ARM_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_ARM_shader_core_properties" +typedef struct VkPhysicalDeviceShaderCorePropertiesARM { + VkStructureType sType; + void* pNext; + uint32_t pixelRate; + uint32_t texelRate; + uint32_t fmaRate; +} VkPhysicalDeviceShaderCorePropertiesARM; + + + +// VK_ARM_scheduling_controls is a preprocessor guard. Do not pass it to API calls. +#define VK_ARM_scheduling_controls 1 +#define VK_ARM_SCHEDULING_CONTROLS_SPEC_VERSION 1 +#define VK_ARM_SCHEDULING_CONTROLS_EXTENSION_NAME "VK_ARM_scheduling_controls" +typedef VkFlags64 VkPhysicalDeviceSchedulingControlsFlagsARM; + +// Flag bits for VkPhysicalDeviceSchedulingControlsFlagBitsARM +typedef VkFlags64 VkPhysicalDeviceSchedulingControlsFlagBitsARM; +static const VkPhysicalDeviceSchedulingControlsFlagBitsARM VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM = 0x00000001ULL; + +typedef struct VkDeviceQueueShaderCoreControlCreateInfoARM { + VkStructureType sType; + void* pNext; + uint32_t shaderCoreCount; +} VkDeviceQueueShaderCoreControlCreateInfoARM; + +typedef struct VkPhysicalDeviceSchedulingControlsFeaturesARM { + VkStructureType sType; + void* pNext; + VkBool32 schedulingControls; +} VkPhysicalDeviceSchedulingControlsFeaturesARM; + +typedef struct VkPhysicalDeviceSchedulingControlsPropertiesARM { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags; +} VkPhysicalDeviceSchedulingControlsPropertiesARM; + + + +// VK_EXT_image_sliced_view_of_3d is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_sliced_view_of_3d 1 +#define VK_EXT_IMAGE_SLICED_VIEW_OF_3D_SPEC_VERSION 1 +#define VK_EXT_IMAGE_SLICED_VIEW_OF_3D_EXTENSION_NAME "VK_EXT_image_sliced_view_of_3d" +#define VK_REMAINING_3D_SLICES_EXT (~0U) +typedef struct VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 imageSlicedViewOf3D; +} VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + +typedef struct VkImageViewSlicedCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t sliceOffset; + uint32_t sliceCount; +} VkImageViewSlicedCreateInfoEXT; + + + +// VK_VALVE_descriptor_set_host_mapping is a preprocessor guard. Do not pass it to API calls. 
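+// Editor's note: illustrative sketch, not part of the upstream header. Assumes
+// `device`, `layout`, and `set` are valid and the descriptorSetHostMapping
+// feature is enabled; descriptors can then be written with plain memcpy.
+//
+//   VkDescriptorSetBindingReferenceVALVE ref = {
+//       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE,
+//       .descriptorSetLayout = layout,
+//       .binding = 0,
+//   };
+//   VkDescriptorSetLayoutHostMappingInfoVALVE mapping = {
+//       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE };
+//   vkGetDescriptorSetLayoutHostMappingInfoVALVE(device, &ref, &mapping);
+//   void* set_memory = NULL;
+//   vkGetDescriptorSetHostMappingVALVE(device, set, &set_memory);
+//   // the binding-0 descriptor lives at (char*)set_memory + mapping.descriptorOffset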
+#define VK_VALVE_descriptor_set_host_mapping 1 +#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_SPEC_VERSION 1 +#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_EXTENSION_NAME "VK_VALVE_descriptor_set_host_mapping" +typedef struct VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE { + VkStructureType sType; + void* pNext; + VkBool32 descriptorSetHostMapping; +} VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + +typedef struct VkDescriptorSetBindingReferenceVALVE { + VkStructureType sType; + const void* pNext; + VkDescriptorSetLayout descriptorSetLayout; + uint32_t binding; +} VkDescriptorSetBindingReferenceVALVE; + +typedef struct VkDescriptorSetLayoutHostMappingInfoVALVE { + VkStructureType sType; + void* pNext; + size_t descriptorOffset; + uint32_t descriptorSize; +} VkDescriptorSetLayoutHostMappingInfoVALVE; + +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE)(VkDevice device, const VkDescriptorSetBindingReferenceVALVE* pBindingReference, VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetHostMappingVALVE)(VkDevice device, VkDescriptorSet descriptorSet, void** ppData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutHostMappingInfoVALVE( + VkDevice device, + const VkDescriptorSetBindingReferenceVALVE* pBindingReference, + VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetHostMappingVALVE( + VkDevice device, + VkDescriptorSet descriptorSet, + void** ppData); +#endif + + +// VK_EXT_depth_clamp_zero_one is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_clamp_zero_one 1 +#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME "VK_EXT_depth_clamp_zero_one" +typedef struct VkPhysicalDeviceDepthClampZeroOneFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClampZeroOne; +} VkPhysicalDeviceDepthClampZeroOneFeaturesEXT; + + + +// VK_EXT_non_seamless_cube_map is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_non_seamless_cube_map 1 +#define VK_EXT_NON_SEAMLESS_CUBE_MAP_SPEC_VERSION 1 +#define VK_EXT_NON_SEAMLESS_CUBE_MAP_EXTENSION_NAME "VK_EXT_non_seamless_cube_map" +typedef struct VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 nonSeamlessCubeMap; +} VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + + + +// VK_ARM_render_pass_striped is a preprocessor guard. Do not pass it to API calls. 
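+// Editor's note: illustrative sketch, not part of the upstream header. Stripe
+// infos are chained into the render pass begin info via the begin-info struct
+// declared below; stripe areas must respect renderPassStripeGranularity from the
+// properties struct. `width` and `stripe_height` are placeholder values.
+//
+//   VkRenderPassStripeInfoARM stripe = {
+//       .sType = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_INFO_ARM,
+//       .stripeArea = { .offset = { 0, 0 }, .extent = { width, stripe_height } },
+//   };
+//   VkRenderPassStripeBeginInfoARM stripes = {
+//       .sType = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_BEGIN_INFO_ARM,
+//       .stripeInfoCount = 1,
+//       .pStripeInfos = &stripe,
+//   };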
+#define VK_ARM_render_pass_striped 1 +#define VK_ARM_RENDER_PASS_STRIPED_SPEC_VERSION 1 +#define VK_ARM_RENDER_PASS_STRIPED_EXTENSION_NAME "VK_ARM_render_pass_striped" +typedef struct VkPhysicalDeviceRenderPassStripedFeaturesARM { + VkStructureType sType; + void* pNext; + VkBool32 renderPassStriped; +} VkPhysicalDeviceRenderPassStripedFeaturesARM; + +typedef struct VkPhysicalDeviceRenderPassStripedPropertiesARM { + VkStructureType sType; + void* pNext; + VkExtent2D renderPassStripeGranularity; + uint32_t maxRenderPassStripes; +} VkPhysicalDeviceRenderPassStripedPropertiesARM; + +typedef struct VkRenderPassStripeInfoARM { + VkStructureType sType; + const void* pNext; + VkRect2D stripeArea; +} VkRenderPassStripeInfoARM; + +typedef struct VkRenderPassStripeBeginInfoARM { + VkStructureType sType; + const void* pNext; + uint32_t stripeInfoCount; + const VkRenderPassStripeInfoARM* pStripeInfos; +} VkRenderPassStripeBeginInfoARM; + +typedef struct VkRenderPassStripeSubmitInfoARM { + VkStructureType sType; + const void* pNext; + uint32_t stripeSemaphoreInfoCount; + const VkSemaphoreSubmitInfo* pStripeSemaphoreInfos; +} VkRenderPassStripeSubmitInfoARM; + + + +// VK_QCOM_fragment_density_map_offset is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_fragment_density_map_offset 1 +#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION 1 +#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME "VK_QCOM_fragment_density_map_offset" +typedef struct VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMapOffset; +} VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + +typedef struct VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM { + VkStructureType sType; + void* pNext; + VkExtent2D fragmentDensityOffsetGranularity; +} VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + +typedef struct VkSubpassFragmentDensityMapOffsetEndInfoQCOM { + VkStructureType sType; + const void* pNext; + uint32_t fragmentDensityOffsetCount; + const VkOffset2D* pFragmentDensityOffsets; +} VkSubpassFragmentDensityMapOffsetEndInfoQCOM; + + + +// VK_NV_copy_memory_indirect is a preprocessor guard. Do not pass it to API calls. 
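+// Editor's note: illustrative sketch, not part of the upstream header. The copy
+// parameters live in a device-address buffer holding the command structs declared
+// below (typically written by an earlier shader dispatch).
+//
+//   // copy_buffer_address points at `count` VkCopyMemoryIndirectCommandNV records
+//   vkCmdCopyMemoryIndirectNV(cmd, copy_buffer_address, count,
+//                             sizeof(VkCopyMemoryIndirectCommandNV));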
+#define VK_NV_copy_memory_indirect 1 +#define VK_NV_COPY_MEMORY_INDIRECT_SPEC_VERSION 1 +#define VK_NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME "VK_NV_copy_memory_indirect" +typedef struct VkCopyMemoryIndirectCommandNV { + VkDeviceAddress srcAddress; + VkDeviceAddress dstAddress; + VkDeviceSize size; +} VkCopyMemoryIndirectCommandNV; + +typedef struct VkCopyMemoryToImageIndirectCommandNV { + VkDeviceAddress srcAddress; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkCopyMemoryToImageIndirectCommandNV; + +typedef struct VkPhysicalDeviceCopyMemoryIndirectFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 indirectCopy; +} VkPhysicalDeviceCopyMemoryIndirectFeaturesNV; + +typedef struct VkPhysicalDeviceCopyMemoryIndirectPropertiesNV { + VkStructureType sType; + void* pNext; + VkQueueFlags supportedQueues; +} VkPhysicalDeviceCopyMemoryIndirectPropertiesNV; + +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryIndirectNV)(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToImageIndirectNV)(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, VkImage dstImage, VkImageLayout dstImageLayout, const VkImageSubresourceLayers* pImageSubresources); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryIndirectNV( + VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToImageIndirectNV( + VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VkImage dstImage, + VkImageLayout dstImageLayout, + const VkImageSubresourceLayers* pImageSubresources); +#endif + + +// VK_NV_memory_decompression is a preprocessor guard. Do not pass it to API calls. 
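+// Editor's note: illustrative sketch, not part of the upstream header. Assumes
+// buffer device addresses for the compressed source and decompressed destination.
+//
+//   VkDecompressMemoryRegionNV region = {
+//       .srcAddress = src_address,
+//       .dstAddress = dst_address,
+//       .compressedSize = compressed_size,
+//       .decompressedSize = decompressed_size,
+//       .decompressionMethod = VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV,
+//   };
+//   vkCmdDecompressMemoryNV(cmd, 1, &region);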
+#define VK_NV_memory_decompression 1 +#define VK_NV_MEMORY_DECOMPRESSION_SPEC_VERSION 1 +#define VK_NV_MEMORY_DECOMPRESSION_EXTENSION_NAME "VK_NV_memory_decompression" + +// Flag bits for VkMemoryDecompressionMethodFlagBitsNV +typedef VkFlags64 VkMemoryDecompressionMethodFlagBitsNV; +static const VkMemoryDecompressionMethodFlagBitsNV VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV = 0x00000001ULL; + +typedef VkFlags64 VkMemoryDecompressionMethodFlagsNV; +typedef struct VkDecompressMemoryRegionNV { + VkDeviceAddress srcAddress; + VkDeviceAddress dstAddress; + VkDeviceSize compressedSize; + VkDeviceSize decompressedSize; + VkMemoryDecompressionMethodFlagsNV decompressionMethod; +} VkDecompressMemoryRegionNV; + +typedef struct VkPhysicalDeviceMemoryDecompressionFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 memoryDecompression; +} VkPhysicalDeviceMemoryDecompressionFeaturesNV; + +typedef struct VkPhysicalDeviceMemoryDecompressionPropertiesNV { + VkStructureType sType; + void* pNext; + VkMemoryDecompressionMethodFlagsNV decompressionMethods; + uint64_t maxDecompressionIndirectCount; +} VkPhysicalDeviceMemoryDecompressionPropertiesNV; + +typedef void (VKAPI_PTR *PFN_vkCmdDecompressMemoryNV)(VkCommandBuffer commandBuffer, uint32_t decompressRegionCount, const VkDecompressMemoryRegionNV* pDecompressMemoryRegions); +typedef void (VKAPI_PTR *PFN_vkCmdDecompressMemoryIndirectCountNV)(VkCommandBuffer commandBuffer, VkDeviceAddress indirectCommandsAddress, VkDeviceAddress indirectCommandsCountAddress, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryNV( + VkCommandBuffer commandBuffer, + uint32_t decompressRegionCount, + const VkDecompressMemoryRegionNV* pDecompressMemoryRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryIndirectCountNV( + VkCommandBuffer commandBuffer, + VkDeviceAddress indirectCommandsAddress, + VkDeviceAddress indirectCommandsCountAddress, + uint32_t stride); +#endif + + +// VK_NV_device_generated_commands_compute is a preprocessor guard. Do not pass it to API calls. 
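+// Editor's note: simplified sketch, not part of the upstream header. A compute
+// pipeline intended for device-generated dispatch first has its indirect-buffer
+// requirements queried from its create info (which needs additional setup not
+// shown here); `compute_create_info` and `pipeline` are placeholders.
+//
+//   VkMemoryRequirements2 reqs = {
+//       .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 };
+//   vkGetPipelineIndirectMemoryRequirementsNV(device, &compute_create_info, &reqs);
+//   // ...later, record the pipeline's address into the indirect buffer:
+//   vkCmdUpdatePipelineIndirectBufferNV(cmd, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline);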
+#define VK_NV_device_generated_commands_compute 1 +#define VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_SPEC_VERSION 2 +#define VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_EXTENSION_NAME "VK_NV_device_generated_commands_compute" +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCompute; + VkBool32 deviceGeneratedComputePipelines; + VkBool32 deviceGeneratedComputeCaptureReplay; +} VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + +typedef struct VkComputePipelineIndirectBufferInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceAddress deviceAddress; + VkDeviceSize size; + VkDeviceAddress pipelineDeviceAddressCaptureReplay; +} VkComputePipelineIndirectBufferInfoNV; + +typedef struct VkPipelineIndirectDeviceAddressInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; +} VkPipelineIndirectDeviceAddressInfoNV; + +typedef struct VkBindPipelineIndirectCommandNV { + VkDeviceAddress pipelineAddress; +} VkBindPipelineIndirectCommandNV; + +typedef void (VKAPI_PTR *PFN_vkGetPipelineIndirectMemoryRequirementsNV)(VkDevice device, const VkComputePipelineCreateInfo* pCreateInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdUpdatePipelineIndirectBufferNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetPipelineIndirectDeviceAddressNV)(VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPipelineIndirectMemoryRequirementsNV( + VkDevice device, + const VkComputePipelineCreateInfo* pCreateInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdUpdatePipelineIndirectBufferNV( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetPipelineIndirectDeviceAddressNV( + VkDevice device, + const VkPipelineIndirectDeviceAddressInfoNV* pInfo); +#endif + + +// VK_NV_linear_color_attachment is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_linear_color_attachment 1 +#define VK_NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION 1 +#define VK_NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME "VK_NV_linear_color_attachment" +typedef struct VkPhysicalDeviceLinearColorAttachmentFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 linearColorAttachment; +} VkPhysicalDeviceLinearColorAttachmentFeaturesNV; + + + +// VK_GOOGLE_surfaceless_query is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_surfaceless_query 1 +#define VK_GOOGLE_SURFACELESS_QUERY_SPEC_VERSION 2 +#define VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME "VK_GOOGLE_surfaceless_query" + + +// VK_EXT_image_compression_control_swapchain is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_compression_control_swapchain 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_SPEC_VERSION 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_EXTENSION_NAME "VK_EXT_image_compression_control_swapchain" +typedef struct VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 imageCompressionControlSwapchain; +} VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + + + +// VK_QCOM_image_processing is a preprocessor guard. Do not pass it to API calls. 
+#define VK_QCOM_image_processing 1 +#define VK_QCOM_IMAGE_PROCESSING_SPEC_VERSION 1 +#define VK_QCOM_IMAGE_PROCESSING_EXTENSION_NAME "VK_QCOM_image_processing" +typedef struct VkImageViewSampleWeightCreateInfoQCOM { + VkStructureType sType; + const void* pNext; + VkOffset2D filterCenter; + VkExtent2D filterSize; + uint32_t numPhases; +} VkImageViewSampleWeightCreateInfoQCOM; + +typedef struct VkPhysicalDeviceImageProcessingFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 textureSampleWeighted; + VkBool32 textureBoxFilter; + VkBool32 textureBlockMatch; +} VkPhysicalDeviceImageProcessingFeaturesQCOM; + +typedef struct VkPhysicalDeviceImageProcessingPropertiesQCOM { + VkStructureType sType; + void* pNext; + uint32_t maxWeightFilterPhases; + VkExtent2D maxWeightFilterDimension; + VkExtent2D maxBlockMatchRegion; + VkExtent2D maxBoxFilterBlockSize; +} VkPhysicalDeviceImageProcessingPropertiesQCOM; + + + +// VK_EXT_nested_command_buffer is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_nested_command_buffer 1 +#define VK_EXT_NESTED_COMMAND_BUFFER_SPEC_VERSION 1 +#define VK_EXT_NESTED_COMMAND_BUFFER_EXTENSION_NAME "VK_EXT_nested_command_buffer" +typedef struct VkPhysicalDeviceNestedCommandBufferFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 nestedCommandBuffer; + VkBool32 nestedCommandBufferRendering; + VkBool32 nestedCommandBufferSimultaneousUse; +} VkPhysicalDeviceNestedCommandBufferFeaturesEXT; + +typedef struct VkPhysicalDeviceNestedCommandBufferPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxCommandBufferNestingLevel; +} VkPhysicalDeviceNestedCommandBufferPropertiesEXT; + + + +// VK_EXT_external_memory_acquire_unmodified is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_external_memory_acquire_unmodified 1 +#define VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXTENSION_NAME "VK_EXT_external_memory_acquire_unmodified" +typedef struct VkExternalMemoryAcquireUnmodifiedEXT { + VkStructureType sType; + const void* pNext; + VkBool32 acquireUnmodifiedMemory; +} VkExternalMemoryAcquireUnmodifiedEXT; + + + +// VK_EXT_extended_dynamic_state3 is a preprocessor guard. Do not pass it to API calls. 
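+// Editor's note: illustrative sketch, not part of the upstream header. Each
+// feature bit declared below gates a matching vkCmdSet* entry point; assumes the
+// bound pipeline enabled the corresponding VK_DYNAMIC_STATE_*_EXT values.
+//
+//   vkCmdSetPolygonModeEXT(cmd, VK_POLYGON_MODE_LINE);
+//   vkCmdSetRasterizationSamplesEXT(cmd, VK_SAMPLE_COUNT_1_BIT);
+//   const VkBool32 blend_enable = VK_TRUE;
+//   vkCmdSetColorBlendEnableEXT(cmd, /*firstAttachment*/ 0, 1, &blend_enable);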
+#define VK_EXT_extended_dynamic_state3 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_SPEC_VERSION 2 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME "VK_EXT_extended_dynamic_state3" +typedef struct VkPhysicalDeviceExtendedDynamicState3FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState3TessellationDomainOrigin; + VkBool32 extendedDynamicState3DepthClampEnable; + VkBool32 extendedDynamicState3PolygonMode; + VkBool32 extendedDynamicState3RasterizationSamples; + VkBool32 extendedDynamicState3SampleMask; + VkBool32 extendedDynamicState3AlphaToCoverageEnable; + VkBool32 extendedDynamicState3AlphaToOneEnable; + VkBool32 extendedDynamicState3LogicOpEnable; + VkBool32 extendedDynamicState3ColorBlendEnable; + VkBool32 extendedDynamicState3ColorBlendEquation; + VkBool32 extendedDynamicState3ColorWriteMask; + VkBool32 extendedDynamicState3RasterizationStream; + VkBool32 extendedDynamicState3ConservativeRasterizationMode; + VkBool32 extendedDynamicState3ExtraPrimitiveOverestimationSize; + VkBool32 extendedDynamicState3DepthClipEnable; + VkBool32 extendedDynamicState3SampleLocationsEnable; + VkBool32 extendedDynamicState3ColorBlendAdvanced; + VkBool32 extendedDynamicState3ProvokingVertexMode; + VkBool32 extendedDynamicState3LineRasterizationMode; + VkBool32 extendedDynamicState3LineStippleEnable; + VkBool32 extendedDynamicState3DepthClipNegativeOneToOne; + VkBool32 extendedDynamicState3ViewportWScalingEnable; + VkBool32 extendedDynamicState3ViewportSwizzle; + VkBool32 extendedDynamicState3CoverageToColorEnable; + VkBool32 extendedDynamicState3CoverageToColorLocation; + VkBool32 extendedDynamicState3CoverageModulationMode; + VkBool32 extendedDynamicState3CoverageModulationTableEnable; + VkBool32 extendedDynamicState3CoverageModulationTable; + VkBool32 extendedDynamicState3CoverageReductionMode; + VkBool32 extendedDynamicState3RepresentativeFragmentTestEnable; + VkBool32 extendedDynamicState3ShadingRateImageEnable; +} VkPhysicalDeviceExtendedDynamicState3FeaturesEXT; + +typedef struct VkPhysicalDeviceExtendedDynamicState3PropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 dynamicPrimitiveTopologyUnrestricted; +} VkPhysicalDeviceExtendedDynamicState3PropertiesEXT; + +typedef struct VkColorBlendEquationEXT { + VkBlendFactor srcColorBlendFactor; + VkBlendFactor dstColorBlendFactor; + VkBlendOp colorBlendOp; + VkBlendFactor srcAlphaBlendFactor; + VkBlendFactor dstAlphaBlendFactor; + VkBlendOp alphaBlendOp; +} VkColorBlendEquationEXT; + +typedef struct VkColorBlendAdvancedEXT { + VkBlendOp advancedBlendOp; + VkBool32 srcPremultiplied; + VkBool32 dstPremultiplied; + VkBlendOverlapEXT blendOverlap; + VkBool32 clampResults; +} VkColorBlendAdvancedEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetTessellationDomainOriginEXT)(VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClampEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClampEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetPolygonModeEXT)(VkCommandBuffer commandBuffer, VkPolygonMode polygonMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationSamplesEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples); +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleMaskEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask* pSampleMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetAlphaToCoverageEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable); +typedef void (VKAPI_PTR 
*PFN_vkCmdSetAlphaToOneEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetLogicOpEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 logicOpEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEnableEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkBool32* pColorBlendEnables); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEquationEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendEquationEXT* pColorBlendEquations); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteMaskEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorComponentFlags* pColorWriteMasks); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationStreamEXT)(VkCommandBuffer commandBuffer, uint32_t rasterizationStream); +typedef void (VKAPI_PTR *PFN_vkCmdSetConservativeRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT)(VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClipEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendAdvancedEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendAdvancedEXT* pColorBlendAdvanced); +typedef void (VKAPI_PTR *PFN_vkCmdSetProvokingVertexModeEXT)(VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipNegativeOneToOneEXT)(VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingEnableNV)(VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportSwizzleNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportSwizzleNV* pViewportSwizzles); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorLocationNV)(VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationModeNV)(VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableNV)(VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, const float* pCoverageModulationTable); +typedef void (VKAPI_PTR *PFN_vkCmdSetShadingRateImageEnableNV)(VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetRepresentativeFragmentTestEnableNV)(VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable); +typedef void (VKAPI_PTR 
*PFN_vkCmdSetCoverageReductionModeNV)(VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetTessellationDomainOriginEXT( + VkCommandBuffer commandBuffer, + VkTessellationDomainOrigin domainOrigin); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClampEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthClampEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPolygonModeEXT( + VkCommandBuffer commandBuffer, + VkPolygonMode polygonMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationSamplesEXT( + VkCommandBuffer commandBuffer, + VkSampleCountFlagBits rasterizationSamples); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleMaskEXT( + VkCommandBuffer commandBuffer, + VkSampleCountFlagBits samples, + const VkSampleMask* pSampleMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToCoverageEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 alphaToCoverageEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToOneEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 alphaToOneEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 logicOpEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEnableEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkBool32* pColorBlendEnables); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEquationEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendEquationEXT* pColorBlendEquations); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteMaskEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorComponentFlags* pColorWriteMasks); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationStreamEXT( + VkCommandBuffer commandBuffer, + uint32_t rasterizationStream); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetConservativeRasterizationModeEXT( + VkCommandBuffer commandBuffer, + VkConservativeRasterizationModeEXT conservativeRasterizationMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetExtraPrimitiveOverestimationSizeEXT( + VkCommandBuffer commandBuffer, + float extraPrimitiveOverestimationSize); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthClipEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 sampleLocationsEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendAdvancedEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendAdvancedEXT* pColorBlendAdvanced); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetProvokingVertexModeEXT( + VkCommandBuffer commandBuffer, + VkProvokingVertexModeEXT provokingVertexMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineRasterizationModeEXT( + VkCommandBuffer commandBuffer, + VkLineRasterizationModeEXT lineRasterizationMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 stippledLineEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipNegativeOneToOneEXT( + VkCommandBuffer commandBuffer, + VkBool32 negativeOneToOne); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 viewportWScalingEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportSwizzleNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportSwizzleNV* 
pViewportSwizzles); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 coverageToColorEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorLocationNV( + VkCommandBuffer commandBuffer, + uint32_t coverageToColorLocation); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationModeNV( + VkCommandBuffer commandBuffer, + VkCoverageModulationModeNV coverageModulationMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 coverageModulationTableEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableNV( + VkCommandBuffer commandBuffer, + uint32_t coverageModulationTableCount, + const float* pCoverageModulationTable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetShadingRateImageEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 shadingRateImageEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRepresentativeFragmentTestEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 representativeFragmentTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageReductionModeNV( + VkCommandBuffer commandBuffer, + VkCoverageReductionModeNV coverageReductionMode); +#endif + + +// VK_EXT_subpass_merge_feedback is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_subpass_merge_feedback 1 +#define VK_EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION 2 +#define VK_EXT_SUBPASS_MERGE_FEEDBACK_EXTENSION_NAME "VK_EXT_subpass_merge_feedback" + +typedef enum VkSubpassMergeStatusEXT { + VK_SUBPASS_MERGE_STATUS_MERGED_EXT = 0, + VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT = 1, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT = 2, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT = 3, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT = 4, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT = 5, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT = 6, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT = 7, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT = 8, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT = 9, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT = 10, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT = 11, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT = 12, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT = 13, + VK_SUBPASS_MERGE_STATUS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSubpassMergeStatusEXT; +typedef struct VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 subpassMergeFeedback; +} VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + +typedef struct VkRenderPassCreationControlEXT { + VkStructureType sType; + const void* pNext; + VkBool32 disallowMerging; +} VkRenderPassCreationControlEXT; + +typedef struct VkRenderPassCreationFeedbackInfoEXT { + uint32_t postMergeSubpassCount; +} VkRenderPassCreationFeedbackInfoEXT; + +typedef struct VkRenderPassCreationFeedbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkRenderPassCreationFeedbackInfoEXT* pRenderPassFeedback; +} VkRenderPassCreationFeedbackCreateInfoEXT; + +typedef struct VkRenderPassSubpassFeedbackInfoEXT { + VkSubpassMergeStatusEXT subpassMergeStatus; + char description[VK_MAX_DESCRIPTION_SIZE]; + uint32_t postMergeIndex; +} VkRenderPassSubpassFeedbackInfoEXT; + +typedef struct VkRenderPassSubpassFeedbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkRenderPassSubpassFeedbackInfoEXT* pSubpassFeedback; +} 
VkRenderPassSubpassFeedbackCreateInfoEXT; + + + +// VK_LUNARG_direct_driver_loading is a preprocessor guard. Do not pass it to API calls. +#define VK_LUNARG_direct_driver_loading 1 +#define VK_LUNARG_DIRECT_DRIVER_LOADING_SPEC_VERSION 1 +#define VK_LUNARG_DIRECT_DRIVER_LOADING_EXTENSION_NAME "VK_LUNARG_direct_driver_loading" + +typedef enum VkDirectDriverLoadingModeLUNARG { + VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG = 0, + VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG = 1, + VK_DIRECT_DRIVER_LOADING_MODE_MAX_ENUM_LUNARG = 0x7FFFFFFF +} VkDirectDriverLoadingModeLUNARG; +typedef VkFlags VkDirectDriverLoadingFlagsLUNARG; +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddrLUNARG)( + VkInstance instance, const char* pName); + +typedef struct VkDirectDriverLoadingInfoLUNARG { + VkStructureType sType; + void* pNext; + VkDirectDriverLoadingFlagsLUNARG flags; + PFN_vkGetInstanceProcAddrLUNARG pfnGetInstanceProcAddr; +} VkDirectDriverLoadingInfoLUNARG; + +typedef struct VkDirectDriverLoadingListLUNARG { + VkStructureType sType; + const void* pNext; + VkDirectDriverLoadingModeLUNARG mode; + uint32_t driverCount; + const VkDirectDriverLoadingInfoLUNARG* pDrivers; +} VkDirectDriverLoadingListLUNARG; + + + +// VK_EXT_shader_module_identifier is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_module_identifier 1 +#define VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT 32U +#define VK_EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION 1 +#define VK_EXT_SHADER_MODULE_IDENTIFIER_EXTENSION_NAME "VK_EXT_shader_module_identifier" +typedef struct VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderModuleIdentifier; +} VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT; + +typedef struct VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT { + VkStructureType sType; + void* pNext; + uint8_t shaderModuleIdentifierAlgorithmUUID[VK_UUID_SIZE]; +} VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT; + +typedef struct VkPipelineShaderStageModuleIdentifierCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t identifierSize; + const uint8_t* pIdentifier; +} VkPipelineShaderStageModuleIdentifierCreateInfoEXT; + +typedef struct VkShaderModuleIdentifierEXT { + VkStructureType sType; + void* pNext; + uint32_t identifierSize; + uint8_t identifier[VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT]; +} VkShaderModuleIdentifierEXT; + +typedef void (VKAPI_PTR *PFN_vkGetShaderModuleIdentifierEXT)(VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT* pIdentifier); +typedef void (VKAPI_PTR *PFN_vkGetShaderModuleCreateInfoIdentifierEXT)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, VkShaderModuleIdentifierEXT* pIdentifier); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleIdentifierEXT( + VkDevice device, + VkShaderModule shaderModule, + VkShaderModuleIdentifierEXT* pIdentifier); + +VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleCreateInfoIdentifierEXT( + VkDevice device, + const VkShaderModuleCreateInfo* pCreateInfo, + VkShaderModuleIdentifierEXT* pIdentifier); +#endif + + +// VK_EXT_rasterization_order_attachment_access is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_rasterization_order_attachment_access 1 +#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1 +#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_EXT_rasterization_order_attachment_access" + + +// VK_NV_optical_flow is a preprocessor guard. 
Do not pass it to API calls. +#define VK_NV_optical_flow 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkOpticalFlowSessionNV) +#define VK_NV_OPTICAL_FLOW_SPEC_VERSION 1 +#define VK_NV_OPTICAL_FLOW_EXTENSION_NAME "VK_NV_optical_flow" + +typedef enum VkOpticalFlowPerformanceLevelNV { + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV = 1, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV = 2, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV = 3, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowPerformanceLevelNV; + +typedef enum VkOpticalFlowSessionBindingPointNV { + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV = 1, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV = 2, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV = 3, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV = 4, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV = 5, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV = 6, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV = 7, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV = 8, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowSessionBindingPointNV; + +typedef enum VkOpticalFlowGridSizeFlagBitsNV { + VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV = 0x00000002, + VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV = 0x00000004, + VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV = 0x00000008, + VK_OPTICAL_FLOW_GRID_SIZE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowGridSizeFlagBitsNV; +typedef VkFlags VkOpticalFlowGridSizeFlagsNV; + +typedef enum VkOpticalFlowUsageFlagBitsNV { + VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV = 0x00000002, + VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV = 0x00000004, + VK_OPTICAL_FLOW_USAGE_COST_BIT_NV = 0x00000008, + VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV = 0x00000010, + VK_OPTICAL_FLOW_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowUsageFlagBitsNV; +typedef VkFlags VkOpticalFlowUsageFlagsNV; + +typedef enum VkOpticalFlowSessionCreateFlagBitsNV { + VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV = 0x00000002, + VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV = 0x00000004, + VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV = 0x00000008, + VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV = 0x00000010, + VK_OPTICAL_FLOW_SESSION_CREATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowSessionCreateFlagBitsNV; +typedef VkFlags VkOpticalFlowSessionCreateFlagsNV; + +typedef enum VkOpticalFlowExecuteFlagBitsNV { + VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_EXECUTE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowExecuteFlagBitsNV; +typedef VkFlags VkOpticalFlowExecuteFlagsNV; +typedef struct VkPhysicalDeviceOpticalFlowFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 opticalFlow; +} VkPhysicalDeviceOpticalFlowFeaturesNV; + +typedef struct VkPhysicalDeviceOpticalFlowPropertiesNV { + VkStructureType sType; + void* pNext; + VkOpticalFlowGridSizeFlagsNV supportedOutputGridSizes; + VkOpticalFlowGridSizeFlagsNV supportedHintGridSizes; + VkBool32 hintSupported; + VkBool32 costSupported; + VkBool32 bidirectionalFlowSupported; + VkBool32 globalFlowSupported; + uint32_t minWidth; + uint32_t minHeight; + 
uint32_t maxWidth; + uint32_t maxHeight; + uint32_t maxNumRegionsOfInterest; +} VkPhysicalDeviceOpticalFlowPropertiesNV; + +typedef struct VkOpticalFlowImageFormatInfoNV { + VkStructureType sType; + const void* pNext; + VkOpticalFlowUsageFlagsNV usage; +} VkOpticalFlowImageFormatInfoNV; + +typedef struct VkOpticalFlowImageFormatPropertiesNV { + VkStructureType sType; + const void* pNext; + VkFormat format; +} VkOpticalFlowImageFormatPropertiesNV; + +typedef struct VkOpticalFlowSessionCreateInfoNV { + VkStructureType sType; + void* pNext; + uint32_t width; + uint32_t height; + VkFormat imageFormat; + VkFormat flowVectorFormat; + VkFormat costFormat; + VkOpticalFlowGridSizeFlagsNV outputGridSize; + VkOpticalFlowGridSizeFlagsNV hintGridSize; + VkOpticalFlowPerformanceLevelNV performanceLevel; + VkOpticalFlowSessionCreateFlagsNV flags; +} VkOpticalFlowSessionCreateInfoNV; + +typedef struct VkOpticalFlowSessionCreatePrivateDataInfoNV { + VkStructureType sType; + void* pNext; + uint32_t id; + uint32_t size; + const void* pPrivateData; +} VkOpticalFlowSessionCreatePrivateDataInfoNV; + +typedef struct VkOpticalFlowExecuteInfoNV { + VkStructureType sType; + void* pNext; + VkOpticalFlowExecuteFlagsNV flags; + uint32_t regionCount; + const VkRect2D* pRegions; +} VkOpticalFlowExecuteInfoNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV)(VkPhysicalDevice physicalDevice, const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo, uint32_t* pFormatCount, VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateOpticalFlowSessionNV)(VkDevice device, const VkOpticalFlowSessionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkOpticalFlowSessionNV* pSession); +typedef void (VKAPI_PTR *PFN_vkDestroyOpticalFlowSessionNV)(VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkBindOpticalFlowSessionImageNV)(VkDevice device, VkOpticalFlowSessionNV session, VkOpticalFlowSessionBindingPointNV bindingPoint, VkImageView view, VkImageLayout layout); +typedef void (VKAPI_PTR *PFN_vkCmdOpticalFlowExecuteNV)(VkCommandBuffer commandBuffer, VkOpticalFlowSessionNV session, const VkOpticalFlowExecuteInfoNV* pExecuteInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + VkPhysicalDevice physicalDevice, + const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo, + uint32_t* pFormatCount, + VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateOpticalFlowSessionNV( + VkDevice device, + const VkOpticalFlowSessionCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkOpticalFlowSessionNV* pSession); + +VKAPI_ATTR void VKAPI_CALL vkDestroyOpticalFlowSessionNV( + VkDevice device, + VkOpticalFlowSessionNV session, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindOpticalFlowSessionImageNV( + VkDevice device, + VkOpticalFlowSessionNV session, + VkOpticalFlowSessionBindingPointNV bindingPoint, + VkImageView view, + VkImageLayout layout); + +VKAPI_ATTR void VKAPI_CALL vkCmdOpticalFlowExecuteNV( + VkCommandBuffer commandBuffer, + VkOpticalFlowSessionNV session, + const VkOpticalFlowExecuteInfoNV* pExecuteInfo); +#endif + + +// VK_EXT_legacy_dithering is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_legacy_dithering 1 +#define VK_EXT_LEGACY_DITHERING_SPEC_VERSION 1 +#define VK_EXT_LEGACY_DITHERING_EXTENSION_NAME "VK_EXT_legacy_dithering" +typedef struct VkPhysicalDeviceLegacyDitheringFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 legacyDithering; +} VkPhysicalDeviceLegacyDitheringFeaturesEXT; + + + +// VK_EXT_pipeline_protected_access is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_protected_access 1 +#define VK_EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME "VK_EXT_pipeline_protected_access" +typedef struct VkPhysicalDevicePipelineProtectedAccessFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pipelineProtectedAccess; +} VkPhysicalDevicePipelineProtectedAccessFeaturesEXT; + + + +// VK_EXT_shader_object is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_object 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderEXT) +#define VK_EXT_SHADER_OBJECT_SPEC_VERSION 1 +#define VK_EXT_SHADER_OBJECT_EXTENSION_NAME "VK_EXT_shader_object" + +typedef enum VkShaderCodeTypeEXT { + VK_SHADER_CODE_TYPE_BINARY_EXT = 0, + VK_SHADER_CODE_TYPE_SPIRV_EXT = 1, + VK_SHADER_CODE_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkShaderCodeTypeEXT; + +typedef enum VkShaderCreateFlagBitsEXT { + VK_SHADER_CREATE_LINK_STAGE_BIT_EXT = 0x00000001, + VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = 0x00000002, + VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT = 0x00000004, + VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT = 0x00000008, + VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT = 0x00000010, + VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT = 0x00000020, + VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00000040, + VK_SHADER_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkShaderCreateFlagBitsEXT; +typedef VkFlags VkShaderCreateFlagsEXT; +typedef struct VkPhysicalDeviceShaderObjectFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderObject; +} VkPhysicalDeviceShaderObjectFeaturesEXT; + +typedef struct VkPhysicalDeviceShaderObjectPropertiesEXT { + VkStructureType sType; + void* pNext; + uint8_t shaderBinaryUUID[VK_UUID_SIZE]; + uint32_t shaderBinaryVersion; +} VkPhysicalDeviceShaderObjectPropertiesEXT; + +typedef struct VkShaderCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderCreateFlagsEXT flags; + VkShaderStageFlagBits stage; + VkShaderStageFlags nextStage; + VkShaderCodeTypeEXT codeType; + size_t codeSize; + const void* pCode; + const char* pName; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; + const VkSpecializationInfo* pSpecializationInfo; +} VkShaderCreateInfoEXT; + +typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfo VkShaderRequiredSubgroupSizeCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateShadersEXT)(VkDevice device, uint32_t createInfoCount, const VkShaderCreateInfoEXT* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkShaderEXT* pShaders); +typedef void (VKAPI_PTR *PFN_vkDestroyShaderEXT)(VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetShaderBinaryDataEXT)(VkDevice device, VkShaderEXT shader, size_t* pDataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdBindShadersEXT)(VkCommandBuffer commandBuffer, uint32_t stageCount, const VkShaderStageFlagBits* pStages, const VkShaderEXT* pShaders); + +#ifndef 
VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateShadersEXT( + VkDevice device, + uint32_t createInfoCount, + const VkShaderCreateInfoEXT* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkShaderEXT* pShaders); + +VKAPI_ATTR void VKAPI_CALL vkDestroyShaderEXT( + VkDevice device, + VkShaderEXT shader, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderBinaryDataEXT( + VkDevice device, + VkShaderEXT shader, + size_t* pDataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindShadersEXT( + VkCommandBuffer commandBuffer, + uint32_t stageCount, + const VkShaderStageFlagBits* pStages, + const VkShaderEXT* pShaders); +#endif + + +// VK_QCOM_tile_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_tile_properties 1 +#define VK_QCOM_TILE_PROPERTIES_SPEC_VERSION 1 +#define VK_QCOM_TILE_PROPERTIES_EXTENSION_NAME "VK_QCOM_tile_properties" +typedef struct VkPhysicalDeviceTilePropertiesFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 tileProperties; +} VkPhysicalDeviceTilePropertiesFeaturesQCOM; + +typedef struct VkTilePropertiesQCOM { + VkStructureType sType; + void* pNext; + VkExtent3D tileSize; + VkExtent2D apronSize; + VkOffset2D origin; +} VkTilePropertiesQCOM; + +typedef VkResult (VKAPI_PTR *PFN_vkGetFramebufferTilePropertiesQCOM)(VkDevice device, VkFramebuffer framebuffer, uint32_t* pPropertiesCount, VkTilePropertiesQCOM* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDynamicRenderingTilePropertiesQCOM)(VkDevice device, const VkRenderingInfo* pRenderingInfo, VkTilePropertiesQCOM* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetFramebufferTilePropertiesQCOM( + VkDevice device, + VkFramebuffer framebuffer, + uint32_t* pPropertiesCount, + VkTilePropertiesQCOM* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDynamicRenderingTilePropertiesQCOM( + VkDevice device, + const VkRenderingInfo* pRenderingInfo, + VkTilePropertiesQCOM* pProperties); +#endif + + +// VK_SEC_amigo_profiling is a preprocessor guard. Do not pass it to API calls. +#define VK_SEC_amigo_profiling 1 +#define VK_SEC_AMIGO_PROFILING_SPEC_VERSION 1 +#define VK_SEC_AMIGO_PROFILING_EXTENSION_NAME "VK_SEC_amigo_profiling" +typedef struct VkPhysicalDeviceAmigoProfilingFeaturesSEC { + VkStructureType sType; + void* pNext; + VkBool32 amigoProfiling; +} VkPhysicalDeviceAmigoProfilingFeaturesSEC; + +typedef struct VkAmigoProfilingSubmitInfoSEC { + VkStructureType sType; + const void* pNext; + uint64_t firstDrawTimestamp; + uint64_t swapBufferTimestamp; +} VkAmigoProfilingSubmitInfoSEC; + + + +// VK_QCOM_multiview_per_view_viewports is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_multiview_per_view_viewports 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_SPEC_VERSION 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_EXTENSION_NAME "VK_QCOM_multiview_per_view_viewports" +typedef struct VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 multiviewPerViewViewports; +} VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + + + +// VK_NV_ray_tracing_invocation_reorder is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_ray_tracing_invocation_reorder 1 +#define VK_NV_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION 1 +#define VK_NV_RAY_TRACING_INVOCATION_REORDER_EXTENSION_NAME "VK_NV_ray_tracing_invocation_reorder" + +typedef enum VkRayTracingInvocationReorderModeNV { + VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV = 0, + VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV = 1, + VK_RAY_TRACING_INVOCATION_REORDER_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkRayTracingInvocationReorderModeNV; +typedef struct VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV { + VkStructureType sType; + void* pNext; + VkRayTracingInvocationReorderModeNV rayTracingInvocationReorderReorderingHint; +} VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV; + +typedef struct VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingInvocationReorder; +} VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV; + + + +// VK_NV_extended_sparse_address_space is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_extended_sparse_address_space 1 +#define VK_NV_EXTENDED_SPARSE_ADDRESS_SPACE_SPEC_VERSION 1 +#define VK_NV_EXTENDED_SPARSE_ADDRESS_SPACE_EXTENSION_NAME "VK_NV_extended_sparse_address_space" +typedef struct VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 extendedSparseAddressSpace; +} VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; + +typedef struct VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV { + VkStructureType sType; + void* pNext; + VkDeviceSize extendedSparseAddressSpaceSize; + VkImageUsageFlags extendedSparseImageUsageFlags; + VkBufferUsageFlags extendedSparseBufferUsageFlags; +} VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV; + + + +// VK_EXT_mutable_descriptor_type is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_mutable_descriptor_type 1 +#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1 +#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_EXT_mutable_descriptor_type" + + +// VK_EXT_layer_settings is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_layer_settings 1 +#define VK_EXT_LAYER_SETTINGS_SPEC_VERSION 2 +#define VK_EXT_LAYER_SETTINGS_EXTENSION_NAME "VK_EXT_layer_settings" + +typedef enum VkLayerSettingTypeEXT { + VK_LAYER_SETTING_TYPE_BOOL32_EXT = 0, + VK_LAYER_SETTING_TYPE_INT32_EXT = 1, + VK_LAYER_SETTING_TYPE_INT64_EXT = 2, + VK_LAYER_SETTING_TYPE_UINT32_EXT = 3, + VK_LAYER_SETTING_TYPE_UINT64_EXT = 4, + VK_LAYER_SETTING_TYPE_FLOAT32_EXT = 5, + VK_LAYER_SETTING_TYPE_FLOAT64_EXT = 6, + VK_LAYER_SETTING_TYPE_STRING_EXT = 7, + VK_LAYER_SETTING_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkLayerSettingTypeEXT; +typedef struct VkLayerSettingEXT { + const char* pLayerName; + const char* pSettingName; + VkLayerSettingTypeEXT type; + uint32_t valueCount; + const void* pValues; +} VkLayerSettingEXT; + +typedef struct VkLayerSettingsCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t settingCount; + const VkLayerSettingEXT* pSettings; +} VkLayerSettingsCreateInfoEXT; + + + +// VK_ARM_shader_core_builtins is a preprocessor guard. Do not pass it to API calls. 
+#define VK_ARM_shader_core_builtins 1 +#define VK_ARM_SHADER_CORE_BUILTINS_SPEC_VERSION 2 +#define VK_ARM_SHADER_CORE_BUILTINS_EXTENSION_NAME "VK_ARM_shader_core_builtins" +typedef struct VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM { + VkStructureType sType; + void* pNext; + VkBool32 shaderCoreBuiltins; +} VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM; + +typedef struct VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM { + VkStructureType sType; + void* pNext; + uint64_t shaderCoreMask; + uint32_t shaderCoreCount; + uint32_t shaderWarpsPerCore; +} VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM; + + + +// VK_EXT_pipeline_library_group_handles is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_library_group_handles 1 +#define VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_EXTENSION_NAME "VK_EXT_pipeline_library_group_handles" +typedef struct VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pipelineLibraryGroupHandles; +} VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + + + +// VK_EXT_dynamic_rendering_unused_attachments is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_dynamic_rendering_unused_attachments 1 +#define VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_SPEC_VERSION 1 +#define VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME "VK_EXT_dynamic_rendering_unused_attachments" +typedef struct VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 dynamicRenderingUnusedAttachments; +} VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + + + +// VK_NV_low_latency2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_low_latency2 1 +#define VK_NV_LOW_LATENCY_2_SPEC_VERSION 2 +#define VK_NV_LOW_LATENCY_2_EXTENSION_NAME "VK_NV_low_latency2" + +typedef enum VkLatencyMarkerNV { + VK_LATENCY_MARKER_SIMULATION_START_NV = 0, + VK_LATENCY_MARKER_SIMULATION_END_NV = 1, + VK_LATENCY_MARKER_RENDERSUBMIT_START_NV = 2, + VK_LATENCY_MARKER_RENDERSUBMIT_END_NV = 3, + VK_LATENCY_MARKER_PRESENT_START_NV = 4, + VK_LATENCY_MARKER_PRESENT_END_NV = 5, + VK_LATENCY_MARKER_INPUT_SAMPLE_NV = 6, + VK_LATENCY_MARKER_TRIGGER_FLASH_NV = 7, + VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_START_NV = 8, + VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_END_NV = 9, + VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_START_NV = 10, + VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_END_NV = 11, + VK_LATENCY_MARKER_MAX_ENUM_NV = 0x7FFFFFFF +} VkLatencyMarkerNV; + +typedef enum VkOutOfBandQueueTypeNV { + VK_OUT_OF_BAND_QUEUE_TYPE_RENDER_NV = 0, + VK_OUT_OF_BAND_QUEUE_TYPE_PRESENT_NV = 1, + VK_OUT_OF_BAND_QUEUE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkOutOfBandQueueTypeNV; +typedef struct VkLatencySleepModeInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 lowLatencyMode; + VkBool32 lowLatencyBoost; + uint32_t minimumIntervalUs; +} VkLatencySleepModeInfoNV; + +typedef struct VkLatencySleepInfoNV { + VkStructureType sType; + const void* pNext; + VkSemaphore signalSemaphore; + uint64_t value; +} VkLatencySleepInfoNV; + +typedef struct VkSetLatencyMarkerInfoNV { + VkStructureType sType; + const void* pNext; + uint64_t presentID; + VkLatencyMarkerNV marker; +} VkSetLatencyMarkerInfoNV; + +typedef struct VkLatencyTimingsFrameReportNV { + VkStructureType sType; + const void* pNext; + uint64_t presentID; + uint64_t inputSampleTimeUs; + uint64_t simStartTimeUs; + uint64_t simEndTimeUs; + uint64_t renderSubmitStartTimeUs; + uint64_t renderSubmitEndTimeUs; + uint64_t presentStartTimeUs; + uint64_t presentEndTimeUs; + uint64_t driverStartTimeUs; + uint64_t driverEndTimeUs; + uint64_t osRenderQueueStartTimeUs; + uint64_t osRenderQueueEndTimeUs; + uint64_t gpuRenderStartTimeUs; + uint64_t gpuRenderEndTimeUs; +} VkLatencyTimingsFrameReportNV; + +typedef struct VkGetLatencyMarkerInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t timingCount; + VkLatencyTimingsFrameReportNV* pTimings; +} VkGetLatencyMarkerInfoNV; + +typedef struct VkLatencySubmissionPresentIdNV { + VkStructureType sType; + const void* pNext; + uint64_t presentID; +} VkLatencySubmissionPresentIdNV; + +typedef struct VkSwapchainLatencyCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 latencyModeEnable; +} VkSwapchainLatencyCreateInfoNV; + +typedef struct VkOutOfBandQueueTypeInfoNV { + VkStructureType sType; + const void* pNext; + VkOutOfBandQueueTypeNV queueType; +} VkOutOfBandQueueTypeInfoNV; + +typedef struct VkLatencySurfaceCapabilitiesNV { + VkStructureType sType; + const void* pNext; + uint32_t presentModeCount; + VkPresentModeKHR* pPresentModes; +} VkLatencySurfaceCapabilitiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkSetLatencySleepModeNV)(VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepModeInfoNV* pSleepModeInfo); +typedef VkResult (VKAPI_PTR *PFN_vkLatencySleepNV)(VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepInfoNV* pSleepInfo); +typedef void (VKAPI_PTR *PFN_vkSetLatencyMarkerNV)(VkDevice device, VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV* pLatencyMarkerInfo); +typedef void (VKAPI_PTR *PFN_vkGetLatencyTimingsNV)(VkDevice device, VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV* pLatencyMarkerInfo); 
+typedef void (VKAPI_PTR *PFN_vkQueueNotifyOutOfBandNV)(VkQueue queue, const VkOutOfBandQueueTypeInfoNV* pQueueTypeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkSetLatencySleepModeNV( + VkDevice device, + VkSwapchainKHR swapchain, + const VkLatencySleepModeInfoNV* pSleepModeInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkLatencySleepNV( + VkDevice device, + VkSwapchainKHR swapchain, + const VkLatencySleepInfoNV* pSleepInfo); + +VKAPI_ATTR void VKAPI_CALL vkSetLatencyMarkerNV( + VkDevice device, + VkSwapchainKHR swapchain, + const VkSetLatencyMarkerInfoNV* pLatencyMarkerInfo); + +VKAPI_ATTR void VKAPI_CALL vkGetLatencyTimingsNV( + VkDevice device, + VkSwapchainKHR swapchain, + VkGetLatencyMarkerInfoNV* pLatencyMarkerInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueNotifyOutOfBandNV( + VkQueue queue, + const VkOutOfBandQueueTypeInfoNV* pQueueTypeInfo); +#endif + + +// VK_QCOM_multiview_per_view_render_areas is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_multiview_per_view_render_areas 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_SPEC_VERSION 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_EXTENSION_NAME "VK_QCOM_multiview_per_view_render_areas" +typedef struct VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 multiviewPerViewRenderAreas; +} VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + +typedef struct VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM { + VkStructureType sType; + const void* pNext; + uint32_t perViewRenderAreaCount; + const VkRect2D* pPerViewRenderAreas; +} VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + + + +// VK_NV_per_stage_descriptor_set is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_per_stage_descriptor_set 1 +#define VK_NV_PER_STAGE_DESCRIPTOR_SET_SPEC_VERSION 1 +#define VK_NV_PER_STAGE_DESCRIPTOR_SET_EXTENSION_NAME "VK_NV_per_stage_descriptor_set" +typedef struct VkPhysicalDevicePerStageDescriptorSetFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 perStageDescriptorSet; + VkBool32 dynamicPipelineLayout; +} VkPhysicalDevicePerStageDescriptorSetFeaturesNV; + + + +// VK_QCOM_image_processing2 is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_image_processing2 1 +#define VK_QCOM_IMAGE_PROCESSING_2_SPEC_VERSION 1 +#define VK_QCOM_IMAGE_PROCESSING_2_EXTENSION_NAME "VK_QCOM_image_processing2" + +typedef enum VkBlockMatchWindowCompareModeQCOM { + VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MIN_QCOM = 0, + VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MAX_QCOM = 1, + VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MAX_ENUM_QCOM = 0x7FFFFFFF +} VkBlockMatchWindowCompareModeQCOM; +typedef struct VkPhysicalDeviceImageProcessing2FeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 textureBlockMatch2; +} VkPhysicalDeviceImageProcessing2FeaturesQCOM; + +typedef struct VkPhysicalDeviceImageProcessing2PropertiesQCOM { + VkStructureType sType; + void* pNext; + VkExtent2D maxBlockMatchWindow; +} VkPhysicalDeviceImageProcessing2PropertiesQCOM; + +typedef struct VkSamplerBlockMatchWindowCreateInfoQCOM { + VkStructureType sType; + const void* pNext; + VkExtent2D windowExtent; + VkBlockMatchWindowCompareModeQCOM windowCompareMode; +} VkSamplerBlockMatchWindowCreateInfoQCOM; + + + +// VK_QCOM_filter_cubic_weights is a preprocessor guard. Do not pass it to API calls. 
+#define VK_QCOM_filter_cubic_weights 1 +#define VK_QCOM_FILTER_CUBIC_WEIGHTS_SPEC_VERSION 1 +#define VK_QCOM_FILTER_CUBIC_WEIGHTS_EXTENSION_NAME "VK_QCOM_filter_cubic_weights" + +typedef enum VkCubicFilterWeightsQCOM { + VK_CUBIC_FILTER_WEIGHTS_CATMULL_ROM_QCOM = 0, + VK_CUBIC_FILTER_WEIGHTS_ZERO_TANGENT_CARDINAL_QCOM = 1, + VK_CUBIC_FILTER_WEIGHTS_B_SPLINE_QCOM = 2, + VK_CUBIC_FILTER_WEIGHTS_MITCHELL_NETRAVALI_QCOM = 3, + VK_CUBIC_FILTER_WEIGHTS_MAX_ENUM_QCOM = 0x7FFFFFFF +} VkCubicFilterWeightsQCOM; +typedef struct VkPhysicalDeviceCubicWeightsFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 selectableCubicWeights; +} VkPhysicalDeviceCubicWeightsFeaturesQCOM; + +typedef struct VkSamplerCubicWeightsCreateInfoQCOM { + VkStructureType sType; + const void* pNext; + VkCubicFilterWeightsQCOM cubicWeights; +} VkSamplerCubicWeightsCreateInfoQCOM; + +typedef struct VkBlitImageCubicWeightsInfoQCOM { + VkStructureType sType; + const void* pNext; + VkCubicFilterWeightsQCOM cubicWeights; +} VkBlitImageCubicWeightsInfoQCOM; + + + +// VK_QCOM_ycbcr_degamma is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_ycbcr_degamma 1 +#define VK_QCOM_YCBCR_DEGAMMA_SPEC_VERSION 1 +#define VK_QCOM_YCBCR_DEGAMMA_EXTENSION_NAME "VK_QCOM_ycbcr_degamma" +typedef struct VkPhysicalDeviceYcbcrDegammaFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 ycbcrDegamma; +} VkPhysicalDeviceYcbcrDegammaFeaturesQCOM; + +typedef struct VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM { + VkStructureType sType; + void* pNext; + VkBool32 enableYDegamma; + VkBool32 enableCbCrDegamma; +} VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; + + + +// VK_QCOM_filter_cubic_clamp is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_filter_cubic_clamp 1 +#define VK_QCOM_FILTER_CUBIC_CLAMP_SPEC_VERSION 1 +#define VK_QCOM_FILTER_CUBIC_CLAMP_EXTENSION_NAME "VK_QCOM_filter_cubic_clamp" +typedef struct VkPhysicalDeviceCubicClampFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 cubicRangeClamp; +} VkPhysicalDeviceCubicClampFeaturesQCOM; + + + +// VK_EXT_attachment_feedback_loop_dynamic_state is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_attachment_feedback_loop_dynamic_state 1 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_SPEC_VERSION 1 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_attachment_feedback_loop_dynamic_state" +typedef struct VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 attachmentFeedbackLoopDynamicState; +} VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT)(VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetAttachmentFeedbackLoopEnableEXT( + VkCommandBuffer commandBuffer, + VkImageAspectFlags aspectMask); +#endif + + +// VK_MSFT_layered_driver is a preprocessor guard. Do not pass it to API calls. 
+#define VK_MSFT_layered_driver 1 +#define VK_MSFT_LAYERED_DRIVER_SPEC_VERSION 1 +#define VK_MSFT_LAYERED_DRIVER_EXTENSION_NAME "VK_MSFT_layered_driver" + +typedef enum VkLayeredDriverUnderlyingApiMSFT { + VK_LAYERED_DRIVER_UNDERLYING_API_NONE_MSFT = 0, + VK_LAYERED_DRIVER_UNDERLYING_API_D3D12_MSFT = 1, + VK_LAYERED_DRIVER_UNDERLYING_API_MAX_ENUM_MSFT = 0x7FFFFFFF +} VkLayeredDriverUnderlyingApiMSFT; +typedef struct VkPhysicalDeviceLayeredDriverPropertiesMSFT { + VkStructureType sType; + void* pNext; + VkLayeredDriverUnderlyingApiMSFT underlyingAPI; +} VkPhysicalDeviceLayeredDriverPropertiesMSFT; + + + +// VK_NV_descriptor_pool_overallocation is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_descriptor_pool_overallocation 1 +#define VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_SPEC_VERSION 1 +#define VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_EXTENSION_NAME "VK_NV_descriptor_pool_overallocation" +typedef struct VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 descriptorPoolOverallocation; +} VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + + + +// VK_KHR_acceleration_structure is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_acceleration_structure 1 +#define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 13 +#define VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME "VK_KHR_acceleration_structure" + +typedef enum VkBuildAccelerationStructureModeKHR { + VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR = 0, + VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR = 1, + VK_BUILD_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkBuildAccelerationStructureModeKHR; + +typedef enum VkAccelerationStructureCreateFlagBitsKHR { + VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = 0x00000001, + VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000008, + VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV = 0x00000004, + VK_ACCELERATION_STRUCTURE_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureCreateFlagBitsKHR; +typedef VkFlags VkAccelerationStructureCreateFlagsKHR; +typedef struct VkAccelerationStructureBuildRangeInfoKHR { + uint32_t primitiveCount; + uint32_t primitiveOffset; + uint32_t firstVertex; + uint32_t transformOffset; +} VkAccelerationStructureBuildRangeInfoKHR; + +typedef struct VkAccelerationStructureGeometryTrianglesDataKHR { + VkStructureType sType; + const void* pNext; + VkFormat vertexFormat; + VkDeviceOrHostAddressConstKHR vertexData; + VkDeviceSize vertexStride; + uint32_t maxVertex; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexData; + VkDeviceOrHostAddressConstKHR transformData; +} VkAccelerationStructureGeometryTrianglesDataKHR; + +typedef struct VkAccelerationStructureGeometryAabbsDataKHR { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR data; + VkDeviceSize stride; +} VkAccelerationStructureGeometryAabbsDataKHR; + +typedef struct VkAccelerationStructureGeometryInstancesDataKHR { + VkStructureType sType; + const void* pNext; + VkBool32 arrayOfPointers; + VkDeviceOrHostAddressConstKHR data; +} VkAccelerationStructureGeometryInstancesDataKHR; + +typedef union VkAccelerationStructureGeometryDataKHR { + VkAccelerationStructureGeometryTrianglesDataKHR triangles; + VkAccelerationStructureGeometryAabbsDataKHR aabbs; + VkAccelerationStructureGeometryInstancesDataKHR instances; +} VkAccelerationStructureGeometryDataKHR; + +typedef struct VkAccelerationStructureGeometryKHR { + VkStructureType 
sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + VkAccelerationStructureGeometryDataKHR geometry; + VkGeometryFlagsKHR flags; +} VkAccelerationStructureGeometryKHR; + +typedef struct VkAccelerationStructureBuildGeometryInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureTypeKHR type; + VkBuildAccelerationStructureFlagsKHR flags; + VkBuildAccelerationStructureModeKHR mode; + VkAccelerationStructureKHR srcAccelerationStructure; + VkAccelerationStructureKHR dstAccelerationStructure; + uint32_t geometryCount; + const VkAccelerationStructureGeometryKHR* pGeometries; + const VkAccelerationStructureGeometryKHR* const* ppGeometries; + VkDeviceOrHostAddressKHR scratchData; +} VkAccelerationStructureBuildGeometryInfoKHR; + +typedef struct VkAccelerationStructureCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureCreateFlagsKHR createFlags; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; + VkAccelerationStructureTypeKHR type; + VkDeviceAddress deviceAddress; +} VkAccelerationStructureCreateInfoKHR; + +typedef struct VkWriteDescriptorSetAccelerationStructureKHR { + VkStructureType sType; + const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureKHR* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureKHR; + +typedef struct VkPhysicalDeviceAccelerationStructureFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 accelerationStructure; + VkBool32 accelerationStructureCaptureReplay; + VkBool32 accelerationStructureIndirectBuild; + VkBool32 accelerationStructureHostCommands; + VkBool32 descriptorBindingAccelerationStructureUpdateAfterBind; +} VkPhysicalDeviceAccelerationStructureFeaturesKHR; + +typedef struct VkPhysicalDeviceAccelerationStructurePropertiesKHR { + VkStructureType sType; + void* pNext; + uint64_t maxGeometryCount; + uint64_t maxInstanceCount; + uint64_t maxPrimitiveCount; + uint32_t maxPerStageDescriptorAccelerationStructures; + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures; + uint32_t maxDescriptorSetAccelerationStructures; + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures; + uint32_t minAccelerationStructureScratchOffsetAlignment; +} VkPhysicalDeviceAccelerationStructurePropertiesKHR; + +typedef struct VkAccelerationStructureDeviceAddressInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR accelerationStructure; +} VkAccelerationStructureDeviceAddressInfoKHR; + +typedef struct VkAccelerationStructureVersionInfoKHR { + VkStructureType sType; + const void* pNext; + const uint8_t* pVersionData; +} VkAccelerationStructureVersionInfoKHR; + +typedef struct VkCopyAccelerationStructureToMemoryInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR src; + VkDeviceOrHostAddressKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyAccelerationStructureToMemoryInfoKHR; + +typedef struct VkCopyMemoryToAccelerationStructureInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR src; + VkAccelerationStructureKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyMemoryToAccelerationStructureInfoKHR; + +typedef struct VkCopyAccelerationStructureInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR src; + VkAccelerationStructureKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyAccelerationStructureInfoKHR; + +typedef struct VkAccelerationStructureBuildSizesInfoKHR { + 
VkStructureType sType; + const void* pNext; + VkDeviceSize accelerationStructureSize; + VkDeviceSize updateScratchSize; + VkDeviceSize buildScratchSize; +} VkAccelerationStructureBuildSizesInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureKHR)(VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureKHR)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresIndirectKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkDeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const* ppMaxPrimitiveCounts); +typedef VkResult (VKAPI_PTR *PFN_vkBuildAccelerationStructuresKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); +typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureToMemoryKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkWriteAccelerationStructuresPropertiesKHR)(VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureToMemoryKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetAccelerationStructureDeviceAddressKHR)(VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef void (VKAPI_PTR *PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)(VkDevice device, const VkAccelerationStructureVersionInfoKHR* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility); +typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureBuildSizesKHR)(VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const 
VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureKHR( + VkDevice device, + const VkAccelerationStructureCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureKHR* pAccelerationStructure); + +VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureKHR( + VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructuresKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructuresIndirectKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkDeviceAddress* pIndirectDeviceAddresses, + const uint32_t* pIndirectStrides, + const uint32_t* const* ppMaxPrimitiveCounts); + +VKAPI_ATTR VkResult VKAPI_CALL vkBuildAccelerationStructuresKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureToMemoryKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToAccelerationStructureKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkWriteAccelerationStructuresPropertiesKHR( + VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void* pData, + size_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureToMemoryKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetAccelerationStructureDeviceAddressKHR( + VkDevice device, + const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesKHR( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceAccelerationStructureCompatibilityKHR( + VkDevice device, + const VkAccelerationStructureVersionInfoKHR* pVersionInfo, + VkAccelerationStructureCompatibilityKHR* pCompatibility); + +VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureBuildSizesKHR( + 
VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, + const uint32_t* pMaxPrimitiveCounts, + VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); +#endif + + +// VK_KHR_ray_tracing_pipeline is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_ray_tracing_pipeline 1 +#define VK_KHR_RAY_TRACING_PIPELINE_SPEC_VERSION 1 +#define VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME "VK_KHR_ray_tracing_pipeline" + +typedef enum VkShaderGroupShaderKHR { + VK_SHADER_GROUP_SHADER_GENERAL_KHR = 0, + VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR = 1, + VK_SHADER_GROUP_SHADER_ANY_HIT_KHR = 2, + VK_SHADER_GROUP_SHADER_INTERSECTION_KHR = 3, + VK_SHADER_GROUP_SHADER_MAX_ENUM_KHR = 0x7FFFFFFF +} VkShaderGroupShaderKHR; +typedef struct VkRayTracingShaderGroupCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkRayTracingShaderGroupTypeKHR type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; + const void* pShaderGroupCaptureReplayHandle; +} VkRayTracingShaderGroupCreateInfoKHR; + +typedef struct VkRayTracingPipelineInterfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxPipelineRayPayloadSize; + uint32_t maxPipelineRayHitAttributeSize; +} VkRayTracingPipelineInterfaceCreateInfoKHR; + +typedef struct VkRayTracingPipelineCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + uint32_t groupCount; + const VkRayTracingShaderGroupCreateInfoKHR* pGroups; + uint32_t maxPipelineRayRecursionDepth; + const VkPipelineLibraryCreateInfoKHR* pLibraryInfo; + const VkRayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface; + const VkPipelineDynamicStateCreateInfo* pDynamicState; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkRayTracingPipelineCreateInfoKHR; + +typedef struct VkPhysicalDeviceRayTracingPipelineFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingPipeline; + VkBool32 rayTracingPipelineShaderGroupHandleCaptureReplay; + VkBool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed; + VkBool32 rayTracingPipelineTraceRaysIndirect; + VkBool32 rayTraversalPrimitiveCulling; +} VkPhysicalDeviceRayTracingPipelineFeaturesKHR; + +typedef struct VkPhysicalDeviceRayTracingPipelinePropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t shaderGroupHandleSize; + uint32_t maxRayRecursionDepth; + uint32_t maxShaderGroupStride; + uint32_t shaderGroupBaseAlignment; + uint32_t shaderGroupHandleCaptureReplaySize; + uint32_t maxRayDispatchInvocationCount; + uint32_t shaderGroupHandleAlignment; + uint32_t maxRayHitAttributeSize; +} VkPhysicalDeviceRayTracingPipelinePropertiesKHR; + +typedef struct VkStridedDeviceAddressRegionKHR { + VkDeviceAddress deviceAddress; + VkDeviceSize stride; + VkDeviceSize size; +} VkStridedDeviceAddressRegionKHR; + +typedef struct VkTraceRaysIndirectCommandKHR { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkTraceRaysIndirectCommandKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t 
depth); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirectKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VkDeviceAddress indirectDeviceAddress); +typedef VkDeviceSize (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupStackSizeKHR)(VkDevice device, VkPipeline pipeline, uint32_t group, VkShaderGroupShaderKHR groupShader); +typedef void (VKAPI_PTR *PFN_vkCmdSetRayTracingPipelineStackSizeKHR)(VkCommandBuffer commandBuffer, uint32_t pipelineStackSize); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysKHR( + VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirectKHR( + VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, + VkDeviceAddress indirectDeviceAddress); + +VKAPI_ATTR VkDeviceSize VKAPI_CALL vkGetRayTracingShaderGroupStackSizeKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t group, + VkShaderGroupShaderKHR groupShader); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRayTracingPipelineStackSizeKHR( + VkCommandBuffer commandBuffer, + uint32_t pipelineStackSize); +#endif + + +// VK_KHR_ray_query is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_ray_query 1 +#define VK_KHR_RAY_QUERY_SPEC_VERSION 1 +#define VK_KHR_RAY_QUERY_EXTENSION_NAME "VK_KHR_ray_query" +typedef struct VkPhysicalDeviceRayQueryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayQuery; +} VkPhysicalDeviceRayQueryFeaturesKHR; + + + +// VK_EXT_mesh_shader is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_mesh_shader 1 +#define VK_EXT_MESH_SHADER_SPEC_VERSION 1 +#define VK_EXT_MESH_SHADER_EXTENSION_NAME "VK_EXT_mesh_shader" +typedef struct VkPhysicalDeviceMeshShaderFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 taskShader; + VkBool32 meshShader; + VkBool32 multiviewMeshShader; + VkBool32 primitiveFragmentShadingRateMeshShader; + VkBool32 meshShaderQueries; +} VkPhysicalDeviceMeshShaderFeaturesEXT; + +typedef struct VkPhysicalDeviceMeshShaderPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxTaskWorkGroupTotalCount; + uint32_t maxTaskWorkGroupCount[3]; + uint32_t maxTaskWorkGroupInvocations; + uint32_t maxTaskWorkGroupSize[3]; + uint32_t maxTaskPayloadSize; + uint32_t maxTaskSharedMemorySize; + uint32_t maxTaskPayloadAndSharedMemorySize; + uint32_t maxMeshWorkGroupTotalCount; + uint32_t maxMeshWorkGroupCount[3]; + uint32_t maxMeshWorkGroupInvocations; + uint32_t maxMeshWorkGroupSize[3]; + uint32_t maxMeshSharedMemorySize; + uint32_t maxMeshPayloadAndSharedMemorySize; + uint32_t maxMeshOutputMemorySize; + uint32_t maxMeshPayloadAndOutputMemorySize; + uint32_t maxMeshOutputComponents; + uint32_t maxMeshOutputVertices; + uint32_t maxMeshOutputPrimitives; + uint32_t maxMeshOutputLayers; + uint32_t maxMeshMultiviewViewCount; + uint32_t meshOutputPerVertexGranularity; + uint32_t meshOutputPerPrimitiveGranularity; + uint32_t maxPreferredTaskWorkGroupInvocations; + uint32_t maxPreferredMeshWorkGroupInvocations; + VkBool32 prefersLocalInvocationVertexOutput; + VkBool32 prefersLocalInvocationPrimitiveOutput; + VkBool32 prefersCompactVertexOutput; + VkBool32 prefersCompactPrimitiveOutput; +} VkPhysicalDeviceMeshShaderPropertiesEXT; + +typedef struct VkDrawMeshTasksIndirectCommandEXT { + uint32_t groupCountX; + uint32_t groupCountY; + uint32_t groupCountZ; +} VkDrawMeshTasksIndirectCommandEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksEXT)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectEXT)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountEXT)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksEXT( + VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectEXT( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountEXT( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_directfb.h b/MacroLibX/third_party/vulkan/vulkan_directfb.h new file mode 100644 index 0000000..1f11a08 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_directfb.h @@ -0,0 +1,55 @@ +#ifndef VULKAN_DIRECTFB_H_ +#define VULKAN_DIRECTFB_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_EXT_directfb_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_directfb_surface 1 +#define VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION 1 +#define VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME "VK_EXT_directfb_surface" +typedef VkFlags VkDirectFBSurfaceCreateFlagsEXT; +typedef struct VkDirectFBSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDirectFBSurfaceCreateFlagsEXT flags; + IDirectFB* dfb; + IDirectFBSurface* surface; +} VkDirectFBSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDirectFBSurfaceEXT)(VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT( + VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB* dfb); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_enums.hpp b/MacroLibX/third_party/vulkan/vulkan_enums.hpp new file mode 100644 index 0000000..42b8bef --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_enums.hpp @@ -0,0 +1,7330 @@ +// Copyright 2015-2023 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+
+#ifndef VULKAN_ENUMS_HPP
+#define VULKAN_ENUMS_HPP
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  template <typename FlagBitsType>
+  struct FlagTraits
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = false;
+  };
+
+  template <typename BitType>
+  class Flags
+  {
+  public:
+    using MaskType = typename std::underlying_type<BitType>::type;
+
+    // constructors
+    VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT : m_mask( 0 ) {}
+
+    VULKAN_HPP_CONSTEXPR Flags( BitType bit ) VULKAN_HPP_NOEXCEPT : m_mask( static_cast<MaskType>( bit ) ) {}
+
+    VULKAN_HPP_CONSTEXPR Flags( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VULKAN_HPP_CONSTEXPR explicit Flags( MaskType flags ) VULKAN_HPP_NOEXCEPT : m_mask( flags ) {}
+
+    // relational operators
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Flags<BitType> const & ) const = default;
+#else
+    VULKAN_HPP_CONSTEXPR bool operator<( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask < rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator<=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask <= rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator>( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask > rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator>=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask >= rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator==( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask == rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator!=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask != rhs.m_mask;
+    }
+#endif
+
+    // logical operator
+    VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return !m_mask;
+    }
+
+    // bitwise operators
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator&( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>( m_mask & rhs.m_mask );
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>( m_mask | rhs.m_mask );
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>( m_mask ^ rhs.m_mask );
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator~() const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>( m_mask ^ FlagTraits<BitType>::allFlags.m_mask );
+    }
+
+    // assignment operators
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator|=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      m_mask |= rhs.m_mask;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator&=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      m_mask &= rhs.m_mask;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator^=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      m_mask ^= rhs.m_mask;
+      return *this;
+    }
+
+    // cast operators
+    explicit VULKAN_HPP_CONSTEXPR operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return !!m_mask;
+    }
+
+    explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask;
+    }
+
+#if defined( VULKAN_HPP_FLAGS_MASK_TYPE_AS_PUBLIC )
+  public:
+#else
+  private:
+#endif
+    MaskType m_mask;
+  };
+
+#if !defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+  // relational operators only needed for pre C++20
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator<( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator>( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator<=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator>=( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator>( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator<( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator>=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator<=( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator==( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator==( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator!=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator!=( bit );
+  }
+#endif
+
+  // bitwise operators
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator&( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator&( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator|( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator^( bit );
+  }
+
+  // bitwise operators on BitType
+  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator&( BitType lhs, BitType rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    return Flags<BitType>( lhs ) & rhs;
+  }
+
+  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( BitType lhs, BitType rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    return Flags<BitType>( lhs ) | rhs;
+  }
+
+  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( BitType lhs, BitType rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    return Flags<BitType>( lhs ) ^ rhs;
+  }
+
+  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator~( BitType bit ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( Flags<BitType>( bit ) );
+  }
+
+  template <typename EnumType, EnumType value>
+  struct CppType
+  {
+  };
+
+  //=============
+  //=== ENUMs ===
+  //=============
+
+  //=== VK_VERSION_1_0 ===
+
+  enum class Result
+  {
+    eSuccess = VK_SUCCESS,
+    eNotReady = VK_NOT_READY,
+    eTimeout = VK_TIMEOUT,
+    eEventSet = VK_EVENT_SET,
+    eEventReset = VK_EVENT_RESET,
+    eIncomplete = VK_INCOMPLETE,
+    eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
+    eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
+    eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
+    eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
+    eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
+    eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
+    eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
+    eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
+    eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
+    eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
+    eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
+    eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
+    eErrorUnknown = VK_ERROR_UNKNOWN,
+    eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY,
+    eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE,
+    eErrorFragmentation = VK_ERROR_FRAGMENTATION,
+    eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
+    ePipelineCompileRequired = VK_PIPELINE_COMPILE_REQUIRED,
+    eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
+    eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
+    eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
+    eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
+    eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
+    eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
+    eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
eErrorImageUsageNotSupportedKHR = VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR, + eErrorVideoPictureLayoutNotSupportedKHR = VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR, + eErrorVideoProfileOperationNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR, + eErrorVideoProfileFormatNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR, + eErrorVideoProfileCodecNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR, + eErrorVideoStdVersionNotSupportedKHR = VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR, + eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR, + eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR, + eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, + eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT, + eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT, + eErrorNotPermittedKHR = VK_ERROR_NOT_PERMITTED_KHR, + eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR, + eThreadIdleKHR = VK_THREAD_IDLE_KHR, + eThreadDoneKHR = VK_THREAD_DONE_KHR, + eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR, + eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR, + ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT, + eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT, + eErrorInvalidVideoStdParametersKHR = VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR, + eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT, + eErrorIncompatibleShaderBinaryEXT = VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT + }; + + enum class StructureType + { + eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO, + eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, + eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, + eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, + eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO, + eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, + eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, + eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, + eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, + eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, + eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, + eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, + eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, + eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, + eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, + eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, + eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, + ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, + ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, + ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, + ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, + ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, + ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, + ePipelineRasterizationStateCreateInfo = 
VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, + ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, + ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO, + ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO, + ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, + eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO, + eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, + ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, + eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, + eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, + eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, + eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, + eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, + eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, + eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, + eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, + eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, + eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, + eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, + eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, + eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, + eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, + eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, + eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER, + eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO, + eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, + ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES, + eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + eMemoryRequirements2 = 
VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO, + ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, + ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES, + eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2, + eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + ePhysicalDeviceExternalFenceInfo = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, + ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, + ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES, + ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES, + ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES, + ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES, + eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, + eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2, + eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2, + eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2, + eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2, + eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2, + eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, + eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO, + ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES, + ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES, + ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, + ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, + eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, + ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES, + ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, + eDescriptorSetVariableDescriptorCountAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, + eDescriptorSetVariableDescriptorCountLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, + ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES, + eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, + ePhysicalDeviceScalarBlockLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES, + eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO, + ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES, + eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO, + ePhysicalDeviceVulkanMemoryModelFeatures = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, + ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, + eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, + eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO, + eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, + ePhysicalDeviceUniformBufferStandardLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, + ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, + ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, + eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT, + eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT, + ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, + ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, + ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES, + eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO, + eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, + eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, + eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, + ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES, + eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, + eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO, + eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, + eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, + ePhysicalDeviceVulkan13Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES, + ePhysicalDeviceVulkan13Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES, + ePipelineCreationFeedbackCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO, + ePhysicalDeviceShaderTerminateInvocationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES, + ePhysicalDeviceToolProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES, + ePhysicalDeviceShaderDemoteToHelperInvocationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES, + ePhysicalDevicePrivateDataFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES, + eDevicePrivateDataCreateInfo = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO, + ePrivateDataSlotCreateInfo = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO, + ePhysicalDevicePipelineCreationCacheControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES, + eMemoryBarrier2 = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2, + eBufferMemoryBarrier2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2, + eImageMemoryBarrier2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2, + eDependencyInfo = VK_STRUCTURE_TYPE_DEPENDENCY_INFO, + eSubmitInfo2 = VK_STRUCTURE_TYPE_SUBMIT_INFO_2, + eSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO, + eCommandBufferSubmitInfo = 
VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO, + ePhysicalDeviceSynchronization2Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES, + ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES, + ePhysicalDeviceImageRobustnessFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES, + eCopyBufferInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2, + eCopyImageInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2, + eCopyBufferToImageInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2, + eCopyImageToBufferInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2, + eBlitImageInfo2 = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2, + eResolveImageInfo2 = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2, + eBufferCopy2 = VK_STRUCTURE_TYPE_BUFFER_COPY_2, + eImageCopy2 = VK_STRUCTURE_TYPE_IMAGE_COPY_2, + eImageBlit2 = VK_STRUCTURE_TYPE_IMAGE_BLIT_2, + eBufferImageCopy2 = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2, + eImageResolve2 = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2, + ePhysicalDeviceSubgroupSizeControlProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES, + ePipelineShaderStageRequiredSubgroupSizeCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO, + ePhysicalDeviceSubgroupSizeControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES, + ePhysicalDeviceInlineUniformBlockFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES, + ePhysicalDeviceInlineUniformBlockProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES, + eWriteDescriptorSetInlineUniformBlock = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK, + eDescriptorPoolInlineUniformBlockCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO, + ePhysicalDeviceTextureCompressionAstcHdrFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES, + eRenderingInfo = VK_STRUCTURE_TYPE_RENDERING_INFO, + eRenderingAttachmentInfo = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO, + ePipelineRenderingCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO, + ePhysicalDeviceDynamicRenderingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES, + eCommandBufferInheritanceRenderingInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO, + ePhysicalDeviceShaderIntegerDotProductFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES, + ePhysicalDeviceShaderIntegerDotProductProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES, + ePhysicalDeviceTexelBufferAlignmentProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES, + eFormatProperties3 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3, + ePhysicalDeviceMaintenance4Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES, + ePhysicalDeviceMaintenance4Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES, + eDeviceBufferMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS, + eDeviceImageMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS, + eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, + ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, + eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, + eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR, + eBindImageMemorySwapchainInfoKHR = 
VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, + eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR, + eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR, + eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, + eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR, + eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR, + eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR, +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT, + ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, + eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT, + eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT, + eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, + eVideoProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR, + eVideoCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR, + eVideoPictureResourceInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR, + eVideoSessionMemoryRequirementsKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR, + eBindVideoSessionMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR, + eVideoSessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR, + eVideoSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoSessionParametersUpdateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR, + eVideoBeginCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR, + eVideoEndCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR, + eVideoCodingControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR, + eVideoReferenceSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR, + eQueueFamilyVideoPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR, + eVideoProfileListInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR, + ePhysicalDeviceVideoFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR, + eVideoFormatPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR, + eQueueFamilyQueryResultStatusPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR, + eVideoDecodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR, + eVideoDecodeCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR, + eVideoDecodeUsageInfoKHR = 
VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR, + eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV, + eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV, + eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV, + ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT, + ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT, + ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT, + eCuModuleCreateInfoNVX = VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX, + eCuFunctionCreateInfoNVX = VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX, + eCuLaunchInfoNVX = VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX, + eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX, + eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX, + eVideoEncodeH264CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_KHR, + eVideoEncodeH264SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoEncodeH264SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR, + eVideoEncodeH264PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PICTURE_INFO_KHR, + eVideoEncodeH264DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_KHR, + eVideoEncodeH264NaluSliceInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_KHR, + eVideoEncodeH264GopRemainingFrameInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_GOP_REMAINING_FRAME_INFO_KHR, + eVideoEncodeH264ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_KHR, + eVideoEncodeH264RateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR, + eVideoEncodeH264RateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_KHR, + eVideoEncodeH264SessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_KHR, + eVideoEncodeH264QualityLevelPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUALITY_LEVEL_PROPERTIES_KHR, + eVideoEncodeH264SessionParametersGetInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR, + eVideoEncodeH264SessionParametersFeedbackInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_FEEDBACK_INFO_KHR, + eVideoEncodeH265CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_KHR, + eVideoEncodeH265SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoEncodeH265SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR, + eVideoEncodeH265PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PICTURE_INFO_KHR, + eVideoEncodeH265DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_KHR, + eVideoEncodeH265NaluSliceSegmentInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR, + eVideoEncodeH265GopRemainingFrameInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_GOP_REMAINING_FRAME_INFO_KHR, + eVideoEncodeH265ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_KHR, + eVideoEncodeH265RateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR, + eVideoEncodeH265RateControlLayerInfoKHR = 
VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_KHR, + eVideoEncodeH265SessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_KHR, + eVideoEncodeH265QualityLevelPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUALITY_LEVEL_PROPERTIES_KHR, + eVideoEncodeH265SessionParametersGetInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR, + eVideoEncodeH265SessionParametersFeedbackInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_FEEDBACK_INFO_KHR, + eVideoDecodeH264CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR, + eVideoDecodeH264PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_KHR, + eVideoDecodeH264ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR, + eVideoDecodeH264SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoDecodeH264SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR, + eVideoDecodeH264DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR, + eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD, + eRenderingInfoKHR = VK_STRUCTURE_TYPE_RENDERING_INFO_KHR, + eRenderingAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR, + ePipelineRenderingCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR, + ePhysicalDeviceDynamicRenderingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR, + eCommandBufferInheritanceRenderingInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR, + eRenderingFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, + eRenderingFragmentDensityMapAttachmentInfoEXT = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT, + eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, + eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV, + eMultiviewPerViewAttributesInfoNVX = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX, +#if defined( VK_USE_PLATFORM_GGP ) + eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP, +#endif /*VK_USE_PLATFORM_GGP*/ + ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV, + eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR, + ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR, + ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR, + eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV, + eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV, + eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR, + ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR, + eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR, + 
eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR, + ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR, + eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR, + ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR, + eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR, + ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR, + eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR, + eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR, + eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR, + eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR, + eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR, + eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR, + eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR, + eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT, +#if defined( VK_USE_PLATFORM_VI_NN ) + eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN, +#endif /*VK_USE_PLATFORM_VI_NN*/ + ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT, + eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT, + ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, + ePipelineRobustnessCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT, + ePhysicalDevicePipelineRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT, + ePhysicalDevicePipelineRobustnessPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT, + ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR, + eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR, + ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR, + eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR, + ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, + eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, + ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR, + eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, + eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR, + eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, + eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR, + eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR, + eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR, + eMemoryFdPropertiesKHR = 
VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR, + eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, + eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, + eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR, + eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR, + eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, + eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, + ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR, + eCommandBufferInheritanceConditionalRenderingInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT, + ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, + eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT, + ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR, + ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR, + eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, + ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV, + eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, + eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT, + eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT, + eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT, + eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, + ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE, + ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX, + ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, + ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT, + ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceConservativeRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT, + ePipelineRasterizationConservativeStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT, + 
ePipelineRasterizationDepthClipStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT, + eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT, + ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR, + eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR, + eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR, + eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, + eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR, + eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, + eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR, + eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR, + eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, + eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, + eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, + ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RELAXED_LINE_RASTERIZATION_FEATURES_IMG, + eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR, + ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, + eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, + eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR, + eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR, + eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR, + ePhysicalDevicePerformanceQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR, + ePhysicalDevicePerformanceQueryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR, + eQueryPoolPerformanceCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR, + ePerformanceQuerySubmitInfoKHR = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR, + eAcquireProfilingLockInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR, + ePerformanceCounterKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR, + ePerformanceCounterDescriptionKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR, + ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR, + eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR, + eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, + ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR, + ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR, + eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR, + eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR, + ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, + 
ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR, + eDisplayProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR, + eDisplayPlaneProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR, + eDisplayModeProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR, + eDisplayPlaneInfo2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR, + eDisplayPlaneCapabilities2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR, +#if defined( VK_USE_PLATFORM_IOS_MVK ) + eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK, +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK, +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR, + eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, + eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT, + eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT, + eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT, + eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT, + eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID, + eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID, + eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID, + eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID, + eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, + eAndroidHardwareBufferFormatProperties2ANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, + eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + ePhysicalDeviceShaderEnqueueFeaturesAMDX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX, + ePhysicalDeviceShaderEnqueuePropertiesAMDX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX, + eExecutionGraphPipelineScratchSizeAMDX = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX, + eExecutionGraphPipelineCreateInfoAMDX = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX, + ePipelineShaderStageNodeCreateInfoAMDX = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT, + ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT, + eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT, + eDescriptorPoolInlineUniformBlockCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT, + 
eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, + eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT, + ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT, + ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT, + eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT, + eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR, + eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR, + eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, + ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT, + ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT, + ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT, + ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV, + eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR, + eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR, + eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR, + eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR, + eAccelerationStructureGeometryInstancesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR, + eAccelerationStructureGeometryTrianglesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR, + eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR, + eAccelerationStructureVersionInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR, + eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR, + eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR, + eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR, + ePhysicalDeviceAccelerationStructureFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR, + ePhysicalDeviceAccelerationStructurePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR, + eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR, + eAccelerationStructureBuildSizesInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR, + ePhysicalDeviceRayTracingPipelineFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR, + ePhysicalDeviceRayTracingPipelinePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR, + eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR, + 
eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR, + eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR, + ePhysicalDeviceRayQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR, + ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV, + ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV, + ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV, + eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR, + eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR, + eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR, + eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR, + ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR, + eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR, + eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR, + eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, + eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT, + ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT, + eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT, + eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT, + eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT, + eDrmFormatModifierPropertiesList2EXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT, + eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT, + eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT, + eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT, + ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, + ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, + eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT, + eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + ePhysicalDevicePortabilitySubsetFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR, + ePhysicalDevicePortabilitySubsetPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV, + ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV, + ePhysicalDeviceShadingRateImagePropertiesNV = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV, + ePipelineViewportCoarseSampleOrderStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV, + eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV, + eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV, + eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV, + eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV, + eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV, + eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV, + eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV, + eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV, + ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV, + eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV, + eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV, + ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV, + ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV, + ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR, + eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR, + ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT, + eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT, + eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, + ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR, + ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, + eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT, + eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT, + ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT, + ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, + ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR, + ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, + eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, + ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD, + eVideoDecodeH265CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR, + eVideoDecodeH265SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoDecodeH265SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR, + eVideoDecodeH265ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR, + eVideoDecodeH265PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR, + 
eVideoDecodeH265DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR, + eDeviceQueueGlobalPriorityCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR, + ePhysicalDeviceGlobalPriorityQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR, + eQueueFamilyGlobalPriorityPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR, + eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, + ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, + ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, + ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_GGP ) + ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP, +#endif /*VK_USE_PLATFORM_GGP*/ + ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT, + ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, + ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR, + ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR, + eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR, + ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, + ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV, + ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV, + ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, + ePhysicalDeviceShaderImageFootprintFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV, + ePipelineViewportExclusiveScissorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV, + ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV, + eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV, + eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV, + ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR, + ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR, + eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR, + eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR, + eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR, + eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR, + ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL, + eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, + eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL, + eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL, + 
ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL, + ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL, + ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL, + ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL, + ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, + ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT, + eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD, + eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + ePhysicalDeviceShaderTerminateInvocationFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR, +#if defined( VK_USE_PLATFORM_METAL_EXT ) + eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT, +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT, + eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, + ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT, + ePhysicalDeviceSubgroupSizeControlPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT, + ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, + ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT, + eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, + ePipelineFragmentShadingRateStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR, + ePhysicalDeviceFragmentShadingRatePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR, + ePhysicalDeviceFragmentShadingRateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR, + ePhysicalDeviceFragmentShadingRateKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR, + ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD, + ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD, + ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT, + ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT, + ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT, + eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT, + eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR, + ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV, + ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR, + eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR, + eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR, + ePhysicalDeviceBufferDeviceAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, + ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT, + eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT, + eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT, + ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT, + eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, + eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT, + ePhysicalDevicePresentWaitFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR, + ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV, + eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV, + ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV, + ePhysicalDeviceCoverageReductionModeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV, + ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV, + eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV, + ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT, + ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT, + ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, + ePhysicalDeviceProvokingVertexFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT, + ePipelineRasterizationProvokingVertexStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT, + ePhysicalDeviceProvokingVertexPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, + eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT, + eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT, + ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR, + eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR, + eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR, + eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR, + eDeviceMemoryOpaqueCaptureAddressInfoKHR = 
VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR, + ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, + ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, + ePhysicalDeviceShaderAtomicFloatFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT, + ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT, + ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, + ePhysicalDeviceExtendedDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT, + ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, + ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, + ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR, + ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, + ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, + ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR, + ePhysicalDeviceHostImageCopyFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT, + ePhysicalDeviceHostImageCopyPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT, + eMemoryToImageCopyEXT = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT, + eImageToMemoryCopyEXT = VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT, + eCopyImageToMemoryInfoEXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT, + eCopyMemoryToImageInfoEXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT, + eHostImageLayoutTransitionInfoEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT, + eCopyImageToImageInfoEXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT, + eSubresourceHostMemcpySizeEXT = VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT, + eHostImageCopyDevicePerformanceQueryEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT, + eMemoryMapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR, + eMemoryUnmapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR, + ePhysicalDeviceShaderAtomicFloat2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT, + eSurfacePresentModeEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT, + eSurfacePresentScalingCapabilitiesEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT, + eSurfacePresentModeCompatibilityEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT, + ePhysicalDeviceSwapchainMaintenance1FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT, + eSwapchainPresentFenceInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT, + eSwapchainPresentModesCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT, + eSwapchainPresentModeInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT, + eSwapchainPresentScalingCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT, + eReleaseSwapchainImagesInfoEXT = VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT, + ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, + ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV, + eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV, + eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV, + eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV, + eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV, + eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV, + eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV, + ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV, + ePhysicalDeviceInheritedViewportScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV, + eCommandBufferInheritanceViewportScissorInfoNV = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV, + ePhysicalDeviceShaderIntegerDotProductFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR, + ePhysicalDeviceShaderIntegerDotProductPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR, + ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, + ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT, + eCommandBufferInheritanceRenderPassTransformInfoQCOM = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM, + eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM, + ePhysicalDeviceDepthBiasControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_BIAS_CONTROL_FEATURES_EXT, + eDepthBiasInfoEXT = VK_STRUCTURE_TYPE_DEPTH_BIAS_INFO_EXT, + eDepthBiasRepresentationInfoEXT = VK_STRUCTURE_TYPE_DEPTH_BIAS_REPRESENTATION_INFO_EXT, + ePhysicalDeviceDeviceMemoryReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT, + eDeviceDeviceMemoryReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT, + eDeviceMemoryReportCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT, + ePhysicalDeviceRobustness2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT, + ePhysicalDeviceRobustness2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT, + eSamplerCustomBorderColorCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT, + ePhysicalDeviceCustomBorderColorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT, + ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT, + ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR, + ePhysicalDevicePresentBarrierFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV, + eSurfaceCapabilitiesPresentBarrierNV = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV, + eSwapchainPresentBarrierCreateInfoNV = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV, + ePresentIdKHR = VK_STRUCTURE_TYPE_PRESENT_ID_KHR, + ePhysicalDevicePresentIdFeaturesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR, + ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT, + eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT, + ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT, + ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT, + eVideoEncodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR, + eVideoEncodeRateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR, + eVideoEncodeRateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR, + eVideoEncodeCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR, + eVideoEncodeUsageInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR, + eQueryPoolVideoEncodeFeedbackCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR, + ePhysicalDeviceVideoEncodeQualityLevelInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR, + eVideoEncodeQualityLevelPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_PROPERTIES_KHR, + eVideoEncodeQualityLevelInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR, + eVideoEncodeSessionParametersGetInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR, + eVideoEncodeSessionParametersFeedbackInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR, + ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV, + eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eCudaModuleCreateInfoNV = VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV, + eCudaFunctionCreateInfoNV = VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV, + eCudaLaunchInfoNV = VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV, + ePhysicalDeviceCudaKernelLaunchFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV, + ePhysicalDeviceCudaKernelLaunchPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eQueryLowLatencySupportNV = VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV, +#if defined( VK_USE_PLATFORM_METAL_EXT ) + eExportMetalObjectCreateInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT, + eExportMetalObjectsInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT, + eExportMetalDeviceInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT, + eExportMetalCommandQueueInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT, + eExportMetalBufferInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT, + eImportMetalBufferInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT, + eExportMetalTextureInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT, + eImportMetalTextureInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT, + eExportMetalIoSurfaceInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT, + eImportMetalIoSurfaceInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT, + eExportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT, + eImportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT, +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + eMemoryBarrier2KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR, + eBufferMemoryBarrier2KHR = 
VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR, + eImageMemoryBarrier2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR, + eDependencyInfoKHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR, + eSubmitInfo2KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR, + eSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR, + eCommandBufferSubmitInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR, + ePhysicalDeviceSynchronization2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR, + eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, + eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV, + ePhysicalDeviceDescriptorBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT, + ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT, + ePhysicalDeviceDescriptorBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT, + eDescriptorAddressInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT, + eDescriptorGetInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT, + eBufferCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eImageCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eImageViewCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eSamplerCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eOpaqueCaptureDescriptorDataCreateInfoEXT = VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT, + eDescriptorBufferBindingInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT, + eDescriptorBufferBindingPushDescriptorBufferHandleEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT, + eAccelerationStructureCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT, + ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT, + eGraphicsPipelineLibraryCreateInfoEXT = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT, + ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD, + ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR, + ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR, + ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR, + ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR, + ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV, + ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV, + ePipelineFragmentShadingRateEnumStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV, + 
eAccelerationStructureGeometryMotionTrianglesDataNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV, + ePhysicalDeviceRayTracingMotionBlurFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV, + eAccelerationStructureMotionInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV, + ePhysicalDeviceMeshShaderFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT, + ePhysicalDeviceMeshShaderPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT, + ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMap2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMap2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT, + eCopyCommandTransformInfoQCOM = VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM, + ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT, + ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR, + eCopyBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR, + eCopyImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR, + eCopyBufferToImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR, + eCopyImageToBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR, + eBlitImageInfo2KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR, + eResolveImageInfo2KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR, + eBufferCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR, + eImageCopy2KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR, + eImageBlit2KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR, + eBufferImageCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR, + eImageResolve2KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR, + ePhysicalDeviceImageCompressionControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT, + eImageCompressionControlEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT, + eSubresourceLayout2EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT, + eImageSubresource2EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT, + eImageCompressionPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT, + ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT, + ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT, + ePhysicalDeviceFaultFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT, + eDeviceFaultCountsEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT, + eDeviceFaultInfoEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT, + ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM, + ePhysicalDeviceRgba10X6FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT, +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE, + eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE, 
+ ePhysicalDeviceVertexInputDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT, + eVertexInputBindingDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT, + eVertexInputAttributeDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT, + ePhysicalDeviceDrmPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT, + ePhysicalDeviceAddressBindingReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT, + eDeviceAddressBindingCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT, + ePhysicalDeviceDepthClipControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT, + ePipelineViewportDepthClipControlCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT, + ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT, + eFormatProperties3KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eImportMemoryZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA, + eMemoryZirconHandlePropertiesFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA, + eMemoryGetZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA, + eImportSemaphoreZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA, + eSemaphoreGetZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA, + eBufferCollectionCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA, + eImportMemoryBufferCollectionFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA, + eBufferCollectionImageCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA, + eBufferCollectionPropertiesFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA, + eBufferConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA, + eBufferCollectionBufferCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA, + eImageConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA, + eImageFormatConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA, + eSysmemColorSpaceFUCHSIA = VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA, + eBufferCollectionConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + eSubpassShadingPipelineCreateInfoHUAWEI = VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI, + ePhysicalDeviceSubpassShadingFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI, + ePhysicalDeviceSubpassShadingPropertiesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI, + ePhysicalDeviceInvocationMaskFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI, + eMemoryGetRemoteAddressInfoNV = VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV, + ePhysicalDeviceExternalMemoryRdmaFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV, + ePipelinePropertiesIdentifierEXT = VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT, + ePhysicalDevicePipelinePropertiesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT, + ePipelineInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_INFO_EXT, + 
ePhysicalDeviceFrameBoundaryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAME_BOUNDARY_FEATURES_EXT, + eFrameBoundaryEXT = VK_STRUCTURE_TYPE_FRAME_BOUNDARY_EXT, + ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT, + eSubpassResolvePerformanceQueryEXT = VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT, + eMultisampledRenderToSingleSampledInfoEXT = VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT, + ePhysicalDeviceExtendedDynamicState2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + eScreenSurfaceCreateInfoQNX = VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX, +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + ePhysicalDeviceColorWriteEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT, + ePipelineColorWriteCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT, + ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT, + ePhysicalDeviceRayTracingMaintenance1FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR, + ePhysicalDeviceGlobalPriorityQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT, + eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT, + ePhysicalDeviceImageViewMinLodFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT, + eImageViewMinLodCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT, + ePhysicalDeviceMultiDrawFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT, + ePhysicalDeviceMultiDrawPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT, + ePhysicalDeviceImage2DViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT, + ePhysicalDeviceShaderTileImageFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT, + ePhysicalDeviceShaderTileImagePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT, + eMicromapBuildInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT, + eMicromapVersionInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT, + eCopyMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT, + eCopyMicromapToMemoryInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT, + eCopyMemoryToMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT, + ePhysicalDeviceOpacityMicromapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT, + ePhysicalDeviceOpacityMicromapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT, + eMicromapCreateInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT, + eMicromapBuildSizesInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT, + eAccelerationStructureTrianglesOpacityMicromapEXT = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + ePhysicalDeviceDisplacementMicromapFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_FEATURES_NV, + ePhysicalDeviceDisplacementMicromapPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV, + eAccelerationStructureTrianglesDisplacementMicromapNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV, 
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI, + ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI, + ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI, + ePhysicalDeviceBorderColorSwizzleFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT, + eSamplerBorderColorComponentMappingCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT, + ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT, + ePhysicalDeviceMaintenance4FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR, + ePhysicalDeviceMaintenance4PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR, + eDeviceBufferMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR, + eDeviceImageMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR, + ePhysicalDeviceShaderCorePropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM, + eDeviceQueueShaderCoreControlCreateInfoARM = VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM, + ePhysicalDeviceSchedulingControlsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM, + ePhysicalDeviceSchedulingControlsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM, + ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT, + eImageViewSlicedCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT, + ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE, + eDescriptorSetBindingReferenceVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE, + eDescriptorSetLayoutHostMappingInfoVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE, + ePhysicalDeviceDepthClampZeroOneFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT, + ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT, + ePhysicalDeviceRenderPassStripedFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM, + ePhysicalDeviceRenderPassStripedPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM, + eRenderPassStripeBeginInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_BEGIN_INFO_ARM, + eRenderPassStripeInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_INFO_ARM, + eRenderPassStripeSubmitInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_SUBMIT_INFO_ARM, + ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM, + ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM, + eSubpassFragmentDensityMapOffsetEndInfoQCOM = VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM, + ePhysicalDeviceCopyMemoryIndirectFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV, + ePhysicalDeviceCopyMemoryIndirectPropertiesNV = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV, + ePhysicalDeviceMemoryDecompressionFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV, + ePhysicalDeviceMemoryDecompressionPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV, + ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV, + eComputePipelineIndirectBufferInfoNV = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV, + ePipelineIndirectDeviceAddressInfoNV = VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV, + ePhysicalDeviceLinearColorAttachmentFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV, + ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT, + ePhysicalDeviceImageProcessingFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM, + ePhysicalDeviceImageProcessingPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM, + eImageViewSampleWeightCreateInfoQCOM = VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM, + ePhysicalDeviceNestedCommandBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT, + ePhysicalDeviceNestedCommandBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT, + eExternalMemoryAcquireUnmodifiedEXT = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT, + ePhysicalDeviceExtendedDynamicState3FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT, + ePhysicalDeviceExtendedDynamicState3PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT, + ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT, + eRenderPassCreationControlEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT, + eRenderPassCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT, + eRenderPassSubpassFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT, + eDirectDriverLoadingInfoLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG, + eDirectDriverLoadingListLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG, + ePhysicalDeviceShaderModuleIdentifierFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT, + ePhysicalDeviceShaderModuleIdentifierPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT, + ePipelineShaderStageModuleIdentifierCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT, + eShaderModuleIdentifierEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT, + ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT, + ePhysicalDeviceOpticalFlowFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV, + ePhysicalDeviceOpticalFlowPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV, + eOpticalFlowImageFormatInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV, + eOpticalFlowImageFormatPropertiesNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV, + eOpticalFlowSessionCreateInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV, + 
eOpticalFlowExecuteInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV, + eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV, + ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT, + ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + ePhysicalDeviceExternalFormatResolveFeaturesANDROID = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID, + ePhysicalDeviceExternalFormatResolvePropertiesANDROID = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID, + eAndroidHardwareBufferFormatResolvePropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + ePhysicalDeviceMaintenance5FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR, + ePhysicalDeviceMaintenance5PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR, + eRenderingAreaInfoKHR = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR, + eDeviceImageSubresourceInfoKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR, + eSubresourceLayout2KHR = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR, + eImageSubresource2KHR = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR, + ePipelineCreateFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR, + eBufferUsageFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR, + ePhysicalDeviceRayTracingPositionFetchFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR, + ePhysicalDeviceShaderObjectFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT, + ePhysicalDeviceShaderObjectPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT, + eShaderCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT, + eShaderRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, + ePhysicalDeviceTilePropertiesFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM, + eTilePropertiesQCOM = VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM, + ePhysicalDeviceAmigoProfilingFeaturesSEC = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC, + eAmigoProfilingSubmitInfoSEC = VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC, + ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM, + ePhysicalDeviceRayTracingInvocationReorderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV, + ePhysicalDeviceRayTracingInvocationReorderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV, + ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV, + ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV, + ePhysicalDeviceMutableDescriptorTypeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT, + eMutableDescriptorTypeCreateInfoEXT = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT, + eLayerSettingsCreateInfoEXT = VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT, + ePhysicalDeviceShaderCoreBuiltinsFeaturesARM = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM, + ePhysicalDeviceShaderCoreBuiltinsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM, + ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT, + ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT, + eLatencySleepModeInfoNV = VK_STRUCTURE_TYPE_LATENCY_SLEEP_MODE_INFO_NV, + eLatencySleepInfoNV = VK_STRUCTURE_TYPE_LATENCY_SLEEP_INFO_NV, + eSetLatencyMarkerInfoNV = VK_STRUCTURE_TYPE_SET_LATENCY_MARKER_INFO_NV, + eGetLatencyMarkerInfoNV = VK_STRUCTURE_TYPE_GET_LATENCY_MARKER_INFO_NV, + eLatencyTimingsFrameReportNV = VK_STRUCTURE_TYPE_LATENCY_TIMINGS_FRAME_REPORT_NV, + eLatencySubmissionPresentIdNV = VK_STRUCTURE_TYPE_LATENCY_SUBMISSION_PRESENT_ID_NV, + eOutOfBandQueueTypeInfoNV = VK_STRUCTURE_TYPE_OUT_OF_BAND_QUEUE_TYPE_INFO_NV, + eSwapchainLatencyCreateInfoNV = VK_STRUCTURE_TYPE_SWAPCHAIN_LATENCY_CREATE_INFO_NV, + eLatencySurfaceCapabilitiesNV = VK_STRUCTURE_TYPE_LATENCY_SURFACE_CAPABILITIES_NV, + ePhysicalDeviceCooperativeMatrixFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR, + eCooperativeMatrixPropertiesKHR = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR, + ePhysicalDeviceCooperativeMatrixPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR, + ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM, + eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM, + ePhysicalDeviceVideoMaintenance1FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR, + eVideoInlineQueryInfoKHR = VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR, + ePhysicalDevicePerStageDescriptorSetFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV, + ePhysicalDeviceImageProcessing2FeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_FEATURES_QCOM, + ePhysicalDeviceImageProcessing2PropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_PROPERTIES_QCOM, + eSamplerBlockMatchWindowCreateInfoQCOM = VK_STRUCTURE_TYPE_SAMPLER_BLOCK_MATCH_WINDOW_CREATE_INFO_QCOM, + eSamplerCubicWeightsCreateInfoQCOM = VK_STRUCTURE_TYPE_SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM, + ePhysicalDeviceCubicWeightsFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_WEIGHTS_FEATURES_QCOM, + eBlitImageCubicWeightsInfoQCOM = VK_STRUCTURE_TYPE_BLIT_IMAGE_CUBIC_WEIGHTS_INFO_QCOM, + ePhysicalDeviceYcbcrDegammaFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_DEGAMMA_FEATURES_QCOM, + eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM, + ePhysicalDeviceCubicClampFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM, + ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT, + ePhysicalDeviceVertexAttributeDivisorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR, + ePipelineVertexInputDivisorStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR, + ePhysicalDeviceVertexAttributeDivisorFeaturesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR,
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+    eScreenBufferPropertiesQNX = VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX,
+    eScreenBufferFormatPropertiesQNX = VK_STRUCTURE_TYPE_SCREEN_BUFFER_FORMAT_PROPERTIES_QNX,
+    eImportScreenBufferInfoQNX = VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX,
+    eExternalFormatQNX = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_QNX,
+    ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX,
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+    ePhysicalDeviceLayeredDriverPropertiesMSFT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT,
+    eCalibratedTimestampInfoKHR = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR,
+    ePhysicalDeviceMaintenance6FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR,
+    ePhysicalDeviceMaintenance6PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR,
+    eBindMemoryStatusKHR = VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS_KHR,
+    eBindDescriptorSetsInfoKHR = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR,
+    ePushConstantsInfoKHR = VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR,
+    ePushDescriptorSetInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR,
+    ePushDescriptorSetWithTemplateInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR,
+    eSetDescriptorBufferOffsetsInfoEXT = VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT,
+    eBindDescriptorBufferEmbeddedSamplersInfoEXT = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT,
+    ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV
+  };
+
+  enum class PipelineCacheHeaderVersion
+  {
+    eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
+  };
+
+  enum class ObjectType
+  {
+    eUnknown = VK_OBJECT_TYPE_UNKNOWN,
+    eInstance = VK_OBJECT_TYPE_INSTANCE,
+    ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE,
+    eDevice = VK_OBJECT_TYPE_DEVICE,
+    eQueue = VK_OBJECT_TYPE_QUEUE,
+    eSemaphore = VK_OBJECT_TYPE_SEMAPHORE,
+    eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER,
+    eFence = VK_OBJECT_TYPE_FENCE,
+    eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY,
+    eBuffer = VK_OBJECT_TYPE_BUFFER,
+    eImage = VK_OBJECT_TYPE_IMAGE,
+    eEvent = VK_OBJECT_TYPE_EVENT,
+    eQueryPool = VK_OBJECT_TYPE_QUERY_POOL,
+    eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW,
+    eImageView = VK_OBJECT_TYPE_IMAGE_VIEW,
+    eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE,
+    ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE,
+    ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT,
+    eRenderPass = VK_OBJECT_TYPE_RENDER_PASS,
+    ePipeline = VK_OBJECT_TYPE_PIPELINE,
+    eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
+    eSampler = VK_OBJECT_TYPE_SAMPLER,
+    eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL,
+    eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET,
+    eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER,
+    eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL,
+    eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
+    eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
+    ePrivateDataSlot = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT,
+    eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR,
+    eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR,
+    eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR,
+    eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR,
+    eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT,
+    eVideoSessionKHR = VK_OBJECT_TYPE_VIDEO_SESSION_KHR,
+    eVideoSessionParametersKHR = VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR,
+    eCuModuleNVX = VK_OBJECT_TYPE_CU_MODULE_NVX,
+    eCuFunctionNVX = VK_OBJECT_TYPE_CU_FUNCTION_NVX,
+    eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR,
+    eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT,
+    eAccelerationStructureKHR = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR,
+    eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR,
+    eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT,
+    eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV,
+    ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL,
+    eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR,
+    eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV,
+    ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eCudaModuleNV = VK_OBJECT_TYPE_CUDA_MODULE_NV,
+    eCudaFunctionNV = VK_OBJECT_TYPE_CUDA_FUNCTION_NV,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eBufferCollectionFUCHSIA = VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA,
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+    eMicromapEXT = VK_OBJECT_TYPE_MICROMAP_EXT,
+    eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV,
+    eShaderEXT = VK_OBJECT_TYPE_SHADER_EXT
+  };
+
+  enum class VendorId
+  {
+    eVIV = VK_VENDOR_ID_VIV,
+    eVSI = VK_VENDOR_ID_VSI,
+    eKazan = VK_VENDOR_ID_KAZAN,
+    eCodeplay = VK_VENDOR_ID_CODEPLAY,
+    eMESA = VK_VENDOR_ID_MESA,
+    ePocl = VK_VENDOR_ID_POCL,
+    eMobileye = VK_VENDOR_ID_MOBILEYE
+  };
+
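Each of these scoped enumerators carries exactly the value of its C counterpart, so conversion in either direction is a plain static_cast. A minimal sketch (editor's illustration, not part of the vendored header), assuming vulkan.hpp is on the include path:

    #include <vulkan/vulkan.hpp>

    // Round-trip between the C++ scoped enum and the underlying C enum.
    VkObjectType to_c(vk::ObjectType t) { return static_cast<VkObjectType>(t); }
    vk::ObjectType from_c(VkObjectType t) { return static_cast<vk::ObjectType>(t); }

    // The mapping is 1:1 by construction.
    static_assert(static_cast<VkObjectType>(vk::ObjectType::eBuffer) == VK_OBJECT_TYPE_BUFFER,
                  "C++ enumerators mirror the C constants");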
+  enum class Format
+  {
+    eUndefined = VK_FORMAT_UNDEFINED,
+    eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8,
+    eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16,
+    eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16,
+    eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16,
+    eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16,
+    eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16,
+    eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16,
+    eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16,
+    eR8Unorm = VK_FORMAT_R8_UNORM,
+    eR8Snorm = VK_FORMAT_R8_SNORM,
+    eR8Uscaled = VK_FORMAT_R8_USCALED,
+    eR8Sscaled = VK_FORMAT_R8_SSCALED,
+    eR8Uint = VK_FORMAT_R8_UINT,
+    eR8Sint = VK_FORMAT_R8_SINT,
+    eR8Srgb = VK_FORMAT_R8_SRGB,
+    eR8G8Unorm = VK_FORMAT_R8G8_UNORM,
+    eR8G8Snorm = VK_FORMAT_R8G8_SNORM,
+    eR8G8Uscaled = VK_FORMAT_R8G8_USCALED,
+    eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED,
+    eR8G8Uint = VK_FORMAT_R8G8_UINT,
+    eR8G8Sint = VK_FORMAT_R8G8_SINT,
+    eR8G8Srgb = VK_FORMAT_R8G8_SRGB,
+    eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM,
+    eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM,
+    eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED,
+    eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED,
+    eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT,
+    eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT,
+    eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB,
+    eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM,
+    eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM,
+    eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED,
+    eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED,
+    eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT,
+    eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT,
+    eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB,
+    eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM,
+    eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM,
+    eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED,
+    eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED,
+    eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT,
+    eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT,
+    eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB,
+    eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM,
+    eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM,
+    eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED,
+    eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED,
+    eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT,
+    eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT,
+    eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB,
+    eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+    eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32,
+    eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32,
+    eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
+    eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32,
+    eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32,
+    eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32,
+    eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32,
+    eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32,
+    eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32,
+    eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
+    eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32,
+    eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32,
+    eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32,
+    eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32,
+    eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32,
+    eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
+    eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32,
+    eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32,
+    eR16Unorm = VK_FORMAT_R16_UNORM,
+    eR16Snorm = VK_FORMAT_R16_SNORM,
+    eR16Uscaled = VK_FORMAT_R16_USCALED,
+    eR16Sscaled = VK_FORMAT_R16_SSCALED,
+    eR16Uint = VK_FORMAT_R16_UINT,
+    eR16Sint = VK_FORMAT_R16_SINT,
+    eR16Sfloat = VK_FORMAT_R16_SFLOAT,
+    eR16G16Unorm = VK_FORMAT_R16G16_UNORM,
+    eR16G16Snorm = VK_FORMAT_R16G16_SNORM,
+    eR16G16Uscaled = VK_FORMAT_R16G16_USCALED,
+    eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED,
+    eR16G16Uint = VK_FORMAT_R16G16_UINT,
+    eR16G16Sint = VK_FORMAT_R16G16_SINT,
+    eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT,
+    eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM,
+    eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM,
+    eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED,
+    eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED,
+    eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT,
+    eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT,
+    eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT,
+    eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM,
+    eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM,
+    eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED,
+    eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED,
+    eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT,
+    eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT,
+    eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT,
+    eR32Uint = VK_FORMAT_R32_UINT,
+    eR32Sint = VK_FORMAT_R32_SINT,
+    eR32Sfloat = VK_FORMAT_R32_SFLOAT,
+    eR32G32Uint = VK_FORMAT_R32G32_UINT,
+    eR32G32Sint = VK_FORMAT_R32G32_SINT,
+    eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT,
+    eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT,
+    eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT,
+    eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT,
+    eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT,
+    eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT,
+    eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT,
+    eR64Uint = VK_FORMAT_R64_UINT,
+    eR64Sint = VK_FORMAT_R64_SINT,
+    eR64Sfloat = VK_FORMAT_R64_SFLOAT,
+    eR64G64Uint = VK_FORMAT_R64G64_UINT,
+    eR64G64Sint = VK_FORMAT_R64G64_SINT,
+    eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT,
+    eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT,
+    eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT,
+    eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT,
+    eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT,
+    eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT,
+    eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT,
+    eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32,
+    eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
+    eD16Unorm = VK_FORMAT_D16_UNORM,
+    eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32,
+    eD32Sfloat = VK_FORMAT_D32_SFLOAT,
+    eS8Uint = VK_FORMAT_S8_UINT,
+    eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT,
+    eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT,
+    eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT,
+    eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK,
+    eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK,
+    eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
+    eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
+    eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK,
+    eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK,
+    eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK,
+    eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK,
+    eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK,
+    eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK,
+    eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK,
+    eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK,
+    eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK,
+    eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK,
+    eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK,
+    eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK,
+    eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
+    eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
+    eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
+    eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
+    eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
+    eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
+    eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK,
+    eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK,
+    eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
+    eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
+    eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
+    eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
+    eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
+    eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
+    eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
+    eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
+    eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
+    eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
+    eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
+    eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
+    eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
+    eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
+    eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
+    eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
+    eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
+    eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
+    eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
+    eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
+    eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
+    eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
+    eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
+    eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
+    eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
+    eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
+    eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
+    eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
+    eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
+    eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
+    eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM,
+    eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM,
+    eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
+    eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
+    eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
+    eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
+    eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
+    eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16,
+    eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
+    eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
+    eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
+    eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
+    eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
+    eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
+    eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
+    eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
+    eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
+    eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16,
+    eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
+    eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
+    eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
+    eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
+    eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
+    eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
+    eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
+    eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
+    eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
+    eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM,
+    eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM,
+    eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
+    eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
+    eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
+    eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
+    eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
+    eG8B8R82Plane444Unorm = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM,
+    eG10X6B10X6R10X62Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16,
+    eG12X4B12X4R12X42Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16,
+    eG16B16R162Plane444Unorm = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM,
+    eA4R4G4B4UnormPack16 = VK_FORMAT_A4R4G4B4_UNORM_PACK16,
+    eA4B4G4R4UnormPack16 = VK_FORMAT_A4B4G4R4_UNORM_PACK16,
+    eAstc4x4SfloatBlock = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK,
+    eAstc5x4SfloatBlock = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK,
+    eAstc5x5SfloatBlock = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK,
+    eAstc6x5SfloatBlock = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK,
+    eAstc6x6SfloatBlock = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK,
+    eAstc8x5SfloatBlock = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK,
+    eAstc8x6SfloatBlock = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK,
+    eAstc8x8SfloatBlock = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK,
+    eAstc10x5SfloatBlock = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK,
+    eAstc10x6SfloatBlock = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK,
+    eAstc10x8SfloatBlock = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK,
+    eAstc10x10SfloatBlock = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK,
+    eAstc12x10SfloatBlock = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK,
+    eAstc12x12SfloatBlock = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK,
+    ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
+    ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
+    ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
+    ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
+    ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
+    ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
+    ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
+    ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
+    eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT,
+    eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT,
+    eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT,
+    eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT,
+    eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT,
+    eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT,
+    eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT,
+    eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT,
+    eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT,
+    eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT,
+    eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT,
+    eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT,
+    eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT,
+    eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT,
+    eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR,
+    eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR,
+    eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR,
+    eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
+    eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR,
+    eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR,
+    eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR,
+    eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR,
+    eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR,
+    eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR,
+    eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR,
+    eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR,
+    eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR,
+    eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR,
+    eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR,
+    eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR,
+    eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR,
+    eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR,
+    eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR,
+    eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR,
+    eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR,
+    eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR,
+    eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR,
+    eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR,
+    eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR,
+    eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR,
+    eG8B8R82Plane444UnormEXT = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT,
+    eG10X6B10X6R10X62Plane444Unorm3Pack16EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT,
+    eG12X4B12X4R12X42Plane444Unorm3Pack16EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT,
+    eG16B16R162Plane444UnormEXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT,
+    eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
+    eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
+    eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV,
+    eA1B5G5R5UnormPack16KHR = VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR,
+    eA8UnormKHR = VK_FORMAT_A8_UNORM_KHR
+  };
+
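As a toy illustration of working with vk::Format (editor's sketch, not part of the vendored header; it covers only a few uncompressed formats and is not a general-purpose helper):

    #include <cstdint>
    #include <vulkan/vulkan.hpp>

    // Texel size in bytes for a handful of common uncompressed formats.
    // Block-compressed formats have no per-texel size, so they return 0 here.
    uint32_t texelSize(vk::Format f)
    {
        switch (f)
        {
            case vk::Format::eR8Unorm:            return 1;
            case vk::Format::eR8G8Unorm:          return 2;
            case vk::Format::eR8G8B8A8Unorm:
            case vk::Format::eB8G8R8A8Srgb:       return 4;
            case vk::Format::eR32G32B32A32Sfloat: return 16;
            default:                              return 0; // not covered here
        }
    }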
+  enum class FormatFeatureFlagBits : VkFormatFeatureFlags
+  {
+    eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
+    eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
+    eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
+    eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
+    eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
+    eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
+    eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
+    eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
+    eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
+    eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
+    eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+    eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+    eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
+    eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+    eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitForceable =
+      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
+    eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT,
+    eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
+    eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
+    eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
+    eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR,
+    eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR,
+    eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR,
+    eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR,
+    eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT,
+    eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR,
+    eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
+    eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
+    eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR =
+      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR,
+    eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
+    eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
+    eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
+    eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    eVideoEncodeInputKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR,
+    eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR
+  };
+
+  using FormatFeatureFlags = Flags<FormatFeatureFlagBits>;
+
+  template <>
+  struct FlagTraits<FormatFeatureFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR FormatFeatureFlags allFlags =
+      FormatFeatureFlagBits::eSampledImage | FormatFeatureFlagBits::eStorageImage | FormatFeatureFlagBits::eStorageImageAtomic |
+      FormatFeatureFlagBits::eUniformTexelBuffer | FormatFeatureFlagBits::eStorageTexelBuffer | FormatFeatureFlagBits::eStorageTexelBufferAtomic |
+      FormatFeatureFlagBits::eVertexBuffer | FormatFeatureFlagBits::eColorAttachment | FormatFeatureFlagBits::eColorAttachmentBlend |
+      FormatFeatureFlagBits::eDepthStencilAttachment | FormatFeatureFlagBits::eBlitSrc | FormatFeatureFlagBits::eBlitDst |
+      FormatFeatureFlagBits::eSampledImageFilterLinear | FormatFeatureFlagBits::eTransferSrc | FormatFeatureFlagBits::eTransferDst |
+      FormatFeatureFlagBits::eMidpointChromaSamples | FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter |
+      FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter |
+      FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit |
+      FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable | FormatFeatureFlagBits::eDisjoint |
+      FormatFeatureFlagBits::eCositedChromaSamples | FormatFeatureFlagBits::eSampledImageFilterMinmax | FormatFeatureFlagBits::eVideoDecodeOutputKHR |
+      FormatFeatureFlagBits::eVideoDecodeDpbKHR | FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR |
+      FormatFeatureFlagBits::eSampledImageFilterCubicEXT | FormatFeatureFlagBits::eFragmentDensityMapEXT |
+      FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR | FormatFeatureFlagBits::eVideoEncodeInputKHR | FormatFeatureFlagBits::eVideoEncodeDpbKHR;
+  };
+
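A typical consumer of FormatFeatureFlagBits is format selection. A minimal sketch (editor's illustration; `pickDepthFormat` and the `gpu` handle are assumptions, not names from the header):

    #include <vulkan/vulkan.hpp>

    // Return the first format that supports optimal-tiling depth/stencil
    // attachments on this device, or eUndefined if none of them do.
    vk::Format pickDepthFormat(vk::PhysicalDevice gpu)
    {
        for (vk::Format f : { vk::Format::eD32Sfloat, vk::Format::eD32SfloatS8Uint, vk::Format::eD24UnormS8Uint })
        {
            vk::FormatProperties props = gpu.getFormatProperties(f);
            if (props.optimalTilingFeatures & vk::FormatFeatureFlagBits::eDepthStencilAttachment)
                return f;
        }
        return vk::Format::eUndefined;
    }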
+  enum class ImageCreateFlagBits : VkImageCreateFlags
+  {
+    eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
+    eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
+    eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
+    eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+    eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
+    eAlias = VK_IMAGE_CREATE_ALIAS_BIT,
+    eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
+    e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
+    eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
+    eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
+    eProtected = VK_IMAGE_CREATE_PROTECTED_BIT,
+    eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT,
+    eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV,
+    eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
+    e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
+    eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR,
+    eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
+    eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT,
+    eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
+    eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR,
+    eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT,
+    eDescriptorBufferCaptureReplayEXT = VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT,
+    eMultisampledRenderToSingleSampledEXT = VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT,
+    e2DViewCompatibleEXT = VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT,
+    eFragmentDensityMapOffsetQCOM = VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM,
+    eVideoProfileIndependentKHR = VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR
+  };
+
+  using ImageCreateFlags = Flags<ImageCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<ImageCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageCreateFlags allFlags =
+      ImageCreateFlagBits::eSparseBinding | ImageCreateFlagBits::eSparseResidency | ImageCreateFlagBits::eSparseAliased | ImageCreateFlagBits::eMutableFormat |
+      ImageCreateFlagBits::eCubeCompatible | ImageCreateFlagBits::eAlias | ImageCreateFlagBits::eSplitInstanceBindRegions |
+      ImageCreateFlagBits::e2DArrayCompatible | ImageCreateFlagBits::eBlockTexelViewCompatible | ImageCreateFlagBits::eExtendedUsage |
+      ImageCreateFlagBits::eProtected | ImageCreateFlagBits::eDisjoint | ImageCreateFlagBits::eCornerSampledNV |
+      ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT | ImageCreateFlagBits::eSubsampledEXT | ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT |
+      ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT | ImageCreateFlagBits::e2DViewCompatibleEXT |
+      ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM | ImageCreateFlagBits::eVideoProfileIndependentKHR;
+  };
+
+  enum class ImageTiling
+  {
+    eOptimal = VK_IMAGE_TILING_OPTIMAL,
+    eLinear = VK_IMAGE_TILING_LINEAR,
+    eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT
+  };
+
+  enum class ImageType
+  {
+    e1D = VK_IMAGE_TYPE_1D,
+    e2D = VK_IMAGE_TYPE_2D,
+    e3D = VK_IMAGE_TYPE_3D
+  };
+
+  enum class ImageUsageFlagBits : VkImageUsageFlags
+  {
+    eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+    eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
+    eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
+    eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+    eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
+    eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
+    eVideoDecodeDstKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR,
+    eVideoDecodeSrcKHR = VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR,
+    eVideoDecodeDpbKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR,
+    eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV,
+    eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    eHostTransferEXT = VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT,
+    eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR,
+    eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR,
+    eVideoEncodeDpbKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR,
+    eAttachmentFeedbackLoopEXT = VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT,
+    eInvocationMaskHUAWEI = VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI,
+    eSampleWeightQCOM = VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM,
+    eSampleBlockMatchQCOM = VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM
+  };
+
+  using ImageUsageFlags = Flags<ImageUsageFlagBits>;
+
+  template <>
+  struct FlagTraits<ImageUsageFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageUsageFlags allFlags =
+      ImageUsageFlagBits::eTransferSrc | ImageUsageFlagBits::eTransferDst | ImageUsageFlagBits::eSampled | ImageUsageFlagBits::eStorage |
+      ImageUsageFlagBits::eColorAttachment | ImageUsageFlagBits::eDepthStencilAttachment | ImageUsageFlagBits::eTransientAttachment |
+      ImageUsageFlagBits::eInputAttachment | ImageUsageFlagBits::eVideoDecodeDstKHR | ImageUsageFlagBits::eVideoDecodeSrcKHR |
+      ImageUsageFlagBits::eVideoDecodeDpbKHR | ImageUsageFlagBits::eFragmentDensityMapEXT | ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR |
+      ImageUsageFlagBits::eHostTransferEXT | ImageUsageFlagBits::eVideoEncodeDstKHR | ImageUsageFlagBits::eVideoEncodeSrcKHR |
+      ImageUsageFlagBits::eVideoEncodeDpbKHR | ImageUsageFlagBits::eAttachmentFeedbackLoopEXT | ImageUsageFlagBits::eInvocationMaskHUAWEI |
+      ImageUsageFlagBits::eSampleWeightQCOM | ImageUsageFlagBits::eSampleBlockMatchQCOM;
+  };
+
+  enum class InstanceCreateFlagBits : VkInstanceCreateFlags
+  {
+    eEnumeratePortabilityKHR = VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR
+  };
+
+  using InstanceCreateFlags = Flags<InstanceCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<InstanceCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR InstanceCreateFlags allFlags = InstanceCreateFlagBits::eEnumeratePortabilityKHR;
+  };
+
+  enum class InternalAllocationType
+  {
+    eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE
+  };
+
+  enum class MemoryHeapFlagBits : VkMemoryHeapFlags
+  {
+    eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
+    eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
+    eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR
+  };
+
+  using MemoryHeapFlags = Flags<MemoryHeapFlagBits>;
+
+  template <>
+  struct FlagTraits<MemoryHeapFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryHeapFlags allFlags = MemoryHeapFlagBits::eDeviceLocal | MemoryHeapFlagBits::eMultiInstance;
+  };
+
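Usage bits combine with operator| into vk::ImageUsageFlags, which feeds straight into an image create info. A minimal sketch (editor's illustration; `makeTextureInfo` is an assumed helper name):

    #include <cstdint>
    #include <vulkan/vulkan.hpp>

    // Describe a 2D sampled color texture that can also be copied into.
    vk::ImageCreateInfo makeTextureInfo(uint32_t width, uint32_t height)
    {
        vk::ImageCreateInfo info{};
        info.imageType     = vk::ImageType::e2D;
        info.format        = vk::Format::eR8G8B8A8Unorm;
        info.extent        = vk::Extent3D{ width, height, 1 };
        info.mipLevels     = 1;
        info.arrayLayers   = 1;
        info.samples       = vk::SampleCountFlagBits::e1;
        info.tiling        = vk::ImageTiling::eOptimal;
        info.usage         = vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst;
        info.initialLayout = vk::ImageLayout::eUndefined;
        return info;
    }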
+  enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags
+  {
+    eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
+    eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
+    eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+    eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
+    eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT,
+    eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT,
+    eDeviceCoherentAMD = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD,
+    eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD,
+    eRdmaCapableNV = VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV
+  };
+
+  using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits>;
+
+  template <>
+  struct FlagTraits<MemoryPropertyFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryPropertyFlags allFlags =
+      MemoryPropertyFlagBits::eDeviceLocal | MemoryPropertyFlagBits::eHostVisible | MemoryPropertyFlagBits::eHostCoherent |
+      MemoryPropertyFlagBits::eHostCached | MemoryPropertyFlagBits::eLazilyAllocated | MemoryPropertyFlagBits::eProtected |
+      MemoryPropertyFlagBits::eDeviceCoherentAMD | MemoryPropertyFlagBits::eDeviceUncachedAMD | MemoryPropertyFlagBits::eRdmaCapableNV;
+  };
+
+  enum class PhysicalDeviceType
+  {
+    eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER,
+    eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
+    eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,
+    eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
+    eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU
+  };
+
+  enum class QueueFlagBits : VkQueueFlags
+  {
+    eGraphics = VK_QUEUE_GRAPHICS_BIT,
+    eCompute = VK_QUEUE_COMPUTE_BIT,
+    eTransfer = VK_QUEUE_TRANSFER_BIT,
+    eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT,
+    eProtected = VK_QUEUE_PROTECTED_BIT,
+    eVideoDecodeKHR = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
+    eVideoEncodeKHR = VK_QUEUE_VIDEO_ENCODE_BIT_KHR,
+    eOpticalFlowNV = VK_QUEUE_OPTICAL_FLOW_BIT_NV
+  };
+
+  using QueueFlags = Flags<QueueFlagBits>;
+
+  template <>
+  struct FlagTraits<QueueFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR QueueFlags allFlags = QueueFlagBits::eGraphics | QueueFlagBits::eCompute | QueueFlagBits::eTransfer |
+                                                               QueueFlagBits::eSparseBinding | QueueFlagBits::eProtected | QueueFlagBits::eVideoDecodeKHR |
+                                                               QueueFlagBits::eVideoEncodeKHR | QueueFlagBits::eOpticalFlowNV;
+  };
+
+  enum class SampleCountFlagBits : VkSampleCountFlags
+  {
+    e1 = VK_SAMPLE_COUNT_1_BIT,
+    e2 = VK_SAMPLE_COUNT_2_BIT,
+    e4 = VK_SAMPLE_COUNT_4_BIT,
+    e8 = VK_SAMPLE_COUNT_8_BIT,
+    e16 = VK_SAMPLE_COUNT_16_BIT,
+    e32 = VK_SAMPLE_COUNT_32_BIT,
+    e64 = VK_SAMPLE_COUNT_64_BIT
+  };
+
+  using SampleCountFlags = Flags<SampleCountFlagBits>;
+
+  template <>
+  struct FlagTraits<SampleCountFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SampleCountFlags allFlags = SampleCountFlagBits::e1 | SampleCountFlagBits::e2 | SampleCountFlagBits::e4 |
+                                                                     SampleCountFlagBits::e8 | SampleCountFlagBits::e16 | SampleCountFlagBits::e32 |
+                                                                     SampleCountFlagBits::e64;
+  };
+
+  enum class SystemAllocationScope
+  {
+    eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
+    eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT,
+    eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE,
+    eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE,
+    eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE
+  };
+
+  enum class DeviceCreateFlagBits : VkDeviceCreateFlags
+  {
+  };
+
+  using DeviceCreateFlags = Flags<DeviceCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<DeviceCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceCreateFlags allFlags = {};
+  };
+
+  enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags
+  {
+    eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
+  };
+
+  using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<DeviceQueueCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceQueueCreateFlags allFlags = DeviceQueueCreateFlagBits::eProtected;
+  };
+
+  enum class PipelineStageFlagBits : VkPipelineStageFlags
+  {
+    eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+    eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
+    eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
+    eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+    eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
+    eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
+    eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
+    eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+    eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
+    eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+    eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+    eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
+    eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
+    eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+    eHost = VK_PIPELINE_STAGE_HOST_BIT,
+    eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
+    eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+    eNone = VK_PIPELINE_STAGE_NONE,
+    eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
+    eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
+    eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
+    eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
+    eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
+    eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
+    eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+    eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
+    eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
+    eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
+    eNoneKHR = VK_PIPELINE_STAGE_NONE_KHR,
+    eTaskShaderEXT = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT,
+    eMeshShaderEXT = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT
+  };
+
+  using PipelineStageFlags = Flags<PipelineStageFlagBits>;
+
+  template <>
+  struct FlagTraits<PipelineStageFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineStageFlags allFlags =
+      PipelineStageFlagBits::eTopOfPipe | PipelineStageFlagBits::eDrawIndirect | PipelineStageFlagBits::eVertexInput | PipelineStageFlagBits::eVertexShader |
+      PipelineStageFlagBits::eTessellationControlShader | PipelineStageFlagBits::eTessellationEvaluationShader | PipelineStageFlagBits::eGeometryShader |
+      PipelineStageFlagBits::eFragmentShader | PipelineStageFlagBits::eEarlyFragmentTests | PipelineStageFlagBits::eLateFragmentTests |
+      PipelineStageFlagBits::eColorAttachmentOutput | PipelineStageFlagBits::eComputeShader | PipelineStageFlagBits::eTransfer |
+      PipelineStageFlagBits::eBottomOfPipe | PipelineStageFlagBits::eHost | PipelineStageFlagBits::eAllGraphics | PipelineStageFlagBits::eAllCommands |
+      PipelineStageFlagBits::eNone | PipelineStageFlagBits::eTransformFeedbackEXT | PipelineStageFlagBits::eConditionalRenderingEXT |
+      PipelineStageFlagBits::eAccelerationStructureBuildKHR | PipelineStageFlagBits::eRayTracingShaderKHR | PipelineStageFlagBits::eFragmentDensityProcessEXT |
+      PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR | PipelineStageFlagBits::eCommandPreprocessNV | PipelineStageFlagBits::eTaskShaderEXT |
+      PipelineStageFlagBits::eMeshShaderEXT;
+  };
+
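MemoryPropertyFlags is what the classic memory-type search tests against. A minimal sketch (editor's illustration; `findMemoryType` is an assumed helper name, and `props` would come from vk::PhysicalDevice::getMemoryProperties()):

    #include <cstdint>
    #include <vulkan/vulkan.hpp>

    // Find a memory type index that is (a) allowed by the resource's
    // memoryTypeBits mask and (b) has every requested property bit set.
    uint32_t findMemoryType(const vk::PhysicalDeviceMemoryProperties& props,
                            uint32_t typeBits, vk::MemoryPropertyFlags wanted)
    {
        for (uint32_t i = 0; i < props.memoryTypeCount; ++i)
            if ((typeBits & (1u << i)) && (props.memoryTypes[i].propertyFlags & wanted) == wanted)
                return i;
        return UINT32_MAX; // no suitable type
    }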
+  enum class MemoryMapFlagBits : VkMemoryMapFlags
+  {
+  };
+
+  using MemoryMapFlags = Flags<MemoryMapFlagBits>;
+
+  template <>
+  struct FlagTraits<MemoryMapFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryMapFlags allFlags = {};
+  };
+
+  enum class ImageAspectFlagBits : VkImageAspectFlags
+  {
+    eColor = VK_IMAGE_ASPECT_COLOR_BIT,
+    eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
+    eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
+    eMetadata = VK_IMAGE_ASPECT_METADATA_BIT,
+    ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT,
+    ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT,
+    ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT,
+    eNone = VK_IMAGE_ASPECT_NONE,
+    ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR,
+    ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR,
+    ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR,
+    eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT,
+    eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT,
+    eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT,
+    eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT,
+    eNoneKHR = VK_IMAGE_ASPECT_NONE_KHR
+  };
+
+  using ImageAspectFlags = Flags<ImageAspectFlagBits>;
+
+  template <>
+  struct FlagTraits<ImageAspectFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageAspectFlags allFlags = ImageAspectFlagBits::eColor | ImageAspectFlagBits::eDepth | ImageAspectFlagBits::eStencil |
+                                                                     ImageAspectFlagBits::eMetadata | ImageAspectFlagBits::ePlane0 |
+                                                                     ImageAspectFlagBits::ePlane1 | ImageAspectFlagBits::ePlane2 | ImageAspectFlagBits::eNone |
+                                                                     ImageAspectFlagBits::eMemoryPlane0EXT | ImageAspectFlagBits::eMemoryPlane1EXT |
+                                                                     ImageAspectFlagBits::eMemoryPlane2EXT | ImageAspectFlagBits::eMemoryPlane3EXT;
+  };
+
+  enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags
+  {
+    eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
+    eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
+    eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
+  };
+
+  using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits>;
+
+  template <>
+  struct FlagTraits<SparseImageFormatFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SparseImageFormatFlags allFlags =
+      SparseImageFormatFlagBits::eSingleMiptail | SparseImageFormatFlagBits::eAlignedMipSize | SparseImageFormatFlagBits::eNonstandardBlockSize;
+  };
+
+  enum class SparseMemoryBindFlagBits : VkSparseMemoryBindFlags
+  {
+    eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
+  };
+
+  using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits>;
+
+  template <>
+  struct FlagTraits<SparseMemoryBindFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SparseMemoryBindFlags allFlags = SparseMemoryBindFlagBits::eMetadata;
+  };
+
+  enum class FenceCreateFlagBits : VkFenceCreateFlags
+  {
+    eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
+  };
+
+  using FenceCreateFlags = Flags<FenceCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<FenceCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR FenceCreateFlags allFlags = FenceCreateFlagBits::eSignaled;
+  };
+
+  enum class SemaphoreCreateFlagBits : VkSemaphoreCreateFlags
+  {
+  };
+
+  using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<SemaphoreCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SemaphoreCreateFlags allFlags = {};
+  };
+
+  enum class EventCreateFlagBits : VkEventCreateFlags
+  {
+    eDeviceOnly = VK_EVENT_CREATE_DEVICE_ONLY_BIT,
+    eDeviceOnlyKHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR
+  };
+
+  using EventCreateFlags = Flags<EventCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<EventCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR EventCreateFlags allFlags = EventCreateFlagBits::eDeviceOnly;
+  };
+
+  enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags
+  {
+    eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
+    eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
+    eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
+    eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
+    eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
+    eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
+    eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
+    eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
+    eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
+    eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
+    eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT,
+    eTaskShaderInvocationsEXT = VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT,
+    eMeshShaderInvocationsEXT = VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT,
+    eClusterCullingShaderInvocationsHUAWEI = VK_QUERY_PIPELINE_STATISTIC_CLUSTER_CULLING_SHADER_INVOCATIONS_BIT_HUAWEI
+  };
+
+  using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits>;
+
+  template <>
+  struct FlagTraits<QueryPipelineStatisticFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR QueryPipelineStatisticFlags allFlags =
+      QueryPipelineStatisticFlagBits::eInputAssemblyVertices | QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives |
+      QueryPipelineStatisticFlagBits::eVertexShaderInvocations | QueryPipelineStatisticFlagBits::eGeometryShaderInvocations |
+      QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives | QueryPipelineStatisticFlagBits::eClippingInvocations |
+      QueryPipelineStatisticFlagBits::eClippingPrimitives | QueryPipelineStatisticFlagBits::eFragmentShaderInvocations |
+      QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches | QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations |
+      QueryPipelineStatisticFlagBits::eComputeShaderInvocations | QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT |
+      QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT | QueryPipelineStatisticFlagBits::eClusterCullingShaderInvocationsHUAWEI;
+  };
+
+  enum class QueryResultFlagBits : VkQueryResultFlags
+  {
+    e64 = VK_QUERY_RESULT_64_BIT,
+    eWait = VK_QUERY_RESULT_WAIT_BIT,
+    eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
+    ePartial = VK_QUERY_RESULT_PARTIAL_BIT,
+    eWithStatusKHR = VK_QUERY_RESULT_WITH_STATUS_BIT_KHR
+  };
+
+  using QueryResultFlags = Flags<QueryResultFlagBits>;
+
+  template <>
+  struct FlagTraits<QueryResultFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR QueryResultFlags allFlags = QueryResultFlagBits::e64 | QueryResultFlagBits::eWait |
+                                                                     QueryResultFlagBits::eWithAvailability | QueryResultFlagBits::ePartial |
+                                                                     QueryResultFlagBits::eWithStatusKHR;
+  };
+
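FenceCreateFlagBits::eSignaled is worth a note: starting a fence signaled avoids a special case on the first iteration of a wait/reset/submit frame loop. A minimal sketch (editor's illustration; assumes the default exceptions-enabled vulkan.hpp configuration and a valid `device` handle):

    #include <vulkan/vulkan.hpp>

    // Create a per-frame fence that the first vkWaitForFences call can
    // pass through immediately, because it starts out signaled.
    vk::Fence makeFrameFence(vk::Device device)
    {
        vk::FenceCreateInfo info{ vk::FenceCreateFlagBits::eSignaled };
        return device.createFence(info);
    }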
+  enum class QueryType
+  {
+    eOcclusion = VK_QUERY_TYPE_OCCLUSION,
+    ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
+    eTimestamp = VK_QUERY_TYPE_TIMESTAMP,
+    eResultStatusOnlyKHR = VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR,
+    eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT,
+    ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR,
+    eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
+    eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR,
+    eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV,
+    ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL,
+    eVideoEncodeFeedbackKHR = VK_QUERY_TYPE_VIDEO_ENCODE_FEEDBACK_KHR,
+    eMeshPrimitivesGeneratedEXT = VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT,
+    ePrimitivesGeneratedEXT = VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT,
+    eAccelerationStructureSerializationBottomLevelPointersKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR,
+    eAccelerationStructureSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR,
+    eMicromapSerializationSizeEXT = VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT,
+    eMicromapCompactedSizeEXT = VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT
+  };
+
+  enum class QueryPoolCreateFlagBits : VkQueryPoolCreateFlags
+  {
+  };
+
+  using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<QueryPoolCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR QueryPoolCreateFlags allFlags = {};
+  };
+
+  enum class BufferCreateFlagBits : VkBufferCreateFlags
+  {
+    eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
+    eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
+    eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT,
+    eProtected = VK_BUFFER_CREATE_PROTECTED_BIT,
+    eDeviceAddressCaptureReplay = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
+    eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT,
+    eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR,
+    eDescriptorBufferCaptureReplayEXT = VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT,
+    eVideoProfileIndependentKHR = VK_BUFFER_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR
+  };
+
+  using BufferCreateFlags = Flags<BufferCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<BufferCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR BufferCreateFlags allFlags =
+      BufferCreateFlagBits::eSparseBinding | BufferCreateFlagBits::eSparseResidency | BufferCreateFlagBits::eSparseAliased | BufferCreateFlagBits::eProtected |
+      BufferCreateFlagBits::eDeviceAddressCaptureReplay | BufferCreateFlagBits::eDescriptorBufferCaptureReplayEXT |
+      BufferCreateFlagBits::eVideoProfileIndependentKHR;
+  };
+
+  enum class BufferUsageFlagBits : VkBufferUsageFlags
+  {
+    eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+    eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
+    eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
+    eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
+    eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
+    eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
+    eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT,
+    eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
+    eVideoDecodeSrcKHR = VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR,
+    eVideoDecodeDstKHR = VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR,
+    eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
+    eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
+    eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR,
+    eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR,
+    eShaderBindingTableKHR = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR,
+    eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV,
+    eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT,
+    eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR,
+    eVideoEncodeDstKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR,
+    eVideoEncodeSrcKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR,
+    eSamplerDescriptorBufferEXT = VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT,
+    eResourceDescriptorBufferEXT = VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT,
+    ePushDescriptorsDescriptorBufferEXT = VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT,
+    eMicromapBuildInputReadOnlyEXT = VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT,
+    eMicromapStorageEXT = VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT
+  };
+
+  using BufferUsageFlags = Flags<BufferUsageFlagBits>;
+
+  template <>
+  struct FlagTraits<BufferUsageFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR BufferUsageFlags allFlags =
+      BufferUsageFlagBits::eTransferSrc | BufferUsageFlagBits::eTransferDst | BufferUsageFlagBits::eUniformTexelBuffer |
+      BufferUsageFlagBits::eStorageTexelBuffer | BufferUsageFlagBits::eUniformBuffer | BufferUsageFlagBits::eStorageBuffer | BufferUsageFlagBits::eIndexBuffer |
+      BufferUsageFlagBits::eVertexBuffer | BufferUsageFlagBits::eIndirectBuffer | BufferUsageFlagBits::eShaderDeviceAddress |
+      BufferUsageFlagBits::eVideoDecodeSrcKHR | BufferUsageFlagBits::eVideoDecodeDstKHR | BufferUsageFlagBits::eTransformFeedbackBufferEXT |
+      BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT | BufferUsageFlagBits::eConditionalRenderingEXT
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      | BufferUsageFlagBits::eExecutionGraphScratchAMDX
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      | BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR | BufferUsageFlagBits::eAccelerationStructureStorageKHR |
+      BufferUsageFlagBits::eShaderBindingTableKHR | BufferUsageFlagBits::eVideoEncodeDstKHR | BufferUsageFlagBits::eVideoEncodeSrcKHR |
+      BufferUsageFlagBits::eSamplerDescriptorBufferEXT | BufferUsageFlagBits::eResourceDescriptorBufferEXT |
+      BufferUsageFlagBits::ePushDescriptorsDescriptorBufferEXT | BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT | BufferUsageFlagBits::eMicromapStorageEXT;
+  };
+
+  enum class SharingMode
+  {
+    eExclusive = VK_SHARING_MODE_EXCLUSIVE,
+    eConcurrent = VK_SHARING_MODE_CONCURRENT
+  };
+
+  enum class BufferViewCreateFlagBits : VkBufferViewCreateFlags
+  {
+  };
+
+  using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<BufferViewCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR BufferViewCreateFlags allFlags = {};
+  };
+
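BufferUsageFlagBits and SharingMode come together in a buffer create info. A minimal sketch (editor's illustration; `makeVertexBufferInfo` is an assumed helper name):

    #include <vulkan/vulkan.hpp>

    // Describe a vertex buffer that staging copies can write into.
    // eExclusive is the common choice when one queue family owns the buffer.
    vk::BufferCreateInfo makeVertexBufferInfo(vk::DeviceSize size)
    {
        vk::BufferCreateInfo info{};
        info.size        = size;
        info.usage       = vk::BufferUsageFlagBits::eVertexBuffer | vk::BufferUsageFlagBits::eTransferDst;
        info.sharingMode = vk::SharingMode::eExclusive;
        return info;
    }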
+  enum class ImageLayout
+  {
+    eUndefined = VK_IMAGE_LAYOUT_UNDEFINED,
+    eGeneral = VK_IMAGE_LAYOUT_GENERAL,
+    eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+    eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+    eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+    eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
+    eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+    eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+    ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED,
+    eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
+    eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+    eDepthAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
+    eDepthReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL,
+    eStencilAttachmentOptimal = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
+    eStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL,
+    eReadOnlyOptimal = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL,
+    eAttachmentOptimal = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL,
+    ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
+    eVideoDecodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR,
+    eVideoDecodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR,
+    eVideoDecodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR,
+    eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
+    eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR,
+    eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR,
+    eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
+    eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT,
+    eFragmentShadingRateAttachmentOptimalKHR = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR,
+    eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR,
+    eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR,
+    eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR,
+    eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR,
+    eVideoEncodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR,
+    eVideoEncodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR,
+    eVideoEncodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR,
+    eReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR,
+    eAttachmentOptimalKHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR,
+    eAttachmentFeedbackLoopOptimalEXT = VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT
+  };
+
+  enum class ComponentSwizzle
+  {
+    eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY,
+    eZero = VK_COMPONENT_SWIZZLE_ZERO,
+    eOne = VK_COMPONENT_SWIZZLE_ONE,
+    eR = VK_COMPONENT_SWIZZLE_R,
+    eG = VK_COMPONENT_SWIZZLE_G,
+    eB = VK_COMPONENT_SWIZZLE_B,
+    eA = VK_COMPONENT_SWIZZLE_A
+  };
+
+  enum class ImageViewCreateFlagBits : VkImageViewCreateFlags
+  {
+    eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT,
+    eDescriptorBufferCaptureReplayEXT = VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT,
+    eFragmentDensityMapDeferredEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT
+  };
+
+  using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<ImageViewCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageViewCreateFlags allFlags = ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT |
+                                                                         ImageViewCreateFlagBits::eDescriptorBufferCaptureReplayEXT |
+                                                                         ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT;
+  };
+
+  enum class ImageViewType
+  {
+    e1D = VK_IMAGE_VIEW_TYPE_1D,
+    e2D = VK_IMAGE_VIEW_TYPE_2D,
+    e3D = VK_IMAGE_VIEW_TYPE_3D,
+    eCube = VK_IMAGE_VIEW_TYPE_CUBE,
+    e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY,
+    e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY,
+    eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
+  };
+
+  enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags
+  {
+  };
+
+  using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<ShaderModuleCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderModuleCreateFlags allFlags = {};
+  };
+
+  enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags
+  {
+    eExternallySynchronized = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT,
+    eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT
+  };
+
+  using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<PipelineCacheCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCacheCreateFlags allFlags = PipelineCacheCreateFlagBits::eExternallySynchronized;
+  };
+
+  enum class BlendFactor
+  {
+    eZero = VK_BLEND_FACTOR_ZERO,
+    eOne = VK_BLEND_FACTOR_ONE,
+    eSrcColor = VK_BLEND_FACTOR_SRC_COLOR,
+    eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
+    eDstColor = VK_BLEND_FACTOR_DST_COLOR,
+    eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
+    eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA,
+    eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
+    eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA,
+    eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
+    eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR,
+    eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
+    eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA,
+    eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
+    eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE,
+    eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR,
+    eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
+    eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA,
+    eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
+  };
+
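ImageViewType, ComponentSwizzle, and the ImageAspectFlagBits defined earlier all meet in an image-view create info. A minimal sketch (editor's illustration; `makeColorView` is an assumed helper name, `device` and `image` are assumed valid handles, and `format` should match the image's format):

    #include <vulkan/vulkan.hpp>

    // Create a plain 2D color view over mip 0 / layer 0 of an image.
    vk::ImageView makeColorView(vk::Device device, vk::Image image, vk::Format format)
    {
        vk::ImageViewCreateInfo info{};
        info.image            = image;
        info.viewType         = vk::ImageViewType::e2D;
        info.format           = format;
        info.components       = vk::ComponentMapping{}; // all ComponentSwizzle::eIdentity by default
        info.subresourceRange = vk::ImageSubresourceRange{ vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 };
        return device.createImageView(info);
    }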
VK_BLEND_OP_LINEARBURN_EXT, + eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT, + eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT, + ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT, + eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT, + eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT, + eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT, + eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT, + eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT, + ePlusEXT = VK_BLEND_OP_PLUS_EXT, + ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT, + ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT, + ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT, + eMinusEXT = VK_BLEND_OP_MINUS_EXT, + eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT, + eContrastEXT = VK_BLEND_OP_CONTRAST_EXT, + eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT, + eRedEXT = VK_BLEND_OP_RED_EXT, + eGreenEXT = VK_BLEND_OP_GREEN_EXT, + eBlueEXT = VK_BLEND_OP_BLUE_EXT + }; + + enum class ColorComponentFlagBits : VkColorComponentFlags + { + eR = VK_COLOR_COMPONENT_R_BIT, + eG = VK_COLOR_COMPONENT_G_BIT, + eB = VK_COLOR_COMPONENT_B_BIT, + eA = VK_COLOR_COMPONENT_A_BIT + }; + + using ColorComponentFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ColorComponentFlags allFlags = + ColorComponentFlagBits::eR | ColorComponentFlagBits::eG | ColorComponentFlagBits::eB | ColorComponentFlagBits::eA; + }; + + enum class CompareOp + { + eNever = VK_COMPARE_OP_NEVER, + eLess = VK_COMPARE_OP_LESS, + eEqual = VK_COMPARE_OP_EQUAL, + eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL, + eGreater = VK_COMPARE_OP_GREATER, + eNotEqual = VK_COMPARE_OP_NOT_EQUAL, + eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL, + eAlways = VK_COMPARE_OP_ALWAYS + }; + + enum class CullModeFlagBits : VkCullModeFlags + { + eNone = VK_CULL_MODE_NONE, + eFront = VK_CULL_MODE_FRONT_BIT, + eBack = VK_CULL_MODE_BACK_BIT, + eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK + }; + + using CullModeFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CullModeFlags allFlags = + CullModeFlagBits::eNone | CullModeFlagBits::eFront | CullModeFlagBits::eBack | CullModeFlagBits::eFrontAndBack; + }; + + enum class DynamicState + { + eViewport = VK_DYNAMIC_STATE_VIEWPORT, + eScissor = VK_DYNAMIC_STATE_SCISSOR, + eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH, + eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS, + eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS, + eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS, + eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, + eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, + eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE, + eCullMode = VK_DYNAMIC_STATE_CULL_MODE, + eFrontFace = VK_DYNAMIC_STATE_FRONT_FACE, + ePrimitiveTopology = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY, + eViewportWithCount = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT, + eScissorWithCount = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT, + eVertexInputBindingStride = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE, + eDepthTestEnable = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE, + eDepthWriteEnable = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE, + eDepthCompareOp = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP, + eDepthBoundsTestEnable = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE, + eStencilTestEnable = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE, + eStencilOp = VK_DYNAMIC_STATE_STENCIL_OP, + eRasterizerDiscardEnable = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE, + eDepthBiasEnable = 
VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE, + ePrimitiveRestartEnable = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE, + eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, + eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, + eDiscardRectangleEnableEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_ENABLE_EXT, + eDiscardRectangleModeEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_MODE_EXT, + eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, + eRayTracingPipelineStackSizeKHR = VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR, + eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, + eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, + eExclusiveScissorEnableNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_ENABLE_NV, + eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, + eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR, + eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, + eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT, + eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT, + ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT, + eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT, + eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT, + eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT, + eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT, + eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT, + eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT, + eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT, + eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT, + eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT, + eVertexInputEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_EXT, + ePatchControlPointsEXT = VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT, + eRasterizerDiscardEnableEXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT, + eDepthBiasEnableEXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT, + eLogicOpEXT = VK_DYNAMIC_STATE_LOGIC_OP_EXT, + ePrimitiveRestartEnableEXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT, + eColorWriteEnableEXT = VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT, + eTessellationDomainOriginEXT = VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT, + eDepthClampEnableEXT = VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT, + ePolygonModeEXT = VK_DYNAMIC_STATE_POLYGON_MODE_EXT, + eRasterizationSamplesEXT = VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT, + eSampleMaskEXT = VK_DYNAMIC_STATE_SAMPLE_MASK_EXT, + eAlphaToCoverageEnableEXT = VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT, + eAlphaToOneEnableEXT = VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT, + eLogicOpEnableEXT = VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT, + eColorBlendEnableEXT = VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT, + eColorBlendEquationEXT = VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT, + eColorWriteMaskEXT = VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, + eRasterizationStreamEXT = VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT, + eConservativeRasterizationModeEXT = VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT, + eExtraPrimitiveOverestimationSizeEXT = VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT, + eDepthClipEnableEXT = VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT, + eSampleLocationsEnableEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT, + eColorBlendAdvancedEXT = VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT, + eProvokingVertexModeEXT = VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT, + eLineRasterizationModeEXT = 
VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT, + eLineStippleEnableEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT, + eDepthClipNegativeOneToOneEXT = VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT, + eViewportWScalingEnableNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV, + eViewportSwizzleNV = VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV, + eCoverageToColorEnableNV = VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV, + eCoverageToColorLocationNV = VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV, + eCoverageModulationModeNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV, + eCoverageModulationTableEnableNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV, + eCoverageModulationTableNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV, + eShadingRateImageEnableNV = VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV, + eRepresentativeFragmentTestEnableNV = VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV, + eCoverageReductionModeNV = VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV, + eAttachmentFeedbackLoopEnableEXT = VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT + }; + + enum class FrontFace + { + eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE, + eClockwise = VK_FRONT_FACE_CLOCKWISE + }; + + enum class LogicOp + { + eClear = VK_LOGIC_OP_CLEAR, + eAnd = VK_LOGIC_OP_AND, + eAndReverse = VK_LOGIC_OP_AND_REVERSE, + eCopy = VK_LOGIC_OP_COPY, + eAndInverted = VK_LOGIC_OP_AND_INVERTED, + eNoOp = VK_LOGIC_OP_NO_OP, + eXor = VK_LOGIC_OP_XOR, + eOr = VK_LOGIC_OP_OR, + eNor = VK_LOGIC_OP_NOR, + eEquivalent = VK_LOGIC_OP_EQUIVALENT, + eInvert = VK_LOGIC_OP_INVERT, + eOrReverse = VK_LOGIC_OP_OR_REVERSE, + eCopyInverted = VK_LOGIC_OP_COPY_INVERTED, + eOrInverted = VK_LOGIC_OP_OR_INVERTED, + eNand = VK_LOGIC_OP_NAND, + eSet = VK_LOGIC_OP_SET + }; + + enum class PipelineCreateFlagBits : VkPipelineCreateFlags + { + eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT, + eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT, + eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT, + eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, + eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT, + eEarlyReturnOnFailure = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT, + eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eVkPipelineRasterizationStateCreateFragmentShadingRateAttachmentKHR = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eVkPipelineRasterizationStateCreateFragmentDensityMapAttachmentEXT = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, + eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR, + eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, + eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, + eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, + eRayTracingSkipAabbsKHR = 
VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR, + eRayTracingShaderGroupHandleCaptureReplayKHR = VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, + eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV, + eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR, + eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, + eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, + eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR, + eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT, + eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT, + eDescriptorBufferEXT = VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT, + eRetainLinkTimeOptimizationInfoEXT = VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT, + eLinkTimeOptimizationEXT = VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT, + eRayTracingAllowMotionNV = VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV, + eColorAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, + eDepthStencilAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, + eRayTracingOpacityMicromapEXT = VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eRayTracingDisplacementMicromapNV = VK_PIPELINE_CREATE_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eNoProtectedAccessEXT = VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT, + eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT + }; + + using PipelineCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags allFlags = + PipelineCreateFlagBits::eDisableOptimization | PipelineCreateFlagBits::eAllowDerivatives | PipelineCreateFlagBits::eDerivative | + PipelineCreateFlagBits::eViewIndexFromDeviceIndex | PipelineCreateFlagBits::eDispatchBase | PipelineCreateFlagBits::eFailOnPipelineCompileRequired | + PipelineCreateFlagBits::eEarlyReturnOnFailure | PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR | + PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT | PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR | + PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR | PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR | + PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR | PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR | + PipelineCreateFlagBits::eRayTracingSkipAabbsKHR | PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR | + PipelineCreateFlagBits::eDeferCompileNV | PipelineCreateFlagBits::eCaptureStatisticsKHR | PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR | + PipelineCreateFlagBits::eIndirectBindableNV | PipelineCreateFlagBits::eLibraryKHR | PipelineCreateFlagBits::eDescriptorBufferEXT | + PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT | PipelineCreateFlagBits::eLinkTimeOptimizationEXT | + PipelineCreateFlagBits::eRayTracingAllowMotionNV | PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT | + PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT | PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | PipelineCreateFlagBits::eRayTracingDisplacementMicromapNV +#endif 
/*VK_ENABLE_BETA_EXTENSIONS*/ + | PipelineCreateFlagBits::eNoProtectedAccessEXT | PipelineCreateFlagBits::eProtectedAccessOnlyEXT; + }; + + enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags + { + eAllowVaryingSubgroupSize = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT, + eRequireFullSubgroups = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT, + eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT, + eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT + }; + + using PipelineShaderStageCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineShaderStageCreateFlags allFlags = + PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize | PipelineShaderStageCreateFlagBits::eRequireFullSubgroups; + }; + + enum class PolygonMode + { + eFill = VK_POLYGON_MODE_FILL, + eLine = VK_POLYGON_MODE_LINE, + ePoint = VK_POLYGON_MODE_POINT, + eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV + }; + + enum class PrimitiveTopology + { + ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, + eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST, + eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP, + eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, + eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP, + eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN, + eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, + eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY, + eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY, + eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY, + ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST + }; + + enum class ShaderStageFlagBits : VkShaderStageFlags + { + eVertex = VK_SHADER_STAGE_VERTEX_BIT, + eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, + eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, + eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT, + eFragment = VK_SHADER_STAGE_FRAGMENT_BIT, + eCompute = VK_SHADER_STAGE_COMPUTE_BIT, + eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS, + eAll = VK_SHADER_STAGE_ALL, + eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR, + eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR, + eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV, + eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV, + eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, + eMissNV = VK_SHADER_STAGE_MISS_BIT_NV, + eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV, + eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV, + eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV, + eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV, + eTaskEXT = VK_SHADER_STAGE_TASK_BIT_EXT, + eMeshEXT = VK_SHADER_STAGE_MESH_BIT_EXT, + eSubpassShadingHUAWEI = VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI, + eClusterCullingHUAWEI = VK_SHADER_STAGE_CLUSTER_CULLING_BIT_HUAWEI + }; + + using ShaderStageFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderStageFlags allFlags = + ShaderStageFlagBits::eVertex | ShaderStageFlagBits::eTessellationControl | 
ShaderStageFlagBits::eTessellationEvaluation | ShaderStageFlagBits::eGeometry | + ShaderStageFlagBits::eFragment | ShaderStageFlagBits::eCompute | ShaderStageFlagBits::eAllGraphics | ShaderStageFlagBits::eAll | + ShaderStageFlagBits::eRaygenKHR | ShaderStageFlagBits::eAnyHitKHR | ShaderStageFlagBits::eClosestHitKHR | ShaderStageFlagBits::eMissKHR | + ShaderStageFlagBits::eIntersectionKHR | ShaderStageFlagBits::eCallableKHR | ShaderStageFlagBits::eTaskEXT | ShaderStageFlagBits::eMeshEXT | + ShaderStageFlagBits::eSubpassShadingHUAWEI | ShaderStageFlagBits::eClusterCullingHUAWEI; + }; + + enum class StencilOp + { + eKeep = VK_STENCIL_OP_KEEP, + eZero = VK_STENCIL_OP_ZERO, + eReplace = VK_STENCIL_OP_REPLACE, + eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP, + eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP, + eInvert = VK_STENCIL_OP_INVERT, + eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP, + eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP + }; + + enum class VertexInputRate + { + eVertex = VK_VERTEX_INPUT_RATE_VERTEX, + eInstance = VK_VERTEX_INPUT_RATE_INSTANCE + }; + + enum class PipelineColorBlendStateCreateFlagBits : VkPipelineColorBlendStateCreateFlags + { + eRasterizationOrderAttachmentAccessARM = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM, + eRasterizationOrderAttachmentAccessEXT = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT + }; + + using PipelineColorBlendStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineColorBlendStateCreateFlags allFlags = + PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT; + }; + + enum class PipelineDepthStencilStateCreateFlagBits : VkPipelineDepthStencilStateCreateFlags + { + eRasterizationOrderAttachmentDepthAccessARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM, + eRasterizationOrderAttachmentStencilAccessARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM, + eRasterizationOrderAttachmentDepthAccessEXT = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT, + eRasterizationOrderAttachmentStencilAccessEXT = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT + }; + + using PipelineDepthStencilStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineDepthStencilStateCreateFlags allFlags = + PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT | + PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT; + }; + + enum class PipelineDynamicStateCreateFlagBits : VkPipelineDynamicStateCreateFlags + { + }; + + using PipelineDynamicStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineDynamicStateCreateFlags allFlags = {}; + }; + + enum class PipelineInputAssemblyStateCreateFlagBits : VkPipelineInputAssemblyStateCreateFlags + { + }; + + using PipelineInputAssemblyStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
PipelineInputAssemblyStateCreateFlags allFlags = {}; + }; + + enum class PipelineLayoutCreateFlagBits : VkPipelineLayoutCreateFlags + { + eIndependentSetsEXT = VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT + }; + + using PipelineLayoutCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineLayoutCreateFlags allFlags = PipelineLayoutCreateFlagBits::eIndependentSetsEXT; + }; + + enum class PipelineMultisampleStateCreateFlagBits : VkPipelineMultisampleStateCreateFlags + { + }; + + using PipelineMultisampleStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineMultisampleStateCreateFlags allFlags = {}; + }; + + enum class PipelineRasterizationStateCreateFlagBits : VkPipelineRasterizationStateCreateFlags + { + }; + + using PipelineRasterizationStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationStateCreateFlags allFlags = {}; + }; + + enum class PipelineTessellationStateCreateFlagBits : VkPipelineTessellationStateCreateFlags + { + }; + + using PipelineTessellationStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineTessellationStateCreateFlags allFlags = {}; + }; + + enum class PipelineVertexInputStateCreateFlagBits : VkPipelineVertexInputStateCreateFlags + { + }; + + using PipelineVertexInputStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineVertexInputStateCreateFlags allFlags = {}; + }; + + enum class PipelineViewportStateCreateFlagBits : VkPipelineViewportStateCreateFlags + { + }; + + using PipelineViewportStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineViewportStateCreateFlags allFlags = {}; + }; + + enum class BorderColor + { + eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, + eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK, + eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK, + eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK, + eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE, + eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE, + eFloatCustomEXT = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT, + eIntCustomEXT = VK_BORDER_COLOR_INT_CUSTOM_EXT + }; + + enum class Filter + { + eNearest = VK_FILTER_NEAREST, + eLinear = VK_FILTER_LINEAR, + eCubicIMG = VK_FILTER_CUBIC_IMG, + eCubicEXT = VK_FILTER_CUBIC_EXT + }; + + enum class SamplerAddressMode + { + eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT, + eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT, + eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, + eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, + eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, + eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR + }; + + enum class SamplerCreateFlagBits : VkSamplerCreateFlags + { + eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT, + eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT, 
+ eDescriptorBufferCaptureReplayEXT = VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT, + eNonSeamlessCubeMapEXT = VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT, + eImageProcessingQCOM = VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM + }; + + using SamplerCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SamplerCreateFlags allFlags = + SamplerCreateFlagBits::eSubsampledEXT | SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT | + SamplerCreateFlagBits::eDescriptorBufferCaptureReplayEXT | SamplerCreateFlagBits::eNonSeamlessCubeMapEXT | SamplerCreateFlagBits::eImageProcessingQCOM; + }; + + enum class SamplerMipmapMode + { + eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST, + eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR + }; + + enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags + { + eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, + eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, + eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT, + eHostOnlyVALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE, + eHostOnlyEXT = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT, + eAllowOverallocationSetsNV = VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_SETS_BIT_NV, + eAllowOverallocationPoolsNV = VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_POOLS_BIT_NV + }; + + using DescriptorPoolCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorPoolCreateFlags allFlags = + DescriptorPoolCreateFlagBits::eFreeDescriptorSet | DescriptorPoolCreateFlagBits::eUpdateAfterBind | DescriptorPoolCreateFlagBits::eHostOnlyEXT | + DescriptorPoolCreateFlagBits::eAllowOverallocationSetsNV | DescriptorPoolCreateFlagBits::eAllowOverallocationPoolsNV; + }; + + enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags + { + eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, + ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, + eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT, + eDescriptorBufferEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT, + eEmbeddedImmutableSamplersEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT, + eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE, + eIndirectBindableNV = VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV, + eHostOnlyPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT, + ePerStageNV = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PER_STAGE_BIT_NV + }; + + using DescriptorSetLayoutCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorSetLayoutCreateFlags allFlags = + DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool | DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR | + DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT | DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT | + DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV | DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT | + DescriptorSetLayoutCreateFlagBits::ePerStageNV; + }; + + enum class DescriptorType + { + eSampler = VK_DESCRIPTOR_TYPE_SAMPLER, + eCombinedImageSampler = 
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, + eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, + eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, + eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, + eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, + eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, + eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, + eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, + eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, + eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, + eInlineUniformBlock = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK, + eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, + eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, + eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, + eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, + eSampleWeightImageQCOM = VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM, + eBlockMatchImageQCOM = VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM, + eMutableEXT = VK_DESCRIPTOR_TYPE_MUTABLE_EXT + }; + + enum class DescriptorPoolResetFlagBits : VkDescriptorPoolResetFlags + { + }; + + using DescriptorPoolResetFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorPoolResetFlags allFlags = {}; + }; + + enum class AccessFlagBits : VkAccessFlags + { + eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT, + eIndexRead = VK_ACCESS_INDEX_READ_BIT, + eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, + eUniformRead = VK_ACCESS_UNIFORM_READ_BIT, + eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, + eShaderRead = VK_ACCESS_SHADER_READ_BIT, + eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT, + eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, + eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, + eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, + eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, + eTransferRead = VK_ACCESS_TRANSFER_READ_BIT, + eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT, + eHostRead = VK_ACCESS_HOST_READ_BIT, + eHostWrite = VK_ACCESS_HOST_WRITE_BIT, + eMemoryRead = VK_ACCESS_MEMORY_READ_BIT, + eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT, + eNone = VK_ACCESS_NONE, + eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, + eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, + eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, + eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT, + eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, + eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, + eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, + eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, + eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV, + eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, + eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, + eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, + eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, + eNoneKHR 
= VK_ACCESS_NONE_KHR + }; + + using AccessFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AccessFlags allFlags = + AccessFlagBits::eIndirectCommandRead | AccessFlagBits::eIndexRead | AccessFlagBits::eVertexAttributeRead | AccessFlagBits::eUniformRead | + AccessFlagBits::eInputAttachmentRead | AccessFlagBits::eShaderRead | AccessFlagBits::eShaderWrite | AccessFlagBits::eColorAttachmentRead | + AccessFlagBits::eColorAttachmentWrite | AccessFlagBits::eDepthStencilAttachmentRead | AccessFlagBits::eDepthStencilAttachmentWrite | + AccessFlagBits::eTransferRead | AccessFlagBits::eTransferWrite | AccessFlagBits::eHostRead | AccessFlagBits::eHostWrite | AccessFlagBits::eMemoryRead | + AccessFlagBits::eMemoryWrite | AccessFlagBits::eNone | AccessFlagBits::eTransformFeedbackWriteEXT | AccessFlagBits::eTransformFeedbackCounterReadEXT | + AccessFlagBits::eTransformFeedbackCounterWriteEXT | AccessFlagBits::eConditionalRenderingReadEXT | AccessFlagBits::eColorAttachmentReadNoncoherentEXT | + AccessFlagBits::eAccelerationStructureReadKHR | AccessFlagBits::eAccelerationStructureWriteKHR | AccessFlagBits::eFragmentDensityMapReadEXT | + AccessFlagBits::eFragmentShadingRateAttachmentReadKHR | AccessFlagBits::eCommandPreprocessReadNV | AccessFlagBits::eCommandPreprocessWriteNV; + }; + + enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags + { + eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT + }; + + using AttachmentDescriptionFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AttachmentDescriptionFlags allFlags = AttachmentDescriptionFlagBits::eMayAlias; + }; + + enum class AttachmentLoadOp + { + eLoad = VK_ATTACHMENT_LOAD_OP_LOAD, + eClear = VK_ATTACHMENT_LOAD_OP_CLEAR, + eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE, + eNoneEXT = VK_ATTACHMENT_LOAD_OP_NONE_EXT + }; + + enum class AttachmentStoreOp + { + eStore = VK_ATTACHMENT_STORE_OP_STORE, + eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE, + eNone = VK_ATTACHMENT_STORE_OP_NONE, + eNoneKHR = VK_ATTACHMENT_STORE_OP_NONE_KHR, + eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM, + eNoneEXT = VK_ATTACHMENT_STORE_OP_NONE_EXT + }; + + enum class DependencyFlagBits : VkDependencyFlags + { + eByRegion = VK_DEPENDENCY_BY_REGION_BIT, + eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT, + eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT, + eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR, + eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR, + eFeedbackLoopEXT = VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT + }; + + using DependencyFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DependencyFlags allFlags = + DependencyFlagBits::eByRegion | DependencyFlagBits::eDeviceGroup | DependencyFlagBits::eViewLocal | DependencyFlagBits::eFeedbackLoopEXT; + }; + + enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags + { + eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, + eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR + }; + + using FramebufferCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR FramebufferCreateFlags allFlags = FramebufferCreateFlagBits::eImageless; + }; + + enum class PipelineBindPoint + { + eGraphics = 
VK_PIPELINE_BIND_POINT_GRAPHICS, + eCompute = VK_PIPELINE_BIND_POINT_COMPUTE, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eExecutionGraphAMDX = VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, + eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, + eSubpassShadingHUAWEI = VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI + }; + + enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags + { + eTransformQCOM = VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM + }; + + using RenderPassCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR RenderPassCreateFlags allFlags = RenderPassCreateFlagBits::eTransformQCOM; + }; + + enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags + { + ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX, + ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX, + eFragmentRegionQCOM = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM, + eShaderResolveQCOM = VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM, + eRasterizationOrderAttachmentColorAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM, + eRasterizationOrderAttachmentDepthAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM, + eRasterizationOrderAttachmentStencilAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM, + eRasterizationOrderAttachmentColorAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT, + eRasterizationOrderAttachmentDepthAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT, + eRasterizationOrderAttachmentStencilAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT, + eEnableLegacyDitheringEXT = VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT + }; + + using SubpassDescriptionFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SubpassDescriptionFlags allFlags = + SubpassDescriptionFlagBits::ePerViewAttributesNVX | SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX | + SubpassDescriptionFlagBits::eFragmentRegionQCOM | SubpassDescriptionFlagBits::eShaderResolveQCOM | + SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessEXT | SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessEXT | + SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessEXT | SubpassDescriptionFlagBits::eEnableLegacyDitheringEXT; + }; + + enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags + { + eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, + eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, + eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT + }; + + using CommandPoolCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CommandPoolCreateFlags allFlags = + CommandPoolCreateFlagBits::eTransient | CommandPoolCreateFlagBits::eResetCommandBuffer | CommandPoolCreateFlagBits::eProtected; + }; + + enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags + { + eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT + }; + + using CommandPoolResetFlags = Flags; + + 
template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CommandPoolResetFlags allFlags = CommandPoolResetFlagBits::eReleaseResources; + }; + + enum class CommandBufferLevel + { + ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY, + eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY + }; + + enum class CommandBufferResetFlagBits : VkCommandBufferResetFlags + { + eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT + }; + + using CommandBufferResetFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CommandBufferResetFlags allFlags = CommandBufferResetFlagBits::eReleaseResources; + }; + + enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags + { + eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, + eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, + eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT + }; + + using CommandBufferUsageFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CommandBufferUsageFlags allFlags = + CommandBufferUsageFlagBits::eOneTimeSubmit | CommandBufferUsageFlagBits::eRenderPassContinue | CommandBufferUsageFlagBits::eSimultaneousUse; + }; + + enum class QueryControlFlagBits : VkQueryControlFlags + { + ePrecise = VK_QUERY_CONTROL_PRECISE_BIT + }; + + using QueryControlFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR QueryControlFlags allFlags = QueryControlFlagBits::ePrecise; + }; + + enum class IndexType + { + eUint16 = VK_INDEX_TYPE_UINT16, + eUint32 = VK_INDEX_TYPE_UINT32, + eNoneKHR = VK_INDEX_TYPE_NONE_KHR, + eNoneNV = VK_INDEX_TYPE_NONE_NV, + eUint8EXT = VK_INDEX_TYPE_UINT8_EXT + }; + + enum class StencilFaceFlagBits : VkStencilFaceFlags + { + eFront = VK_STENCIL_FACE_FRONT_BIT, + eBack = VK_STENCIL_FACE_BACK_BIT, + eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK, + eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK + }; + + using StencilFaceFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StencilFaceFlags allFlags = + StencilFaceFlagBits::eFront | StencilFaceFlagBits::eBack | StencilFaceFlagBits::eFrontAndBack; + }; + + enum class SubpassContents + { + eInline = VK_SUBPASS_CONTENTS_INLINE, + eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, + eInlineAndSecondaryCommandBuffersEXT = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT + }; + + //=== VK_VERSION_1_1 === + + enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags + { + eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT, + eVote = VK_SUBGROUP_FEATURE_VOTE_BIT, + eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT, + eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT, + eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT, + eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT, + eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT, + eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT, + ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV + }; + + using SubgroupFeatureFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SubgroupFeatureFlags allFlags = + 
SubgroupFeatureFlagBits::eBasic | SubgroupFeatureFlagBits::eVote | SubgroupFeatureFlagBits::eArithmetic | SubgroupFeatureFlagBits::eBallot | + SubgroupFeatureFlagBits::eShuffle | SubgroupFeatureFlagBits::eShuffleRelative | SubgroupFeatureFlagBits::eClustered | SubgroupFeatureFlagBits::eQuad | + SubgroupFeatureFlagBits::ePartitionedNV; + }; + + enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags + { + eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT + }; + using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits; + + using PeerMemoryFeatureFlags = Flags; + using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PeerMemoryFeatureFlags allFlags = PeerMemoryFeatureFlagBits::eCopySrc | PeerMemoryFeatureFlagBits::eCopyDst | + PeerMemoryFeatureFlagBits::eGenericSrc | PeerMemoryFeatureFlagBits::eGenericDst; + }; + + enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags + { + eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + eDeviceAddress = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT, + eDeviceAddressCaptureReplay = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT + }; + using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits; + + using MemoryAllocateFlags = Flags; + using MemoryAllocateFlagsKHR = MemoryAllocateFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryAllocateFlags allFlags = + MemoryAllocateFlagBits::eDeviceMask | MemoryAllocateFlagBits::eDeviceAddress | MemoryAllocateFlagBits::eDeviceAddressCaptureReplay; + }; + + enum class CommandPoolTrimFlagBits : VkCommandPoolTrimFlags + { + }; + + using CommandPoolTrimFlags = Flags; + using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CommandPoolTrimFlags allFlags = {}; + }; + + enum class PointClippingBehavior + { + eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY + }; + using PointClippingBehaviorKHR = PointClippingBehavior; + + enum class TessellationDomainOrigin + { + eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT + }; + using TessellationDomainOriginKHR = TessellationDomainOrigin; + + enum class SamplerYcbcrModelConversion + { + eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 + }; + using SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion; + + enum class SamplerYcbcrRange + { + eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW + }; + using SamplerYcbcrRangeKHR = SamplerYcbcrRange; + + enum class ChromaLocation + { + eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN, + eMidpoint = VK_CHROMA_LOCATION_MIDPOINT + }; + using ChromaLocationKHR = ChromaLocation; + + enum class DescriptorUpdateTemplateType + { + eDescriptorSet = 
VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR + }; + using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType; + + enum class DescriptorUpdateTemplateCreateFlagBits : VkDescriptorUpdateTemplateCreateFlags + { + }; + + using DescriptorUpdateTemplateCreateFlags = Flags; + using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorUpdateTemplateCreateFlags allFlags = {}; + }; + + enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, + eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eZirconVmoFUCHSIA = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + eRdmaAddressNV = VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + eScreenBufferQNX = VK_EXTERNAL_MEMORY_HANDLE_TYPE_SCREEN_BUFFER_BIT_QNX +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + }; + using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits; + + using ExternalMemoryHandleTypeFlags = Flags; + using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryHandleTypeFlags allFlags = + ExternalMemoryHandleTypeFlagBits::eOpaqueFd | ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 | ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt | + ExternalMemoryHandleTypeFlagBits::eD3D11Texture | ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt | ExternalMemoryHandleTypeFlagBits::eD3D12Heap | + ExternalMemoryHandleTypeFlagBits::eD3D12Resource | ExternalMemoryHandleTypeFlagBits::eDmaBufEXT +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + | ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + | ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT | ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT +#if defined( VK_USE_PLATFORM_FUCHSIA ) + | ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + | ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + | ExternalMemoryHandleTypeFlagBits::eScreenBufferQNX +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + ; + }; + + enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags + { + eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, 
+ eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT + }; + using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits; + + using ExternalMemoryFeatureFlags = Flags; + using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryFeatureFlags allFlags = + ExternalMemoryFeatureFlagBits::eDedicatedOnly | ExternalMemoryFeatureFlagBits::eExportable | ExternalMemoryFeatureFlagBits::eImportable; + }; + + enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT + }; + using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits; + + using ExternalFenceHandleTypeFlags = Flags; + using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalFenceHandleTypeFlags allFlags = + ExternalFenceHandleTypeFlagBits::eOpaqueFd | ExternalFenceHandleTypeFlagBits::eOpaqueWin32 | ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt | + ExternalFenceHandleTypeFlagBits::eSyncFd; + }; + + enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags + { + eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT + }; + using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits; + + using ExternalFenceFeatureFlags = Flags; + using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalFenceFeatureFlags allFlags = + ExternalFenceFeatureFlagBits::eExportable | ExternalFenceFeatureFlagBits::eImportable; + }; + + enum class FenceImportFlagBits : VkFenceImportFlags + { + eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT + }; + using FenceImportFlagBitsKHR = FenceImportFlagBits; + + using FenceImportFlags = Flags; + using FenceImportFlagsKHR = FenceImportFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR FenceImportFlags allFlags = FenceImportFlagBits::eTemporary; + }; + + enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags + { + eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT + }; + using SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits; + + using SemaphoreImportFlags = Flags; + using SemaphoreImportFlagsKHR = SemaphoreImportFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SemaphoreImportFlags allFlags = SemaphoreImportFlagBits::eTemporary; + }; + + enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + eD3D11Fence = 
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT, + eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eZirconEventFUCHSIA = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + }; + using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits; + + using ExternalSemaphoreHandleTypeFlags = Flags; + using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalSemaphoreHandleTypeFlags allFlags = + ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd | ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 | + ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt | ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence | ExternalSemaphoreHandleTypeFlagBits::eSyncFd +#if defined( VK_USE_PLATFORM_FUCHSIA ) + | ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + ; + }; + + enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags + { + eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT + }; + using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits; + + using ExternalSemaphoreFeatureFlags = Flags; + using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalSemaphoreFeatureFlags allFlags = + ExternalSemaphoreFeatureFlagBits::eExportable | ExternalSemaphoreFeatureFlagBits::eImportable; + }; + + //=== VK_VERSION_1_2 === + + enum class DriverId + { + eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY, + eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE, + eMesaRadv = VK_DRIVER_ID_MESA_RADV, + eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY, + eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS, + eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA, + eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY, + eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY, + eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY, + eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, + eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY, + eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY, + eMesaLlvmpipe = VK_DRIVER_ID_MESA_LLVMPIPE, + eMoltenvk = VK_DRIVER_ID_MOLTENVK, + eCoreaviProprietary = VK_DRIVER_ID_COREAVI_PROPRIETARY, + eJuiceProprietary = VK_DRIVER_ID_JUICE_PROPRIETARY, + eVerisiliconProprietary = VK_DRIVER_ID_VERISILICON_PROPRIETARY, + eMesaTurnip = VK_DRIVER_ID_MESA_TURNIP, + eMesaV3Dv = VK_DRIVER_ID_MESA_V3DV, + eMesaPanvk = VK_DRIVER_ID_MESA_PANVK, + eSamsungProprietary = VK_DRIVER_ID_SAMSUNG_PROPRIETARY, + eMesaVenus = VK_DRIVER_ID_MESA_VENUS, + eMesaDozen = VK_DRIVER_ID_MESA_DOZEN, + eMesaNvk = VK_DRIVER_ID_MESA_NVK, + eImaginationOpenSourceMESA = VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA, + eMesaAgxv = VK_DRIVER_ID_MESA_AGXV + }; + using DriverIdKHR = DriverId; + + enum class ShaderFloatControlsIndependence + { + e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY, + eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL, + eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE + }; + using ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence; + + enum class DescriptorBindingFlagBits : 
VkDescriptorBindingFlags + { + eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, + eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, + ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT, + eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT + }; + using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits; + + using DescriptorBindingFlags = Flags; + using DescriptorBindingFlagsEXT = DescriptorBindingFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorBindingFlags allFlags = + DescriptorBindingFlagBits::eUpdateAfterBind | DescriptorBindingFlagBits::eUpdateUnusedWhilePending | DescriptorBindingFlagBits::ePartiallyBound | + DescriptorBindingFlagBits::eVariableDescriptorCount; + }; + + enum class ResolveModeFlagBits : VkResolveModeFlags + { + eNone = VK_RESOLVE_MODE_NONE, + eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT, + eAverage = VK_RESOLVE_MODE_AVERAGE_BIT, + eMin = VK_RESOLVE_MODE_MIN_BIT, + eMax = VK_RESOLVE_MODE_MAX_BIT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eExternalFormatDownsampleANDROID = VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + }; + using ResolveModeFlagBitsKHR = ResolveModeFlagBits; + + using ResolveModeFlags = Flags; + using ResolveModeFlagsKHR = ResolveModeFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ResolveModeFlags allFlags = ResolveModeFlagBits::eNone | ResolveModeFlagBits::eSampleZero | + ResolveModeFlagBits::eAverage | ResolveModeFlagBits::eMin | ResolveModeFlagBits::eMax +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + | ResolveModeFlagBits::eExternalFormatDownsampleANDROID +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + ; + }; + + enum class SamplerReductionMode + { + eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE, + eMin = VK_SAMPLER_REDUCTION_MODE_MIN, + eMax = VK_SAMPLER_REDUCTION_MODE_MAX, + eWeightedAverageRangeclampQCOM = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_RANGECLAMP_QCOM + }; + using SamplerReductionModeEXT = SamplerReductionMode; + + enum class SemaphoreType + { + eBinary = VK_SEMAPHORE_TYPE_BINARY, + eTimeline = VK_SEMAPHORE_TYPE_TIMELINE + }; + using SemaphoreTypeKHR = SemaphoreType; + + enum class SemaphoreWaitFlagBits : VkSemaphoreWaitFlags + { + eAny = VK_SEMAPHORE_WAIT_ANY_BIT + }; + using SemaphoreWaitFlagBitsKHR = SemaphoreWaitFlagBits; + + using SemaphoreWaitFlags = Flags; + using SemaphoreWaitFlagsKHR = SemaphoreWaitFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SemaphoreWaitFlags allFlags = SemaphoreWaitFlagBits::eAny; + }; + + //=== VK_VERSION_1_3 === + + enum class PipelineCreationFeedbackFlagBits : VkPipelineCreationFeedbackFlags + { + eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT, + eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT, + eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT + }; + using PipelineCreationFeedbackFlagBitsEXT = PipelineCreationFeedbackFlagBits; + + using PipelineCreationFeedbackFlags = Flags; + using PipelineCreationFeedbackFlagsEXT = PipelineCreationFeedbackFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static 
+
+  //=== VK_VERSION_1_3 ===
+
+  enum class PipelineCreationFeedbackFlagBits : VkPipelineCreationFeedbackFlags
+  {
+    eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT,
+    eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT,
+    eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT
+  };
+  using PipelineCreationFeedbackFlagBitsEXT = PipelineCreationFeedbackFlagBits;
+
+  using PipelineCreationFeedbackFlags = Flags<PipelineCreationFeedbackFlagBits>;
+  using PipelineCreationFeedbackFlagsEXT = PipelineCreationFeedbackFlags;
+
+  template <>
+  struct FlagTraits<PipelineCreationFeedbackFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreationFeedbackFlags allFlags = PipelineCreationFeedbackFlagBits::eValid |
+                                                                                  PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit |
+                                                                                  PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration;
+  };
+
+  enum class ToolPurposeFlagBits : VkToolPurposeFlags
+  {
+    eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT,
+    eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT,
+    eTracing = VK_TOOL_PURPOSE_TRACING_BIT,
+    eAdditionalFeatures = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT,
+    eModifyingFeatures = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT,
+    eDebugReportingEXT = VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT,
+    eDebugMarkersEXT = VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT
+  };
+  using ToolPurposeFlagBitsEXT = ToolPurposeFlagBits;
+
+  using ToolPurposeFlags = Flags<ToolPurposeFlagBits>;
+  using ToolPurposeFlagsEXT = ToolPurposeFlags;
+
+  template <>
+  struct FlagTraits<ToolPurposeFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ToolPurposeFlags allFlags =
+      ToolPurposeFlagBits::eValidation | ToolPurposeFlagBits::eProfiling | ToolPurposeFlagBits::eTracing | ToolPurposeFlagBits::eAdditionalFeatures |
+      ToolPurposeFlagBits::eModifyingFeatures | ToolPurposeFlagBits::eDebugReportingEXT | ToolPurposeFlagBits::eDebugMarkersEXT;
+  };
+
+  enum class PrivateDataSlotCreateFlagBits : VkPrivateDataSlotCreateFlags
+  {
+  };
+  using PrivateDataSlotCreateFlagBitsEXT = PrivateDataSlotCreateFlagBits;
+
+  using PrivateDataSlotCreateFlags = Flags<PrivateDataSlotCreateFlagBits>;
+  using PrivateDataSlotCreateFlagsEXT = PrivateDataSlotCreateFlags;
+
+  template <>
+  struct FlagTraits<PrivateDataSlotCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PrivateDataSlotCreateFlags allFlags = {};
+  };
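+
+  // Usage sketch (illustrative only; names are placeholders, assumes Vulkan 1.3, a valid
+  // vk::Device `device` and a raw uint64_t object handle `imageHandle`): private data slots
+  // attach a per-object uint64_t payload to any Vulkan handle.
+  //
+  //   vk::PrivateDataSlot slot = device.createPrivateDataSlot( vk::PrivateDataSlotCreateInfo{} );
+  //   device.setPrivateData( vk::ObjectType::eImage, imageHandle, slot, 42 );
+  //   uint64_t tag = device.getPrivateData( vk::ObjectType::eImage, imageHandle, slot );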
+
+  enum class PipelineStageFlagBits2 : VkPipelineStageFlags2
+  {
+    eNone = VK_PIPELINE_STAGE_2_NONE,
+    eTopOfPipe = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT,
+    eDrawIndirect = VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT,
+    eVertexInput = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT,
+    eVertexShader = VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT,
+    eTessellationControlShader = VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT,
+    eTessellationEvaluationShader = VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT,
+    eGeometryShader = VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT,
+    eFragmentShader = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT,
+    eEarlyFragmentTests = VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT,
+    eLateFragmentTests = VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT,
+    eColorAttachmentOutput = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT,
+    eComputeShader = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT,
+    eAllTransfer = VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT,
+    eTransfer = VK_PIPELINE_STAGE_2_TRANSFER_BIT,
+    eBottomOfPipe = VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT,
+    eHost = VK_PIPELINE_STAGE_2_HOST_BIT,
+    eAllGraphics = VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT,
+    eAllCommands = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
+    eCopy = VK_PIPELINE_STAGE_2_COPY_BIT,
+    eResolve = VK_PIPELINE_STAGE_2_RESOLVE_BIT,
+    eBlit = VK_PIPELINE_STAGE_2_BLIT_BIT,
+    eClear = VK_PIPELINE_STAGE_2_CLEAR_BIT,
+    eIndexInput = VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT,
+    eVertexAttributeInput = VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT,
+    ePreRasterizationShaders = VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT,
+    eVideoDecodeKHR = VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR,
+    eVideoEncodeKHR = VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR,
+    eTransformFeedbackEXT = VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT,
+    eConditionalRenderingEXT = VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT,
+    eCommandPreprocessNV = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV,
+    eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    eShadingRateImageNV = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV,
+    eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
+    eRayTracingShaderKHR = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR,
+    eRayTracingShaderNV = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV,
+    eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+    eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
+    eTaskShaderNV = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV,
+    eMeshShaderNV = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV,
+    eTaskShaderEXT = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT,
+    eMeshShaderEXT = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT,
+    eSubpassShaderHUAWEI = VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI,
+    eSubpassShadingHUAWEI = VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI,
+    eInvocationMaskHUAWEI = VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI,
+    eAccelerationStructureCopyKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR,
+    eMicromapBuildEXT = VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT,
+    eClusterCullingShaderHUAWEI = VK_PIPELINE_STAGE_2_CLUSTER_CULLING_SHADER_BIT_HUAWEI,
+    eOpticalFlowNV = VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV
+  };
+  using PipelineStageFlagBits2KHR = PipelineStageFlagBits2;
+
+  using PipelineStageFlags2 = Flags<PipelineStageFlagBits2>;
+  using PipelineStageFlags2KHR = PipelineStageFlags2;
+
+  template <>
+  struct FlagTraits<PipelineStageFlagBits2>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineStageFlags2 allFlags =
+      PipelineStageFlagBits2::eNone | PipelineStageFlagBits2::eTopOfPipe | PipelineStageFlagBits2::eDrawIndirect | PipelineStageFlagBits2::eVertexInput |
+      PipelineStageFlagBits2::eVertexShader | PipelineStageFlagBits2::eTessellationControlShader | PipelineStageFlagBits2::eTessellationEvaluationShader |
+      PipelineStageFlagBits2::eGeometryShader | PipelineStageFlagBits2::eFragmentShader | PipelineStageFlagBits2::eEarlyFragmentTests |
+      PipelineStageFlagBits2::eLateFragmentTests | PipelineStageFlagBits2::eColorAttachmentOutput | PipelineStageFlagBits2::eComputeShader |
+      PipelineStageFlagBits2::eAllTransfer | PipelineStageFlagBits2::eBottomOfPipe | PipelineStageFlagBits2::eHost | PipelineStageFlagBits2::eAllGraphics |
+      PipelineStageFlagBits2::eAllCommands | PipelineStageFlagBits2::eCopy | PipelineStageFlagBits2::eResolve | PipelineStageFlagBits2::eBlit |
+      PipelineStageFlagBits2::eClear | PipelineStageFlagBits2::eIndexInput | PipelineStageFlagBits2::eVertexAttributeInput |
+      PipelineStageFlagBits2::ePreRasterizationShaders | PipelineStageFlagBits2::eVideoDecodeKHR | PipelineStageFlagBits2::eVideoEncodeKHR |
+      PipelineStageFlagBits2::eTransformFeedbackEXT | PipelineStageFlagBits2::eConditionalRenderingEXT | PipelineStageFlagBits2::eCommandPreprocessNV |
+      PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR | PipelineStageFlagBits2::eAccelerationStructureBuildKHR |
+      PipelineStageFlagBits2::eRayTracingShaderKHR | PipelineStageFlagBits2::eFragmentDensityProcessEXT | PipelineStageFlagBits2::eTaskShaderEXT |
+      PipelineStageFlagBits2::eMeshShaderEXT | PipelineStageFlagBits2::eSubpassShaderHUAWEI | PipelineStageFlagBits2::eInvocationMaskHUAWEI |
+      PipelineStageFlagBits2::eAccelerationStructureCopyKHR | PipelineStageFlagBits2::eMicromapBuildEXT |
+      PipelineStageFlagBits2::eClusterCullingShaderHUAWEI | PipelineStageFlagBits2::eOpticalFlowNV;
+  };
+
+  enum class AccessFlagBits2 : VkAccessFlags2
+  {
+    eNone = VK_ACCESS_2_NONE,
+    eIndirectCommandRead = VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT,
+    eIndexRead = VK_ACCESS_2_INDEX_READ_BIT,
+    eVertexAttributeRead = VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT,
+    eUniformRead = VK_ACCESS_2_UNIFORM_READ_BIT,
+    eInputAttachmentRead = VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT,
+    eShaderRead = VK_ACCESS_2_SHADER_READ_BIT,
+    eShaderWrite = VK_ACCESS_2_SHADER_WRITE_BIT,
+    eColorAttachmentRead = VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT,
+    eColorAttachmentWrite = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT,
+    eDepthStencilAttachmentRead = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
+    eDepthStencilAttachmentWrite = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+    eTransferRead = VK_ACCESS_2_TRANSFER_READ_BIT,
+    eTransferWrite = VK_ACCESS_2_TRANSFER_WRITE_BIT,
+    eHostRead = VK_ACCESS_2_HOST_READ_BIT,
+    eHostWrite = VK_ACCESS_2_HOST_WRITE_BIT,
+    eMemoryRead = VK_ACCESS_2_MEMORY_READ_BIT,
+    eMemoryWrite = VK_ACCESS_2_MEMORY_WRITE_BIT,
+    eShaderSampledRead = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT,
+    eShaderStorageRead = VK_ACCESS_2_SHADER_STORAGE_READ_BIT,
+    eShaderStorageWrite = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT,
+    eVideoDecodeReadKHR = VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR,
+    eVideoDecodeWriteKHR = VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR,
+    eVideoEncodeReadKHR = VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR,
+    eVideoEncodeWriteKHR = VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR,
+    eTransformFeedbackWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
+    eTransformFeedbackCounterReadEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
+    eTransformFeedbackCounterWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
+    eConditionalRenderingReadEXT = VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT,
+    eCommandPreprocessReadNV = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV,
+    eCommandPreprocessWriteNV = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV,
+    eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR,
+    eShadingRateImageReadNV = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV,
+    eAccelerationStructureReadKHR = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR,
+    eAccelerationStructureWriteKHR = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
+    eAccelerationStructureReadNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV,
+    eAccelerationStructureWriteNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
+    eFragmentDensityMapReadEXT = VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
+    eColorAttachmentReadNoncoherentEXT = VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
+    eDescriptorBufferReadEXT = VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT,
+    eInvocationMaskReadHUAWEI = VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI,
+    eShaderBindingTableReadKHR = VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR,
+    eMicromapReadEXT = VK_ACCESS_2_MICROMAP_READ_BIT_EXT,
+    eMicromapWriteEXT = VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT,
+    eOpticalFlowReadNV = VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV,
+    eOpticalFlowWriteNV = VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV
+  };
+  using AccessFlagBits2KHR = AccessFlagBits2;
+
+  using AccessFlags2 = Flags<AccessFlagBits2>;
+  using AccessFlags2KHR = AccessFlags2;
+
+  template <>
+  struct FlagTraits<AccessFlagBits2>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AccessFlags2 allFlags =
+      AccessFlagBits2::eNone | AccessFlagBits2::eIndirectCommandRead | AccessFlagBits2::eIndexRead | AccessFlagBits2::eVertexAttributeRead |
+      AccessFlagBits2::eUniformRead | AccessFlagBits2::eInputAttachmentRead | AccessFlagBits2::eShaderRead | AccessFlagBits2::eShaderWrite |
+      AccessFlagBits2::eColorAttachmentRead | AccessFlagBits2::eColorAttachmentWrite | AccessFlagBits2::eDepthStencilAttachmentRead |
+      AccessFlagBits2::eDepthStencilAttachmentWrite | AccessFlagBits2::eTransferRead | AccessFlagBits2::eTransferWrite | AccessFlagBits2::eHostRead |
+      AccessFlagBits2::eHostWrite | AccessFlagBits2::eMemoryRead | AccessFlagBits2::eMemoryWrite | AccessFlagBits2::eShaderSampledRead |
+      AccessFlagBits2::eShaderStorageRead | AccessFlagBits2::eShaderStorageWrite | AccessFlagBits2::eVideoDecodeReadKHR |
+      AccessFlagBits2::eVideoDecodeWriteKHR | AccessFlagBits2::eVideoEncodeReadKHR | AccessFlagBits2::eVideoEncodeWriteKHR |
+      AccessFlagBits2::eTransformFeedbackWriteEXT | AccessFlagBits2::eTransformFeedbackCounterReadEXT | AccessFlagBits2::eTransformFeedbackCounterWriteEXT |
+      AccessFlagBits2::eConditionalRenderingReadEXT | AccessFlagBits2::eCommandPreprocessReadNV | AccessFlagBits2::eCommandPreprocessWriteNV |
+      AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR | AccessFlagBits2::eAccelerationStructureReadKHR |
+      AccessFlagBits2::eAccelerationStructureWriteKHR | AccessFlagBits2::eFragmentDensityMapReadEXT | AccessFlagBits2::eColorAttachmentReadNoncoherentEXT |
+      AccessFlagBits2::eDescriptorBufferReadEXT | AccessFlagBits2::eInvocationMaskReadHUAWEI | AccessFlagBits2::eShaderBindingTableReadKHR |
+      AccessFlagBits2::eMicromapReadEXT | AccessFlagBits2::eMicromapWriteEXT | AccessFlagBits2::eOpticalFlowReadNV | AccessFlagBits2::eOpticalFlowWriteNV;
+  };
+
+  enum class SubmitFlagBits : VkSubmitFlags
+  {
+    eProtected = VK_SUBMIT_PROTECTED_BIT
+  };
+  using SubmitFlagBitsKHR = SubmitFlagBits;
+
+  using SubmitFlags = Flags<SubmitFlagBits>;
+  using SubmitFlagsKHR = SubmitFlags;
+
+  template <>
+  struct FlagTraits<SubmitFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SubmitFlags allFlags = SubmitFlagBits::eProtected;
+  };
+
+  enum class RenderingFlagBits : VkRenderingFlags
+  {
+    eContentsSecondaryCommandBuffers = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT,
+    eSuspending = VK_RENDERING_SUSPENDING_BIT,
+    eResuming = VK_RENDERING_RESUMING_BIT,
+    eContentsInlineEXT = VK_RENDERING_CONTENTS_INLINE_BIT_EXT,
+    eEnableLegacyDitheringEXT = VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT
+  };
+  using RenderingFlagBitsKHR = RenderingFlagBits;
+
+  using RenderingFlags = Flags<RenderingFlagBits>;
+  using RenderingFlagsKHR = RenderingFlags;
+
+  template <>
+  struct FlagTraits<RenderingFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR RenderingFlags allFlags = RenderingFlagBits::eContentsSecondaryCommandBuffers | RenderingFlagBits::eSuspending |
+                                                                   RenderingFlagBits::eResuming | RenderingFlagBits::eContentsInlineEXT |
+                                                                   RenderingFlagBits::eEnableLegacyDitheringEXT;
+  };
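+
+  // Usage sketch (illustrative only; assumes a vk::CommandBuffer `cmd` in the recording state):
+  // the synchronization2 stage/access flags above combine into a vk::MemoryBarrier2.
+  //
+  //   vk::MemoryBarrier2 barrier{};
+  //   barrier.srcStageMask  = vk::PipelineStageFlagBits2::eColorAttachmentOutput;
+  //   barrier.srcAccessMask = vk::AccessFlagBits2::eColorAttachmentWrite;
+  //   barrier.dstStageMask  = vk::PipelineStageFlagBits2::eFragmentShader;
+  //   barrier.dstAccessMask = vk::AccessFlagBits2::eShaderSampledRead;
+  //
+  //   vk::DependencyInfo depInfo{};
+  //   depInfo.memoryBarrierCount = 1;
+  //   depInfo.pMemoryBarriers = &barrier;
+  //   cmd.pipelineBarrier2( depInfo );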
+
+  enum class FormatFeatureFlagBits2 : VkFormatFeatureFlags2
+  {
+    eSampledImage = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT,
+    eStorageImage = VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT,
+    eStorageImageAtomic = VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT,
+    eUniformTexelBuffer = VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT,
+    eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
+    eVertexBuffer = VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT,
+    eColorAttachment = VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT,
+    eColorAttachmentBlend = VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT,
+    eDepthStencilAttachment = VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eBlitSrc = VK_FORMAT_FEATURE_2_BLIT_SRC_BIT,
+    eBlitDst = VK_FORMAT_FEATURE_2_BLIT_DST_BIT,
+    eSampledImageFilterLinear = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+    eSampledImageFilterCubic = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT,
+    eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
+    eTransferSrc = VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT,
+    eTransferDst = VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT,
+    eSampledImageFilterMinmax = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
+    eMidpointChromaSamples = VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT,
+    eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+    eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitForceable =
+      VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
+    eDisjoint = VK_FORMAT_FEATURE_2_DISJOINT_BIT,
+    eCositedChromaSamples = VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT,
+    eStorageReadWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT,
+    eStorageWriteWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT,
+    eSampledImageDepthComparison = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT,
+    eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR,
+    eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR,
+    eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR,
+    eFragmentDensityMapEXT = VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    eHostImageTransferEXT = VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT,
+    eVideoEncodeInputKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR,
+    eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR,
+    eLinearColorAttachmentNV = VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV,
+    eWeightImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM,
+    eWeightSampledImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM,
+    eBlockMatchingQCOM = VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM,
+    eBoxFilterSampledQCOM = VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM,
+    eOpticalFlowImageNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV,
+    eOpticalFlowVectorNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV,
+    eOpticalFlowCostNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV
+  };
+  using FormatFeatureFlagBits2KHR = FormatFeatureFlagBits2;
+
+  using FormatFeatureFlags2 = Flags<FormatFeatureFlagBits2>;
+  using FormatFeatureFlags2KHR = FormatFeatureFlags2;
+
+  template <>
+  struct FlagTraits<FormatFeatureFlagBits2>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR FormatFeatureFlags2 allFlags =
+      FormatFeatureFlagBits2::eSampledImage | FormatFeatureFlagBits2::eStorageImage | FormatFeatureFlagBits2::eStorageImageAtomic |
+      FormatFeatureFlagBits2::eUniformTexelBuffer | FormatFeatureFlagBits2::eStorageTexelBuffer | FormatFeatureFlagBits2::eStorageTexelBufferAtomic |
+      FormatFeatureFlagBits2::eVertexBuffer | FormatFeatureFlagBits2::eColorAttachment | FormatFeatureFlagBits2::eColorAttachmentBlend |
+      FormatFeatureFlagBits2::eDepthStencilAttachment | FormatFeatureFlagBits2::eBlitSrc | FormatFeatureFlagBits2::eBlitDst |
+      FormatFeatureFlagBits2::eSampledImageFilterLinear | FormatFeatureFlagBits2::eSampledImageFilterCubic | FormatFeatureFlagBits2::eTransferSrc |
+      FormatFeatureFlagBits2::eTransferDst | FormatFeatureFlagBits2::eSampledImageFilterMinmax | FormatFeatureFlagBits2::eMidpointChromaSamples |
+      FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter | FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter |
+      FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit |
+      FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable | FormatFeatureFlagBits2::eDisjoint |
+      FormatFeatureFlagBits2::eCositedChromaSamples | FormatFeatureFlagBits2::eStorageReadWithoutFormat | FormatFeatureFlagBits2::eStorageWriteWithoutFormat |
+      FormatFeatureFlagBits2::eSampledImageDepthComparison | FormatFeatureFlagBits2::eVideoDecodeOutputKHR | FormatFeatureFlagBits2::eVideoDecodeDpbKHR |
+      FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR | FormatFeatureFlagBits2::eFragmentDensityMapEXT |
+      FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR | FormatFeatureFlagBits2::eHostImageTransferEXT | FormatFeatureFlagBits2::eVideoEncodeInputKHR |
+      FormatFeatureFlagBits2::eVideoEncodeDpbKHR | FormatFeatureFlagBits2::eLinearColorAttachmentNV | FormatFeatureFlagBits2::eWeightImageQCOM |
+      FormatFeatureFlagBits2::eWeightSampledImageQCOM | FormatFeatureFlagBits2::eBlockMatchingQCOM | FormatFeatureFlagBits2::eBoxFilterSampledQCOM |
+      FormatFeatureFlagBits2::eOpticalFlowImageNV | FormatFeatureFlagBits2::eOpticalFlowVectorNV | FormatFeatureFlagBits2::eOpticalFlowCostNV;
+  };
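+
+  // Usage sketch (illustrative only; assumes a valid vk::PhysicalDevice `gpu`): the 64-bit
+  // FormatFeatureFlags2 are reported through vk::FormatProperties3 chained into
+  // vk::FormatProperties2 (core since Vulkan 1.3).
+  //
+  //   vk::FormatProperties3 props3{};
+  //   vk::FormatProperties2 props2{};
+  //   props2.pNext = &props3;
+  //   gpu.getFormatProperties2( vk::Format::eR8G8B8A8Unorm, &props2 );
+  //   if ( props3.optimalTilingFeatures & vk::FormatFeatureFlagBits2::eStorageImage )
+  //   {
+  //     // the format can back a storage image with optimal tiling
+  //   }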
+
+  //=== VK_KHR_surface ===
+
+  enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR
+  {
+    eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
+    eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
+    eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
+    eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
+    eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
+    eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
+    eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
+    eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
+    eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
+  };
+
+  using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<SurfaceTransformFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SurfaceTransformFlagsKHR allFlags =
+      SurfaceTransformFlagBitsKHR::eIdentity | SurfaceTransformFlagBitsKHR::eRotate90 | SurfaceTransformFlagBitsKHR::eRotate180 |
+      SurfaceTransformFlagBitsKHR::eRotate270 | SurfaceTransformFlagBitsKHR::eHorizontalMirror | SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 |
+      SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 | SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 | SurfaceTransformFlagBitsKHR::eInherit;
+  };
+
+  enum class PresentModeKHR
+  {
+    eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
+    eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
+    eFifo = VK_PRESENT_MODE_FIFO_KHR,
+    eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
+    eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
+    eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR
+  };
+
+  enum class ColorSpaceKHR
+  {
+    eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+    eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR,
+    eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
+    eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
+    eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
+    eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
+    eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT,
+    eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
+    eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT,
+    eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT,
+    eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT,
+    eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT,
+    eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
+    eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
+    ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT,
+    eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT,
+    eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT,
+    eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD
+  };
+
+  enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR
+  {
+    eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
+    ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
+    ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
+    eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
+  };
+
+  using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<CompositeAlphaFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR CompositeAlphaFlagsKHR allFlags = CompositeAlphaFlagBitsKHR::eOpaque | CompositeAlphaFlagBitsKHR::ePreMultiplied |
+                                                                           CompositeAlphaFlagBitsKHR::ePostMultiplied | CompositeAlphaFlagBitsKHR::eInherit;
+  };
+
+  //=== VK_KHR_swapchain ===
+
+  enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR
+  {
+    eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
+    eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR,
+    eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR,
+    eDeferredMemoryAllocationEXT = VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT
+  };
+
+  using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<SwapchainCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SwapchainCreateFlagsKHR allFlags =
+      SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions | SwapchainCreateFlagBitsKHR::eProtected | SwapchainCreateFlagBitsKHR::eMutableFormat |
+      SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT;
+  };
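+
+  // Usage sketch (illustrative only; assumes a valid vk::PhysicalDevice `gpu` and
+  // vk::SurfaceKHR `surface`): only eFifo is guaranteed to be supported, so prefer eMailbox
+  // when the surface offers it.
+  //
+  //   std::vector<vk::PresentModeKHR> modes = gpu.getSurfacePresentModesKHR( surface );
+  //   vk::PresentModeKHR chosen = vk::PresentModeKHR::eFifo; // always available
+  //   for ( vk::PresentModeKHR mode : modes )
+  //     if ( mode == vk::PresentModeKHR::eMailbox )
+  //       chosen = mode; // low-latency and tear-free where available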
+
+  enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR
+  {
+    eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
+    eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR,
+    eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR,
+    eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR
+  };
+
+  using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceGroupPresentModeFlagsKHR allFlags =
+      DeviceGroupPresentModeFlagBitsKHR::eLocal | DeviceGroupPresentModeFlagBitsKHR::eRemote | DeviceGroupPresentModeFlagBitsKHR::eSum |
+      DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice;
+  };
+
+  //=== VK_KHR_display ===
+
+  enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR
+  {
+    eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
+    eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
+    ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
+    ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
+  };
+
+  using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DisplayPlaneAlphaFlagsKHR allFlags = DisplayPlaneAlphaFlagBitsKHR::eOpaque | DisplayPlaneAlphaFlagBitsKHR::eGlobal |
+                                                                              DisplayPlaneAlphaFlagBitsKHR::ePerPixel |
+                                                                              DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied;
+  };
+
+  enum class DisplayModeCreateFlagBitsKHR : VkDisplayModeCreateFlagsKHR
+  {
+  };
+
+  using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<DisplayModeCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DisplayModeCreateFlagsKHR allFlags = {};
+  };
+
+  enum class DisplaySurfaceCreateFlagBitsKHR : VkDisplaySurfaceCreateFlagsKHR
+  {
+  };
+
+  using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<DisplaySurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DisplaySurfaceCreateFlagsKHR allFlags = {};
+  };
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+  enum class XlibSurfaceCreateFlagBitsKHR : VkXlibSurfaceCreateFlagsKHR
+  {
+  };
+
+  using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<XlibSurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR XlibSurfaceCreateFlagsKHR allFlags = {};
+  };
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+  enum class XcbSurfaceCreateFlagBitsKHR : VkXcbSurfaceCreateFlagsKHR
+  {
+  };
+
+  using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<XcbSurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR XcbSurfaceCreateFlagsKHR allFlags = {};
+  };
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+  enum class WaylandSurfaceCreateFlagBitsKHR : VkWaylandSurfaceCreateFlagsKHR
+  {
+  };
+
+  using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<WaylandSurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR WaylandSurfaceCreateFlagsKHR allFlags = {};
+  };
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+
+  enum class AndroidSurfaceCreateFlagBitsKHR : VkAndroidSurfaceCreateFlagsKHR
+  {
+  };
+
+  using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<AndroidSurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AndroidSurfaceCreateFlagsKHR allFlags = {};
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+  enum class Win32SurfaceCreateFlagBitsKHR : VkWin32SurfaceCreateFlagsKHR
+  {
+  };
+
+  using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<Win32SurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR Win32SurfaceCreateFlagsKHR allFlags = {};
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+
+  enum class DebugReportFlagBitsEXT : VkDebugReportFlagsEXT
+  {
+    eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
+    eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
+    ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+    eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
+    eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
+  };
+
+  using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT>;
+
+  template <>
+  struct FlagTraits<DebugReportFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DebugReportFlagsEXT allFlags = DebugReportFlagBitsEXT::eInformation | DebugReportFlagBitsEXT::eWarning |
+                                                                        DebugReportFlagBitsEXT::ePerformanceWarning | DebugReportFlagBitsEXT::eError |
+                                                                        DebugReportFlagBitsEXT::eDebug;
+  };
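+
+  // Usage sketch (illustrative only; `debugCallback` is a placeholder PFN_vkDebugReportCallbackEXT,
+  // and note that VK_EXT_debug_report is deprecated in favor of VK_EXT_debug_utils): the
+  // FlagTraits specialization above is what makes operator| well-formed on these bits.
+  //
+  //   vk::DebugReportFlagsEXT severity = vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning;
+  //   vk::DebugReportCallbackCreateInfoEXT createInfo{ severity, debugCallback };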
+
+  enum class DebugReportObjectTypeEXT
+  {
+    eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+    eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+    ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+    eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+    eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+    eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+    eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+    eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+    eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+    eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+    eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+    eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
+    eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
+    eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
+    eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+    eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+    ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
+    ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+    eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+    ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+    eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+    eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
+    eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+    eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+    eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
+    eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
+    eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
+    eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+    eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
+    eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
+    eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
+    eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
+    eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
+    eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT,
+    eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
+    eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
+    eCuModuleNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT,
+    eCuFunctionNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT,
+    eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT,
+    eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT,
+    eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT,
+    eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eCudaModuleNV = VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV_EXT,
+    eCudaFunctionNV = VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV_EXT,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eBufferCollectionFUCHSIA = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+  };
+
+  //=== VK_AMD_rasterization_order ===
+
+  enum class RasterizationOrderAMD
+  {
+    eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
+    eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
+  };
+
+  //=== VK_KHR_video_queue ===
+
+  enum class VideoCodecOperationFlagBitsKHR : VkVideoCodecOperationFlagsKHR
+  {
+    eNone = VK_VIDEO_CODEC_OPERATION_NONE_KHR,
+    eEncodeH264 = VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR,
+    eEncodeH265 = VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR,
+    eDecodeH264 = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR,
+    eDecodeH265 = VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR
+  };
+
+  using VideoCodecOperationFlagsKHR = Flags<VideoCodecOperationFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoCodecOperationFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCodecOperationFlagsKHR allFlags =
+      VideoCodecOperationFlagBitsKHR::eNone | VideoCodecOperationFlagBitsKHR::eEncodeH264 | VideoCodecOperationFlagBitsKHR::eEncodeH265 |
+      VideoCodecOperationFlagBitsKHR::eDecodeH264 | VideoCodecOperationFlagBitsKHR::eDecodeH265;
+  };
+
+  enum class VideoChromaSubsamplingFlagBitsKHR : VkVideoChromaSubsamplingFlagsKHR
+  {
+    eInvalid = VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR,
+    eMonochrome = VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR,
+    e420 = VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR,
+    e422 = VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR,
+    e444 = VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR
+  };
+
+  using VideoChromaSubsamplingFlagsKHR = Flags<VideoChromaSubsamplingFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoChromaSubsamplingFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoChromaSubsamplingFlagsKHR allFlags =
+      VideoChromaSubsamplingFlagBitsKHR::eInvalid | VideoChromaSubsamplingFlagBitsKHR::eMonochrome | VideoChromaSubsamplingFlagBitsKHR::e420 |
+      VideoChromaSubsamplingFlagBitsKHR::e422 | VideoChromaSubsamplingFlagBitsKHR::e444;
+  };
+
+  enum class VideoComponentBitDepthFlagBitsKHR : VkVideoComponentBitDepthFlagsKHR
+  {
+    eInvalid = VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR,
+    e8 = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR,
+    e10 = VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR,
+    e12 = VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR
+  };
+
+  using VideoComponentBitDepthFlagsKHR = Flags<VideoComponentBitDepthFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoComponentBitDepthFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoComponentBitDepthFlagsKHR allFlags =
+      VideoComponentBitDepthFlagBitsKHR::eInvalid | VideoComponentBitDepthFlagBitsKHR::e8 | VideoComponentBitDepthFlagBitsKHR::e10 |
+      VideoComponentBitDepthFlagBitsKHR::e12;
+  };
+
+  enum class VideoCapabilityFlagBitsKHR : VkVideoCapabilityFlagsKHR
+  {
+    eProtectedContent = VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR,
+    eSeparateReferenceImages = VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR
+  };
+
+  using VideoCapabilityFlagsKHR = Flags<VideoCapabilityFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoCapabilityFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCapabilityFlagsKHR allFlags =
+      VideoCapabilityFlagBitsKHR::eProtectedContent | VideoCapabilityFlagBitsKHR::eSeparateReferenceImages;
+  };
+
+  enum class VideoSessionCreateFlagBitsKHR : VkVideoSessionCreateFlagsKHR
+  {
+    eProtectedContent = VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR,
+    eAllowEncodeParameterOptimizations = VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS_BIT_KHR,
+    eInlineQueries = VK_VIDEO_SESSION_CREATE_INLINE_QUERIES_BIT_KHR
+  };
+
+  using VideoSessionCreateFlagsKHR = Flags<VideoSessionCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoSessionCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoSessionCreateFlagsKHR allFlags = VideoSessionCreateFlagBitsKHR::eProtectedContent |
+                                                                               VideoSessionCreateFlagBitsKHR::eAllowEncodeParameterOptimizations |
+                                                                               VideoSessionCreateFlagBitsKHR::eInlineQueries;
+  };
+
+  enum class VideoCodingControlFlagBitsKHR : VkVideoCodingControlFlagsKHR
+  {
+    eReset = VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR,
+    eEncodeRateControl = VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR,
+    eEncodeQualityLevel = VK_VIDEO_CODING_CONTROL_ENCODE_QUALITY_LEVEL_BIT_KHR
+  };
+
+  using VideoCodingControlFlagsKHR = Flags<VideoCodingControlFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoCodingControlFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCodingControlFlagsKHR allFlags =
+      VideoCodingControlFlagBitsKHR::eReset | VideoCodingControlFlagBitsKHR::eEncodeRateControl | VideoCodingControlFlagBitsKHR::eEncodeQualityLevel;
+  };
+
+  enum class QueryResultStatusKHR
+  {
+    eError = VK_QUERY_RESULT_STATUS_ERROR_KHR,
+    eNotReady = VK_QUERY_RESULT_STATUS_NOT_READY_KHR,
+    eComplete = VK_QUERY_RESULT_STATUS_COMPLETE_KHR,
+    eInsufficientBitstreamBufferRange = VK_QUERY_RESULT_STATUS_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_KHR
+  };
+
+  enum class VideoSessionParametersCreateFlagBitsKHR : VkVideoSessionParametersCreateFlagsKHR
+  {
+  };
+
+  using VideoSessionParametersCreateFlagsKHR = Flags<VideoSessionParametersCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoSessionParametersCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoSessionParametersCreateFlagsKHR allFlags = {};
+  };
+
+  enum class VideoBeginCodingFlagBitsKHR : VkVideoBeginCodingFlagsKHR
+  {
+  };
+
+  using VideoBeginCodingFlagsKHR = Flags<VideoBeginCodingFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoBeginCodingFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoBeginCodingFlagsKHR allFlags = {};
+  };
+
+  enum class VideoEndCodingFlagBitsKHR : VkVideoEndCodingFlagsKHR
+  {
+  };
+
+  using VideoEndCodingFlagsKHR = Flags<VideoEndCodingFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoEndCodingFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEndCodingFlagsKHR allFlags = {};
+  };
+
+  //=== VK_KHR_video_decode_queue ===
+
+  enum class VideoDecodeCapabilityFlagBitsKHR : VkVideoDecodeCapabilityFlagsKHR
+  {
+    eDpbAndOutputCoincide = VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR,
+    eDpbAndOutputDistinct = VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR
+  };
+
+  using VideoDecodeCapabilityFlagsKHR = Flags<VideoDecodeCapabilityFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoDecodeCapabilityFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeCapabilityFlagsKHR allFlags =
+      VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide | VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct;
+  };
+
+  enum class VideoDecodeUsageFlagBitsKHR : VkVideoDecodeUsageFlagsKHR
+  {
+    eDefault = VK_VIDEO_DECODE_USAGE_DEFAULT_KHR,
+    eTranscoding = VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR,
+    eOffline = VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR,
+    eStreaming = VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR
+  };
+
+  using VideoDecodeUsageFlagsKHR = Flags<VideoDecodeUsageFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoDecodeUsageFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeUsageFlagsKHR allFlags = VideoDecodeUsageFlagBitsKHR::eDefault | VideoDecodeUsageFlagBitsKHR::eTranscoding |
+                                                                             VideoDecodeUsageFlagBitsKHR::eOffline | VideoDecodeUsageFlagBitsKHR::eStreaming;
+  };
+
+  enum class VideoDecodeFlagBitsKHR : VkVideoDecodeFlagsKHR
+  {
+  };
+
+  using VideoDecodeFlagsKHR = Flags<VideoDecodeFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoDecodeFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeFlagsKHR allFlags = {};
+  };
+
+  //=== VK_EXT_transform_feedback ===
+
+  enum class PipelineRasterizationStateStreamCreateFlagBitsEXT : VkPipelineRasterizationStateStreamCreateFlagsEXT
+  {
+  };
+
+  using PipelineRasterizationStateStreamCreateFlagsEXT = Flags<PipelineRasterizationStateStreamCreateFlagBitsEXT>;
+
+  template <>
+  struct FlagTraits<PipelineRasterizationStateStreamCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationStateStreamCreateFlagsEXT allFlags = {};
+  };
+
+  //=== VK_KHR_video_encode_h264 ===
+
+  enum class VideoEncodeH264CapabilityFlagBitsKHR : VkVideoEncodeH264CapabilityFlagsKHR
+  {
+    eHrdCompliance = VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_KHR,
+    ePredictionWeightTableGenerated = VK_VIDEO_ENCODE_H264_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR,
+    eRowUnalignedSlice = VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_KHR,
+    eDifferentSliceType = VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_KHR,
+    eBFrameInL0List = VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR,
+    eBFrameInL1List = VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR,
+    ePerPictureTypeMinMaxQp = VK_VIDEO_ENCODE_H264_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR,
+    ePerSliceConstantQp = VK_VIDEO_ENCODE_H264_CAPABILITY_PER_SLICE_CONSTANT_QP_BIT_KHR,
+    eGeneratePrefixNalu = VK_VIDEO_ENCODE_H264_CAPABILITY_GENERATE_PREFIX_NALU_BIT_KHR
+  };
+
+  using VideoEncodeH264CapabilityFlagsKHR = Flags<VideoEncodeH264CapabilityFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoEncodeH264CapabilityFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH264CapabilityFlagsKHR allFlags =
+      VideoEncodeH264CapabilityFlagBitsKHR::eHrdCompliance | VideoEncodeH264CapabilityFlagBitsKHR::ePredictionWeightTableGenerated |
+      VideoEncodeH264CapabilityFlagBitsKHR::eRowUnalignedSlice | VideoEncodeH264CapabilityFlagBitsKHR::eDifferentSliceType |
+      VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL0List | VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL1List |
+      VideoEncodeH264CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp | VideoEncodeH264CapabilityFlagBitsKHR::ePerSliceConstantQp |
+      VideoEncodeH264CapabilityFlagBitsKHR::eGeneratePrefixNalu;
+  };
+
+  enum class VideoEncodeH264StdFlagBitsKHR : VkVideoEncodeH264StdFlagsKHR
+  {
+    eSeparateColorPlaneFlagSet = VK_VIDEO_ENCODE_H264_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR,
+    eQpprimeYZeroTransformBypassFlagSet = VK_VIDEO_ENCODE_H264_STD_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_FLAG_SET_BIT_KHR,
+    eScalingMatrixPresentFlagSet = VK_VIDEO_ENCODE_H264_STD_SCALING_MATRIX_PRESENT_FLAG_SET_BIT_KHR,
+    eChromaQpIndexOffset = VK_VIDEO_ENCODE_H264_STD_CHROMA_QP_INDEX_OFFSET_BIT_KHR,
+    eSecondChromaQpIndexOffset = VK_VIDEO_ENCODE_H264_STD_SECOND_CHROMA_QP_INDEX_OFFSET_BIT_KHR,
+    ePicInitQpMinus26 = VK_VIDEO_ENCODE_H264_STD_PIC_INIT_QP_MINUS26_BIT_KHR,
+    eWeightedPredFlagSet = VK_VIDEO_ENCODE_H264_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR,
+    eWeightedBipredIdcExplicit = VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_EXPLICIT_BIT_KHR,
+    eWeightedBipredIdcImplicit = VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_IMPLICIT_BIT_KHR,
+    eTransform8X8ModeFlagSet = VK_VIDEO_ENCODE_H264_STD_TRANSFORM_8X8_MODE_FLAG_SET_BIT_KHR,
+    eDirectSpatialMvPredFlagUnset = VK_VIDEO_ENCODE_H264_STD_DIRECT_SPATIAL_MV_PRED_FLAG_UNSET_BIT_KHR,
+    eEntropyCodingModeFlagUnset = VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_UNSET_BIT_KHR,
+    eEntropyCodingModeFlagSet = VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_SET_BIT_KHR,
+    eDirect8X8InferenceFlagUnset = VK_VIDEO_ENCODE_H264_STD_DIRECT_8X8_INFERENCE_FLAG_UNSET_BIT_KHR,
+    eConstrainedIntraPredFlagSet = VK_VIDEO_ENCODE_H264_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR,
+    eDeblockingFilterDisabled = VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_DISABLED_BIT_KHR,
+    eDeblockingFilterEnabled = VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_ENABLED_BIT_KHR,
+    eDeblockingFilterPartial = VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_PARTIAL_BIT_KHR,
+    eSliceQpDelta = VK_VIDEO_ENCODE_H264_STD_SLICE_QP_DELTA_BIT_KHR,
+    eDifferentSliceQpDelta = VK_VIDEO_ENCODE_H264_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR
+  };
+
+  using VideoEncodeH264StdFlagsKHR = Flags<VideoEncodeH264StdFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoEncodeH264StdFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH264StdFlagsKHR allFlags =
+      VideoEncodeH264StdFlagBitsKHR::eSeparateColorPlaneFlagSet | VideoEncodeH264StdFlagBitsKHR::eQpprimeYZeroTransformBypassFlagSet |
+      VideoEncodeH264StdFlagBitsKHR::eScalingMatrixPresentFlagSet | VideoEncodeH264StdFlagBitsKHR::eChromaQpIndexOffset |
+      VideoEncodeH264StdFlagBitsKHR::eSecondChromaQpIndexOffset | VideoEncodeH264StdFlagBitsKHR::ePicInitQpMinus26 |
+      VideoEncodeH264StdFlagBitsKHR::eWeightedPredFlagSet | VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcExplicit |
+      VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcImplicit | VideoEncodeH264StdFlagBitsKHR::eTransform8X8ModeFlagSet |
+      VideoEncodeH264StdFlagBitsKHR::eDirectSpatialMvPredFlagUnset | VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagUnset |
+      VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagSet | VideoEncodeH264StdFlagBitsKHR::eDirect8X8InferenceFlagUnset |
+      VideoEncodeH264StdFlagBitsKHR::eConstrainedIntraPredFlagSet | VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterDisabled |
+      VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterEnabled | VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterPartial |
+      VideoEncodeH264StdFlagBitsKHR::eSliceQpDelta | VideoEncodeH264StdFlagBitsKHR::eDifferentSliceQpDelta;
+  };
+
+  enum class VideoEncodeH264RateControlFlagBitsKHR : VkVideoEncodeH264RateControlFlagsKHR
+  {
+    eAttemptHrdCompliance = VK_VIDEO_ENCODE_H264_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR,
+    eRegularGop = VK_VIDEO_ENCODE_H264_RATE_CONTROL_REGULAR_GOP_BIT_KHR,
+    eReferencePatternFlat = VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR,
+    eReferencePatternDyadic = VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR,
+    eTemporalLayerPatternDyadic = VK_VIDEO_ENCODE_H264_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR
+  };
+
+  using VideoEncodeH264RateControlFlagsKHR = Flags<VideoEncodeH264RateControlFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoEncodeH264RateControlFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH264RateControlFlagsKHR allFlags =
+      VideoEncodeH264RateControlFlagBitsKHR::eAttemptHrdCompliance | VideoEncodeH264RateControlFlagBitsKHR::eRegularGop |
+      VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternFlat | VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternDyadic |
+      VideoEncodeH264RateControlFlagBitsKHR::eTemporalLayerPatternDyadic;
+  };
+
+  //=== VK_KHR_video_encode_h265 ===
+
+  enum class VideoEncodeH265CapabilityFlagBitsKHR : VkVideoEncodeH265CapabilityFlagsKHR
+  {
+    eHrdCompliance = VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_KHR,
+    ePredictionWeightTableGenerated = VK_VIDEO_ENCODE_H265_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR,
+    eRowUnalignedSliceSegment = VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_KHR,
+    eDifferentSliceSegmentType = VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_SEGMENT_TYPE_BIT_KHR,
+    eBFrameInL0List = VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR,
+    eBFrameInL1List = VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR,
+    ePerPictureTypeMinMaxQp = VK_VIDEO_ENCODE_H265_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR,
+    ePerSliceSegmentConstantQp = VK_VIDEO_ENCODE_H265_CAPABILITY_PER_SLICE_SEGMENT_CONSTANT_QP_BIT_KHR,
+    eMultipleTilesPerSliceSegment = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILES_PER_SLICE_SEGMENT_BIT_KHR,
+    eMultipleSliceSegmentsPerTile = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_SEGMENTS_PER_TILE_BIT_KHR
+  };
+
+  using VideoEncodeH265CapabilityFlagsKHR = Flags<VideoEncodeH265CapabilityFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoEncodeH265CapabilityFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265CapabilityFlagsKHR allFlags =
+      VideoEncodeH265CapabilityFlagBitsKHR::eHrdCompliance | VideoEncodeH265CapabilityFlagBitsKHR::ePredictionWeightTableGenerated |
+      VideoEncodeH265CapabilityFlagBitsKHR::eRowUnalignedSliceSegment | VideoEncodeH265CapabilityFlagBitsKHR::eDifferentSliceSegmentType |
+      VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL0List | VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL1List |
+      VideoEncodeH265CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp | VideoEncodeH265CapabilityFlagBitsKHR::ePerSliceSegmentConstantQp |
+      VideoEncodeH265CapabilityFlagBitsKHR::eMultipleTilesPerSliceSegment | VideoEncodeH265CapabilityFlagBitsKHR::eMultipleSliceSegmentsPerTile;
+  };
+
+  enum class VideoEncodeH265StdFlagBitsKHR : VkVideoEncodeH265StdFlagsKHR
+  {
+    eSeparateColorPlaneFlagSet = VK_VIDEO_ENCODE_H265_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR,
+    eSampleAdaptiveOffsetEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_SAMPLE_ADAPTIVE_OFFSET_ENABLED_FLAG_SET_BIT_KHR,
+    eScalingListDataPresentFlagSet = VK_VIDEO_ENCODE_H265_STD_SCALING_LIST_DATA_PRESENT_FLAG_SET_BIT_KHR,
+    ePcmEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_PCM_ENABLED_FLAG_SET_BIT_KHR,
+    eSpsTemporalMvpEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_SPS_TEMPORAL_MVP_ENABLED_FLAG_SET_BIT_KHR,
+    eInitQpMinus26 = VK_VIDEO_ENCODE_H265_STD_INIT_QP_MINUS26_BIT_KHR,
+    eWeightedPredFlagSet = VK_VIDEO_ENCODE_H265_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR,
+    eWeightedBipredFlagSet = VK_VIDEO_ENCODE_H265_STD_WEIGHTED_BIPRED_FLAG_SET_BIT_KHR,
+    eLog2ParallelMergeLevelMinus2 = VK_VIDEO_ENCODE_H265_STD_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_KHR,
+    eSignDataHidingEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_SIGN_DATA_HIDING_ENABLED_FLAG_SET_BIT_KHR,
+    eTransformSkipEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_SET_BIT_KHR,
+    eTransformSkipEnabledFlagUnset = VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_UNSET_BIT_KHR,
+    ePpsSliceChromaQpOffsetsPresentFlagSet = VK_VIDEO_ENCODE_H265_STD_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_FLAG_SET_BIT_KHR,
+    eTransquantBypassEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_TRANSQUANT_BYPASS_ENABLED_FLAG_SET_BIT_KHR,
+    eConstrainedIntraPredFlagSet = VK_VIDEO_ENCODE_H265_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR,
+    eEntropyCodingSyncEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_ENTROPY_CODING_SYNC_ENABLED_FLAG_SET_BIT_KHR,
+    eDeblockingFilterOverrideEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_SET_BIT_KHR,
+    eDependentSliceSegmentsEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENTS_ENABLED_FLAG_SET_BIT_KHR,
+    eDependentSliceSegmentFlagSet = VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENT_FLAG_SET_BIT_KHR,
+    eSliceQpDelta = VK_VIDEO_ENCODE_H265_STD_SLICE_QP_DELTA_BIT_KHR,
+    eDifferentSliceQpDelta = VK_VIDEO_ENCODE_H265_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR
+  };
+
+  using VideoEncodeH265StdFlagsKHR = Flags<VideoEncodeH265StdFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoEncodeH265StdFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265StdFlagsKHR allFlags =
+      VideoEncodeH265StdFlagBitsKHR::eSeparateColorPlaneFlagSet | VideoEncodeH265StdFlagBitsKHR::eSampleAdaptiveOffsetEnabledFlagSet |
+      VideoEncodeH265StdFlagBitsKHR::eScalingListDataPresentFlagSet | VideoEncodeH265StdFlagBitsKHR::ePcmEnabledFlagSet |
+      VideoEncodeH265StdFlagBitsKHR::eSpsTemporalMvpEnabledFlagSet | VideoEncodeH265StdFlagBitsKHR::eInitQpMinus26 |
+      VideoEncodeH265StdFlagBitsKHR::eWeightedPredFlagSet | VideoEncodeH265StdFlagBitsKHR::eWeightedBipredFlagSet |
+      VideoEncodeH265StdFlagBitsKHR::eLog2ParallelMergeLevelMinus2 | VideoEncodeH265StdFlagBitsKHR::eSignDataHidingEnabledFlagSet |
+      VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagSet | VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagUnset |
+      VideoEncodeH265StdFlagBitsKHR::ePpsSliceChromaQpOffsetsPresentFlagSet | VideoEncodeH265StdFlagBitsKHR::eTransquantBypassEnabledFlagSet |
+      VideoEncodeH265StdFlagBitsKHR::eConstrainedIntraPredFlagSet | VideoEncodeH265StdFlagBitsKHR::eEntropyCodingSyncEnabledFlagSet |
+      VideoEncodeH265StdFlagBitsKHR::eDeblockingFilterOverrideEnabledFlagSet | VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentsEnabledFlagSet |
+      VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentFlagSet | VideoEncodeH265StdFlagBitsKHR::eSliceQpDelta |
+      VideoEncodeH265StdFlagBitsKHR::eDifferentSliceQpDelta;
+  };
+
+  enum class VideoEncodeH265CtbSizeFlagBitsKHR : VkVideoEncodeH265CtbSizeFlagsKHR
+  {
+    e16 = VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_KHR,
+    e32 = VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_KHR,
+    e64 = VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_KHR
+  };
+
+  using VideoEncodeH265CtbSizeFlagsKHR = Flags<VideoEncodeH265CtbSizeFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoEncodeH265CtbSizeFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265CtbSizeFlagsKHR allFlags =
+      VideoEncodeH265CtbSizeFlagBitsKHR::e16 | VideoEncodeH265CtbSizeFlagBitsKHR::e32 | VideoEncodeH265CtbSizeFlagBitsKHR::e64;
+  };
+
+  enum class VideoEncodeH265TransformBlockSizeFlagBitsKHR : VkVideoEncodeH265TransformBlockSizeFlagsKHR
+  {
+    e4 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_KHR,
+    e8 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_KHR,
+    e16 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_KHR,
+    e32 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_KHR
+  };
+
+  using VideoEncodeH265TransformBlockSizeFlagsKHR = Flags<VideoEncodeH265TransformBlockSizeFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoEncodeH265TransformBlockSizeFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265TransformBlockSizeFlagsKHR allFlags =
+      VideoEncodeH265TransformBlockSizeFlagBitsKHR::e4 | VideoEncodeH265TransformBlockSizeFlagBitsKHR::e8 | VideoEncodeH265TransformBlockSizeFlagBitsKHR::e16 |
+      VideoEncodeH265TransformBlockSizeFlagBitsKHR::e32;
+  };
+
+  enum class VideoEncodeH265RateControlFlagBitsKHR : VkVideoEncodeH265RateControlFlagsKHR
+  {
+    eAttemptHrdCompliance = VK_VIDEO_ENCODE_H265_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR,
+    eRegularGop = VK_VIDEO_ENCODE_H265_RATE_CONTROL_REGULAR_GOP_BIT_KHR,
+    eReferencePatternFlat = VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR,
+    eReferencePatternDyadic = VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR,
+    eTemporalSubLayerPatternDyadic = VK_VIDEO_ENCODE_H265_RATE_CONTROL_TEMPORAL_SUB_LAYER_PATTERN_DYADIC_BIT_KHR
+  };
+
+  using VideoEncodeH265RateControlFlagsKHR = Flags<VideoEncodeH265RateControlFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoEncodeH265RateControlFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265RateControlFlagsKHR allFlags =
+      VideoEncodeH265RateControlFlagBitsKHR::eAttemptHrdCompliance | VideoEncodeH265RateControlFlagBitsKHR::eRegularGop |
+      VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternFlat | VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternDyadic |
+      VideoEncodeH265RateControlFlagBitsKHR::eTemporalSubLayerPatternDyadic;
+  };
+
+  //=== VK_KHR_video_decode_h264 ===
+
+  enum class VideoDecodeH264PictureLayoutFlagBitsKHR : VkVideoDecodeH264PictureLayoutFlagsKHR
+  {
+    eProgressive = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR,
+    eInterlacedInterleavedLines = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_KHR,
+    eInterlacedSeparatePlanes = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_KHR
+  };
+
+  using VideoDecodeH264PictureLayoutFlagsKHR = Flags<VideoDecodeH264PictureLayoutFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoDecodeH264PictureLayoutFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeH264PictureLayoutFlagsKHR allFlags = VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive |
+                                                                                         VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedInterleavedLines |
+                                                                                         VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedSeparatePlanes;
+  };
+
+  //=== VK_AMD_shader_info ===
+
+  enum class ShaderInfoTypeAMD
+  {
+    eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD,
+    eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD,
+    eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD
+  };
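+
+  // Usage sketch (illustrative only; assumes VK_AMD_shader_info is enabled, plus a valid
+  // vk::Device `device` and vk::Pipeline `pipeline`): fetch the driver's disassembly of one stage.
+  //
+  //   std::vector<uint8_t> disasm =
+  //     device.getShaderInfoAMD( pipeline, vk::ShaderStageFlagBits::eFragment, vk::ShaderInfoTypeAMD::eDisassembly );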
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+
+  enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkStreamDescriptorSurfaceCreateFlagsGGP
+  {
+  };
+
+  using StreamDescriptorSurfaceCreateFlagsGGP = Flags<StreamDescriptorSurfaceCreateFlagBitsGGP>;
+
+  template <>
+  struct FlagTraits<StreamDescriptorSurfaceCreateFlagBitsGGP>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StreamDescriptorSurfaceCreateFlagsGGP allFlags = {};
+  };
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_external_memory_capabilities ===
+
+  enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV
+  {
+    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
+    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
+    eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
+    eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
+  };
+
+  using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV>;
+
+  template <>
+  struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryHandleTypeFlagsNV allFlags =
+      ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 | ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt | ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image |
+      ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt;
+  };
+
+  enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV
+  {
+    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
+    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
+    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
+  };
+
+  using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV>;
+
+  template <>
+  struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryFeatureFlagsNV allFlags =
+      ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly | ExternalMemoryFeatureFlagBitsNV::eExportable | ExternalMemoryFeatureFlagBitsNV::eImportable;
+  };
+
+  //=== VK_EXT_validation_flags ===
+
+  enum class ValidationCheckEXT
+  {
+    eAll = VK_VALIDATION_CHECK_ALL_EXT,
+    eShaders = VK_VALIDATION_CHECK_SHADERS_EXT
+  };
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+
+  enum class ViSurfaceCreateFlagBitsNN : VkViSurfaceCreateFlagsNN
+  {
+  };
+
+  using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN>;
+
+  template <>
+  struct FlagTraits<ViSurfaceCreateFlagBitsNN>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ViSurfaceCreateFlagsNN allFlags = {};
+  };
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_EXT_pipeline_robustness ===
+
+  enum class PipelineRobustnessBufferBehaviorEXT
+  {
+    eDeviceDefault = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT,
+    eDisabled = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT,
+    eRobustBufferAccess = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT,
+    eRobustBufferAccess2 = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT
+  };
+
+  enum class PipelineRobustnessImageBehaviorEXT
+  {
+    eDeviceDefault = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT,
+    eDisabled = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT,
+    eRobustImageAccess = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT,
+    eRobustImageAccess2 = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT
+  };
+
+  //=== VK_EXT_conditional_rendering ===
+
+  enum class ConditionalRenderingFlagBitsEXT : VkConditionalRenderingFlagsEXT
+  {
+    eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT
+  };
+
+  using ConditionalRenderingFlagsEXT = Flags<ConditionalRenderingFlagBitsEXT>;
+
+  template <>
+  struct FlagTraits<ConditionalRenderingFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ConditionalRenderingFlagsEXT allFlags = ConditionalRenderingFlagBitsEXT::eInverted;
+  };
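+
+  // Usage sketch (illustrative only; assumes VK_EXT_conditional_rendering is enabled, a
+  // recording vk::CommandBuffer `cmd`, and a vk::Buffer `predicate` holding a 32-bit value at
+  // offset 0): eInverted runs the enclosed commands when the predicate is zero instead of non-zero.
+  //
+  //   vk::ConditionalRenderingBeginInfoEXT beginInfo{ predicate, 0, vk::ConditionalRenderingFlagBitsEXT::eInverted };
+  //   cmd.beginConditionalRenderingEXT( beginInfo );
+  //   // ... conditionally executed draws ...
+  //   cmd.endConditionalRenderingEXT();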
eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT, + eOn = VK_DISPLAY_POWER_STATE_ON_EXT + }; + + enum class DeviceEventTypeEXT + { + eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + }; + + enum class DisplayEventTypeEXT + { + eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + }; + + //=== VK_NV_viewport_swizzle === + + enum class ViewportCoordinateSwizzleNV + { + ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, + eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV, + ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV, + eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV, + ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, + eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV, + ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV, + eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV + }; + + enum class PipelineViewportSwizzleStateCreateFlagBitsNV : VkPipelineViewportSwizzleStateCreateFlagsNV + { + }; + + using PipelineViewportSwizzleStateCreateFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineViewportSwizzleStateCreateFlagsNV allFlags = {}; + }; + + //=== VK_EXT_discard_rectangles === + + enum class DiscardRectangleModeEXT + { + eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, + eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT + }; + + enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkPipelineDiscardRectangleStateCreateFlagsEXT + { + }; + + using PipelineDiscardRectangleStateCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineDiscardRectangleStateCreateFlagsEXT allFlags = {}; + }; + + //=== VK_EXT_conservative_rasterization === + + enum class ConservativeRasterizationModeEXT + { + eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, + eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT, + eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT + }; + + enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkPipelineRasterizationConservativeStateCreateFlagsEXT + { + }; + + using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationConservativeStateCreateFlagsEXT allFlags = {}; + }; + + //=== VK_EXT_depth_clip_enable === + + enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkPipelineRasterizationDepthClipStateCreateFlagsEXT + { + }; + + using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationDepthClipStateCreateFlagsEXT allFlags = {}; + }; + + //=== VK_KHR_performance_query === + + enum class PerformanceCounterDescriptionFlagBitsKHR : VkPerformanceCounterDescriptionFlagsKHR + { + ePerformanceImpacting = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR, + eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR + }; + + using PerformanceCounterDescriptionFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
PerformanceCounterDescriptionFlagsKHR allFlags = + PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting | PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted; + }; + + enum class PerformanceCounterScopeKHR + { + eCommandBuffer = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, + eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, + eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, + eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR, + eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR, + eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR + }; + + enum class PerformanceCounterStorageKHR + { + eInt32 = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR, + eInt64 = VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR, + eUint32 = VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR, + eUint64 = VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR, + eFloat32 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR, + eFloat64 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR + }; + + enum class PerformanceCounterUnitKHR + { + eGeneric = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR, + ePercentage = VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR, + eNanoseconds = VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR, + eBytes = VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR, + eBytesPerSecond = VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR, + eKelvin = VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR, + eWatts = VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR, + eVolts = VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR, + eAmps = VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR, + eHertz = VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR, + eCycles = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR + }; + + enum class AcquireProfilingLockFlagBitsKHR : VkAcquireProfilingLockFlagsKHR + { + }; + + using AcquireProfilingLockFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AcquireProfilingLockFlagsKHR allFlags = {}; + }; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + enum class IOSSurfaceCreateFlagBitsMVK : VkIOSSurfaceCreateFlagsMVK + { + }; + + using IOSSurfaceCreateFlagsMVK = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR IOSSurfaceCreateFlagsMVK allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + enum class MacOSSurfaceCreateFlagBitsMVK : VkMacOSSurfaceCreateFlagsMVK + { + }; + + using MacOSSurfaceCreateFlagsMVK = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MacOSSurfaceCreateFlagsMVK allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT + { + eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT, + eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, + eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT, + eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT + }; + + using DebugUtilsMessageSeverityFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT allFlags = + DebugUtilsMessageSeverityFlagBitsEXT::eVerbose | DebugUtilsMessageSeverityFlagBitsEXT::eInfo | 
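+    // Usage sketch (annotation only, not part of the generated header): the
+    // severity and type bits around this point are the filters given to a
+    // VK_EXT_debug_utils messenger. A minimal, hedged example, assuming the
+    // default `vk` namespace, a user callback named `debugCallback` with the
+    // PFN_vkDebugUtilsMessengerCallbackEXT signature, and a dispatcher that
+    // has loaded the extension entry points:
+    //
+    //   vk::DebugUtilsMessengerCreateInfoEXT createInfo{};
+    //   createInfo.messageSeverity = vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
+    //                                vk::DebugUtilsMessageSeverityFlagBitsEXT::eError;
+    //   createInfo.messageType     = vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral |
+    //                                vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation |
+    //                                vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance;
+    //   createInfo.pfnUserCallback = debugCallback;
+    //   vk::DebugUtilsMessengerEXT messenger = instance.createDebugUtilsMessengerEXT( createInfo );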
DebugUtilsMessageSeverityFlagBitsEXT::eWarning | + DebugUtilsMessageSeverityFlagBitsEXT::eError; + }; + + enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT + { + eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT, + eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, + ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT, + eDeviceAddressBinding = VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT + }; + + using DebugUtilsMessageTypeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessageTypeFlagsEXT allFlags = + DebugUtilsMessageTypeFlagBitsEXT::eGeneral | DebugUtilsMessageTypeFlagBitsEXT::eValidation | DebugUtilsMessageTypeFlagBitsEXT::ePerformance | + DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding; + }; + + enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkDebugUtilsMessengerCallbackDataFlagsEXT + { + }; + + using DebugUtilsMessengerCallbackDataFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessengerCallbackDataFlagsEXT allFlags = {}; + }; + + enum class DebugUtilsMessengerCreateFlagBitsEXT : VkDebugUtilsMessengerCreateFlagsEXT + { + }; + + using DebugUtilsMessengerCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessengerCreateFlagsEXT allFlags = {}; + }; + + //=== VK_EXT_blend_operation_advanced === + + enum class BlendOverlapEXT + { + eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT, + eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT, + eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT + }; + + //=== VK_NV_fragment_coverage_to_color === + + enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkPipelineCoverageToColorStateCreateFlagsNV + { + }; + + using PipelineCoverageToColorStateCreateFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCoverageToColorStateCreateFlagsNV allFlags = {}; + }; + + //=== VK_KHR_acceleration_structure === + + enum class AccelerationStructureTypeKHR + { + eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, + eGeneric = VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR + }; + using AccelerationStructureTypeNV = AccelerationStructureTypeKHR; + + enum class AccelerationStructureBuildTypeKHR + { + eHost = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR, + eDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR, + eHostOrDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR + }; + + enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR + { + eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR, + eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR + }; + using GeometryFlagBitsNV = GeometryFlagBitsKHR; + + using GeometryFlagsKHR = Flags; + using GeometryFlagsNV = GeometryFlagsKHR; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR GeometryFlagsKHR allFlags = GeometryFlagBitsKHR::eOpaque | GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation; + }; + + enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR + { + eTriangleFacingCullDisable 
= VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + eTriangleFlipFacing = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR, + eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + eTriangleFrontCounterclockwiseKHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, + eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV, + eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV, + eForceOpacityMicromap2StateEXT = VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT, + eDisableOpacityMicromapsEXT = VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT + }; + using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR; + + using GeometryInstanceFlagsKHR = Flags; + using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR GeometryInstanceFlagsKHR allFlags = + GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable | GeometryInstanceFlagBitsKHR::eTriangleFlipFacing | GeometryInstanceFlagBitsKHR::eForceOpaque | + GeometryInstanceFlagBitsKHR::eForceNoOpaque | GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT | + GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT; + }; + + enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR + { + eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, + ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, + ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, + eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR, + eMotionNV = VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV, + eAllowOpacityMicromapUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT, + eAllowDisableOpacityMicromapsEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT, + eAllowOpacityMicromapDataUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eAllowDisplacementMicromapUpdateNV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eAllowDataAccess = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_KHR + }; + using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR; + + using BuildAccelerationStructureFlagsKHR = Flags; + using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR BuildAccelerationStructureFlagsKHR allFlags = + BuildAccelerationStructureFlagBitsKHR::eAllowUpdate | BuildAccelerationStructureFlagBitsKHR::eAllowCompaction | + BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace | BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild | + BuildAccelerationStructureFlagBitsKHR::eLowMemory | BuildAccelerationStructureFlagBitsKHR::eMotionNV | + BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT | BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT | + BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | 
BuildAccelerationStructureFlagBitsKHR::eAllowDisplacementMicromapUpdateNV
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      | BuildAccelerationStructureFlagBitsKHR::eAllowDataAccess;
+  };
+
+  enum class CopyAccelerationStructureModeKHR
+  {
+    eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR,
+    eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR,
+    eSerialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR,
+    eDeserialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR
+  };
+  using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR;
+
+  enum class GeometryTypeKHR
+  {
+    eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR,
+    eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR,
+    eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR
+  };
+  using GeometryTypeNV = GeometryTypeKHR;
+
+  enum class AccelerationStructureCompatibilityKHR
+  {
+    eCompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR,
+    eIncompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR
+  };
+
+  enum class AccelerationStructureCreateFlagBitsKHR : VkAccelerationStructureCreateFlagsKHR
+  {
+    eDeviceAddressCaptureReplay = VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR,
+    eDescriptorBufferCaptureReplayEXT = VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT,
+    eMotionNV = VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV
+  };
+
+  using AccelerationStructureCreateFlagsKHR = Flags<AccelerationStructureCreateFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<AccelerationStructureCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AccelerationStructureCreateFlagsKHR allFlags =
+      AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay | AccelerationStructureCreateFlagBitsKHR::eDescriptorBufferCaptureReplayEXT |
+      AccelerationStructureCreateFlagBitsKHR::eMotionNV;
+  };
+
+  enum class BuildAccelerationStructureModeKHR
+  {
+    eBuild = VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR,
+    eUpdate = VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR
+  };
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+  enum class RayTracingShaderGroupTypeKHR
+  {
+    eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR,
+    eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR,
+    eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR
+  };
+  using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR;
+
+  enum class ShaderGroupShaderKHR
+  {
+    eGeneral = VK_SHADER_GROUP_SHADER_GENERAL_KHR,
+    eClosestHit = VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR,
+    eAnyHit = VK_SHADER_GROUP_SHADER_ANY_HIT_KHR,
+    eIntersection = VK_SHADER_GROUP_SHADER_INTERSECTION_KHR
+  };
+
+  //=== VK_NV_framebuffer_mixed_samples ===
+
+  enum class CoverageModulationModeNV
+  {
+    eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV,
+    eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV,
+    eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV,
+    eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV
+  };
+
+  enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkPipelineCoverageModulationStateCreateFlagsNV
+  {
+  };
+
+  using PipelineCoverageModulationStateCreateFlagsNV = Flags<PipelineCoverageModulationStateCreateFlagBitsNV>;
+
+  template <>
+  struct FlagTraits<PipelineCoverageModulationStateCreateFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCoverageModulationStateCreateFlagsNV allFlags = {};
+  };
+
+  //=== VK_EXT_validation_cache ===
+
+  enum class ValidationCacheHeaderVersionEXT
+  {
+    eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT
+  };
+
+  enum class ValidationCacheCreateFlagBitsEXT : VkValidationCacheCreateFlagsEXT
+  {
+  };
+
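+  // Usage sketch (annotation only, not part of the generated header):
+  // FlagBits enumerators combine into their Flags<...> wrapper through
+  // operator|, and the FlagTraits specialization records every bit this
+  // header knows about. A minimal, hedged example with the
+  // acceleration-structure build flags defined above, assuming the default
+  // `vk` namespace:
+  //
+  //   vk::BuildAccelerationStructureFlagsKHR buildFlags =
+  //     vk::BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace |
+  //     vk::BuildAccelerationStructureFlagBitsKHR::eAllowCompaction;
+  //   if ( buildFlags & vk::BuildAccelerationStructureFlagBitsKHR::eAllowCompaction )
+  //   {
+  //     // e.g. query the compacted size, then copy with CopyAccelerationStructureModeKHR::eCompact
+  //   }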
using ValidationCacheCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ValidationCacheCreateFlagsEXT allFlags = {}; + }; + + //=== VK_NV_shading_rate_image === + + enum class ShadingRatePaletteEntryNV + { + eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV, + e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV, + e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV, + e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV, + e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV, + e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV, + e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV, + e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV, + e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV, + e1InvocationPer4X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV, + e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV, + e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV + }; + + enum class CoarseSampleOrderTypeNV + { + eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, + eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, + ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, + eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV + }; + + //=== VK_NV_ray_tracing === + + enum class AccelerationStructureMemoryRequirementsTypeNV + { + eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, + eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV, + eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV + }; + + //=== VK_AMD_pipeline_compiler_control === + + enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD + { + }; + + using PipelineCompilerControlFlagsAMD = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCompilerControlFlagsAMD allFlags = {}; + }; + + //=== VK_KHR_global_priority === + + enum class QueueGlobalPriorityKHR + { + eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR, + eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR, + eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR, + eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR + }; + using QueueGlobalPriorityEXT = QueueGlobalPriorityKHR; + + //=== VK_AMD_memory_overallocation_behavior === + + enum class MemoryOverallocationBehaviorAMD + { + eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD, + eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD, + eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD + }; + + //=== VK_INTEL_performance_query === + + enum class PerformanceConfigurationTypeINTEL + { + eCommandQueueMetricsDiscoveryActivated = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL + }; + + enum class QueryPoolSamplingModeINTEL + { + eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL + }; + + enum class PerformanceOverrideTypeINTEL + { + eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL, + eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL + }; + + enum class 
PerformanceParameterTypeINTEL + { + eHwCountersSupported = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL, + eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL + }; + + enum class PerformanceValueTypeINTEL + { + eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL, + eUint64 = VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL, + eFloat = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL, + eBool = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL, + eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL + }; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkImagePipeSurfaceCreateFlagsFUCHSIA + { + }; + + using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImagePipeSurfaceCreateFlagsFUCHSIA allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + enum class MetalSurfaceCreateFlagBitsEXT : VkMetalSurfaceCreateFlagsEXT + { + }; + + using MetalSurfaceCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MetalSurfaceCreateFlagsEXT allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + enum class FragmentShadingRateCombinerOpKHR + { + eKeep = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR, + eReplace = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR, + eMin = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR, + eMax = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR, + eMul = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR + }; + + //=== VK_AMD_shader_core_properties2 === + + enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD + { + }; + + using ShaderCorePropertiesFlagsAMD = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderCorePropertiesFlagsAMD allFlags = {}; + }; + + //=== VK_EXT_validation_features === + + enum class ValidationFeatureEnableEXT + { + eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, + eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT, + eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT, + eDebugPrintf = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT, + eSynchronizationValidation = VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT + }; + + enum class ValidationFeatureDisableEXT + { + eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT, + eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT, + eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT, + eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT, + eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT, + eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT, + eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT, + eShaderValidationCache = VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT + }; + + //=== VK_NV_coverage_reduction_mode === + + enum class CoverageReductionModeNV + { + eMerge = VK_COVERAGE_REDUCTION_MODE_MERGE_NV, + eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV + }; + + enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkPipelineCoverageReductionStateCreateFlagsNV + { + }; + + 
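+  // Usage sketch (annotation only, not part of the generated header): the
+  // VK_EXT_validation_features enums above are consumed through a
+  // vk::ValidationFeaturesEXT struct chained into instance creation. A
+  // minimal, hedged example enabling debug printf, assuming the validation
+  // layer is available and the default `vk` namespace:
+  //
+  //   vk::ValidationFeatureEnableEXT enabled[] = { vk::ValidationFeatureEnableEXT::eDebugPrintf };
+  //   vk::ValidationFeaturesEXT features{};
+  //   features.enabledValidationFeatureCount = 1;
+  //   features.pEnabledValidationFeatures    = enabled;
+  //   vk::InstanceCreateInfo instanceInfo{};
+  //   instanceInfo.pNext = &features;
+  //   vk::Instance instance = vk::createInstance( instanceInfo );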
using PipelineCoverageReductionStateCreateFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCoverageReductionStateCreateFlagsNV allFlags = {}; + }; + + //=== VK_EXT_provoking_vertex === + + enum class ProvokingVertexModeEXT + { + eFirstVertex = VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT, + eLastVertex = VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + enum class FullScreenExclusiveEXT + { + eDefault = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT, + eAllowed = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT, + eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT, + eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + enum class HeadlessSurfaceCreateFlagBitsEXT : VkHeadlessSurfaceCreateFlagsEXT + { + }; + + using HeadlessSurfaceCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR HeadlessSurfaceCreateFlagsEXT allFlags = {}; + }; + + //=== VK_EXT_line_rasterization === + + enum class LineRasterizationModeEXT + { + eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT, + eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT, + eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT, + eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT + }; + + //=== VK_KHR_pipeline_executable_properties === + + enum class PipelineExecutableStatisticFormatKHR + { + eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR, + eInt64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR, + eUint64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR, + eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR + }; + + //=== VK_EXT_host_image_copy === + + enum class HostImageCopyFlagBitsEXT : VkHostImageCopyFlagsEXT + { + eMemcpy = VK_HOST_IMAGE_COPY_MEMCPY_EXT + }; + + using HostImageCopyFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR HostImageCopyFlagsEXT allFlags = HostImageCopyFlagBitsEXT::eMemcpy; + }; + + //=== VK_KHR_map_memory2 === + + enum class MemoryUnmapFlagBitsKHR : VkMemoryUnmapFlagsKHR + { + }; + + using MemoryUnmapFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryUnmapFlagsKHR allFlags = {}; + }; + + //=== VK_EXT_surface_maintenance1 === + + enum class PresentScalingFlagBitsEXT : VkPresentScalingFlagsEXT + { + eOneToOne = VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT, + eAspectRatioStretch = VK_PRESENT_SCALING_ASPECT_RATIO_STRETCH_BIT_EXT, + eStretch = VK_PRESENT_SCALING_STRETCH_BIT_EXT + }; + + using PresentScalingFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PresentScalingFlagsEXT allFlags = + PresentScalingFlagBitsEXT::eOneToOne | PresentScalingFlagBitsEXT::eAspectRatioStretch | PresentScalingFlagBitsEXT::eStretch; + }; + + enum class PresentGravityFlagBitsEXT : VkPresentGravityFlagsEXT + { + eMin = VK_PRESENT_GRAVITY_MIN_BIT_EXT, + eMax = VK_PRESENT_GRAVITY_MAX_BIT_EXT, + eCentered = VK_PRESENT_GRAVITY_CENTERED_BIT_EXT + }; + + using 
PresentGravityFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PresentGravityFlagsEXT allFlags = + PresentGravityFlagBitsEXT::eMin | PresentGravityFlagBitsEXT::eMax | PresentGravityFlagBitsEXT::eCentered; + }; + + //=== VK_NV_device_generated_commands === + + enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV + { + eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV + }; + + using IndirectStateFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectStateFlagsNV allFlags = IndirectStateFlagBitsNV::eFlagFrontface; + }; + + enum class IndirectCommandsTokenTypeNV + { + eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV, + eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV, + eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV, + eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV, + ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV, + eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV, + eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV, + eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV, + eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV, + ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV, + eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV + }; + + enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV + { + eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV, + eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV, + eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV + }; + + using IndirectCommandsLayoutUsageFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV allFlags = IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess | + IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences | + IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences; + }; + + //=== VK_EXT_depth_bias_control === + + enum class DepthBiasRepresentationEXT + { + eLeastRepresentableValueFormat = VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORMAT_EXT, + eLeastRepresentableValueForceUnorm = VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORCE_UNORM_EXT, + eFloat = VK_DEPTH_BIAS_REPRESENTATION_FLOAT_EXT + }; + + //=== VK_EXT_device_memory_report === + + enum class DeviceMemoryReportEventTypeEXT + { + eAllocate = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT, + eFree = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT, + eImport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT, + eUnimport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT, + eAllocationFailed = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT + }; + + enum class DeviceMemoryReportFlagBitsEXT : VkDeviceMemoryReportFlagsEXT + { + }; + + using DeviceMemoryReportFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceMemoryReportFlagsEXT allFlags = {}; + }; + + //=== VK_KHR_video_encode_queue === + + enum class VideoEncodeCapabilityFlagBitsKHR : VkVideoEncodeCapabilityFlagsKHR + { + ePrecedingExternallyEncodedBytes = 
VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR,
+    eInsufficientstreamBufferRangeDetectionBit = VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR
+  };
+
+  using VideoEncodeCapabilityFlagsKHR = Flags<VideoEncodeCapabilityFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoEncodeCapabilityFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeCapabilityFlagsKHR allFlags =
+      VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes | VideoEncodeCapabilityFlagBitsKHR::eInsufficientstreamBufferRangeDetectionBit;
+  };
+
+  enum class VideoEncodeFeedbackFlagBitsKHR : VkVideoEncodeFeedbackFlagsKHR
+  {
+    estreamBufferOffsetBit = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR,
+    estreamBytesWrittenBit = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR,
+    estreamHasOverridesBit = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_HAS_OVERRIDES_BIT_KHR
+  };
+
+  using VideoEncodeFeedbackFlagsKHR = Flags<VideoEncodeFeedbackFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoEncodeFeedbackFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeFeedbackFlagsKHR allFlags = VideoEncodeFeedbackFlagBitsKHR::estreamBufferOffsetBit |
+                                                                                VideoEncodeFeedbackFlagBitsKHR::estreamBytesWrittenBit |
+                                                                                VideoEncodeFeedbackFlagBitsKHR::estreamHasOverridesBit;
+  };
+
+  enum class VideoEncodeUsageFlagBitsKHR : VkVideoEncodeUsageFlagsKHR
+  {
+    eDefault = VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR,
+    eTranscoding = VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR,
+    eStreaming = VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR,
+    eRecording = VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR,
+    eConferencing = VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR
+  };
+
+  using VideoEncodeUsageFlagsKHR = Flags<VideoEncodeUsageFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoEncodeUsageFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeUsageFlagsKHR allFlags = VideoEncodeUsageFlagBitsKHR::eDefault | VideoEncodeUsageFlagBitsKHR::eTranscoding |
+                                                                             VideoEncodeUsageFlagBitsKHR::eStreaming | VideoEncodeUsageFlagBitsKHR::eRecording |
+                                                                             VideoEncodeUsageFlagBitsKHR::eConferencing;
+  };
+
+  enum class VideoEncodeContentFlagBitsKHR : VkVideoEncodeContentFlagsKHR
+  {
+    eDefault = VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR,
+    eCamera = VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR,
+    eDesktop = VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR,
+    eRendered = VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR
+  };
+
+  using VideoEncodeContentFlagsKHR = Flags<VideoEncodeContentFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<VideoEncodeContentFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeContentFlagsKHR allFlags =
+      VideoEncodeContentFlagBitsKHR::eDefault | VideoEncodeContentFlagBitsKHR::eCamera | VideoEncodeContentFlagBitsKHR::eDesktop |
+      VideoEncodeContentFlagBitsKHR::eRendered;
+  };
+
+  enum class VideoEncodeTuningModeKHR
+  {
+    eDefault = VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR,
+    eHighQuality = VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR,
+    eLowLatency = VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR,
+    eUltraLowLatency = VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR,
+    eLossless = VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR
+  };
+
+  enum class VideoEncodeRateControlModeFlagBitsKHR : VkVideoEncodeRateControlModeFlagsKHR
+  {
+    eDefault = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DEFAULT_KHR,
+    eDisabled = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DISABLED_BIT_KHR,
+    eCbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR,
+    eVbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR
+  };
+
+  using
VideoEncodeRateControlModeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeRateControlModeFlagsKHR allFlags = + VideoEncodeRateControlModeFlagBitsKHR::eDefault | VideoEncodeRateControlModeFlagBitsKHR::eDisabled | VideoEncodeRateControlModeFlagBitsKHR::eCbr | + VideoEncodeRateControlModeFlagBitsKHR::eVbr; + }; + + enum class VideoEncodeFlagBitsKHR : VkVideoEncodeFlagsKHR + { + }; + + using VideoEncodeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeFlagsKHR allFlags = {}; + }; + + enum class VideoEncodeRateControlFlagBitsKHR : VkVideoEncodeRateControlFlagsKHR + { + }; + + using VideoEncodeRateControlFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeRateControlFlagsKHR allFlags = {}; + }; + + //=== VK_NV_device_diagnostics_config === + + enum class DeviceDiagnosticsConfigFlagBitsNV : VkDeviceDiagnosticsConfigFlagsNV + { + eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV, + eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV, + eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV, + eEnableShaderErrorReporting = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV + }; + + using DeviceDiagnosticsConfigFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceDiagnosticsConfigFlagsNV allFlags = + DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo | DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking | + DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints | DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderErrorReporting; + }; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + enum class ExportMetalObjectTypeFlagBitsEXT : VkExportMetalObjectTypeFlagsEXT + { + eMetalDevice = VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT, + eMetalCommandQueue = VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT, + eMetalBuffer = VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT, + eMetalTexture = VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT, + eMetalIosurface = VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT, + eMetalSharedEvent = VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT + }; + + using ExportMetalObjectTypeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExportMetalObjectTypeFlagsEXT allFlags = + ExportMetalObjectTypeFlagBitsEXT::eMetalDevice | ExportMetalObjectTypeFlagBitsEXT::eMetalCommandQueue | ExportMetalObjectTypeFlagBitsEXT::eMetalBuffer | + ExportMetalObjectTypeFlagBitsEXT::eMetalTexture | ExportMetalObjectTypeFlagBitsEXT::eMetalIosurface | ExportMetalObjectTypeFlagBitsEXT::eMetalSharedEvent; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_graphics_pipeline_library === + + enum class GraphicsPipelineLibraryFlagBitsEXT : VkGraphicsPipelineLibraryFlagsEXT + { + eVertexInputInterface = VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT, + ePreRasterizationShaders = 
VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT, + eFragmentShader = VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT, + eFragmentOutputInterface = VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT + }; + + using GraphicsPipelineLibraryFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR GraphicsPipelineLibraryFlagsEXT allFlags = + GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface | GraphicsPipelineLibraryFlagBitsEXT::ePreRasterizationShaders | + GraphicsPipelineLibraryFlagBitsEXT::eFragmentShader | GraphicsPipelineLibraryFlagBitsEXT::eFragmentOutputInterface; + }; + + //=== VK_NV_fragment_shading_rate_enums === + + enum class FragmentShadingRateNV + { + e1InvocationPerPixel = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV, + e1InvocationPer1X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV, + e1InvocationPer2X1Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV, + e1InvocationPer2X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV, + e1InvocationPer2X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV, + e1InvocationPer4X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV, + e1InvocationPer4X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV, + e2InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV, + e4InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV, + e8InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV, + e16InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV, + eNoInvocations = VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV + }; + + enum class FragmentShadingRateTypeNV + { + eFragmentSize = VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV, + eEnums = VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV + }; + + //=== VK_NV_ray_tracing_motion_blur === + + enum class AccelerationStructureMotionInstanceTypeNV + { + eStatic = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV, + eMatrixMotion = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV, + eSrtMotion = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV + }; + + enum class AccelerationStructureMotionInfoFlagBitsNV : VkAccelerationStructureMotionInfoFlagsNV + { + }; + + using AccelerationStructureMotionInfoFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AccelerationStructureMotionInfoFlagsNV allFlags = {}; + }; + + enum class AccelerationStructureMotionInstanceFlagBitsNV : VkAccelerationStructureMotionInstanceFlagsNV + { + }; + + using AccelerationStructureMotionInstanceFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AccelerationStructureMotionInstanceFlagsNV allFlags = {}; + }; + + //=== VK_EXT_image_compression_control === + + enum class ImageCompressionFlagBitsEXT : VkImageCompressionFlagsEXT + { + eDefault = VK_IMAGE_COMPRESSION_DEFAULT_EXT, + eFixedRateDefault = VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT, + eFixedRateExplicit = VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT, + eDisabled = VK_IMAGE_COMPRESSION_DISABLED_EXT + }; + + using ImageCompressionFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR ImageCompressionFlagsEXT allFlags = + ImageCompressionFlagBitsEXT::eDefault | ImageCompressionFlagBitsEXT::eFixedRateDefault | ImageCompressionFlagBitsEXT::eFixedRateExplicit | + ImageCompressionFlagBitsEXT::eDisabled; + }; + + enum class ImageCompressionFixedRateFlagBitsEXT : VkImageCompressionFixedRateFlagsEXT + { + eNone = VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT, + e1Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT, + e2Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT, + e3Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT, + e4Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT, + e5Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT, + e6Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT, + e7Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT, + e8Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT, + e9Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT, + e10Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT, + e11Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT, + e12Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT, + e13Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT, + e14Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT, + e15Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT, + e16Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT, + e17Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT, + e18Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT, + e19Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT, + e20Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT, + e21Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT, + e22Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT, + e23Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT, + e24Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT + }; + + using ImageCompressionFixedRateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageCompressionFixedRateFlagsEXT allFlags = + ImageCompressionFixedRateFlagBitsEXT::eNone | ImageCompressionFixedRateFlagBitsEXT::e1Bpc | ImageCompressionFixedRateFlagBitsEXT::e2Bpc | + ImageCompressionFixedRateFlagBitsEXT::e3Bpc | ImageCompressionFixedRateFlagBitsEXT::e4Bpc | ImageCompressionFixedRateFlagBitsEXT::e5Bpc | + ImageCompressionFixedRateFlagBitsEXT::e6Bpc | ImageCompressionFixedRateFlagBitsEXT::e7Bpc | ImageCompressionFixedRateFlagBitsEXT::e8Bpc | + ImageCompressionFixedRateFlagBitsEXT::e9Bpc | ImageCompressionFixedRateFlagBitsEXT::e10Bpc | ImageCompressionFixedRateFlagBitsEXT::e11Bpc | + ImageCompressionFixedRateFlagBitsEXT::e12Bpc | ImageCompressionFixedRateFlagBitsEXT::e13Bpc | ImageCompressionFixedRateFlagBitsEXT::e14Bpc | + ImageCompressionFixedRateFlagBitsEXT::e15Bpc | ImageCompressionFixedRateFlagBitsEXT::e16Bpc | ImageCompressionFixedRateFlagBitsEXT::e17Bpc | + ImageCompressionFixedRateFlagBitsEXT::e18Bpc | ImageCompressionFixedRateFlagBitsEXT::e19Bpc | ImageCompressionFixedRateFlagBitsEXT::e20Bpc | + ImageCompressionFixedRateFlagBitsEXT::e21Bpc | ImageCompressionFixedRateFlagBitsEXT::e22Bpc | ImageCompressionFixedRateFlagBitsEXT::e23Bpc | + ImageCompressionFixedRateFlagBitsEXT::e24Bpc; + }; + + //=== VK_EXT_device_fault === + + enum class DeviceFaultAddressTypeEXT + { + eNone = VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT, + eReadInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT, + eWriteInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT, + eExecuteInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT, + 
eInstructionPointerUnknown = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT, + eInstructionPointerInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT, + eInstructionPointerFault = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT + }; + + enum class DeviceFaultVendorBinaryHeaderVersionEXT + { + eOne = VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT + }; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + enum class DirectFBSurfaceCreateFlagBitsEXT : VkDirectFBSurfaceCreateFlagsEXT + { + }; + + using DirectFBSurfaceCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DirectFBSurfaceCreateFlagsEXT allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_device_address_binding_report === + + enum class DeviceAddressBindingFlagBitsEXT : VkDeviceAddressBindingFlagsEXT + { + eInternalObject = VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT + }; + + using DeviceAddressBindingFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceAddressBindingFlagsEXT allFlags = DeviceAddressBindingFlagBitsEXT::eInternalObject; + }; + + enum class DeviceAddressBindingTypeEXT + { + eBind = VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT, + eUnbind = VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT + }; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + enum class ImageConstraintsInfoFlagBitsFUCHSIA : VkImageConstraintsInfoFlagsFUCHSIA + { + eCpuReadRarely = VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_RARELY_FUCHSIA, + eCpuReadOften = VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_OFTEN_FUCHSIA, + eCpuWriteRarely = VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_RARELY_FUCHSIA, + eCpuWriteOften = VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_OFTEN_FUCHSIA, + eProtectedOptional = VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA + }; + + using ImageConstraintsInfoFlagsFUCHSIA = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA allFlags = + ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely | ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften | + ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely | ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften | + ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional; + }; + + enum class ImageFormatConstraintsFlagBitsFUCHSIA : VkImageFormatConstraintsFlagsFUCHSIA + { + }; + + using ImageFormatConstraintsFlagsFUCHSIA = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageFormatConstraintsFlagsFUCHSIA allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_frame_boundary === + + enum class FrameBoundaryFlagBitsEXT : VkFrameBoundaryFlagsEXT + { + eFrameEnd = VK_FRAME_BOUNDARY_FRAME_END_BIT_EXT + }; + + using FrameBoundaryFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR FrameBoundaryFlagsEXT allFlags = FrameBoundaryFlagBitsEXT::eFrameEnd; + }; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + enum class ScreenSurfaceCreateFlagBitsQNX : VkScreenSurfaceCreateFlagsQNX + { + }; + + using 
ScreenSurfaceCreateFlagsQNX = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ScreenSurfaceCreateFlagsQNX allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_opacity_micromap === + + enum class MicromapTypeEXT + { + eOpacityMicromap = VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eDisplacementMicromapNV = VK_MICROMAP_TYPE_DISPLACEMENT_MICROMAP_NV +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + }; + + enum class BuildMicromapFlagBitsEXT : VkBuildMicromapFlagsEXT + { + ePreferFastTrace = VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT, + ePreferFastBuild = VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT, + eAllowCompaction = VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT + }; + + using BuildMicromapFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR BuildMicromapFlagsEXT allFlags = + BuildMicromapFlagBitsEXT::ePreferFastTrace | BuildMicromapFlagBitsEXT::ePreferFastBuild | BuildMicromapFlagBitsEXT::eAllowCompaction; + }; + + enum class CopyMicromapModeEXT + { + eClone = VK_COPY_MICROMAP_MODE_CLONE_EXT, + eSerialize = VK_COPY_MICROMAP_MODE_SERIALIZE_EXT, + eDeserialize = VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT, + eCompact = VK_COPY_MICROMAP_MODE_COMPACT_EXT + }; + + enum class MicromapCreateFlagBitsEXT : VkMicromapCreateFlagsEXT + { + eDeviceAddressCaptureReplay = VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT + }; + + using MicromapCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MicromapCreateFlagsEXT allFlags = MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay; + }; + + enum class BuildMicromapModeEXT + { + eBuild = VK_BUILD_MICROMAP_MODE_BUILD_EXT + }; + + enum class OpacityMicromapFormatEXT + { + e2State = VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT, + e4State = VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT + }; + + enum class OpacityMicromapSpecialIndexEXT + { + eFullyTransparent = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT, + eFullyOpaque = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT, + eFullyUnknownTransparent = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT, + eFullyUnknownOpaque = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + + enum class DisplacementMicromapFormatNV + { + e64Triangles64Bytes = VK_DISPLACEMENT_MICROMAP_FORMAT_64_TRIANGLES_64_BYTES_NV, + e256Triangles128Bytes = VK_DISPLACEMENT_MICROMAP_FORMAT_256_TRIANGLES_128_BYTES_NV, + e1024Triangles128Bytes = VK_DISPLACEMENT_MICROMAP_FORMAT_1024_TRIANGLES_128_BYTES_NV + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_ARM_scheduling_controls === + + enum class PhysicalDeviceSchedulingControlsFlagBitsARM : VkPhysicalDeviceSchedulingControlsFlagsARM + { + eShaderCoreCount = VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM + }; + + using PhysicalDeviceSchedulingControlsFlagsARM = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PhysicalDeviceSchedulingControlsFlagsARM allFlags = PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount; + }; + + //=== VK_NV_memory_decompression === + + enum class 
MemoryDecompressionMethodFlagBitsNV : VkMemoryDecompressionMethodFlagsNV + { + eGdeflate10 = VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV + }; + + using MemoryDecompressionMethodFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryDecompressionMethodFlagsNV allFlags = MemoryDecompressionMethodFlagBitsNV::eGdeflate10; + }; + + //=== VK_EXT_subpass_merge_feedback === + + enum class SubpassMergeStatusEXT + { + eMerged = VK_SUBPASS_MERGE_STATUS_MERGED_EXT, + eDisallowed = VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT, + eNotMergedSideEffects = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT, + eNotMergedSamplesMismatch = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT, + eNotMergedViewsMismatch = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT, + eNotMergedAliasing = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT, + eNotMergedDependencies = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT, + eNotMergedIncompatibleInputAttachment = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT, + eNotMergedTooManyAttachments = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT, + eNotMergedInsufficientStorage = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT, + eNotMergedDepthStencilCount = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT, + eNotMergedResolveAttachmentReuse = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT, + eNotMergedSingleSubpass = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT, + eNotMergedUnspecified = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT + }; + + //=== VK_LUNARG_direct_driver_loading === + + enum class DirectDriverLoadingModeLUNARG + { + eExclusive = VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG, + eInclusive = VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG + }; + + enum class DirectDriverLoadingFlagBitsLUNARG : VkDirectDriverLoadingFlagsLUNARG + { + }; + + using DirectDriverLoadingFlagsLUNARG = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DirectDriverLoadingFlagsLUNARG allFlags = {}; + }; + + //=== VK_NV_optical_flow === + + enum class OpticalFlowUsageFlagBitsNV : VkOpticalFlowUsageFlagsNV + { + eUnknown = VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV, + eInput = VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV, + eOutput = VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV, + eHint = VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV, + eCost = VK_OPTICAL_FLOW_USAGE_COST_BIT_NV, + eGlobalFlow = VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV + }; + + using OpticalFlowUsageFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowUsageFlagsNV allFlags = OpticalFlowUsageFlagBitsNV::eUnknown | OpticalFlowUsageFlagBitsNV::eInput | + OpticalFlowUsageFlagBitsNV::eOutput | OpticalFlowUsageFlagBitsNV::eHint | + OpticalFlowUsageFlagBitsNV::eCost | OpticalFlowUsageFlagBitsNV::eGlobalFlow; + }; + + enum class OpticalFlowGridSizeFlagBitsNV : VkOpticalFlowGridSizeFlagsNV + { + eUnknown = VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV, + e1X1 = VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV, + e2X2 = VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV, + e4X4 = VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV, + e8X8 = VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV + }; + + using OpticalFlowGridSizeFlagsNV = Flags; + + template <> + struct FlagTraits + { + static 
VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowGridSizeFlagsNV allFlags = OpticalFlowGridSizeFlagBitsNV::eUnknown | OpticalFlowGridSizeFlagBitsNV::e1X1 | + OpticalFlowGridSizeFlagBitsNV::e2X2 | OpticalFlowGridSizeFlagBitsNV::e4X4 | + OpticalFlowGridSizeFlagBitsNV::e8X8; + }; + + enum class OpticalFlowPerformanceLevelNV + { + eUnknown = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV, + eSlow = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV, + eMedium = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV, + eFast = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV + }; + + enum class OpticalFlowSessionBindingPointNV + { + eUnknown = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV, + eInput = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV, + eReference = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV, + eHint = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV, + eFlowVector = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV, + eBackwardFlowVector = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV, + eCost = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV, + eBackwardCost = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV, + eGlobalFlow = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV + }; + + enum class OpticalFlowSessionCreateFlagBitsNV : VkOpticalFlowSessionCreateFlagsNV + { + eEnableHint = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV, + eEnableCost = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV, + eEnableGlobalFlow = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV, + eAllowRegions = VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV, + eBothDirections = VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV + }; + + using OpticalFlowSessionCreateFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowSessionCreateFlagsNV allFlags = + OpticalFlowSessionCreateFlagBitsNV::eEnableHint | OpticalFlowSessionCreateFlagBitsNV::eEnableCost | + OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow | OpticalFlowSessionCreateFlagBitsNV::eAllowRegions | + OpticalFlowSessionCreateFlagBitsNV::eBothDirections; + }; + + enum class OpticalFlowExecuteFlagBitsNV : VkOpticalFlowExecuteFlagsNV + { + eDisableTemporalHints = VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV + }; + + using OpticalFlowExecuteFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowExecuteFlagsNV allFlags = OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints; + }; + + //=== VK_KHR_maintenance5 === + + enum class PipelineCreateFlagBits2KHR : VkPipelineCreateFlags2KHR + { + eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR, + eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR, + eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR, + eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, + eDispatchBase = VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR, + eDeferCompileNV = VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV, + eCaptureStatistics = VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR, + eCaptureInternalRepresentations = VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, + eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR, + eEarlyReturnOnFailure = VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR, + 
+  //=== VK_KHR_maintenance5 ===
+
+  enum class PipelineCreateFlagBits2KHR : VkPipelineCreateFlags2KHR
+  {
+    eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR,
+    eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR,
+    eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR,
+    eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR,
+    eDispatchBase = VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR,
+    eDeferCompileNV = VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV,
+    eCaptureStatistics = VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR,
+    eCaptureInternalRepresentations = VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR,
+    eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR,
+    eEarlyReturnOnFailure = VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR,
+    eLinkTimeOptimizationEXT = VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT,
+    eRetainLinkTimeOptimizationInfoEXT = VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT,
+    eLibrary = VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR,
+    eRayTracingSkipTriangles = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR,
+    eRayTracingSkipAabbs = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR,
+    eRayTracingNoNullAnyHitShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR,
+    eRayTracingNoNullClosestHitShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR,
+    eRayTracingNoNullMissShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR,
+    eRayTracingNoNullIntersectionShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR,
+    eRayTracingShaderGroupHandleCaptureReplay = VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR,
+    eIndirectBindableNV = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV,
+    eRayTracingAllowMotionNV = VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV,
+    eRenderingFragmentShadingRateAttachment = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT,
+    eRayTracingOpacityMicromapEXT = VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT,
+    eColorAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT,
+    eDepthStencilAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT,
+    eNoProtectedAccessEXT = VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT,
+    eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT,
+    eRayTracingDisplacementMicromapNV = VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV,
+    eDescriptorBufferEXT = VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT
+  };
+
+  using PipelineCreateFlags2KHR = Flags<PipelineCreateFlagBits2KHR>;
+
+  template <>
+  struct FlagTraits<PipelineCreateFlagBits2KHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags2KHR allFlags =
+      PipelineCreateFlagBits2KHR::eDisableOptimization | PipelineCreateFlagBits2KHR::eAllowDerivatives | PipelineCreateFlagBits2KHR::eDerivative |
+      PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex | PipelineCreateFlagBits2KHR::eDispatchBase | PipelineCreateFlagBits2KHR::eDeferCompileNV |
+      PipelineCreateFlagBits2KHR::eCaptureStatistics | PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations |
+      PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired | PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure |
+      PipelineCreateFlagBits2KHR::eLinkTimeOptimizationEXT | PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfoEXT |
+      PipelineCreateFlagBits2KHR::eLibrary | PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles | PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs |
+      PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders |
+      PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders |
+      PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay | PipelineCreateFlagBits2KHR::eIndirectBindableNV |
+      PipelineCreateFlagBits2KHR::eRayTracingAllowMotionNV | PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment |
+      PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachmentEXT | PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromapEXT |
+      PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoopEXT | PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoopEXT |
+      PipelineCreateFlagBits2KHR::eNoProtectedAccessEXT | PipelineCreateFlagBits2KHR::eProtectedAccessOnlyEXT |
+      PipelineCreateFlagBits2KHR::eRayTracingDisplacementMicromapNV | PipelineCreateFlagBits2KHR::eDescriptorBufferEXT;
+  };
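`PipelineCreateFlags2KHR` is backed by the 64-bit `VkPipelineCreateFlags2KHR`, so a mask built with the C++ operators round-trips to the C API through an explicit cast. A hedged sketch (the helper name is ours):

```cpp
#include <vulkan/vulkan.hpp>

// Illustrative helper (our own name): build a ray-tracing pipeline mask and hand it
// to C code as the underlying 64-bit VkPipelineCreateFlags2KHR value.
VkPipelineCreateFlags2KHR rayTracingPipelineFlags()
{
  vk::PipelineCreateFlags2KHR flags = vk::PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs |
                                      vk::PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders;
  return static_cast<VkPipelineCreateFlags2KHR>( flags );
}
```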
+  enum class BufferUsageFlagBits2KHR : VkBufferUsageFlags2KHR
+  {
+    eTransferSrc = VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR,
+    eTransferDst = VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR,
+    eUniformTexelBuffer = VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR,
+    eStorageTexelBuffer = VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR,
+    eUniformBuffer = VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR,
+    eStorageBuffer = VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR,
+    eIndexBuffer = VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR,
+    eVertexBuffer = VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR,
+    eIndirectBuffer = VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eConditionalRenderingEXT = VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT,
+    eShaderBindingTable = VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR,
+    eRayTracingNV = VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV,
+    eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
+    eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
+    eVideoDecodeSrc = VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR,
+    eVideoDecodeDst = VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeDst = VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR,
+    eVideoEncodeSrc = VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eShaderDeviceAddress = VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR,
+    eAccelerationStructureBuildInputReadOnly = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR,
+    eAccelerationStructureStorage = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR,
+    eSamplerDescriptorBufferEXT = VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT,
+    eResourceDescriptorBufferEXT = VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT,
+    ePushDescriptorsDescriptorBufferEXT = VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT,
+    eMicromapBuildInputReadOnlyEXT = VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT,
+    eMicromapStorageEXT = VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT
+  };
+
+  using BufferUsageFlags2KHR = Flags<BufferUsageFlagBits2KHR>;
+
+  template <>
+  struct FlagTraits<BufferUsageFlagBits2KHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR BufferUsageFlags2KHR allFlags =
+      BufferUsageFlagBits2KHR::eTransferSrc | BufferUsageFlagBits2KHR::eTransferDst | BufferUsageFlagBits2KHR::eUniformTexelBuffer |
+      BufferUsageFlagBits2KHR::eStorageTexelBuffer | BufferUsageFlagBits2KHR::eUniformBuffer | BufferUsageFlagBits2KHR::eStorageBuffer |
+      BufferUsageFlagBits2KHR::eIndexBuffer | BufferUsageFlagBits2KHR::eVertexBuffer | BufferUsageFlagBits2KHR::eIndirectBuffer
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      | BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      | BufferUsageFlagBits2KHR::eConditionalRenderingEXT | BufferUsageFlagBits2KHR::eShaderBindingTable |
+      BufferUsageFlagBits2KHR::eTransformFeedbackBufferEXT | BufferUsageFlagBits2KHR::eTransformFeedbackCounterBufferEXT |
+      BufferUsageFlagBits2KHR::eVideoDecodeSrc | BufferUsageFlagBits2KHR::eVideoDecodeDst
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      | BufferUsageFlagBits2KHR::eVideoEncodeDst | BufferUsageFlagBits2KHR::eVideoEncodeSrc
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      | BufferUsageFlagBits2KHR::eShaderDeviceAddress | BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly |
+      BufferUsageFlagBits2KHR::eAccelerationStructureStorage | BufferUsageFlagBits2KHR::eSamplerDescriptorBufferEXT |
+      BufferUsageFlagBits2KHR::eResourceDescriptorBufferEXT | BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBufferEXT |
+      BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnlyEXT | BufferUsageFlagBits2KHR::eMicromapStorageEXT;
+  };
+  //=== VK_EXT_shader_object ===
+
+  enum class ShaderCreateFlagBitsEXT : VkShaderCreateFlagsEXT
+  {
+    eLinkStage = VK_SHADER_CREATE_LINK_STAGE_BIT_EXT,
+    eAllowVaryingSubgroupSize = VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT,
+    eRequireFullSubgroups = VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT,
+    eNoTaskShader = VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT,
+    eDispatchBase = VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT,
+    eFragmentShadingRateAttachment = VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT,
+    eFragmentDensityMapAttachment = VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT
+  };
+
+  using ShaderCreateFlagsEXT = Flags<ShaderCreateFlagBitsEXT>;
+
+  template <>
+  struct FlagTraits<ShaderCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderCreateFlagsEXT allFlags =
+      ShaderCreateFlagBitsEXT::eLinkStage | ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize | ShaderCreateFlagBitsEXT::eRequireFullSubgroups |
+      ShaderCreateFlagBitsEXT::eNoTaskShader | ShaderCreateFlagBitsEXT::eDispatchBase | ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment |
+      ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment;
+  };
+
+  enum class ShaderCodeTypeEXT
+  {
+    eBinary = VK_SHADER_CODE_TYPE_BINARY_EXT,
+    eSpirv = VK_SHADER_CODE_TYPE_SPIRV_EXT
+  };
+
+  //=== VK_NV_ray_tracing_invocation_reorder ===
+
+  enum class RayTracingInvocationReorderModeNV
+  {
+    eNone = VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV,
+    eReorder = VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV
+  };
+
+  //=== VK_EXT_layer_settings ===
+
+  enum class LayerSettingTypeEXT
+  {
+    eBool32 = VK_LAYER_SETTING_TYPE_BOOL32_EXT,
+    eInt32 = VK_LAYER_SETTING_TYPE_INT32_EXT,
+    eInt64 = VK_LAYER_SETTING_TYPE_INT64_EXT,
+    eUint32 = VK_LAYER_SETTING_TYPE_UINT32_EXT,
+    eUint64 = VK_LAYER_SETTING_TYPE_UINT64_EXT,
+    eFloat32 = VK_LAYER_SETTING_TYPE_FLOAT32_EXT,
+    eFloat64 = VK_LAYER_SETTING_TYPE_FLOAT64_EXT,
+    eString = VK_LAYER_SETTING_TYPE_STRING_EXT
+  };
+
+  //=== VK_NV_low_latency2 ===
+
+  enum class LatencyMarkerNV
+  {
+    eSimulationStart = VK_LATENCY_MARKER_SIMULATION_START_NV,
+    eSimulationEnd = VK_LATENCY_MARKER_SIMULATION_END_NV,
+    eRendersubmitStart = VK_LATENCY_MARKER_RENDERSUBMIT_START_NV,
+    eRendersubmitEnd = VK_LATENCY_MARKER_RENDERSUBMIT_END_NV,
+    ePresentStart = VK_LATENCY_MARKER_PRESENT_START_NV,
+    ePresentEnd = VK_LATENCY_MARKER_PRESENT_END_NV,
+    eInputSample = VK_LATENCY_MARKER_INPUT_SAMPLE_NV,
+    eTriggerFlash = VK_LATENCY_MARKER_TRIGGER_FLASH_NV,
+    eOutOfBandRendersubmitStart = VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_START_NV,
+    eOutOfBandRendersubmitEnd = VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_END_NV,
+    eOutOfBandPresentStart = VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_START_NV,
+    eOutOfBandPresentEnd = VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_END_NV
+  };
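For completeness, a small illustrative helper (entirely ours) showing how the `LatencyMarkerNV` values above might be labelled when instrumenting a frame with VK_NV_low_latency2:

```cpp
#include <vulkan/vulkan.hpp>

// Illustrative only: map the low-latency markers to strings for a profiling overlay.
char const * latencyMarkerName( vk::LatencyMarkerNV marker )
{
  switch ( marker )
  {
    case vk::LatencyMarkerNV::eSimulationStart: return "simulation start";
    case vk::LatencyMarkerNV::eSimulationEnd: return "simulation end";
    case vk::LatencyMarkerNV::ePresentStart: return "present start";
    case vk::LatencyMarkerNV::ePresentEnd: return "present end";
    default: return "other";
  }
}
```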
+
+  enum class OutOfBandQueueTypeNV
+  {
+    eRender = VK_OUT_OF_BAND_QUEUE_TYPE_RENDER_NV,
+    ePresent = VK_OUT_OF_BAND_QUEUE_TYPE_PRESENT_NV
+  };
+
+  //=== VK_KHR_cooperative_matrix ===
+
+  enum class ScopeKHR
+  {
+    eDevice = VK_SCOPE_DEVICE_KHR,
+    eWorkgroup = VK_SCOPE_WORKGROUP_KHR,
+    eSubgroup = VK_SCOPE_SUBGROUP_KHR,
+    eQueueFamily = VK_SCOPE_QUEUE_FAMILY_KHR
+  };
+  using ScopeNV = ScopeKHR;
+
+  enum class ComponentTypeKHR
+  {
+    eFloat16 = VK_COMPONENT_TYPE_FLOAT16_KHR,
+    eFloat32 = VK_COMPONENT_TYPE_FLOAT32_KHR,
+    eFloat64 = VK_COMPONENT_TYPE_FLOAT64_KHR,
+    eSint8 = VK_COMPONENT_TYPE_SINT8_KHR,
+    eSint16 = VK_COMPONENT_TYPE_SINT16_KHR,
+    eSint32 = VK_COMPONENT_TYPE_SINT32_KHR,
+    eSint64 = VK_COMPONENT_TYPE_SINT64_KHR,
+    eUint8 = VK_COMPONENT_TYPE_UINT8_KHR,
+    eUint16 = VK_COMPONENT_TYPE_UINT16_KHR,
+    eUint32 = VK_COMPONENT_TYPE_UINT32_KHR,
+    eUint64 = VK_COMPONENT_TYPE_UINT64_KHR
+  };
+  using ComponentTypeNV = ComponentTypeKHR;
+
+  //=== VK_QCOM_image_processing2 ===
+
+  enum class BlockMatchWindowCompareModeQCOM
+  {
+    eMin = VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MIN_QCOM,
+    eMax = VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MAX_QCOM
+  };
+
+  //=== VK_QCOM_filter_cubic_weights ===
+
+  enum class CubicFilterWeightsQCOM
+  {
+    eCatmullRom = VK_CUBIC_FILTER_WEIGHTS_CATMULL_ROM_QCOM,
+    eZeroTangentCardinal = VK_CUBIC_FILTER_WEIGHTS_ZERO_TANGENT_CARDINAL_QCOM,
+    eBSpline = VK_CUBIC_FILTER_WEIGHTS_B_SPLINE_QCOM,
+    eMitchellNetravali = VK_CUBIC_FILTER_WEIGHTS_MITCHELL_NETRAVALI_QCOM
+  };
+
+  //=== VK_MSFT_layered_driver ===
+
+  enum class LayeredDriverUnderlyingApiMSFT
+  {
+    eNone = VK_LAYERED_DRIVER_UNDERLYING_API_NONE_MSFT,
+    eD3D12 = VK_LAYERED_DRIVER_UNDERLYING_API_D3D12_MSFT
+  };
+
+  //=== VK_KHR_calibrated_timestamps ===
+
+  enum class TimeDomainKHR
+  {
+    eDevice = VK_TIME_DOMAIN_DEVICE_KHR,
+    eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR,
+    eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR,
+    eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR
+  };
+  using TimeDomainEXT = TimeDomainKHR;
+
+  //=========================
+  //=== Index Type Traits ===
+  //=========================
+
+  template <typename T>
+  struct IndexTypeValue
+  {
+  };
+
+  template <>
+  struct IndexTypeValue<uint16_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint16>
+  {
+    using Type = uint16_t;
+  };
+
+  template <>
+  struct IndexTypeValue<uint32_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint32>
+  {
+    using Type = uint32_t;
+  };
+
+  template <>
+  struct IndexTypeValue<uint8_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8EXT;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint8EXT>
+  {
+    using Type = uint8_t;
+  };
+
+  //===========================================================
+  //=== Mapping from ObjectType to DebugReportObjectTypeEXT ===
+  //===========================================================
+
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType )
+  {
+    switch ( objectType )
+    {
+      //=== VK_VERSION_1_0 ===
+      case VULKAN_HPP_NAMESPACE::ObjectType::eInstance: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance;
+      case VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice;
+      case VULKAN_HPP_NAMESPACE::ObjectType::eDevice: return
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice; + case VULKAN_HPP_NAMESPACE::ObjectType::eQueue: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue; + case VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory; + case VULKAN_HPP_NAMESPACE::ObjectType::eFence: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; + case VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; + case VULKAN_HPP_NAMESPACE::ObjectType::eEvent: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent; + case VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; + case VULKAN_HPP_NAMESPACE::ObjectType::eBuffer: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer; + case VULKAN_HPP_NAMESPACE::ObjectType::eBufferView: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; + case VULKAN_HPP_NAMESPACE::ObjectType::eImage: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage; + case VULKAN_HPP_NAMESPACE::ObjectType::eImageView: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView; + case VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule; + case VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; + case VULKAN_HPP_NAMESPACE::ObjectType::ePipeline: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; + case VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; + case VULKAN_HPP_NAMESPACE::ObjectType::eSampler: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; + case VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool; + case VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet; + case VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; + case VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; + case VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass; + case VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; + case VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer; + + //=== VK_VERSION_1_1 === + case VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; + case VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate; + + //=== VK_VERSION_1_3 === + case VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_KHR_surface === + case VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR; + + //=== VK_KHR_swapchain === + case VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; + + //=== 
VK_KHR_display === + case VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; + case VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR; + + //=== VK_EXT_debug_report === + case VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + case VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + case VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_NVX_binary_import === + case VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX; + case VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX; + + //=== VK_EXT_debug_utils === + case VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_KHR_acceleration_structure === + case VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + case VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; + + //=== VK_NV_ray_tracing === + case VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV; + + //=== VK_INTEL_performance_query === + case VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_KHR_deferred_host_operations === + case VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_NV_device_generated_commands === + case VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + case VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaModuleNV; + case VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaFunctionNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + case VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + case VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_NV_optical_flow === + case VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_EXT_shader_object === + case VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + default: VULKAN_HPP_ASSERT( false && "unknown ObjectType" ); return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + } + } + 
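`debugReportObjectType()` is the bridge for code that receives a modern `vk::ObjectType` (e.g. from a VK_EXT_debug_utils callback) but must feed a legacy VK_EXT_debug_report consumer; handle types the old extension never defined collapse to `eUnknown`. A short sketch (the shim is ours, not part of the header):

```cpp
#include <vulkan/vulkan.hpp>

// Our own shim (illustrative): translate the object type of a debug-utils
// message before handing it to a legacy debug-report style logger.
vk::DebugReportObjectTypeEXT legacyObjectType( vk::ObjectType type )
{
  // Types unknown to VK_EXT_debug_report (e.g. vk::ObjectType::eShaderEXT) map to eUnknown.
  return vk::debugReportObjectType( type );
}
```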
+} // namespace VULKAN_HPP_NAMESPACE
+#endif
diff --git a/MacroLibX/third_party/vulkan/vulkan_extension_inspection.hpp b/MacroLibX/third_party/vulkan/vulkan_extension_inspection.hpp
new file mode 100644
index 0000000..068fe56
--- /dev/null
+++ b/MacroLibX/third_party/vulkan/vulkan_extension_inspection.hpp
@@ -0,0 +1,2918 @@
+// Copyright 2015-2023 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_EXTENSION_INSPECTION_HPP
+#define VULKAN_EXTENSION_INSPECTION_HPP
+
+#include <map>
+#include <set>
+#include <string>
+#include <vector>
+#include <vulkan/vulkan.hpp>
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  //======================================
+  //=== Extension inspection functions ===
+  //======================================
+
+  std::set<std::string> const & getDeviceExtensions();
+  std::set<std::string> const & getInstanceExtensions();
+  std::map<std::string, std::string> const & getDeprecatedExtensions();
+  std::map<std::string, std::vector<std::vector<std::string>>> const & getExtensionDepends( std::string const & extension );
+  std::pair<bool, std::vector<std::vector<std::string>> const &> getExtensionDepends( std::string const & version, std::string const & extension );
+  std::map<std::string, std::string> const & getObsoletedExtensions();
+  std::map<std::string, std::string> const & getPromotedExtensions();
+  VULKAN_HPP_CONSTEXPR_20 std::string getExtensionDeprecatedBy( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 std::string getExtensionObsoletedBy( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 std::string getExtensionPromotedTo( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 bool isDeprecatedExtension( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 bool isDeviceExtension( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 bool isObsoletedExtension( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 bool isPromotedExtension( std::string const & extension );
+
+  //=====================================================
+  //=== Extension inspection function implementations ===
+  //=====================================================
+
+  VULKAN_HPP_INLINE std::map<std::string, std::string> const & getDeprecatedExtensions()
+  {
+    static std::map<std::string, std::string> deprecatedExtensions = {
+      { "VK_EXT_debug_report", "VK_EXT_debug_utils" },
+      { "VK_NV_glsl_shader", "" },
+      { "VK_NV_dedicated_allocation", "VK_KHR_dedicated_allocation" },
+      { "VK_AMD_gpu_shader_half_float", "VK_KHR_shader_float16_int8" },
+      { "VK_IMG_format_pvrtc", "" },
+      { "VK_NV_external_memory_capabilities", "VK_KHR_external_memory_capabilities" },
+      { "VK_NV_external_memory", "VK_KHR_external_memory" },
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      { "VK_NV_external_memory_win32", "VK_KHR_external_memory_win32" },
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      { "VK_EXT_validation_flags", "VK_EXT_layer_settings" },
+      { "VK_EXT_shader_subgroup_ballot", "VK_VERSION_1_2" },
+      { "VK_EXT_shader_subgroup_vote", "VK_VERSION_1_1" },
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+      { "VK_MVK_ios_surface", "VK_EXT_metal_surface" },
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+      { "VK_MVK_macos_surface", "VK_EXT_metal_surface" },
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+      { "VK_AMD_gpu_shader_int16", "VK_KHR_shader_float16_int8" },
+      { "VK_EXT_buffer_device_address", "VK_KHR_buffer_device_address" },
+      { "VK_EXT_validation_features", "VK_EXT_layer_settings" }
+    };
+    return deprecatedExtensions;
+  }
+
+  VULKAN_HPP_INLINE std::set<std::string> const & getDeviceExtensions()
+  {
+    static std::set<std::string> deviceExtensions = {
+      "VK_KHR_swapchain",
"VK_KHR_display_swapchain", + "VK_NV_glsl_shader", + "VK_EXT_depth_range_unrestricted", + "VK_KHR_sampler_mirror_clamp_to_edge", + "VK_IMG_filter_cubic", + "VK_AMD_rasterization_order", + "VK_AMD_shader_trinary_minmax", + "VK_AMD_shader_explicit_vertex_parameter", + "VK_EXT_debug_marker", + "VK_KHR_video_queue", + "VK_KHR_video_decode_queue", + "VK_AMD_gcn_shader", + "VK_NV_dedicated_allocation", + "VK_EXT_transform_feedback", + "VK_NVX_binary_import", + "VK_NVX_image_view_handle", + "VK_AMD_draw_indirect_count", + "VK_AMD_negative_viewport_height", + "VK_AMD_gpu_shader_half_float", + "VK_AMD_shader_ballot", + "VK_KHR_video_encode_h264", + "VK_KHR_video_encode_h265", + "VK_KHR_video_decode_h264", + "VK_AMD_texture_gather_bias_lod", + "VK_AMD_shader_info", + "VK_KHR_dynamic_rendering", + "VK_AMD_shader_image_load_store_lod", + "VK_NV_corner_sampled_image", + "VK_KHR_multiview", + "VK_IMG_format_pvrtc", + "VK_NV_external_memory", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_NV_external_memory_win32", + "VK_NV_win32_keyed_mutex", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_device_group", + "VK_KHR_shader_draw_parameters", + "VK_EXT_shader_subgroup_ballot", + "VK_EXT_shader_subgroup_vote", + "VK_EXT_texture_compression_astc_hdr", + "VK_EXT_astc_decode_mode", + "VK_EXT_pipeline_robustness", + "VK_KHR_maintenance1", + "VK_KHR_external_memory", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_KHR_external_memory_win32", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_external_memory_fd", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_KHR_win32_keyed_mutex", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_external_semaphore", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_KHR_external_semaphore_win32", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_external_semaphore_fd", + "VK_KHR_push_descriptor", + "VK_EXT_conditional_rendering", + "VK_KHR_shader_float16_int8", + "VK_KHR_16bit_storage", + "VK_KHR_incremental_present", + "VK_KHR_descriptor_update_template", + "VK_NV_clip_space_w_scaling", + "VK_EXT_display_control", + "VK_GOOGLE_display_timing", + "VK_NV_sample_mask_override_coverage", + "VK_NV_geometry_shader_passthrough", + "VK_NV_viewport_array2", + "VK_NVX_multiview_per_view_attributes", + "VK_NV_viewport_swizzle", + "VK_EXT_discard_rectangles", + "VK_EXT_conservative_rasterization", + "VK_EXT_depth_clip_enable", + "VK_EXT_hdr_metadata", + "VK_KHR_imageless_framebuffer", + "VK_KHR_create_renderpass2", + "VK_IMG_relaxed_line_rasterization", + "VK_KHR_shared_presentable_image", + "VK_KHR_external_fence", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_KHR_external_fence_win32", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_external_fence_fd", + "VK_KHR_performance_query", + "VK_KHR_maintenance2", + "VK_KHR_variable_pointers", + "VK_EXT_external_memory_dma_buf", + "VK_EXT_queue_family_foreign", + "VK_KHR_dedicated_allocation", +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + "VK_ANDROID_external_memory_android_hardware_buffer", +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + "VK_EXT_sampler_filter_minmax", + "VK_KHR_storage_buffer_storage_class", + "VK_AMD_gpu_shader_int16", +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + "VK_AMDX_shader_enqueue", +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + "VK_AMD_mixed_attachment_samples", + "VK_AMD_shader_fragment_mask", + "VK_EXT_inline_uniform_block", + "VK_EXT_shader_stencil_export", + "VK_EXT_sample_locations", + "VK_KHR_relaxed_block_layout", + "VK_KHR_get_memory_requirements2", + "VK_KHR_image_format_list", + "VK_EXT_blend_operation_advanced", + 
"VK_NV_fragment_coverage_to_color", + "VK_KHR_acceleration_structure", + "VK_KHR_ray_tracing_pipeline", + "VK_KHR_ray_query", + "VK_NV_framebuffer_mixed_samples", + "VK_NV_fill_rectangle", + "VK_NV_shader_sm_builtins", + "VK_EXT_post_depth_coverage", + "VK_KHR_sampler_ycbcr_conversion", + "VK_KHR_bind_memory2", + "VK_EXT_image_drm_format_modifier", + "VK_EXT_validation_cache", + "VK_EXT_descriptor_indexing", + "VK_EXT_shader_viewport_index_layer", +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + "VK_KHR_portability_subset", +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + "VK_NV_shading_rate_image", + "VK_NV_ray_tracing", + "VK_NV_representative_fragment_test", + "VK_KHR_maintenance3", + "VK_KHR_draw_indirect_count", + "VK_EXT_filter_cubic", + "VK_QCOM_render_pass_shader_resolve", + "VK_EXT_global_priority", + "VK_KHR_shader_subgroup_extended_types", + "VK_KHR_8bit_storage", + "VK_EXT_external_memory_host", + "VK_AMD_buffer_marker", + "VK_KHR_shader_atomic_int64", + "VK_KHR_shader_clock", + "VK_AMD_pipeline_compiler_control", + "VK_EXT_calibrated_timestamps", + "VK_AMD_shader_core_properties", + "VK_KHR_video_decode_h265", + "VK_KHR_global_priority", + "VK_AMD_memory_overallocation_behavior", + "VK_EXT_vertex_attribute_divisor", +#if defined( VK_USE_PLATFORM_GGP ) + "VK_GGP_frame_token", +#endif /*VK_USE_PLATFORM_GGP*/ + "VK_EXT_pipeline_creation_feedback", + "VK_KHR_driver_properties", + "VK_KHR_shader_float_controls", + "VK_NV_shader_subgroup_partitioned", + "VK_KHR_depth_stencil_resolve", + "VK_KHR_swapchain_mutable_format", + "VK_NV_compute_shader_derivatives", + "VK_NV_mesh_shader", + "VK_NV_fragment_shader_barycentric", + "VK_NV_shader_image_footprint", + "VK_NV_scissor_exclusive", + "VK_NV_device_diagnostic_checkpoints", + "VK_KHR_timeline_semaphore", + "VK_INTEL_shader_integer_functions2", + "VK_INTEL_performance_query", + "VK_KHR_vulkan_memory_model", + "VK_EXT_pci_bus_info", + "VK_AMD_display_native_hdr", + "VK_KHR_shader_terminate_invocation", + "VK_EXT_fragment_density_map", + "VK_EXT_scalar_block_layout", + "VK_GOOGLE_hlsl_functionality1", + "VK_GOOGLE_decorate_string", + "VK_EXT_subgroup_size_control", + "VK_KHR_fragment_shading_rate", + "VK_AMD_shader_core_properties2", + "VK_AMD_device_coherent_memory", + "VK_EXT_shader_image_atomic_int64", + "VK_KHR_spirv_1_4", + "VK_EXT_memory_budget", + "VK_EXT_memory_priority", + "VK_NV_dedicated_allocation_image_aliasing", + "VK_KHR_separate_depth_stencil_layouts", + "VK_EXT_buffer_device_address", + "VK_EXT_tooling_info", + "VK_EXT_separate_stencil_usage", + "VK_KHR_present_wait", + "VK_NV_cooperative_matrix", + "VK_NV_coverage_reduction_mode", + "VK_EXT_fragment_shader_interlock", + "VK_EXT_ycbcr_image_arrays", + "VK_KHR_uniform_buffer_standard_layout", + "VK_EXT_provoking_vertex", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_EXT_full_screen_exclusive", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_buffer_device_address", + "VK_EXT_line_rasterization", + "VK_EXT_shader_atomic_float", + "VK_EXT_host_query_reset", + "VK_EXT_index_type_uint8", + "VK_EXT_extended_dynamic_state", + "VK_KHR_deferred_host_operations", + "VK_KHR_pipeline_executable_properties", + "VK_EXT_host_image_copy", + "VK_KHR_map_memory2", + "VK_EXT_shader_atomic_float2", + "VK_EXT_swapchain_maintenance1", + "VK_EXT_shader_demote_to_helper_invocation", + "VK_NV_device_generated_commands", + "VK_NV_inherited_viewport_scissor", + "VK_KHR_shader_integer_dot_product", + "VK_EXT_texel_buffer_alignment", + "VK_QCOM_render_pass_transform", + "VK_EXT_depth_bias_control", + 
"VK_EXT_device_memory_report", + "VK_EXT_robustness2", + "VK_EXT_custom_border_color", + "VK_GOOGLE_user_type", + "VK_KHR_pipeline_library", + "VK_NV_present_barrier", + "VK_KHR_shader_non_semantic_info", + "VK_KHR_present_id", + "VK_EXT_private_data", + "VK_EXT_pipeline_creation_cache_control", + "VK_KHR_video_encode_queue", + "VK_NV_device_diagnostics_config", + "VK_QCOM_render_pass_store_ops", +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + "VK_NV_cuda_kernel_launch", +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + "VK_NV_low_latency", +#if defined( VK_USE_PLATFORM_METAL_EXT ) + "VK_EXT_metal_objects", +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + "VK_KHR_synchronization2", + "VK_EXT_descriptor_buffer", + "VK_EXT_graphics_pipeline_library", + "VK_AMD_shader_early_and_late_fragment_tests", + "VK_KHR_fragment_shader_barycentric", + "VK_KHR_shader_subgroup_uniform_control_flow", + "VK_KHR_zero_initialize_workgroup_memory", + "VK_NV_fragment_shading_rate_enums", + "VK_NV_ray_tracing_motion_blur", + "VK_EXT_mesh_shader", + "VK_EXT_ycbcr_2plane_444_formats", + "VK_EXT_fragment_density_map2", + "VK_QCOM_rotated_copy_commands", + "VK_EXT_image_robustness", + "VK_KHR_workgroup_memory_explicit_layout", + "VK_KHR_copy_commands2", + "VK_EXT_image_compression_control", + "VK_EXT_attachment_feedback_loop_layout", + "VK_EXT_4444_formats", + "VK_EXT_device_fault", + "VK_ARM_rasterization_order_attachment_access", + "VK_EXT_rgba10x6_formats", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_NV_acquire_winrt_display", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_VALVE_mutable_descriptor_type", + "VK_EXT_vertex_input_dynamic_state", + "VK_EXT_physical_device_drm", + "VK_EXT_device_address_binding_report", + "VK_EXT_depth_clip_control", + "VK_EXT_primitive_topology_list_restart", + "VK_KHR_format_feature_flags2", +#if defined( VK_USE_PLATFORM_FUCHSIA ) + "VK_FUCHSIA_external_memory", + "VK_FUCHSIA_external_semaphore", + "VK_FUCHSIA_buffer_collection", +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + "VK_HUAWEI_subpass_shading", + "VK_HUAWEI_invocation_mask", + "VK_NV_external_memory_rdma", + "VK_EXT_pipeline_properties", + "VK_EXT_frame_boundary", + "VK_EXT_multisampled_render_to_single_sampled", + "VK_EXT_extended_dynamic_state2", + "VK_EXT_color_write_enable", + "VK_EXT_primitives_generated_query", + "VK_KHR_ray_tracing_maintenance1", + "VK_EXT_global_priority_query", + "VK_EXT_image_view_min_lod", + "VK_EXT_multi_draw", + "VK_EXT_image_2d_view_of_3d", + "VK_EXT_shader_tile_image", + "VK_EXT_opacity_micromap", +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + "VK_NV_displacement_micromap", +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + "VK_EXT_load_store_op_none", + "VK_HUAWEI_cluster_culling_shader", + "VK_EXT_border_color_swizzle", + "VK_EXT_pageable_device_local_memory", + "VK_KHR_maintenance4", + "VK_ARM_shader_core_properties", + "VK_ARM_scheduling_controls", + "VK_EXT_image_sliced_view_of_3d", + "VK_VALVE_descriptor_set_host_mapping", + "VK_EXT_depth_clamp_zero_one", + "VK_EXT_non_seamless_cube_map", + "VK_ARM_render_pass_striped", + "VK_QCOM_fragment_density_map_offset", + "VK_NV_copy_memory_indirect", + "VK_NV_memory_decompression", + "VK_NV_device_generated_commands_compute", + "VK_NV_linear_color_attachment", + "VK_EXT_image_compression_control_swapchain", + "VK_QCOM_image_processing", + "VK_EXT_nested_command_buffer", + "VK_EXT_external_memory_acquire_unmodified", + "VK_EXT_extended_dynamic_state3", + "VK_EXT_subpass_merge_feedback", + "VK_EXT_shader_module_identifier", + "VK_EXT_rasterization_order_attachment_access", + 
"VK_NV_optical_flow", + "VK_EXT_legacy_dithering", + "VK_EXT_pipeline_protected_access", +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + "VK_ANDROID_external_format_resolve", +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + "VK_KHR_maintenance5", + "VK_KHR_ray_tracing_position_fetch", + "VK_EXT_shader_object", + "VK_QCOM_tile_properties", + "VK_SEC_amigo_profiling", + "VK_QCOM_multiview_per_view_viewports", + "VK_NV_ray_tracing_invocation_reorder", + "VK_NV_extended_sparse_address_space", + "VK_EXT_mutable_descriptor_type", + "VK_ARM_shader_core_builtins", + "VK_EXT_pipeline_library_group_handles", + "VK_EXT_dynamic_rendering_unused_attachments", + "VK_NV_low_latency2", + "VK_KHR_cooperative_matrix", + "VK_QCOM_multiview_per_view_render_areas", + "VK_KHR_video_maintenance1", + "VK_NV_per_stage_descriptor_set", + "VK_QCOM_image_processing2", + "VK_QCOM_filter_cubic_weights", + "VK_QCOM_ycbcr_degamma", + "VK_QCOM_filter_cubic_clamp", + "VK_EXT_attachment_feedback_loop_dynamic_state", + "VK_KHR_vertex_attribute_divisor", +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + "VK_QNX_external_memory_screen_buffer", +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + "VK_MSFT_layered_driver", + "VK_KHR_calibrated_timestamps", + "VK_KHR_maintenance6", + "VK_NV_descriptor_pool_overallocation" + }; + return deviceExtensions; + } + + VULKAN_HPP_INLINE std::set const & getInstanceExtensions() + { + static std::set instanceExtensions = { + "VK_KHR_surface", + "VK_KHR_display", +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + "VK_KHR_xlib_surface", +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + "VK_KHR_xcb_surface", +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + "VK_KHR_wayland_surface", +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + "VK_KHR_android_surface", +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_KHR_win32_surface", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_EXT_debug_report", +#if defined( VK_USE_PLATFORM_GGP ) + "VK_GGP_stream_descriptor_surface", +#endif /*VK_USE_PLATFORM_GGP*/ + "VK_NV_external_memory_capabilities", + "VK_KHR_get_physical_device_properties2", + "VK_EXT_validation_flags", +#if defined( VK_USE_PLATFORM_VI_NN ) + "VK_NN_vi_surface", +#endif /*VK_USE_PLATFORM_VI_NN*/ + "VK_KHR_device_group_creation", + "VK_KHR_external_memory_capabilities", + "VK_KHR_external_semaphore_capabilities", + "VK_EXT_direct_mode_display", +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + "VK_EXT_acquire_xlib_display", +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + "VK_EXT_display_surface_counter", + "VK_EXT_swapchain_colorspace", + "VK_KHR_external_fence_capabilities", + "VK_KHR_get_surface_capabilities2", + "VK_KHR_get_display_properties2", +#if defined( VK_USE_PLATFORM_IOS_MVK ) + "VK_MVK_ios_surface", +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + "VK_MVK_macos_surface", +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + "VK_EXT_debug_utils", +#if defined( VK_USE_PLATFORM_FUCHSIA ) + "VK_FUCHSIA_imagepipe_surface", +#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + "VK_EXT_metal_surface", +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + "VK_KHR_surface_protected_capabilities", + "VK_EXT_validation_features", + "VK_EXT_headless_surface", + "VK_EXT_surface_maintenance1", + "VK_EXT_acquire_drm_display", +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + "VK_EXT_directfb_surface", +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ +#if defined( 
+
+  VULKAN_HPP_INLINE std::set<std::string> const & getInstanceExtensions()
+  {
+    static std::set<std::string> instanceExtensions = {
+      "VK_KHR_surface",
+      "VK_KHR_display",
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+      "VK_KHR_xlib_surface",
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+      "VK_KHR_xcb_surface",
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+      "VK_KHR_wayland_surface",
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+      "VK_KHR_android_surface",
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      "VK_KHR_win32_surface",
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      "VK_EXT_debug_report",
+#if defined( VK_USE_PLATFORM_GGP )
+      "VK_GGP_stream_descriptor_surface",
+#endif /*VK_USE_PLATFORM_GGP*/
+      "VK_NV_external_memory_capabilities",
+      "VK_KHR_get_physical_device_properties2",
+      "VK_EXT_validation_flags",
+#if defined( VK_USE_PLATFORM_VI_NN )
+      "VK_NN_vi_surface",
+#endif /*VK_USE_PLATFORM_VI_NN*/
+      "VK_KHR_device_group_creation",
+      "VK_KHR_external_memory_capabilities",
+      "VK_KHR_external_semaphore_capabilities",
+      "VK_EXT_direct_mode_display",
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+      "VK_EXT_acquire_xlib_display",
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+      "VK_EXT_display_surface_counter",
+      "VK_EXT_swapchain_colorspace",
+      "VK_KHR_external_fence_capabilities",
+      "VK_KHR_get_surface_capabilities2",
+      "VK_KHR_get_display_properties2",
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+      "VK_MVK_ios_surface",
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+      "VK_MVK_macos_surface",
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+      "VK_EXT_debug_utils",
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      "VK_FUCHSIA_imagepipe_surface",
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+      "VK_EXT_metal_surface",
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+      "VK_KHR_surface_protected_capabilities",
+      "VK_EXT_validation_features",
+      "VK_EXT_headless_surface",
+      "VK_EXT_surface_maintenance1",
+      "VK_EXT_acquire_drm_display",
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+      "VK_EXT_directfb_surface",
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+      "VK_QNX_screen_surface",
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+      "VK_KHR_portability_enumeration",
+      "VK_GOOGLE_surfaceless_query",
+      "VK_LUNARG_direct_driver_loading",
+      "VK_EXT_layer_settings"
+    };
+    return instanceExtensions;
+  }
+
+  VULKAN_HPP_INLINE std::map<std::string, std::vector<std::vector<std::string>>> const & getExtensionDepends( std::string const & extension )
+  {
+    static std::map<std::string, std::vector<std::vector<std::string>>> noDependencies;
+    static std::map<std::string, std::map<std::string, std::vector<std::vector<std::string>>>> dependencies = {
+      { "VK_KHR_swapchain",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_surface",
+            } } } } },
+      { "VK_KHR_display",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_surface",
+            } } } } },
+      { "VK_KHR_display_swapchain",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_swapchain",
+              "VK_KHR_display",
+            } } } } },
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+      { "VK_KHR_xlib_surface",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_surface",
+            } } } } },
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+      { "VK_KHR_xcb_surface",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_surface",
+            } } } } },
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+      { "VK_KHR_wayland_surface",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_surface",
+            } } } } },
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+      { "VK_KHR_android_surface",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_surface",
+            } } } } },
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      { "VK_KHR_win32_surface",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_surface",
+            } } } } },
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      { "VK_EXT_debug_marker",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_EXT_debug_report",
+            } } } } },
+      { "VK_KHR_video_queue",
+        { { "VK_VERSION_1_1",
+            { {
+              "VK_KHR_synchronization2",
+            } } } } },
+      { "VK_KHR_video_decode_queue",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_video_queue",
+              "VK_KHR_synchronization2",
+            } } } } },
+      { "VK_EXT_transform_feedback",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_get_physical_device_properties2",
+            } } } } },
+      { "VK_KHR_video_encode_h264",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_video_encode_queue",
+            } } } } },
+      { "VK_KHR_video_encode_h265",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_video_encode_queue",
+            } } } } },
+      { "VK_KHR_video_decode_h264",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_video_decode_queue",
+            } } } } },
+      { "VK_AMD_texture_gather_bias_lod",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_get_physical_device_properties2",
+            } } } } },
+      { "VK_KHR_dynamic_rendering",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_depth_stencil_resolve",
+              "VK_KHR_get_physical_device_properties2",
+            } } } } },
+#if defined( VK_USE_PLATFORM_GGP )
+      { "VK_GGP_stream_descriptor_surface",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_surface",
+            } } } } },
+#endif /*VK_USE_PLATFORM_GGP*/
+      { "VK_NV_corner_sampled_image",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_get_physical_device_properties2",
+            } } } } },
+      { "VK_KHR_multiview",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_get_physical_device_properties2",
+            } } } } },
+      { "VK_NV_external_memory",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_NV_external_memory_capabilities",
+            } } } } },
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      { "VK_NV_external_memory_win32",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_NV_external_memory",
+            } } } } },
+      { "VK_NV_win32_keyed_mutex",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_NV_external_memory_win32",
+            } } } } },
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      { "VK_KHR_device_group",
+        { { "VK_VERSION_1_0",
+            { {
+              "VK_KHR_device_group_creation",
+            } } } } },
+#if defined(
VK_USE_PLATFORM_VI_NN ) + { "VK_NN_vi_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_VI_NN*/ + { "VK_EXT_texture_compression_astc_hdr", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_astc_decode_mode", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_pipeline_robustness", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_external_memory_capabilities", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_external_memory", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory_capabilities", + } } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_KHR_external_memory_win32", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_external_memory_fd", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_KHR_win32_keyed_mutex", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory_win32", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_external_semaphore_capabilities", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_external_semaphore", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_semaphore_capabilities", + } } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_KHR_external_semaphore_win32", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_semaphore", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_external_semaphore_fd", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_semaphore", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_push_descriptor", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_conditional_rendering", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_shader_float16_int8", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_16bit_storage", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_storage_buffer_storage_class", + } } } } }, + { "VK_KHR_incremental_present", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + } } } } }, + { "VK_EXT_direct_mode_display", + { { "VK_VERSION_1_0", + { { + "VK_KHR_display", + } } } } }, +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + { "VK_EXT_acquire_xlib_display", + { { "VK_VERSION_1_0", + { { + "VK_EXT_direct_mode_display", + } } } } }, +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + { "VK_EXT_display_surface_counter", + { { "VK_VERSION_1_0", + { { + "VK_KHR_display", + } } } } }, + { "VK_EXT_display_control", + { { "VK_VERSION_1_0", + { { + "VK_EXT_display_surface_counter", + "VK_KHR_swapchain", + } } } } }, + { "VK_GOOGLE_display_timing", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + } } } } }, + { "VK_NVX_multiview_per_view_attributes", + { { "VK_VERSION_1_0", + { { + "VK_KHR_multiview", + } } } } }, + { "VK_EXT_discard_rectangles", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_conservative_rasterization", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { 
"VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_depth_clip_enable", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_swapchain_colorspace", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { "VK_EXT_hdr_metadata", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + } } } } }, + { "VK_KHR_imageless_framebuffer", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance2", + "VK_KHR_image_format_list", + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_create_renderpass2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_multiview", + "VK_KHR_maintenance2", + } } } } }, + { "VK_IMG_relaxed_line_rasterization", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shared_presentable_image", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + "VK_KHR_get_surface_capabilities2", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_swapchain", + "VK_KHR_get_surface_capabilities2", + } } } } }, + { "VK_KHR_external_fence_capabilities", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_external_fence", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_fence_capabilities", + } } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_KHR_external_fence_win32", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_fence", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_external_fence_fd", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_fence", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_performance_query", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_get_surface_capabilities2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { "VK_KHR_variable_pointers", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_storage_buffer_storage_class", + } } } } }, + { "VK_KHR_get_display_properties2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_display", + } } } } }, +#if defined( VK_USE_PLATFORM_IOS_MVK ) + { "VK_MVK_ios_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + { "VK_MVK_macos_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + { "VK_EXT_external_memory_dma_buf", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory_fd", + } } } } }, + { "VK_EXT_queue_family_foreign", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_dedicated_allocation", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_memory_requirements2", + } } } } }, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + { "VK_ANDROID_external_memory_android_hardware_buffer", + { { "VK_VERSION_1_0", + { { + "VK_KHR_sampler_ycbcr_conversion", + "VK_KHR_external_memory", + "VK_EXT_queue_family_foreign", + "VK_KHR_dedicated_allocation", + } } } } }, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + { "VK_EXT_sampler_filter_minmax", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + { "VK_AMDX_shader_enqueue", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_synchronization2", + "VK_KHR_pipeline_library", + 
"VK_KHR_spirv_1_4", + } } } } }, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + { "VK_EXT_inline_uniform_block", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_maintenance1", + } } } } }, + { "VK_EXT_sample_locations", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_blend_operation_advanced", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_acceleration_structure", + { { "VK_VERSION_1_1", + { { + "VK_EXT_descriptor_indexing", + "VK_KHR_buffer_device_address", + "VK_KHR_deferred_host_operations", + } } } } }, + { "VK_KHR_ray_tracing_pipeline", + { { "VK_VERSION_1_0", + { { + "VK_KHR_spirv_1_4", + "VK_KHR_acceleration_structure", + } } } } }, + { "VK_KHR_ray_query", + { { "VK_VERSION_1_0", + { { + "VK_KHR_spirv_1_4", + "VK_KHR_acceleration_structure", + } } } } }, + { "VK_NV_shader_sm_builtins", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_sampler_ycbcr_conversion", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance1", + "VK_KHR_bind_memory2", + "VK_KHR_get_memory_requirements2", + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_image_drm_format_modifier", + { { "VK_VERSION_1_0", + { { + "VK_KHR_bind_memory2", + "VK_KHR_get_physical_device_properties2", + "VK_KHR_sampler_ycbcr_conversion", + "VK_KHR_image_format_list", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_image_format_list", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_EXT_descriptor_indexing", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_maintenance3", + } } } } }, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + { "VK_KHR_portability_subset", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + { "VK_NV_shading_rate_image", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_NV_ray_tracing", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_get_memory_requirements2", + } } } } }, + { "VK_NV_representative_fragment_test", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_maintenance3", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_shader_subgroup_extended_types", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_8bit_storage", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_storage_buffer_storage_class", + } } } } }, + { "VK_EXT_external_memory_host", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_atomic_int64", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_shader_clock", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_calibrated_timestamps", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_AMD_shader_core_properties", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_video_decode_h265", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_decode_queue", + } } } } }, + { "VK_KHR_global_priority", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", 
+ } } } } }, + { "VK_EXT_vertex_attribute_divisor", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_GGP ) + { "VK_GGP_frame_token", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + "VK_GGP_stream_descriptor_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_GGP*/ + { "VK_KHR_driver_properties", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_shader_float_controls", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_NV_shader_subgroup_partitioned", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_depth_stencil_resolve", + { { "VK_VERSION_1_0", + { { + "VK_KHR_create_renderpass2", + } } } } }, + { "VK_KHR_swapchain_mutable_format", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + "VK_KHR_maintenance2", + "VK_KHR_image_format_list", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_swapchain", + "VK_KHR_image_format_list", + } } }, + { "VK_VERSION_1_2", + { { + "VK_KHR_swapchain", + } } } } }, + { "VK_NV_compute_shader_derivatives", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_NV_mesh_shader", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_NV_fragment_shader_barycentric", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_NV_shader_image_footprint", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_NV_scissor_exclusive", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_NV_device_diagnostic_checkpoints", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_timeline_semaphore", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_INTEL_shader_integer_functions2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_vulkan_memory_model", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_pci_bus_info", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_AMD_display_native_hdr", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_get_surface_capabilities2", + "VK_KHR_swapchain", + } } } } }, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + { "VK_FUCHSIA_imagepipe_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + { "VK_KHR_shader_terminate_invocation", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_METAL_EXT ) + { "VK_EXT_metal_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + { "VK_EXT_fragment_density_map", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_scalar_block_layout", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_subgroup_size_control", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_fragment_shading_rate", + { { "VK_VERSION_1_0", + { { + "VK_KHR_create_renderpass2", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + 
"VK_KHR_create_renderpass2", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_AMD_shader_core_properties2", + { { "VK_VERSION_1_0", + { { + "VK_AMD_shader_core_properties", + } } } } }, + { "VK_AMD_device_coherent_memory", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_shader_image_atomic_int64", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_spirv_1_4", + { { "VK_VERSION_1_1", + { { + "VK_KHR_shader_float_controls", + } } } } }, + { "VK_EXT_memory_budget", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_memory_priority", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_surface_protected_capabilities", + { { "VK_VERSION_1_1", + { { + "VK_KHR_get_surface_capabilities2", + } } } } }, + { "VK_NV_dedicated_allocation_image_aliasing", + { { "VK_VERSION_1_0", + { { + "VK_KHR_dedicated_allocation", + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_separate_depth_stencil_layouts", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_create_renderpass2", + } } } } }, + { "VK_EXT_buffer_device_address", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_present_wait", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + "VK_KHR_present_id", + } } } } }, + { "VK_NV_cooperative_matrix", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_NV_coverage_reduction_mode", + { { "VK_VERSION_1_0", + { { + "VK_NV_framebuffer_mixed_samples", + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_fragment_shader_interlock", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_ycbcr_image_arrays", + { { "VK_VERSION_1_0", + { { + "VK_KHR_sampler_ycbcr_conversion", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_uniform_buffer_standard_layout", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_provoking_vertex", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_EXT_full_screen_exclusive", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_surface", + "VK_KHR_get_surface_capabilities2", + "VK_KHR_swapchain", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_EXT_headless_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { "VK_KHR_buffer_device_address", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_device_group", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_line_rasterization", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_shader_atomic_float", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_host_query_reset", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_index_type_uint8", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_extended_dynamic_state", + { { 
"VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_pipeline_executable_properties", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_host_image_copy", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_copy_commands2", + "VK_KHR_format_feature_flags2", + } } } } }, + { "VK_EXT_shader_atomic_float2", + { { "VK_VERSION_1_0", + { { + "VK_EXT_shader_atomic_float", + } } } } }, + { "VK_EXT_surface_maintenance1", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + "VK_KHR_get_surface_capabilities2", + } } } } }, + { "VK_EXT_swapchain_maintenance1", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + "VK_EXT_surface_maintenance1", + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_shader_demote_to_helper_invocation", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_device_generated_commands", + { { "VK_VERSION_1_1", + { { + "VK_KHR_buffer_device_address", + } } } } }, + { "VK_NV_inherited_viewport_scissor", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_shader_integer_dot_product", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_texel_buffer_alignment", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_QCOM_render_pass_transform", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + "VK_KHR_surface", + } } } } }, + { "VK_EXT_depth_bias_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_device_memory_report", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_acquire_drm_display", + { { "VK_VERSION_1_0", + { { + "VK_EXT_direct_mode_display", + } } } } }, + { "VK_EXT_robustness2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_custom_border_color", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_present_barrier", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_surface", + "VK_KHR_get_surface_capabilities2", + "VK_KHR_swapchain", + } } } } }, + { "VK_KHR_present_id", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_private_data", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_pipeline_creation_cache_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_video_encode_queue", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_queue", + "VK_KHR_synchronization2", + } } } } }, + { "VK_NV_device_diagnostics_config", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_synchronization2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_descriptor_buffer", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_buffer_device_address", + "VK_KHR_synchronization2", + "VK_EXT_descriptor_indexing", + } } } } }, + { "VK_EXT_graphics_pipeline_library", + { 
{ "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_pipeline_library", + } } } } }, + { "VK_AMD_shader_early_and_late_fragment_tests", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_fragment_shader_barycentric", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_shader_subgroup_uniform_control_flow", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_zero_initialize_workgroup_memory", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_NV_fragment_shading_rate_enums", + { { "VK_VERSION_1_0", + { { + "VK_KHR_fragment_shading_rate", + } } } } }, + { "VK_NV_ray_tracing_motion_blur", + { { "VK_VERSION_1_0", + { { + "VK_KHR_ray_tracing_pipeline", + } } } } }, + { "VK_EXT_mesh_shader", + { { "VK_VERSION_1_0", + { { + "VK_KHR_spirv_1_4", + } } } } }, + { "VK_EXT_ycbcr_2plane_444_formats", + { { "VK_VERSION_1_0", + { { + "VK_KHR_sampler_ycbcr_conversion", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_fragment_density_map2", + { { "VK_VERSION_1_0", + { { + "VK_EXT_fragment_density_map", + } } } } }, + { "VK_QCOM_rotated_copy_commands", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + "VK_KHR_copy_commands2", + } } } } }, + { "VK_EXT_image_robustness", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_workgroup_memory_explicit_layout", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_copy_commands2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_image_compression_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_attachment_feedback_loop_layout", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_4444_formats", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_device_fault", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_ARM_rasterization_order_attachment_access", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_rgba10x6_formats", + { { "VK_VERSION_1_0", + { { + "VK_KHR_sampler_ycbcr_conversion", + } } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_NV_acquire_winrt_display", + { { "VK_VERSION_1_0", + { { + "VK_EXT_direct_mode_display", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + { "VK_EXT_directfb_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + { "VK_VALVE_mutable_descriptor_type", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance3", + } } } } }, + { "VK_EXT_vertex_input_dynamic_state", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_physical_device_drm", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_device_address_binding_report", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_EXT_debug_utils", + } } } } }, + { "VK_EXT_depth_clip_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { 
"VK_EXT_primitive_topology_list_restart", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_format_feature_flags2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + { "VK_FUCHSIA_external_memory", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory_capabilities", + "VK_KHR_external_memory", + } } } } }, + { "VK_FUCHSIA_external_semaphore", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_semaphore_capabilities", + "VK_KHR_external_semaphore", + } } } } }, + { "VK_FUCHSIA_buffer_collection", + { { "VK_VERSION_1_0", + { { + "VK_FUCHSIA_external_memory", + "VK_KHR_sampler_ycbcr_conversion", + } } } } }, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + { "VK_HUAWEI_subpass_shading", + { { "VK_VERSION_1_0", + { { + "VK_KHR_create_renderpass2", + "VK_KHR_synchronization2", + } } } } }, + { "VK_HUAWEI_invocation_mask", + { { "VK_VERSION_1_0", + { { + "VK_KHR_ray_tracing_pipeline", + "VK_KHR_synchronization2", + } } } } }, + { "VK_NV_external_memory_rdma", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } } } }, + { "VK_EXT_pipeline_properties", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_multisampled_render_to_single_sampled", + { { "VK_VERSION_1_0", + { { + "VK_KHR_create_renderpass2", + "VK_KHR_depth_stencil_resolve", + } } } } }, + { "VK_EXT_extended_dynamic_state2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + { "VK_QNX_screen_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + { "VK_EXT_color_write_enable", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_primitives_generated_query", + { { "VK_VERSION_1_0", + { { + "VK_EXT_transform_feedback", + } } } } }, + { "VK_KHR_ray_tracing_maintenance1", + { { "VK_VERSION_1_0", + { { + "VK_KHR_acceleration_structure", + } } } } }, + { "VK_EXT_global_priority_query", + { { "VK_VERSION_1_0", + { { + "VK_EXT_global_priority", + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_image_view_min_lod", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_multi_draw", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_image_2d_view_of_3d", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance1", + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_shader_tile_image", { { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_opacity_micromap", + { { "VK_VERSION_1_0", + { { + "VK_KHR_acceleration_structure", + "VK_KHR_synchronization2", + } } } } }, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + { "VK_NV_displacement_micromap", + { { "VK_VERSION_1_0", + { { + "VK_EXT_opacity_micromap", + } } } } }, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + { "VK_HUAWEI_cluster_culling_shader", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_border_color_swizzle", + { { "VK_VERSION_1_0", + { { + "VK_EXT_custom_border_color", + } } } } }, + { "VK_EXT_pageable_device_local_memory", + { { "VK_VERSION_1_0", + { { + "VK_EXT_memory_priority", + } } } } }, + { "VK_KHR_maintenance4", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_ARM_shader_core_properties", { { "VK_VERSION_1_1", { {} } 
} } }, + { "VK_ARM_scheduling_controls", + { { "VK_VERSION_1_0", + { { + "VK_ARM_shader_core_builtins", + } } } } }, + { "VK_EXT_image_sliced_view_of_3d", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance1", + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_VALVE_descriptor_set_host_mapping", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_depth_clamp_zero_one", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_non_seamless_cube_map", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_ARM_render_pass_striped", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_synchronization2", + } } } } }, + { "VK_QCOM_fragment_density_map_offset", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_EXT_fragment_density_map", + } } } } }, + { "VK_NV_copy_memory_indirect", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_buffer_device_address", + } } } } }, + { "VK_NV_memory_decompression", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_buffer_device_address", + } } } } }, + { "VK_NV_device_generated_commands_compute", + { { "VK_VERSION_1_0", + { { + "VK_NV_device_generated_commands", + } } } } }, + { "VK_NV_linear_color_attachment", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_GOOGLE_surfaceless_query", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { "VK_EXT_image_compression_control_swapchain", + { { "VK_VERSION_1_0", + { { + "VK_EXT_image_compression_control", + } } } } }, + { "VK_QCOM_image_processing", + { { "VK_VERSION_1_0", + { { + "VK_KHR_format_feature_flags2", + } } } } }, + { "VK_EXT_nested_command_buffer", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_external_memory_acquire_unmodified", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } } } }, + { "VK_EXT_extended_dynamic_state3", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_subpass_merge_feedback", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_shader_module_identifier", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_EXT_pipeline_creation_cache_control", + } } } } }, + { "VK_EXT_rasterization_order_attachment_access", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_NV_optical_flow", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_format_feature_flags2", + "VK_KHR_synchronization2", + } } } } }, + { "VK_EXT_legacy_dithering", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_pipeline_protected_access", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + { "VK_ANDROID_external_format_resolve", + { { "VK_VERSION_1_0", + { { + "VK_ANDROID_external_memory_android_hardware_buffer", + } } } } }, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + { "VK_KHR_maintenance5", + { { "VK_VERSION_1_1", + { { + "VK_KHR_dynamic_rendering", + } } } } }, + { "VK_KHR_ray_tracing_position_fetch", + { { "VK_VERSION_1_0", + { { + "VK_KHR_acceleration_structure", + } } } } }, + { "VK_EXT_shader_object", + { { 
"VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_dynamic_rendering", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_dynamic_rendering", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_QCOM_tile_properties", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_SEC_amigo_profiling", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_QCOM_multiview_per_view_viewports", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_NV_ray_tracing_invocation_reorder", + { { "VK_VERSION_1_0", + { { + "VK_KHR_ray_tracing_pipeline", + } } } } }, + { "VK_EXT_mutable_descriptor_type", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance3", + } } } } }, + { "VK_ARM_shader_core_builtins", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_EXT_pipeline_library_group_handles", + { { "VK_VERSION_1_0", + { { + "VK_KHR_ray_tracing_pipeline", + "VK_KHR_pipeline_library", + } } } } }, + { "VK_EXT_dynamic_rendering_unused_attachments", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_dynamic_rendering", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_dynamic_rendering", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_NV_low_latency2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_timeline_semaphore", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_KHR_cooperative_matrix", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_video_maintenance1", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_queue", + } } } } }, + { "VK_NV_per_stage_descriptor_set", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance6", + } } } } }, + { "VK_QCOM_image_processing2", + { { "VK_VERSION_1_0", + { { + "VK_QCOM_image_processing", + } } } } }, + { "VK_QCOM_filter_cubic_weights", + { { "VK_VERSION_1_0", + { { + "VK_EXT_filter_cubic", + } } } } }, + { "VK_QCOM_filter_cubic_clamp", + { { "VK_VERSION_1_0", + { { + "VK_EXT_filter_cubic", + "VK_EXT_sampler_filter_minmax", + } } }, + { "VK_VERSION_1_2", + { { + "VK_EXT_filter_cubic", + } } } } }, + { "VK_EXT_attachment_feedback_loop_dynamic_state", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_EXT_attachment_feedback_loop_layout", + } } } } }, + { "VK_KHR_vertex_attribute_divisor", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + { "VK_QNX_external_memory_screen_buffer", + { { "VK_VERSION_1_0", + { { + "VK_KHR_sampler_ycbcr_conversion", + "VK_KHR_external_memory", + "VK_KHR_dedicated_allocation", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_queue_family_foreign", + } } } } }, +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + { "VK_MSFT_layered_driver", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_calibrated_timestamps", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_maintenance6", { { "VK_VERSION_1_0", { {} } } } }, + { "VK_NV_descriptor_pool_overallocation", { { "VK_VERSION_1_1", { {} } } } } + }; + auto depIt = dependencies.find( extension ); + return ( depIt != dependencies.end() ) ? 
depIt->second : noDependencies;
+  }
+
+  VULKAN_HPP_INLINE std::pair<bool, std::vector<std::vector<std::string>> const &> getExtensionDepends( std::string const & version,
+                                                                                                        std::string const & extension )
+  {
+#if !defined( NDEBUG )
+    static std::set<std::string> versions = { "VK_VERSION_1_0", "VK_VERSION_1_1", "VK_VERSION_1_2", "VK_VERSION_1_3" };
+    assert( versions.find( version ) != versions.end() );
+#endif
+    static std::vector<std::vector<std::string>> noDependencies;
+
+    std::map<std::string, std::vector<std::vector<std::string>>> const & dependencies = getExtensionDepends( extension );
+    if ( dependencies.empty() )
+    {
+      return { true, noDependencies };
+    }
+    auto depIt = dependencies.lower_bound( version );
+    if ( ( depIt == dependencies.end() ) || ( depIt->first != version ) )
+    {
+      depIt = std::prev( depIt );
+    }
+    if ( depIt == dependencies.end() )
+    {
+      return { false, noDependencies };
+    }
+    else
+    {
+      return { true, depIt->second };
+    }
+  }
+
+  VULKAN_HPP_INLINE std::map<std::string, std::string> const & getObsoletedExtensions()
+  {
+    static std::map<std::string, std::string> obsoletedExtensions = { { "VK_AMD_negative_viewport_height", "VK_KHR_maintenance1" } };
+    return obsoletedExtensions;
+  }
+
+  VULKAN_HPP_INLINE std::map<std::string, std::string> const & getPromotedExtensions()
+  {
+    static std::map<std::string, std::string> promotedExtensions = {
+      { "VK_KHR_sampler_mirror_clamp_to_edge", "VK_VERSION_1_2" },
+      { "VK_EXT_debug_marker", "VK_EXT_debug_utils" },
+      { "VK_AMD_draw_indirect_count", "VK_KHR_draw_indirect_count" },
+      { "VK_KHR_dynamic_rendering", "VK_VERSION_1_3" },
+      { "VK_KHR_multiview", "VK_VERSION_1_1" },
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      { "VK_NV_win32_keyed_mutex", "VK_KHR_win32_keyed_mutex" },
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      { "VK_KHR_get_physical_device_properties2", "VK_VERSION_1_1" },
+      { "VK_KHR_device_group", "VK_VERSION_1_1" },
+      { "VK_KHR_shader_draw_parameters", "VK_VERSION_1_1" },
+      { "VK_EXT_texture_compression_astc_hdr", "VK_VERSION_1_3" },
+      { "VK_KHR_maintenance1", "VK_VERSION_1_1" },
+      { "VK_KHR_device_group_creation", "VK_VERSION_1_1" },
+      { "VK_KHR_external_memory_capabilities", "VK_VERSION_1_1" },
+      { "VK_KHR_external_memory", "VK_VERSION_1_1" },
+      { "VK_KHR_external_semaphore_capabilities", "VK_VERSION_1_1" },
+      { "VK_KHR_external_semaphore", "VK_VERSION_1_1" },
+      { "VK_KHR_shader_float16_int8", "VK_VERSION_1_2" },
+      { "VK_KHR_16bit_storage", "VK_VERSION_1_1" },
+      { "VK_KHR_descriptor_update_template", "VK_VERSION_1_1" },
+      { "VK_KHR_imageless_framebuffer", "VK_VERSION_1_2" },
+      { "VK_KHR_create_renderpass2", "VK_VERSION_1_2" },
+      { "VK_KHR_external_fence_capabilities", "VK_VERSION_1_1" },
+      { "VK_KHR_external_fence", "VK_VERSION_1_1" },
+      { "VK_KHR_maintenance2", "VK_VERSION_1_1" },
+      { "VK_KHR_variable_pointers", "VK_VERSION_1_1" },
+      { "VK_KHR_dedicated_allocation", "VK_VERSION_1_1" },
+      { "VK_EXT_sampler_filter_minmax", "VK_VERSION_1_2" },
+      { "VK_KHR_storage_buffer_storage_class", "VK_VERSION_1_1" },
+      { "VK_EXT_inline_uniform_block", "VK_VERSION_1_3" },
+      { "VK_KHR_relaxed_block_layout", "VK_VERSION_1_1" },
+      { "VK_KHR_get_memory_requirements2", "VK_VERSION_1_1" },
+      { "VK_KHR_image_format_list", "VK_VERSION_1_2" },
+      { "VK_KHR_sampler_ycbcr_conversion", "VK_VERSION_1_1" },
+      { "VK_KHR_bind_memory2", "VK_VERSION_1_1" },
+      { "VK_EXT_descriptor_indexing", "VK_VERSION_1_2" },
+      { "VK_EXT_shader_viewport_index_layer", "VK_VERSION_1_2" },
+      { "VK_KHR_maintenance3", "VK_VERSION_1_1" },
+      { "VK_KHR_draw_indirect_count", "VK_VERSION_1_2" },
+      { "VK_EXT_global_priority", "VK_KHR_global_priority" },
+      { "VK_KHR_shader_subgroup_extended_types", "VK_VERSION_1_2" },
+      { "VK_KHR_8bit_storage", "VK_VERSION_1_2" },
+      { "VK_KHR_shader_atomic_int64",
"VK_VERSION_1_2" }, + { "VK_EXT_calibrated_timestamps", "VK_KHR_calibrated_timestamps" }, + { "VK_EXT_vertex_attribute_divisor", "VK_KHR_vertex_attribute_divisor" }, + { "VK_EXT_pipeline_creation_feedback", "VK_VERSION_1_3" }, + { "VK_KHR_driver_properties", "VK_VERSION_1_2" }, + { "VK_KHR_shader_float_controls", "VK_VERSION_1_2" }, + { "VK_KHR_depth_stencil_resolve", "VK_VERSION_1_2" }, + { "VK_NV_fragment_shader_barycentric", "VK_KHR_fragment_shader_barycentric" }, + { "VK_KHR_timeline_semaphore", "VK_VERSION_1_2" }, + { "VK_KHR_vulkan_memory_model", "VK_VERSION_1_2" }, + { "VK_KHR_shader_terminate_invocation", "VK_VERSION_1_3" }, + { "VK_EXT_scalar_block_layout", "VK_VERSION_1_2" }, + { "VK_EXT_subgroup_size_control", "VK_VERSION_1_3" }, + { "VK_KHR_spirv_1_4", "VK_VERSION_1_2" }, + { "VK_KHR_separate_depth_stencil_layouts", "VK_VERSION_1_2" }, + { "VK_EXT_tooling_info", "VK_VERSION_1_3" }, + { "VK_EXT_separate_stencil_usage", "VK_VERSION_1_2" }, + { "VK_KHR_uniform_buffer_standard_layout", "VK_VERSION_1_2" }, + { "VK_KHR_buffer_device_address", "VK_VERSION_1_2" }, + { "VK_EXT_host_query_reset", "VK_VERSION_1_2" }, + { "VK_EXT_extended_dynamic_state", "VK_VERSION_1_3" }, + { "VK_EXT_shader_demote_to_helper_invocation", "VK_VERSION_1_3" }, + { "VK_KHR_shader_integer_dot_product", "VK_VERSION_1_3" }, + { "VK_EXT_texel_buffer_alignment", "VK_VERSION_1_3" }, + { "VK_KHR_shader_non_semantic_info", "VK_VERSION_1_3" }, + { "VK_EXT_private_data", "VK_VERSION_1_3" }, + { "VK_EXT_pipeline_creation_cache_control", "VK_VERSION_1_3" }, + { "VK_KHR_synchronization2", "VK_VERSION_1_3" }, + { "VK_KHR_zero_initialize_workgroup_memory", "VK_VERSION_1_3" }, + { "VK_EXT_ycbcr_2plane_444_formats", "VK_VERSION_1_3" }, + { "VK_EXT_image_robustness", "VK_VERSION_1_3" }, + { "VK_KHR_copy_commands2", "VK_VERSION_1_3" }, + { "VK_EXT_4444_formats", "VK_VERSION_1_3" }, + { "VK_ARM_rasterization_order_attachment_access", "VK_EXT_rasterization_order_attachment_access" }, + { "VK_VALVE_mutable_descriptor_type", "VK_EXT_mutable_descriptor_type" }, + { "VK_KHR_format_feature_flags2", "VK_VERSION_1_3" }, + { "VK_EXT_extended_dynamic_state2", "VK_VERSION_1_3" }, + { "VK_EXT_global_priority_query", "VK_KHR_global_priority" }, + { "VK_KHR_maintenance4", "VK_VERSION_1_3" } + }; + return promotedExtensions; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string getExtensionDeprecatedBy( std::string const & extension ) + { + if ( extension == "VK_EXT_debug_report" ) + { + return "VK_EXT_debug_utils"; + } + if ( extension == "VK_NV_glsl_shader" ) + { + return ""; + } + if ( extension == "VK_NV_dedicated_allocation" ) + { + return "VK_KHR_dedicated_allocation"; + } + if ( extension == "VK_AMD_gpu_shader_half_float" ) + { + return "VK_KHR_shader_float16_int8"; + } + if ( extension == "VK_IMG_format_pvrtc" ) + { + return ""; + } + if ( extension == "VK_NV_external_memory_capabilities" ) + { + return "VK_KHR_external_memory_capabilities"; + } + if ( extension == "VK_NV_external_memory" ) + { + return "VK_KHR_external_memory"; + } +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + if ( extension == "VK_NV_external_memory_win32" ) + { + return "VK_KHR_external_memory_win32"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + if ( extension == "VK_EXT_validation_flags" ) + { + return "VK_EXT_layer_settings"; + } + if ( extension == "VK_EXT_shader_subgroup_ballot" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_shader_subgroup_vote" ) + { + return "VK_VERSION_1_1"; + } +#if defined( VK_USE_PLATFORM_IOS_MVK ) + if ( extension 
== "VK_MVK_ios_surface" ) + { + return "VK_EXT_metal_surface"; + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + if ( extension == "VK_MVK_macos_surface" ) + { + return "VK_EXT_metal_surface"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + if ( extension == "VK_AMD_gpu_shader_int16" ) + { + return "VK_KHR_shader_float16_int8"; + } + if ( extension == "VK_EXT_buffer_device_address" ) + { + return "VK_KHR_buffer_device_address"; + } + if ( extension == "VK_EXT_validation_features" ) + { + return "VK_EXT_layer_settings"; + } + return ""; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string getExtensionObsoletedBy( std::string const & extension ) + { + if ( extension == "VK_AMD_negative_viewport_height" ) + { + return "VK_KHR_maintenance1"; + } + return ""; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string getExtensionPromotedTo( std::string const & extension ) + { + if ( extension == "VK_KHR_sampler_mirror_clamp_to_edge" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_debug_marker" ) + { + return "VK_EXT_debug_utils"; + } + if ( extension == "VK_AMD_draw_indirect_count" ) + { + return "VK_KHR_draw_indirect_count"; + } + if ( extension == "VK_KHR_dynamic_rendering" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_multiview" ) + { + return "VK_VERSION_1_1"; + } +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + if ( extension == "VK_NV_win32_keyed_mutex" ) + { + return "VK_KHR_win32_keyed_mutex"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + if ( extension == "VK_KHR_get_physical_device_properties2" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_device_group" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_shader_draw_parameters" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_EXT_texture_compression_astc_hdr" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_maintenance1" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_device_group_creation" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_external_memory_capabilities" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_external_memory" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_external_semaphore_capabilities" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_external_semaphore" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_shader_float16_int8" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_16bit_storage" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_descriptor_update_template" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_imageless_framebuffer" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_create_renderpass2" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_external_fence_capabilities" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_external_fence" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_maintenance2" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_variable_pointers" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_dedicated_allocation" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_EXT_sampler_filter_minmax" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_storage_buffer_storage_class" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_EXT_inline_uniform_block" ) + { + return "VK_VERSION_1_3"; + } + if 
( extension == "VK_KHR_relaxed_block_layout" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_get_memory_requirements2" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_image_format_list" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_sampler_ycbcr_conversion" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_bind_memory2" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_EXT_descriptor_indexing" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_shader_viewport_index_layer" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_maintenance3" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_draw_indirect_count" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_global_priority" ) + { + return "VK_KHR_global_priority"; + } + if ( extension == "VK_KHR_shader_subgroup_extended_types" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_8bit_storage" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_shader_atomic_int64" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_calibrated_timestamps" ) + { + return "VK_KHR_calibrated_timestamps"; + } + if ( extension == "VK_EXT_vertex_attribute_divisor" ) + { + return "VK_KHR_vertex_attribute_divisor"; + } + if ( extension == "VK_EXT_pipeline_creation_feedback" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_driver_properties" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_shader_float_controls" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_depth_stencil_resolve" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_NV_fragment_shader_barycentric" ) + { + return "VK_KHR_fragment_shader_barycentric"; + } + if ( extension == "VK_KHR_timeline_semaphore" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_vulkan_memory_model" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_shader_terminate_invocation" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_scalar_block_layout" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_subgroup_size_control" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_spirv_1_4" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_separate_depth_stencil_layouts" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_tooling_info" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_separate_stencil_usage" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_uniform_buffer_standard_layout" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_buffer_device_address" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_host_query_reset" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_extended_dynamic_state" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_shader_integer_dot_product" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_texel_buffer_alignment" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_shader_non_semantic_info" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_private_data" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_pipeline_creation_cache_control" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_synchronization2" ) + { + return 
"VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_zero_initialize_workgroup_memory" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_ycbcr_2plane_444_formats" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_image_robustness" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_copy_commands2" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_4444_formats" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_ARM_rasterization_order_attachment_access" ) + { + return "VK_EXT_rasterization_order_attachment_access"; + } + if ( extension == "VK_VALVE_mutable_descriptor_type" ) + { + return "VK_EXT_mutable_descriptor_type"; + } + if ( extension == "VK_KHR_format_feature_flags2" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_extended_dynamic_state2" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_global_priority_query" ) + { + return "VK_KHR_global_priority"; + } + if ( extension == "VK_KHR_maintenance4" ) + { + return "VK_VERSION_1_3"; + } + return ""; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isDeprecatedExtension( std::string const & extension ) + { + return ( extension == "VK_EXT_debug_report" ) || ( extension == "VK_NV_glsl_shader" ) || ( extension == "VK_NV_dedicated_allocation" ) || + ( extension == "VK_AMD_gpu_shader_half_float" ) || ( extension == "VK_IMG_format_pvrtc" ) || ( extension == "VK_NV_external_memory_capabilities" ) || + ( extension == "VK_NV_external_memory" ) || +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + ( extension == "VK_NV_external_memory_win32" ) || +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + ( extension == "VK_EXT_validation_flags" ) || ( extension == "VK_EXT_shader_subgroup_ballot" ) || ( extension == "VK_EXT_shader_subgroup_vote" ) || +#if defined( VK_USE_PLATFORM_IOS_MVK ) + ( extension == "VK_MVK_ios_surface" ) || +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + ( extension == "VK_MVK_macos_surface" ) || +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + ( extension == "VK_AMD_gpu_shader_int16" ) || ( extension == "VK_EXT_buffer_device_address" ) || ( extension == "VK_EXT_validation_features" ); + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isDeviceExtension( std::string const & extension ) + { + return ( extension == "VK_KHR_swapchain" ) || ( extension == "VK_KHR_display_swapchain" ) || ( extension == "VK_NV_glsl_shader" ) || + ( extension == "VK_EXT_depth_range_unrestricted" ) || ( extension == "VK_KHR_sampler_mirror_clamp_to_edge" ) || + ( extension == "VK_IMG_filter_cubic" ) || ( extension == "VK_AMD_rasterization_order" ) || ( extension == "VK_AMD_shader_trinary_minmax" ) || + ( extension == "VK_AMD_shader_explicit_vertex_parameter" ) || ( extension == "VK_EXT_debug_marker" ) || ( extension == "VK_KHR_video_queue" ) || + ( extension == "VK_KHR_video_decode_queue" ) || ( extension == "VK_AMD_gcn_shader" ) || ( extension == "VK_NV_dedicated_allocation" ) || + ( extension == "VK_EXT_transform_feedback" ) || ( extension == "VK_NVX_binary_import" ) || ( extension == "VK_NVX_image_view_handle" ) || + ( extension == "VK_AMD_draw_indirect_count" ) || ( extension == "VK_AMD_negative_viewport_height" ) || + ( extension == "VK_AMD_gpu_shader_half_float" ) || ( extension == "VK_AMD_shader_ballot" ) || ( extension == "VK_KHR_video_encode_h264" ) || + ( extension == "VK_KHR_video_encode_h265" ) || ( extension == "VK_KHR_video_decode_h264" ) || ( extension == "VK_AMD_texture_gather_bias_lod" ) || + ( extension == "VK_AMD_shader_info" ) 
|| ( extension == "VK_KHR_dynamic_rendering" ) || ( extension == "VK_AMD_shader_image_load_store_lod" ) || + ( extension == "VK_NV_corner_sampled_image" ) || ( extension == "VK_KHR_multiview" ) || ( extension == "VK_IMG_format_pvrtc" ) || + ( extension == "VK_NV_external_memory" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_NV_external_memory_win32" ) || ( extension == "VK_NV_win32_keyed_mutex" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_device_group" ) || ( extension == "VK_KHR_shader_draw_parameters" ) || ( extension == "VK_EXT_shader_subgroup_ballot" ) || + ( extension == "VK_EXT_shader_subgroup_vote" ) || ( extension == "VK_EXT_texture_compression_astc_hdr" ) || + ( extension == "VK_EXT_astc_decode_mode" ) || ( extension == "VK_EXT_pipeline_robustness" ) || ( extension == "VK_KHR_maintenance1" ) || + ( extension == "VK_KHR_external_memory" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_KHR_external_memory_win32" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_external_memory_fd" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_KHR_win32_keyed_mutex" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_external_semaphore" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_KHR_external_semaphore_win32" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_external_semaphore_fd" ) || ( extension == "VK_KHR_push_descriptor" ) || ( extension == "VK_EXT_conditional_rendering" ) || + ( extension == "VK_KHR_shader_float16_int8" ) || ( extension == "VK_KHR_16bit_storage" ) || ( extension == "VK_KHR_incremental_present" ) || + ( extension == "VK_KHR_descriptor_update_template" ) || ( extension == "VK_NV_clip_space_w_scaling" ) || ( extension == "VK_EXT_display_control" ) || + ( extension == "VK_GOOGLE_display_timing" ) || ( extension == "VK_NV_sample_mask_override_coverage" ) || + ( extension == "VK_NV_geometry_shader_passthrough" ) || ( extension == "VK_NV_viewport_array2" ) || + ( extension == "VK_NVX_multiview_per_view_attributes" ) || ( extension == "VK_NV_viewport_swizzle" ) || + ( extension == "VK_EXT_discard_rectangles" ) || ( extension == "VK_EXT_conservative_rasterization" ) || + ( extension == "VK_EXT_depth_clip_enable" ) || ( extension == "VK_EXT_hdr_metadata" ) || ( extension == "VK_KHR_imageless_framebuffer" ) || + ( extension == "VK_KHR_create_renderpass2" ) || ( extension == "VK_IMG_relaxed_line_rasterization" ) || + ( extension == "VK_KHR_shared_presentable_image" ) || ( extension == "VK_KHR_external_fence" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_KHR_external_fence_win32" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_external_fence_fd" ) || ( extension == "VK_KHR_performance_query" ) || ( extension == "VK_KHR_maintenance2" ) || + ( extension == "VK_KHR_variable_pointers" ) || ( extension == "VK_EXT_external_memory_dma_buf" ) || ( extension == "VK_EXT_queue_family_foreign" ) || + ( extension == "VK_KHR_dedicated_allocation" ) +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + || ( extension == "VK_ANDROID_external_memory_android_hardware_buffer" ) +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + || ( extension == "VK_EXT_sampler_filter_minmax" ) || ( extension == "VK_KHR_storage_buffer_storage_class" ) || + ( extension == "VK_AMD_gpu_shader_int16" ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + || ( extension == "VK_AMDX_shader_enqueue" ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + || ( extension == 
"VK_AMD_mixed_attachment_samples" ) || ( extension == "VK_AMD_shader_fragment_mask" ) || + ( extension == "VK_EXT_inline_uniform_block" ) || ( extension == "VK_EXT_shader_stencil_export" ) || ( extension == "VK_EXT_sample_locations" ) || + ( extension == "VK_KHR_relaxed_block_layout" ) || ( extension == "VK_KHR_get_memory_requirements2" ) || + ( extension == "VK_KHR_image_format_list" ) || ( extension == "VK_EXT_blend_operation_advanced" ) || + ( extension == "VK_NV_fragment_coverage_to_color" ) || ( extension == "VK_KHR_acceleration_structure" ) || + ( extension == "VK_KHR_ray_tracing_pipeline" ) || ( extension == "VK_KHR_ray_query" ) || ( extension == "VK_NV_framebuffer_mixed_samples" ) || + ( extension == "VK_NV_fill_rectangle" ) || ( extension == "VK_NV_shader_sm_builtins" ) || ( extension == "VK_EXT_post_depth_coverage" ) || + ( extension == "VK_KHR_sampler_ycbcr_conversion" ) || ( extension == "VK_KHR_bind_memory2" ) || + ( extension == "VK_EXT_image_drm_format_modifier" ) || ( extension == "VK_EXT_validation_cache" ) || ( extension == "VK_EXT_descriptor_indexing" ) || + ( extension == "VK_EXT_shader_viewport_index_layer" ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + || ( extension == "VK_KHR_portability_subset" ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + || ( extension == "VK_NV_shading_rate_image" ) || ( extension == "VK_NV_ray_tracing" ) || ( extension == "VK_NV_representative_fragment_test" ) || + ( extension == "VK_KHR_maintenance3" ) || ( extension == "VK_KHR_draw_indirect_count" ) || ( extension == "VK_EXT_filter_cubic" ) || + ( extension == "VK_QCOM_render_pass_shader_resolve" ) || ( extension == "VK_EXT_global_priority" ) || + ( extension == "VK_KHR_shader_subgroup_extended_types" ) || ( extension == "VK_KHR_8bit_storage" ) || + ( extension == "VK_EXT_external_memory_host" ) || ( extension == "VK_AMD_buffer_marker" ) || ( extension == "VK_KHR_shader_atomic_int64" ) || + ( extension == "VK_KHR_shader_clock" ) || ( extension == "VK_AMD_pipeline_compiler_control" ) || ( extension == "VK_EXT_calibrated_timestamps" ) || + ( extension == "VK_AMD_shader_core_properties" ) || ( extension == "VK_KHR_video_decode_h265" ) || ( extension == "VK_KHR_global_priority" ) || + ( extension == "VK_AMD_memory_overallocation_behavior" ) || ( extension == "VK_EXT_vertex_attribute_divisor" ) +#if defined( VK_USE_PLATFORM_GGP ) + || ( extension == "VK_GGP_frame_token" ) +#endif /*VK_USE_PLATFORM_GGP*/ + || ( extension == "VK_EXT_pipeline_creation_feedback" ) || ( extension == "VK_KHR_driver_properties" ) || + ( extension == "VK_KHR_shader_float_controls" ) || ( extension == "VK_NV_shader_subgroup_partitioned" ) || + ( extension == "VK_KHR_depth_stencil_resolve" ) || ( extension == "VK_KHR_swapchain_mutable_format" ) || + ( extension == "VK_NV_compute_shader_derivatives" ) || ( extension == "VK_NV_mesh_shader" ) || + ( extension == "VK_NV_fragment_shader_barycentric" ) || ( extension == "VK_NV_shader_image_footprint" ) || + ( extension == "VK_NV_scissor_exclusive" ) || ( extension == "VK_NV_device_diagnostic_checkpoints" ) || + ( extension == "VK_KHR_timeline_semaphore" ) || ( extension == "VK_INTEL_shader_integer_functions2" ) || + ( extension == "VK_INTEL_performance_query" ) || ( extension == "VK_KHR_vulkan_memory_model" ) || ( extension == "VK_EXT_pci_bus_info" ) || + ( extension == "VK_AMD_display_native_hdr" ) || ( extension == "VK_KHR_shader_terminate_invocation" ) || + ( extension == "VK_EXT_fragment_density_map" ) || ( extension == "VK_EXT_scalar_block_layout" ) || + ( extension == 
"VK_GOOGLE_hlsl_functionality1" ) || ( extension == "VK_GOOGLE_decorate_string" ) || + ( extension == "VK_EXT_subgroup_size_control" ) || ( extension == "VK_KHR_fragment_shading_rate" ) || + ( extension == "VK_AMD_shader_core_properties2" ) || ( extension == "VK_AMD_device_coherent_memory" ) || + ( extension == "VK_EXT_shader_image_atomic_int64" ) || ( extension == "VK_KHR_spirv_1_4" ) || ( extension == "VK_EXT_memory_budget" ) || + ( extension == "VK_EXT_memory_priority" ) || ( extension == "VK_NV_dedicated_allocation_image_aliasing" ) || + ( extension == "VK_KHR_separate_depth_stencil_layouts" ) || ( extension == "VK_EXT_buffer_device_address" ) || + ( extension == "VK_EXT_tooling_info" ) || ( extension == "VK_EXT_separate_stencil_usage" ) || ( extension == "VK_KHR_present_wait" ) || + ( extension == "VK_NV_cooperative_matrix" ) || ( extension == "VK_NV_coverage_reduction_mode" ) || + ( extension == "VK_EXT_fragment_shader_interlock" ) || ( extension == "VK_EXT_ycbcr_image_arrays" ) || + ( extension == "VK_KHR_uniform_buffer_standard_layout" ) || ( extension == "VK_EXT_provoking_vertex" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_EXT_full_screen_exclusive" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_buffer_device_address" ) || ( extension == "VK_EXT_line_rasterization" ) || ( extension == "VK_EXT_shader_atomic_float" ) || + ( extension == "VK_EXT_host_query_reset" ) || ( extension == "VK_EXT_index_type_uint8" ) || ( extension == "VK_EXT_extended_dynamic_state" ) || + ( extension == "VK_KHR_deferred_host_operations" ) || ( extension == "VK_KHR_pipeline_executable_properties" ) || + ( extension == "VK_EXT_host_image_copy" ) || ( extension == "VK_KHR_map_memory2" ) || ( extension == "VK_EXT_shader_atomic_float2" ) || + ( extension == "VK_EXT_swapchain_maintenance1" ) || ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || + ( extension == "VK_NV_device_generated_commands" ) || ( extension == "VK_NV_inherited_viewport_scissor" ) || + ( extension == "VK_KHR_shader_integer_dot_product" ) || ( extension == "VK_EXT_texel_buffer_alignment" ) || + ( extension == "VK_QCOM_render_pass_transform" ) || ( extension == "VK_EXT_depth_bias_control" ) || ( extension == "VK_EXT_device_memory_report" ) || + ( extension == "VK_EXT_robustness2" ) || ( extension == "VK_EXT_custom_border_color" ) || ( extension == "VK_GOOGLE_user_type" ) || + ( extension == "VK_KHR_pipeline_library" ) || ( extension == "VK_NV_present_barrier" ) || ( extension == "VK_KHR_shader_non_semantic_info" ) || + ( extension == "VK_KHR_present_id" ) || ( extension == "VK_EXT_private_data" ) || ( extension == "VK_EXT_pipeline_creation_cache_control" ) || + ( extension == "VK_KHR_video_encode_queue" ) || ( extension == "VK_NV_device_diagnostics_config" ) || + ( extension == "VK_QCOM_render_pass_store_ops" ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + || ( extension == "VK_NV_cuda_kernel_launch" ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + || ( extension == "VK_NV_low_latency" ) +#if defined( VK_USE_PLATFORM_METAL_EXT ) + || ( extension == "VK_EXT_metal_objects" ) +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + || ( extension == "VK_KHR_synchronization2" ) || ( extension == "VK_EXT_descriptor_buffer" ) || ( extension == "VK_EXT_graphics_pipeline_library" ) || + ( extension == "VK_AMD_shader_early_and_late_fragment_tests" ) || ( extension == "VK_KHR_fragment_shader_barycentric" ) || + ( extension == "VK_KHR_shader_subgroup_uniform_control_flow" ) || ( extension == 
"VK_KHR_zero_initialize_workgroup_memory" ) || + ( extension == "VK_NV_fragment_shading_rate_enums" ) || ( extension == "VK_NV_ray_tracing_motion_blur" ) || ( extension == "VK_EXT_mesh_shader" ) || + ( extension == "VK_EXT_ycbcr_2plane_444_formats" ) || ( extension == "VK_EXT_fragment_density_map2" ) || + ( extension == "VK_QCOM_rotated_copy_commands" ) || ( extension == "VK_EXT_image_robustness" ) || + ( extension == "VK_KHR_workgroup_memory_explicit_layout" ) || ( extension == "VK_KHR_copy_commands2" ) || + ( extension == "VK_EXT_image_compression_control" ) || ( extension == "VK_EXT_attachment_feedback_loop_layout" ) || + ( extension == "VK_EXT_4444_formats" ) || ( extension == "VK_EXT_device_fault" ) || + ( extension == "VK_ARM_rasterization_order_attachment_access" ) || ( extension == "VK_EXT_rgba10x6_formats" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_NV_acquire_winrt_display" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_VALVE_mutable_descriptor_type" ) || ( extension == "VK_EXT_vertex_input_dynamic_state" ) || + ( extension == "VK_EXT_physical_device_drm" ) || ( extension == "VK_EXT_device_address_binding_report" ) || + ( extension == "VK_EXT_depth_clip_control" ) || ( extension == "VK_EXT_primitive_topology_list_restart" ) || + ( extension == "VK_KHR_format_feature_flags2" ) +#if defined( VK_USE_PLATFORM_FUCHSIA ) + || ( extension == "VK_FUCHSIA_external_memory" ) || ( extension == "VK_FUCHSIA_external_semaphore" ) || ( extension == "VK_FUCHSIA_buffer_collection" ) +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + || ( extension == "VK_HUAWEI_subpass_shading" ) || ( extension == "VK_HUAWEI_invocation_mask" ) || ( extension == "VK_NV_external_memory_rdma" ) || + ( extension == "VK_EXT_pipeline_properties" ) || ( extension == "VK_EXT_frame_boundary" ) || + ( extension == "VK_EXT_multisampled_render_to_single_sampled" ) || ( extension == "VK_EXT_extended_dynamic_state2" ) || + ( extension == "VK_EXT_color_write_enable" ) || ( extension == "VK_EXT_primitives_generated_query" ) || + ( extension == "VK_KHR_ray_tracing_maintenance1" ) || ( extension == "VK_EXT_global_priority_query" ) || + ( extension == "VK_EXT_image_view_min_lod" ) || ( extension == "VK_EXT_multi_draw" ) || ( extension == "VK_EXT_image_2d_view_of_3d" ) || + ( extension == "VK_EXT_shader_tile_image" ) || ( extension == "VK_EXT_opacity_micromap" ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + || ( extension == "VK_NV_displacement_micromap" ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + || ( extension == "VK_EXT_load_store_op_none" ) || ( extension == "VK_HUAWEI_cluster_culling_shader" ) || + ( extension == "VK_EXT_border_color_swizzle" ) || ( extension == "VK_EXT_pageable_device_local_memory" ) || ( extension == "VK_KHR_maintenance4" ) || + ( extension == "VK_ARM_shader_core_properties" ) || ( extension == "VK_ARM_scheduling_controls" ) || + ( extension == "VK_EXT_image_sliced_view_of_3d" ) || ( extension == "VK_VALVE_descriptor_set_host_mapping" ) || + ( extension == "VK_EXT_depth_clamp_zero_one" ) || ( extension == "VK_EXT_non_seamless_cube_map" ) || ( extension == "VK_ARM_render_pass_striped" ) || + ( extension == "VK_QCOM_fragment_density_map_offset" ) || ( extension == "VK_NV_copy_memory_indirect" ) || + ( extension == "VK_NV_memory_decompression" ) || ( extension == "VK_NV_device_generated_commands_compute" ) || + ( extension == "VK_NV_linear_color_attachment" ) || ( extension == "VK_EXT_image_compression_control_swapchain" ) || + ( extension == "VK_QCOM_image_processing" ) || ( extension == 
"VK_EXT_nested_command_buffer" ) || + ( extension == "VK_EXT_external_memory_acquire_unmodified" ) || ( extension == "VK_EXT_extended_dynamic_state3" ) || + ( extension == "VK_EXT_subpass_merge_feedback" ) || ( extension == "VK_EXT_shader_module_identifier" ) || + ( extension == "VK_EXT_rasterization_order_attachment_access" ) || ( extension == "VK_NV_optical_flow" ) || + ( extension == "VK_EXT_legacy_dithering" ) || ( extension == "VK_EXT_pipeline_protected_access" ) +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + || ( extension == "VK_ANDROID_external_format_resolve" ) +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + || ( extension == "VK_KHR_maintenance5" ) || ( extension == "VK_KHR_ray_tracing_position_fetch" ) || ( extension == "VK_EXT_shader_object" ) || + ( extension == "VK_QCOM_tile_properties" ) || ( extension == "VK_SEC_amigo_profiling" ) || ( extension == "VK_QCOM_multiview_per_view_viewports" ) || + ( extension == "VK_NV_ray_tracing_invocation_reorder" ) || ( extension == "VK_NV_extended_sparse_address_space" ) || + ( extension == "VK_EXT_mutable_descriptor_type" ) || ( extension == "VK_ARM_shader_core_builtins" ) || + ( extension == "VK_EXT_pipeline_library_group_handles" ) || ( extension == "VK_EXT_dynamic_rendering_unused_attachments" ) || + ( extension == "VK_NV_low_latency2" ) || ( extension == "VK_KHR_cooperative_matrix" ) || + ( extension == "VK_QCOM_multiview_per_view_render_areas" ) || ( extension == "VK_KHR_video_maintenance1" ) || + ( extension == "VK_NV_per_stage_descriptor_set" ) || ( extension == "VK_QCOM_image_processing2" ) || + ( extension == "VK_QCOM_filter_cubic_weights" ) || ( extension == "VK_QCOM_ycbcr_degamma" ) || ( extension == "VK_QCOM_filter_cubic_clamp" ) || + ( extension == "VK_EXT_attachment_feedback_loop_dynamic_state" ) || ( extension == "VK_KHR_vertex_attribute_divisor" ) +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + || ( extension == "VK_QNX_external_memory_screen_buffer" ) +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + || ( extension == "VK_MSFT_layered_driver" ) || ( extension == "VK_KHR_calibrated_timestamps" ) || ( extension == "VK_KHR_maintenance6" ) || + ( extension == "VK_NV_descriptor_pool_overallocation" ); + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension ) + { + return ( extension == "VK_KHR_surface" ) || ( extension == "VK_KHR_display" ) +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + || ( extension == "VK_KHR_xlib_surface" ) +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + || ( extension == "VK_KHR_xcb_surface" ) +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + || ( extension == "VK_KHR_wayland_surface" ) +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + || ( extension == "VK_KHR_android_surface" ) +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_KHR_win32_surface" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_EXT_debug_report" ) +#if defined( VK_USE_PLATFORM_GGP ) + || ( extension == "VK_GGP_stream_descriptor_surface" ) +#endif /*VK_USE_PLATFORM_GGP*/ + || ( extension == "VK_NV_external_memory_capabilities" ) || ( extension == "VK_KHR_get_physical_device_properties2" ) || + ( extension == "VK_EXT_validation_flags" ) +#if defined( VK_USE_PLATFORM_VI_NN ) + || ( extension == "VK_NN_vi_surface" ) +#endif /*VK_USE_PLATFORM_VI_NN*/ + || ( extension == "VK_KHR_device_group_creation" ) || ( extension == 
"VK_KHR_external_memory_capabilities" ) || + ( extension == "VK_KHR_external_semaphore_capabilities" ) || ( extension == "VK_EXT_direct_mode_display" ) +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + || ( extension == "VK_EXT_acquire_xlib_display" ) +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + || ( extension == "VK_EXT_display_surface_counter" ) || ( extension == "VK_EXT_swapchain_colorspace" ) || + ( extension == "VK_KHR_external_fence_capabilities" ) || ( extension == "VK_KHR_get_surface_capabilities2" ) || + ( extension == "VK_KHR_get_display_properties2" ) +#if defined( VK_USE_PLATFORM_IOS_MVK ) + || ( extension == "VK_MVK_ios_surface" ) +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + || ( extension == "VK_MVK_macos_surface" ) +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + || ( extension == "VK_EXT_debug_utils" ) +#if defined( VK_USE_PLATFORM_FUCHSIA ) + || ( extension == "VK_FUCHSIA_imagepipe_surface" ) +#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + || ( extension == "VK_EXT_metal_surface" ) +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + || ( extension == "VK_KHR_surface_protected_capabilities" ) || ( extension == "VK_EXT_validation_features" ) || + ( extension == "VK_EXT_headless_surface" ) || ( extension == "VK_EXT_surface_maintenance1" ) || ( extension == "VK_EXT_acquire_drm_display" ) +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + || ( extension == "VK_EXT_directfb_surface" ) +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + || ( extension == "VK_QNX_screen_surface" ) +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + || ( extension == "VK_KHR_portability_enumeration" ) || ( extension == "VK_GOOGLE_surfaceless_query" ) || + ( extension == "VK_LUNARG_direct_driver_loading" ) || ( extension == "VK_EXT_layer_settings" ); + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isObsoletedExtension( std::string const & extension ) + { + return ( extension == "VK_AMD_negative_viewport_height" ); + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isPromotedExtension( std::string const & extension ) + { + return ( extension == "VK_KHR_sampler_mirror_clamp_to_edge" ) || ( extension == "VK_EXT_debug_marker" ) || ( extension == "VK_AMD_draw_indirect_count" ) || + ( extension == "VK_KHR_dynamic_rendering" ) || ( extension == "VK_KHR_multiview" ) || +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + ( extension == "VK_NV_win32_keyed_mutex" ) || +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + ( extension == "VK_KHR_get_physical_device_properties2" ) || ( extension == "VK_KHR_device_group" ) || + ( extension == "VK_KHR_shader_draw_parameters" ) || ( extension == "VK_EXT_texture_compression_astc_hdr" ) || + ( extension == "VK_KHR_maintenance1" ) || ( extension == "VK_KHR_device_group_creation" ) || + ( extension == "VK_KHR_external_memory_capabilities" ) || ( extension == "VK_KHR_external_memory" ) || + ( extension == "VK_KHR_external_semaphore_capabilities" ) || ( extension == "VK_KHR_external_semaphore" ) || + ( extension == "VK_KHR_shader_float16_int8" ) || ( extension == "VK_KHR_16bit_storage" ) || ( extension == "VK_KHR_descriptor_update_template" ) || + ( extension == "VK_KHR_imageless_framebuffer" ) || ( extension == "VK_KHR_create_renderpass2" ) || + ( extension == "VK_KHR_external_fence_capabilities" ) || ( extension == "VK_KHR_external_fence" ) || ( extension == "VK_KHR_maintenance2" ) || + ( extension == "VK_KHR_variable_pointers" ) || ( extension == "VK_KHR_dedicated_allocation" ) || ( extension == 
"VK_EXT_sampler_filter_minmax" ) || + ( extension == "VK_KHR_storage_buffer_storage_class" ) || ( extension == "VK_EXT_inline_uniform_block" ) || + ( extension == "VK_KHR_relaxed_block_layout" ) || ( extension == "VK_KHR_get_memory_requirements2" ) || + ( extension == "VK_KHR_image_format_list" ) || ( extension == "VK_KHR_sampler_ycbcr_conversion" ) || ( extension == "VK_KHR_bind_memory2" ) || + ( extension == "VK_EXT_descriptor_indexing" ) || ( extension == "VK_EXT_shader_viewport_index_layer" ) || ( extension == "VK_KHR_maintenance3" ) || + ( extension == "VK_KHR_draw_indirect_count" ) || ( extension == "VK_EXT_global_priority" ) || + ( extension == "VK_KHR_shader_subgroup_extended_types" ) || ( extension == "VK_KHR_8bit_storage" ) || + ( extension == "VK_KHR_shader_atomic_int64" ) || ( extension == "VK_EXT_calibrated_timestamps" ) || + ( extension == "VK_EXT_vertex_attribute_divisor" ) || ( extension == "VK_EXT_pipeline_creation_feedback" ) || + ( extension == "VK_KHR_driver_properties" ) || ( extension == "VK_KHR_shader_float_controls" ) || ( extension == "VK_KHR_depth_stencil_resolve" ) || + ( extension == "VK_NV_fragment_shader_barycentric" ) || ( extension == "VK_KHR_timeline_semaphore" ) || + ( extension == "VK_KHR_vulkan_memory_model" ) || ( extension == "VK_KHR_shader_terminate_invocation" ) || + ( extension == "VK_EXT_scalar_block_layout" ) || ( extension == "VK_EXT_subgroup_size_control" ) || ( extension == "VK_KHR_spirv_1_4" ) || + ( extension == "VK_KHR_separate_depth_stencil_layouts" ) || ( extension == "VK_EXT_tooling_info" ) || + ( extension == "VK_EXT_separate_stencil_usage" ) || ( extension == "VK_KHR_uniform_buffer_standard_layout" ) || + ( extension == "VK_KHR_buffer_device_address" ) || ( extension == "VK_EXT_host_query_reset" ) || ( extension == "VK_EXT_extended_dynamic_state" ) || + ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || ( extension == "VK_KHR_shader_integer_dot_product" ) || + ( extension == "VK_EXT_texel_buffer_alignment" ) || ( extension == "VK_KHR_shader_non_semantic_info" ) || ( extension == "VK_EXT_private_data" ) || + ( extension == "VK_EXT_pipeline_creation_cache_control" ) || ( extension == "VK_KHR_synchronization2" ) || + ( extension == "VK_KHR_zero_initialize_workgroup_memory" ) || ( extension == "VK_EXT_ycbcr_2plane_444_formats" ) || + ( extension == "VK_EXT_image_robustness" ) || ( extension == "VK_KHR_copy_commands2" ) || ( extension == "VK_EXT_4444_formats" ) || + ( extension == "VK_ARM_rasterization_order_attachment_access" ) || ( extension == "VK_VALVE_mutable_descriptor_type" ) || + ( extension == "VK_KHR_format_feature_flags2" ) || ( extension == "VK_EXT_extended_dynamic_state2" ) || + ( extension == "VK_EXT_global_priority_query" ) || ( extension == "VK_KHR_maintenance4" ); + } +} // namespace VULKAN_HPP_NAMESPACE + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_format_traits.hpp b/MacroLibX/third_party/vulkan/vulkan_format_traits.hpp new file mode 100644 index 0000000..626d7d2 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_format_traits.hpp @@ -0,0 +1,7669 @@ +// Copyright 2015-2023 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#ifndef VULKAN_FORMAT_TRAITS_HPP +#define VULKAN_FORMAT_TRAITS_HPP + +#include + +namespace VULKAN_HPP_NAMESPACE +{ + + //===================== + //=== Format Traits === + //===================== + + // The three-dimensional extent of a texel block. 
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 std::array<uint8_t, 3> blockExtent( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return { { 5, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return { { 5, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return { { 5, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return { { 5, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return { { 6, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return { { 6, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return { { 6, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return { { 6, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return { { 8, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return { { 8, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return { { 8, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return { { 8, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return { { 8, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return { { 8, 8, 1 } }; + case
VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return { { 10, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return { { 10, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return { { 10, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return { { 10, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return { { 10, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return { { 10, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return { { 10, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return { { 10, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return { { 12, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return { { 12, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return { { 12, 12, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return { { 12, 12, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return { { 5, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return { { 5, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return { { 6, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return { { 6, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return { { 8, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return { { 8, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return { { 8, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return { { 10, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return { { 10, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return { { 10, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return { { 10, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return { { 12, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return { { 12, 12, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return { { 8, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return { { 8, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return { { 8, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return { { 8, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: 
return { { 4, 4, 1 } }; + + default: return { { 1, 1, 1 } }; + } + } + + // The texel block size in bytes. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t blockSize( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 4; + case 
VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 12; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 12; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 12; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 16; + case 
VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 24; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 24; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 24; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 32; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 32; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 32; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 4; + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 5; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 8; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 16; + case 
VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 8; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 6; + case 
VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 8; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 8; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 6; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 6; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 2; + case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1; + + default: VULKAN_HPP_ASSERT( false ); return 0; + } + } + + // The class of the format (can't be just named "class"!) 
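`compatibilityClass`, defined next, folds the spec's format compatibility classes into comparable strings: uncompressed classes by bit width ("32-bit", "64-bit", ...), compressed and planar ones by block layout. A minimal sketch of using it as an aliasing check for mutable-format images; the `sameCompatibilityClass` helper is hypothetical, not part of the header:

```cpp
#include <cstring>

#include <vulkan/vulkan.hpp>
#include <vulkan/vulkan_format_traits.hpp>

// True when both formats map to the same compatibility-class string, i.e.
// they sit in the same spec compatibility class. Illustrative helper only.
inline bool sameCompatibilityClass( vk::Format a, vk::Format b )
{
  return std::strcmp( vk::compatibilityClass( a ), vk::compatibilityClass( b ) ) == 0;
}

// sameCompatibilityClass( vk::Format::eR8G8B8A8Unorm, vk::Format::eR8G8B8A8Srgb )
//   -> true: both return "32-bit"
// sameCompatibilityClass( vk::Format::eR8G8B8A8Unorm, vk::Format::eR16G16B16Sfloat )
//   -> false: "32-bit" vs "48-bit"
```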
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compatibilityClass( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return "32-bit"; + case 
VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: 
return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return "96-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return "96-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return "96-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return "192-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return "192-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return "192-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return "256-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return "256-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return "256-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return "D16"; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return "D24"; + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return "D32"; + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return "S8"; + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return "D16S8"; + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return "D24S8"; + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return "D32S8"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return "BC1_RGB"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return "BC1_RGB"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return "BC1_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return "BC1_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return "BC2"; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return "BC2"; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return "BC3"; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return "BC3"; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return "BC4"; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return "BC4"; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return "BC5"; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return "BC5"; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return "BC6H"; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return "BC6H"; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return "BC7"; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return "BC7"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return "ETC2_RGB"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return "ETC2_RGB"; + case 
VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return "ETC2_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return "ETC2_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return "ETC2_EAC_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return "ETC2_EAC_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return "EAC_R"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return "EAC_R"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return "EAC_RG"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return "EAC_RG"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return "ASTC_4x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return "ASTC_4x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return "ASTC_5x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return "ASTC_5x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return "ASTC_5x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return "ASTC_5x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return "ASTC_6x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return "ASTC_6x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return "ASTC_6x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return "ASTC_6x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return "ASTC_8x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return "ASTC_8x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return "ASTC_8x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return "ASTC_8x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return "ASTC_8x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return "ASTC_8x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return "ASTC_10x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return "ASTC_10x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return "ASTC_10x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return "ASTC_10x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return "ASTC_10x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return "ASTC_10x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return "ASTC_10x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return "ASTC_10x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return "ASTC_12x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return "ASTC_12x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return "ASTC_12x12"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return "ASTC_12x12"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return "32-bit G8B8G8R8"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return "32-bit B8G8R8G8"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return "8-bit 3-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return "8-bit 2-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return "8-bit 3-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return "8-bit 2-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return "8-bit 3-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return "32-bit"; + case 
VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "64-bit R10G10B10A10"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "64-bit G10B10G10R10"; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "64-bit B10G10R10G10"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "10-bit 3-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "10-bit 2-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "10-bit 3-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "10-bit 2-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "10-bit 3-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "64-bit R12G12B12A12"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "64-bit G12B12G12R12"; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "64-bit B12G12R12G12"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "12-bit 3-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "12-bit 2-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "12-bit 3-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "12-bit 2-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "12-bit 3-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return "64-bit G16B16G16R16"; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return "64-bit B16G16R16G16"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return "16-bit 3-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return "16-bit 2-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return "16-bit 3-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return "16-bit 2-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return "16-bit 3-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return "8-bit 2-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return "10-bit 2-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return "12-bit 2-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return "16-bit 2-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return "ASTC_4x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return "ASTC_5x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return "ASTC_5x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return "ASTC_6x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return "ASTC_6x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return "ASTC_8x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return "ASTC_8x6"; + case 
VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return "ASTC_8x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return "ASTC_10x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return "ASTC_10x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return "ASTC_10x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return "ASTC_10x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return "ASTC_12x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return "ASTC_12x12"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return "PVRTC1_2BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return "PVRTC1_4BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return "PVRTC2_2BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return "PVRTC2_4BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return "PVRTC1_2BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return "PVRTC1_4BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC2_2BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC2_4BPP"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return "8-bit alpha"; + + default: VULKAN_HPP_ASSERT( false ); return ""; + } + } + + // The number of bits in this component, if not compressed, otherwise 0. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentBits( VULKAN_HPP_NAMESPACE::Format format, uint8_t component ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: + switch ( component ) + { + case 0: return 4; + case 1: return 4; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: + switch ( component ) + { + case 0: return 4; + case 1: return 4; + case 2: return 4; + case 3: return 4; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: + switch ( component ) + { + case 0: return 4; + case 1: return 4; + case 2: return 4; + case 3: return 4; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: + switch ( component ) + { + case 0: return 5; + case 1: return 6; + case 2: return 5; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: + switch ( component ) + { + case 0: return 5; + case 1: return 6; + case 2: return 5; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: + switch ( component ) + { + case 0: return 5; + case 1: return 5; + case 2: return 5; + case 3: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: + switch ( component ) + { + case 0: return 5; + case 1: return 5; + case 2: return 5; + case 3: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: + switch ( component ) + { + case 0: return 1; + case 1: return 5; + case 2: return 5; + case 3: return 5; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case 
VULKAN_HPP_NAMESPACE::Format::eR8Snorm: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: 
VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case 
VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case 
VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: + switch ( component ) + { + case 0: return 2; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + 
default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: + switch ( component ) + { + case 0: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: + switch ( component ) + { + case 0: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: + switch ( component ) + { + case 0: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + 
default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + case 3: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + case 3: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: + switch ( component ) + { + case 0: return 32; + case 1: return 32; + case 2: return 32; + case 3: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: + switch ( component ) + { + case 0: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: + switch ( component ) + { + case 0: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: + switch ( component ) + { + case 0: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + case 3: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 
2: return 64; + case 3: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: + switch ( component ) + { + case 0: return 64; + case 1: return 64; + case 2: return 64; + case 3: return 64; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: + switch ( component ) + { + case 0: return 10; + case 1: return 11; + case 2: return 11; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: + switch ( component ) + { + case 0: return 9; + case 1: return 9; + case 2: return 9; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: + switch ( component ) + { + case 0: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: + switch ( component ) + { + case 0: return 24; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: + switch ( component ) + { + case 0: return 32; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: + switch ( component ) + { + case 0: return 16; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: + switch ( component ) + { + case 0: return 24; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: + switch ( component ) + { + case 0: return 32; + case 1: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: + switch ( component ) + { + case 0: return 11; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: + switch ( component ) + { + case 0: return 11; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: + switch ( component ) + { + case 0: return 11; + case 1: return 11; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: + switch ( component ) + { + case 0: return 11; + case 1: return 11; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + case 3: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + 
case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: + switch ( component ) + { + case 0: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + case 3: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: + switch ( component ) + { + case 0: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + case 3: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + case 3: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + case 3: return 12; + 
default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + case 3: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( component ) + { + case 0: return 8; + case 1: return 8; + case 2: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 10; + case 1: return 10; + case 2: return 10; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 12; + case 1: return 12; + case 2: return 12; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + case 2: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: + switch ( component ) + { 
+ case 0: return 4; + case 1: return 4; + case 2: return 4; + case 3: return 4; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: + switch ( component ) + { + case 0: return 4; + case 1: return 4; + case 2: return 4; + case 3: return 4; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: + switch ( component ) + { + case 0: return 1; + case 1: return 5; + case 2: return 5; + case 3: return 5; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + + default: return 0; + } + } + + // The number of components of this format. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentCount( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 4; + case 
VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 4; + case 
VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 3; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 3; + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 2; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 3; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 3; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 1; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 1; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 2; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 2; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 3; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 3; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 3; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 3; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: 
return 4; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 1; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 1; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 2; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 2; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3; + case 
VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 4; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 2; + case 
VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1; + + default: return 0; + } + } + + // The name of the component + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * componentName( VULKAN_HPP_NAMESPACE::Format format, uint8_t component ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: + switch ( component ) + { + 
case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); 
return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: 
VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case 
VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + 
} + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); 
return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); 
return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: + switch ( component ) + { + case 0: return "D"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: + switch ( component ) + { + case 0: return "D"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: + switch ( component ) + { + case 0: return "D"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: + switch ( component ) + { + case 0: return "S"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: + switch ( component ) + { + case 0: return "D"; + case 1: return "S"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: + switch ( component ) + { + case 0: return "D"; + case 1: return "S"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: + switch ( component ) + { + case 0: return "D"; + case 1: return "S"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( 
false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); 
return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + 
case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + 
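+          // [editor's note, not part of the upstream generated header] For the multi-planar
+          // eG8B8R8...Plane...Unorm formats, component indices follow the order in which the
+          // letters appear in the format name (G, then B, then R) rather than RGBA order,
+          // exactly as the cases above spell out.
+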
default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: + switch ( component ) + { + case 0: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case 
VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: + switch ( component ) + { + case 0: return "B"; + case 1: return "G"; + case 2: return "R"; + case 3: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( component ) + { + case 0: return "G"; + case 1: return "B"; + case 2: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: + switch ( component ) + { + case 0: return "A"; + case 1: return "R"; + case 2: return "G"; + case 3: return "B"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: + switch ( 
component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + case 2: return "B"; + case 3: return "A"; + default: VULKAN_HPP_ASSERT( 
false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG:
+        switch ( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG:
+        switch ( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG:
+        switch ( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG:
+        switch ( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG:
+        switch ( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG:
+        switch ( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG:
+        switch ( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV:
+        switch ( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR:
+        switch ( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR:
+        switch ( component )
+        {
+          case 0: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+
+      default: return "";
+    }
+  }
+
+  // The numeric format of the component
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * componentNumericFormat( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
+  {
+    switch ( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8:
+        switch ( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16:
+        switch ( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16:
+        switch ( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16:
+        switch ( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16:
+        switch ( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
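+      // [editor's sketch, not part of the upstream generated header] componentNumericFormat
+      // follows the same exhaustive-switch pattern as componentName above: for each VkFormat
+      // it names the numeric encoding ("UNORM", "SNORM", "UINT", "SFLOAT", ...) of a single
+      // component, asserting on an out-of-range component index. Assuming the companion
+      // componentCount() helper defined elsewhere in this header, a caller could dump a
+      // format's component layout roughly like this:
+      //
+      //   VULKAN_HPP_NAMESPACE::Format fmt = VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb;
+      //   for ( uint8_t c = 0; c < VULKAN_HPP_NAMESPACE::componentCount( fmt ); ++c )
+      //     std::printf( "%s: %s\n",
+      //                  VULKAN_HPP_NAMESPACE::componentName( fmt, c ),
+      //                  VULKAN_HPP_NAMESPACE::componentNumericFormat( fmt, c ) );
+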
case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: + switch ( component ) + { + case 0: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: + switch ( component ) + { + case 0: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: + switch ( component ) + { + case 0: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: + switch ( component ) + { + case 0: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case 
VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + case 3: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + case 3: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + case 3: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: + switch 
( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + case 3: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + case 3: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + case 3: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + case 3: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + case 3: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + case 3: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case 
VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + case 3: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + case 3: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + case 3: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + case 3: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + case 3: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + case 3: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: + switch ( component ) + { + case 0: return 
"UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: + switch ( component ) + { + case 0: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: + switch ( component ) + { + case 0: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: + switch ( component ) + { + case 0: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + default: 
VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + case 2: return "SNORM"; + case 3: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: + switch ( component ) + { + case 0: return "USCALED"; + case 1: return "USCALED"; + case 2: return "USCALED"; + case 3: return "USCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: + switch ( component ) + { + case 0: return "SSCALED"; + case 1: return "SSCALED"; + case 2: return "SSCALED"; + case 3: return "SSCALED"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: + switch ( component ) + { + case 0: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: + switch ( component ) + { + case 0: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return 
"SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: + switch ( component ) + { + case 0: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: + switch ( component ) + { + case 0: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: + switch ( component ) + { + case 0: return "UINT"; + case 1: return "UINT"; + case 2: return "UINT"; + case 3: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + case 2: return "SINT"; + case 3: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: + switch ( component ) + { + case 0: return "UFLOAT"; + case 1: return "UFLOAT"; + case 2: return "UFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: + switch ( component ) + { + case 0: return "UFLOAT"; + case 1: return "UFLOAT"; + case 2: return "UFLOAT"; + default: 
VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: + switch ( component ) + { + case 0: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: + switch ( component ) + { + case 0: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "UINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: + switch ( component ) + { + case 0: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + default: 
VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: + switch ( component ) + { + case 0: return "UFLOAT"; + case 1: return "UFLOAT"; + case 2: return "UFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: + switch ( component ) + { + case 0: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: + switch ( component ) + { + case 0: return "SNORM"; + case 1: return "SNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: 
+ switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case 
VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: 
VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + default: 
VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: 
VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return 
"SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: + switch ( component ) + { + case 0: return "SFLOAT"; + case 1: return "SFLOAT"; + case 2: return "SFLOAT"; + case 3: return "SFLOAT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: + switch ( component ) + { + case 0: return "SRGB"; + case 1: return "SRGB"; + case 2: return "SRGB"; + case 3: return "SRGB"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: + switch ( component ) + { + case 0: return "SINT"; + case 1: return "SINT"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case 
VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + + default: return ""; + } + } + + // The plane this component lies in. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentPlaneIndex( VULKAN_HPP_NAMESPACE::Format format, uint8_t component ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false 
); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( component ) + { + case 0: return 0; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + + default: return 0; + } + } + + // True, if the components of this format are compressed, otherwise false. 
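+
+  // Illustrative sketch, not part of the generated traits; the helper name is
+  // hypothetical: componentPlaneIndex() maps a component of a multi-planar
+  // format to the plane that stores it. For the NV12-style format
+  // eG8B8R82Plane420Unorm, G sits alone in plane 0 while B and R share the
+  // interleaved plane 1.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool exampleComponentsShareAPlane( VULKAN_HPP_NAMESPACE::Format format, uint8_t c0, uint8_t c1 )
+  {
+    // True when both components are stored in the same image plane.
+    return VULKAN_HPP_NAMESPACE::componentPlaneIndex( format, c0 ) == VULKAN_HPP_NAMESPACE::componentPlaneIndex( format, c1 );
+  }
+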
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool componentsAreCompressed( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: + case 
VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return true; + default: return false; + } + } + + // A textual description of the compression scheme, or an empty string if it is not compressed + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compressionScheme( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return "ASTC LDR"; + case 
VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC"; + + default: return ""; + } + } + + // True, if this format is a compressed one. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool isCompressed( VULKAN_HPP_NAMESPACE::Format format ) + { + return ( *VULKAN_HPP_NAMESPACE::compressionScheme( format ) != 0 ); + } + + // The number of bits into which the format is packed. 
A single image element in this format + // can be stored in the same space as a scalar type of this bit width. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t packed( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 16; + case 
VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 16; + + default: return 0; + } + } + + // The single-plane format that this plane is compatible with. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_NAMESPACE::Format planeCompatibleFormat( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case 
VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm; + default: VULKAN_HPP_ASSERT( false ); return 
VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( plane ) + { + case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm; + case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm; + default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined; + } + + default: VULKAN_HPP_ASSERT( plane == 0 ); return format; + } + } + + // The number of image planes of this format. 
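+
+  // Illustrative sketch, not part of the generated traits; the helper name is
+  // hypothetical: planeCompatibleFormat() yields the single-plane format that
+  // a given plane aliases, i.e. the format a per-plane VkImageView would use
+  // (e.g. plane 1 of eG8B8R82Plane420Unorm aliases eR8G8Unorm).
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool examplePlaneCanAlias( VULKAN_HPP_NAMESPACE::Format multiPlanarFormat, uint8_t plane, VULKAN_HPP_NAMESPACE::Format singlePlaneFormat )
+  {
+    // True when singlePlaneFormat is the compatible view format of the plane.
+    return VULKAN_HPP_NAMESPACE::planeCompatibleFormat( multiPlanarFormat, plane ) == singlePlaneFormat;
+  }
+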
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeCount( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 2; + + default: return 1; + } + } + + // The relative height of this plane. A value of k means that this plane is 1/k the height of the overall format. 
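+
+  // Illustrative sketch, not part of the generated traits; the helper name is
+  // hypothetical: planeCount() drives per-plane iteration, e.g. when binding
+  // memory for each plane of a disjoint multi-planar image separately.
+  template <typename Callback>
+  VULKAN_HPP_INLINE void exampleForEachPlane( VULKAN_HPP_NAMESPACE::Format format, Callback && callback )
+  {
+    for ( uint8_t plane = 0; plane < VULKAN_HPP_NAMESPACE::planeCount( format ); ++plane )
+    {
+      // Hand the callback each plane index and its compatible view format.
+      callback( plane, VULKAN_HPP_NAMESPACE::planeCompatibleFormat( format, plane ) );
+    }
+  }
+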
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeHeightDivisor( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case 
VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + + default: VULKAN_HPP_ASSERT( plane == 0 ); return 1; + } + } + + // The relative width of this plane. A value of k means that this plane is 1/k the width of the overall format. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeWidthDivisor( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + 
default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + case 2: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 2; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + case 2: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: + switch ( plane ) + { + case 0: return 1; + case 1: return 1; + default: VULKAN_HPP_ASSERT( false ); return 1; + } + + default: VULKAN_HPP_ASSERT( plane == 0 ); return 1; + } + } + + // The number of texels in a texel block. 
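+  // For the single-texel (uncompressed, packed, and multi-planar) formats this
+  // is 1; for the BC/ETC2/EAC/ASTC block-compressed formats it is the block
+  // width times the block height, e.g. 16 for the 4x4 BC blocks and 80 for
+  // ASTC 10x8 (the PVRTC formats are tabulated as 1 here). A hedged editorial
+  // sketch, assuming width and height are multiples of the block extent:
+  //   uint32_t blockCount = ( width * height ) / texelsPerBlock( format );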
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t texelsPerBlock( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 1; + case 
VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 1; + case 
VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 20; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 20; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 25; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 25; + case 
VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 30; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 30; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 36; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 36; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 40; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 40; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 48; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 48; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 64; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 64; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 50; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 50; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 60; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 60; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 80; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 80; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 100; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 100; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 120; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 120; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 144; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 144; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 1; + case 
VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 20; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 25; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 30; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 36; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 40; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 48; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 64; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 50; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 60; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 80; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 100; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 120; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 144; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1; + + default: VULKAN_HPP_ASSERT( false ); return 0; + } + } + +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_fuchsia.h b/MacroLibX/third_party/vulkan/vulkan_fuchsia.h new file mode 100644 index 0000000..76e1564 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_fuchsia.h @@ -0,0 +1,262 @@ +#ifndef VULKAN_FUCHSIA_H_ +#define VULKAN_FUCHSIA_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_FUCHSIA_imagepipe_surface is a preprocessor guard. Do not pass it to API calls. 
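+// Like the other platform-specific WSI headers, the typedefs and structures in
+// this section are always declared, while the function prototypes below are
+// compiled only when VK_NO_PROTOTYPES is left undefined. A hedged editorial
+// sketch of the loader-based alternative, using the PFN_ typedef declared
+// below:
+//   PFN_vkCreateImagePipeSurfaceFUCHSIA pfnCreateImagePipeSurface =
+//       (PFN_vkCreateImagePipeSurfaceFUCHSIA)vkGetInstanceProcAddr(
+//           instance, "vkCreateImagePipeSurfaceFUCHSIA" );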
+#define VK_FUCHSIA_imagepipe_surface 1 +#define VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION 1 +#define VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME "VK_FUCHSIA_imagepipe_surface" +typedef VkFlags VkImagePipeSurfaceCreateFlagsFUCHSIA; +typedef struct VkImagePipeSurfaceCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkImagePipeSurfaceCreateFlagsFUCHSIA flags; + zx_handle_t imagePipeHandle; +} VkImagePipeSurfaceCreateInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateImagePipeSurfaceFUCHSIA)(VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImagePipeSurfaceFUCHSIA( + VkInstance instance, + const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_FUCHSIA_external_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_FUCHSIA_external_memory 1 +#define VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME "VK_FUCHSIA_external_memory" +typedef struct VkImportMemoryZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + zx_handle_t handle; +} VkImportMemoryZirconHandleInfoFUCHSIA; + +typedef struct VkMemoryZirconHandlePropertiesFUCHSIA { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryZirconHandlePropertiesFUCHSIA; + +typedef struct VkMemoryGetZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetZirconHandleInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandleFUCHSIA)(VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandleFUCHSIA( + VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, + zx_handle_t* pZirconHandle); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties); +#endif + + +// VK_FUCHSIA_external_semaphore is a preprocessor guard. Do not pass it to API calls. 
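+// Exporting a semaphore payload as a Zircon handle mirrors the external-memory
+// functions above. A hedged editorial sketch (error handling omitted; the
+// structure and entry point are declared below):
+//   VkSemaphoreGetZirconHandleInfoFUCHSIA getInfo = {
+//       VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA, NULL,
+//       semaphore, VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA };
+//   zx_handle_t event;
+//   vkGetSemaphoreZirconHandleFUCHSIA( device, &getInfo, &event );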
+#define VK_FUCHSIA_external_semaphore 1 +#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_FUCHSIA_external_semaphore" +typedef struct VkImportSemaphoreZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + zx_handle_t zirconHandle; +} VkImportSemaphoreZirconHandleInfoFUCHSIA; + +typedef struct VkSemaphoreGetZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetZirconHandleInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, + zx_handle_t* pZirconHandle); +#endif + + +// VK_FUCHSIA_buffer_collection is a preprocessor guard. Do not pass it to API calls. +#define VK_FUCHSIA_buffer_collection 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferCollectionFUCHSIA) +#define VK_FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION 2 +#define VK_FUCHSIA_BUFFER_COLLECTION_EXTENSION_NAME "VK_FUCHSIA_buffer_collection" +typedef VkFlags VkImageFormatConstraintsFlagsFUCHSIA; + +typedef enum VkImageConstraintsInfoFlagBitsFUCHSIA { + VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_RARELY_FUCHSIA = 0x00000001, + VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_OFTEN_FUCHSIA = 0x00000002, + VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_RARELY_FUCHSIA = 0x00000004, + VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_OFTEN_FUCHSIA = 0x00000008, + VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA = 0x00000010, + VK_IMAGE_CONSTRAINTS_INFO_FLAG_BITS_MAX_ENUM_FUCHSIA = 0x7FFFFFFF +} VkImageConstraintsInfoFlagBitsFUCHSIA; +typedef VkFlags VkImageConstraintsInfoFlagsFUCHSIA; +typedef struct VkBufferCollectionCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + zx_handle_t collectionToken; +} VkBufferCollectionCreateInfoFUCHSIA; + +typedef struct VkImportMemoryBufferCollectionFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCollectionFUCHSIA collection; + uint32_t index; +} VkImportMemoryBufferCollectionFUCHSIA; + +typedef struct VkBufferCollectionImageCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCollectionFUCHSIA collection; + uint32_t index; +} VkBufferCollectionImageCreateInfoFUCHSIA; + +typedef struct VkBufferCollectionConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + uint32_t minBufferCount; + uint32_t maxBufferCount; + uint32_t minBufferCountForCamping; + uint32_t minBufferCountForDedicatedSlack; + uint32_t minBufferCountForSharedSlack; +} VkBufferCollectionConstraintsInfoFUCHSIA; + +typedef struct VkBufferConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCreateInfo createInfo; + VkFormatFeatureFlags requiredFormatFeatures; + VkBufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints; +} 
VkBufferConstraintsInfoFUCHSIA; + +typedef struct VkBufferCollectionBufferCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCollectionFUCHSIA collection; + uint32_t index; +} VkBufferCollectionBufferCreateInfoFUCHSIA; + +typedef struct VkSysmemColorSpaceFUCHSIA { + VkStructureType sType; + const void* pNext; + uint32_t colorSpace; +} VkSysmemColorSpaceFUCHSIA; + +typedef struct VkBufferCollectionPropertiesFUCHSIA { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; + uint32_t bufferCount; + uint32_t createInfoIndex; + uint64_t sysmemPixelFormat; + VkFormatFeatureFlags formatFeatures; + VkSysmemColorSpaceFUCHSIA sysmemColorSpaceIndex; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkBufferCollectionPropertiesFUCHSIA; + +typedef struct VkImageFormatConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkImageCreateInfo imageCreateInfo; + VkFormatFeatureFlags requiredFormatFeatures; + VkImageFormatConstraintsFlagsFUCHSIA flags; + uint64_t sysmemPixelFormat; + uint32_t colorSpaceCount; + const VkSysmemColorSpaceFUCHSIA* pColorSpaces; +} VkImageFormatConstraintsInfoFUCHSIA; + +typedef struct VkImageConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + uint32_t formatConstraintsCount; + const VkImageFormatConstraintsInfoFUCHSIA* pFormatConstraints; + VkBufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints; + VkImageConstraintsInfoFlagsFUCHSIA flags; +} VkImageConstraintsInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferCollectionFUCHSIA)(VkDevice device, const VkBufferCollectionCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferCollectionFUCHSIA* pCollection); +typedef VkResult (VKAPI_PTR *PFN_vkSetBufferCollectionImageConstraintsFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkImageConstraintsInfoFUCHSIA* pImageConstraintsInfo); +typedef VkResult (VKAPI_PTR *PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo); +typedef void (VKAPI_PTR *PFN_vkDestroyBufferCollectionFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetBufferCollectionPropertiesFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, VkBufferCollectionPropertiesFUCHSIA* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferCollectionFUCHSIA( + VkDevice device, + const VkBufferCollectionCreateInfoFUCHSIA* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBufferCollectionFUCHSIA* pCollection); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetBufferCollectionImageConstraintsFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkImageConstraintsInfoFUCHSIA* pImageConstraintsInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetBufferCollectionBufferConstraintsFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBufferCollectionFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetBufferCollectionPropertiesFUCHSIA( + 
VkDevice device, + VkBufferCollectionFUCHSIA collection, + VkBufferCollectionPropertiesFUCHSIA* pProperties); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_funcs.hpp b/MacroLibX/third_party/vulkan/vulkan_funcs.hpp new file mode 100644 index 0000000..a35d5e1 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_funcs.hpp @@ -0,0 +1,23610 @@ +// Copyright 2015-2023 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#ifndef VULKAN_FUNCS_HPP +#define VULKAN_FUNCS_HPP + +namespace VULKAN_HPP_NAMESPACE +{ + + //=========================== + //=== COMMAND Definitions === + //=========================== + + //=== VK_VERSION_1_0 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Instance * pInstance, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateInstance( reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pInstance ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type createInstance( + const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Instance instance; + VkResult result = + d.vkCreateInstance( reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &instance ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); + + return createResultValueType( static_cast( result ), instance ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type createInstanceUnique( + const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Instance instance; + VkResult result = + d.vkCreateInstance( reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &instance ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" ); + + return createResultValueType( static_cast( result ), + UniqueHandle( instance, ObjectDestroy( allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyInstance( m_instance, reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyInstance( m_instance, + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, + VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::enumeratePhysicalDevices( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector physicalDevices; + uint32_t physicalDeviceCount; + VkResult result; + do + { + result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ); + if ( ( result == VK_SUCCESS ) && physicalDeviceCount ) + { + physicalDevices.resize( physicalDeviceCount ); + result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); + VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); + if ( physicalDeviceCount < physicalDevices.size() ) + { + physicalDevices.resize( physicalDeviceCount ); + } + return createResultValueType( static_cast( result ), physicalDevices ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector physicalDevices( physicalDeviceAllocator ); + uint32_t physicalDeviceCount; + VkResult result; + do + { + result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ); + if ( ( result == VK_SUCCESS ) && physicalDeviceCount ) + { + physicalDevices.resize( physicalDeviceCount ); + result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); + VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); + if ( physicalDeviceCount < physicalDevices.size() ) + { + physicalDevices.resize( physicalDeviceCount ); + } + return createResultValueType( static_cast( result ), physicalDevices ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures + PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; + d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( &features ) ); + + return features; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE 
void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties + PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; + d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return formatProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + reinterpret_cast( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; + VkResult result = d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + reinterpret_cast( &imageFormatProperties ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); + + return createResultValueType( static_cast( result ), imageFormatProperties ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties + PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; + d.vkGetPhysicalDeviceProperties( 
m_physicalDevice, reinterpret_cast( &properties ) ); + + return properties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector queueFamilyProperties( queueFamilyPropertiesAllocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties + PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + + return memoryProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetInstanceProcAddr( m_instance, pName ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetDeviceProcAddr( m_device, pName ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Device * pDevice, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDevice( m_physicalDevice, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDevice ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::createDevice( + const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Device device; + VkResult result = + d.vkCreateDevice( m_physicalDevice, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &device ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); + + return createResultValueType( static_cast( result ), device ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Device device; + VkResult result = + d.vkCreateDevice( m_physicalDevice, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &device ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); + + return createResultValueType( static_cast( result ), + UniqueHandle( device, ObjectDestroy( allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
+ { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDevice( m_device, reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyDevice( m_device, + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName, Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties; + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkEnumerateInstanceExtensionProperties( + layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties( extensionPropertiesAllocator ); + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkEnumerateInstanceExtensionProperties( + layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties; + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties( extensionPropertiesAllocator ); + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + enumerateInstanceLayerProperties( Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties; + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties( layerPropertiesAllocator ); + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties(
+    uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename LayerPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
+    PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<LayerProperties, LayerPropertiesAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  template <typename LayerPropertiesAllocator, typename Dispatch,
+            typename std::enable_if<std::is_same<typename LayerPropertiesAllocator::value_type, LayerProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
+    PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
+    Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Queue queue;
+    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );
+
+    return queue;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit(
+    uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit(
+    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkQueueWaitIdle( m_queue );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkDeviceWaitIdle( m_device );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
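Queue::submit and the waitIdle wrappers above are thin, resultCheck'ed forwards to the C entry points. A sketch of a fire-and-wait submission, assuming exceptions are enabled and that `queue` and a recorded `cmd` already exist (both names are placeholders):

#include <vulkan/vulkan.hpp>

// Hypothetical helper: submits one pre-recorded command buffer and blocks.
void submitAndWait( vk::Queue queue, vk::CommandBuffer cmd )
{
  vk::SubmitInfo info{};
  info.commandBufferCount = 1;
  info.pCommandBuffers    = &cmd;
  queue.submit( info, nullptr );  // throws vk::SystemError on failure
  queue.waitIdle();               // coarse sync; per-submit fences are cheaper
}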
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo,
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeviceMemory *>( pMemory ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type
+    Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
+                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    VkResult result = d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkDeviceMemory *>( &memory ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memory );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type
+    Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
+                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    VkResult result = d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkDeviceMemory *>( &memory ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>( memory, ObjectFree<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset,
+    VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ),
+      static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), ppData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+    VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    void * pData;
+    VkResult result = d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ),
+      static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), &pData );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pData );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges(
+    uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
+                                     Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges(
+    uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
+                                          Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+    VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
+    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
+
+    return committedMemoryInBytes;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory(
+    VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result =
+      d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory(
+    VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result =
+      d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer,
+    VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
+    Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+
+    return memoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
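Together, getBufferMemoryRequirements, allocateMemory and bindBufferMemory form the classic allocate-and-bind flow. A sketch under stated assumptions: getMemoryProperties is the standard vulkan.hpp wrapper for vkGetPhysicalDeviceMemoryProperties (it is not part of the excerpt above), and error handling plus a real memory-type fallback strategy are omitted:

#include <vulkan/vulkan.hpp>
#include <cstdint>

// Hypothetical helper: picks the first compatible memory type and binds.
vk::DeviceMemory allocateAndBind( vk::PhysicalDevice gpu, vk::Device device, vk::Buffer buffer, vk::MemoryPropertyFlags wanted )
{
  vk::MemoryRequirements req = device.getBufferMemoryRequirements( buffer );
  vk::PhysicalDeviceMemoryProperties props = gpu.getMemoryProperties();
  uint32_t index = 0;
  for ( ; index < props.memoryTypeCount; ++index )
    if ( ( req.memoryTypeBits & ( 1u << index ) ) && ( props.memoryTypes[index].propertyFlags & wanted ) == wanted )
      break;
  vk::DeviceMemory memory = device.allocateMemory( vk::MemoryAllocateInfo( req.size, index ) );
  device.bindBufferMemory( buffer, memory, 0 );
  return memory;
}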
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
+    VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
+    Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+
+    return memoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t * pSparseMemoryRequirementCount,
+    VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount,
+      reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
+    Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount,
+      reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch,
+            typename std::enable_if<std::is_same<typename SparseImageMemoryRequirementsAllocator::value_type, SparseImageMemoryRequirements>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
+    Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
+      SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements( sparseImageMemoryRequirementsAllocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount,
+      reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type,
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+    uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ),
+      static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ),
+      pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageFormatPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
+    PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type,
+      VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+      Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ),
+      static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ),
+      &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ),
+      static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ),
+      &propertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  template <typename SparseImageFormatPropertiesAllocator, typename Dispatch,
+            typename std::enable_if<std::is_same<typename SparseImageFormatPropertiesAllocator::value_type, SparseImageFormatProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
+    PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type,
+      VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+      SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ),
+      static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ),
+      &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ),
+      static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ),
+      &propertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,
+    VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse(
+    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result =
+      d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFence *>( pFence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence(
+    const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    VkResult result = d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkFence *>( &fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::createFenceUnique(
+    const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    VkResult result = d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkFence *>( &fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence * pFences,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence * pFences,
+    VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+    Device::waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
+                           VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result =
+      d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
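Note that waitForFences reports eSuccess or eTimeout through its return value instead of throwing. A sketch of the usual per-submit fence round trip, assuming `device`, `queue` and a recorded `cmd` (placeholder names):

#include <vulkan/vulkan.hpp>

// Hypothetical frame-style usage of the fence wrappers above.
void submitWithFence( vk::Device device, vk::Queue queue, vk::CommandBuffer cmd )
{
  vk::Fence fence = device.createFence( {} );   // created unsignaled
  vk::SubmitInfo info{};
  info.commandBufferCount = 1;
  info.pCommandBuffers    = &cmd;
  queue.submit( info, fence );
  while ( device.waitForFences( fence, VK_TRUE, 1000000 ) == vk::Result::eTimeout )
    ;                                           // re-poll with a 1 ms timeout
  device.resetFences( fence );                  // ready for reuse
  device.destroyFence( fence );
}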
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSemaphore *>( pSemaphore ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type
+    Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
+                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    VkResult result = d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkSemaphore *>( &semaphore ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), semaphore );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type
+    Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
+                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    VkResult result = d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkSemaphore *>( &semaphore ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>( semaphore, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
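Binary semaphores order work between submissions on the GPU timeline. A sketch using the setter chains on vk::SubmitInfo, assuming `cmdA` and `cmdB` are recorded command buffers (placeholder names):

#include <vulkan/vulkan.hpp>

// Hypothetical two-submit chain ordered by one binary semaphore.
void chainedSubmit( vk::Device device, vk::Queue queue, vk::CommandBuffer cmdA, vk::CommandBuffer cmdB )
{
  vk::Semaphore sem = device.createSemaphore( {} );
  vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eTopOfPipe;

  vk::SubmitInfo first{};
  first.setCommandBuffers( cmdA ).setSignalSemaphores( sem );   // signals sem
  vk::SubmitInfo second{};
  second.setWaitSemaphores( sem ).setWaitDstStageMask( waitStage ).setCommandBuffers( cmdB );

  queue.submit( { first, second }, nullptr );
  queue.waitIdle();
  device.destroySemaphore( sem );
}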
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Event * pEvent, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkEvent *>( pEvent ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent(
+    const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Event event;
+    VkResult result = d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkEvent *>( &event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), event );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type Device::createEventUnique(
+    const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Event event;
+    VkResult result = d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkEvent *>( &event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>( event, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus",
+                 { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event,
+    Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkSetEvent( m_device, static_cast<VkEvent>( event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkResetEvent( m_device, static_cast<VkEvent>( event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
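getEventStatus, like getFenceStatus, reports its state through the returned vk::Result rather than throwing. A host-side round trip under stated assumptions (in real code events are usually signaled from command buffers via vkCmdSetEvent):

#include <vulkan/vulkan.hpp>

// Hypothetical host-only event round trip.
bool eventRoundTrip( vk::Device device )
{
  vk::Event event = device.createEvent( {} );
  device.setEvent( event );   // host signal; throws on failure
  bool wasSet = ( device.getEventStatus( event ) == vk::Result::eEventSet );
  device.resetEvent( event );
  device.destroyEvent( event );
  return wasSet;
}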
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type
+    Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,
+                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+    VkResult result = d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkQueryPool *>( &queryPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), queryPool );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type
+    Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,
+                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+    VkResult result = d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkQueryPool *>( &queryPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>( queryPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery,
+    uint32_t queryCount, size_t dataSize, void * pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData,
+      static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<DataType, DataTypeAllocator>>
+    Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize,
+                                 VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+    VkResult result = d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount,
+      data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ), static_cast<VkDeviceSize>( stride ),
+      static_cast<VkQueryResultFlags>( flags ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+
+    return ResultValue<std::vector<DataType, DataTypeAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<DataType> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery,
+    uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    DataType data;
+    VkResult result = d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, sizeof( DataType ),
+      reinterpret_cast<void *>( &data ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+
+    return ResultValue<DataType>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
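Because a query can legitimately be eNotReady, getQueryPoolResults returns a ResultValue pair instead of unwrapping. A sketch reading back two timestamps previously written with vkCmdWriteTimestamp into queries 0 and 1 of an assumed `pool`:

#include <vulkan/vulkan.hpp>
#include <cstdint>

// Hypothetical timestamp-delta readback; eWait blocks until results are ready.
uint64_t elapsedTicks( vk::Device device, vk::QueryPool pool )
{
  auto rv = device.getQueryPoolResults<uint64_t>(
    pool, 0, 2, 2 * sizeof( uint64_t ), sizeof( uint64_t ),
    vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );
  return rv.value[1] - rv.value[0];  // eWait guarantees rv.result == eSuccess
}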
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo,
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Buffer * pBuffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkBuffer *>( pBuffer ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer(
+    const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    VkResult result = d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkBuffer *>( &buffer ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), buffer );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type Device::createBufferUnique(
+    const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    VkResult result = d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkBuffer *>( &buffer ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>( buffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,
+    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::BufferView * pView, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkBufferView *>( pView ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type
+    Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,
+                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::BufferView view;
+    VkResult result = d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkBufferView *>( &view ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type
+    Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo,
+                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::BufferView view;
+    VkResult result = d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkBufferView *>( &view ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView,
+    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView,
+    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
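The *Unique variants wrap the same create calls in a UniqueHandle whose deleter calls the matching destroy. A sketch, with the usage flag as a placeholder:

#include <vulkan/vulkan.hpp>

// Hypothetical RAII staging-buffer factory; the buffer is destroyed
// automatically when the returned handle goes out of scope.
vk::UniqueBuffer makeStagingBuffer( vk::Device device, vk::DeviceSize size )
{
  vk::BufferCreateInfo info( {}, size, vk::BufferUsageFlagBits::eTransferSrc );
  return device.createBufferUnique( info );
}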
); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Image * pImage, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateImage( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pImage ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::createImage( + const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Image image; + VkResult result = + d.vkCreateImage( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &image ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); + + return createResultValueType( static_cast( result ), image ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::createImageUnique( + const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Image image; + VkResult result = + d.vkCreateImage( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &image ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" ); + + return createResultValueType( static_cast( result ), + UniqueHandle( image, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyImage( m_device, + static_cast( image ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image,
+      const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ),
+        reinterpret_cast<const VkImageSubresource *>( pSubresource ), reinterpret_cast<VkSubresourceLayout *>( pLayout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout(
+      VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
+    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ),
+        reinterpret_cast<const VkImageSubresource *>( &subresource ), reinterpret_cast<VkSubresourceLayout *>( &layout ) );
+
+    return layout;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::ImageView * pView, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkImageView *>( pView ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type Device::createImageView(
+      const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::ImageView view;
+    VkResult result = d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkImageView *>( &view ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type Device::createImageViewUnique(
+      const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::ImageView view;
+    VkResult result = d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkImageView *>( &view ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+        UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
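+
+  // Usage sketch (not part of the generated header): a 2D color view over an existing image;
+  // `device` and `image` are assumed valid:
+  //
+  //   vk::ImageViewCreateInfo info( {}, image, vk::ImageViewType::e2D, vk::Format::eR8G8B8A8Unorm,
+  //                                 {}, vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
+  //   vk::ImageView view = device.createImageView( info );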
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type Device::createShaderModule(
+      const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
+    VkResult result = d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkShaderModule *>( &shaderModule ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shaderModule );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type Device::createShaderModuleUnique(
+      const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
+    VkResult result = d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkShaderModule *>( &shaderModule ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+        UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>( shaderModule, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
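+
+  // Usage sketch (not part of the generated header): wrapping pre-compiled SPIR-V in a shader
+  // module; the unique variant destroys the module automatically. `spirv` is an assumed
+  // std::vector<uint32_t> holding valid SPIR-V words:
+  //
+  //   vk::ShaderModuleCreateInfo info( {}, spirv.size() * sizeof( uint32_t ), spirv.data() );
+  //   vk::UniqueShaderModule module = device.createShaderModuleUnique( info );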
"::Device::createShaderModuleUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( shaderModule, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyShaderModule( m_device, + static_cast( shaderModule ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreatePipelineCache( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelineCache ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; + VkResult result = + d.vkCreatePipelineCache( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipelineCache ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); + + return createResultValueType( static_cast( result ), pipelineCache ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, + 
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Uint8_tAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+    Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<uint8_t, Uint8_tAllocator> data;
+    size_t dataSize;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
+    VULKAN_HPP_ASSERT( dataSize <= data.size() );
+    if ( dataSize < data.size() )
+    {
+      data.resize( dataSize );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
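+
+  // Note on the loop above: vkGetPipelineCacheData uses the usual two-call size-query idiom, and
+  // the wrapper retries while VK_INCOMPLETE is returned (the cache can grow between the size
+  // query and the fill), so callers simply receive the final byte vector:
+  //
+  //   std::vector<uint8_t> blob = device.getPipelineCacheData( cache );  // e.g. persist to disk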
+
+  template <typename Uint8_tAllocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+    Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
+    size_t dataSize;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
+    VULKAN_HPP_ASSERT( dataSize <= data.size() );
+    if ( dataSize < data.size() )
+    {
+      data.resize( dataSize );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
+      uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkMergePipelineCaches(
+      m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device,
+        static_cast<VkPipelineCache>( pipelineCache ),
+        createInfoCount,
+        reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
+        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+        reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PipelineAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
+    Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateGraphicsPipelines( m_device,
+        static_cast<VkPipelineCache>( pipelineCache ),
+        createInfos.size(),
+        reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  template <typename PipelineAllocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
+    Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+    VkResult result = d.vkCreateGraphicsPipelines( m_device,
+        static_cast<VkPipelineCache>( pipelineCache ),
+        createInfos.size(),
+        reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
+    Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateGraphicsPipelines( m_device,
+        static_cast<VkPipelineCache>( pipelineCache ),
+        1,
+        reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename PipelineAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+    Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateGraphicsPipelines( m_device,
+        static_cast<VkPipelineCache>( pipelineCache ),
+        createInfos.size(),
+        reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  template <typename Dispatch,
+            typename PipelineAllocator,
+            typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+    Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateGraphicsPipelines( m_device,
+        static_cast<VkPipelineCache>( pipelineCache ),
+        createInfos.size(),
+        reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
+    Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateGraphicsPipelines( m_device,
+        static_cast<VkPipelineCache>( pipelineCache ),
+        1,
+        reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
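+
+  // Usage sketch (not part of the generated header): single-pipeline creation returns a
+  // ResultValue rather than throwing on ePipelineCompileRequiredEXT, so both fields are checked
+  // explicitly; `cache` and `createInfo` are assumed to be set up elsewhere:
+  //
+  //   auto rv = device.createGraphicsPipelineUnique( cache, createInfo );
+  //   assert( rv.result == vk::Result::eSuccess );
+  //   vk::UniquePipeline pipeline = std::move( rv.value );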
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateComputePipelines( m_device,
+        static_cast<VkPipelineCache>( pipelineCache ),
+        createInfoCount,
+        reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ),
+        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+        reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PipelineAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
+    Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateComputePipelines( m_device,
+        static_cast<VkPipelineCache>( pipelineCache ),
+        createInfos.size(),
+        reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  template <typename PipelineAllocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
+    Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+    VkResult result = d.vkCreateComputePipelines( m_device,
+        static_cast<VkPipelineCache>( pipelineCache ),
+        createInfos.size(),
+        reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
+    Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateComputePipelines( m_device,
+        static_cast<VkPipelineCache>( pipelineCache ),
+        1,
+        reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename PipelineAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+    Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateComputePipelines( m_device,
+        static_cast<VkPipelineCache>( pipelineCache ),
+        createInfos.size(),
+        reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  template <typename Dispatch,
+            typename PipelineAllocator,
+            typename std::enable_if<std::is_same<typename PipelineAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
+    Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateComputePipelines( m_device,
+        static_cast<VkPipelineCache>( pipelineCache ),
+        createInfos.size(),
+        reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
+    Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
+      const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateComputePipelines( m_device,
+        static_cast<VkPipelineCache>( pipelineCache ),
+        1,
+        reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+
+    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
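+
+  // Usage sketch (not part of the generated header): compute pipelines follow the same pattern
+  // with a single shader stage; `shaderModule` and `layout` are assumed valid:
+  //
+  //   vk::PipelineShaderStageCreateInfo stage( {}, vk::ShaderStageFlagBits::eCompute, shaderModule, "main" );
+  //   vk::ComputePipelineCreateInfo info( {}, stage, layout );
+  //   vk::Pipeline pipeline = device.createComputePipeline( nullptr, info ).value;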
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type Device::createPipelineLayout(
+      const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+    VkResult result = d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineLayout );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type Device::createPipelineLayoutUnique(
+      const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+    VkResult result = d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+        UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( pipelineLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Sampler * pSampler, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSampler *>( pSampler ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler(
+      const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Sampler sampler;
+    VkResult result = d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkSampler *>( &sampler ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), sampler );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type Device::createSamplerUnique(
+      const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Sampler sampler;
+    VkResult result = d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkSampler *>( &sampler ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+        UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>( sampler, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
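+
+  // Usage sketch (not part of the generated header): all SamplerCreateInfo fields past the
+  // filters keep their defaults here; `device` is an assumed-valid vk::Device:
+  //
+  //   vk::SamplerCreateInfo info( {}, vk::Filter::eLinear, vk::Filter::eLinear );
+  //   vk::Sampler sampler = device.createSampler( info );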
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type Device::createDescriptorSetLayout(
+      const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
+    VkResult result = d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), setLayout );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type
+    Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
+    VkResult result = d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+        UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>( setLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
"::Device::createDescriptorSetLayoutUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( setLayout, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorSetLayout( + m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyDescriptorSetLayout( + m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorSetLayout( + m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyDescriptorSetLayout( + m_device, + static_cast( descriptorSetLayout ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDescriptorPool( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDescriptorPool ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; + VkResult result = + d.vkCreateDescriptorPool( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorPool ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); + + return createResultValueType( static_cast( result ), descriptorPool ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD 
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+      VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+      VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,
+      VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAllocateDescriptorSets(
+      m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DescriptorSetAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type
+    Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount );
+    VkResult result = d.vkAllocateDescriptorSets(
+      m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
+  }
+
+  template <typename DescriptorSetAllocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename DescriptorSetAllocator::value_type, VULKAN_HPP_NAMESPACE::DescriptorSet>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type
+    Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
+      DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator );
+    VkResult result = d.vkAllocateDescriptorSets(
+      m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename DescriptorSetAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
+    Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+    VkResult result = d.vkAllocateDescriptorSets(
+      m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
+    uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
+    PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+    for ( auto const & descriptorSet : descriptorSets )
+    {
+      uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
+  }
+
+  template <typename Dispatch,
+            typename DescriptorSetAllocator,
+            typename std::enable_if<
+              std::is_same<typename DescriptorSetAllocator::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
+    Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
+      DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+    VkResult result = d.vkAllocateDescriptorSets(
+      m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator );
+    uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
+    PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+    for ( auto const & descriptorSet : descriptorSets )
+    {
+      uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+      uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkFreeDescriptorSets(
+      m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkFreeDescriptorSets(
+      m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+      uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkFreeDescriptorSets(
+      m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkFreeDescriptorSets(
+      m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount,
+      const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, uint32_t descriptorCopyCount,
+      const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkUpdateDescriptorSets( m_device,
+        descriptorWriteCount,
+        reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ),
+        descriptorCopyCount,
+        reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSets(
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkUpdateDescriptorSets( m_device,
+        descriptorWrites.size(),
+        reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
+        descriptorCopies.size(),
+        reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
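+
+  // Usage sketch (not part of the generated header): allocating one set from the pool above and
+  // pointing binding 0 at a uniform buffer; `layout` and `buffer` are assumed valid:
+  //
+  //   vk::DescriptorSetAllocateInfo ai( pool, 1, &layout );
+  //   vk::DescriptorSet set = device.allocateDescriptorSets( ai ).front();
+  //   vk::DescriptorBufferInfo bi( buffer, 0, VK_WHOLE_SIZE );
+  //   vk::WriteDescriptorSet write( set, 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bi );
+  //   device.updateDescriptorSets( write, {} );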
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type Device::createFramebuffer(
+      const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+    VkResult result = d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), framebuffer );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type Device::createFramebufferUnique(
+      const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+    VkResult result = d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+        UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( framebuffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
+      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
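+
+  // Usage sketch (not part of the generated header): a single-attachment framebuffer; the
+  // attachment views must match the render pass it is used with. `renderPass`, `colorView`,
+  // `width` and `height` are assumed to exist:
+  //
+  //   vk::FramebufferCreateInfo info( {}, renderPass, 1, &colorView, width, height, 1 );
+  //   vk::Framebuffer framebuffer = device.createFramebuffer( info );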
+  // Device::destroy( Framebuffer, ... ) and the Optional<const AllocationCallbacks> variant duplicate the destroyFramebuffer overload above.
+
+  // Device::createRenderPass also exists as a raw overload forwarding ( pCreateInfo, pAllocator, pRenderPass ) to vkCreateRenderPass.
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    VkResult result = d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // Device::createRenderPassUnique: same call, result wrapped in UniqueHandle<RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ).
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // destroyRenderPass and the matching Device::destroy( RenderPass, ... ) overloads forward to vkDestroyRenderPass exactly as the framebuffer variants do.
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    VULKAN_HPP_NAMESPACE::Extent2D granularity;
+    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
+    return granularity;
+  }
+
+  // A raw overload of getRenderAreaGranularity writes through an Extent2D * out-parameter, and createCommandPool also exists in raw-pointer form.
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
+    VkResult result = d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCommandPool *>( &commandPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandPool );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
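A short usage sketch (illustrative only; `device` and `queueFamilyIndex` are assumed to exist) showing the enhanced createCommandPool overload just reconstructed:

```cpp
// Assumptions: vk::Device device, uint32_t queueFamilyIndex.
vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex );

// Enhanced mode returns the handle directly and throws on failure.
vk::CommandPool pool = device.createCommandPool( poolInfo );
// ... allocate and record command buffers ...
device.destroyCommandPool( pool );
```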
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
+    VkResult result = d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCommandPool *>( &commandPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" );
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( commandPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // destroyCommandPool and Device::destroy( CommandPool, ... ) forward to vkDestroyCommandPool with the usual raw-pointer and Optional<AllocationCallbacks> overload pair.
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    VkResult result = d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  // Device::allocateCommandBuffers also exists as a raw overload forwarding ( pAllocateInfo, pCommandBuffers ) to vkAllocateCommandBuffers.
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename CommandBufferAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount );
+    VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  // A CommandBufferAllocator-parameter variant repeats this call, and both allocateCommandBuffersUnique variants wrap each handle in UniqueHandle<CommandBuffer, Dispatch> with a PoolFree<Device, CommandPool, Dispatch> deleter bound to allocateInfo.commandPool, mirroring allocateDescriptorSetsUnique above.
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+  }
+
+  // An ArrayProxy<const CommandBuffer> overload and the ( Device::free ) aliases forward to the same entry point, and CommandBuffer::begin also exists in raw-pointer form.
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    VkResult result = d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
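An illustrative sketch of the smart-handle allocation path (not part of the diff; `device` and `pool` are assumed handles):

```cpp
// Assumptions: vk::Device device, vk::CommandPool pool.
vk::CommandBufferAllocateInfo allocInfo( pool, vk::CommandBufferLevel::ePrimary, 1 );
std::vector<vk::UniqueCommandBuffer> buffers = device.allocateCommandBuffersUnique( allocInfo );

vk::CommandBuffer cmd = *buffers.front();
cmd.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );
// ... record commands ...
cmd.end();
// Each vk::UniqueCommandBuffer returns its buffer to `pool` on destruction (PoolFree deleter).
```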
"::CommandBuffer::begin" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::end( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkEndCommandBuffer( m_commandBuffer ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewports ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast( viewports.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, + uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast( pScissors ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast( scissors.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorSets( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + descriptorSetCount, + reinterpret_cast( pDescriptorSets ), + dynamicOffsetCount, + pDynamicOffsets ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void 
CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dynamicOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdBindDescriptorSets( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + descriptorSets.size(), + reinterpret_cast( descriptorSets.data() ), + dynamicOffsets.size(), + dynamicOffsets.data() ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( indexType ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindVertexBuffers( + m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindVertexBuffers( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::draw( + uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
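A minimal recording sketch using the wrappers above (illustrative only; `cmd` is a command buffer in the recording state, and `pipeline` and `vertexBuffer` are assumed handles):

```cpp
// Assumptions: vk::CommandBuffer cmd (recording), vk::Pipeline pipeline, vk::Buffer vertexBuffer.
cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, pipeline );
cmd.setViewport( 0, vk::Viewport( 0.0f, 0.0f, 1280.0f, 720.0f, 0.0f, 1.0f ) );
cmd.setScissor( 0, vk::Rect2D( { 0, 0 }, { 1280, 720 } ) );
cmd.bindVertexBuffers( 0, vertexBuffer, { 0 } );  // ArrayProxy also accepts single elements
cmd.draw( 3, 1, 0, 0 );                           // one triangle, one instance
```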
+
+  // drawIndirect / drawIndexedIndirect forward ( buffer, offset, drawCount, stride ) to vkCmdDrawIndirect / vkCmdDrawIndexedIndirect; dispatch and dispatchIndirect wrap vkCmdDispatch and vkCmdDispatchIndirect the same way.
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // copyImage, blitImage (with an extra Filter parameter), copyBufferToImage and copyImageToBuffer each come as the same raw-pointer / ArrayProxy pair over vkCmdCopyImage, vkCmdBlitImage, vkCmdCopyBufferToImage and vkCmdCopyImageToBuffer.
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), data.size() * sizeof( DataType ), reinterpret_cast<const void *>( data.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue & color, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue *>( &color ), ranges.size(), reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
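A staging-upload sketch using the copyBufferToImage overload summarized above (illustrative only; `cmd`, `staging`, `texture`, `width` and `height` are assumptions):

```cpp
// Assumptions: vk::CommandBuffer cmd (recording), vk::Buffer staging (host-visible),
// vk::Image texture already transitioned to eTransferDstOptimal, uint32_t width, height.
vk::BufferImageCopy region{};
region.imageSubresource = vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );
region.imageExtent      = vk::Extent3D( width, height, 1 );

cmd.copyBufferToImage( staging, texture, vk::ImageLayout::eTransferDstOptimal, region );
```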
+
+  // clearDepthStencilImage (ClearDepthStencilValue instead of ClearColorValue), clearAttachments (ClearAttachment / ClearRect pairs) and resolveImage (ImageResolve regions) follow the same raw-pointer / ArrayProxy scheme; setEvent and resetEvent pass an Event plus PipelineStageFlags straight to vkCmdSetEvent / vkCmdResetEvent; waitEvents forwards the event list, both stage masks and the three barrier arrays to vkCmdWaitEvents.
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size(), reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // The raw pipelineBarrier overload takes explicit counts and pointers for the three barrier arrays.
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+  }
+
+  // endQuery, resetQueryPool and writeTimestamp are equally thin wrappers over vkCmdEndQuery, vkCmdResetQueryPool and vkCmdWriteTimestamp.
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
+  }
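A layout-transition sketch using the ArrayProxy pipelineBarrier overload, with empty proxies for the unused barrier arrays (illustrative only; `cmd` and `texture` are assumptions):

```cpp
// Assumptions: vk::CommandBuffer cmd (recording), vk::Image texture just written by a transfer.
vk::ImageMemoryBarrier barrier(
    vk::AccessFlagBits::eTransferWrite,                  // srcAccessMask
    vk::AccessFlagBits::eShaderRead,                     // dstAccessMask
    vk::ImageLayout::eTransferDstOptimal,                // oldLayout
    vk::ImageLayout::eShaderReadOnlyOptimal,             // newLayout
    VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED,    // no queue ownership transfer
    texture,
    vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );

cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
                     vk::PipelineStageFlagBits::eFragmentShader,
                     {},      // DependencyFlags
                     {}, {},  // no global or buffer barriers
                     barrier );
```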
+
+  // CommandBuffer::pushConstants also exists as a raw overload taking ( layout, stageFlags, offset, size, const void * pValues ).
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename ValuesType, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( ValuesType ), reinterpret_cast<const void *>( values.data() ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // A raw beginRenderPass overload takes a const RenderPassBeginInfo * instead of a reference.
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndRenderPass( m_commandBuffer );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  // The raw executeCommands overload forwards ( commandBufferCount, pCommandBuffers ) unchanged.
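A render-pass recording sketch combining beginRenderPass and the templated pushConstants overload (illustrative only; `cmd`, `renderPass`, `framebuffer` and `pipelineLayout` are assumptions):

```cpp
// Assumptions: vk::CommandBuffer cmd (recording), vk::RenderPass renderPass,
// vk::Framebuffer framebuffer, vk::PipelineLayout pipelineLayout.
vk::ClearValue clear( vk::ClearColorValue( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } ) );
vk::RenderPassBeginInfo rpBegin( renderPass, framebuffer, vk::Rect2D( { 0, 0 }, { 1280, 720 } ), 1, &clear );

cmd.beginRenderPass( rpBegin, vk::SubpassContents::eInline );
float tint[4] = { 1.0f, 0.5f, 0.25f, 1.0f };
cmd.pushConstants<float>( pipelineLayout, vk::ShaderStageFlagBits::eFragment, 0, tint );  // size derived from the array
// ... draw calls ...
cmd.endRenderPass();
```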
enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumerateInstanceVersion( pApiVersion ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type enumerateInstanceVersion( Dispatch const & d ) + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + uint32_t apiVersion; + VkResult result = d.vkEnumerateInstanceVersion( &apiVersion ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); + + return createResultValueType( static_cast( result ), apiVersion ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceGroupPeerMemoryFeatures( + m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags 
Device::getGroupPeerMemoryFeatures( + uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeatures( + m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( &peerMemoryFeatures ) ); + + return peerMemoryFeatures; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumeratePhysicalDeviceGroups( + m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + VkResult result; + do + { + result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ); + if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = d.vkEnumeratePhysicalDeviceGroups( + m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return createResultValueType( static_cast( result ), physicalDeviceGroupProperties ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector physicalDeviceGroupProperties( + physicalDeviceGroupPropertiesAllocator ); + uint32_t physicalDeviceGroupCount; + VkResult result; + do + { + result = d.vkEnumeratePhysicalDeviceGroups( m_instance, 
+  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch, typename std::enable_if<std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+    Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( physicalDeviceGroupPropertiesAllocator );
+    uint32_t physicalDeviceGroupCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
+    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
+    {
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+    Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+    Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
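+  // Usage sketch (illustrative): the StructureChain overload above lets callers pull
+  // extension structs through pNext in a type-safe way, e.g. dedicated-allocation info:
+  //
+  //   auto chain  = device.getImageMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>( { image } );
+  //   auto & reqs = chain.get<vk::MemoryRequirements2>();
+  //   bool dedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;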
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+    Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+    Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+    Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch, typename std::enable_if<std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+    Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+
+    return features;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+
+    return properties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
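+  // Usage sketch (illustrative): chaining feature structs to probe optional features
+  // in one call, instead of wiring pNext pointers by hand:
+  //
+  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan11Features>();
+  //   bool multiview = chain.get<vk::PhysicalDeviceVulkan11Features>().multiview;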
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
+    PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+
+    return formatProperties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+    PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
+    PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
+    PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+    PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+
+  template <typename QueueFamilyProperties2Allocator, typename Dispatch, typename std::enable_if<std::is_same<typename QueueFamilyProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+    PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+
+  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<StructureChain, StructureChainAllocator> structureChains;
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    structureChains.resize( queueFamilyPropertyCount );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      structureChains.resize( queueFamilyPropertyCount );
+    }
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+    }
+    return structureChains;
+  }
+  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch, typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
+    PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    structureChains.resize( queueFamilyPropertyCount );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      structureChains.resize( queueFamilyPropertyCount );
+    }
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+    }
+    return structureChains;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+
+    return memoryProperties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+    PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  template <typename SparseImageFormatProperties2Allocator, typename Dispatch, typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+    PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Queue queue;
+    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
+
+    return queue;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
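+  // Usage sketch (illustrative): vkGetDeviceQueue2 is the VK_VERSION_1_1 path for
+  // retrieving queues that were created with flags (e.g. protected queues):
+  //
+  //   vk::DeviceQueueInfo2 queueInfo( {}, queueFamilyIndex, 0 /* queueIndex */ );
+  //   vk::Queue queue = device.getQueue2( queueInfo );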
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
+    Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    VkResult result = d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
+    Device::createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    VkResult result = d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
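+  // Usage sketch (illustrative, assuming exceptions are enabled): the *Unique overload
+  // wraps the handle in a vk::UniqueSamplerYcbcrConversion whose destructor calls the
+  // matching destroy function above, so no manual cleanup call is needed:
+  //
+  //   vk::UniqueSamplerYcbcrConversion conv = device.createSamplerYcbcrConversionUnique( createInfo );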
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
+    Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    VkResult result = d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
+    Device::createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    VkResult result = d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const void *>( &data ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
+    PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
+    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+
+    return externalBufferProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
+    PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
+    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+
+    return externalFenceProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
+    PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
+    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+
+    return externalSemaphoreProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
+    Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
+    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+
+    return support;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+    Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
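+  // Usage sketch (illustrative): getDescriptorSetLayoutSupport lets an application ask
+  // whether a layout can be created before actually creating it:
+  //
+  //   vk::DescriptorSetLayoutCreateInfo layoutInfo( {}, bindings );
+  //   bool ok = device.getDescriptorSetLayoutSupport( layoutInfo ).supported;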
+  //=== VK_VERSION_1_2 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
+    Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    VkResult result = d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
+    Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    VkResult result = d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
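+  // Usage sketch (illustrative, exceptions enabled): createRenderPass2 consumes the
+  // *2 create-info structs, whose attachments and subpasses carry their own sType/pNext:
+  //
+  //   vk::RenderPassCreateInfo2 rpInfo( {}, attachments, subpasses, dependencies );
+  //   vk::RenderPass renderPass = device.createRenderPass2( rpInfo );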
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
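+  // Usage sketch (illustrative): the *2 recording commands mirror begin/next/end
+  // render pass, with the subpass parameters moved into dedicated structs:
+  //
+  //   cmd.beginRenderPass2( renderPassBegin, vk::SubpassBeginInfo( vk::SubpassContents::eInline ) );
+  //   cmd.nextSubpass2( vk::SubpassBeginInfo( vk::SubpassContents::eInline ), vk::SubpassEndInfo() );
+  //   cmd.endRenderPass2( vk::SubpassEndInfo() );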
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
+    Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    uint64_t value;
+    VkResult result = d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, uint64_t timeout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+    Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
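+  // Usage sketch (illustrative): with timeline semaphores, waitSemaphores reports
+  // vk::Result::eTimeout as a non-throwing success code, so callers test the result:
+  //
+  //   vk::SemaphoreWaitInfo waitInfo( {}, semaphore, waitValue );
+  //   if ( device.waitSemaphores( waitInfo, timeoutNs ) == vk::Result::eTimeout ) { /* retry */ }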
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+    return result;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+
+    return result;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_3 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolProperties( uint32_t * pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
+    PhysicalDevice::getToolProperties( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
+    uint32_t toolCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
+    VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+    if ( toolCount < toolProperties.size() )
+    {
+      toolProperties.resize( toolCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
+  }
+  template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch, typename std::enable_if<std::is_same<typename PhysicalDeviceToolPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
+    PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties( physicalDeviceToolPropertiesAllocator );
+    uint32_t toolCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
+    VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+    if ( toolCount < toolProperties.size() )
+    {
+      toolProperties.resize( toolCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreatePrivateDataSlot( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type
+    Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
+    VkResult result = d.vkCreatePrivateDataSlot( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), privateDataSlot );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type
+    Device::createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
+    VkResult result = d.vkCreatePrivateDataSlot( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
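+  // Usage sketch (illustrative, exceptions enabled): a private-data slot attaches a
+  // 64-bit payload to any Vulkan object, keyed by object type and raw handle value:
+  //
+  //   vk::PrivateDataSlot slot = device.createPrivateDataSlot( vk::PrivateDataSlotCreateInfo() );
+  //   device.setPrivateData( vk::ObjectType::eImage, imageHandle /* uint64_t */, slot, 42 );
+  //   uint64_t tag = device.getPrivateData( vk::ObjectType::eImage, imageHandle, slot );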
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    uint64_t data;
+    d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
+
+    return data;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetEvent2( m_commandBuffer, static_cast( event ), reinterpret_cast( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdSetEvent2( m_commandBuffer, static_cast( event ), reinterpret_cast( &dependencyInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResetEvent2( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWaitEvents2( + m_commandBuffer, eventCount, reinterpret_cast( pEvents ), reinterpret_cast( pDependencyInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); +# else + if ( events.size() != dependencyInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdWaitEvents2( m_commandBuffer, + events.size(), + reinterpret_cast( events.data() ), + reinterpret_cast( dependencyInfos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast( &dependencyInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2( uint32_t submitCount,
+                                                                const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
+                                                                VULKAN_HPP_NAMESPACE::Fence fence,
+                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2(
+    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
+                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
+                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
+  }
+#endif /*
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast( pCopyImageToBufferInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast( ©ImageToBufferInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast( pBlitImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast( &blitImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast( pResolveImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast( &resolveImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast( pRenderingInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast( &renderingInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRendering( m_commandBuffer ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCullMode( m_commandBuffer, static_cast( cullMode ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetFrontFace( m_commandBuffer, static_cast( frontFace ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast( primitiveTopology ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast( pViewports ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast( viewports.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void + CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast( pScissors ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast( scissors.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindVertexBuffers2( m_commandBuffer, + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ), + reinterpret_cast( pSizes ), + reinterpret_cast( pStrides ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef 
VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindVertexBuffers2( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ), + reinterpret_cast( strides.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthTestEnable( m_commandBuffer, static_cast( depthTestEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast( depthWriteEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast( depthCompareOp ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast( depthBoundsTestEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast( stencilTestEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilOp( m_commandBuffer, + static_cast( faceMask ), + static_cast( failOp ), + static_cast( passOp ), + static_cast( depthFailOp ), + static_cast( compareOp ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
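+    // The assertion below is the guard every wrapper in this header starts with: in debug
+    // builds it verifies that the dispatcher `d` was initialized against the same Vulkan
+    // header version this file was generated from. As an illustrative sketch (not generated
+    // code), the extended-dynamic-state setters defined in this stretch of the header are
+    // used like this, assuming `cmd` is a vk::CommandBuffer in the recording state whose
+    // pipeline listed the matching dynamic states:
+    //
+    //   cmd.setCullMode( vk::CullModeFlagBits::eBack );
+    //   cmd.setFrontFace( vk::FrontFace::eCounterClockwise );
+    //   cmd.setDepthTestEnable( VK_TRUE );
+    //   cmd.setRasterizerDiscardEnable( VK_FALSE );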
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast( rasterizerDiscardEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast( depthBiasEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast( primitiveRestartEnable ) ); + } + + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceBufferMemoryRequirements( + m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetDeviceBufferMemoryRequirements( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetDeviceBufferMemoryRequirements( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageMemoryRequirements( + m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetDeviceImageMemoryRequirements( + m_device, reinterpret_cast( &info ), reinterpret_cast( 
&memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetDeviceImageMemoryRequirements( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageSparseMemoryRequirements( m_device, + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetDeviceImageSparseMemoryRequirements( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetDeviceImageSparseMemoryRequirements( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetDeviceImageSparseMemoryRequirements( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetDeviceImageSparseMemoryRequirements( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + 
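+  // Illustrative use of the Vulkan 1.3 memory-requirement queries above (a sketch, not
+  // generated code). Assuming `device` is a valid vk::Device and `bufferCreateInfo` is a
+  // filled-in vk::BufferCreateInfo, the enhanced-mode overload returns by value instead of
+  // filling an out-parameter:
+  //
+  //   vk::DeviceBufferMemoryRequirements info{ &bufferCreateInfo };
+  //   vk::MemoryRequirements2 reqs = device.getBufferMemoryRequirements( info );
+  //   vk::DeviceSize size = reqs.memoryRequirements.size;
+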
//=== VK_KHR_surface === + + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroySurfaceKHR( m_instance, + static_cast( surface ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroySurfaceKHR( m_instance, + static_cast( surface ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Bool32 * pSupported, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( + m_physicalDevice, queueFamilyIndex, static_cast( surface ), reinterpret_cast( pSupported ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Bool32 supported; + VkResult result = d.vkGetPhysicalDeviceSurfaceSupportKHR( + m_physicalDevice, queueFamilyIndex, static_cast( surface ), reinterpret_cast( &supported ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); + + return createResultValueType( static_cast( result ), supported ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + m_physicalDevice, static_cast( surface ), reinterpret_cast( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; + VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + + return createResultValueType( static_cast( result ), surfaceCapabilities ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast( surface ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ); + if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return createResultValueType( static_cast( result ), surfaceFormats ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector surfaceFormats( surfaceFormatKHRAllocator ); + uint32_t surfaceFormatCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ); + if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < 
surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return createResultValueType( static_cast( result ), surfaceFormats ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( + m_physicalDevice, static_cast( surface ), pPresentModeCount, reinterpret_cast( pPresentModes ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector presentModes; + uint32_t presentModeCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ); + if ( ( result == VK_SUCCESS ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( + m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + return createResultValueType( static_cast( result ), presentModes ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector presentModes( presentModeKHRAllocator ); + uint32_t presentModeCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ); + if ( ( result == VK_SUCCESS ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( + m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + return createResultValueType( static_cast( result ), presentModes ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_swapchain === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateSwapchainKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSwapchain ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + VkResult result = + d.vkCreateSwapchainKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); + + return createResultValueType( static_cast( result ), swapchain ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + VkResult result = + d.vkCreateSwapchainKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( swapchain, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( static_cast( allocator ) ) 
); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), pSwapchainImageCount, reinterpret_cast( pSwapchainImages ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector swapchainImages; + uint32_t swapchainImageCount; + VkResult result; + do + { + result = d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ); + if ( ( result == VK_SUCCESS ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = d.vkGetSwapchainImagesKHR( + m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + if ( swapchainImageCount < swapchainImages.size() ) + { + swapchainImages.resize( swapchainImageCount ); + } + return createResultValueType( static_cast( result ), swapchainImages ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector swapchainImages( imageAllocator ); + uint32_t swapchainImageCount; + VkResult result; + do + { + result = d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ); + if ( ( result == VK_SUCCESS ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = d.vkGetSwapchainImagesKHR( + m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + if ( swapchainImageCount < swapchainImages.size() ) + { + swapchainImages.resize( swapchainImageCount ); + } + return createResultValueType( static_cast( result ), swapchainImages ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence, + uint32_t * pImageIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAcquireNextImageKHR( + m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), pImageIndex ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + 
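+  // The enhanced-mode acquireNextImageKHR below returns a ResultValue instead of throwing
+  // on eTimeout, eNotReady and eSuboptimalKHR, since those are expected, non-error outcomes.
+  // A minimal sketch (not generated code), assuming `device`, `swapchain` and `imageAvailable`
+  // are a valid vk::Device, vk::SwapchainKHR and vk::Semaphore:
+  //
+  //   vk::ResultValue<uint32_t> acquired =
+  //     device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, nullptr );
+  //   if ( acquired.result == vk::Result::eSuboptimalKHR )
+  //   {
+  //     // still presentable, but consider recreating the swapchain
+  //   }
+  //   uint32_t imageIndex = acquired.value;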
template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                                                                            uint64_t timeout,
+                                                                                            VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+                                                                                            VULKAN_HPP_NAMESPACE::Fence fence,
+                                                                                            Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    uint32_t imageIndex;
+    VkResult result = d.vkAcquireNextImageKHR(
+      m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+                   VULKAN_HPP_NAMESPACE::Result::eTimeout,
+                   VULKAN_HPP_NAMESPACE::Result::eNotReady,
+                   VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+    return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo,
+                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo,
+                                                                                         Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR(
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
+    Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
+    VkResult result =
+      d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deviceGroupPresentCapabilities );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
+                                                                                        VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
+                                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR(
+      m_device, static_cast<VkSurfaceKHR>( surface ),
reinterpret_cast( pModes ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + VkResult result = d.vkGetDeviceGroupSurfacePresentModesKHR( + m_device, static_cast( surface ), reinterpret_cast( &modes ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); + + return createResultValueType( static_cast( result ), modes ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pRectCount, + VULKAN_HPP_NAMESPACE::Rect2D * pRects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), pRectCount, reinterpret_cast( pRects ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector rects; + uint32_t rectCount; + VkResult result; + do + { + result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, nullptr ); + if ( ( result == VK_SUCCESS ) && rectCount ) + { + rects.resize( rectCount ); + result = d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + if ( rectCount < rects.size() ) + { + rects.resize( rectCount ); + } + return createResultValueType( static_cast( result ), rects ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector rects( rect2DAllocator ); + uint32_t rectCount; + VkResult result; + do + { + result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, nullptr ); + if ( ( result == VK_SUCCESS ) && rectCount ) + { + rects.resize( rectCount ); + result = d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + if ( rectCount < rects.size() ) + { + rects.resize( rectCount ); + } + return createResultValueType( static_cast( result ), rects ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, + uint32_t * pImageIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast( pAcquireInfo ), pImageIndex ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + uint32_t imageIndex; + VkResult result = d.vkAcquireNextImage2KHR( m_device, reinterpret_cast( &acquireInfo ), &imageIndex ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return ResultValue( static_cast( result ), imageIndex ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_display === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties; + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties( displayPropertiesKHRAllocator ); + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), 
VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties; + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties( displayPlanePropertiesKHRAllocator ); + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + uint32_t * pDisplayCount, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast( 
pDisplays ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector displays; + uint32_t displayCount; + VkResult result; + do + { + result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ); + if ( ( result == VK_SUCCESS ) && displayCount ) + { + displays.resize( displayCount ); + result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + if ( displayCount < displays.size() ) + { + displays.resize( displayCount ); + } + return createResultValueType( static_cast( result ), displays ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector displays( displayKHRAllocator ); + uint32_t displayCount; + VkResult result; + do + { + result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ); + if ( ( result == VK_SUCCESS ) && displayCount ) + { + displays.resize( displayCount ); + result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + if ( displayCount < displays.size() ) + { + displays.resize( displayCount ); + } + return createResultValueType( static_cast( result ), displays ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDisplayModePropertiesKHR( + m_physicalDevice, static_cast( display ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties; + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkGetDisplayModePropertiesKHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( 
properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties( displayModePropertiesKHRAllocator ); + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkGetDisplayModePropertiesKHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast( display ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMode ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; + VkResult result = + d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast( display ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &mode ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" ); + + return createResultValueType( static_cast( result ), mode ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
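+      // Same call sequence as the non-Unique createDisplayModeKHR overload above; the only
+      // difference is that the resulting DisplayModeKHR is handed to a UniqueHandle whose
+      // ObjectDestroy deleter captures this PhysicalDevice, the optional allocation
+      // callbacks and the dispatcher.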
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; + VkResult result = + d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast( display ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &mode ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( mode, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( + m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( pCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; + VkResult result = d.vkGetDisplayPlaneCapabilitiesKHR( + m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( &capabilities ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); + + return createResultValueType( static_cast( result ), capabilities ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = d.vkCreateDisplayPlaneSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" ); + + return createResultValueType( static_cast( result ), surface ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + 
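+  // Smart-handle variant: identical call sequence to createDisplayPlaneSurfaceKHR above,
+  // but the returned SurfaceKHR is owned by a UniqueHandle that destroys it through the
+  // Instance when it goes out of scope.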
{ + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = d.vkCreateDisplayPlaneSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_display_swapchain === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateSharedSwapchainsKHR( m_device, + swapchainCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSwapchains ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector swapchains( createInfos.size() ); + VkResult result = d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); + + return createResultValueType( static_cast( result ), swapchains ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector swapchains( createInfos.size(), swapchainKHRAllocator ); + VkResult result = d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); + + return createResultValueType( static_cast( result ), swapchains ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + VkResult result = d.vkCreateSharedSwapchainsKHR( + m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING 
"::Device::createSharedSwapchainKHR" ); + + return createResultValueType( static_cast( result ), swapchain ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, SwapchainKHRAllocator>>::type + Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector swapchains( createInfos.size() ); + VkResult result = d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + std::vector, SwapchainKHRAllocator> uniqueSwapchains; + uniqueSwapchains.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & swapchain : swapchains ) + { + uniqueSwapchains.push_back( UniqueHandle( swapchain, deleter ) ); + } + return createResultValueType( static_cast( result ), std::move( uniqueSwapchains ) ); + } + + template >::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, SwapchainKHRAllocator>>::type + Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector swapchains( createInfos.size() ); + VkResult result = d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + std::vector, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator ); + uniqueSwapchains.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & swapchain : swapchains ) + { + uniqueSwapchains.push_back( UniqueHandle( swapchain, deleter ) ); + } + return createResultValueType( static_cast( result ), std::move( uniqueSwapchains ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + VkResult result = d.vkCreateSharedSwapchainsKHR( + m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( swapchain, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = + d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); + + return createResultValueType( static_cast( result ), surface ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = + d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE Bool32 + PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID ); + + return static_cast( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateXcbSurfaceKHR( m_instance, 
+ reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = + d.vkCreateXcbSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); + + return createResultValueType( static_cast( result ), surface ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = + d.vkCreateXcbSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t & connection, + xcb_visualid_t visual_id, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id ); + + return static_cast( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
typename ResultValueType::type + Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = d.vkCreateWaylandSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); + + return createResultValueType( static_cast( result ), surface ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = d.vkCreateWaylandSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display * display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display ); + + return static_cast( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = d.vkCreateAndroidSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); + + return createResultValueType( static_cast( result ), surface ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = d.vkCreateAndroidSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = + d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); + + return createResultValueType( static_cast( result ), surface ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = + d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( surface, 
ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) ); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pCallback ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; + VkResult result = d.vkCreateDebugReportCallbackEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &callback ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); + + return createResultValueType( static_cast( result ), callback ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; + VkResult result = d.vkCreateDebugReportCallbackEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &callback ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( callback, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugReportCallbackEXT( + m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator, + Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyDebugReportCallbackEXT( + m_instance, + static_cast( callback ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugReportCallbackEXT( + m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyDebugReportCallbackEXT( + m_instance, + static_cast( callback ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDebugReportMessageEXT( m_instance, + static_cast( flags ), + static_cast( objectType_ ), + object, + location, + messageCode, + pLayerPrefix, + pMessage ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDebugReportMessageEXT( m_instance, + static_cast( flags ), + static_cast( objectType_ ), + object, + location, + messageCode, + layerPrefix.c_str(), + message.c_str() ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_debug_marker === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + 
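+  // Usage sketch (illustrative only, not generated code): attaching a tag to an object
+  // with VK_EXT_debug_marker so it can be inspected in capture tools. The handles
+  // `device` and `image` are assumed to be valid vk::Device / vk::Image created
+  // elsewhere, and the extension must be enabled for the entry point to be non-null.
+  //
+  //   struct FrameTag { uint32_t frameIndex; };
+  //   FrameTag tag{ 42 };
+  //   vk::DebugMarkerObjectTagInfoEXT tagInfo{};
+  //   tagInfo.objectType = vk::DebugReportObjectTypeEXT::eImage;   // type of the tagged handle
+  //   tagInfo.object     = uint64_t( VkImage( image ) );           // raw handle value
+  //   tagInfo.tagName    = 0;                                      // user-chosen tag identifier
+  //   tagInfo.tagSize    = sizeof( tag );
+  //   tagInfo.pTag       = &tag;
+  //   device.debugMarkerSetObjectTagEXT( tagInfo );  // throws vk::SystemError on failure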
template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_queue === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile, + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( + m_physicalDevice, reinterpret_cast( pVideoProfile ), reinterpret_cast( pCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type 
+ PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities; + VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR( + m_physicalDevice, reinterpret_cast( &videoProfile ), reinterpret_cast( &capabilities ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + + return createResultValueType( static_cast( result ), capabilities ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get(); + VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR( + m_physicalDevice, reinterpret_cast( &videoProfile ), reinterpret_cast( &capabilities ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + + return createResultValueType( static_cast( result ), structureChain ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast( pVideoFormatInfo ), + pVideoFormatPropertyCount, + reinterpret_cast( pVideoFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + } + return createResultValueType( static_cast( result ), videoFormatProperties ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename 
ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector videoFormatProperties( videoFormatPropertiesKHRAllocator ); + uint32_t videoFormatPropertyCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + } + return createResultValueType( static_cast( result ), videoFormatProperties ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateVideoSessionKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pVideoSession ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; + VkResult result = + d.vkCreateVideoSessionKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSession ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); + + return createResultValueType( static_cast( result ), videoSession ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; + VkResult result = + d.vkCreateVideoSessionKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSession ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" ); + + return createResultValueType( + static_cast( result ), + 
UniqueHandle( videoSession, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionKHR( m_device, static_cast( videoSession ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyVideoSessionKHR( + m_device, + static_cast( videoSession ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionKHR( m_device, static_cast( videoSession ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyVideoSessionKHR( + m_device, + static_cast( videoSession ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t * pMemoryRequirementsCount, + VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( m_device, + static_cast( videoSession ), + pMemoryRequirementsCount, + reinterpret_cast( pMemoryRequirements ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector memoryRequirements; + uint32_t memoryRequirementsCount; + VkResult result; + do + { + result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast( videoSession ), &memoryRequirementsCount, nullptr ); + if ( ( result == VK_SUCCESS ) && memoryRequirementsCount ) + { + memoryRequirements.resize( memoryRequirementsCount ); + result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, + static_cast( videoSession ), + &memoryRequirementsCount, + reinterpret_cast( memoryRequirements.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + + VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); + if ( memoryRequirementsCount < memoryRequirements.size() ) + { + 
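+        // A final VK_SUCCESS may report fewer elements than the vector was grown to while
+        // the loop retried on VK_INCOMPLETE, so trim to the count actually written.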
memoryRequirements.resize( memoryRequirementsCount ); + } + return memoryRequirements; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector memoryRequirements( + videoSessionMemoryRequirementsKHRAllocator ); + uint32_t memoryRequirementsCount; + VkResult result; + do + { + result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast( videoSession ), &memoryRequirementsCount, nullptr ); + if ( ( result == VK_SUCCESS ) && memoryRequirementsCount ) + { + memoryRequirements.resize( memoryRequirementsCount ); + result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, + static_cast( videoSession ), + &memoryRequirementsCount, + reinterpret_cast( memoryRequirements.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + + VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); + if ( memoryRequirementsCount < memoryRequirements.size() ) + { + memoryRequirements.resize( memoryRequirementsCount ); + } + return memoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBindVideoSessionMemoryKHR( m_device, + static_cast( videoSession ), + bindSessionMemoryInfoCount, + reinterpret_cast( pBindSessionMemoryInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindVideoSessionMemoryKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkBindVideoSessionMemoryKHR( m_device, + static_cast( videoSession ), + bindSessionMemoryInfos.size(), + reinterpret_cast( bindSessionMemoryInfos.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateVideoSessionParametersKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pVideoSessionParameters ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + 
Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; + VkResult result = d.vkCreateVideoSessionParametersKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSessionParameters ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); + + return createResultValueType( static_cast( result ), videoSessionParameters ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; + VkResult result = d.vkCreateVideoSessionParametersKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSessionParameters ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" ); + + return createResultValueType( static_cast( result ), + UniqueHandle( + videoSessionParameters, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkUpdateVideoSessionParametersKHR( m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( pUpdateInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkUpdateVideoSessionParametersKHR( m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( &updateInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionParametersKHR( + m_device, static_cast( videoSessionParameters ), reinterpret_cast( pAllocator ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyVideoSessionParametersKHR( + m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyVideoSessionParametersKHR( + m_device, static_cast( videoSessionParameters ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyVideoSessionParametersKHR( + m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast( pBeginInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast( &beginInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast( pEndCodingInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast( &endCodingInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast( pCodingControlInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const 
+
+  //=== VK_KHR_video_decode_queue ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_transform_feedback ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t                                 firstBinding,
+                                                                         uint32_t                                 bindingCount,
+                                                                         const VULKAN_HPP_NAMESPACE::Buffer *     pBuffers,
+                                                                         const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
+                                                                         const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
+                                                                         Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
+                                            firstBinding,
+                                            bindingCount,
+                                            reinterpret_cast<const VkBuffer *>( pBuffers ),
+                                            reinterpret_cast<const VkDeviceSize *>( pOffsets ),
+                                            reinterpret_cast<const VkDeviceSize *>( pSizes ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t                                                                    firstBinding,
+                                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+                                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+                                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
+                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
+# else
+    if ( buffers.size() != offsets.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
+    }
+    if ( !sizes.empty() && buffers.size() != sizes.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
+    }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
+                                            firstBinding,
+                                            buffers.size(),
+                                            reinterpret_cast<const VkBuffer *>( buffers.data() ),
+                                            reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
+                                            reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t                                 firstCounterBuffer,
+                                                                   uint32_t                                 counterBufferCount,
+                                                                   const VULKAN_HPP_NAMESPACE::Buffer *     pCounterBuffers,
+                                                                   const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
+                                                                   Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
+                                      firstCounterBuffer,
+                                      counterBufferCount,
+                                      reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
+                                      reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::beginTransformFeedbackEXT( uint32_t                                                                    firstCounterBuffer,
+                                              VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+                                              VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
+                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
+# else
+    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+    }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
+                                      firstCounterBuffer,
+                                      counterBuffers.size(),
+                                      reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
+                                      reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t                                 firstCounterBuffer,
+                                                                 uint32_t                                 counterBufferCount,
+                                                                 const VULKAN_HPP_NAMESPACE::Buffer *     pCounterBuffers,
+                                                                 const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
+                                                                 Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
+                                    firstCounterBuffer,
+                                    counterBufferCount,
+                                    reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
+                                    reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::endTransformFeedbackEXT( uint32_t                                                                    firstCounterBuffer,
+                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
+                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
+# else
+    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+    }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
+                                    firstCounterBuffer,
+                                    counterBuffers.size(),
+                                    reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
+                                    reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool         queryPool,
+                                                              uint32_t                                query,
+                                                              VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
+                                                              uint32_t                                index,
+                                                              Dispatch const &                        d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
+  }
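+
+  // Usage sketch for the VK_EXT_transform_feedback wrappers above, assuming
+  // application-created `cmd`, `buffer`, and `counterBuffer` handles (all
+  // hypothetical names). The ArrayProxy overloads accept single handles as
+  // one-element arrays:
+  //
+  //   cmd.bindTransformFeedbackBuffersEXT( 0, buffer, vk::DeviceSize{ 0 } ); // sizes may be omitted
+  //   cmd.beginTransformFeedbackEXT( 0, counterBuffer, vk::DeviceSize{ 0 } );
+  //   // ... draw calls whose vertex output is captured into `buffer` ...
+  //   cmd.endTransformFeedbackEXT( 0, counterBuffer, vk::DeviceSize{ 0 } );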
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t                         instanceCount,
+                                                                  uint32_t                         firstInstance,
+                                                                  VULKAN_HPP_NAMESPACE::Buffer     counterBuffer,
+                                                                  VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
+                                                                  uint32_t                         counterOffset,
+                                                                  uint32_t                         vertexStride,
+                                                                  Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer,
+                                     instanceCount,
+                                     firstInstance,
+                                     static_cast<VkBuffer>( counterBuffer ),
+                                     static_cast<VkDeviceSize>( counterBufferOffset ),
+                                     counterOffset,
+                                     vertexStride );
+  }
+
+  //=== VK_NVX_binary_import ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
+                                                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
+                                                                           VULKAN_HPP_NAMESPACE::CuModuleNVX *                 pModule,
+                                                                           Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateCuModuleNVX( m_device,
+                                                       reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ),
+                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                       reinterpret_cast<VkCuModuleNVX *>( pModule ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type
+    Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
+                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                               Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::CuModuleNVX module;
+    VkResult result =
+      d.vkCreateCuModuleNVX( m_device,
+                             reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
+                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                             reinterpret_cast<VkCuModuleNVX *>( &module ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), module );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type
+    Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
+                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                     Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::CuModuleNVX module;
+    VkResult result =
+      d.vkCreateCuModuleNVX( m_device,
+                             reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
+                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                             reinterpret_cast<VkCuModuleNVX *>( &module ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                                  UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>( module, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,
+                                                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks *     pAllocator,
+                                                                             VULKAN_HPP_NAMESPACE::CuFunctionNVX *                 pFunction,
+                                                                             Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateCuFunctionNVX( m_device,
+                                                         reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ),
+                                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                         reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type
+    Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
+                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                 Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
+    VkResult result =
+      d.vkCreateCuFunctionNVX( m_device,
+                               reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
+                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                               reinterpret_cast<VkCuFunctionNVX *>( &function ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), function );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type
+    Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
+                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                       Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
+    VkResult result =
+      d.vkCreateCuFunctionNVX( m_device,
+                               reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
+                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                               reinterpret_cast<VkCuFunctionNVX *>( &function ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" );
+
+    return createResultValueType(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>( function, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX                 module,
+                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                     Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
+                                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyCuModuleNVX( m_device,
+                            static_cast<VkCuModuleNVX>( module ),
+                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX                 module,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
+                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyCuModuleNVX( m_device,
+                            static_cast<VkCuModuleNVX>( module ),
+                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX               function,
+                                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                       Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
+                                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyCuFunctionNVX( m_device,
+                              static_cast<VkCuFunctionNVX>( function ),
+                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX               function,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
+                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyCuFunctionNVX( m_device,
+                              static_cast<VkCuFunctionNVX>( function ),
+                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,
+                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo,
+                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NVX_image_view_handle ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,
+                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info,
+                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
+
+    return result;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView                        imageView,
+                                                                                VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,
+                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type
+    Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
+    VkResult result =
+      d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_AMD_draw_indirect_count ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer     buffer,
+                                                              VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                              VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
+                                                              VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                                              uint32_t                         maxDrawCount,
+                                                              uint32_t                         stride,
+                                                              Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndirectCountAMD( m_commandBuffer,
+                                 static_cast<VkBuffer>( buffer ),
+                                 static_cast<VkDeviceSize>( offset ),
+                                 static_cast<VkBuffer>( countBuffer ),
+                                 static_cast<VkDeviceSize>( countBufferOffset ),
+                                 maxDrawCount,
+                                 stride );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer     buffer,
+                                                                     VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                     VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
+                                                                     VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                                                     uint32_t                         maxDrawCount,
+                                                                     uint32_t                         stride,
+                                                                     Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer,
+                                        static_cast<VkBuffer>( buffer ),
+                                        static_cast<VkDeviceSize>( offset ),
+                                        static_cast<VkBuffer>( countBuffer ),
+                                        static_cast<VkDeviceSize>( countBufferOffset ),
+                                        maxDrawCount,
+                                        stride );
+  }
+
+  //=== VK_AMD_shader_info ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline            pipeline,
+                                                                          VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
+                                                                          VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD   infoType,
+                                                                          size_t *                                  pInfoSize,
+                                                                          void *                                    pInfo,
+                                                                          Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
+                                                      static_cast<VkPipeline>( pipeline ),
+                                                      static_cast<VkShaderStageFlagBits>( shaderStage ),
+                                                      static_cast<VkShaderInfoTypeAMD>( infoType ),
+                                                      pInfoSize,
+                                                      pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Uint8_tAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+    Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline            pipeline,
+                              VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
+                              VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD   infoType,
+                              Dispatch const &                          d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<uint8_t, Uint8_tAllocator> info;
+    size_t                                 infoSize;
+    VkResult                               result;
+    do
+    {
+      result = d.vkGetShaderInfoAMD( m_device,
+                                     static_cast<VkPipeline>( pipeline ),
+                                     static_cast<VkShaderStageFlagBits>( shaderStage ),
+                                     static_cast<VkShaderInfoTypeAMD>( infoType ),
+                                     &infoSize,
+                                     nullptr );
+      if ( ( result == VK_SUCCESS ) && infoSize )
+      {
+        info.resize( infoSize );
+        result = d.vkGetShaderInfoAMD( m_device,
+                                       static_cast<VkPipeline>( pipeline ),
+                                       static_cast<VkShaderStageFlagBits>( shaderStage ),
+                                       static_cast<VkShaderInfoTypeAMD>( infoType ),
+                                       &infoSize,
+                                       reinterpret_cast<void *>( info.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
+    VULKAN_HPP_ASSERT( infoSize <= info.size() );
+    if ( infoSize < info.size() )
+    {
+      info.resize( infoSize );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), info );
+  }
+
+  template <typename Uint8_tAllocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+    Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline            pipeline,
+                              VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
+                              VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD   infoType,
+                              Uint8_tAllocator &                        uint8_tAllocator,
+                              Dispatch const &                          d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator );
+    size_t                                 infoSize;
+    VkResult                               result;
+    do
+    {
+      result = d.vkGetShaderInfoAMD( m_device,
+                                     static_cast<VkPipeline>( pipeline ),
+                                     static_cast<VkShaderStageFlagBits>( shaderStage ),
+                                     static_cast<VkShaderInfoTypeAMD>( infoType ),
+                                     &infoSize,
+                                     nullptr );
+      if ( ( result == VK_SUCCESS ) && infoSize )
+      {
+        info.resize( infoSize );
+        result = d.vkGetShaderInfoAMD( m_device,
+                                       static_cast<VkPipeline>( pipeline ),
+                                       static_cast<VkShaderStageFlagBits>( shaderStage ),
+                                       static_cast<VkShaderInfoTypeAMD>( infoType ),
+                                       &infoSize,
+                                       reinterpret_cast<void *>( info.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
+    VULKAN_HPP_ASSERT( infoSize <= info.size() );
+    if ( infoSize < info.size() )
+    {
+      info.resize( infoSize );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), info );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_dynamic_rendering ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
+                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,
+                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndRenderingKHR( m_commandBuffer );
+  }
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
+                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                  pAllocator,
+                                                VULKAN_HPP_NAMESPACE::SurfaceKHR *                                 pSurface,
+                                                Dispatch const &                                                   d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance,
+                                                                      reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ),
+                                                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                                      reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+    Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
+                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                                Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateStreamDescriptorSurfaceGGP(
+      m_instance,
+      reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+    Instance::createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
+                                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                                      Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateStreamDescriptorSurfaceGGP(
+      m_instance,
+      reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" );
+
+    return createResultValueType(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_GGP*/
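+
+  // Usage sketch for the VK_KHR_dynamic_rendering wrappers above, assuming an
+  // application-created `cmd` and a filled-in vk::RenderingInfo (hypothetical
+  // names; requires the dynamicRendering feature to be enabled):
+  //
+  //   cmd.beginRenderingKHR( renderingInfo ); // no VkRenderPass/VkFramebuffer needed
+  //   // ... draw commands targeting the attachments in renderingInfo ...
+  //   cmd.endRenderingKHR();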
+
+  //=== VK_NV_external_memory_capabilities ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format                            format,
+                                                        VULKAN_HPP_NAMESPACE::ImageType                         type,
+                                                        VULKAN_HPP_NAMESPACE::ImageTiling                       tiling,
+                                                        VULKAN_HPP_NAMESPACE::ImageUsageFlags                   usage,
+                                                        VULKAN_HPP_NAMESPACE::ImageCreateFlags                  flags,
+                                                        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV   externalHandleType,
+                                                        VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
+                                                        Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
+                                                            static_cast<VkFormat>( format ),
+                                                            static_cast<VkImageType>( type ),
+                                                            static_cast<VkImageTiling>( tiling ),
+                                                            static_cast<VkImageUsageFlags>( usage ),
+                                                            static_cast<VkImageCreateFlags>( flags ),
+                                                            static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
+                                                            reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type
+    PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format                          format,
+                                                        VULKAN_HPP_NAMESPACE::ImageType                       type,
+                                                        VULKAN_HPP_NAMESPACE::ImageTiling                     tiling,
+                                                        VULKAN_HPP_NAMESPACE::ImageUsageFlags                 usage,
+                                                        VULKAN_HPP_NAMESPACE::ImageCreateFlags                flags,
+                                                        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
+                                                        Dispatch const &                                      d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
+    VkResult result =
+      d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
+                                                            static_cast<VkFormat>( format ),
+                                                            static_cast<VkImageType>( type ),
+                                                            static_cast<VkImageTiling>( tiling ),
+                                                            static_cast<VkImageUsageFlags>( usage ),
+                                                            static_cast<VkImageCreateFlags>( flags ),
+                                                            static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
+                                                            reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), externalImageFormatProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory                    memory,
+                                                                                VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
+                                                                                HANDLE *                                              pHandle,
+                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV(
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    HANDLE   handle;
+    VkResult result =
+      d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_get_physical_device_properties2 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
+    PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+
+    return features;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+    PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>  structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
+                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
+    PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
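+    // Usage sketch for the getFeatures2KHR StructureChain overload above: the
+    // chained struct types are chosen by the caller; the ones named here are
+    // just an illustration (and `vk::` assumes the default namespace):
+    //
+    //   auto chain = physicalDevice.getFeatures2KHR<vk::PhysicalDeviceFeatures2,
+    //                                               vk::PhysicalDeviceVulkan12Features>();
+    //   auto & features12 = chain.get<vk::PhysicalDeviceVulkan12Features>();
+    //   // features12.timelineSemaphore etc. are now filled in by the driver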
+    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+
+    return properties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+    PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>    structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format              format,
+                                                                  VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
+                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceFormatProperties2KHR(
+      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
+    PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties2KHR(
+      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+
+    return formatProperties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+    PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+    d.vkGetPhysicalDeviceFormatProperties2KHR(
+      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
+                                                  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 *               pImageFormatProperties,
+                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
+                                                                                reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
+                                                                                reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
+    PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
+                                                                      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+                                                                      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
+    PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    StructureChain<X, Y, Z...>                     structureChain;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAAN_HPP_NAMESPACE::ImageFormatProperties2>();
+    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
+                                                                      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+                                                                      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t *                                   pQueueFamilyPropertyCount,
+                                                                       VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
+                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+      m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+    PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
+    uint32_t                                                                                   queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+
+  template <typename QueueFamilyProperties2Allocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename QueueFamilyProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
+    PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
+    uint32_t                                                                                   queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+
+  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
+    PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<StructureChain, StructureChainAllocator>       structureChains;
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+    uint32_t                                                  queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    structureChains.resize( queueFamilyPropertyCount );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      structureChains.resize( queueFamilyPropertyCount );
+    }
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+    }
+    return structureChains;
+  }
+
+  template <typename StructureChain,
+            typename StructureChainAllocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
+    PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<StructureChain, StructureChainAllocator>       structureChains( structureChainAllocator );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+    uint32_t                                                  queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    structureChains.resize( queueFamilyPropertyCount );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      structureChains.resize( queueFamilyPropertyCount );
+    }
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+    }
+    return structureChains;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
+                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
+    PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+
+    return memoryProperties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+    PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>          structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
+      structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
+                                                                             uint32_t *                                                          pPropertyCount,
+                                                                             VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 *                pProperties,
+                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
+                                                          reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
+                                                          pPropertyCount,
+                                                          reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+    PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
+                                                        Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
+    uint32_t                                                                                               propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
+      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
+                                                          reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+                                                          &propertyCount,
+                                                          reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  template <typename SparseImageFormatProperties2Allocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename SparseImageFormatProperties2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
+    PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
+                                                        SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
+                                                        Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
+    uint32_t                                                                                               propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
+      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
+                                                          reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+                                                          &propertyCount,
+                                                          reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
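+
+  // Usage sketch for the VK_KHR_get_physical_device_properties2 query wrappers
+  // above (exceptions enabled, default allocators; `physicalDevice` and
+  // `formatInfo` are assumed application-provided):
+  //
+  //   auto memProps     = physicalDevice.getMemoryProperties2KHR();
+  //   auto queueFams    = physicalDevice.getQueueFamilyProperties2KHR();
+  //   auto sparseProps  = physicalDevice.getSparseImageFormatProperties2KHR( formatInfo );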
+
+  //=== VK_KHR_device_group ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t                                       heapIndex,
+                                                                uint32_t                                       localDeviceIndex,
+                                                                uint32_t                                       remoteDeviceIndex,
+                                                                VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
+                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
+      m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR(
+    uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
+    d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
+      m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+
+    return peerMemoryFeatures;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX,
+                                                         uint32_t baseGroupY,
+                                                         uint32_t baseGroupZ,
+                                                         uint32_t groupCountX,
+                                                         uint32_t groupCountY,
+                                                         uint32_t groupCountZ,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+  }
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,
+                                                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
+                                                                             VULKAN_HPP_NAMESPACE::SurfaceKHR *                  pSurface,
+                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance,
+                                                       reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ),
+                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                       reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+    Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
+                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                 Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result =
+      d.vkCreateViSurfaceNN( m_instance,
+                             reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
+                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                             reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+    Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
+                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                       Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result =
+      d.vkCreateViSurfaceNN( m_instance,
+                             reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
+                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                             reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" );
+
+    return createResultValueType(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_KHR_maintenance1 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool          commandPool,
+                                                     VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+
+  //=== VK_KHR_device_group_creation ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t *                                          pPhysicalDeviceGroupCount,
+                                                VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
+                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
+      m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+    Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
+    uint32_t                                                                                                 physicalDeviceGroupCount;
+    VkResult                                                                                                 result;
+    do
+    {
+      result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = d.vkEnumeratePhysicalDeviceGroupsKHR(
+          m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
+    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
+    {
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
+  }
+
+  template <typename PhysicalDeviceGroupPropertiesAllocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename PhysicalDeviceGroupPropertiesAllocator::value_type, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
+    Instance::enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
+      physicalDeviceGroupPropertiesAllocator );
+    uint32_t physicalDeviceGroupCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = d.vkEnumeratePhysicalDeviceGroupsKHR(
+          m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
+    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
+    {
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
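+
+  // Usage sketch for enumeratePhysicalDeviceGroupsKHR above (exceptions
+  // enabled, so the enhanced overload returns the vector directly; `instance`
+  // is an assumed application-created vk::Instance):
+  //
+  //   std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroupsKHR();
+  //   // groups[i].physicalDeviceCount devices can be used as one logical device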
+
+  //=== VK_KHR_external_memory_capabilities ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
+                                                                         VULKAN_HPP_NAMESPACE::ExternalBufferProperties *               pExternalBufferProperties,
+                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice,
+                                                      reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
+                                                      reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
+    PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,
+                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
+    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice,
+                                                      reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
+                                                      reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+
+    return externalBufferProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
+                                                                                 HANDLE *                                                  pHandle,
+                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
+    Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    HANDLE   handle;
+    VkResult result = d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+                                               HANDLE                                                 handle,
+                                               VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
+                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device,
+                                                                       static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+                                                                       handle,
+                                                                       reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type
+    Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
+    VkResult result = d.vkGetMemoryWin32HandlePropertiesKHR( m_device,
+                                                             static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+                                                             handle,
+                                                             reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryWin32HandleProperties );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
+                                                                        int *                                            pFd,
+                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo,
+                                                                                                     Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    int      fd;
+    VkResult result = d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+                                                                                  int                                                    fd,
+                                                                                  VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR *          pMemoryFdProperties,
+                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR(
+      m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type
+    Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
+    VkResult result = d.vkGetMemoryFdPropertiesKHR(
+      m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryFdProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_external_semaphore_capabilities ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
+                                                       VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties *               pExternalSemaphoreProperties,
+                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
+                                                         reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
+                                                         reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD
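+    // Usage sketch for the VK_KHR_external_memory_fd export path above
+    // (exceptions enabled; `device` and `memory` are assumed
+    // application-created handles). The returned fd is owned by the caller:
+    //
+    //   vk::MemoryGetFdInfoKHR getFdInfo( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+    //   int fd = device.getMemoryFdKHR( getFdInfo );
+    //   // ... hand `fd` to another process/API, then close( fd ) when done ...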
  //=== VK_KHR_external_semaphore_capabilities ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
                                                         reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
                                                         reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
                                                         reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
                                                         reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );

    return externalSemaphoreProperties;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_KHR_external_semaphore_win32 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result =
      d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
    Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    HANDLE   handle;
    VkResult result = d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
  //=== VK_KHR_external_semaphore_fd ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type
    Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    int      fd;
    VkResult result = d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
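  // Editor's note (sketch, not upstream code): exporting and importing a semaphore payload through
  // a POSIX fd with the VK_KHR_external_semaphore_fd wrappers above. `device`, `exportSem` and
  // `importSem` are assumed, suitably created handles.
  //
  //   vk::SemaphoreGetFdInfoKHR getInfo( exportSem, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
  //   int fd = device.getSemaphoreFdKHR( getInfo );
  //   vk::ImportSemaphoreFdInfoKHR importInfo( importSem, {}, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, fd );
  //   device.importSemaphoreFdKHR( importInfo );  // a successful import takes ownership of `fd`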
  //=== VK_KHR_push_descriptor ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
                                 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                                 static_cast<VkPipelineLayout>( layout ),
                                 set,
                                 descriptorWriteCount,
                                 reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
                                 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                                 static_cast<VkPipelineLayout>( layout ),
                                 set,
                                 descriptorWrites.size(),
                                 reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPushDescriptorSetWithTemplateKHR(
      m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, DataType const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer,
                                             static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                             static_cast<VkPipelineLayout>( layout ),
                                             set,
                                             reinterpret_cast<const void *>( &data ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
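  // Editor's note (sketch, not upstream code): pushing a single uniform-buffer descriptor with the
  // VK_KHR_push_descriptor wrapper above; `cmd`, `pipelineLayout` and `bufferInfo` are assumed, and
  // set 0 of `pipelineLayout` must have been created with
  // vk::DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR.
  //
  //   vk::WriteDescriptorSet write( {}, 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo );
  //   cmd.pushDescriptorSetKHR( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, write );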
  //=== VK_EXT_conditional_rendering ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
  }

  //=== VK_KHR_descriptor_update_template ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device,
                                                                       reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
                                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                       reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
    Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    VkResult result = d.vkCreateDescriptorUpdateTemplateKHR(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
    Device::createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    VkResult result = d.vkCreateDescriptorUpdateTemplateKHR(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
                                    descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDescriptorUpdateTemplateKHR(
      m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyDescriptorUpdateTemplateKHR(
      m_device,
      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkUpdateDescriptorSetWithTemplateKHR(
      m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkUpdateDescriptorSetWithTemplateKHR( m_device,
                                            static_cast<VkDescriptorSet>( descriptorSet ),
                                            static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                            reinterpret_cast<const void *>( &data ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
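  // Editor's note (sketch, not upstream code): the VK_KHR_descriptor_update_template wrappers above
  // in one line each; `device`, a template `createInfo`, a descriptor `set` and a POD `data` whose
  // layout matches the template entries are assumed.
  //
  //   auto tmpl = device.createDescriptorUpdateTemplateKHRUnique( createInfo );
  //   device.updateDescriptorSetWithTemplateKHR( set, *tmpl, data );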
  //=== VK_NV_clip_space_w_scaling ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdSetViewportWScalingNV(
      m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_EXT_direct_mode_display ===

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
  //=== VK_EXT_acquire_xlib_display ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display * dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
    PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DisplayKHR display;
    VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
    PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DisplayKHR display;
    VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
  //=== VK_EXT_display_surface_counter ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
      m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type
    PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
    VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
      m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
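  // Editor's note (sketch, not upstream code): querying vblank-counter support through the
  // VK_EXT_display_surface_counter wrapper above; `physicalDevice` and `surface` are assumed.
  //
  //   vk::SurfaceCapabilities2EXT caps = physicalDevice.getSurfaceCapabilities2EXT( surface );
  //   bool hasVblankCounter = static_cast<bool>( caps.supportedSurfaceCounters & vk::SurfaceCounterFlagBitsEXT::eVblank );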
  //=== VK_EXT_display_control ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result =
      d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device,
                                                            reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ),
                                                            reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                            reinterpret_cast<VkFence *>( pFence ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
    Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Fence fence;
    VkResult result = d.vkRegisterDeviceEventEXT(
      m_device,
      reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkFence *>( &fence ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
    Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Fence fence;
    VkResult result = d.vkRegisterDeviceEventEXT(
      m_device,
      reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkFence *>( &fence ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device,
                                                             static_cast<VkDisplayKHR>( display ),
                                                             reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ),
                                                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                             reinterpret_cast<VkFence *>( pFence ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
    Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Fence fence;
    VkResult result = d.vkRegisterDisplayEventEXT(
      m_device,
      static_cast<VkDisplayKHR>( display ),
      reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkFence *>( &fence ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
    Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                           const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                           Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::Fence fence;
    VkResult result = d.vkRegisterDisplayEventEXT(
      m_device,
      static_cast<VkDisplayKHR>( display ),
      reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkFence *>( &fence ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t * pCounterValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
    Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    uint64_t counterValue;
    VkResult result =
      d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), counterValue );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_GOOGLE_display_timing ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
      m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type
    Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
    VkResult result = d.vkGetRefreshCycleDurationGOOGLE(
      m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displayTimingProperties );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                                                         uint32_t * pPresentationTimingCount,
                                                                                         VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE *
                                                                                           pPresentationTimings,
                                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device,
                                                                     static_cast<VkSwapchainKHR>( swapchain ),
                                                                     pPresentationTimingCount,
                                                                     reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
    Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings;
    uint32_t presentationTimingCount;
    VkResult result;
    do
    {
      result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr );
      if ( ( result == VK_SUCCESS ) && presentationTimingCount )
      {
        presentationTimings.resize( presentationTimingCount );
        result = d.vkGetPastPresentationTimingGOOGLE( m_device,
                                                      static_cast<VkSwapchainKHR>( swapchain ),
                                                      &presentationTimingCount,
                                                      reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
    VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
    if ( presentationTimingCount < presentationTimings.size() )
    {
      presentationTimings.resize( presentationTimingCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings );
  }

  template <typename PastPresentationTimingGOOGLEAllocator,
            typename Dispatch,
            typename std::enable_if<
              std::is_same<typename PastPresentationTimingGOOGLEAllocator::value_type, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value,
              int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
    Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                             PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,
                                             Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings(
      pastPresentationTimingGOOGLEAllocator );
    uint32_t presentationTimingCount;
    VkResult result;
    do
    {
      result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr );
      if ( ( result == VK_SUCCESS ) && presentationTimingCount )
      {
        presentationTimings.resize( presentationTimingCount );
        result = d.vkGetPastPresentationTimingGOOGLE( m_device,
                                                      static_cast<VkSwapchainKHR>( swapchain ),
                                                      &presentationTimingCount,
                                                      reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
    VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
    if ( presentationTimingCount < presentationTimings.size() )
    {
      presentationTimings.resize( presentationTimingCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
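  // Editor's note (sketch, not upstream code): typical VK_GOOGLE_display_timing usage with the
  // wrappers above; `device` and `swapchain` are assumed.
  //
  //   vk::RefreshCycleDurationGOOGLE refresh = device.getRefreshCycleDurationGOOGLE( swapchain );
  //   std::vector<vk::PastPresentationTimingGOOGLE> timings = device.getPastPresentationTimingGOOGLE( swapchain );
  //   // `timings` may be empty; entries report actual vs. desired presentation times of past frames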
  //=== VK_EXT_discard_rectangles ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdSetDiscardRectangleEXT(
      m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDiscardRectangleEnableEXT( m_commandBuffer, static_cast<VkBool32>( discardRectangleEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDiscardRectangleModeEXT( m_commandBuffer, static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) );
  }

  //=== VK_EXT_hdr_metadata ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkSetHdrMetadataEXT(
      m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
                                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
# else
    if ( swapchains.size() != metadata.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkSetHdrMetadataEXT( m_device,
                           swapchains.size(),
                           reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ),
                           reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_KHR_create_renderpass2 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device,
                                                          reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
                                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                          reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
    Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    VkResult result =
      d.vkCreateRenderPass2KHR( m_device,
                                reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkRenderPass *>( &renderPass ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
    Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    VkResult result =
      d.vkCreateRenderPass2KHR( m_device,
                                reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkRenderPass *>( &renderPass ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" );

    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginRenderPass2KHR(
      m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdBeginRenderPass2KHR(
      m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdNextSubpass2KHR(
      m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdNextSubpass2KHR(
      m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
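  // Editor's note (sketch, not upstream code): driving a render pass through the
  // VK_KHR_create_renderpass2 wrappers above; `device`, `cmd`, `createInfo2` and `renderPassBegin`
  // are assumed, and vk::UniqueRenderPass presumes the default dispatcher.
  //
  //   vk::UniqueRenderPass renderPass = device.createRenderPass2KHRUnique( createInfo2 );
  //   cmd.beginRenderPass2KHR( renderPassBegin, vk::SubpassBeginInfo( vk::SubpassContents::eInline ) );
  //   // ... record draws ...
  //   cmd.endRenderPass2KHR( vk::SubpassEndInfo() );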
  //=== VK_KHR_shared_presentable_image ===

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_KHR_external_fence_capabilities ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
                                                     reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
                                                     reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
                                                     reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
                                                     reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );

    return externalFenceProperties;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_KHR_external_fence_win32 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
    Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    HANDLE   handle;
    VkResult result = d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  //=== VK_KHR_external_fence_fd ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type
    Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    int      fd;
    VkResult result = d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
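  // Editor's note (sketch, not upstream code): exporting a fence payload as a sync fd with the
  // VK_KHR_external_fence_fd wrappers above; `device` and an exportable `fence` are assumed.
  //
  //   vk::FenceGetFdInfoKHR getInfo( fence, vk::ExternalFenceHandleTypeFlagBits::eSyncFd );
  //   int fd = device.getFenceFdKHR( getInfo );  // may be -1 for an already-signaled sync-fd payload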
  //=== VK_KHR_performance_query ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t * pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice,
                                                                         queueFamilyIndex,
                                                                         pCounterCount,
                                                                         reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ),
                                                                         reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
                                       std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
    PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
              std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
      data_;
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data_.first;
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second;
    uint32_t counterCount;
    VkResult result;
    do
    {
      result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr );
      if ( ( result == VK_SUCCESS ) && counterCount )
      {
        counters.resize( counterCount );
        counterDescriptions.resize( counterCount );
        result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
          m_physicalDevice,
          queueFamilyIndex,
          &counterCount,
          reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
          reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
    VULKAN_HPP_ASSERT( counterCount <= counters.size() );
    if ( counterCount < counters.size() )
    {
      counters.resize( counterCount );
      counterDescriptions.resize( counterCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  }

  template <typename PerformanceCounterKHRAllocator,
            typename PerformanceCounterDescriptionKHRAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename PerformanceCounterKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value &&
                                      std::is_same<typename PerformanceCounterDescriptionKHRAllocator::value_type,
                                                   VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value,
                                    int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
                                       std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
    PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex,
                                                                     PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,
                                                                     PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
                                                                     Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
              std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
      data_(
        std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data_.first;
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second;
    uint32_t counterCount;
    VkResult result;
    do
    {
      result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr );
      if ( ( result == VK_SUCCESS ) && counterCount )
      {
        counters.resize( counterCount );
        counterDescriptions.resize( counterCount );
        result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
          m_physicalDevice,
          queueFamilyIndex,
          &counterCount,
          reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
          reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
    VULKAN_HPP_ASSERT( counterCount <= counters.size() );
    if ( counterCount < counters.size() )
    {
      counters.resize( counterCount );
      counterDescriptions.resize( counterCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, uint32_t * pNumPasses, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
      m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
    const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    uint32_t numPasses;
    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
      m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses );

    return numPasses;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkReleaseProfilingLockKHR( m_device );
  }
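  // Editor's note (sketch, not upstream code): the VK_KHR_performance_query flow with the wrappers
  // above; `physicalDevice`, `device` and `queueFamilyIndex` are assumed, and the profiling lock
  // must be held while command buffers containing performance queries are submitted.
  //
  //   auto [counters, descriptions] = physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
  //   device.acquireProfilingLockKHR( vk::AcquireProfilingLockInfoKHR( {}, UINT64_MAX ) );
  //   // ... submit work containing performance queries ...
  //   device.releaseProfilingLockKHR();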
  //=== VK_KHR_get_surface_capabilities2 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
                                                    reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
                                                    reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type
    PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
    VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
                                                                    reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                                    reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
    PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
    VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
                                                                    reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                                    reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, uint32_t * pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                                         reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
                                                                         pSurfaceFormatCount,
                                                                         reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SurfaceFormat2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
    PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
    uint32_t surfaceFormatCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                          reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                          &surfaceFormatCount,
                                                          reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return
      createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
  }

  template <typename SurfaceFormat2KHRAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename SurfaceFormat2KHRAllocator::value_type, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
    PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                           SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator,
                                           Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
    uint32_t surfaceFormatCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                          reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                          &surfaceFormatCount,
                                                          reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
  }

  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type
    PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<StructureChain, StructureChainAllocator> structureChains;
    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
    uint32_t surfaceFormatCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
      {
        structureChains.resize( surfaceFormatCount );
        surfaceFormats.resize( surfaceFormatCount );
        for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
        {
          surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
        }
        result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                          reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                          &surfaceFormatCount,
                                                          reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      structureChains.resize( surfaceFormatCount );
    }
    for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains );
  }

  template <typename StructureChain,
            typename StructureChainAllocator,
            typename Dispatch,
            typename std::enable_if<std::is_same<typename StructureChainAllocator::value_type, StructureChain>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type
    PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                           StructureChainAllocator & structureChainAllocator,
                                           Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
    uint32_t surfaceFormatCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
        m_physicalDevice,
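        // Editor's note (sketch, not upstream code; placed here as a comment so the surrounding
        // definition stays intact): enumerating extended surface formats with the
        // VK_KHR_get_surface_capabilities2 wrappers in this section; `physicalDevice` and
        // `surface` are assumed.
        //
        //   std::vector<vk::SurfaceFormat2KHR> formats =
        //     physicalDevice.getSurfaceFormats2KHR( vk::PhysicalDeviceSurfaceInfo2KHR( surface ) );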
reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ); + if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) + { + structureChains.resize( surfaceFormatCount ); + surfaceFormats.resize( surfaceFormatCount ); + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + surfaceFormats[i].pNext = structureChains[i].template get().pNext; + } + result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + structureChains.resize( surfaceFormatCount ); + } + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + structureChains[i].template get() = surfaceFormats[i]; + } + return createResultValueType( static_cast( result ), structureChains ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_get_display_properties2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties; + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = + d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties( displayProperties2KHRAllocator ); + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = + d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::getDisplayProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties; + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties( displayPlaneProperties2KHRAllocator ); + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), 
pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties; + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties( displayModeProperties2KHRAllocator ); + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast( result ), properties ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, + reinterpret_cast( pDisplayPlaneInfo ), + reinterpret_cast( pCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; + VkResult result = d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, + reinterpret_cast( &displayPlaneInfo ), + 
reinterpret_cast( &capabilities ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + + return createResultValueType( static_cast( result ), capabilities ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" ); + + return createResultValueType( static_cast( result ), surface ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( 
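+
+    // Usage sketch for the MoltenVK surface wrappers in this block (illustrative;
+    // `instance` is assumed valid and `pNSView` a CAMetalLayer-backed NSView pointer):
+    //
+    //   vk::MacOSSurfaceCreateInfoMVK createInfo{ {}, pNSView };
+    //   vk::SurfaceKHR surface = instance.createMacOSSurfaceMVK( createInfo );
+    //
+    // On iOS the flow is identical with vk::IOSSurfaceCreateInfoMVK / createIOSSurfaceMVK.
+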
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" ); + + return createResultValueType( static_cast( result ), surface ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + 
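+
+  // Usage sketch for the VK_EXT_debug_utils naming wrapper above (illustrative; `device`
+  // and `image` are assumed valid, the extension enabled on the instance, and a 64-bit
+  // handle conversion via a functional cast):
+  //
+  //   vk::DebugUtilsObjectNameInfoEXT nameInfo{ vk::ObjectType::eImage,
+  //                                             uint64_t( static_cast<VkImage>( image ) ),
+  //                                             "albedo-texture" };
+  //   device.setDebugUtilsObjectNameEXT( nameInfo );
+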
<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /*
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMessenger ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; + VkResult result = d.vkCreateDebugUtilsMessengerEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &messenger ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" ); + + return createResultValueType( static_cast( result ), messenger ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; + VkResult result = d.vkCreateDebugUtilsMessengerEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &messenger ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( messenger, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, + static_cast( messenger ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const 
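+
+  // Usage sketch for the messenger wrappers above (illustrative; assumes VK_EXT_debug_utils
+  // is enabled and a dispatcher that fetched the extension entry points, e.g. the dynamic one):
+  //
+  //   auto messenger = instance.createDebugUtilsMessengerEXTUnique( vk::DebugUtilsMessengerCreateInfoEXT{
+  //     {},
+  //     vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
+  //     vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
+  //     &debugCallback } );   // debugCallback: a PFN_vkDebugUtilsMessengerCallbackEXT
+  //
+  // The UniqueHandle destroys the messenger automatically when it goes out of scope.
+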
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, + static_cast( messenger ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSubmitDebugUtilsMessageEXT( m_instance, + static_cast( messageSeverity ), + static_cast( messageTypes ), + reinterpret_cast( pCallbackData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkSubmitDebugUtilsMessageEXT( m_instance, + static_cast( messageSeverity ), + static_cast( messageTypes ), + reinterpret_cast( &callbackData ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer, + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast( pProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; + VkResult result = + d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast( &properties ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + + return createResultValueType( static_cast( result ), properties ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + StructureChain structureChain; + 
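+
+    // Usage sketch for submitDebugUtilsMessageEXT above (illustrative; injects a custom
+    // message into every registered messenger, e.g. to mark frames in a capture):
+    //
+    //   vk::DebugUtilsMessengerCallbackDataEXT data{};
+    //   data.pMessage = "---- frame begins ----";
+    //   instance.submitDebugUtilsMessageEXT( vk::DebugUtilsMessageSeverityFlagBitsEXT::eInfo,
+    //                                        vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral,
+    //                                        data );
+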
VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = + structureChain.template get(); + VkResult result = + d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast( &properties ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + + return createResultValueType( static_cast( result ), structureChain ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast( pInfo ), pBuffer ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + struct AHardwareBuffer * buffer; + VkResult result = + d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast( &info ), &buffer ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); + + return createResultValueType( static_cast( result ), buffer ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateExecutionGraphPipelinesAMDX( m_device, + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createExecutionGraphPipelinesAMDX( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size() ); + VkResult result = d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( static_cast( result ), pipelines ); + } + + template ::value, 
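+
+    // Usage sketch for the AHardwareBuffer import path above (illustrative; `ahb` is
+    // assumed to be a valid AHardwareBuffer* obtained from the Android side):
+    //
+    //   vk::AndroidHardwareBufferPropertiesANDROID props =
+    //     device.getAndroidHardwareBufferPropertiesANDROID( *ahb );
+    //   // props.allocationSize and props.memoryTypeBits parameterize the
+    //   // vk::MemoryAllocateInfo used to import the buffer's memory.
+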
int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createExecutionGraphPipelinesAMDX( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size(), pipelineAllocator ); + VkResult result = d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( static_cast( result ), pipelines ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VkResult result = d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDX", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue( static_cast( result ), pipeline ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createExecutionGraphPipelinesAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size() ); + VkResult result = d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( + static_cast( result ), std::move( uniquePipelines ) ); + } + + template >::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createExecutionGraphPipelinesAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & 
pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size() ); + VkResult result = d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( + static_cast( result ), std::move( uniquePipelines ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createExecutionGraphPipelineAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VkResult result = d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDXUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( + static_cast( result ), + UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetExecutionGraphPipelineScratchSizeAMDX( + m_device, static_cast( executionGraph ), reinterpret_cast( pSizeInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo; + VkResult result = d.vkGetExecutionGraphPipelineScratchSizeAMDX( + m_device, static_cast( executionGraph ), reinterpret_cast( &sizeInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineScratchSizeAMDX" ); + + return createResultValueType( static_cast( result ), sizeInfo ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getExecutionGraphPipelineNodeIndexAMDX( 
VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, + uint32_t * pNodeIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetExecutionGraphPipelineNodeIndexAMDX( + m_device, static_cast( executionGraph ), reinterpret_cast( pNodeInfo ), pNodeIndex ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getExecutionGraphPipelineNodeIndexAMDX( + VULKAN_HPP_NAMESPACE::Pipeline executionGraph, const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + uint32_t nodeIndex; + VkResult result = d.vkGetExecutionGraphPipelineNodeIndexAMDX( + m_device, static_cast( executionGraph ), reinterpret_cast( &nodeInfo ), &nodeIndex ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineNodeIndexAMDX" ); + + return createResultValueType( static_cast( result ), nodeIndex ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdInitializeGraphScratchMemoryAMDX( m_commandBuffer, static_cast( scratch ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast( scratch ), reinterpret_cast( pCountInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast( scratch ), reinterpret_cast( &countInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchGraphIndirectAMDX( + m_commandBuffer, static_cast( scratch ), reinterpret_cast( pCountInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdDispatchGraphIndirectAMDX( + m_commandBuffer, static_cast( scratch ), reinterpret_cast( &countInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void 
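+
+  // Usage sketch for the provisional VK_AMDX_shader_enqueue wrappers (illustrative; the
+  // extension is beta, so VK_ENABLE_BETA_EXTENSIONS must be defined, and `graphPipeline`,
+  // `scratchAddress` and `countInfo` are assumed to be set up per the extension spec):
+  //
+  //   vk::ExecutionGraphPipelineScratchSizeAMDX size = device.getExecutionGraphPipelineScratchSizeAMDX( graphPipeline );
+  //   cmd.initializeGraphScratchMemoryAMDX( scratchAddress );
+  //   cmd.dispatchGraphAMDX( scratchAddress, countInfo );
+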
CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+                                                 VULKAN_HPP_NAMESPACE::DeviceAddress countInfo,
+                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDispatchGraphIndirectCountAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), static_cast<VkDeviceAddress>( countInfo ) );
+  }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_EXT_sample_locations ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+                                                                      VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
+                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
+      m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
+    PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
+    d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
+      m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
+
+    return multisampleProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_get_memory_requirements2 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
+                                                                 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageMemoryRequirements2KHR(
+      m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+    Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetImageMemoryRequirements2KHR(
+      m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetImageMemoryRequirements2KHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetBufferMemoryRequirements2KHR( + m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2KHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetBufferMemoryRequirements2KHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + 
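+
+    // Usage sketch for the StructureChain overloads above (illustrative; `image` is assumed
+    // valid and the device to support VK_KHR_dedicated_allocation or Vulkan 1.1):
+    //
+    //   auto chain = device.getImageMemoryRequirements2KHR<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
+    //     vk::ImageMemoryRequirementsInfo2{ image } );
+    //   bool dedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;
+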
d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_acceleration_structure === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateAccelerationStructureKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pAccelerationStructure ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + VkResult result = d.vkCreateAccelerationStructureKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" ); + + return createResultValueType( static_cast( result ), accelerationStructure ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + 
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + VkResult result = d.vkCreateAccelerationStructureKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( accelerationStructure, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureKHR( + m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyAccelerationStructureKHR( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureKHR( + m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyAccelerationStructureKHR( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void + CommandBuffer::buildAccelerationStructuresKHR( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( ppBuildRangeInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == 
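+
+    // Usage sketch for the acceleration-structure creation wrappers above (illustrative;
+    // `asBuffer` and `buildSizes` are assumed to come from getAccelerationStructureBuildSizesKHR):
+    //
+    //   vk::AccelerationStructureCreateInfoKHR asInfo{ {}, asBuffer, 0, buildSizes.accelerationStructureSize,
+    //                                                  vk::AccelerationStructureTypeKHR::eBottomLevel };
+    //   auto blas = device.createAccelerationStructureKHRUnique( asInfo );
+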
pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( pIndirectDeviceAddresses ), + pIndirectStrides, + ppMaxPrimitiveCounts ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectDeviceAddresses, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectStrides, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pMaxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() ); + VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() ); + VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() ); +# else + if ( infos.size() != indirectDeviceAddresses.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" ); + } + if ( infos.size() != indirectStrides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" ); + } + if ( infos.size() != pMaxPrimitiveCounts.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( indirectDeviceAddresses.data() ), + indirectStrides.data(), + pMaxPrimitiveCounts.data() ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkBuildAccelerationStructuresKHR( m_device, + static_cast( deferredOperation ), + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( 
ppBuildRangeInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + VkResult result = + d.vkBuildAccelerationStructuresKHR( m_device, + static_cast( deferredOperation ), + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ); + resultCheck( + static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyAccelerationStructureKHR( + m_device, static_cast( deferredOperation ), reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkCopyAccelerationStructureKHR( + m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ); + resultCheck( + static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyAccelerationStructureToMemoryKHR( + m_device, static_cast( deferredOperation ), reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const 
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkCopyAccelerationStructureToMemoryKHR( + m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ); + resultCheck( + static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyMemoryToAccelerationStructureKHR( + m_device, static_cast( deferredOperation ), reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkCopyMemoryToAccelerationStructureKHR( + m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ); + resultCheck( + static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + dataSize, + pData, + stride ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + data.size() * sizeof( DataType ), + 
reinterpret_cast( data.data() ), + stride ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); + + return createResultValueType( static_cast( result ), data ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::writeAccelerationStructuresPropertyKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + DataType data; + VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + sizeof( DataType ), + reinterpret_cast( &data ), + stride ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" ); + + return createResultValueType( static_cast( result ), data ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdCopyMemoryToAccelerationStructureKHR( 
m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkDeviceAddress result = + d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast( &info ) ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void + CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, + reinterpret_cast( pVersionInfo ), + reinterpret_cast( pCompatibility ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; + d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, + reinterpret_cast( &versionInfo ), + 
reinterpret_cast( &compatibility ) ); + + return compatibility; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetAccelerationStructureBuildSizesKHR( m_device, + static_cast( buildType ), + reinterpret_cast( pBuildInfo ), + pMaxPrimitiveCounts, + reinterpret_cast( pSizeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, + VULKAN_HPP_NAMESPACE::ArrayProxy const & maxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount ); +# else + if ( maxPrimitiveCounts.size() != buildInfo.geometryCount ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + d.vkGetAccelerationStructureBuildSizesKHR( m_device, + static_cast( buildType ), + reinterpret_cast( &buildInfo ), + maxPrimitiveCounts.data(), + reinterpret_cast( &sizeInfo ) ); + + return sizeInfo; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_ray_tracing_pipeline === + + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysKHR( m_commandBuffer, + reinterpret_cast( pRaygenShaderBindingTable ), + reinterpret_cast( pMissShaderBindingTable ), + reinterpret_cast( pHitShaderBindingTable ), + reinterpret_cast( pCallableShaderBindingTable ), + width, + height, + depth ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); + + d.vkCmdTraceRaysKHR( m_commandBuffer, + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + width, + height, + depth ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size() ); + VkResult result = d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( static_cast( result ), pipelines ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size(), pipelineAllocator ); + VkResult result = d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( static_cast( result ), pipelines ); + } + + 
template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VkResult result = d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue( static_cast( result ), pipeline ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size() ); + VkResult result = d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( + static_cast( result ), std::move( uniquePipelines ) ); + } + + template >::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size() ); + VkResult result = d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING 
"::Device::createRayTracingPipelinesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( + static_cast( result ), std::move( uniquePipelines ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VkResult result = d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( + static_cast( result ), + UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VkResult result = d.vkGetRayTracingShaderGroupHandlesKHR( + m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); + + return createResultValueType( static_cast( result ), data ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    DataType data;
+    VkResult result = d.vkGetRayTracingShaderGroupHandlesKHR(
+      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+                                                                                                         uint32_t firstGroup,
+                                                                                                         uint32_t groupCount,
+                                                                                                         size_t dataSize,
+                                                                                                         void * pData,
+                                                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
+    Device::getRayTracingCaptureReplayShaderGroupHandlesKHR(
+      VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+    VkResult result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
+      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR(
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    DataType data;
+    VkResult result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
+      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
+                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
+                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
+                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
+                                                              VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
+                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
+                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
+                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
+                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
+                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
+                                 static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
+                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
+                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
+                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
+                                                              VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
+                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
+                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
+                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
+                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
+                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
+                                 static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceSize Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+                                                                             uint32_t group,
+                                                                             VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,
+                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<DeviceSize>(
+      d.vkGetRayTracingShaderGroupStackSizeKHR( m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize );
+  }
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
+                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                             VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
+                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device,
+                                                                     reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
+                                                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                                     reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
+    Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
+                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                             Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    VkResult result = d.vkCreateSamplerYcbcrConversionKHR(
+      m_device,
+      reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
+    Device::createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
+                                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>
allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + VkResult result = d.vkCreateSamplerYcbcrConversionKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( ycbcrConversion, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySamplerYcbcrConversionKHR( + m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroySamplerYcbcrConversionKHR( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_bind_memory2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = 
d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_image_drm_format_modifier === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( + VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( + m_device, static_cast( image ), reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; + VkResult result = d.vkGetImageDrmFormatModifierPropertiesEXT( + m_device, static_cast( image ), reinterpret_cast( &properties ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" ); + + return createResultValueType( static_cast( result ), properties ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_validation_cache === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateValidationCacheEXT( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pValidationCache ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; + VkResult result = d.vkCreateValidationCacheEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" ); + + return createResultValueType( static_cast( result ), validationCache ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; + VkResult result = d.vkCreateValidationCacheEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( 
static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( validationCache, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyValidationCacheEXT( + m_device, static_cast( validationCache ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyValidationCacheEXT( + m_device, + static_cast( validationCache ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyValidationCacheEXT( + m_device, static_cast( validationCache ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyValidationCacheEXT( + m_device, + static_cast( validationCache ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkMergeValidationCachesEXT( + m_device, static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkMergeValidationCachesEXT( + m_device, static_cast( dstCache ), srcCaches.size(), reinterpret_cast( srcCaches.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
+                                       size_t * pDataSize,
+                                       void * pData,
+                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Uint8_tAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+    Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<uint8_t, Uint8_tAllocator> data;
+    size_t dataSize;
+    VkResult result;
+    do
+    {
+      result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && dataSize )
+      {
+        data.resize( dataSize );
+        result =
+          d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
+    VULKAN_HPP_ASSERT( dataSize <= data.size() );
+    if ( dataSize < data.size() )
+    {
+      data.resize( dataSize );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  template <typename Uint8_tAllocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename Uint8_tAllocator::value_type, uint8_t>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
+    Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
+    size_t dataSize;
+    VkResult result;
+    do
+    {
+      result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && dataSize )
+      {
+        data.resize( dataSize );
+        result =
+          d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
+    VULKAN_HPP_ASSERT( dataSize <= data.size() );
+    if ( dataSize < data.size() )
+    {
+      data.resize( dataSize );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_shading_rate_image ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
+                                                                VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport,
+                                                                         uint32_t viewportCount,
+                                                                         const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
+                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetViewportShadingRatePaletteNV(
+      m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
CommandBuffer::setViewportShadingRatePaletteNV( + uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shadingRatePalettes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdSetViewportShadingRatePaletteNV( + m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast( shadingRatePalettes.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, + static_cast( sampleOrderType ), + customSampleOrderCount, + reinterpret_cast( pCustomSampleOrders ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + VULKAN_HPP_NAMESPACE::ArrayProxy const & customSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, + static_cast( sampleOrderType ), + customSampleOrders.size(), + reinterpret_cast( customSampleOrders.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_ray_tracing === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateAccelerationStructureNV( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pAccelerationStructure ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; + VkResult result = d.vkCreateAccelerationStructureNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" ); + + return createResultValueType( static_cast( result ), accelerationStructure ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; + VkResult result = 
d.vkCreateAccelerationStructureNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( accelerationStructure, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureNV( + m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyAccelerationStructureNV( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureNV( + m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyAccelerationStructureNV( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void + Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR + Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements; + d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, + reinterpret_cast( &info ), + reinterpret_cast( 
&memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get(); + d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( + uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindAccelerationStructureMemoryNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkBindAccelerationStructureMemoryNV( + m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, + reinterpret_cast( pInfo ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, + reinterpret_cast( &info ), + 
static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, + static_cast( dst ), + static_cast( src ), + static_cast( mode ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysNV( m_commandBuffer, + static_cast( raygenShaderBindingTableBuffer ), + static_cast( raygenShaderBindingOffset ), + static_cast( missShaderBindingTableBuffer ), + static_cast( missShaderBindingOffset ), + static_cast( missShaderBindingStride ), + static_cast( hitShaderBindingTableBuffer ), + static_cast( hitShaderBindingOffset ), + static_cast( hitShaderBindingStride ), + static_cast( callableShaderBindingTableBuffer ), + static_cast( callableShaderBindingOffset ), + static_cast( callableShaderBindingStride ), + width, + height, + depth ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateRayTracingPipelinesNV( m_device, + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size() ); + VkResult result = d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( 
static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( static_cast( result ), pipelines ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size(), pipelineAllocator ); + VkResult result = d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( static_cast( result ), pipelines ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VkResult result = d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue( static_cast( result ), pipeline ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size() ); + VkResult result = d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( + static_cast( result ), std::move( uniquePipelines ) ); + } + + template 
>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size() ); + VkResult result = d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( + static_cast( result ), std::move( uniquePipelines ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VkResult result = d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( + static_cast( result ), + UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesNV( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VkResult result = d.vkGetRayTracingShaderGroupHandlesNV( + m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ); + 
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
+    Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    DataType data;
+    VkResult result = d.vkGetRayTracingShaderGroupHandlesNV(
+      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
+                                                                                          size_t dataSize,
+                                                                                          void * pData,
+                                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
+    Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+    VkResult result = d.vkGetAccelerationStructureHandleNV(
+      m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
+    Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    DataType data;
+    VkResult result = d.vkGetAccelerationStructureHandleNV(
+      m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount,
+                                                                                 const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
+                                                                                 VULKAN_HPP_NAMESPACE::QueryType queryType,
+                                                                                 VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                                                                 uint32_t firstQuery,
+                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
+                                                    accelerationStructureCount,
+                                                    reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ),
+                                                    static_cast<VkQueryType>( queryType ),
+                                                    static_cast<VkQueryPool>( queryPool ),
+ firstQuery ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t shader, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_maintenance3 === + + template + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetLayoutSupportKHR( + m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pSupport ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupportKHR( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + + return support; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get(); + d.vkGetDescriptorSetLayoutSupportKHR( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_draw_indirect_count === + + template + 
VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirectCountKHR( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_EXT_external_memory_host === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryHostPointerPropertiesEXT( m_device, + static_cast( handleType ), + pHostPointer, + reinterpret_cast( pMemoryHostPointerProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; + VkResult result = d.vkGetMemoryHostPointerPropertiesEXT( m_device, + static_cast( handleType ), + pHostPointer, + reinterpret_cast( &memoryHostPointerProperties ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); + + return createResultValueType( static_cast( result ), memoryHostPointerProperties ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_buffer_marker === + + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, + static_cast( pipelineStage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + + //=== VK_EXT_calibrated_timestamps === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector timeDomains; + uint32_t timeDomainCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ); + if ( ( result == VK_SUCCESS ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return createResultValueType( static_cast( result ), timeDomains ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector timeDomains( timeDomainKHRAllocator ); + uint32_t timeDomainCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ); + if ( ( result == VK_SUCCESS ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return createResultValueType( static_cast( result ), timeDomains ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetCalibratedTimestampsEXT( + m_device, timestampCount, reinterpret_cast( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, uint64_t>>::type + Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::pair, uint64_t> data_( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + 
std::vector<uint64_t> & timestamps = data_.first;
+    uint64_t & maxDeviation = data_.second;
+    VkResult result = d.vkGetCalibratedTimestampsEXT(
+      m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
+  }
+
+  template <typename Uint64_tAllocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
+    Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,
+                                        Uint64_tAllocator & uint64_tAllocator,
+                                        Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_(
+      std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) );
+    std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first;
+    uint64_t & maxDeviation = data_.second;
+    VkResult result = d.vkGetCalibratedTimestampsEXT(
+      m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type
+    Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::pair<uint64_t, uint64_t> data_;
+    uint64_t & timestamp    = data_.first;
+    uint64_t & maxDeviation = data_.second;
+    VkResult result =
+      d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( &timestampInfo ), &timestamp, &maxDeviation );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_mesh_shader ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                                 VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                 uint32_t drawCount,
+                                                                 uint32_t stride,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                                      VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                      VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+                                                                      VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                                                      uint32_t maxDrawCount,
+                                                                      uint32_t stride,
+                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer,
+                                         static_cast<VkBuffer>( buffer ),
+                                         static_cast<VkDeviceSize>( offset ),
+                                         static_cast<VkBuffer>( countBuffer ),
+                                         static_cast<VkDeviceSize>( countBufferOffset ),
+                                         maxDrawCount,
+                                         stride );
+  }
+
+  //=== VK_NV_scissor_exclusive ===
+
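+  // The wrappers below map one-to-one onto vkCmdSetExclusiveScissor(Enable)NV.
+  // A minimal usage sketch (illustration only, not part of the generated header;
+  // it assumes the default "vk" namespace and a command buffer "cb" in the
+  // recording state on a device created with VK_NV_scissor_exclusive enabled):
+  //
+  //   std::array<vk::Rect2D, 1> scissors = { vk::Rect2D( vk::Offset2D( 0, 0 ), vk::Extent2D( 640, 480 ) ) };
+  //   cb.setExclusiveScissorNV( 0, scissors );   // ArrayProxy overload; the count is taken from scissors.size()
+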
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor,
+                                                                     uint32_t exclusiveScissorCount,
+                                                                     const VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables,
+                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetExclusiveScissorEnableNV(
+      m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkBool32 *>( pExclusiveScissorEnables ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor,
+                                                VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables,
+                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdSetExclusiveScissorEnableNV(
+      m_commandBuffer, firstExclusiveScissor, exclusiveScissorEnables.size(), reinterpret_cast<const VkBool32 *>( exclusiveScissorEnables.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+                                                               uint32_t exclusiveScissorCount,
+                                                               const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdSetExclusiveScissorNV(
+      m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename CheckpointMarkerType, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast<const void *>( &checkpointMarker ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount,
+                                                     VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename CheckpointDataNVAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
+    Queue::getCheckpointDataNV( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    if ( checkpointDataCount < checkpointData.size() )
+    {
+      checkpointData.resize( checkpointDataCount );
+    }
+    return checkpointData;
+  }
+
+  template <typename CheckpointDataNVAllocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename CheckpointDataNVAllocator::value_type, VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
+    Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+
+    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+    if ( checkpointDataCount < checkpointData.size() )
+    {
+      checkpointData.resize( checkpointDataCount );
+    }
+    return checkpointData;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_timeline_semaphore ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
+                                                                                     uint64_t * pValue,
+                                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
+    Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    uint64_t value;
+    VkResult result = d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
+                                                                           uint64_t timeout,
+                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+    Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( pSignalInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( &signalInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_INTEL_performance_query === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( + const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( pInitializeInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( &initializeInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUninitializePerformanceApiINTEL( m_device ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast( pOverrideInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast( &overrideInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAcquirePerformanceConfigurationINTEL( m_device, + reinterpret_cast( pAcquireInfo ), + reinterpret_cast( pConfiguration ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; + VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device, + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &configuration ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" ); + + return createResultValueType( static_cast( result ), configuration ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + 
Device::acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; + VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device, + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &configuration ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( configuration, ObjectRelease( *this, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::release" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast( configuration ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::setPerformanceConfigurationINTEL( 
VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast( configuration ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPerformanceParameterINTEL( + m_device, static_cast( parameter ), reinterpret_cast( pValue ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; + VkResult result = d.vkGetPerformanceParameterINTEL( + m_device, static_cast( parameter ), reinterpret_cast( &value ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); + + return createResultValueType( static_cast( result ), value ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_display_native_hdr === + + template + VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSetLocalDimmingAMD( m_device, static_cast( swapChain ), static_cast( localDimmingEnable ) ); + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" ); + + return createResultValueType( 
static_cast( result ), surface ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = + d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" ); + + return createResultValueType( static_cast( result ), surface ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VkResult result = + d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount, + 
VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( + m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast( pFragmentShadingRates ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector fragmentShadingRates; + uint32_t fragmentShadingRateCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ); + if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( + m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast( fragmentShadingRates.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); + if ( fragmentShadingRateCount < fragmentShadingRates.size() ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + } + return createResultValueType( static_cast( result ), fragmentShadingRates ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector fragmentShadingRates( + physicalDeviceFragmentShadingRateKHRAllocator ); + uint32_t fragmentShadingRateCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ); + if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( + m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast( fragmentShadingRates.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); + if ( fragmentShadingRateCount < fragmentShadingRates.size() ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + } + return createResultValueType( static_cast( result ), fragmentShadingRates ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetFragmentShadingRateKHR( + m_commandBuffer, reinterpret_cast( pFragmentSize ), reinterpret_cast( combinerOps ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void 
CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdSetFragmentShadingRateKHR( + m_commandBuffer, reinterpret_cast( &fragmentSize ), reinterpret_cast( combinerOps ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_buffer_device_address === + + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast( &info ) ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_tooling_info === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast( pToolProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector toolProperties; + uint32_t toolCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ); + if ( ( result == VK_SUCCESS ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = + d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + return createResultValueType( static_cast( result ), toolProperties ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector toolProperties( + physicalDeviceToolPropertiesAllocator ); + uint32_t toolCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ); + if ( ( result == VK_SUCCESS ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = + 
  //=== VK_KHR_present_wait ===

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                                           uint64_t presentId,
                                                                           uint64_t timeout,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
    Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
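  // Usage sketch (illustrative only; assumes VK_KHR_present_wait is enabled and that
  // `presentId` was attached to the matching vkQueuePresentKHR call via vk::PresentIdKHR):
  // block until that image is actually on screen, or ~16 ms elapse.
  //
  //   vk::Result r = device.waitForPresentKHR( swapchain, presentId, 16'000'000 /* ns */ );
  //   // r is eSuccess, eTimeout or eSuboptimalKHR; any other code throws in the path above.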
  //=== VK_NV_cooperative_matrix ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV(
    uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
      m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
    PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties;
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }

  template <typename CooperativeMatrixPropertiesNVAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, CooperativeMatrixPropertiesNV>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
    PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator,
                                                      Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties(
      cooperativeMatrixPropertiesNVAllocator );
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  //=== VK_NV_coverage_reduction_mode ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
    uint32_t * pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
      m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
    PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations;
    uint32_t combinationCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr );
      if ( ( result == VK_SUCCESS ) && combinationCount )
      {
        combinations.resize( combinationCount );
        result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
          m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
    VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
    if ( combinationCount < combinations.size() ) { combinations.resize( combinationCount ); }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), combinations );
  }

  template <typename FramebufferMixedSamplesCombinationNVAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, FramebufferMixedSamplesCombinationNV>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
    PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
      FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations(
      framebufferMixedSamplesCombinationNVAllocator );
    uint32_t combinationCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr );
      if ( ( result == VK_SUCCESS ) && combinationCount )
      {
        combinations.resize( combinationCount );
        result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
          m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
    VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
    if ( combinationCount < combinations.size() ) { combinations.resize( combinationCount ); }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), combinations );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_EXT_full_screen_exclusive ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                                uint32_t * pPresentModeCount,
                                                VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
                                                                              reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
                                                                              pPresentModeCount,
                                                                              reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PresentModeKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
    PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
    uint32_t presentModeCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr );
      if ( ( result == VK_SUCCESS ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
                                                               reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                               &presentModeCount,
                                                               reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
    if ( presentModeCount < presentModes.size() ) { presentModes.resize( presentModeCount ); }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
  }

  template <typename PresentModeKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, PresentModeKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
    PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                                PresentModeKHRAllocator & presentModeKHRAllocator,
                                                Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
    uint32_t presentModeCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr );
      if ( ( result == VK_SUCCESS ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
                                                               reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                               &presentModeCount,
                                                               reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
    if ( presentModeCount < presentModes.size() ) { presentModes.resize( presentModeCount ); }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  }
# else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  }
# else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                             VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
      m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
    Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
    VkResult result = d.vkGetDeviceGroupSurfacePresentModes2EXT(
      m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), modes );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
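#if defined( VK_USE_PLATFORM_WIN32_KHR )
  // Usage sketch (illustrative only, Win32 paths; assumes the swapchain was created
  // with a vk::SurfaceFullScreenExclusiveInfoEXT set to eApplicationControlled):
  //
  //   device.acquireFullScreenExclusiveModeEXT( swapchain );  // enter exclusive fullscreen
  //   // ... render / present ...
  //   device.releaseFullScreenExclusiveModeEXT( swapchain );  // hand control back
#endif /*VK_USE_PLATFORM_WIN32_KHR*/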
  //=== VK_EXT_headless_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
                                                                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                                    VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance,
                                                              reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ),
                                                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                              reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result = d.vkCreateHeadlessSurfaceEXT(
      m_instance,
      reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                              Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result = d.vkCreateHeadlessSurfaceEXT(
      m_instance,
      reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" );

    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
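  // Usage sketch (illustrative only; assumes the instance enabled VK_EXT_headless_surface):
  // handy for off-screen rendering or CI machines with no window system, since the
  // surface needs no native window behind it.
  //
  //   vk::UniqueSurfaceKHR surface = instance.createHeadlessSurfaceEXTUnique( vk::HeadlessSurfaceCreateInfoEXT{} );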
  //=== VK_KHR_buffer_device_address ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
                                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );

    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    uint64_t result = d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );

    return result;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );

    return result;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
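  // Usage sketch (illustrative only; assumes the buffer was created with
  // vk::BufferUsageFlagBits::eShaderDeviceAddress and the bufferDeviceAddress feature
  // is enabled): the returned address can be written into shader records or push constants.
  //
  //   vk::DeviceAddress addr = device.getBufferAddressKHR( vk::BufferDeviceAddressInfo{ buffer } );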
  //=== VK_EXT_line_rasterization ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
  }

  //=== VK_EXT_host_query_reset ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                    uint32_t firstQuery,
                                                    uint32_t queryCount,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  }
  //=== VK_EXT_extended_dynamic_state ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount,
                                                                 const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
                                                               uint32_t bindingCount,
                                                               const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
                                  firstBinding,
                                  bindingCount,
                                  reinterpret_cast<const VkBuffer *>( pBuffers ),
                                  reinterpret_cast<const VkDeviceSize *>( pOffsets ),
                                  reinterpret_cast<const VkDeviceSize *>( pSizes ),
                                  reinterpret_cast<const VkDeviceSize *>( pStrides ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
    VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
# else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
    }
    if ( !sizes.empty() && buffers.size() != sizes.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
    }
    if ( !strides.empty() && buffers.size() != strides.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
                                  firstBinding,
                                  buffers.size(),
                                  reinterpret_cast<const VkBuffer *>( buffers.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                                                         VULKAN_HPP_NAMESPACE::StencilOp failOp,
                                                         VULKAN_HPP_NAMESPACE::StencilOp passOp,
                                                         VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
                                                         VULKAN_HPP_NAMESPACE::CompareOp compareOp,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilOpEXT( m_commandBuffer,
                            static_cast<VkStencilFaceFlags>( faceMask ),
                            static_cast<VkStencilOp>( failOp ),
                            static_cast<VkStencilOp>( passOp ),
                            static_cast<VkStencilOp>( depthFailOp ),
                            static_cast<VkCompareOp>( compareOp ) );
  }
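  // Usage sketch (illustrative only; assumes the pipeline lists the matching
  // vk::DynamicState::eCullMode / eFrontFace / ePrimitiveTopology dynamic states):
  //
  //   cmd.setCullModeEXT( vk::CullModeFlagBits::eBack );
  //   cmd.setFrontFaceEXT( vk::FrontFace::eCounterClockwise );
  //   cmd.setPrimitiveTopologyEXT( vk::PrimitiveTopology::eTriangleList );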
  //=== VK_KHR_deferred_host_operations ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                                    VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateDeferredOperationKHR(
      m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type
    Device::createDeferredOperationKHR( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
    VkResult result = d.vkCreateDeferredOperationKHR(
      m_device,
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deferredOperation );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
    Device::createDeferredOperationKHRUnique( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
    VkResult result = d.vkCreateDeferredOperationKHR(
      m_device,
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" );

    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>( deferredOperation, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDeferredOperationKHR(
      m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyDeferredOperationKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( operation ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDeferredOperationKHR(
      m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyDeferredOperationKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( operation ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
  }

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
    Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
    Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );

    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
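  // Usage sketch (illustrative only; assumes `op` came from device.createDeferredOperationKHR()
  // and was handed to a deferrable call such as building an acceleration structure):
  // worker threads join until there is no work left for them.
  //
  //   vk::Result r;
  //   do { r = device.deferredOperationJoinKHR( op ); } while ( r == vk::Result::eThreadIdleKHR );
  //   // eThreadDoneKHR: no more work for this thread; eSuccess: the whole operation finished.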
  //=== VK_KHR_pipeline_executable_properties ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,
                                                                                            uint32_t * pExecutableCount,
                                                                                            VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
                                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device,
                                                                        reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ),
                                                                        pExecutableCount,
                                                                        reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
    Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
    uint32_t executableCount;
    VkResult result;
    do
    {
      result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
      if ( ( result == VK_SUCCESS ) && executableCount )
      {
        properties.resize( executableCount );
        result = d.vkGetPipelineExecutablePropertiesKHR( m_device,
                                                         reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
                                                         &executableCount,
                                                         reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
    VULKAN_HPP_ASSERT( executableCount <= properties.size() );
    if ( executableCount < properties.size() ) { properties.resize( executableCount ); }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }

  template <typename PipelineExecutablePropertiesKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutablePropertiesKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
    Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,
                                                PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
                                                Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties(
      pipelineExecutablePropertiesKHRAllocator );
    uint32_t executableCount;
    VkResult result;
    do
    {
      result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
      if ( ( result == VK_SUCCESS ) && executableCount )
      {
        properties.resize( executableCount );
        result = d.vkGetPipelineExecutablePropertiesKHR( m_device,
                                                         reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
                                                         &executableCount,
                                                         reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
    VULKAN_HPP_ASSERT( executableCount <= properties.size() );
    if ( executableCount < properties.size() ) { properties.resize( executableCount ); }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
                                                uint32_t * pStatisticCount,
                                                VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device,
                                                                        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
                                                                        pStatisticCount,
                                                                        reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
    Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics;
    uint32_t statisticCount;
    VkResult result;
    do
    {
      result =
        d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
      if ( ( result == VK_SUCCESS ) && statisticCount )
      {
        statistics.resize( statisticCount );
        result = d.vkGetPipelineExecutableStatisticsKHR( m_device,
                                                         reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
                                                         &statisticCount,
                                                         reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
    VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
    if ( statisticCount < statistics.size() ) { statistics.resize( statisticCount ); }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), statistics );
  }

  template <typename PipelineExecutableStatisticKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutableStatisticKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
    Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
                                                PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
                                                Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics(
      pipelineExecutableStatisticKHRAllocator );
    uint32_t statisticCount;
    VkResult result;
    do
    {
      result =
        d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
      if ( ( result == VK_SUCCESS ) && statisticCount )
      {
        statistics.resize( statisticCount );
        result = d.vkGetPipelineExecutableStatisticsKHR( m_device,
                                                         reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
                                                         &statisticCount,
                                                         reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
    VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
    if ( statisticCount < statistics.size() ) { statistics.resize( statisticCount ); }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), statistics );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
                                                             uint32_t * pInternalRepresentationCount,
                                                             VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
      m_device,
      reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
      pInternalRepresentationCount,
      reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
    Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
      internalRepresentations;
    uint32_t internalRepresentationCount;
    VkResult result;
    do
    {
      result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
        m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
      if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
      {
        internalRepresentations.resize( internalRepresentationCount );
        result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
          m_device,
          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
          &internalRepresentationCount,
          reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
    VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
    if ( internalRepresentationCount < internalRepresentations.size() ) { internalRepresentations.resize( internalRepresentationCount ); }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), internalRepresentations );
  }

  template <typename PipelineExecutableInternalRepresentationKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutableInternalRepresentationKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
    Device::getPipelineExecutableInternalRepresentationsKHR(
      const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
      PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
      Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
      internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator );
    uint32_t internalRepresentationCount;
    VkResult result;
    do
    {
      result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
        m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
      if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
      {
        internalRepresentations.resize( internalRepresentationCount );
        result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
          m_device,
          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
          &internalRepresentationCount,
          reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
    VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
    if ( internalRepresentationCount < internalRepresentations.size() ) { internalRepresentations.resize( internalRepresentationCount ); }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), internalRepresentations );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
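  // Usage sketch (illustrative only; assumes the pipeline was created with
  // vk::PipelineCreateFlagBits::eCaptureStatisticsKHR): enumerate the executables and
  // pull per-executable compiler statistics.
  //
  //   auto executables = device.getPipelineExecutablePropertiesKHR( vk::PipelineInfoKHR{ pipeline } );
  //   for ( uint32_t i = 0; i < executables.size(); ++i )
  //     auto stats = device.getPipelineExecutableStatisticsKHR( vk::PipelineExecutableInfoKHR{ pipeline, i } );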
  //=== VK_EXT_host_image_copy ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo,
                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( pCopyMemoryToImageInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( &copyMemoryToImageInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo,
                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( pCopyImageToMemoryInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( &copyImageToMemoryInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT * pCopyImageToImageInfo,
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfoEXT *>( pCopyImageToImageInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfoEXT *>( &copyImageToImageInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayoutEXT( uint32_t transitionCount,
                                                                                  const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT * pTransitions,
                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkTransitionImageLayoutEXT( m_device, transitionCount, reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( pTransitions ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions,
                                      Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result =
      d.vkTransitionImageLayoutEXT( m_device, transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( transitions.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image,
                                                                const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,
                                                                VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetImageSubresourceLayout2EXT( m_device,
                                       static_cast<VkImage>( image ),
                                       reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ),
                                       reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2EXT(
    VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
    d.vkGetImageSubresourceLayout2EXT( m_device,
                                       static_cast<VkImage>( image ),
                                       reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
                                       reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );

    return layout;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2EXT(
    VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
    d.vkGetImageSubresourceLayout2EXT( m_device,
                                       static_cast<VkImage>( image ),
                                       reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
                                       reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
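  // Usage sketch (illustrative only; assumes VK_EXT_host_image_copy is enabled and the
  // image was created with vk::ImageUsageFlagBits::eHostTransferEXT): upload pixels
  // straight from host memory, with no staging buffer or command buffer.
  //
  //   vk::MemoryToImageCopyEXT region{};
  //   region.pHostPointer     = pixels;
  //   region.imageSubresource = { vk::ImageAspectFlagBits::eColor, 0, 0, 1 };
  //   region.imageExtent      = extent;
  //   vk::CopyMemoryToImageInfoEXT info{};
  //   info.dstImage = image;  info.dstImageLayout = vk::ImageLayout::eGeneral;
  //   info.regionCount = 1;   info.pRegions = &region;
  //   device.copyMemoryToImageEXT( info );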
  //=== VK_KHR_map_memory2 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR * pMemoryMapInfo,
                                                                       void ** ppData,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfoKHR *>( pMemoryMapInfo ), ppData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type
    Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    void * pData;
    VkResult result = d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfoKHR *>( &memoryMapInfo ), &pData );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pData );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfoKHR *>( pMemoryUnmapInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfoKHR *>( &memoryUnmapInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  //=== VK_EXT_swapchain_maintenance1 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo,
                                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( pReleaseInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VkResult result = d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( &releaseInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
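  // Usage sketch (illustrative only): VK_KHR_map_memory2 replaces vkMapMemory/vkUnmapMemory
  // with struct-based calls, which keeps them extensible via pNext chains:
  //
  //   void * data = device.mapMemory2KHR( vk::MemoryMapInfoKHR{ {}, memory, 0, VK_WHOLE_SIZE } );
  //   std::memcpy( data, src, size );
  //   device.unmapMemory2KHR( vk::MemoryUnmapInfoKHR{ {}, memory } );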
  //=== VK_NV_device_generated_commands ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
                                                                           VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
                                                  reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ),
                                                  reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
                                                  reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
                                                  reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return memoryRequirements;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
                                                  reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
                                                  reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );

    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
                                                                    const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdExecuteGeneratedCommandsNV(
      m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
                                                                    const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkCmdExecuteGeneratedCommandsNV(
      m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                                                   VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                                   uint32_t groupIndex,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device,
                                                                    reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ),
                                                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                    reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type
    Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
    VkResult result = d.vkCreateIndirectCommandsLayoutNV(
      m_device,
      reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), indirectCommandsLayout );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type
    Device::createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                  Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
    VkResult result = d.vkCreateIndirectCommandsLayoutNV(
      m_device,
      reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" );

    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>(
                                    indirectCommandsLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyIndirectCommandsLayoutNV(
      m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
                                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );

    d.vkDestroyIndirectCommandsLayoutNV(
      m_device,
      static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutNV( + m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyIndirectCommandsLayoutNV( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_depth_bias_control === + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast( pDepthBiasInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast( &depthBiasInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_acquire_drm_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast( display ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast( display ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, + uint32_t connectorId, + VULKAN_HPP_NAMESPACE::DisplayKHR * display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( display ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const + { 
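+    // Illustrative sketch, not part of the generated header: in enhanced mode the wrapper
+    // returns the output parameter by value and routes errors through resultCheck(). Combined
+    // with acquireDrmDisplayEXT() above, a caller holding a drmFd from libdrm might write:
+    //
+    //   vk::DisplayKHR display = physicalDevice.getDrmDisplayEXT( drmFd, connectorId );
+    //   physicalDevice.acquireDrmDisplayEXT( drmFd, display );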
+ VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VkResult result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( &display ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" ); + + return createResultValueType( static_cast( result ), display ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VkResult result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( &display ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" ); + + return createResultValueType( static_cast( result ), + UniqueHandle( display, ObjectRelease( *this, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_private_data === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreatePrivateDataSlotEXT( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPrivateDataSlot ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; + VkResult result = d.vkCreatePrivateDataSlotEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" ); + + return createResultValueType( static_cast( result ), privateDataSlot ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; + VkResult result = d.vkCreatePrivateDataSlotEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( privateDataSlot, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + 
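+  // Illustrative VK_EXT_private_data round trip (hypothetical objectType/objectHandle; the slot
+  // is assumed to have been created with createPrivateDataSlotEXT above):
+  //
+  //   device.setPrivateDataEXT( objectType, objectHandle, slot, 42 );
+  //   uint64_t value = device.getPrivateDataEXT( objectType, objectHandle, slot );  // == 42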
VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPrivateDataSlotEXT( m_device, static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyPrivateDataSlotEXT( + m_device, + static_cast( privateDataSlot ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkSetPrivateDataEXT( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = + d.vkSetPrivateDataEXT( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPrivateDataEXT( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + uint64_t data; + d.vkGetPrivateDataEXT( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), &data ); + + return data; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_encode_queue === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, + reinterpret_cast( pQualityLevelInfo ), + reinterpret_cast( pQualityLevelProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR qualityLevelProperties; + VkResult result = + d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, + reinterpret_cast( &qualityLevelInfo ), + reinterpret_cast( &qualityLevelProperties ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); + + return createResultValueType( static_cast( result ), qualityLevelProperties ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR & qualityLevelProperties = + structureChain.template get(); + VkResult result = + d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, + reinterpret_cast( &qualityLevelInfo ), + reinterpret_cast( &qualityLevelProperties ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); + + return createResultValueType( static_cast( result ), structureChain ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, + size_t * pDataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( pVideoSessionParametersInfo ), + reinterpret_cast( pFeedbackInfo ), + pDataSize, + pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>>::type + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::pair> data_; + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first; + std::vector & data = data_.second; + size_t dataSize; + VkResult result; + do + { + result = d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ); + if ( ( result == VK_SUCCESS ) && dataSize ) + { + 
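+          // Standard Vulkan two-call idiom: the first call (pData == nullptr) only queried the
+          // required size; allocate and fetch the real payload now. If the size grew in between,
+          // the call returns VK_INCOMPLETE and the enclosing do/while retries.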
data.resize( dataSize ); + result = d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return createResultValueType( static_cast( result ), data_ ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>>::type + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::pair> data_( + std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first; + std::vector & data = data_.second; + size_t dataSize; + VkResult result; + do + { + result = d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ); + if ( ( result == VK_SUCCESS ) && dataSize ) + { + data.resize( dataSize ); + result = d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return createResultValueType( static_cast( result ), data_ ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, std::vector>>::type + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::pair, std::vector> data_; + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = + data_.first.template get(); + std::vector & data = data_.second; + size_t dataSize; + VkResult result; + do + { + result = d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ); + if ( ( result == VK_SUCCESS ) && dataSize ) + { + data.resize( dataSize ); + result = d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return createResultValueType( static_cast( result ), data_ ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, std::vector>>::type + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::pair, std::vector> data_( + 
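+      // std::piecewise_construct constructs the pair members in place, so the byte vector is
+      // built with the caller-supplied allocator from the start (an allocator cannot be swapped
+      // in after construction).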
std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = + data_.first.template get(); + std::vector & data = data_.second; + size_t dataSize; + VkResult result; + do + { + result = d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ); + if ( ( result == VK_SUCCESS ) && dataSize ) + { + data.resize( dataSize ); + result = d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return createResultValueType( static_cast( result ), data_ ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast( pEncodeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast( &encodeInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CudaModuleNV * pModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateCudaModuleNV( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pModule ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::CudaModuleNV module; + VkResult result = + d.vkCreateCudaModuleNV( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &module ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNV" ); + + return createResultValueType( static_cast( result ), module ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createCudaModuleNVUnique( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::CudaModuleNV module; + VkResult 
result = + d.vkCreateCudaModuleNV( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &module ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNVUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( module, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + size_t * pCacheSize, + void * pCacheData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetCudaModuleCacheNV( m_device, static_cast( module ), pCacheSize, pCacheData ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector cacheData; + size_t cacheSize; + VkResult result; + do + { + result = d.vkGetCudaModuleCacheNV( m_device, static_cast( module ), &cacheSize, nullptr ); + if ( ( result == VK_SUCCESS ) && cacheSize ) + { + cacheData.resize( cacheSize ); + result = d.vkGetCudaModuleCacheNV( m_device, static_cast( module ), &cacheSize, reinterpret_cast( cacheData.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); + VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); + if ( cacheSize < cacheData.size() ) + { + cacheData.resize( cacheSize ); + } + return createResultValueType( static_cast( result ), cacheData ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector cacheData( uint8_tAllocator ); + size_t cacheSize; + VkResult result; + do + { + result = d.vkGetCudaModuleCacheNV( m_device, static_cast( module ), &cacheSize, nullptr ); + if ( ( result == VK_SUCCESS ) && cacheSize ) + { + cacheData.resize( cacheSize ); + result = d.vkGetCudaModuleCacheNV( m_device, static_cast( module ), &cacheSize, reinterpret_cast( cacheData.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); + VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); + if ( cacheSize < cacheData.size() ) + { + cacheData.resize( cacheSize ); + } + return createResultValueType( static_cast( result ), cacheData ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CudaFunctionNV * pFunction, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateCudaFunctionNV( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator 
), + reinterpret_cast( pFunction ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::CudaFunctionNV function; + VkResult result = + d.vkCreateCudaFunctionNV( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &function ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNV" ); + + return createResultValueType( static_cast( result ), function ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createCudaFunctionNVUnique( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::CudaFunctionNV function; + VkResult result = + d.vkCreateCudaFunctionNV( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &function ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNVUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( function, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCudaModuleNV( m_device, static_cast( module ), reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyCudaModuleNV( m_device, + static_cast( module ), + reinterpret_cast( static_cast( allocator ) ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCudaModuleNV( m_device, static_cast( module ), reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyCudaModuleNV( m_device, + static_cast( module ), + reinterpret_cast( static_cast( allocator ) ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks 
* pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCudaFunctionNV( m_device, static_cast( function ), reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyCudaFunctionNV( m_device, + static_cast( function ), + reinterpret_cast( static_cast( allocator ) ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCudaFunctionNV( m_device, static_cast( function ), reinterpret_cast( pAllocator ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyCudaFunctionNV( m_device, + static_cast( function ), + reinterpret_cast( static_cast( allocator ) ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast( pLaunchInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast( &launchInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + template + VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkExportMetalObjectsEXT( m_device, reinterpret_cast( pMetalObjectsInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT + Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo; + d.vkExportMetalObjectsEXT( m_device, reinterpret_cast( &metalObjectsInfo ) ); + + return metalObjectsInfo; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::StructureChain 
structureChain; + VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get(); + d.vkExportMetalObjectsEXT( m_device, reinterpret_cast( &metalObjectsInfo ) ); + + return structureChain; + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast( event ), reinterpret_cast( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast( event ), reinterpret_cast( &dependencyInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWaitEvents2KHR( + m_commandBuffer, eventCount, reinterpret_cast( pEvents ), reinterpret_cast( pDependencyInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); +# else + if ( events.size() != dependencyInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdWaitEvents2KHR( m_commandBuffer, + events.size(), + reinterpret_cast( events.data() ), + reinterpret_cast( dependencyInfos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast( pDependencyInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast( &dependencyInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast( stage ), static_cast( queryPool ), query ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::submit2KHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteBufferMarker2AMD( + m_commandBuffer, static_cast( stage ), static_cast( dstBuffer ), static_cast( dstOffset ), marker ); + } + + template + VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointData2NV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector checkpointData; + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const + { + 
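+    // Illustrative note: checkpoint data is normally read after vkQueueSubmit* reported
+    // VK_ERROR_DEVICE_LOST, to locate the last checkpoint marker that executed, e.g.:
+    //
+    //   std::vector<vk::CheckpointData2NV> checkpoints = queue.getCheckpointData2NV();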
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector checkpointData( checkpointData2NVAllocator ); + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_descriptor_buffer === + + template + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, + VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast( layout ), reinterpret_cast( pLayoutSizeInBytes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize + Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes; + d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast( layout ), reinterpret_cast( &layoutSizeInBytes ) ); + + return layoutSizeInBytes; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, + uint32_t binding, + VULKAN_HPP_NAMESPACE::DeviceSize * pOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast( layout ), binding, reinterpret_cast( pOffset ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getDescriptorSetLayoutBindingOffsetEXT( + VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::DeviceSize offset; + d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast( layout ), binding, reinterpret_cast( &offset ) ); + + return offset; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo, + size_t dataSize, + void * pDescriptor, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDescriptorEXT( m_device, reinterpret_cast( pDescriptorInfo ), dataSize, pDescriptor ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, + size_t dataSize, + void * pDescriptor, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkGetDescriptorEXT( m_device, reinterpret_cast( &descriptorInfo ), dataSize, 
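+                        // dataSize must equal the size that VkPhysicalDeviceDescriptorBufferPropertiesEXT
+                        // reports for this descriptor type (e.g. uniformBufferDescriptorSize).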
pDescriptor ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + DescriptorType descriptor; + d.vkGetDescriptorEXT( + m_device, reinterpret_cast( &descriptorInfo ), sizeof( DescriptorType ), reinterpret_cast( &descriptor ) ); + + return descriptor; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( uint32_t bufferCount, + const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bufferCount, reinterpret_cast( pBindingInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindingInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bindingInfos.size(), reinterpret_cast( bindingInfos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t * pBufferIndices, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + setCount, + pBufferIndices, + reinterpret_cast( pOffsets ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferIndices, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( bufferIndices.size() == offsets.size() ); +# else + if ( bufferIndices.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + bufferIndices.size(), + bufferIndices.data(), + reinterpret_cast( offsets.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT( 
+ m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), set ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getBufferOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + DataType data; + VkResult result = d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" ); + + return createResultValueType( static_cast( result ), data ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + DataType data; + VkResult result = d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" ); + + return createResultValueType( static_cast( result ), data ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + DataType data; + VkResult result = + d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" ); + + return createResultValueType( static_cast( result ), data ); + } +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSamplerOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + DataType data; + VkResult result = d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" ); + + return createResultValueType( static_cast( result ), data ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + m_device, reinterpret_cast( pInfo ), pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + DataType data; + VkResult result = d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + m_device, reinterpret_cast( &info ), &data ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" ); + + return createResultValueType( static_cast( result ), data ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_fragment_shading_rate_enums === + + template + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetFragmentShadingRateEnumNV( + m_commandBuffer, static_cast( shadingRate ), reinterpret_cast( combinerOps ) ); + } + + //=== VK_EXT_mesh_shader === + + template + VULKAN_HPP_INLINE void + CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawMeshTasksEXT( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + 
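+                                                                  // (valid usage: with drawCount > 1, stride must be a
+                                                                  //  multiple of 4 and >= sizeof( VkDrawMeshTasksIndirectCommandEXT ))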
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksIndirectEXT( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                                       VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                       VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+                                                                       VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                                                       uint32_t maxDrawCount,
+                                                                       uint32_t stride,
+                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMeshTasksIndirectCountEXT( m_commandBuffer,
+                                          static_cast<VkBuffer>( buffer ),
+                                          static_cast<VkDeviceSize>( offset ),
+                                          static_cast<VkBuffer>( countBuffer ),
+                                          static_cast<VkDeviceSize>( countBufferOffset ),
+                                          maxDrawCount,
+                                          stride );
+  }
+
+  //=== VK_KHR_copy_commands2 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,
+                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,
+                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
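+    // The *2KHR copy commands alias the Vulkan 1.3 core entry points. An illustrative call
+    // site (handles and copy region assumed to exist elsewhere):
+    //
+    //   vk::CopyImageToBufferInfo2 info{ srcImage, vk::ImageLayout::eTransferSrcOptimal, dstBuffer, region };
+    //   cmd.copyImageToBuffer2KHR( info );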
d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast( pCopyImageToBufferInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast( ©ImageToBufferInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( pBlitImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( &blitImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( pResolveImageInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( &resolveImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_device_fault === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, + VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDeviceFaultInfoEXT( + m_device, reinterpret_cast( pFaultCounts ), reinterpret_cast( pFaultInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::getFaultInfoEXT( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::pair data_; + VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data_.first; + VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data_.second; + VkResult result = + d.vkGetDeviceFaultInfoEXT( m_device, reinterpret_cast( &faultCounts ), reinterpret_cast( &faultInfo ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); + + return ResultValue>( + static_cast( result ), data_ ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template 
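+  // Illustrative sketch for VK_NV_acquire_winrt_display (Windows only): fetch a display by its
+  // device-relative id, then take exclusive control of it:
+  //
+  //   vk::DisplayKHR display = physicalDevice.getWinrtDisplayNV( deviceRelativeId );
+  //   physicalDevice.acquireWinrtDisplayNV( display );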
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_acquire_winrt_display ===
+
+# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+                                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+  }
+# else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId,
+                                                                                   VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
+                                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
+    PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::DisplayKHR display;
+    VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
+    PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::DisplayKHR display;
+    VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" );
+
+    return createResultValueType(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,
+                                                                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                                                    VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance,
+                                                              reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ),
+                                                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                              reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+    Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
+                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                        Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateDirectFBSurfaceEXT(
+      m_instance,
+      reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+    Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
+                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                              Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateDirectFBSurfaceEXT(
+      m_instance,
+      reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" );
+
+    return createResultValueType(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
+                                                                              IDirectFB * dfb,
+                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
+    PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkBool32 result = d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( uint32_t vertexBindingDescriptionCount,
+                                                           const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,
+                                                           uint32_t vertexAttributeDescriptionCount,
+                                                           const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
+                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetVertexInputEXT( m_commandBuffer,
+                              vertexBindingDescriptionCount,
+                              reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ),
+                              vertexAttributeDescriptionCount,
+                              reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
+    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
+    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
+    Dispatch const &
+    d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdSetVertexInputEXT( m_commandBuffer,
+                              vertexBindingDescriptions.size(),
+                              reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
+                              vertexAttributeDescriptions.size(),
+                              reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
+                                          zx_handle_t * pZirconHandle,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
+    Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    zx_handle_t zirconHandle;
+    VkResult result =
+      d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), zirconHandle );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+                                                    zx_handle_t zirconHandle,
+                                                    VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
+                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
+                                                  static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+                                                  zirconHandle,
+                                                  reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
+    Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+                                                    zx_handle_t zirconHandle,
+                                                    Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
+    VkResult result = d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
+                                                                  static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+                                                                  zirconHandle,
+                                                                  reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryZirconHandleProperties );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA(
+    const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d ) const
+    VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
+      m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,
+                                                Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkImportSemaphoreZirconHandleFUCHSIA(
+      m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
+                                             zx_handle_t * pZirconHandle,
+                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
+    Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    zx_handle_t zirconHandle;
+    VkResult result =
+      d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), zirconHandle );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo,
+                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                           VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection,
+                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( m_device,
+                                                                   reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ),
+                                                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                                   reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::type
+    Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
+                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                           Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
+    VkResult result = d.vkCreateBufferCollectionFUCHSIA(
+      m_device,
+      reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), collection );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>>::type
+    Device::createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
+                                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                                 Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
+    VkResult result = d.vkCreateBufferCollectionFUCHSIA(
+      m_device,
+      reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" );
+
+    return createResultValueType(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>( collection, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+                                                        const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA(
+      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+                                                        const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo,
+                                                        Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkSetBufferCollectionImageConstraintsFUCHSIA(
+      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+                                                         const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo,
+                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
+      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+                                                         const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo,
+                                                         Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result =
+      d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
+        m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBufferCollectionFUCHSIA(
+      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+                                                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyBufferCollectionFUCHSIA(
+      m_device,
+      static_cast<VkBufferCollectionFUCHSIA>( collection ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBufferCollectionFUCHSIA(
+      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyBufferCollectionFUCHSIA(
+      m_device,
+      static_cast<VkBufferCollectionFUCHSIA>( collection ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
+                                                  VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties,
+                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA(
+      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::type
+    Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties;
+    VkResult result = d.vkGetBufferCollectionPropertiesFUCHSIA(
+      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass,
+                                                                                                 VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,
+                                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
+      m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>
+    Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize;
+    VkResult result = d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
+      m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                 VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI",
+                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
+
+    return ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), maxWorkgroupSize );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSubpassShadingHUAWEI( m_commandBuffer );
+  }
+
+  //=== VK_HUAWEI_invocation_mask ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView,
+                                                                  VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindInvocationMaskHUAWEI( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+  }
+
+  //=== VK_NV_external_memory_rdma ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo,
+                                      VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress,
+                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryRemoteAddressNV(
+      m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type
+    Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::RemoteAddressNV address;
+    VkResult result = d.vkGetMemoryRemoteAddressNV(
+      m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( &address ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), address );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_pipeline_properties ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo,
+                                                                                  VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties,
+                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPipelinePropertiesEXT(
+      m_device, reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BaseOutStructure>::type
+    Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties;
+    VkResult result = d.vkGetPipelinePropertiesEXT(
+      m_device, reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
+                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
+                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
+  }
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
+                                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                                                  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
+                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateScreenSurfaceQNX( m_instance,
+                                                            reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ),
+                                                            reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                            reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
+    Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
+                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                      Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateScreenSurfaceQNX(
+      m_instance,
+      reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+    Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
+                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                            Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VkResult result = d.vkCreateScreenSurfaceQNX(
+      m_instance,
+      reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" );
+
+    return createResultValueType(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
+                                                                            struct _screen_window * window,
+                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
+    PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_color_write_enable ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount,
+                                                                const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
+                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
+                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
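+
+  // [editor's note] Illustrative sketch, not part of the vendored header. Each command in this
+  // file pairs a raw count/pointer overload with an enhanced ArrayProxy overload that derives the
+  // count from the container. Hypothetical usage (the names `cmd` and `enables` are assumptions):
+  //
+  //   std::array<VULKAN_HPP_NAMESPACE::Bool32, 2> enables = { VK_TRUE, VK_FALSE };
+  //   cmd.setColorWriteEnableEXT( enables );  // ArrayProxy supplies size() and data() implicitly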
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdTraceRaysIndirect2KHR( m_commandBuffer, static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+  }
+
+  //=== VK_EXT_multi_draw ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( uint32_t drawCount,
+                                                      const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo,
+                                                      uint32_t instanceCount,
+                                                      uint32_t firstInstance,
+                                                      uint32_t stride,
+                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMultiEXT( m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), instanceCount, firstInstance, stride );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
+                                                      uint32_t instanceCount,
+                                                      uint32_t firstInstance,
+                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdDrawMultiEXT( m_commandBuffer,
+                         vertexInfo.size(),
+                         reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ),
+                         instanceCount,
+                         firstInstance,
+                         vertexInfo.stride() );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( uint32_t drawCount,
+                                                             const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo,
+                                                             uint32_t instanceCount,
+                                                             uint32_t firstInstance,
+                                                             uint32_t stride,
+                                                             const int32_t * pVertexOffset,
+                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawMultiIndexedEXT(
+      m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ), instanceCount, firstInstance, stride, pVertexOffset );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
+                                        uint32_t instanceCount,
+                                        uint32_t firstInstance,
+                                        Optional<const int32_t> vertexOffset,
+                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdDrawMultiIndexedEXT( m_commandBuffer,
+                                indexInfo.size(),
+                                reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ),
+                                instanceCount,
+                                firstInstance,
+                                indexInfo.stride(),
+                                static_cast<const int32_t *>( vertexOffset ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_opacity_micromap ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo,
+                                                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                                           VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap,
+                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateMicromapEXT( m_device,
+                                                       reinterpret_cast<const VkMicromapCreateInfoEXT *>( pCreateInfo ),
+                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                       reinterpret_cast<VkMicromapEXT *>( pMicromap ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MicromapEXT>::type
+    Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
+                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                               Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::MicromapEXT micromap;
+    VkResult result =
+      d.vkCreateMicromapEXT( m_device,
+                             reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
+                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                             reinterpret_cast<VkMicromapEXT *>( &micromap ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), micromap );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>>::type
+    Device::createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
+                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                     Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::MicromapEXT micromap;
+    VkResult result =
+      d.vkCreateMicromapEXT( m_device,
+                             reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
+                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                             reinterpret_cast<VkMicromapEXT *>( &micromap ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" );
+
+    return createResultValueType(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>( micromap, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+                                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyMicromapEXT( m_device,
+                            static_cast<VkMicromapEXT>( micromap ),
+                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyMicromapEXT( m_device,
+                            static_cast<VkMicromapEXT>( micromap ),
+                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( uint32_t infoCount,
+                                                           const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
+                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBuildMicromapsEXT( m_commandBuffer, infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
+                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdBuildMicromapsEXT( m_commandBuffer, infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+                                                                           uint32_t infoCount,
+                                                                           const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
+                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBuildMicromapsEXT(
+      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
+    Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
+                               Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkBuildMicromapsEXT(
+      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
+    resultCheck(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT",
+      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+                                                                         const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,
+                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+                                                                                               const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,
+                                                                                               Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result =
+      d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
+    resultCheck(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT",
+      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+                                                                                 const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
+                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCopyMicromapToMemoryEXT(
+      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapToMemoryEXT(
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d )
+    const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkCopyMicromapToMemoryEXT(
+      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
+    resultCheck(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT",
+      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+                                                                                 const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
+                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCopyMemoryToMicromapEXT(
+      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToMicromapEXT(
+    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkCopyMemoryToMicromapEXT(
+      m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
+    resultCheck(
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+      VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT",
+      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeMicromapsPropertiesEXT( uint32_t micromapCount,
+                                                                                     const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
+                                                                                     VULKAN_HPP_NAMESPACE::QueryType queryType,
+                                                                                     size_t dataSize,
+                                                                                     void * pData,
+                                                                                     size_t stride,
+                                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT(
+      m_device, micromapCount, reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
+    Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+                                         VULKAN_HPP_NAMESPACE::QueryType queryType,
+                                         size_t dataSize,
+                                         size_t stride,
+                                         Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+    VkResult result = d.vkWriteMicromapsPropertiesEXT( m_device,
+                                                       micromaps.size(),
+                                                       reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
+                                                       static_cast<VkQueryType>( queryType ),
+                                                       data.size() * sizeof( DataType ),
+                                                       reinterpret_cast<void *>( data.data() ),
+                                                       stride );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
+    Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const &
+                                         micromaps,
+                                       VULKAN_HPP_NAMESPACE::QueryType queryType,
+                                       size_t stride,
+                                       Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    DataType data;
+    VkResult result = d.vkWriteMicromapsPropertiesEXT( m_device,
+                                                       micromaps.size(),
+                                                       reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
+                                                       static_cast<VkQueryType>( queryType ),
+                                                       sizeof( DataType ),
+                                                       reinterpret_cast<void *>( &data ),
+                                                       stride );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( uint32_t micromapCount,
+                                                                     const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
+                                                                     VULKAN_HPP_NAMESPACE::QueryType queryType,
+                                                                     VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                                                     uint32_t firstQuery,
+                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
+                                        micromapCount,
+                                        reinterpret_cast<const VkMicromapEXT *>( pMicromaps ),
+                                        static_cast<VkQueryType>( queryType ),
+                                        static_cast<VkQueryPool>( queryPool ),
+                                        firstQuery );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+                                                VULKAN_HPP_NAMESPACE::QueryType queryType,
+                                                VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                                uint32_t firstQuery,
+                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
+                                        micromaps.size(),
+                                        reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
+                                        static_cast<VkQueryType>( queryType ),
+                                        static_cast<VkQueryPool>( queryPool ),
+                                        firstQuery );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo,
+                                                              VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
+                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceMicromapCompatibilityEXT( m_device,
+                                           reinterpret_cast<const VkMicromapVersionInfoEXT *>( pVersionInfo ),
+                                           reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
+    Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
+    d.vkGetDeviceMicromapCompatibilityEXT( m_device,
+                                           reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ),
+                                           reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
+
+    return compatibility;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+                                                           const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo,
+                                                           VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo,
+                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetMicromapBuildSizesEXT( m_device,
+                                  static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
+                                  reinterpret_cast<const VkMicromapBuildInfoEXT *>( pBuildInfo ),
+                                  reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( pSizeInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
+    Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
+                                      const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo,
+                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo;
+    d.vkGetMicromapBuildSizesEXT( m_device,
+                                  static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
+                                  reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ),
+                                  reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) );
+
+    return sizeInfo;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
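+
+  // [editor's note] Illustrative sketch, not part of the vendored header. The enhanced overload
+  // above returns the size structure by value. Hypothetical usage (the names `device` and
+  // `buildInfo` are assumptions):
+  //
+  //   VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizes = device.getMicromapBuildSizesEXT(
+  //       VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eDevice, buildInfo );
+  //   // sizes.micromapSize then drives the size of the buffer backing the micromap.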
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawClusterHUAWEI( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                                   VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawClusterIndirectHUAWEI( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+  }
+
+  //=== VK_EXT_pageable_device_local_memory ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast<VkDeviceMemory>( memory ), priority );
+  }
+
+  //=== VK_KHR_maintenance4 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,
+                                                                 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceBufferMemoryRequirementsKHR(
+      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+    Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetDeviceBufferMemoryRequirementsKHR(
+      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+    Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetDeviceBufferMemoryRequirementsKHR(
+      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
+                                                                VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceImageMemoryRequirementsKHR(
+      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+    Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetDeviceImageMemoryRequirementsKHR(
+      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+    Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetDeviceImageMemoryRequirementsKHR(
+      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
+                                                                      uint32_t * pSparseMemoryRequirementCount,
+                                                                      VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
+                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
+                                                   reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
+                                                   pSparseMemoryRequirementCount,
+                                                   reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+    Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetDeviceImageSparseMemoryRequirementsKHR(
+      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
+                                                   reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
+                                                   &sparseMemoryRequirementCount,
+                                                   reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  template <typename SparseImageMemoryRequirements2Allocator,
+            typename Dispatch,
+            typename std::enable_if<
+              std::is_same<typename SparseImageMemoryRequirements2Allocator::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value,
+              int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
+    Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
+                                                 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
+                                                 Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
+      sparseImageMemoryRequirements2Allocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetDeviceImageSparseMemoryRequirementsKHR(
+      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
+                                                   reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
+                                                   &sparseMemoryRequirementCount,
+                                                   reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference,
+                                                        VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
+                                                    reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( pBindingReference ),
+                                                    reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( pHostMapping ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE
+    Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping;
+    d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
+                                                    reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ),
+                                                    reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) );
+
+    return hostMapping;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), ppData );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
+                                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    void * pData;
+    d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), &pData );
+
+    return pData;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
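+
+  // Usage sketch (illustrative comment, not part of the generated header): with
+  // VK_VALVE_descriptor_set_host_mapping a descriptor set's backing memory can be
+  // written directly from the host. Assumes `device` is a valid vk::Device and
+  // `set` a vk::DescriptorSet allocated from a layout created with the
+  // host-mapping flag:
+  //
+  //   void * mapping = device.getDescriptorSetHostMappingVALVE( set );
+  //   // descriptors may now be written at the offsets reported by
+  //   // getDescriptorSetLayoutHostMappingInfoVALVE for the set's layout
+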
+  //=== VK_NV_copy_memory_indirect ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,
+                                                              uint32_t copyCount,
+                                                              uint32_t stride,
+                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyMemoryIndirectNV( m_commandBuffer, static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,
+                                                                     uint32_t copyCount,
+                                                                     uint32_t stride,
+                                                                     VULKAN_HPP_NAMESPACE::Image dstImage,
+                                                                     VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+                                                                     const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources,
+                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer,
+                                        static_cast<VkDeviceAddress>( copyBufferAddress ),
+                                        copyCount,
+                                        stride,
+                                        static_cast<VkImage>( dstImage ),
+                                        static_cast<VkImageLayout>( dstImageLayout ),
+                                        reinterpret_cast<const VkImageSubresourceLayers *>( pImageSubresources ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,
+                                                uint32_t stride,
+                                                VULKAN_HPP_NAMESPACE::Image dstImage,
+                                                VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+                                                VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources,
+                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer,
+                                        static_cast<VkDeviceAddress>( copyBufferAddress ),
+                                        imageSubresources.size(),
+                                        stride,
+                                        static_cast<VkImage>( dstImage ),
+                                        static_cast<VkImageLayout>( dstImageLayout ),
+                                        reinterpret_cast<const VkImageSubresourceLayers *>( imageSubresources.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_memory_decompression ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( uint32_t decompressRegionCount,
+                                                            const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions,
+                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDecompressMemoryNV( m_commandBuffer, decompressRegionCount, reinterpret_cast<const VkDecompressMemoryRegionNV *>( pDecompressMemoryRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions,
+                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdDecompressMemoryNV(
+      m_commandBuffer, decompressMemoryRegions.size(), reinterpret_cast<const VkDecompressMemoryRegionNV *>( decompressMemoryRegions.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress,
+                                                                         VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress,
+                                                                         uint32_t stride,
+                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDecompressMemoryIndirectCountNV(
+      m_commandBuffer, static_cast<VkDeviceAddress>( indirectCommandsAddress ), static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), stride );
+  }
+
+  //=== VK_NV_device_generated_commands_compute ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo,
+                                                                          VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
+                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPipelineIndirectMemoryRequirementsNV(
+      m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+    Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetPipelineIndirectMemoryRequirementsNV(
+      m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+    return memoryRequirements;
+  }
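+
+  // Note (illustrative comment, not generated code): the StructureChain overload
+  // below wires the pNext pointers of the requested structures together before the
+  // call, so extension outputs can be read alongside MemoryRequirements2. A sketch,
+  // assuming `device` and `createInfo` as in the functions above:
+  //
+  //   auto chain = device.getPipelineIndirectMemoryRequirementsNV<
+  //     vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>( createInfo );
+  //   auto & dedicated = chain.get<vk::MemoryDedicatedRequirements>();
+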
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+    Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetPipelineIndirectMemoryRequirementsNV(
+      m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+                                                                        VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdUpdatePipelineIndirectBufferNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo,
+                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<DeviceAddress>(
+      d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
+    Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkDeviceAddress result = d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( &info ) );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin,
+                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetTessellationDomainOriginEXT( m_commandBuffer, static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthClampEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClampEnable ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetPolygonModeEXT( m_commandBuffer, static_cast<VkPolygonMode>( polygonMode ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples,
+                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetRasterizationSamplesEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+                                                          const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) );
+  }
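+
+  // Note (illustrative comment, not generated code): the ArrayProxy overload that
+  // follows checks the spec's invariant — the mask needs one 32-bit word per 32
+  // samples, i.e. sampleMask.size() == ( samples + 31 ) / 32. For example, with
+  // vk::SampleCountFlagBits::e4 a single word suffices, assuming `cmd` is a
+  // recording vk::CommandBuffer:
+  //
+  //   std::array<vk::SampleMask, 1> mask = { 0xFFFFFFFF };
+  //   cmd.setSampleMaskEXT( vk::SampleCountFlagBits::e4, mask );
+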
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+                                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask,
+                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( sampleMask.size() == ( static_cast<uint32_t>( samples ) + 31 ) / 32 );
+# else
+    if ( sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32 )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+                        "::CommandBuffer::setSampleMaskEXT: sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32" );
+    }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+    d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable,
+                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetAlphaToCoverageEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToCoverageEnable ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetAlphaToOneEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToOneEnable ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetLogicOpEnableEXT( m_commandBuffer, static_cast<VkBool32>( logicOpEnable ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment,
+                                                                uint32_t attachmentCount,
+                                                                const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables,
+                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment,
+                                                                VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables,
+                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, colorBlendEnables.size(), reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment,
+                                                                  uint32_t attachmentCount,
+                                                                  const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations,
+                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetColorBlendEquationEXT(
+      m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment,
+                                             VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations,
+                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdSetColorBlendEquationEXT(
+      m_commandBuffer, firstAttachment,
colorBlendEquations.size(), reinterpret_cast( colorBlendEquations.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetColorWriteMaskEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast( pColorWriteMasks ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteMasks, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdSetColorWriteMaskEXT( + m_commandBuffer, firstAttachment, colorWriteMasks.size(), reinterpret_cast( colorWriteMasks.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRasterizationStreamEXT( m_commandBuffer, rasterizationStream ); + } + + template + VULKAN_HPP_INLINE void + CommandBuffer::setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetConservativeRasterizationModeEXT( m_commandBuffer, static_cast( conservativeRasterizationMode ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( m_commandBuffer, extraPrimitiveOverestimationSize ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthClipEnableEXT( m_commandBuffer, static_cast( depthClipEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetSampleLocationsEnableEXT( m_commandBuffer, static_cast( sampleLocationsEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetColorBlendAdvancedEXT( + m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast( pColorBlendAdvanced ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendAdvanced, + Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdSetColorBlendAdvancedEXT( + m_commandBuffer, firstAttachment, colorBlendAdvanced.size(), reinterpret_cast( colorBlendAdvanced.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetProvokingVertexModeEXT( m_commandBuffer, static_cast( provokingVertexMode ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineRasterizationModeEXT( m_commandBuffer, static_cast( lineRasterizationMode ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineStippleEnableEXT( m_commandBuffer, static_cast( stippledLineEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthClipNegativeOneToOneEXT( m_commandBuffer, static_cast( negativeOneToOne ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWScalingEnableNV( m_commandBuffer, static_cast( viewportWScalingEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportSwizzleNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewportSwizzles ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportSwizzles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdSetViewportSwizzleNV( + m_commandBuffer, firstViewport, viewportSwizzles.size(), reinterpret_cast( viewportSwizzles.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoverageToColorEnableNV( m_commandBuffer, static_cast( coverageToColorEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCoverageToColorLocationNV( m_commandBuffer, coverageToColorLocation );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode,
+                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCoverageModulationModeNV( m_commandBuffer, static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable,
+                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCoverageModulationTableEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageModulationTableEnable ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( uint32_t coverageModulationTableCount,
+                                                                      const float * pCoverageModulationTable,
+                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTableCount, pCoverageModulationTable );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable,
+                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTable.size(), coverageModulationTable.data() );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable,
+                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetShadingRateImageEnableNV( m_commandBuffer, static_cast<VkBool32>( shadingRateImageEnable ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable,
+                                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetRepresentativeFragmentTestEnableNV( m_commandBuffer, static_cast<VkBool32>( representativeFragmentTestEnable ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode,
+                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetCoverageReductionModeNV( m_commandBuffer, static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
+  }
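+
+  // Usage sketch (illustrative comment, not part of the generated header):
+  // VK_EXT_extended_dynamic_state3 moves the state above out of the pipeline
+  // object, so one pipeline can be reused with different fixed-function settings.
+  // Assumes `cmd` is a recording vk::CommandBuffer and the corresponding features
+  // were enabled at device creation:
+  //
+  //   cmd.setPolygonModeEXT( vk::PolygonMode::eLine );
+  //   cmd.setRasterizationSamplesEXT( vk::SampleCountFlagBits::e1 );
+  //   cmd.setDepthClampEnableEXT( VK_FALSE );
+  //   cmd.draw( 3, 1, 0, 0 );
+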
+  //=== VK_EXT_shader_module_identifier ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
+                                                               VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
+    Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
+    d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
+
+    return identifier;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
+                                                                         VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,
+                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetShaderModuleCreateInfoIdentifierEXT(
+      m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
+    Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
+                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
+    d.vkGetShaderModuleCreateInfoIdentifierEXT(
+      m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
+
+    return identifier;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_NV_optical_flow ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
+    PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo,
+                                                  uint32_t * pFormatCount,
+                                                  VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties,
+                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
+                                                      reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( pOpticalFlowImageFormatInfo ),
+                                                      pFormatCount,
+                                                      reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( pImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename OpticalFlowImageFormatPropertiesNVAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type
+    PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,
+                                                  Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties;
+    uint32_t formatCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
+        m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && formatCount )
+      {
+        imageFormatProperties.resize( formatCount );
+        result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
+                                                                 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
+                                                                 &formatCount,
+                                                                 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
+    VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
+    if ( formatCount < imageFormatProperties.size() )
+    {
+      imageFormatProperties.resize( formatCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
imageFormatProperties ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, + OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector imageFormatProperties( + opticalFlowImageFormatPropertiesNVAllocator ); + uint32_t formatCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + m_physicalDevice, reinterpret_cast( &opticalFlowImageFormatInfo ), &formatCount, nullptr ); + if ( ( result == VK_SUCCESS ) && formatCount ) + { + imageFormatProperties.resize( formatCount ); + result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, + reinterpret_cast( &opticalFlowImageFormatInfo ), + &formatCount, + reinterpret_cast( imageFormatProperties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); + VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); + if ( formatCount < imageFormatProperties.size() ) + { + imageFormatProperties.resize( formatCount ); + } + return createResultValueType( static_cast( result ), imageFormatProperties ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateOpticalFlowSessionNV( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSession ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; + VkResult result = d.vkCreateOpticalFlowSessionNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &session ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" ); + + return createResultValueType( static_cast( result ), session ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; + VkResult result = d.vkCreateOpticalFlowSessionNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &session ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING 
"::Device::createOpticalFlowSessionNVUnique" ); + + return createResultValueType( + static_cast( result ), + UniqueHandle( session, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyOpticalFlowSessionNV( m_device, static_cast( session ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyOpticalFlowSessionNV( + m_device, + static_cast( session ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyOpticalFlowSessionNV( m_device, static_cast( session ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyOpticalFlowSessionNV( + m_device, + static_cast( session ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkBindOpticalFlowSessionImageNV( m_device, + static_cast( session ), + static_cast( bindingPoint ), + static_cast( view ), + static_cast( layout ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkBindOpticalFlowSessionImageNV( m_device, + static_cast( session ), + static_cast( bindingPoint ), + static_cast( view ), + static_cast( layout ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template 
+  VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+                                                              const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo,
+                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdOpticalFlowExecuteNV(
+      m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+                                                              const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo,
+                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdOpticalFlowExecuteNV(
+      m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_maintenance5 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                             VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                             VULKAN_HPP_NAMESPACE::DeviceSize size,
+                                                             VULKAN_HPP_NAMESPACE::IndexType indexType,
+                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindIndexBuffer2KHR( m_commandBuffer,
+                                static_cast<VkBuffer>( buffer ),
+                                static_cast<VkDeviceSize>( offset ),
+                                static_cast<VkDeviceSize>( size ),
+                                static_cast<VkIndexType>( indexType ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo,
+                                                                 VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
+                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetRenderingAreaGranularityKHR(
+      m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( pRenderingAreaInfo ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
+    Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::Extent2D granularity;
+    d.vkGetRenderingAreaGranularityKHR(
+      m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( &renderingAreaInfo ), reinterpret_cast<VkExtent2D *>( &granularity ) );
+
+    return granularity;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * pInfo,
+                                                               VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceImageSubresourceLayoutKHR(
+      m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( pInfo ), reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
+    Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
+    d.vkGetDeviceImageSubresourceLayoutKHR(
+      m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ), reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+    return layout;
+  }
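+
+  // Usage sketch (illustrative comment, not part of the generated header): the
+  // VK_KHR_maintenance5 entry points above include a sized index-buffer bind, so
+  // out-of-bounds index reads are bounded by the given range rather than the whole
+  // buffer. Assumes `cmd` is a recording vk::CommandBuffer and `indexBuffer` a
+  // valid vk::Buffer:
+  //
+  //   cmd.bindIndexBuffer2KHR( indexBuffer, /*offset*/ 0, /*size*/ VK_WHOLE_SIZE,
+  //                            vk::IndexType::eUint16 );
+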
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+    Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
+    d.vkGetDeviceImageSubresourceLayoutKHR(
+      m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ), reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image,
+                                                                const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,
+                                                                VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout,
+                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageSubresourceLayout2KHR( m_device,
+                                       static_cast<VkImage>( image ),
+                                       reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ),
+                                       reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2KHR(
+    VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
+    d.vkGetImageSubresourceLayout2KHR( m_device,
+                                       static_cast<VkImage>( image ),
+                                       reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
+                                       reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+    return layout;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2KHR(
+    VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
+    d.vkGetImageSubresourceLayout2KHR( m_device,
+                                       static_cast<VkImage>( image ),
+                                       reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
+                                       reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_shader_object ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShadersEXT( uint32_t createInfoCount,
+                                                                          const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos,
+                                                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                                          VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
+                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateShadersEXT( m_device,
+                                                      createInfoCount,
+                                                      reinterpret_cast<const VkShaderCreateInfoEXT *>( pCreateInfos ),
+                                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                      reinterpret_cast<VkShaderEXT *>( pShaders ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename ShaderEXTAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type
+    Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
+                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                              Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size() );
+    VkResult result =
+      d.vkCreateShadersEXT( m_device,
+                            createInfos.size(),
+ reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" ); + + return createResultValueType( static_cast( result ), shaders ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector shaders( createInfos.size(), shaderEXTAllocator ); + VkResult result = + d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" ); + + return createResultValueType( static_cast( result ), shaders ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::ShaderEXT shader; + VkResult result = + d.vkCreateShadersEXT( m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shader ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT" ); + + return createResultValueType( static_cast( result ), shader ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, ShaderEXTAllocator>>::type + Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector shaders( createInfos.size() ); + VkResult result = + d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" ); + std::vector, ShaderEXTAllocator> uniqueShaders; + uniqueShaders.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & shader : shaders ) + { + uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); + } + return createResultValueType( static_cast( result ), std::move( uniqueShaders ) ); + } + + template >::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, ShaderEXTAllocator>>::type + Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector shaders( createInfos.size() ); + VkResult result = + d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING 
"::Device::createShadersEXTUnique" ); + std::vector, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator ); + uniqueShaders.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & shader : shaders ) + { + uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); + } + return createResultValueType( static_cast( result ), std::move( uniqueShaders ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::ShaderEXT shader; + VkResult result = + d.vkCreateShadersEXT( m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shader ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique" ); + + return createResultValueType( static_cast( result ), + UniqueHandle( shader, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderEXT( m_device, static_cast( shader ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyShaderEXT( m_device, + static_cast( shader ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyShaderEXT( m_device, static_cast( shader ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkDestroyShaderEXT( m_device, + static_cast( shader ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), pDataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector data; + size_t dataSize; + VkResult result; + do + { + result = d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, nullptr ); + if ( ( result == VK_SUCCESS ) && dataSize ) + { + data.resize( dataSize ); + result = d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, reinterpret_cast( data.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return createResultValueType( static_cast( result ), data ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector data( uint8_tAllocator ); + size_t dataSize; + VkResult result; + do + { + result = d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, nullptr ); + if ( ( result == VK_SUCCESS ) && dataSize ) + { + data.resize( dataSize ); + result = d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, reinterpret_cast( data.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return createResultValueType( static_cast( result ), data ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( uint32_t stageCount, + const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages, + const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindShadersEXT( + m_commandBuffer, stageCount, reinterpret_cast( pStages ), reinterpret_cast( pShaders ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( stages.size() == shaders.size() ); +# else + if ( stages.size() != shaders.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindShadersEXT( m_commandBuffer, + stages.size(), + reinterpret_cast( stages.data() ), + reinterpret_cast( shaders.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_QCOM_tile_properties === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + uint32_t * pPropertiesCount, + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetFramebufferTilePropertiesQCOM( + 
m_device, static_cast( framebuffer ), pPropertiesCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties; + uint32_t propertiesCount; + VkResult result; + do + { + result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast( framebuffer ), &propertiesCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertiesCount ) + { + properties.resize( propertiesCount ); + result = d.vkGetFramebufferTilePropertiesQCOM( + m_device, static_cast( framebuffer ), &propertiesCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + + VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); + if ( propertiesCount < properties.size() ) + { + properties.resize( propertiesCount ); + } + return properties; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector properties( tilePropertiesQCOMAllocator ); + uint32_t propertiesCount; + VkResult result; + do + { + result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast( framebuffer ), &propertiesCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertiesCount ) + { + properties.resize( propertiesCount ); + result = d.vkGetFramebufferTilePropertiesQCOM( + m_device, static_cast( framebuffer ), &propertiesCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + + VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); + if ( propertiesCount < properties.size() ) + { + properties.resize( propertiesCount ); + } + return properties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE Result Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDynamicRenderingTilePropertiesQCOM( + m_device, reinterpret_cast( pRenderingInfo ), reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM + Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties; + d.vkGetDynamicRenderingTilePropertiesQCOM( + m_device, reinterpret_cast( &renderingInfo ), reinterpret_cast( &properties ) ); + + return properties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_low_latency2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo, + Dispatch const & 
d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( pSleepModeInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                                                                        const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo,
+                                                                                        Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result =
+      d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( &sleepModeInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setLatencySleepModeNV" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                                                        const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo,
+                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( pSleepInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( &sleepInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::latencySleepNV" );
+
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                                     const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                                     const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo,
+                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
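+
+  // Usage sketch (illustrative comment, not part of the generated header; the
+  // struct field names are assumptions from VK_NV_low_latency2): a frame brackets
+  // its CPU work with markers so the timings read back below can attribute latency.
+  // Assumes `device` and `swapchain` are valid and sleep mode was enabled:
+  //
+  //   vk::SetLatencyMarkerInfoNV marker( presentId, vk::LatencyMarkerNV::eSimulationStart );
+  //   device.setLatencyMarkerNV( swapchain, marker );
+  //   // ... simulate the frame ...
+  //   marker.marker = vk::LatencyMarkerNV::eSimulationEnd;
+  //   device.setLatencyMarkerNV( swapchain, marker );
+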
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; + d.vkGetLatencyTimingsNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) ); + + return latencyMarkerInfo; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template <typename Dispatch> + VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( pQueueTypeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename Dispatch> + VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( &queueTypeInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_cooperative_matrix === + + template <typename Dispatch> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesKHR( + uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename CooperativeMatrixPropertiesKHRAllocator, typename Dispatch> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator>>::type + PhysicalDevice::getCooperativeMatrixPropertiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties; + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); + } + + template <typename CooperativeMatrixPropertiesKHRAllocator, typename Dispatch, typename std::enable_if<std::is_same<typename CooperativeMatrixPropertiesKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator>>::type + PhysicalDevice::getCooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties( + cooperativeMatrixPropertiesKHRAllocator ); + uint32_t propertyCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + 
m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + + template <typename Dispatch> + VULKAN_HPP_INLINE void CommandBuffer::setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetAttachmentFeedbackLoopEnableEXT( m_commandBuffer, static_cast<VkImageAspectFlags>( aspectMask ) ); + } + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + + template <typename Dispatch> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getScreenBufferPropertiesQNX( const struct _screen_buffer * buffer, + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetScreenBufferPropertiesQNX( m_device, buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( pProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename Dispatch> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>::type + Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties; + VkResult result = d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ); + resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); + + return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); + } + + template <typename X, typename Y, typename... Z, typename Dispatch> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type + Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + StructureChain<X, Y, Z...> structureChain; + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>(); + VkResult result = d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) ); + resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); + + return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_calibrated_timestamps === + + template <typename Dispatch> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsKHR( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( pTimeDomains ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename TimeDomainKHRAllocator, typename Dispatch> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type + 
PhysicalDevice::getCalibrateableTimeDomainsKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains; + uint32_t timeDomainCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ); + if ( ( result == VK_SUCCESS ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = + d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), timeDomains ); + } + + template <typename TimeDomainKHRAllocator, typename Dispatch, typename std::enable_if<std::is_same<typename TimeDomainKHRAllocator::value_type, VULKAN_HPP_NAMESPACE::TimeDomainKHR>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator>>::type + PhysicalDevice::getCalibrateableTimeDomainsKHR( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR, TimeDomainKHRAllocator> timeDomains( timeDomainKHRAllocator ); + uint32_t timeDomainCount; + VkResult result; + do + { + result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ); + if ( ( result == VK_SUCCESS ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = + d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), timeDomains ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template <typename Dispatch> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsKHR( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast<Result>( d.vkGetCalibratedTimestampsKHR( + m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename Uint64_tAllocator, typename Dispatch> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type + Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + VkResult result = d.vkGetCalibratedTimestampsKHR( + m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ); + resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); + + return 
createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ ); + } + + template <typename Uint64_tAllocator, typename Dispatch, typename std::enable_if<std::is_same<typename Uint64_tAllocator::value_type, uint64_t>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type + Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) ); + std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + VkResult result = d.vkGetCalibratedTimestampsKHR( + m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ); + resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); + + return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ ); + } + + template <typename Dispatch> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type + Device::getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::pair<uint64_t, uint64_t> data_; + uint64_t & timestamp = data_.first; + uint64_t & maxDeviation = data_.second; + VkResult result = + d.vkGetCalibratedTimestampsKHR( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( &timestampInfo ), &timestamp, &maxDeviation ); + resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); + + return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_maintenance6 === + + template <typename Dispatch> + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfoKHR *>( pBindDescriptorSetsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename Dispatch> + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR & bindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfoKHR *>( &bindDescriptorSetsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template <typename Dispatch> + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR * pPushConstantsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfoKHR *>( pPushConstantsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename Dispatch> + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR & pushConstantsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfoKHR *>( &pushConstantsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template <typename Dispatch> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR * 
pPushDescriptorSetInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfoKHR *>( pPushDescriptorSetInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename Dispatch> + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR & pushDescriptorSetInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfoKHR *>( &pushDescriptorSetInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template <typename Dispatch> + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR * pPushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer, + reinterpret_cast<const VkPushDescriptorSetWithTemplateInfoKHR *>( pPushDescriptorSetWithTemplateInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename Dispatch> + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR & pushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer, + reinterpret_cast<const VkPushDescriptorSetWithTemplateInfoKHR *>( &pushDescriptorSetWithTemplateInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template <typename Dispatch> + VULKAN_HPP_INLINE void + CommandBuffer::setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( pSetDescriptorBufferOffsetsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename Dispatch> + VULKAN_HPP_INLINE void + CommandBuffer::setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( &setDescriptorBufferOffsetsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template <typename Dispatch> + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + m_commandBuffer, reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( pBindDescriptorBufferEmbeddedSamplersInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template <typename Dispatch> + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + m_commandBuffer, reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( 
&bindDescriptorBufferEmbeddedSamplersInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_ggp.h b/MacroLibX/third_party/vulkan/vulkan_ggp.h new file mode 100644 index 0000000..9783aa3 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_ggp.h @@ -0,0 +1,60 @@ +#ifndef VULKAN_GGP_H_ +#define VULKAN_GGP_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_GGP_stream_descriptor_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_GGP_stream_descriptor_surface 1 +#define VK_GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION 1 +#define VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME "VK_GGP_stream_descriptor_surface" +typedef VkFlags VkStreamDescriptorSurfaceCreateFlagsGGP; +typedef struct VkStreamDescriptorSurfaceCreateInfoGGP { + VkStructureType sType; + const void* pNext; + VkStreamDescriptorSurfaceCreateFlagsGGP flags; + GgpStreamDescriptor streamDescriptor; +} VkStreamDescriptorSurfaceCreateInfoGGP; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateStreamDescriptorSurfaceGGP)(VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateStreamDescriptorSurfaceGGP( + VkInstance instance, + const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_GGP_frame_token is a preprocessor guard. Do not pass it to API calls. +#define VK_GGP_frame_token 1 +#define VK_GGP_FRAME_TOKEN_SPEC_VERSION 1 +#define VK_GGP_FRAME_TOKEN_EXTENSION_NAME "VK_GGP_frame_token" +typedef struct VkPresentFrameTokenGGP { + VkStructureType sType; + const void* pNext; + GgpFrameToken frameToken; +} VkPresentFrameTokenGGP; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_handles.hpp b/MacroLibX/third_party/vulkan/vulkan_handles.hpp new file mode 100644 index 0000000..469db32 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_handles.hpp @@ -0,0 +1,16190 @@ +// Copyright 2015-2023 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+ +#ifndef VULKAN_HANDLES_HPP +#define VULKAN_HANDLES_HPP + +namespace VULKAN_HPP_NAMESPACE +{ + + //=================================== + //=== STRUCT forward declarations === + //=================================== + + //=== VK_VERSION_1_0 === + struct Extent2D; + struct Extent3D; + struct Offset2D; + struct Offset3D; + struct Rect2D; + struct BaseInStructure; + struct BaseOutStructure; + struct BufferMemoryBarrier; + struct DispatchIndirectCommand; + struct DrawIndexedIndirectCommand; + struct DrawIndirectCommand; + struct ImageMemoryBarrier; + struct MemoryBarrier; + struct PipelineCacheHeaderVersionOne; + struct AllocationCallbacks; + struct ApplicationInfo; + struct FormatProperties; + struct ImageFormatProperties; + struct InstanceCreateInfo; + struct MemoryHeap; + struct MemoryType; + struct PhysicalDeviceFeatures; + struct PhysicalDeviceLimits; + struct PhysicalDeviceMemoryProperties; + struct PhysicalDeviceProperties; + struct PhysicalDeviceSparseProperties; + struct QueueFamilyProperties; + struct DeviceCreateInfo; + struct DeviceQueueCreateInfo; + struct ExtensionProperties; + struct LayerProperties; + struct SubmitInfo; + struct MappedMemoryRange; + struct MemoryAllocateInfo; + struct MemoryRequirements; + struct BindSparseInfo; + struct ImageSubresource; + struct SparseBufferMemoryBindInfo; + struct SparseImageFormatProperties; + struct SparseImageMemoryBind; + struct SparseImageMemoryBindInfo; + struct SparseImageMemoryRequirements; + struct SparseImageOpaqueMemoryBindInfo; + struct SparseMemoryBind; + struct FenceCreateInfo; + struct SemaphoreCreateInfo; + struct EventCreateInfo; + struct QueryPoolCreateInfo; + struct BufferCreateInfo; + struct BufferViewCreateInfo; + struct ImageCreateInfo; + struct SubresourceLayout; + struct ComponentMapping; + struct ImageSubresourceRange; + struct ImageViewCreateInfo; + struct ShaderModuleCreateInfo; + struct PipelineCacheCreateInfo; + struct ComputePipelineCreateInfo; + struct GraphicsPipelineCreateInfo; + struct PipelineColorBlendAttachmentState; + struct PipelineColorBlendStateCreateInfo; + struct PipelineDepthStencilStateCreateInfo; + struct PipelineDynamicStateCreateInfo; + struct PipelineInputAssemblyStateCreateInfo; + struct PipelineMultisampleStateCreateInfo; + struct PipelineRasterizationStateCreateInfo; + struct PipelineShaderStageCreateInfo; + struct PipelineTessellationStateCreateInfo; + struct PipelineVertexInputStateCreateInfo; + struct PipelineViewportStateCreateInfo; + struct SpecializationInfo; + struct SpecializationMapEntry; + struct StencilOpState; + struct VertexInputAttributeDescription; + struct VertexInputBindingDescription; + struct Viewport; + struct PipelineLayoutCreateInfo; + struct PushConstantRange; + struct SamplerCreateInfo; + struct CopyDescriptorSet; + struct DescriptorBufferInfo; + struct DescriptorImageInfo; + struct DescriptorPoolCreateInfo; + struct DescriptorPoolSize; + struct DescriptorSetAllocateInfo; + struct DescriptorSetLayoutBinding; + struct DescriptorSetLayoutCreateInfo; + struct WriteDescriptorSet; + struct AttachmentDescription; + struct AttachmentReference; + struct FramebufferCreateInfo; + struct RenderPassCreateInfo; + struct SubpassDependency; + struct SubpassDescription; + struct CommandPoolCreateInfo; + struct CommandBufferAllocateInfo; + struct CommandBufferBeginInfo; + struct CommandBufferInheritanceInfo; + struct BufferCopy; + struct BufferImageCopy; + struct ClearAttachment; + union ClearColorValue; + struct ClearDepthStencilValue; + struct ClearRect; + union ClearValue; + 
struct ImageBlit; + struct ImageCopy; + struct ImageResolve; + struct ImageSubresourceLayers; + struct RenderPassBeginInfo; + + //=== VK_VERSION_1_1 === + struct PhysicalDeviceSubgroupProperties; + struct BindBufferMemoryInfo; + using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; + struct BindImageMemoryInfo; + using BindImageMemoryInfoKHR = BindImageMemoryInfo; + struct PhysicalDevice16BitStorageFeatures; + using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; + struct MemoryDedicatedRequirements; + using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; + struct MemoryDedicatedAllocateInfo; + using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; + struct MemoryAllocateFlagsInfo; + using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo; + struct DeviceGroupRenderPassBeginInfo; + using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; + struct DeviceGroupCommandBufferBeginInfo; + using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; + struct DeviceGroupSubmitInfo; + using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; + struct DeviceGroupBindSparseInfo; + using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; + struct BindBufferMemoryDeviceGroupInfo; + using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; + struct BindImageMemoryDeviceGroupInfo; + using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; + struct PhysicalDeviceGroupProperties; + using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; + struct DeviceGroupDeviceCreateInfo; + using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; + struct BufferMemoryRequirementsInfo2; + using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; + struct ImageMemoryRequirementsInfo2; + using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; + struct ImageSparseMemoryRequirementsInfo2; + using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; + struct MemoryRequirements2; + using MemoryRequirements2KHR = MemoryRequirements2; + struct SparseImageMemoryRequirements2; + using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; + struct PhysicalDeviceFeatures2; + using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; + struct PhysicalDeviceProperties2; + using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; + struct FormatProperties2; + using FormatProperties2KHR = FormatProperties2; + struct ImageFormatProperties2; + using ImageFormatProperties2KHR = ImageFormatProperties2; + struct PhysicalDeviceImageFormatInfo2; + using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; + struct QueueFamilyProperties2; + using QueueFamilyProperties2KHR = QueueFamilyProperties2; + struct PhysicalDeviceMemoryProperties2; + using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; + struct SparseImageFormatProperties2; + using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; + struct PhysicalDeviceSparseImageFormatInfo2; + using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; + struct PhysicalDevicePointClippingProperties; + using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; + struct RenderPassInputAttachmentAspectCreateInfo; + using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; + struct InputAttachmentAspectReference; + using 
InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; + struct ImageViewUsageCreateInfo; + using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; + struct PipelineTessellationDomainOriginStateCreateInfo; + using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo; + struct RenderPassMultiviewCreateInfo; + using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; + struct PhysicalDeviceMultiviewFeatures; + using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; + struct PhysicalDeviceMultiviewProperties; + using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; + struct PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + struct PhysicalDeviceProtectedMemoryFeatures; + struct PhysicalDeviceProtectedMemoryProperties; + struct DeviceQueueInfo2; + struct ProtectedSubmitInfo; + struct SamplerYcbcrConversionCreateInfo; + using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; + struct SamplerYcbcrConversionInfo; + using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; + struct BindImagePlaneMemoryInfo; + using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; + struct ImagePlaneMemoryRequirementsInfo; + using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; + struct PhysicalDeviceSamplerYcbcrConversionFeatures; + using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; + struct SamplerYcbcrConversionImageFormatProperties; + using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties; + struct DescriptorUpdateTemplateEntry; + using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; + struct DescriptorUpdateTemplateCreateInfo; + using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; + struct ExternalMemoryProperties; + using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; + struct PhysicalDeviceExternalImageFormatInfo; + using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; + struct ExternalImageFormatProperties; + using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; + struct PhysicalDeviceExternalBufferInfo; + using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; + struct ExternalBufferProperties; + using ExternalBufferPropertiesKHR = ExternalBufferProperties; + struct PhysicalDeviceIDProperties; + using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; + struct ExternalMemoryImageCreateInfo; + using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; + struct ExternalMemoryBufferCreateInfo; + using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; + struct ExportMemoryAllocateInfo; + using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; + struct PhysicalDeviceExternalFenceInfo; + using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; + struct ExternalFenceProperties; + using ExternalFencePropertiesKHR = ExternalFenceProperties; + struct ExportFenceCreateInfo; + using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; + struct ExportSemaphoreCreateInfo; + using 
ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; + struct PhysicalDeviceExternalSemaphoreInfo; + using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; + struct ExternalSemaphoreProperties; + using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; + struct PhysicalDeviceMaintenance3Properties; + using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties; + struct DescriptorSetLayoutSupport; + using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; + struct PhysicalDeviceShaderDrawParametersFeatures; + using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; + + //=== VK_VERSION_1_2 === + struct PhysicalDeviceVulkan11Features; + struct PhysicalDeviceVulkan11Properties; + struct PhysicalDeviceVulkan12Features; + struct PhysicalDeviceVulkan12Properties; + struct ImageFormatListCreateInfo; + using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo; + struct RenderPassCreateInfo2; + using RenderPassCreateInfo2KHR = RenderPassCreateInfo2; + struct AttachmentDescription2; + using AttachmentDescription2KHR = AttachmentDescription2; + struct AttachmentReference2; + using AttachmentReference2KHR = AttachmentReference2; + struct SubpassDescription2; + using SubpassDescription2KHR = SubpassDescription2; + struct SubpassDependency2; + using SubpassDependency2KHR = SubpassDependency2; + struct SubpassBeginInfo; + using SubpassBeginInfoKHR = SubpassBeginInfo; + struct SubpassEndInfo; + using SubpassEndInfoKHR = SubpassEndInfo; + struct PhysicalDevice8BitStorageFeatures; + using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures; + struct ConformanceVersion; + using ConformanceVersionKHR = ConformanceVersion; + struct PhysicalDeviceDriverProperties; + using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties; + struct PhysicalDeviceShaderAtomicInt64Features; + using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features; + struct PhysicalDeviceShaderFloat16Int8Features; + using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + struct PhysicalDeviceFloatControlsProperties; + using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties; + struct DescriptorSetLayoutBindingFlagsCreateInfo; + using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo; + struct PhysicalDeviceDescriptorIndexingFeatures; + using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures; + struct PhysicalDeviceDescriptorIndexingProperties; + using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; + struct DescriptorSetVariableDescriptorCountAllocateInfo; + using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo; + struct DescriptorSetVariableDescriptorCountLayoutSupport; + using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport; + struct SubpassDescriptionDepthStencilResolve; + using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve; + struct PhysicalDeviceDepthStencilResolveProperties; + using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties; + struct PhysicalDeviceScalarBlockLayoutFeatures; + using 
PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures; + struct ImageStencilUsageCreateInfo; + using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo; + struct SamplerReductionModeCreateInfo; + using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo; + struct PhysicalDeviceSamplerFilterMinmaxProperties; + using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties; + struct PhysicalDeviceVulkanMemoryModelFeatures; + using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures; + struct PhysicalDeviceImagelessFramebufferFeatures; + using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures; + struct FramebufferAttachmentsCreateInfo; + using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo; + struct FramebufferAttachmentImageInfo; + using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo; + struct RenderPassAttachmentBeginInfo; + using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo; + struct PhysicalDeviceUniformBufferStandardLayoutFeatures; + using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures; + struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + struct AttachmentReferenceStencilLayout; + using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout; + struct AttachmentDescriptionStencilLayout; + using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout; + struct PhysicalDeviceHostQueryResetFeatures; + using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures; + struct PhysicalDeviceTimelineSemaphoreFeatures; + using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures; + struct PhysicalDeviceTimelineSemaphoreProperties; + using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties; + struct SemaphoreTypeCreateInfo; + using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo; + struct TimelineSemaphoreSubmitInfo; + using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo; + struct SemaphoreWaitInfo; + using SemaphoreWaitInfoKHR = SemaphoreWaitInfo; + struct SemaphoreSignalInfo; + using SemaphoreSignalInfoKHR = SemaphoreSignalInfo; + struct PhysicalDeviceBufferDeviceAddressFeatures; + using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures; + struct BufferDeviceAddressInfo; + using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo; + using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo; + struct BufferOpaqueCaptureAddressCreateInfo; + using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo; + struct MemoryOpaqueCaptureAddressAllocateInfo; + using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo; + struct DeviceMemoryOpaqueCaptureAddressInfo; + using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo; + + //=== VK_VERSION_1_3 === + struct PhysicalDeviceVulkan13Features; + struct PhysicalDeviceVulkan13Properties; + struct 
PipelineCreationFeedbackCreateInfo; + using PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo; + struct PipelineCreationFeedback; + using PipelineCreationFeedbackEXT = PipelineCreationFeedback; + struct PhysicalDeviceShaderTerminateInvocationFeatures; + using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures; + struct PhysicalDeviceToolProperties; + using PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties; + struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + using PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + struct PhysicalDevicePrivateDataFeatures; + using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures; + struct DevicePrivateDataCreateInfo; + using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo; + struct PrivateDataSlotCreateInfo; + using PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo; + struct PhysicalDevicePipelineCreationCacheControlFeatures; + using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures; + struct MemoryBarrier2; + using MemoryBarrier2KHR = MemoryBarrier2; + struct BufferMemoryBarrier2; + using BufferMemoryBarrier2KHR = BufferMemoryBarrier2; + struct ImageMemoryBarrier2; + using ImageMemoryBarrier2KHR = ImageMemoryBarrier2; + struct DependencyInfo; + using DependencyInfoKHR = DependencyInfo; + struct SubmitInfo2; + using SubmitInfo2KHR = SubmitInfo2; + struct SemaphoreSubmitInfo; + using SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo; + struct CommandBufferSubmitInfo; + using CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo; + struct PhysicalDeviceSynchronization2Features; + using PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features; + struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + struct PhysicalDeviceImageRobustnessFeatures; + using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures; + struct CopyBufferInfo2; + using CopyBufferInfo2KHR = CopyBufferInfo2; + struct CopyImageInfo2; + using CopyImageInfo2KHR = CopyImageInfo2; + struct CopyBufferToImageInfo2; + using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2; + struct CopyImageToBufferInfo2; + using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2; + struct BlitImageInfo2; + using BlitImageInfo2KHR = BlitImageInfo2; + struct ResolveImageInfo2; + using ResolveImageInfo2KHR = ResolveImageInfo2; + struct BufferCopy2; + using BufferCopy2KHR = BufferCopy2; + struct ImageCopy2; + using ImageCopy2KHR = ImageCopy2; + struct ImageBlit2; + using ImageBlit2KHR = ImageBlit2; + struct BufferImageCopy2; + using BufferImageCopy2KHR = BufferImageCopy2; + struct ImageResolve2; + using ImageResolve2KHR = ImageResolve2; + struct PhysicalDeviceSubgroupSizeControlFeatures; + using PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures; + struct PhysicalDeviceSubgroupSizeControlProperties; + using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties; + struct PipelineShaderStageRequiredSubgroupSizeCreateInfo; + using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + using ShaderRequiredSubgroupSizeCreateInfoEXT = 
PipelineShaderStageRequiredSubgroupSizeCreateInfo; + struct PhysicalDeviceInlineUniformBlockFeatures; + using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures; + struct PhysicalDeviceInlineUniformBlockProperties; + using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties; + struct WriteDescriptorSetInlineUniformBlock; + using WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock; + struct DescriptorPoolInlineUniformBlockCreateInfo; + using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo; + struct PhysicalDeviceTextureCompressionASTCHDRFeatures; + using PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures; + struct RenderingInfo; + using RenderingInfoKHR = RenderingInfo; + struct RenderingAttachmentInfo; + using RenderingAttachmentInfoKHR = RenderingAttachmentInfo; + struct PipelineRenderingCreateInfo; + using PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo; + struct PhysicalDeviceDynamicRenderingFeatures; + using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures; + struct CommandBufferInheritanceRenderingInfo; + using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo; + struct PhysicalDeviceShaderIntegerDotProductFeatures; + using PhysicalDeviceShaderIntegerDotProductFeaturesKHR = PhysicalDeviceShaderIntegerDotProductFeatures; + struct PhysicalDeviceShaderIntegerDotProductProperties; + using PhysicalDeviceShaderIntegerDotProductPropertiesKHR = PhysicalDeviceShaderIntegerDotProductProperties; + struct PhysicalDeviceTexelBufferAlignmentProperties; + using PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties; + struct FormatProperties3; + using FormatProperties3KHR = FormatProperties3; + struct PhysicalDeviceMaintenance4Features; + using PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features; + struct PhysicalDeviceMaintenance4Properties; + using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties; + struct DeviceBufferMemoryRequirements; + using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements; + struct DeviceImageMemoryRequirements; + using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements; + + //=== VK_KHR_surface === + struct SurfaceCapabilitiesKHR; + struct SurfaceFormatKHR; + + //=== VK_KHR_swapchain === + struct SwapchainCreateInfoKHR; + struct PresentInfoKHR; + struct ImageSwapchainCreateInfoKHR; + struct BindImageMemorySwapchainInfoKHR; + struct AcquireNextImageInfoKHR; + struct DeviceGroupPresentCapabilitiesKHR; + struct DeviceGroupPresentInfoKHR; + struct DeviceGroupSwapchainCreateInfoKHR; + + //=== VK_KHR_display === + struct DisplayModeCreateInfoKHR; + struct DisplayModeParametersKHR; + struct DisplayModePropertiesKHR; + struct DisplayPlaneCapabilitiesKHR; + struct DisplayPlanePropertiesKHR; + struct DisplayPropertiesKHR; + struct DisplaySurfaceCreateInfoKHR; + + //=== VK_KHR_display_swapchain === + struct DisplayPresentInfoKHR; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + struct XlibSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + struct XcbSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + 
struct WaylandSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + struct AndroidSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + struct Win32SurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + struct DebugReportCallbackCreateInfoEXT; + + //=== VK_AMD_rasterization_order === + struct PipelineRasterizationStateRasterizationOrderAMD; + + //=== VK_EXT_debug_marker === + struct DebugMarkerObjectNameInfoEXT; + struct DebugMarkerObjectTagInfoEXT; + struct DebugMarkerMarkerInfoEXT; + + //=== VK_KHR_video_queue === + struct QueueFamilyQueryResultStatusPropertiesKHR; + struct QueueFamilyVideoPropertiesKHR; + struct VideoProfileInfoKHR; + struct VideoProfileListInfoKHR; + struct VideoCapabilitiesKHR; + struct PhysicalDeviceVideoFormatInfoKHR; + struct VideoFormatPropertiesKHR; + struct VideoPictureResourceInfoKHR; + struct VideoReferenceSlotInfoKHR; + struct VideoSessionMemoryRequirementsKHR; + struct BindVideoSessionMemoryInfoKHR; + struct VideoSessionCreateInfoKHR; + struct VideoSessionParametersCreateInfoKHR; + struct VideoSessionParametersUpdateInfoKHR; + struct VideoBeginCodingInfoKHR; + struct VideoEndCodingInfoKHR; + struct VideoCodingControlInfoKHR; + + //=== VK_KHR_video_decode_queue === + struct VideoDecodeCapabilitiesKHR; + struct VideoDecodeUsageInfoKHR; + struct VideoDecodeInfoKHR; + + //=== VK_NV_dedicated_allocation === + struct DedicatedAllocationImageCreateInfoNV; + struct DedicatedAllocationBufferCreateInfoNV; + struct DedicatedAllocationMemoryAllocateInfoNV; + + //=== VK_EXT_transform_feedback === + struct PhysicalDeviceTransformFeedbackFeaturesEXT; + struct PhysicalDeviceTransformFeedbackPropertiesEXT; + struct PipelineRasterizationStateStreamCreateInfoEXT; + + //=== VK_NVX_binary_import === + struct CuModuleCreateInfoNVX; + struct CuFunctionCreateInfoNVX; + struct CuLaunchInfoNVX; + + //=== VK_NVX_image_view_handle === + struct ImageViewHandleInfoNVX; + struct ImageViewAddressPropertiesNVX; + + //=== VK_KHR_video_encode_h264 === + struct VideoEncodeH264CapabilitiesKHR; + struct VideoEncodeH264QualityLevelPropertiesKHR; + struct VideoEncodeH264SessionCreateInfoKHR; + struct VideoEncodeH264SessionParametersCreateInfoKHR; + struct VideoEncodeH264SessionParametersAddInfoKHR; + struct VideoEncodeH264SessionParametersGetInfoKHR; + struct VideoEncodeH264SessionParametersFeedbackInfoKHR; + struct VideoEncodeH264PictureInfoKHR; + struct VideoEncodeH264DpbSlotInfoKHR; + struct VideoEncodeH264NaluSliceInfoKHR; + struct VideoEncodeH264ProfileInfoKHR; + struct VideoEncodeH264RateControlInfoKHR; + struct VideoEncodeH264RateControlLayerInfoKHR; + struct VideoEncodeH264QpKHR; + struct VideoEncodeH264FrameSizeKHR; + struct VideoEncodeH264GopRemainingFrameInfoKHR; + + //=== VK_KHR_video_encode_h265 === + struct VideoEncodeH265CapabilitiesKHR; + struct VideoEncodeH265SessionCreateInfoKHR; + struct VideoEncodeH265QualityLevelPropertiesKHR; + struct VideoEncodeH265SessionParametersCreateInfoKHR; + struct VideoEncodeH265SessionParametersAddInfoKHR; + struct VideoEncodeH265SessionParametersGetInfoKHR; + struct VideoEncodeH265SessionParametersFeedbackInfoKHR; + struct VideoEncodeH265PictureInfoKHR; + struct VideoEncodeH265DpbSlotInfoKHR; + struct VideoEncodeH265NaluSliceSegmentInfoKHR; + struct VideoEncodeH265ProfileInfoKHR; + struct VideoEncodeH265RateControlInfoKHR; + struct 
VideoEncodeH265RateControlLayerInfoKHR; + struct VideoEncodeH265QpKHR; + struct VideoEncodeH265FrameSizeKHR; + struct VideoEncodeH265GopRemainingFrameInfoKHR; + + //=== VK_KHR_video_decode_h264 === + struct VideoDecodeH264ProfileInfoKHR; + struct VideoDecodeH264CapabilitiesKHR; + struct VideoDecodeH264SessionParametersCreateInfoKHR; + struct VideoDecodeH264SessionParametersAddInfoKHR; + struct VideoDecodeH264PictureInfoKHR; + struct VideoDecodeH264DpbSlotInfoKHR; + + //=== VK_AMD_texture_gather_bias_lod === + struct TextureLODGatherFormatPropertiesAMD; + + //=== VK_AMD_shader_info === + struct ShaderResourceUsageAMD; + struct ShaderStatisticsInfoAMD; + + //=== VK_KHR_dynamic_rendering === + struct RenderingFragmentShadingRateAttachmentInfoKHR; + struct RenderingFragmentDensityMapAttachmentInfoEXT; + struct AttachmentSampleCountInfoAMD; + using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD; + struct MultiviewPerViewAttributesInfoNVX; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + struct StreamDescriptorSurfaceCreateInfoGGP; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_corner_sampled_image === + struct PhysicalDeviceCornerSampledImageFeaturesNV; + + //=== VK_NV_external_memory_capabilities === + struct ExternalImageFormatPropertiesNV; + + //=== VK_NV_external_memory === + struct ExternalMemoryImageCreateInfoNV; + struct ExportMemoryAllocateInfoNV; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + struct ImportMemoryWin32HandleInfoNV; + struct ExportMemoryWin32HandleInfoNV; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_win32_keyed_mutex === + struct Win32KeyedMutexAcquireReleaseInfoNV; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_validation_flags === + struct ValidationFlagsEXT; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + struct ViSurfaceCreateInfoNN; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_EXT_astc_decode_mode === + struct ImageViewASTCDecodeModeEXT; + struct PhysicalDeviceASTCDecodeFeaturesEXT; + + //=== VK_EXT_pipeline_robustness === + struct PhysicalDevicePipelineRobustnessFeaturesEXT; + struct PhysicalDevicePipelineRobustnessPropertiesEXT; + struct PipelineRobustnessCreateInfoEXT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + struct ImportMemoryWin32HandleInfoKHR; + struct ExportMemoryWin32HandleInfoKHR; + struct MemoryWin32HandlePropertiesKHR; + struct MemoryGetWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + struct ImportMemoryFdInfoKHR; + struct MemoryFdPropertiesKHR; + struct MemoryGetFdInfoKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_keyed_mutex === + struct Win32KeyedMutexAcquireReleaseInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + struct ImportSemaphoreWin32HandleInfoKHR; + struct ExportSemaphoreWin32HandleInfoKHR; + struct D3D12FenceSubmitInfoKHR; + struct SemaphoreGetWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + struct ImportSemaphoreFdInfoKHR; + struct SemaphoreGetFdInfoKHR; + + //=== VK_KHR_push_descriptor === + struct PhysicalDevicePushDescriptorPropertiesKHR; + + //=== VK_EXT_conditional_rendering === + struct ConditionalRenderingBeginInfoEXT; + struct PhysicalDeviceConditionalRenderingFeaturesEXT; + struct 
CommandBufferInheritanceConditionalRenderingInfoEXT; + + //=== VK_KHR_incremental_present === + struct PresentRegionsKHR; + struct PresentRegionKHR; + struct RectLayerKHR; + + //=== VK_NV_clip_space_w_scaling === + struct ViewportWScalingNV; + struct PipelineViewportWScalingStateCreateInfoNV; + + //=== VK_EXT_display_surface_counter === + struct SurfaceCapabilities2EXT; + + //=== VK_EXT_display_control === + struct DisplayPowerInfoEXT; + struct DeviceEventInfoEXT; + struct DisplayEventInfoEXT; + struct SwapchainCounterCreateInfoEXT; + + //=== VK_GOOGLE_display_timing === + struct RefreshCycleDurationGOOGLE; + struct PastPresentationTimingGOOGLE; + struct PresentTimesInfoGOOGLE; + struct PresentTimeGOOGLE; + + //=== VK_NVX_multiview_per_view_attributes === + struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + //=== VK_NV_viewport_swizzle === + struct ViewportSwizzleNV; + struct PipelineViewportSwizzleStateCreateInfoNV; + + //=== VK_EXT_discard_rectangles === + struct PhysicalDeviceDiscardRectanglePropertiesEXT; + struct PipelineDiscardRectangleStateCreateInfoEXT; + + //=== VK_EXT_conservative_rasterization === + struct PhysicalDeviceConservativeRasterizationPropertiesEXT; + struct PipelineRasterizationConservativeStateCreateInfoEXT; + + //=== VK_EXT_depth_clip_enable === + struct PhysicalDeviceDepthClipEnableFeaturesEXT; + struct PipelineRasterizationDepthClipStateCreateInfoEXT; + + //=== VK_EXT_hdr_metadata === + struct HdrMetadataEXT; + struct XYColorEXT; + + //=== VK_IMG_relaxed_line_rasterization === + struct PhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + + //=== VK_KHR_shared_presentable_image === + struct SharedPresentSurfaceCapabilitiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + struct ImportFenceWin32HandleInfoKHR; + struct ExportFenceWin32HandleInfoKHR; + struct FenceGetWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + struct ImportFenceFdInfoKHR; + struct FenceGetFdInfoKHR; + + //=== VK_KHR_performance_query === + struct PhysicalDevicePerformanceQueryFeaturesKHR; + struct PhysicalDevicePerformanceQueryPropertiesKHR; + struct PerformanceCounterKHR; + struct PerformanceCounterDescriptionKHR; + struct QueryPoolPerformanceCreateInfoKHR; + union PerformanceCounterResultKHR; + struct AcquireProfilingLockInfoKHR; + struct PerformanceQuerySubmitInfoKHR; + + //=== VK_KHR_get_surface_capabilities2 === + struct PhysicalDeviceSurfaceInfo2KHR; + struct SurfaceCapabilities2KHR; + struct SurfaceFormat2KHR; + + //=== VK_KHR_get_display_properties2 === + struct DisplayProperties2KHR; + struct DisplayPlaneProperties2KHR; + struct DisplayModeProperties2KHR; + struct DisplayPlaneInfo2KHR; + struct DisplayPlaneCapabilities2KHR; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + struct IOSSurfaceCreateInfoMVK; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + struct MacOSSurfaceCreateInfoMVK; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + struct DebugUtilsLabelEXT; + struct DebugUtilsMessengerCallbackDataEXT; + struct DebugUtilsMessengerCreateInfoEXT; + struct DebugUtilsObjectNameInfoEXT; + struct DebugUtilsObjectTagInfoEXT; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + struct AndroidHardwareBufferUsageANDROID; + struct AndroidHardwareBufferPropertiesANDROID; + struct 
AndroidHardwareBufferFormatPropertiesANDROID; + struct ImportAndroidHardwareBufferInfoANDROID; + struct MemoryGetAndroidHardwareBufferInfoANDROID; + struct ExternalFormatANDROID; + struct AndroidHardwareBufferFormatProperties2ANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + struct PhysicalDeviceShaderEnqueueFeaturesAMDX; + struct PhysicalDeviceShaderEnqueuePropertiesAMDX; + struct ExecutionGraphPipelineScratchSizeAMDX; + struct ExecutionGraphPipelineCreateInfoAMDX; + struct DispatchGraphInfoAMDX; + struct DispatchGraphCountInfoAMDX; + struct PipelineShaderStageNodeCreateInfoAMDX; + union DeviceOrHostAddressConstAMDX; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + struct SampleLocationEXT; + struct SampleLocationsInfoEXT; + struct AttachmentSampleLocationsEXT; + struct SubpassSampleLocationsEXT; + struct RenderPassSampleLocationsBeginInfoEXT; + struct PipelineSampleLocationsStateCreateInfoEXT; + struct PhysicalDeviceSampleLocationsPropertiesEXT; + struct MultisamplePropertiesEXT; + + //=== VK_EXT_blend_operation_advanced === + struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT; + struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT; + struct PipelineColorBlendAdvancedStateCreateInfoEXT; + + //=== VK_NV_fragment_coverage_to_color === + struct PipelineCoverageToColorStateCreateInfoNV; + + //=== VK_KHR_acceleration_structure === + union DeviceOrHostAddressKHR; + union DeviceOrHostAddressConstKHR; + struct AccelerationStructureBuildRangeInfoKHR; + struct AabbPositionsKHR; + using AabbPositionsNV = AabbPositionsKHR; + struct AccelerationStructureGeometryTrianglesDataKHR; + struct TransformMatrixKHR; + using TransformMatrixNV = TransformMatrixKHR; + struct AccelerationStructureBuildGeometryInfoKHR; + struct AccelerationStructureGeometryAabbsDataKHR; + struct AccelerationStructureInstanceKHR; + using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR; + struct AccelerationStructureGeometryInstancesDataKHR; + union AccelerationStructureGeometryDataKHR; + struct AccelerationStructureGeometryKHR; + struct AccelerationStructureCreateInfoKHR; + struct WriteDescriptorSetAccelerationStructureKHR; + struct PhysicalDeviceAccelerationStructureFeaturesKHR; + struct PhysicalDeviceAccelerationStructurePropertiesKHR; + struct AccelerationStructureDeviceAddressInfoKHR; + struct AccelerationStructureVersionInfoKHR; + struct CopyAccelerationStructureToMemoryInfoKHR; + struct CopyMemoryToAccelerationStructureInfoKHR; + struct CopyAccelerationStructureInfoKHR; + struct AccelerationStructureBuildSizesInfoKHR; + + //=== VK_KHR_ray_tracing_pipeline === + struct RayTracingShaderGroupCreateInfoKHR; + struct RayTracingPipelineCreateInfoKHR; + struct PhysicalDeviceRayTracingPipelineFeaturesKHR; + struct PhysicalDeviceRayTracingPipelinePropertiesKHR; + struct StridedDeviceAddressRegionKHR; + struct TraceRaysIndirectCommandKHR; + struct RayTracingPipelineInterfaceCreateInfoKHR; + + //=== VK_KHR_ray_query === + struct PhysicalDeviceRayQueryFeaturesKHR; + + //=== VK_NV_framebuffer_mixed_samples === + struct PipelineCoverageModulationStateCreateInfoNV; + + //=== VK_NV_shader_sm_builtins === + struct PhysicalDeviceShaderSMBuiltinsPropertiesNV; + struct PhysicalDeviceShaderSMBuiltinsFeaturesNV; + + //=== VK_EXT_image_drm_format_modifier === + struct DrmFormatModifierPropertiesListEXT; + struct DrmFormatModifierPropertiesEXT; + struct PhysicalDeviceImageDrmFormatModifierInfoEXT; + struct 
ImageDrmFormatModifierListCreateInfoEXT; + struct ImageDrmFormatModifierExplicitCreateInfoEXT; + struct ImageDrmFormatModifierPropertiesEXT; + struct DrmFormatModifierPropertiesList2EXT; + struct DrmFormatModifierProperties2EXT; + + //=== VK_EXT_validation_cache === + struct ValidationCacheCreateInfoEXT; + struct ShaderModuleValidationCacheCreateInfoEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_portability_subset === + struct PhysicalDevicePortabilitySubsetFeaturesKHR; + struct PhysicalDevicePortabilitySubsetPropertiesKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_shading_rate_image === + struct ShadingRatePaletteNV; + struct PipelineViewportShadingRateImageStateCreateInfoNV; + struct PhysicalDeviceShadingRateImageFeaturesNV; + struct PhysicalDeviceShadingRateImagePropertiesNV; + struct CoarseSampleLocationNV; + struct CoarseSampleOrderCustomNV; + struct PipelineViewportCoarseSampleOrderStateCreateInfoNV; + + //=== VK_NV_ray_tracing === + struct RayTracingShaderGroupCreateInfoNV; + struct RayTracingPipelineCreateInfoNV; + struct GeometryTrianglesNV; + struct GeometryAABBNV; + struct GeometryDataNV; + struct GeometryNV; + struct AccelerationStructureInfoNV; + struct AccelerationStructureCreateInfoNV; + struct BindAccelerationStructureMemoryInfoNV; + struct WriteDescriptorSetAccelerationStructureNV; + struct AccelerationStructureMemoryRequirementsInfoNV; + struct PhysicalDeviceRayTracingPropertiesNV; + + //=== VK_NV_representative_fragment_test === + struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV; + struct PipelineRepresentativeFragmentTestStateCreateInfoNV; + + //=== VK_EXT_filter_cubic === + struct PhysicalDeviceImageViewImageFormatInfoEXT; + struct FilterCubicImageViewImageFormatPropertiesEXT; + + //=== VK_EXT_external_memory_host === + struct ImportMemoryHostPointerInfoEXT; + struct MemoryHostPointerPropertiesEXT; + struct PhysicalDeviceExternalMemoryHostPropertiesEXT; + + //=== VK_KHR_shader_clock === + struct PhysicalDeviceShaderClockFeaturesKHR; + + //=== VK_AMD_pipeline_compiler_control === + struct PipelineCompilerControlCreateInfoAMD; + + //=== VK_AMD_shader_core_properties === + struct PhysicalDeviceShaderCorePropertiesAMD; + + //=== VK_KHR_video_decode_h265 === + struct VideoDecodeH265ProfileInfoKHR; + struct VideoDecodeH265CapabilitiesKHR; + struct VideoDecodeH265SessionParametersCreateInfoKHR; + struct VideoDecodeH265SessionParametersAddInfoKHR; + struct VideoDecodeH265PictureInfoKHR; + struct VideoDecodeH265DpbSlotInfoKHR; + + //=== VK_KHR_global_priority === + struct DeviceQueueGlobalPriorityCreateInfoKHR; + using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfoKHR; + struct PhysicalDeviceGlobalPriorityQueryFeaturesKHR; + using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeaturesKHR; + struct QueueFamilyGlobalPriorityPropertiesKHR; + using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityPropertiesKHR; + + //=== VK_AMD_memory_overallocation_behavior === + struct DeviceMemoryOverallocationCreateInfoAMD; + + //=== VK_EXT_vertex_attribute_divisor === + struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_frame_token === + struct PresentFrameTokenGGP; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_compute_shader_derivatives === + struct PhysicalDeviceComputeShaderDerivativesFeaturesNV; + + //=== VK_NV_mesh_shader === + struct PhysicalDeviceMeshShaderFeaturesNV; + struct 
PhysicalDeviceMeshShaderPropertiesNV; + struct DrawMeshTasksIndirectCommandNV; + + //=== VK_NV_shader_image_footprint === + struct PhysicalDeviceShaderImageFootprintFeaturesNV; + + //=== VK_NV_scissor_exclusive === + struct PipelineViewportExclusiveScissorStateCreateInfoNV; + struct PhysicalDeviceExclusiveScissorFeaturesNV; + + //=== VK_NV_device_diagnostic_checkpoints === + struct QueueFamilyCheckpointPropertiesNV; + struct CheckpointDataNV; + + //=== VK_INTEL_shader_integer_functions2 === + struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + + //=== VK_INTEL_performance_query === + union PerformanceValueDataINTEL; + struct PerformanceValueINTEL; + struct InitializePerformanceApiInfoINTEL; + struct QueryPoolPerformanceQueryCreateInfoINTEL; + using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL; + struct PerformanceMarkerInfoINTEL; + struct PerformanceStreamMarkerInfoINTEL; + struct PerformanceOverrideInfoINTEL; + struct PerformanceConfigurationAcquireInfoINTEL; + + //=== VK_EXT_pci_bus_info === + struct PhysicalDevicePCIBusInfoPropertiesEXT; + + //=== VK_AMD_display_native_hdr === + struct DisplayNativeHdrSurfaceCapabilitiesAMD; + struct SwapchainDisplayNativeHdrCreateInfoAMD; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + struct ImagePipeSurfaceCreateInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + struct MetalSurfaceCreateInfoEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map === + struct PhysicalDeviceFragmentDensityMapFeaturesEXT; + struct PhysicalDeviceFragmentDensityMapPropertiesEXT; + struct RenderPassFragmentDensityMapCreateInfoEXT; + + //=== VK_KHR_fragment_shading_rate === + struct FragmentShadingRateAttachmentInfoKHR; + struct PipelineFragmentShadingRateStateCreateInfoKHR; + struct PhysicalDeviceFragmentShadingRateFeaturesKHR; + struct PhysicalDeviceFragmentShadingRatePropertiesKHR; + struct PhysicalDeviceFragmentShadingRateKHR; + + //=== VK_AMD_shader_core_properties2 === + struct PhysicalDeviceShaderCoreProperties2AMD; + + //=== VK_AMD_device_coherent_memory === + struct PhysicalDeviceCoherentMemoryFeaturesAMD; + + //=== VK_EXT_shader_image_atomic_int64 === + struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + //=== VK_EXT_memory_budget === + struct PhysicalDeviceMemoryBudgetPropertiesEXT; + + //=== VK_EXT_memory_priority === + struct PhysicalDeviceMemoryPriorityFeaturesEXT; + struct MemoryPriorityAllocateInfoEXT; + + //=== VK_KHR_surface_protected_capabilities === + struct SurfaceProtectedCapabilitiesKHR; + + //=== VK_NV_dedicated_allocation_image_aliasing === + struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + + //=== VK_EXT_buffer_device_address === + struct PhysicalDeviceBufferDeviceAddressFeaturesEXT; + using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + struct BufferDeviceAddressCreateInfoEXT; + + //=== VK_EXT_validation_features === + struct ValidationFeaturesEXT; + + //=== VK_KHR_present_wait === + struct PhysicalDevicePresentWaitFeaturesKHR; + + //=== VK_NV_cooperative_matrix === + struct CooperativeMatrixPropertiesNV; + struct PhysicalDeviceCooperativeMatrixFeaturesNV; + struct PhysicalDeviceCooperativeMatrixPropertiesNV; + + //=== VK_NV_coverage_reduction_mode === + struct PhysicalDeviceCoverageReductionModeFeaturesNV; + struct PipelineCoverageReductionStateCreateInfoNV; + struct FramebufferMixedSamplesCombinationNV; + + 
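+  // NOTE (editorial, illustrative only -- not part of the generated header): the
+  // PhysicalDevice*Features{KHR,EXT,NV} structs forward-declared above are meant to be
+  // chained into VkPhysicalDeviceFeatures2 via pNext when probing device support.
+  // A minimal sketch using vulkan.hpp's StructureChain; `gpu` is assumed to be a
+  // valid vk::PhysicalDevice and the corresponding extension must be advertised
+  // by the device before its feature struct is queried:
+  //
+  //   auto chain = gpu.getFeatures2<vk::PhysicalDeviceFeatures2,
+  //                                 vk::PhysicalDeviceRayQueryFeaturesKHR>();
+  //   if ( chain.get<vk::PhysicalDeviceRayQueryFeaturesKHR>().rayQuery )
+  //   {
+  //     // safe to enable VK_KHR_ray_query when creating the vk::Device
+  //   }
+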
//=== VK_EXT_fragment_shader_interlock === + struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT; + + //=== VK_EXT_ycbcr_image_arrays === + struct PhysicalDeviceYcbcrImageArraysFeaturesEXT; + + //=== VK_EXT_provoking_vertex === + struct PhysicalDeviceProvokingVertexFeaturesEXT; + struct PhysicalDeviceProvokingVertexPropertiesEXT; + struct PipelineRasterizationProvokingVertexStateCreateInfoEXT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + struct SurfaceFullScreenExclusiveInfoEXT; + struct SurfaceCapabilitiesFullScreenExclusiveEXT; + struct SurfaceFullScreenExclusiveWin32InfoEXT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + struct HeadlessSurfaceCreateInfoEXT; + + //=== VK_EXT_line_rasterization === + struct PhysicalDeviceLineRasterizationFeaturesEXT; + struct PhysicalDeviceLineRasterizationPropertiesEXT; + struct PipelineRasterizationLineStateCreateInfoEXT; + + //=== VK_EXT_shader_atomic_float === + struct PhysicalDeviceShaderAtomicFloatFeaturesEXT; + + //=== VK_EXT_index_type_uint8 === + struct PhysicalDeviceIndexTypeUint8FeaturesEXT; + + //=== VK_EXT_extended_dynamic_state === + struct PhysicalDeviceExtendedDynamicStateFeaturesEXT; + + //=== VK_KHR_pipeline_executable_properties === + struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + struct PipelineInfoKHR; + using PipelineInfoEXT = PipelineInfoKHR; + struct PipelineExecutablePropertiesKHR; + struct PipelineExecutableInfoKHR; + union PipelineExecutableStatisticValueKHR; + struct PipelineExecutableStatisticKHR; + struct PipelineExecutableInternalRepresentationKHR; + + //=== VK_EXT_host_image_copy === + struct PhysicalDeviceHostImageCopyFeaturesEXT; + struct PhysicalDeviceHostImageCopyPropertiesEXT; + struct MemoryToImageCopyEXT; + struct ImageToMemoryCopyEXT; + struct CopyMemoryToImageInfoEXT; + struct CopyImageToMemoryInfoEXT; + struct CopyImageToImageInfoEXT; + struct HostImageLayoutTransitionInfoEXT; + struct SubresourceHostMemcpySizeEXT; + struct HostImageCopyDevicePerformanceQueryEXT; + + //=== VK_KHR_map_memory2 === + struct MemoryMapInfoKHR; + struct MemoryUnmapInfoKHR; + + //=== VK_EXT_shader_atomic_float2 === + struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT; + + //=== VK_EXT_surface_maintenance1 === + struct SurfacePresentModeEXT; + struct SurfacePresentScalingCapabilitiesEXT; + struct SurfacePresentModeCompatibilityEXT; + + //=== VK_EXT_swapchain_maintenance1 === + struct PhysicalDeviceSwapchainMaintenance1FeaturesEXT; + struct SwapchainPresentFenceInfoEXT; + struct SwapchainPresentModesCreateInfoEXT; + struct SwapchainPresentModeInfoEXT; + struct SwapchainPresentScalingCreateInfoEXT; + struct ReleaseSwapchainImagesInfoEXT; + + //=== VK_NV_device_generated_commands === + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + struct GraphicsShaderGroupCreateInfoNV; + struct GraphicsPipelineShaderGroupsCreateInfoNV; + struct BindShaderGroupIndirectCommandNV; + struct BindIndexBufferIndirectCommandNV; + struct BindVertexBufferIndirectCommandNV; + struct SetStateFlagsIndirectCommandNV; + struct IndirectCommandsStreamNV; + struct IndirectCommandsLayoutTokenNV; + struct IndirectCommandsLayoutCreateInfoNV; + struct GeneratedCommandsInfoNV; + struct GeneratedCommandsMemoryRequirementsInfoNV; + + //=== VK_NV_inherited_viewport_scissor === + struct PhysicalDeviceInheritedViewportScissorFeaturesNV; + struct CommandBufferInheritanceViewportScissorInfoNV; + + //=== 
VK_EXT_texel_buffer_alignment === + struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT; + + //=== VK_QCOM_render_pass_transform === + struct RenderPassTransformBeginInfoQCOM; + struct CommandBufferInheritanceRenderPassTransformInfoQCOM; + + //=== VK_EXT_depth_bias_control === + struct PhysicalDeviceDepthBiasControlFeaturesEXT; + struct DepthBiasInfoEXT; + struct DepthBiasRepresentationInfoEXT; + + //=== VK_EXT_device_memory_report === + struct PhysicalDeviceDeviceMemoryReportFeaturesEXT; + struct DeviceDeviceMemoryReportCreateInfoEXT; + struct DeviceMemoryReportCallbackDataEXT; + + //=== VK_EXT_robustness2 === + struct PhysicalDeviceRobustness2FeaturesEXT; + struct PhysicalDeviceRobustness2PropertiesEXT; + + //=== VK_EXT_custom_border_color === + struct SamplerCustomBorderColorCreateInfoEXT; + struct PhysicalDeviceCustomBorderColorPropertiesEXT; + struct PhysicalDeviceCustomBorderColorFeaturesEXT; + + //=== VK_KHR_pipeline_library === + struct PipelineLibraryCreateInfoKHR; + + //=== VK_NV_present_barrier === + struct PhysicalDevicePresentBarrierFeaturesNV; + struct SurfaceCapabilitiesPresentBarrierNV; + struct SwapchainPresentBarrierCreateInfoNV; + + //=== VK_KHR_present_id === + struct PresentIdKHR; + struct PhysicalDevicePresentIdFeaturesKHR; + + //=== VK_KHR_video_encode_queue === + struct VideoEncodeInfoKHR; + struct VideoEncodeCapabilitiesKHR; + struct QueryPoolVideoEncodeFeedbackCreateInfoKHR; + struct VideoEncodeUsageInfoKHR; + struct VideoEncodeRateControlInfoKHR; + struct VideoEncodeRateControlLayerInfoKHR; + struct PhysicalDeviceVideoEncodeQualityLevelInfoKHR; + struct VideoEncodeQualityLevelPropertiesKHR; + struct VideoEncodeQualityLevelInfoKHR; + struct VideoEncodeSessionParametersGetInfoKHR; + struct VideoEncodeSessionParametersFeedbackInfoKHR; + + //=== VK_NV_device_diagnostics_config === + struct PhysicalDeviceDiagnosticsConfigFeaturesNV; + struct DeviceDiagnosticsConfigCreateInfoNV; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + struct CudaModuleCreateInfoNV; + struct CudaFunctionCreateInfoNV; + struct CudaLaunchInfoNV; + struct PhysicalDeviceCudaKernelLaunchFeaturesNV; + struct PhysicalDeviceCudaKernelLaunchPropertiesNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_low_latency === + struct QueryLowLatencySupportNV; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + struct ExportMetalObjectCreateInfoEXT; + struct ExportMetalObjectsInfoEXT; + struct ExportMetalDeviceInfoEXT; + struct ExportMetalCommandQueueInfoEXT; + struct ExportMetalBufferInfoEXT; + struct ImportMetalBufferInfoEXT; + struct ExportMetalTextureInfoEXT; + struct ImportMetalTextureInfoEXT; + struct ExportMetalIOSurfaceInfoEXT; + struct ImportMetalIOSurfaceInfoEXT; + struct ExportMetalSharedEventInfoEXT; + struct ImportMetalSharedEventInfoEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + struct QueueFamilyCheckpointProperties2NV; + struct CheckpointData2NV; + + //=== VK_EXT_descriptor_buffer === + struct PhysicalDeviceDescriptorBufferPropertiesEXT; + struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + struct PhysicalDeviceDescriptorBufferFeaturesEXT; + struct DescriptorAddressInfoEXT; + struct DescriptorBufferBindingInfoEXT; + struct DescriptorBufferBindingPushDescriptorBufferHandleEXT; + union DescriptorDataEXT; + struct DescriptorGetInfoEXT; + struct BufferCaptureDescriptorDataInfoEXT; + struct ImageCaptureDescriptorDataInfoEXT; + struct ImageViewCaptureDescriptorDataInfoEXT; + struct 
SamplerCaptureDescriptorDataInfoEXT; + struct OpaqueCaptureDescriptorDataCreateInfoEXT; + struct AccelerationStructureCaptureDescriptorDataInfoEXT; + + //=== VK_EXT_graphics_pipeline_library === + struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + struct GraphicsPipelineLibraryCreateInfoEXT; + + //=== VK_AMD_shader_early_and_late_fragment_tests === + struct PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + + //=== VK_KHR_fragment_shader_barycentric === + struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + using PhysicalDeviceFragmentShaderBarycentricFeaturesNV = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + + //=== VK_KHR_shader_subgroup_uniform_control_flow === + struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + + //=== VK_NV_fragment_shading_rate_enums === + struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + struct PipelineFragmentShadingRateEnumStateCreateInfoNV; + + //=== VK_NV_ray_tracing_motion_blur === + struct AccelerationStructureGeometryMotionTrianglesDataNV; + struct AccelerationStructureMotionInfoNV; + struct AccelerationStructureMotionInstanceNV; + union AccelerationStructureMotionInstanceDataNV; + struct AccelerationStructureMatrixMotionInstanceNV; + struct AccelerationStructureSRTMotionInstanceNV; + struct SRTDataNV; + struct PhysicalDeviceRayTracingMotionBlurFeaturesNV; + + //=== VK_EXT_mesh_shader === + struct PhysicalDeviceMeshShaderFeaturesEXT; + struct PhysicalDeviceMeshShaderPropertiesEXT; + struct DrawMeshTasksIndirectCommandEXT; + + //=== VK_EXT_ycbcr_2plane_444_formats === + struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + + //=== VK_EXT_fragment_density_map2 === + struct PhysicalDeviceFragmentDensityMap2FeaturesEXT; + struct PhysicalDeviceFragmentDensityMap2PropertiesEXT; + + //=== VK_QCOM_rotated_copy_commands === + struct CopyCommandTransformInfoQCOM; + + //=== VK_KHR_workgroup_memory_explicit_layout === + struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + + //=== VK_EXT_image_compression_control === + struct PhysicalDeviceImageCompressionControlFeaturesEXT; + struct ImageCompressionControlEXT; + struct ImageCompressionPropertiesEXT; + + //=== VK_EXT_attachment_feedback_loop_layout === + struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + + //=== VK_EXT_4444_formats === + struct PhysicalDevice4444FormatsFeaturesEXT; + + //=== VK_EXT_device_fault === + struct PhysicalDeviceFaultFeaturesEXT; + struct DeviceFaultCountsEXT; + struct DeviceFaultInfoEXT; + struct DeviceFaultAddressInfoEXT; + struct DeviceFaultVendorInfoEXT; + struct DeviceFaultVendorBinaryHeaderVersionOneEXT; + + //=== VK_EXT_rgba10x6_formats === + struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + struct DirectFBSurfaceCreateInfoEXT; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT; + struct VertexInputBindingDescription2EXT; + struct VertexInputAttributeDescription2EXT; + + //=== VK_EXT_physical_device_drm === + struct PhysicalDeviceDrmPropertiesEXT; + + //=== VK_EXT_device_address_binding_report === + struct PhysicalDeviceAddressBindingReportFeaturesEXT; + struct DeviceAddressBindingCallbackDataEXT; + + //=== VK_EXT_depth_clip_control === + struct 
PhysicalDeviceDepthClipControlFeaturesEXT; + struct PipelineViewportDepthClipControlCreateInfoEXT; + + //=== VK_EXT_primitive_topology_list_restart === + struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + struct ImportMemoryZirconHandleInfoFUCHSIA; + struct MemoryZirconHandlePropertiesFUCHSIA; + struct MemoryGetZirconHandleInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + struct ImportSemaphoreZirconHandleInfoFUCHSIA; + struct SemaphoreGetZirconHandleInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + struct BufferCollectionCreateInfoFUCHSIA; + struct ImportMemoryBufferCollectionFUCHSIA; + struct BufferCollectionImageCreateInfoFUCHSIA; + struct BufferConstraintsInfoFUCHSIA; + struct BufferCollectionBufferCreateInfoFUCHSIA; + struct BufferCollectionPropertiesFUCHSIA; + struct SysmemColorSpaceFUCHSIA; + struct ImageConstraintsInfoFUCHSIA; + struct ImageFormatConstraintsInfoFUCHSIA; + struct BufferCollectionConstraintsInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + struct SubpassShadingPipelineCreateInfoHUAWEI; + struct PhysicalDeviceSubpassShadingFeaturesHUAWEI; + struct PhysicalDeviceSubpassShadingPropertiesHUAWEI; + + //=== VK_HUAWEI_invocation_mask === + struct PhysicalDeviceInvocationMaskFeaturesHUAWEI; + + //=== VK_NV_external_memory_rdma === + struct MemoryGetRemoteAddressInfoNV; + struct PhysicalDeviceExternalMemoryRDMAFeaturesNV; + + //=== VK_EXT_pipeline_properties === + struct PipelinePropertiesIdentifierEXT; + struct PhysicalDevicePipelinePropertiesFeaturesEXT; + + //=== VK_EXT_frame_boundary === + struct PhysicalDeviceFrameBoundaryFeaturesEXT; + struct FrameBoundaryEXT; + + //=== VK_EXT_multisampled_render_to_single_sampled === + struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + struct SubpassResolvePerformanceQueryEXT; + struct MultisampledRenderToSingleSampledInfoEXT; + + //=== VK_EXT_extended_dynamic_state2 === + struct PhysicalDeviceExtendedDynamicState2FeaturesEXT; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + struct ScreenSurfaceCreateInfoQNX; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + struct PhysicalDeviceColorWriteEnableFeaturesEXT; + struct PipelineColorWriteCreateInfoEXT; + + //=== VK_EXT_primitives_generated_query === + struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + + //=== VK_KHR_ray_tracing_maintenance1 === + struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR; + struct TraceRaysIndirectCommand2KHR; + + //=== VK_EXT_image_view_min_lod === + struct PhysicalDeviceImageViewMinLodFeaturesEXT; + struct ImageViewMinLodCreateInfoEXT; + + //=== VK_EXT_multi_draw === + struct PhysicalDeviceMultiDrawFeaturesEXT; + struct PhysicalDeviceMultiDrawPropertiesEXT; + struct MultiDrawInfoEXT; + struct MultiDrawIndexedInfoEXT; + + //=== VK_EXT_image_2d_view_of_3d === + struct PhysicalDeviceImage2DViewOf3DFeaturesEXT; + + //=== VK_EXT_shader_tile_image === + struct PhysicalDeviceShaderTileImageFeaturesEXT; + struct PhysicalDeviceShaderTileImagePropertiesEXT; + + //=== VK_EXT_opacity_micromap === + struct MicromapBuildInfoEXT; + struct MicromapUsageEXT; + struct MicromapCreateInfoEXT; + struct PhysicalDeviceOpacityMicromapFeaturesEXT; + struct 
PhysicalDeviceOpacityMicromapPropertiesEXT; + struct MicromapVersionInfoEXT; + struct CopyMicromapToMemoryInfoEXT; + struct CopyMemoryToMicromapInfoEXT; + struct CopyMicromapInfoEXT; + struct MicromapBuildSizesInfoEXT; + struct AccelerationStructureTrianglesOpacityMicromapEXT; + struct MicromapTriangleEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + struct PhysicalDeviceDisplacementMicromapFeaturesNV; + struct PhysicalDeviceDisplacementMicromapPropertiesNV; + struct AccelerationStructureTrianglesDisplacementMicromapNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_HUAWEI_cluster_culling_shader === + struct PhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + struct PhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + struct PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + + //=== VK_EXT_border_color_swizzle === + struct PhysicalDeviceBorderColorSwizzleFeaturesEXT; + struct SamplerBorderColorComponentMappingCreateInfoEXT; + + //=== VK_EXT_pageable_device_local_memory === + struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + + //=== VK_ARM_shader_core_properties === + struct PhysicalDeviceShaderCorePropertiesARM; + + //=== VK_ARM_scheduling_controls === + struct DeviceQueueShaderCoreControlCreateInfoARM; + struct PhysicalDeviceSchedulingControlsFeaturesARM; + struct PhysicalDeviceSchedulingControlsPropertiesARM; + + //=== VK_EXT_image_sliced_view_of_3d === + struct PhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + struct ImageViewSlicedCreateInfoEXT; + + //=== VK_VALVE_descriptor_set_host_mapping === + struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + struct DescriptorSetBindingReferenceVALVE; + struct DescriptorSetLayoutHostMappingInfoVALVE; + + //=== VK_EXT_depth_clamp_zero_one === + struct PhysicalDeviceDepthClampZeroOneFeaturesEXT; + + //=== VK_EXT_non_seamless_cube_map === + struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + + //=== VK_ARM_render_pass_striped === + struct PhysicalDeviceRenderPassStripedFeaturesARM; + struct PhysicalDeviceRenderPassStripedPropertiesARM; + struct RenderPassStripeBeginInfoARM; + struct RenderPassStripeInfoARM; + struct RenderPassStripeSubmitInfoARM; + + //=== VK_QCOM_fragment_density_map_offset === + struct PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + struct SubpassFragmentDensityMapOffsetEndInfoQCOM; + + //=== VK_NV_copy_memory_indirect === + struct CopyMemoryIndirectCommandNV; + struct CopyMemoryToImageIndirectCommandNV; + struct PhysicalDeviceCopyMemoryIndirectFeaturesNV; + struct PhysicalDeviceCopyMemoryIndirectPropertiesNV; + + //=== VK_NV_memory_decompression === + struct DecompressMemoryRegionNV; + struct PhysicalDeviceMemoryDecompressionFeaturesNV; + struct PhysicalDeviceMemoryDecompressionPropertiesNV; + + //=== VK_NV_device_generated_commands_compute === + struct PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + struct ComputePipelineIndirectBufferInfoNV; + struct PipelineIndirectDeviceAddressInfoNV; + struct BindPipelineIndirectCommandNV; + + //=== VK_NV_linear_color_attachment === + struct PhysicalDeviceLinearColorAttachmentFeaturesNV; + + //=== VK_EXT_image_compression_control_swapchain === + struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + + //=== VK_QCOM_image_processing === + struct ImageViewSampleWeightCreateInfoQCOM; + struct PhysicalDeviceImageProcessingFeaturesQCOM; + struct PhysicalDeviceImageProcessingPropertiesQCOM; + + //=== VK_EXT_nested_command_buffer === + 
struct PhysicalDeviceNestedCommandBufferFeaturesEXT; + struct PhysicalDeviceNestedCommandBufferPropertiesEXT; + + //=== VK_EXT_external_memory_acquire_unmodified === + struct ExternalMemoryAcquireUnmodifiedEXT; + + //=== VK_EXT_extended_dynamic_state3 === + struct PhysicalDeviceExtendedDynamicState3FeaturesEXT; + struct PhysicalDeviceExtendedDynamicState3PropertiesEXT; + struct ColorBlendEquationEXT; + struct ColorBlendAdvancedEXT; + + //=== VK_EXT_subpass_merge_feedback === + struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + struct RenderPassCreationControlEXT; + struct RenderPassCreationFeedbackInfoEXT; + struct RenderPassCreationFeedbackCreateInfoEXT; + struct RenderPassSubpassFeedbackInfoEXT; + struct RenderPassSubpassFeedbackCreateInfoEXT; + + //=== VK_LUNARG_direct_driver_loading === + struct DirectDriverLoadingInfoLUNARG; + struct DirectDriverLoadingListLUNARG; + + //=== VK_EXT_shader_module_identifier === + struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT; + struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT; + struct PipelineShaderStageModuleIdentifierCreateInfoEXT; + struct ShaderModuleIdentifierEXT; + + //=== VK_EXT_rasterization_order_attachment_access === + struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + using PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + + //=== VK_NV_optical_flow === + struct PhysicalDeviceOpticalFlowFeaturesNV; + struct PhysicalDeviceOpticalFlowPropertiesNV; + struct OpticalFlowImageFormatInfoNV; + struct OpticalFlowImageFormatPropertiesNV; + struct OpticalFlowSessionCreateInfoNV; + struct OpticalFlowSessionCreatePrivateDataInfoNV; + struct OpticalFlowExecuteInfoNV; + + //=== VK_EXT_legacy_dithering === + struct PhysicalDeviceLegacyDitheringFeaturesEXT; + + //=== VK_EXT_pipeline_protected_access === + struct PhysicalDevicePipelineProtectedAccessFeaturesEXT; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_format_resolve === + struct PhysicalDeviceExternalFormatResolveFeaturesANDROID; + struct PhysicalDeviceExternalFormatResolvePropertiesANDROID; + struct AndroidHardwareBufferFormatResolvePropertiesANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_KHR_maintenance5 === + struct PhysicalDeviceMaintenance5FeaturesKHR; + struct PhysicalDeviceMaintenance5PropertiesKHR; + struct RenderingAreaInfoKHR; + struct DeviceImageSubresourceInfoKHR; + struct ImageSubresource2KHR; + using ImageSubresource2EXT = ImageSubresource2KHR; + struct SubresourceLayout2KHR; + using SubresourceLayout2EXT = SubresourceLayout2KHR; + struct PipelineCreateFlags2CreateInfoKHR; + struct BufferUsageFlags2CreateInfoKHR; + + //=== VK_KHR_ray_tracing_position_fetch === + struct PhysicalDeviceRayTracingPositionFetchFeaturesKHR; + + //=== VK_EXT_shader_object === + struct PhysicalDeviceShaderObjectFeaturesEXT; + struct PhysicalDeviceShaderObjectPropertiesEXT; + struct ShaderCreateInfoEXT; + + //=== VK_QCOM_tile_properties === + struct PhysicalDeviceTilePropertiesFeaturesQCOM; + struct TilePropertiesQCOM; + + //=== VK_SEC_amigo_profiling === + struct PhysicalDeviceAmigoProfilingFeaturesSEC; + struct AmigoProfilingSubmitInfoSEC; + + //=== VK_QCOM_multiview_per_view_viewports === + struct PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + + //=== VK_NV_ray_tracing_invocation_reorder === + struct PhysicalDeviceRayTracingInvocationReorderPropertiesNV; + struct PhysicalDeviceRayTracingInvocationReorderFeaturesNV; + + //=== 
VK_NV_extended_sparse_address_space === + struct PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; + struct PhysicalDeviceExtendedSparseAddressSpacePropertiesNV; + + //=== VK_EXT_mutable_descriptor_type === + struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT; + using PhysicalDeviceMutableDescriptorTypeFeaturesVALVE = PhysicalDeviceMutableDescriptorTypeFeaturesEXT; + struct MutableDescriptorTypeListEXT; + using MutableDescriptorTypeListVALVE = MutableDescriptorTypeListEXT; + struct MutableDescriptorTypeCreateInfoEXT; + using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT; + + //=== VK_EXT_layer_settings === + struct LayerSettingsCreateInfoEXT; + struct LayerSettingEXT; + + //=== VK_ARM_shader_core_builtins === + struct PhysicalDeviceShaderCoreBuiltinsFeaturesARM; + struct PhysicalDeviceShaderCoreBuiltinsPropertiesARM; + + //=== VK_EXT_pipeline_library_group_handles === + struct PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + + //=== VK_EXT_dynamic_rendering_unused_attachments === + struct PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + + //=== VK_NV_low_latency2 === + struct LatencySleepModeInfoNV; + struct LatencySleepInfoNV; + struct SetLatencyMarkerInfoNV; + struct GetLatencyMarkerInfoNV; + struct LatencyTimingsFrameReportNV; + struct LatencySubmissionPresentIdNV; + struct SwapchainLatencyCreateInfoNV; + struct OutOfBandQueueTypeInfoNV; + struct LatencySurfaceCapabilitiesNV; + + //=== VK_KHR_cooperative_matrix === + struct CooperativeMatrixPropertiesKHR; + struct PhysicalDeviceCooperativeMatrixFeaturesKHR; + struct PhysicalDeviceCooperativeMatrixPropertiesKHR; + + //=== VK_QCOM_multiview_per_view_render_areas === + struct PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + struct MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + + //=== VK_KHR_video_maintenance1 === + struct PhysicalDeviceVideoMaintenance1FeaturesKHR; + struct VideoInlineQueryInfoKHR; + + //=== VK_NV_per_stage_descriptor_set === + struct PhysicalDevicePerStageDescriptorSetFeaturesNV; + + //=== VK_QCOM_image_processing2 === + struct PhysicalDeviceImageProcessing2FeaturesQCOM; + struct PhysicalDeviceImageProcessing2PropertiesQCOM; + struct SamplerBlockMatchWindowCreateInfoQCOM; + + //=== VK_QCOM_filter_cubic_weights === + struct PhysicalDeviceCubicWeightsFeaturesQCOM; + struct SamplerCubicWeightsCreateInfoQCOM; + struct BlitImageCubicWeightsInfoQCOM; + + //=== VK_QCOM_ycbcr_degamma === + struct PhysicalDeviceYcbcrDegammaFeaturesQCOM; + struct SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; + + //=== VK_QCOM_filter_cubic_clamp === + struct PhysicalDeviceCubicClampFeaturesQCOM; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + struct PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + + //=== VK_KHR_vertex_attribute_divisor === + struct PhysicalDeviceVertexAttributeDivisorPropertiesKHR; + struct VertexInputBindingDivisorDescriptionKHR; + using VertexInputBindingDivisorDescriptionEXT = VertexInputBindingDivisorDescriptionKHR; + struct PipelineVertexInputDivisorStateCreateInfoKHR; + using PipelineVertexInputDivisorStateCreateInfoEXT = PipelineVertexInputDivisorStateCreateInfoKHR; + struct PhysicalDeviceVertexAttributeDivisorFeaturesKHR; + using PhysicalDeviceVertexAttributeDivisorFeaturesEXT = PhysicalDeviceVertexAttributeDivisorFeaturesKHR; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + struct ScreenBufferPropertiesQNX; + struct ScreenBufferFormatPropertiesQNX; + struct 
ImportScreenBufferInfoQNX; + struct ExternalFormatQNX; + struct PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_MSFT_layered_driver === + struct PhysicalDeviceLayeredDriverPropertiesMSFT; + + //=== VK_KHR_calibrated_timestamps === + struct CalibratedTimestampInfoKHR; + using CalibratedTimestampInfoEXT = CalibratedTimestampInfoKHR; + + //=== VK_KHR_maintenance6 === + struct PhysicalDeviceMaintenance6FeaturesKHR; + struct PhysicalDeviceMaintenance6PropertiesKHR; + struct BindMemoryStatusKHR; + struct BindDescriptorSetsInfoKHR; + struct PushConstantsInfoKHR; + struct PushDescriptorSetInfoKHR; + struct PushDescriptorSetWithTemplateInfoKHR; + struct SetDescriptorBufferOffsetsInfoEXT; + struct BindDescriptorBufferEmbeddedSamplersInfoEXT; + + //=== VK_NV_descriptor_pool_overallocation === + struct PhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + + //=================================== + //=== HANDLE forward declarations === + //=================================== + + //=== VK_VERSION_1_0 === + class Instance; + class PhysicalDevice; + class Device; + class Queue; + class DeviceMemory; + class Fence; + class Semaphore; + class Event; + class QueryPool; + class Buffer; + class BufferView; + class Image; + class ImageView; + class ShaderModule; + class PipelineCache; + class Pipeline; + class PipelineLayout; + class Sampler; + class DescriptorPool; + class DescriptorSet; + class DescriptorSetLayout; + class Framebuffer; + class RenderPass; + class CommandPool; + class CommandBuffer; + + //=== VK_VERSION_1_1 === + class SamplerYcbcrConversion; + class DescriptorUpdateTemplate; + + //=== VK_VERSION_1_3 === + class PrivateDataSlot; + + //=== VK_KHR_surface === + class SurfaceKHR; + + //=== VK_KHR_swapchain === + class SwapchainKHR; + + //=== VK_KHR_display === + class DisplayKHR; + class DisplayModeKHR; + + //=== VK_EXT_debug_report === + class DebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + class VideoSessionKHR; + class VideoSessionParametersKHR; + + //=== VK_NVX_binary_import === + class CuModuleNVX; + class CuFunctionNVX; + + //=== VK_EXT_debug_utils === + class DebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + class AccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + class ValidationCacheEXT; + + //=== VK_NV_ray_tracing === + class AccelerationStructureNV; + + //=== VK_INTEL_performance_query === + class PerformanceConfigurationINTEL; + + //=== VK_KHR_deferred_host_operations === + class DeferredOperationKHR; + + //=== VK_NV_device_generated_commands === + class IndirectCommandsLayoutNV; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + class CudaModuleNV; + class CudaFunctionNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + class BufferCollectionFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + class MicromapEXT; + + //=== VK_NV_optical_flow === + class OpticalFlowSessionNV; + + //=== VK_EXT_shader_object === + class ShaderEXT; + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + //====================== + //=== UNIQUE HANDLEs === + //====================== + + //=== VK_VERSION_1_0 === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueInstance = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueDevice = UniqueHandle; + + template + 
class UniqueHandleTraits + { + public: + using deleter = ObjectFree; + }; + + using UniqueDeviceMemory = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueFence = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueSemaphore = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueEvent = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueQueryPool = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueBuffer = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueBufferView = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueImage = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueImageView = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueShaderModule = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniquePipelineCache = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniquePipeline = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniquePipelineLayout = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueSampler = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueDescriptorPool = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = PoolFree; + }; + + using UniqueDescriptorSet = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueDescriptorSetLayout = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueFramebuffer = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueRenderPass = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueCommandPool = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = PoolFree; + }; + + using UniqueCommandBuffer = UniqueHandle; + + //=== VK_VERSION_1_1 === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueSamplerYcbcrConversion = UniqueHandle; + using UniqueSamplerYcbcrConversionKHR = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueDescriptorUpdateTemplate = UniqueHandle; + using UniqueDescriptorUpdateTemplateKHR = UniqueHandle; + + //=== VK_VERSION_1_3 === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniquePrivateDataSlot = UniqueHandle; + using UniquePrivateDataSlotEXT = UniqueHandle; + + //=== VK_KHR_surface === + template + class UniqueHandleTraits + { + public: + using 
deleter = ObjectDestroy; + }; + + using UniqueSurfaceKHR = UniqueHandle; + + //=== VK_KHR_swapchain === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueSwapchainKHR = UniqueHandle; + + //=== VK_KHR_display === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueDisplayKHR = UniqueHandle; + + //=== VK_EXT_debug_report === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueDebugReportCallbackEXT = UniqueHandle; + + //=== VK_KHR_video_queue === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueVideoSessionKHR = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueVideoSessionParametersKHR = UniqueHandle; + + //=== VK_NVX_binary_import === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueCuModuleNVX = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueCuFunctionNVX = UniqueHandle; + + //=== VK_EXT_debug_utils === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueDebugUtilsMessengerEXT = UniqueHandle; + + //=== VK_KHR_acceleration_structure === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueAccelerationStructureKHR = UniqueHandle; + + //=== VK_EXT_validation_cache === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueValidationCacheEXT = UniqueHandle; + + //=== VK_NV_ray_tracing === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueAccelerationStructureNV = UniqueHandle; + + //=== VK_INTEL_performance_query === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniquePerformanceConfigurationINTEL = UniqueHandle; + + //=== VK_KHR_deferred_host_operations === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueDeferredOperationKHR = UniqueHandle; + + //=== VK_NV_device_generated_commands === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueIndirectCommandsLayoutNV = UniqueHandle; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueCudaModuleNV = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueCudaFunctionNV = UniqueHandle; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueBufferCollectionFUCHSIA = UniqueHandle; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueMicromapEXT = UniqueHandle; + + //=== VK_NV_optical_flow === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniqueOpticalFlowSessionNV = UniqueHandle; + + //=== VK_EXT_shader_object === 
+  template <typename Dispatch>
+  class UniqueHandleTraits<ShaderEXT, Dispatch>
+  {
+  public:
+    using deleter = ObjectDestroy<Device, Dispatch>;
+  };
+
+  using UniqueShaderEXT = UniqueHandle<ShaderEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  //===============
+  //=== HANDLEs ===
+  //===============
+
+  template <typename Type>
+  struct isVulkanHandleType
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false;
+  };
+
+  class SurfaceKHR
+  {
+  public:
+    using CType      = VkSurfaceKHR;
+    using NativeType = VkSurfaceKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR SurfaceKHR() = default;
+
+    VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT : m_surfaceKHR( surfaceKHR ) {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    SurfaceKHR & operator=( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT
+    {
+      m_surfaceKHR = surfaceKHR;
+      return *this;
+    }
+#endif
+
+    SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_surfaceKHR = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SurfaceKHR const & ) const = default;
+#else
+    bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR == rhs.m_surfaceKHR;
+    }
+
+    bool operator!=( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR != rhs.m_surfaceKHR;
+    }
+
+    bool operator<( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR < rhs.m_surfaceKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_surfaceKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSurfaceKHR m_surfaceKHR = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR, void>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR, void>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SurfaceKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class DebugReportCallbackEXT
+  {
+  public:
+    using CType      = VkDebugReportCallbackEXT;
+    using NativeType = VkDebugReportCallbackEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() = default;
+
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
+      : m_debugReportCallbackEXT( debugReportCallbackEXT )
+    {
+    }
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    DebugReportCallbackEXT & operator=( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugReportCallbackEXT = debugReportCallbackEXT;
+      return *this;
+    }
+#endif
+
+    DebugReportCallbackEXT & operator=(
std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_debugReportCallbackEXT = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugReportCallbackEXT const & ) const = default; +#else + bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT; + } + + bool operator!=( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT; + } + + bool operator<( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_debugReportCallbackEXT == VK_NULL_HANDLE; + } + + private: + VkDebugReportCallbackEXT m_debugReportCallbackEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DebugUtilsMessengerEXT + { + public: + using CType = VkDebugUtilsMessengerEXT; + using NativeType = VkDebugUtilsMessengerEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() = default; + + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT + : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT ) + { + } + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DebugUtilsMessengerEXT & operator=( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT + { + m_debugUtilsMessengerEXT = debugUtilsMessengerEXT; + return *this; + } +#endif + + DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_debugUtilsMessengerEXT = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DebugUtilsMessengerEXT const & ) const = default; +#else + bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT; + } + + bool operator!=( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT; + } + + bool operator<( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_debugUtilsMessengerEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + 
return m_debugUtilsMessengerEXT == VK_NULL_HANDLE; + } + + private: + VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DisplayKHR + { + public: + using CType = VkDisplayKHR; + using NativeType = VkDisplayKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; + + public: + VULKAN_HPP_CONSTEXPR DisplayKHR() = default; + + VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT : m_displayKHR( displayKHR ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DisplayKHR & operator=( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT + { + m_displayKHR = displayKHR; + return *this; + } +#endif + + DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_displayKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayKHR const & ) const = default; +#else + bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR == rhs.m_displayKHR; + } + + bool operator!=( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR != rhs.m_displayKHR; + } + + bool operator<( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR < rhs.m_displayKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_displayKHR == VK_NULL_HANDLE; + } + + private: + VkDisplayKHR m_displayKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class SwapchainKHR + { + public: + using CType = VkSwapchainKHR; + using NativeType = VkSwapchainKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; + + public: + VULKAN_HPP_CONSTEXPR SwapchainKHR() = default; + + VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT : m_swapchainKHR( swapchainKHR ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + SwapchainKHR & operator=( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = swapchainKHR; + return *this; + } +#endif + + SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainKHR const & ) const = 
default; +#else + bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR == rhs.m_swapchainKHR; + } + + bool operator!=( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR != rhs.m_swapchainKHR; + } + + bool operator<( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR < rhs.m_swapchainKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR == VK_NULL_HANDLE; + } + + private: + VkSwapchainKHR m_swapchainKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Semaphore + { + public: + using CType = VkSemaphore; + using NativeType = VkSemaphore; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; + + public: + VULKAN_HPP_CONSTEXPR Semaphore() = default; + + VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT : m_semaphore( semaphore ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Semaphore & operator=( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT + { + m_semaphore = semaphore; + return *this; + } +#endif + + Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_semaphore = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Semaphore const & ) const = default; +#else + bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore == rhs.m_semaphore; + } + + bool operator!=( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore != rhs.m_semaphore; + } + + bool operator<( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore < rhs.m_semaphore; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore == VK_NULL_HANDLE; + } + + private: + VkSemaphore m_semaphore = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Semaphore; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Fence + { + public: + using CType = VkFence; + using NativeType = VkFence; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; + + public: + 
VULKAN_HPP_CONSTEXPR Fence() = default; + + VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT : m_fence( fence ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Fence & operator=( VkFence fence ) VULKAN_HPP_NOEXCEPT + { + m_fence = fence; + return *this; + } +#endif + + Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_fence = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Fence const & ) const = default; +#else + bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence == rhs.m_fence; + } + + bool operator!=( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence != rhs.m_fence; + } + + bool operator<( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence < rhs.m_fence; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT + { + return m_fence; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_fence != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_fence == VK_NULL_HANDLE; + } + + private: + VkFence m_fence = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Fence; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Fence; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class PerformanceConfigurationINTEL + { + public: + using CType = VkPerformanceConfigurationINTEL; + using NativeType = VkPerformanceConfigurationINTEL; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() = default; + + VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT + : m_performanceConfigurationINTEL( performanceConfigurationINTEL ) + { + } + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PerformanceConfigurationINTEL & operator=( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT + { + m_performanceConfigurationINTEL = performanceConfigurationINTEL; + return *this; + } +#endif + + PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_performanceConfigurationINTEL = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceConfigurationINTEL const & ) const = default; +#else + bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL; + } + + bool operator!=( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL; + } + + bool operator<( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator 
VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL == VK_NULL_HANDLE; + } + + private: + VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class QueryPool + { + public: + using CType = VkQueryPool; + using NativeType = VkQueryPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; + + public: + VULKAN_HPP_CONSTEXPR QueryPool() = default; + + VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT : m_queryPool( queryPool ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + QueryPool & operator=( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT + { + m_queryPool = queryPool; + return *this; + } +#endif + + QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_queryPool = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPool const & ) const = default; +#else + bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queryPool == rhs.m_queryPool; + } + + bool operator!=( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queryPool != rhs.m_queryPool; + } + + bool operator<( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queryPool < rhs.m_queryPool; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool == VK_NULL_HANDLE; + } + + private: + VkQueryPool m_queryPool = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::QueryPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::QueryPool; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Buffer + { + public: + using CType = VkBuffer; + using NativeType = VkBuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer; + + public: + VULKAN_HPP_CONSTEXPR Buffer() = default; + + VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT : m_buffer( buffer ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Buffer & operator=( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT + { + m_buffer = buffer; + return *this; + } +#endif + + Buffer & operator=( std::nullptr_t ) 
VULKAN_HPP_NOEXCEPT + { + m_buffer = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Buffer const & ) const = default; +#else + bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_buffer == rhs.m_buffer; + } + + bool operator!=( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_buffer != rhs.m_buffer; + } + + bool operator<( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_buffer < rhs.m_buffer; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT + { + return m_buffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_buffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_buffer == VK_NULL_HANDLE; + } + + private: + VkBuffer m_buffer = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Buffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Buffer; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class PipelineLayout + { + public: + using CType = VkPipelineLayout; + using NativeType = VkPipelineLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; + + public: + VULKAN_HPP_CONSTEXPR PipelineLayout() = default; + + VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT : m_pipelineLayout( pipelineLayout ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PipelineLayout & operator=( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT + { + m_pipelineLayout = pipelineLayout; + return *this; + } +#endif + + PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineLayout = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLayout const & ) const = default; +#else + bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout == rhs.m_pipelineLayout; + } + + bool operator!=( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout != rhs.m_pipelineLayout; + } + + bool operator<( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout < rhs.m_pipelineLayout; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout == VK_NULL_HANDLE; + } + + private: + VkPipelineLayout m_pipelineLayout = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DescriptorSet + { + public: + using CType = VkDescriptorSet; + using NativeType = VkDescriptorSet; + + static VULKAN_HPP_CONST_OR_CONSTEXPR 
VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet; + + public: + VULKAN_HPP_CONSTEXPR DescriptorSet() = default; + + VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT : m_descriptorSet( descriptorSet ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorSet & operator=( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSet = descriptorSet; + return *this; + } +#endif + + DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSet = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSet const & ) const = default; +#else + bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet == rhs.m_descriptorSet; + } + + bool operator!=( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet != rhs.m_descriptorSet; + } + + bool operator<( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet < rhs.m_descriptorSet; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet == VK_NULL_HANDLE; + } + + private: + VkDescriptorSet m_descriptorSet = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class ImageView + { + public: + using CType = VkImageView; + using NativeType = VkImageView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView; + + public: + VULKAN_HPP_CONSTEXPR ImageView() = default; + + VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT : m_imageView( imageView ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ImageView & operator=( VkImageView imageView ) VULKAN_HPP_NOEXCEPT + { + m_imageView = imageView; + return *this; + } +#endif + + ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_imageView = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageView const & ) const = default; +#else + bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView == rhs.m_imageView; + } + + bool operator!=( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView != rhs.m_imageView; + } + + bool operator<( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView < rhs.m_imageView; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const 
VULKAN_HPP_NOEXCEPT + { + return m_imageView; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_imageView != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_imageView == VK_NULL_HANDLE; + } + + private: + VkImageView m_imageView = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Pipeline + { + public: + using CType = VkPipeline; + using NativeType = VkPipeline; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; + + public: + VULKAN_HPP_CONSTEXPR Pipeline() = default; + + VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT : m_pipeline( pipeline ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Pipeline & operator=( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = pipeline; + return *this; + } +#endif + + Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Pipeline const & ) const = default; +#else + bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline == rhs.m_pipeline; + } + + bool operator!=( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline != rhs.m_pipeline; + } + + bool operator<( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline < rhs.m_pipeline; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline == VK_NULL_HANDLE; + } + + private: + VkPipeline m_pipeline = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class ShaderEXT + { + public: + using CType = VkShaderEXT; + using NativeType = VkShaderEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR ShaderEXT() = default; + + VULKAN_HPP_CONSTEXPR ShaderEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ShaderEXT( VkShaderEXT shaderEXT ) VULKAN_HPP_NOEXCEPT : m_shaderEXT( shaderEXT ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ShaderEXT & operator=( VkShaderEXT shaderEXT ) VULKAN_HPP_NOEXCEPT + { + m_shaderEXT = shaderEXT; + return *this; + } +#endif + + ShaderEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_shaderEXT = {}; + return *this; 
+ } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderEXT const & ) const = default; +#else + bool operator==( ShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT == rhs.m_shaderEXT; + } + + bool operator!=( ShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT != rhs.m_shaderEXT; + } + + bool operator<( ShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT < rhs.m_shaderEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderEXT() const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT == VK_NULL_HANDLE; + } + + private: + VkShaderEXT m_shaderEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Image + { + public: + using CType = VkImage; + using NativeType = VkImage; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage; + + public: + VULKAN_HPP_CONSTEXPR Image() = default; + + VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT : m_image( image ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Image & operator=( VkImage image ) VULKAN_HPP_NOEXCEPT + { + m_image = image; + return *this; + } +#endif + + Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_image = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Image const & ) const = default; +#else + bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_image == rhs.m_image; + } + + bool operator!=( Image const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_image != rhs.m_image; + } + + bool operator<( Image const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_image < rhs.m_image; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT + { + return m_image; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_image != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_image == VK_NULL_HANDLE; + } + + private: + VkImage m_image = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Image; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Image; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class AccelerationStructureNV + { + public: + using CType = VkAccelerationStructureNV; + using NativeType = VkAccelerationStructureNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV; + + public: + VULKAN_HPP_CONSTEXPR AccelerationStructureNV() = default; + + VULKAN_HPP_CONSTEXPR 
AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureNV( accelerationStructureNV ) + { + } + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + AccelerationStructureNV & operator=( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureNV = accelerationStructureNV; + return *this; + } +#endif + + AccelerationStructureNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureNV = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureNV const & ) const = default; +#else + bool operator==( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV == rhs.m_accelerationStructureNV; + } + + bool operator!=( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV != rhs.m_accelerationStructureNV; + } + + bool operator<( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV < rhs.m_accelerationStructureNV; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV == VK_NULL_HANDLE; + } + + private: + VkAccelerationStructureNV m_accelerationStructureNV = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class OpticalFlowSessionNV + { + public: + using CType = VkOpticalFlowSessionNV; + using NativeType = VkOpticalFlowSessionNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR OpticalFlowSessionNV() = default; + + VULKAN_HPP_CONSTEXPR OpticalFlowSessionNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT OpticalFlowSessionNV( VkOpticalFlowSessionNV opticalFlowSessionNV ) VULKAN_HPP_NOEXCEPT + : m_opticalFlowSessionNV( opticalFlowSessionNV ) + { + } + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + OpticalFlowSessionNV & operator=( VkOpticalFlowSessionNV opticalFlowSessionNV ) VULKAN_HPP_NOEXCEPT + { + m_opticalFlowSessionNV = opticalFlowSessionNV; + return *this; + } +#endif + + OpticalFlowSessionNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_opticalFlowSessionNV = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpticalFlowSessionNV const & ) const = default; +#else + bool operator==( OpticalFlowSessionNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_opticalFlowSessionNV == rhs.m_opticalFlowSessionNV; + } + + bool operator!=( OpticalFlowSessionNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
m_opticalFlowSessionNV != rhs.m_opticalFlowSessionNV;
+    }
+
+    bool operator<( OpticalFlowSessionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_opticalFlowSessionNV < rhs.m_opticalFlowSessionNV;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkOpticalFlowSessionNV() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_opticalFlowSessionNV;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_opticalFlowSessionNV != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_opticalFlowSessionNV == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkOpticalFlowSessionNV m_opticalFlowSessionNV = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class DescriptorUpdateTemplate
+  {
+  public:
+    using CType      = VkDescriptorUpdateTemplate;
+    using NativeType = VkDescriptorUpdateTemplate;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() = default;
+
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorUpdateTemplate( descriptorUpdateTemplate )
+    {
+    }
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    DescriptorUpdateTemplate & operator=( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorUpdateTemplate = descriptorUpdateTemplate;
+      return *this;
+    }
+#endif
+
+    DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorUpdateTemplate = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorUpdateTemplate const & ) const = default;
+#else
+    bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate;
+    }
+
+    bool operator!=( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate;
+    }
+
+    bool operator<( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorUpdateTemplate == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorUpdateTemplate m_descriptorUpdateTemplate = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
+
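Every handle class in this header repeats the same pattern seen above: explicit construction from the raw C handle, an explicit conversion back, `operator bool` / `operator!` null checks against `VK_NULL_HANDLE`, and assignment from `nullptr` to reset. A minimal sketch of that round trip, assuming the default `vk` alias for `VULKAN_HPP_NAMESPACE` (`handle_roundtrip` is just an illustrative name, not part of the library):

```cpp
#include <vulkan/vulkan.hpp>
#include <cassert>

// Round-trip between a raw C handle and its C++ wrapper; the same
// pattern applies to Semaphore, SwapchainKHR, DescriptorUpdateTemplate, etc.
void handle_roundtrip( VkFence rawFence )
{
  vk::Fence fence{ rawFence };                    // explicit wrap of the C handle
  if ( fence )                                    // explicit operator bool(): non-null check
  {
    VkFence raw = static_cast<VkFence>( fence );  // explicit unwrap back to VkFence
    assert( raw == rawFence );                    // the wrapper stores the handle verbatim
  }
  fence = nullptr;                                // operator=( std::nullptr_t ): reset
  assert( !fence );                               // operator!(): now VK_NULL_HANDLE
}
```

The `CppType` and `isVulkanHandleType` specializations that follow each class are what allow generic code to map an `ObjectType` or `DebugReportObjectTypeEXT` enum value back to the corresponding wrapper type at compile time.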
+ class Event + { + public: + using CType = VkEvent; + using NativeType = VkEvent; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent; + + public: + VULKAN_HPP_CONSTEXPR Event() = default; + + VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT : m_event( event ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Event & operator=( VkEvent event ) VULKAN_HPP_NOEXCEPT + { + m_event = event; + return *this; + } +#endif + + Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_event = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Event const & ) const = default; +#else + bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_event == rhs.m_event; + } + + bool operator!=( Event const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_event != rhs.m_event; + } + + bool operator<( Event const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_event < rhs.m_event; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT + { + return m_event; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_event != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_event == VK_NULL_HANDLE; + } + + private: + VkEvent m_event = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Event; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Event; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class AccelerationStructureKHR + { + public: + using CType = VkAccelerationStructureKHR; + using NativeType = VkAccelerationStructureKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; + + public: + VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() = default; + + VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureKHR( accelerationStructureKHR ) + { + } + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + AccelerationStructureKHR & operator=( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureKHR = accelerationStructureKHR; + return *this; + } +#endif + + AccelerationStructureKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureKHR const & ) const = default; +#else + bool operator==( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR == rhs.m_accelerationStructureKHR; + } + + bool operator!=( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
m_accelerationStructureKHR != rhs.m_accelerationStructureKHR; + } + + bool operator<( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR < rhs.m_accelerationStructureKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR == VK_NULL_HANDLE; + } + + private: + VkAccelerationStructureKHR m_accelerationStructureKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class MicromapEXT + { + public: + using CType = VkMicromapEXT; + using NativeType = VkMicromapEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR MicromapEXT() = default; + + VULKAN_HPP_CONSTEXPR MicromapEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT MicromapEXT( VkMicromapEXT micromapEXT ) VULKAN_HPP_NOEXCEPT : m_micromapEXT( micromapEXT ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + MicromapEXT & operator=( VkMicromapEXT micromapEXT ) VULKAN_HPP_NOEXCEPT + { + m_micromapEXT = micromapEXT; + return *this; + } +#endif + + MicromapEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_micromapEXT = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MicromapEXT const & ) const = default; +#else + bool operator==( MicromapEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_micromapEXT == rhs.m_micromapEXT; + } + + bool operator!=( MicromapEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_micromapEXT != rhs.m_micromapEXT; + } + + bool operator<( MicromapEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_micromapEXT < rhs.m_micromapEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkMicromapEXT() const VULKAN_HPP_NOEXCEPT + { + return m_micromapEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_micromapEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_micromapEXT == VK_NULL_HANDLE; + } + + private: + VkMicromapEXT m_micromapEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::MicromapEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class CommandBuffer + { + public: + using CType = VkCommandBuffer; + using NativeType = VkCommandBuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer; + + public: + VULKAN_HPP_CONSTEXPR CommandBuffer() = default; + + 
VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT : m_commandBuffer( commandBuffer ) {} + + CommandBuffer & operator=( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = commandBuffer; + return *this; + } + + CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBuffer const & ) const = default; +#else + bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer == rhs.m_commandBuffer; + } + + bool operator!=( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer != rhs.m_commandBuffer; + } + + bool operator<( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer < rhs.m_commandBuffer; + } +#endif + + //=== VK_VERSION_1_0 === + + template + VULKAN_HPP_NODISCARD Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setViewport( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewport( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setScissor( uint32_t firstScissor, + uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setScissor( uint32_t firstScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setLineWidth( float lineWidth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthBias( float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setBlendConstants( const float blendConstants[4], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t compareMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t writeMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t reference, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dynamicOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void bindVertexBuffers( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindVertexBuffers( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void draw( uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexed( uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance, 
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatch( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const 
VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize dataSize, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue & color, + VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, + 
VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void clearAttachments( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, + uint32_t rectCount, + const VULKAN_HPP_NAMESPACE::ClearRect * pRects, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy const & attachments, + VULKAN_HPP_NAMESPACE::ArrayProxy const & rects, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void waitEvents( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, + uint32_t 
bufferMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & values, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; + + template + void endRenderPass( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void executeCommands( uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_1 === + + template + void setDeviceMask( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_2 === + + template + void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_3 === + + template + void setEvent2( 
VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void resetEvent2( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void waitEvents2( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, + 
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
+                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
+                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,
+                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,
+                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,
+                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,
+                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,
+                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,
+                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRendering( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWithCount( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
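+
+    // Usage sketch (illustrative only; assumes `cmd`, an image view `view` for a
+    // `width` x `height` color target, and dynamic rendering enabled on the
+    // device): beginRendering/endRendering paired with the count-based dynamic
+    // viewport and scissor state declared above.
+    //
+    //   vk::RenderingAttachmentInfo color{};
+    //   color.imageView   = view;
+    //   color.imageLayout = vk::ImageLayout::eColorAttachmentOptimal;
+    //   color.loadOp      = vk::AttachmentLoadOp::eClear;
+    //   color.storeOp     = vk::AttachmentStoreOp::eStore;
+    //   vk::RenderingInfo info{};
+    //   info.renderArea           = vk::Rect2D{ { 0, 0 }, { width, height } };
+    //   info.layerCount           = 1;
+    //   info.colorAttachmentCount = 1;
+    //   info.pColorAttachments    = &color;
+    //   cmd.beginRendering( info );
+    //   cmd.setViewportWithCount( vk::Viewport( 0.0f, 0.0f, float( width ), float( height ), 0.0f, 1.0f ) );
+    //   cmd.setScissorWithCount( vk::Rect2D{ { 0, 0 }, { width, height } } );
+    //   /* ... bind a pipeline and draw ... */
+    //   cmd.endRendering();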
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers2( uint32_t firstBinding, uint32_t bindingCount,
+                             const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
+                             const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers2(
+      uint32_t firstBinding,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     buffers,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
+                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+                       VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp,
+                       VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp,
+                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
+                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_EXT_debug_marker ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerEndEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_KHR_video_queue ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_KHR_video_decode_queue ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_EXT_transform_feedback ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount,
+                                          const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
+                                          const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
+                                          const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
+                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
+                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     buffers,
+                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes
+                                            VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount,
+                                    const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
+                                    const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
+                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     counterBuffers,
+                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
+                                      VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount,
+                                  const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
+                                  const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endTransformFeedbackEXT( uint32_t firstCounterBuffer,
+                                  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     counterBuffers,
+                                  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
+                                    VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query,
+                               VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance,
+                                   VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
+                                   uint32_t counterOffset, uint32_t vertexStride,
+                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_NVX_binary_import ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_AMD_draw_indirect_count ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                               VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                               uint32_t maxDrawCount, uint32_t stride,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                      VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                      uint32_t maxDrawCount, uint32_t stride,
+                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_KHR_dynamic_rendering ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderingKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_KHR_device_group ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ,
+                          uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ,
+                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_KHR_push_descriptor ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+                               VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set,
+                               uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+                               VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set,
+                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+                                           VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void * pData,
+                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+                                           VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, DataType const & data,
+                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_EXT_conditional_rendering ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,
+                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,
+                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_NV_clip_space_w_scaling ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount,
+                                const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWScalingNV( uint32_t firstViewport,
+                                VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_EXT_discard_rectangles ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount,
+                                 const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,
+                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
+                                 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,
+                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable,
+                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode,
+                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_KHR_create_renderpass2 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
+                              const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
+                              const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
+                          const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
+                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
+                          const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
+                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_EXT_debug_utils ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
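+
+    // Usage sketch (illustrative only; VK_EXT_debug_utils must be enabled on
+    // the instance and its entry points resolved, e.g. through the dynamic
+    // dispatcher): bracketing a region of commands with a debug label.
+    //
+    //   vk::DebugUtilsLabelEXT label{};
+    //   label.pLabelName = "shadow pass";   // hypothetical label text
+    //   cmd.beginDebugUtilsLabelEXT( label );
+    //   /* ... commands belonging to the labelled region ... */
+    //   cmd.endDebugUtilsLabelEXT();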
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    //=== VK_AMDX_shader_enqueue ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+                            const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+                            const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+                                    const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+                                    const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+                                         VULKAN_HPP_NAMESPACE::DeviceAddress countInfo,
+                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    //=== VK_EXT_sample_locations ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_KHR_acceleration_structure ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructuresKHR( uint32_t infoCount,
+                                         const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
+                                         const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
+                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructuresKHR(
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const &      infos,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructuresIndirectKHR( uint32_t infoCount,
+                                                 const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
+                                                 const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,
+                                                 const uint32_t * pIndirectStrides,
+                                                 const uint32_t * const * ppMaxPrimitiveCounts,
+                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructuresIndirectKHR(
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const &                             indirectDeviceAddresses,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const &                                                        indirectStrides,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const &                                                pMaxPrimitiveCounts,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
+                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,
+                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
+                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
+                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
+                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
+                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount,
+                                                   const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
+                                                   VULKAN_HPP_NAMESPACE::QueryType queryType,
+                                                   VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery,
+                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesKHR(
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+      VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
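+
+    // Usage sketch (illustrative only; `geometryInfo` is assumed to be a fully
+    // populated AccelerationStructureBuildGeometryInfoKHR and `rangeInfo` a
+    // matching build-range description, both hypothetical local variables, with
+    // VK_KHR_acceleration_structure enabled): recording a single build with the
+    // pointer overload declared above.
+    //
+    //   const vk::AccelerationStructureBuildRangeInfoKHR * pRangeInfo = &rangeInfo;
+    //   cmd.buildAccelerationStructuresKHR( 1, &geometryInfo, &pRangeInfo );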
+
+    //=== VK_KHR_ray_tracing_pipeline ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
+                       const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
+                       const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
+                       const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
+                       uint32_t width, uint32_t height, uint32_t depth,
+                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
+                       const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
+                       const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
+                       const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
+                       uint32_t width, uint32_t height, uint32_t depth,
+                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
+                               const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
+                               const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
+                               const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
+                               VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
+                               const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
+                               const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
+                               const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
+                               VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize,
+                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_NV_shading_rate_image ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount,
+                                          const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
+                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportShadingRatePaletteNV( uint32_t firstViewport,
+                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,
+                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
+                                 uint32_t customSampleOrderCount,
+                                 const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,
+                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
+                                 VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,
+                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_NV_ray_tracing ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,
+                                       VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
+                                       VULKAN_HPP_NAMESPACE::Bool32 update,
+                                       VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
+                                       VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
+                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
+                                       VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
+                                       VULKAN_HPP_NAMESPACE::Bool32 update,
+                                       VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
+                                       VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
+                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
+                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
+                                      VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,
+                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,
+                      VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
+                      VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,
+                      VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
+                      VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
+                      VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,
+                      VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
+                      VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
+                      VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
+                      VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
+                      VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
+                      uint32_t width, uint32_t height, uint32_t depth,
+                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount,
+                                                  const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
+                                                  VULKAN_HPP_NAMESPACE::QueryType queryType,
+                                                  VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery,
+                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesNV(
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
+      VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_KHR_draw_indirect_count ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                               VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                               uint32_t maxDrawCount, uint32_t stride,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                      VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                      uint32_t maxDrawCount, uint32_t stride,
+                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_AMD_buffer_marker ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
+                               VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_NV_mesh_shader ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                  uint32_t drawCount, uint32_t stride,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                       VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                       uint32_t maxDrawCount, uint32_t stride,
+                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_NV_scissor_exclusive ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount,
+                                      const VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables,
+                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor,
+                                      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables,
+                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount,
+                                const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setExclusiveScissorNV( uint32_t firstExclusiveScissor,
+                                VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_NV_device_diagnostic_checkpoints ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename CheckpointMarkerType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCheckpointNV( CheckpointMarkerType const & checkpointMarker,
+                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
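+
+    // Usage sketch (illustrative only; requires the VK_NV_mesh_shader device
+    // extension and a bound mesh-shading pipeline; `taskCount` is a
+    // hypothetical workload size):
+    //
+    //   cmd.drawMeshTasksNV( taskCount, 0 /* firstTask */ );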
+
+    //=== VK_INTEL_performance_query ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo,
+                                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+      setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo,
+                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo,
+                                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+      setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo,
+                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo,
+                                                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+      setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo,
+                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_KHR_fragment_shading_rate ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,
+                                    const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
+                                    const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_EXT_line_rasterization ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_EXT_extended_dynamic_state ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
+                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
+                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount,
+                                const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
+                                const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindVertexBuffers2EXT(
+      uint32_t firstBinding,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     buffers,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
+                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
+                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+                          VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp,
+                          VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp,
+                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_NV_device_generated_commands ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
+                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
+                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
+                                     const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
+                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
+                                     const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
+                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+                                    VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex,
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_EXT_depth_bias_control ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo,
+                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo,
+                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_KHR_video_encode_queue ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    //=== VK_NV_cuda_kernel_launch ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+    //=== VK_KHR_synchronization2 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
+                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
+                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents2KHR( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event * pEvents,
+                         const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
+                         VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
+                                VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_EXT_descriptor_buffer ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindDescriptorBuffersEXT( uint32_t bufferCount,
+                                   const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos,
+                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos,
+                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+                                        VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+                                        uint32_t firstSet, uint32_t setCount,
+                                        const uint32_t * pBufferIndices, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
+                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+                                        VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet,
+                                        VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices,
+                                        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+                                                  VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set,
+                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_NV_fragment_shading_rate_enums ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
+                                       const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
+                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_EXT_mesh_shader ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ,
+                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                   uint32_t drawCount, uint32_t stride,
+                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                        VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                        uint32_t maxDrawCount, uint32_t stride,
+                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_KHR_copy_commands2 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,
+                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,
+                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,
+                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,
+                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,
+                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,
+                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_EXT_vertex_input_dynamic_state ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setVertexInputEXT( uint32_t vertexBindingDescriptionCount,
+                            const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,
+                            uint32_t vertexAttributeDescriptionCount,
+                            const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setVertexInputEXT(
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const &   vertexBindingDescriptions,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
+      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_HUAWEI_subpass_shading ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void subpassShadingHUAWEI( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_HUAWEI_invocation_mask ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
+                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_EXT_extended_dynamic_state2 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
+                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
+                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_EXT_color_write_enable ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorWriteEnableEXT( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
+                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
+                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_KHR_ray_tracing_maintenance1 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_EXT_multi_draw ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMultiEXT( uint32_t drawCount, const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo,
+                       uint32_t instanceCount, uint32_t firstInstance, uint32_t stride,
+                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
+                       uint32_t instanceCount, uint32_t firstInstance,
+                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
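+
+    // Usage sketch (illustrative only; requires the VK_EXT_multi_draw device
+    // extension; `draws` is a hypothetical std::vector<vk::MultiDrawInfoEXT>
+    // of { firstVertex, vertexCount } records), using the pointer overload
+    // declared above:
+    //
+    //   cmd.drawMultiEXT( static_cast<uint32_t>( draws.size() ), draws.data(),
+    //                     1 /* instanceCount */, 0 /* firstInstance */,
+    //                     sizeof( vk::MultiDrawInfoEXT ) /* stride */ );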
+    void drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
+                              uint32_t                                                                                             instanceCount,
+                              uint32_t                                                                                             firstInstance,
+                              Optional<const int32_t> vertexOffset VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_EXT_opacity_micromap ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildMicromapsEXT( uint32_t                                           infoCount,
+                            const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,
+                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,
+                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeMicromapsPropertiesEXT( uint32_t                                  micromapCount,
+                                      const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
+                                      VULKAN_HPP_NAMESPACE::QueryType           queryType,
+                                      VULKAN_HPP_NAMESPACE::QueryPool           queryPool,
+                                      uint32_t                                  firstQuery,
+                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+                                      VULKAN_HPP_NAMESPACE::QueryType                                                   queryType,
+                                      VULKAN_HPP_NAMESPACE::QueryPool                                                   queryPool,
+                                      uint32_t                                                                          firstQuery,
+                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_HUAWEI_cluster_culling_shader ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawClusterHUAWEI( uint32_t groupCountX,
+                            uint32_t groupCountY,
+                            uint32_t groupCountZ,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer     buffer,
+                                    VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_NV_copy_memory_indirect ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void
copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageSubresources, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_memory_decompression === + + template + void decompressMemoryNV( uint32_t decompressRegionCount, + const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & decompressMemoryRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_generated_commands_compute === + + template + void updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state3 === + + template + void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sampleMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setColorBlendEnableEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setColorBlendEnableEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setColorBlendEquationEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setColorBlendEquationEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEquations, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setColorWriteMaskEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setColorWriteMaskEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteMasks, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setColorBlendAdvancedEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setColorBlendAdvancedEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendAdvanced, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setViewportSwizzleNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportSwizzleNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportSwizzles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setCoverageToColorLocationNV( uint32_t coverageToColorLocation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setCoverageModulationTableNV( uint32_t coverageModulationTableCount, + const float * pCoverageModulationTable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & coverageModulationTable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const VULKAN_HPP_NOEXCEPT; + + template + void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_optical_flow === + + template + void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_maintenance5 === + + template + void bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_shader_object === + + template + void bindShadersEXT( uint32_t stageCount, + const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages, + const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + + template + void setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance6 === + + template + void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR & bindDescriptorSetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR * pPushConstantsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR & pushConstantsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR * pPushDescriptorSetInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR & pushDescriptorSetInfo, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR * pPushDescriptorSetWithTemplateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR & pushDescriptorSetWithTemplateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer == VK_NULL_HANDLE; + } + + private: + VkCommandBuffer m_commandBuffer = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DeviceMemory + { + public: + using CType = VkDeviceMemory; + using NativeType = VkDeviceMemory; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory; + + public: + VULKAN_HPP_CONSTEXPR DeviceMemory() = default; + + VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT : m_deviceMemory( deviceMemory ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DeviceMemory & operator=( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT + { + m_deviceMemory = deviceMemory; + return *this; + } +#endif + + DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_deviceMemory = {}; + return *this; + } + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DeviceMemory const & ) const = default;
+#else
+    bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory == rhs.m_deviceMemory;
+    }
+
+    bool operator!=( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory != rhs.m_deviceMemory;
+    }
+
+    bool operator<( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory < rhs.m_deviceMemory;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDeviceMemory m_deviceMemory = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DeviceMemory;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DeviceMemory;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DeviceMemory>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class VideoSessionKHR
+  {
+  public:
+    using CType      = VkVideoSessionKHR;
+    using NativeType = VkVideoSessionKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+
+  public:
+    VULKAN_HPP_CONSTEXPR VideoSessionKHR() = default;
+
+    VULKAN_HPP_CONSTEXPR VideoSessionKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionKHR( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT : m_videoSessionKHR( videoSessionKHR ) {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    VideoSessionKHR & operator=( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT
+    {
+      m_videoSessionKHR = videoSessionKHR;
+      return *this;
+    }
+#endif
+
+    VideoSessionKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_videoSessionKHR = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoSessionKHR const & ) const = default;
+#else
+    bool operator==( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionKHR == rhs.m_videoSessionKHR;
+    }
+
+    bool operator!=( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionKHR != rhs.m_videoSessionKHR;
+    }
+
+    bool operator<( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionKHR < rhs.m_videoSessionKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_videoSessionKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkVideoSessionKHR m_videoSessionKHR = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::VideoSessionKHR;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class DeferredOperationKHR
+  {
+  public:
+    using CType      = VkDeferredOperationKHR;
+    using NativeType = VkDeferredOperationKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR
VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR DeferredOperationKHR() = default; + + VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT + : m_deferredOperationKHR( deferredOperationKHR ) + { + } + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DeferredOperationKHR & operator=( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT + { + m_deferredOperationKHR = deferredOperationKHR; + return *this; + } +#endif + + DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_deferredOperationKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeferredOperationKHR const & ) const = default; +#else + bool operator==( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR == rhs.m_deferredOperationKHR; + } + + bool operator!=( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR != rhs.m_deferredOperationKHR; + } + + bool operator<( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR < rhs.m_deferredOperationKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR == VK_NULL_HANDLE; + } + + private: + VkDeferredOperationKHR m_deferredOperationKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + class BufferCollectionFUCHSIA + { + public: + using CType = VkBufferCollectionFUCHSIA; + using NativeType = VkBufferCollectionFUCHSIA; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA; + + public: + VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA() = default; + + VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT BufferCollectionFUCHSIA( VkBufferCollectionFUCHSIA bufferCollectionFUCHSIA ) VULKAN_HPP_NOEXCEPT + : m_bufferCollectionFUCHSIA( bufferCollectionFUCHSIA ) + { + } + +# if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + BufferCollectionFUCHSIA & operator=( VkBufferCollectionFUCHSIA bufferCollectionFUCHSIA ) VULKAN_HPP_NOEXCEPT + { + m_bufferCollectionFUCHSIA = bufferCollectionFUCHSIA; + return *this; + } +# endif + + BufferCollectionFUCHSIA & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_bufferCollectionFUCHSIA = {}; + return *this; + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCollectionFUCHSIA 
const & ) const = default; +# else + bool operator==( BufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA == rhs.m_bufferCollectionFUCHSIA; + } + + bool operator!=( BufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA != rhs.m_bufferCollectionFUCHSIA; + } + + bool operator<( BufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA < rhs.m_bufferCollectionFUCHSIA; + } +# endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferCollectionFUCHSIA() const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_bufferCollectionFUCHSIA == VK_NULL_HANDLE; + } + + private: + VkBufferCollectionFUCHSIA m_bufferCollectionFUCHSIA = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + class BufferView + { + public: + using CType = VkBufferView; + using NativeType = VkBufferView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; + + public: + VULKAN_HPP_CONSTEXPR BufferView() = default; + + VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT : m_bufferView( bufferView ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + BufferView & operator=( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT + { + m_bufferView = bufferView; + return *this; + } +#endif + + BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_bufferView = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferView const & ) const = default; +#else + bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferView == rhs.m_bufferView; + } + + bool operator!=( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferView != rhs.m_bufferView; + } + + bool operator<( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_bufferView < rhs.m_bufferView; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView == VK_NULL_HANDLE; + } + + private: + VkBufferView m_bufferView = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class CommandPool + { + public: + using CType = VkCommandPool; + using NativeType = VkCommandPool; + + static 
VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; + + public: + VULKAN_HPP_CONSTEXPR CommandPool() = default; + + VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT : m_commandPool( commandPool ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CommandPool & operator=( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = commandPool; + return *this; + } +#endif + + CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandPool const & ) const = default; +#else + bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandPool == rhs.m_commandPool; + } + + bool operator!=( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandPool != rhs.m_commandPool; + } + + bool operator<( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandPool < rhs.m_commandPool; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool == VK_NULL_HANDLE; + } + + private: + VkCommandPool m_commandPool = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandPool; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class PipelineCache + { + public: + using CType = VkPipelineCache; + using NativeType = VkPipelineCache; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; + + public: + VULKAN_HPP_CONSTEXPR PipelineCache() = default; + + VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT : m_pipelineCache( pipelineCache ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PipelineCache & operator=( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = pipelineCache; + return *this; + } +#endif + + PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCache const & ) const = default; +#else + bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache == rhs.m_pipelineCache; + } + + bool operator!=( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache != rhs.m_pipelineCache; + } + + bool operator<( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache < rhs.m_pipelineCache; + 
} +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache == VK_NULL_HANDLE; + } + + private: + VkPipelineCache m_pipelineCache = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class CuFunctionNVX + { + public: + using CType = VkCuFunctionNVX; + using NativeType = VkCuFunctionNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX; + + public: + VULKAN_HPP_CONSTEXPR CuFunctionNVX() = default; + + VULKAN_HPP_CONSTEXPR CuFunctionNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CuFunctionNVX( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT : m_cuFunctionNVX( cuFunctionNVX ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CuFunctionNVX & operator=( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT + { + m_cuFunctionNVX = cuFunctionNVX; + return *this; + } +#endif + + CuFunctionNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cuFunctionNVX = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuFunctionNVX const & ) const = default; +#else + bool operator==( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX == rhs.m_cuFunctionNVX; + } + + bool operator!=( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX != rhs.m_cuFunctionNVX; + } + + bool operator<( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX < rhs.m_cuFunctionNVX; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuFunctionNVX() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX == VK_NULL_HANDLE; + } + + private: + VkCuFunctionNVX m_cuFunctionNVX = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class CuModuleNVX + { + public: + using CType = VkCuModuleNVX; + using NativeType = VkCuModuleNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX; + + public: + VULKAN_HPP_CONSTEXPR CuModuleNVX() = default; + + VULKAN_HPP_CONSTEXPR CuModuleNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CuModuleNVX( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT 
: m_cuModuleNVX( cuModuleNVX ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CuModuleNVX & operator=( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT + { + m_cuModuleNVX = cuModuleNVX; + return *this; + } +#endif + + CuModuleNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cuModuleNVX = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuModuleNVX const & ) const = default; +#else + bool operator==( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX == rhs.m_cuModuleNVX; + } + + bool operator!=( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX != rhs.m_cuModuleNVX; + } + + bool operator<( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX < rhs.m_cuModuleNVX; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuModuleNVX() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX == VK_NULL_HANDLE; + } + + private: + VkCuModuleNVX m_cuModuleNVX = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + class CudaFunctionNV + { + public: + using CType = VkCudaFunctionNV; + using NativeType = VkCudaFunctionNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaFunctionNV; + + public: + VULKAN_HPP_CONSTEXPR CudaFunctionNV() = default; + + VULKAN_HPP_CONSTEXPR CudaFunctionNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CudaFunctionNV( VkCudaFunctionNV cudaFunctionNV ) VULKAN_HPP_NOEXCEPT : m_cudaFunctionNV( cudaFunctionNV ) {} + +# if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CudaFunctionNV & operator=( VkCudaFunctionNV cudaFunctionNV ) VULKAN_HPP_NOEXCEPT + { + m_cudaFunctionNV = cudaFunctionNV; + return *this; + } +# endif + + CudaFunctionNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cudaFunctionNV = {}; + return *this; + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CudaFunctionNV const & ) const = default; +# else + bool operator==( CudaFunctionNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cudaFunctionNV == rhs.m_cudaFunctionNV; + } + + bool operator!=( CudaFunctionNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cudaFunctionNV != rhs.m_cudaFunctionNV; + } + + bool operator<( CudaFunctionNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cudaFunctionNV < rhs.m_cudaFunctionNV; + } +# endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCudaFunctionNV() const VULKAN_HPP_NOEXCEPT + { + return m_cudaFunctionNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cudaFunctionNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cudaFunctionNV == VK_NULL_HANDLE; + } + + private: + VkCudaFunctionNV m_cudaFunctionNV = {}; + }; + + template <> + struct CppType + { + using Type = 
VULKAN_HPP_NAMESPACE::CudaFunctionNV; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CudaFunctionNV; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + class CudaModuleNV + { + public: + using CType = VkCudaModuleNV; + using NativeType = VkCudaModuleNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaModuleNV; + + public: + VULKAN_HPP_CONSTEXPR CudaModuleNV() = default; + + VULKAN_HPP_CONSTEXPR CudaModuleNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CudaModuleNV( VkCudaModuleNV cudaModuleNV ) VULKAN_HPP_NOEXCEPT : m_cudaModuleNV( cudaModuleNV ) {} + +# if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + CudaModuleNV & operator=( VkCudaModuleNV cudaModuleNV ) VULKAN_HPP_NOEXCEPT + { + m_cudaModuleNV = cudaModuleNV; + return *this; + } +# endif + + CudaModuleNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cudaModuleNV = {}; + return *this; + } + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CudaModuleNV const & ) const = default; +# else + bool operator==( CudaModuleNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cudaModuleNV == rhs.m_cudaModuleNV; + } + + bool operator!=( CudaModuleNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cudaModuleNV != rhs.m_cudaModuleNV; + } + + bool operator<( CudaModuleNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_cudaModuleNV < rhs.m_cudaModuleNV; + } +# endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCudaModuleNV() const VULKAN_HPP_NOEXCEPT + { + return m_cudaModuleNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cudaModuleNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cudaModuleNV == VK_NULL_HANDLE; + } + + private: + VkCudaModuleNV m_cudaModuleNV = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CudaModuleNV; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CudaModuleNV; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + class DescriptorPool + { + public: + using CType = VkDescriptorPool; + using NativeType = VkDescriptorPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool; + + public: + VULKAN_HPP_CONSTEXPR DescriptorPool() = default; + + VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT : m_descriptorPool( descriptorPool ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorPool & operator=( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT + { + m_descriptorPool = descriptorPool; + return *this; + } +#endif + + DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + 
m_descriptorPool = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPool const & ) const = default; +#else + bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool == rhs.m_descriptorPool; + } + + bool operator!=( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool != rhs.m_descriptorPool; + } + + bool operator<( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool < rhs.m_descriptorPool; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool == VK_NULL_HANDLE; + } + + private: + VkDescriptorPool m_descriptorPool = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DescriptorSetLayout + { + public: + using CType = VkDescriptorSetLayout; + using NativeType = VkDescriptorSetLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; + + public: + VULKAN_HPP_CONSTEXPR DescriptorSetLayout() = default; + + VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT + : m_descriptorSetLayout( descriptorSetLayout ) + { + } + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + DescriptorSetLayout & operator=( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = descriptorSetLayout; + return *this; + } +#endif + + DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayout const & ) const = default; +#else + bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout == rhs.m_descriptorSetLayout; + } + + bool operator!=( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout != rhs.m_descriptorSetLayout; + } + + bool operator<( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout < rhs.m_descriptorSetLayout; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout == VK_NULL_HANDLE; + } + + private: + VkDescriptorSetLayout m_descriptorSetLayout = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + template <> + struct CppType + { + using Type = 
VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Framebuffer + { + public: + using CType = VkFramebuffer; + using NativeType = VkFramebuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; + + public: + VULKAN_HPP_CONSTEXPR Framebuffer() = default; + + VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT : m_framebuffer( framebuffer ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Framebuffer & operator=( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = framebuffer; + return *this; + } +#endif + + Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Framebuffer const & ) const = default; +#else + bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer == rhs.m_framebuffer; + } + + bool operator!=( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer != rhs.m_framebuffer; + } + + bool operator<( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer < rhs.m_framebuffer; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer == VK_NULL_HANDLE; + } + + private: + VkFramebuffer m_framebuffer = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class IndirectCommandsLayoutNV + { + public: + using CType = VkIndirectCommandsLayoutNV; + using NativeType = VkIndirectCommandsLayoutNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() = default; + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV ) + { + } + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + IndirectCommandsLayoutNV & operator=( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutNV = indirectCommandsLayoutNV; + return *this; + } +#endif + + IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutNV = {}; + return *this; + } + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutNV const & ) const = default; +#else + bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV; + } + + bool operator!=( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV; + } + + bool operator<( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV == VK_NULL_HANDLE; + } + + private: + VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class PrivateDataSlot + { + public: + using CType = VkPrivateDataSlot; + using NativeType = VkPrivateDataSlot; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR PrivateDataSlot() = default; + + VULKAN_HPP_CONSTEXPR PrivateDataSlot( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlot( VkPrivateDataSlot privateDataSlot ) VULKAN_HPP_NOEXCEPT : m_privateDataSlot( privateDataSlot ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + PrivateDataSlot & operator=( VkPrivateDataSlot privateDataSlot ) VULKAN_HPP_NOEXCEPT + { + m_privateDataSlot = privateDataSlot; + return *this; + } +#endif + + PrivateDataSlot & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_privateDataSlot = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PrivateDataSlot const & ) const = default; +#else + bool operator==( PrivateDataSlot const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot == rhs.m_privateDataSlot; + } + + bool operator!=( PrivateDataSlot const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot != rhs.m_privateDataSlot; + } + + bool operator<( PrivateDataSlot const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot < rhs.m_privateDataSlot; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPrivateDataSlot() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot == VK_NULL_HANDLE; + } + + private: + VkPrivateDataSlot m_privateDataSlot = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlot; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + using PrivateDataSlotEXT = PrivateDataSlot; + + class RenderPass + { + public: + 
using CType = VkRenderPass; + using NativeType = VkRenderPass; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass; + + public: + VULKAN_HPP_CONSTEXPR RenderPass() = default; + + VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT : m_renderPass( renderPass ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + RenderPass & operator=( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT + { + m_renderPass = renderPass; + return *this; + } +#endif + + RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_renderPass = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPass const & ) const = default; +#else + bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_renderPass == rhs.m_renderPass; + } + + bool operator!=( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_renderPass != rhs.m_renderPass; + } + + bool operator<( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_renderPass < rhs.m_renderPass; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass == VK_NULL_HANDLE; + } + + private: + VkRenderPass m_renderPass = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::RenderPass; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Sampler + { + public: + using CType = VkSampler; + using NativeType = VkSampler; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; + + public: + VULKAN_HPP_CONSTEXPR Sampler() = default; + + VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT : m_sampler( sampler ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + Sampler & operator=( VkSampler sampler ) VULKAN_HPP_NOEXCEPT + { + m_sampler = sampler; + return *this; + } +#endif + + Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_sampler = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Sampler const & ) const = default; +#else + bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_sampler == rhs.m_sampler; + } + + bool operator!=( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_sampler != rhs.m_sampler; + } + + bool operator<( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_sampler < rhs.m_sampler; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT + { + return m_sampler; + } + + explicit 
operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_sampler != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_sampler == VK_NULL_HANDLE; + } + + private: + VkSampler m_sampler = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Sampler; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Sampler; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class SamplerYcbcrConversion + { + public: + using CType = VkSamplerYcbcrConversion; + using NativeType = VkSamplerYcbcrConversion; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; + + public: + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() = default; + + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT + : m_samplerYcbcrConversion( samplerYcbcrConversion ) + { + } + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + SamplerYcbcrConversion & operator=( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT + { + m_samplerYcbcrConversion = samplerYcbcrConversion; + return *this; + } +#endif + + SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_samplerYcbcrConversion = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversion const & ) const = default; +#else + bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion; + } + + bool operator!=( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion; + } + + bool operator<( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion == VK_NULL_HANDLE; + } + + private: + VkSamplerYcbcrConversion m_samplerYcbcrConversion = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + using SamplerYcbcrConversionKHR = SamplerYcbcrConversion; + + class ShaderModule + { + public: + using CType = VkShaderModule; + using NativeType = VkShaderModule; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule; + + 
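  // [editorial sketch] The CppType and isVulkanHandleType specializations emitted after
  // each handle class let generic code map the ObjectType / DebugReportObjectTypeEXT
  // enum values back to the wrapper type and detect handles at compile time. A sketch,
  // assuming the default `vk` alias and the restored template arguments:
  //
  //   static_assert( vk::isVulkanHandleType<vk::Sampler>::value, "Sampler is a handle" );
  //   using S = vk::CppType<vk::ObjectType, vk::ObjectType::eSampler>::Type;  // S == vk::Sampler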
public: + VULKAN_HPP_CONSTEXPR ShaderModule() = default; + + VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT : m_shaderModule( shaderModule ) {} + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ShaderModule & operator=( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = shaderModule; + return *this; + } +#endif + + ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModule const & ) const = default; +#else + bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule == rhs.m_shaderModule; + } + + bool operator!=( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule != rhs.m_shaderModule; + } + + bool operator<( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule < rhs.m_shaderModule; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule == VK_NULL_HANDLE; + } + + private: + VkShaderModule m_shaderModule = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class ValidationCacheEXT + { + public: + using CType = VkValidationCacheEXT; + using NativeType = VkValidationCacheEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; + + public: + VULKAN_HPP_CONSTEXPR ValidationCacheEXT() = default; + + VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT : m_validationCacheEXT( validationCacheEXT ) + { + } + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + ValidationCacheEXT & operator=( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT + { + m_validationCacheEXT = validationCacheEXT; + return *this; + } +#endif + + ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_validationCacheEXT = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationCacheEXT const & ) const = default; +#else + bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT == rhs.m_validationCacheEXT; + } + + bool operator!=( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT != rhs.m_validationCacheEXT; + } + + bool operator<( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT < rhs.m_validationCacheEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT + { + return 
m_validationCacheEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT == VK_NULL_HANDLE; + } + + private: + VkValidationCacheEXT m_validationCacheEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class VideoSessionParametersKHR + { + public: + using CType = VkVideoSessionParametersKHR; + using NativeType = VkVideoSessionParametersKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR() = default; + + VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionParametersKHR( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT + : m_videoSessionParametersKHR( videoSessionParametersKHR ) + { + } + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) + VideoSessionParametersKHR & operator=( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionParametersKHR = videoSessionParametersKHR; + return *this; + } +#endif + + VideoSessionParametersKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionParametersKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoSessionParametersKHR const & ) const = default; +#else + bool operator==( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR == rhs.m_videoSessionParametersKHR; + } + + bool operator!=( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR != rhs.m_videoSessionParametersKHR; + } + + bool operator<( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR < rhs.m_videoSessionParametersKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionParametersKHR() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR == VK_NULL_HANDLE; + } + + private: + VkVideoSessionParametersKHR m_videoSessionParametersKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Queue + { + public: + using CType = VkQueue; + using NativeType = VkQueue; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue; + + 
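  // [editorial sketch] Because every wrapper defines operator< (or operator<=>), handles
  // can be used directly as keys in ordered containers; ordering is by raw handle value
  // and carries no ownership. For example (assuming <set> is included):
  //
  //   std::set<vk::ShaderModule> liveModules;
  //   liveModules.insert( module );   // `module` is some created vk::ShaderModule
  //   liveModules.erase( module );    // erasing does not destroy the Vulkan object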
public: + VULKAN_HPP_CONSTEXPR Queue() = default; + + VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT : m_queue( queue ) {} + + Queue & operator=( VkQueue queue ) VULKAN_HPP_NOEXCEPT + { + m_queue = queue; + return *this; + } + + Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_queue = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Queue const & ) const = default; +#else + bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queue == rhs.m_queue; + } + + bool operator!=( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queue != rhs.m_queue; + } + + bool operator<( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queue < rhs.m_queue; + } +#endif + + //=== VK_VERSION_1_0 === + + template + VULKAN_HPP_NODISCARD Result submit( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + submit( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result bindSparse( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfo, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_3 === + + template + VULKAN_HPP_NODISCARD Result submit2( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + submit2( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_swapchain === + + template + VULKAN_HPP_NODISCARD Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result 
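  // [editorial sketch] In enhanced mode the ArrayProxy overloads accept a single struct
  // where an array is expected, so a one-batch submit followed by a full queue drain can
  // look like this (cmdBuffer and fence are assumed to exist; submit throws
  // vk::SystemError on failure):
  //
  //   vk::SubmitInfo submitInfo{};
  //   submitInfo.commandBufferCount = 1;
  //   submitInfo.pCommandBuffers    = &cmdBuffer;
  //   queue.submit( submitInfo, fence );
  //   queue.waitIdle();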
presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_debug_utils === + + template + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_device_diagnostic_checkpoints === + + template + void getCheckpointDataNV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = CheckpointDataNVAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_INTEL_performance_query === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_synchronization2 === + + template + VULKAN_HPP_NODISCARD Result submit2KHR( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void 
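  // [editorial sketch] The queue-level debug-utils calls bracket work for tools such as
  // RenderDoc; they require the VK_EXT_debug_utils extension to be enabled. For example:
  //
  //   vk::DebugUtilsLabelEXT label{};
  //   label.pLabelName = "present pass";
  //   queue.beginDebugUtilsLabelEXT( label );
  //   // ... submits belonging to the label ...
  //   queue.endDebugUtilsLabelEXT();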
getCheckpointData2NV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = CheckpointData2NVAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_low_latency2 === + + template + void notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + operator VkQueue() const VULKAN_HPP_NOEXCEPT + { + return m_queue; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_queue != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_queue == VK_NULL_HANDLE; + } + + private: + VkQueue m_queue = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Queue; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Queue; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Device + { + public: + using CType = VkDevice; + using NativeType = VkDevice; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice; + + public: + VULKAN_HPP_CONSTEXPR Device() = default; + + VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + Device( VkDevice device ) VULKAN_HPP_NOEXCEPT : m_device( device ) {} + + Device & operator=( VkDevice device ) VULKAN_HPP_NOEXCEPT + { + m_device = device; + return *this; + } + + Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_device = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Device const & ) const = default; +#else + bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_device == rhs.m_device; + } + + bool operator!=( Device const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_device != rhs.m_device; + } + + bool operator<( Device const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_device < rhs.m_device; + } +#endif + + //=== VK_VERSION_1_0 === + + template + PFN_vkVoidFunction getProcAddr( const char * pName, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getQueue( uint32_t queueFamilyIndex, + uint32_t queueIndex, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue + getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void( free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void( free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, 
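  // [editorial sketch] The enhanced-mode overloads return values directly and throw
  // vk::SystemError on failure. Fetching a queue and making a raw allocation might look
  // like this (graphicsFamilyIndex, allocationSize and memoryTypeIndex are assumed):
  //
  //   vk::Queue queue = device.getQueue( graphicsFamilyIndex, 0 );
  //   vk::MemoryAllocateInfo allocInfo( allocationSize, memoryTypeIndex );
  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );
  //   // ...
  //   device.freeMemory( memory );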
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, + void ** ppData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD Result flushMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, + const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize + getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
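  // [editorial sketch] Host-visible memory is typically mapped, written, flushed (unless
  // the heap is HOST_COHERENT) and unmapped, e.g. (assuming <cstring> for std::memcpy):
  //
  //   void * p = device.mapMemory( memory, 0, dataSize );
  //   std::memcpy( p, srcData, dataSize );
  //   vk::MappedMemoryRange range( memory, 0, VK_WHOLE_SIZE );
  //   device.flushMappedMemoryRanges( range );
  //   device.unmapMemory( memory );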
VULKAN_HPP_NODISCARD Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements + getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements + getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = SparseImageMemoryRequirementsAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type 
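  // [editorial sketch] Binding follows the usual query-then-bind pattern; findMemoryType
  // is a hypothetical helper that picks an index out of memoryTypeBits:
  //
  //   vk::MemoryRequirements reqs = device.getBufferMemoryRequirements( buffer );
  //   uint32_t typeIndex = findMemoryType( reqs.memoryTypeBits,
  //                                        vk::MemoryPropertyFlagBits::eHostVisible );
  //   device.bindBufferMemory( buffer, memory, 0 );   // offset 0 within the allocation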
+ createFenceUnique( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Fence fence, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result resetFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result waitForFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, + Optional allocator 
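  // [editorial sketch] A typical fence round-trip with these declarations; note that
  // waitForFences returns vk::Result (it may legitimately yield eTimeout):
  //
  //   vk::Fence fence = device.createFence( vk::FenceCreateInfo() );
  //   queue.submit( submitInfo, fence );
  //   vk::Result r = device.waitForFences( fence, VK_TRUE, UINT64_MAX );
  //   device.resetFences( fence );
  //   device.destroyFence( fence );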
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Event * pEvent, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createEventUnique( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Event event, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result setEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
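  // [editorial sketch] Events can be signalled and polled from the host with the calls
  // above; getEventStatus returns eEventSet or eEventReset rather than throwing:
  //
  //   vk::Event event = device.createEvent( vk::EventCreateInfo() );
  //   device.setEvent( event );
  //   if ( device.getEventStatus( event ) == vk::Result::eEventSet ) { /* signalled */ }
  //   device.destroyEvent( event );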
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> + getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Buffer * pBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createBufferUnique( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + 
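  // [editorial sketch] The typed getQueryPoolResult wrapper returns a ResultValue pair;
  // without eWait the result may be eNotReady. Reading one 64-bit timestamp might look
  // like this (queryPool and firstQuery are assumed):
  //
  //   auto rv = device.getQueryPoolResult<uint64_t>(
  //     queryPool, firstQuery, 1, sizeof( uint64_t ),
  //     vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );
  //   uint64_t ticks = rv.value;   // valid once rv.result == vk::Result::eSuccess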
Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferView * pView, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Image * pImage, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createImageUnique( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image, + 
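  // [editorial sketch] The *Unique variants wrap the handle in a UniqueHandle that
  // destroys it automatically, which pairs naturally with RAII-style code:
  //
  //   vk::BufferCreateInfo info( {}, bufferSize, vk::BufferUsageFlagBits::eVertexBuffer );
  //   vk::UniqueBuffer buffer = device.createBufferUnique( info );
  //   // buffer->... or *buffer to reach the vk::Buffer; destroyed at scope exit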
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout + getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ImageView * pView, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d 
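  // [editorial sketch] A straightforward 2D color view over an existing image (image
  // and format are assumed):
  //
  //   vk::ImageViewCreateInfo viewInfo(
  //     {}, image, vk::ImageViewType::e2D, format, {},
  //     vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
  //   vk::ImageView view = device.createImageView( viewInfo );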
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, + Optional 
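  // [editorial sketch] Shader modules are created from a SPIR-V blob; codeSize is in
  // bytes even though the code pointer is uint32_t (spirv is an assumed
  // std::vector<uint32_t>):
  //
  //   vk::ShaderModuleCreateInfo smInfo( {}, spirv.size() * sizeof( uint32_t ), spirv.data() );
  //   vk::ShaderModule shaderModule = device.createShaderModule( smInfo );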
allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + size_t * pDataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = Uint8_tAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = 
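  // [editorial sketch] getPipelineCacheData returns the opaque blob that can be written
  // to disk and fed back through PipelineCacheCreateInfo on the next run:
  //
  //   vk::PipelineCache cache = device.createPipelineCache( vk::PipelineCacheCreateInfo() );
  //   // ... build pipelines ...
  //   std::vector<uint8_t> blob = device.getPipelineCacheData( cache );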
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
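Note that the pipeline-creation overloads return `ResultValue<...>` rather than `ResultValueType<...>::type`, even when exceptions are enabled: pipeline creation has success codes other than `eSuccess` (e.g. `ePipelineCompileRequiredEXT`), so the caller must inspect the result. A minimal sketch of the single-pipeline path:

```cpp
#include <vulkan/vulkan.hpp>
#include <stdexcept>

// Create one graphics pipeline and surface any non-eSuccess success code as an error.
vk::Pipeline makeGraphicsPipeline(vk::Device device, vk::PipelineCache cache,
                                  const vk::GraphicsPipelineCreateInfo& createInfo)
{
    vk::ResultValue<vk::Pipeline> rv = device.createGraphicsPipeline(cache, createInfo);
    if (rv.result != vk::Result::eSuccess)
        throw std::runtime_error("graphics pipeline creation did not fully succeed");
    return rv.value;
}
```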
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Sampler * pSampler, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type createSamplerUnique( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
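Sampler creation is a good illustration of the smart-handle path, since a sampler is pure state with no attached memory. A minimal sketch; the filter and address-mode choices are ordinary defaults, not requirements:

```cpp
#include <vulkan/vulkan.hpp>

// A typical trilinear, repeating sampler owned by a vk::UniqueSampler.
vk::UniqueSampler makeLinearSampler(vk::Device device, float maxLod)
{
    vk::SamplerCreateInfo info{};
    info.magFilter    = vk::Filter::eLinear;
    info.minFilter    = vk::Filter::eLinear;
    info.mipmapMode   = vk::SamplerMipmapMode::eLinear;
    info.addressModeU = vk::SamplerAddressMode::eRepeat;
    info.addressModeV = vk::SamplerAddressMode::eRepeat;
    info.addressModeW = vk::SamplerAddressMode::eRepeat;
    info.maxLod       = maxLod;
    return device.createSamplerUnique(info); // destroyed automatically on scope exit
}
```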
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
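The structs also gain enhanced-mode constructors that take `ArrayProxy` arguments, so a layout can be declared directly from a binding list. A minimal sketch of a common two-binding layout:

```cpp
#include <vulkan/vulkan.hpp>
#include <array>

// One uniform buffer for the vertex stage, one combined image sampler for the fragment stage.
vk::UniqueDescriptorSetLayout makeSetLayout(vk::Device device)
{
    std::array<vk::DescriptorSetLayoutBinding, 2> bindings{ {
        { 0, vk::DescriptorType::eUniformBuffer,        1, vk::ShaderStageFlagBits::eVertex },
        { 1, vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment },
    } };
    // Enhanced-mode constructor: descriptor count and pointer are derived from the proxy.
    vk::DescriptorSetLayoutCreateInfo info{ {}, bindings };
    return device.createDescriptorSetLayoutUnique(info);
}
```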
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DescriptorSetAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DescriptorSetAllocator = std::allocator<VULKAN_HPP_NAMESPACE::DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B0 = DescriptorSetAllocator, typename std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::DescriptorSet>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename DescriptorSetAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>>>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename DescriptorSetAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>>, typename B0 = DescriptorSetAllocator, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
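A minimal sketch of allocating several sets of the same layout through the unique-handle overload; note the pool-flag requirement in the comment:

```cpp
#include <vulkan/vulkan.hpp>
#include <vector>

// The pool must have been created with eFreeDescriptorSet so each
// vk::UniqueDescriptorSet can free itself individually on destruction.
std::vector<vk::UniqueDescriptorSet> allocateSets(vk::Device device, vk::DescriptorPool pool,
                                                  vk::DescriptorSetLayout layout, uint32_t count)
{
    std::vector<vk::DescriptorSetLayout> layouts(count, layout);
    vk::DescriptorSetAllocateInfo allocInfo{ pool, layouts }; // proxy constructor
    return device.allocateDescriptorSetsUnique(allocInfo);
}
```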
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> Result( free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void( free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
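The enhanced `updateDescriptorSets` takes `ArrayProxy` arguments, so a single write needs no explicit count/pointer pair. A minimal sketch binding a uniform buffer:

```cpp
#include <vulkan/vulkan.hpp>

// Point binding 0 of `set` at a uniform buffer; no descriptor copies are needed.
void writeUniformBuffer(vk::Device device, vk::DescriptorSet set,
                        vk::Buffer buffer, vk::DeviceSize range)
{
    vk::DescriptorBufferInfo bufferInfo{ buffer, 0, range };
    vk::WriteDescriptorSet write{ set, /*dstBinding*/ 0, /*dstArrayElement*/ 0,
                                  vk::DescriptorType::eUniformBuffer,
                                  /*imageInfo*/ {}, bufferInfo };
    device.updateDescriptorSets(write, nullptr); // single write, empty copy list
}
```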
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> typename ResultValueType<void>::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
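Command-pool and command-buffer setup pairs naturally with the unique-handle overloads, since the buffers are returned to their pool automatically. A minimal sketch:

```cpp
#include <vulkan/vulkan.hpp>
#include <utility>
#include <vector>

// One resettable pool plus `count` primary command buffers for a queue family.
std::pair<vk::UniqueCommandPool, std::vector<vk::UniqueCommandBuffer>>
makeCommandBuffers(vk::Device device, uint32_t queueFamilyIndex, uint32_t count)
{
    vk::UniqueCommandPool pool = device.createCommandPoolUnique(
        { vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex });
    std::vector<vk::UniqueCommandBuffer> buffers = device.allocateCommandBuffersUnique(
        { pool.get(), vk::CommandBufferLevel::ePrimary, count });
    // The pool must outlive the buffers, so both are returned together.
    return { std::move(pool), std::move(buffers) };
}
```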
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename CommandBufferAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename CommandBufferAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B0 = CommandBufferAllocator, typename std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::CommandBuffer>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename CommandBufferAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>>>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename CommandBufferAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>>, typename B0 = CommandBufferAllocator, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void( free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void( free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_VERSION_1_1 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = SparseImageMemoryRequirements2Allocator, typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
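The `StructureChain` overloads let a single query return extension structs along with the base result. A minimal sketch that also asks whether an image prefers a dedicated allocation (core as of Vulkan 1.1):

```cpp
#include <vulkan/vulkan.hpp>

// Query base requirements plus VkMemoryDedicatedRequirements in one call.
vk::MemoryRequirements2 queryImageRequirements(vk::Device device, vk::Image image)
{
    auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2,
                                                    vk::MemoryDedicatedRequirements>(
        vk::ImageMemoryRequirementsInfo2{ image });
    if (chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation)
    {
        // An allocator could give this image its own VkDeviceMemory here.
    }
    return chain.get<vk::MemoryRequirements2>();
}
```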
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_VERSION_1_2 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<uint64_t>::type getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
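Unusually for enhanced mode, `waitSemaphores` returns `vk::Result` directly, because `eTimeout` is an expected success code rather than an error. A minimal sketch of waiting on a timeline semaphore:

```cpp
#include <vulkan/vulkan.hpp>

// Block until `timeline` reaches `value`, or until `timeoutNs` nanoseconds elapse.
bool waitTimeline(vk::Device device, vk::Semaphore timeline,
                  uint64_t value, uint64_t timeoutNs)
{
    vk::SemaphoreWaitInfo waitInfo{};
    waitInfo.semaphoreCount = 1;
    waitInfo.pSemaphores    = &timeline;
    waitInfo.pValues        = &value;
    if (device.waitSemaphores(waitInfo, timeoutNs) == vk::Result::eTimeout)
        return false;
    // The enhanced getter returns the counter value directly.
    return device.getSemaphoreCounterValue(timeline) >= value;
}
```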
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_VERSION_1_3 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
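The device-address getters return plain values rather than `Result` wrappers, since the underlying Vulkan calls cannot fail. A minimal sketch; the feature and usage-flag requirements are noted in the comment:

```cpp
#include <vulkan/vulkan.hpp>

// The buffer must have been created with eShaderDeviceAddress usage, and the
// bufferDeviceAddress feature enabled at device creation (core in Vulkan 1.2).
vk::DeviceAddress addressOf(vk::Device device, vk::Buffer buffer)
{
    return device.getBufferAddress(vk::BufferDeviceAddressInfo{ buffer });
}
```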
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> typename ResultValueType<void>::type setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD uint64_t getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = SparseImageMemoryRequirements2Allocator, typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_KHR_swapchain ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
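A minimal swapchain-creation sketch. The values shown assume the usual negotiation against `vkGetPhysicalDeviceSurface*` queries has already happened; `eFifo` is the one present mode the spec guarantees:

```cpp
#include <vulkan/vulkan.hpp>

// Create (or recreate, via oldSwapchain) a color-attachment swapchain on `surface`.
vk::UniqueSwapchainKHR makeSwapchain(vk::Device device, vk::SurfaceKHR surface,
                                     vk::SurfaceFormatKHR format, vk::Extent2D extent,
                                     uint32_t minImageCount,
                                     vk::SwapchainKHR oldSwapchain = {})
{
    vk::SwapchainCreateInfoKHR info{};
    info.surface          = surface;
    info.minImageCount    = minImageCount;
    info.imageFormat      = format.format;
    info.imageColorSpace  = format.colorSpace;
    info.imageExtent      = extent;
    info.imageArrayLayers = 1;
    info.imageUsage       = vk::ImageUsageFlagBits::eColorAttachment;
    info.preTransform     = vk::SurfaceTransformFlagBitsKHR::eIdentity;
    info.compositeAlpha   = vk::CompositeAlphaFlagBitsKHR::eOpaque;
    info.presentMode      = vk::PresentModeKHR::eFifo;
    info.clipped          = VK_TRUE;
    info.oldSwapchain     = oldSwapchain; // lets the driver reuse resources on resize
    return device.createSwapchainKHRUnique(info);
}
```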
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t * pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename ImageAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename ImageAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = ImageAllocator, typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::Image>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t * pImageIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD ResultValue<uint32_t> acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> VULKAN_HPP_NODISCARD ResultValue<uint32_t> acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
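Like pipeline creation, `acquireNextImageKHR` returns `ResultValue<uint32_t>` because `eSuboptimalKHR` is a success code that still yields a usable image index, while `eErrorOutOfDateKHR` surfaces as an exception in the default configuration. A minimal sketch:

```cpp
#include <vulkan/vulkan.hpp>
#include <cstdint>
#include <optional>

// Acquire the next swapchain image, signalling `imageAvailable` when it is ready.
std::optional<uint32_t> acquireImage(vk::Device device, vk::SwapchainKHR swapchain,
                                     vk::Semaphore imageAvailable)
{
    try
    {
        vk::ResultValue<uint32_t> rv =
            device.acquireNextImageKHR(swapchain, UINT64_MAX, imageAvailable, nullptr);
        return rv.value; // valid for eSuccess and eSuboptimalKHR alike
    }
    catch (const vk::OutOfDateKHRError&)
    {
        return std::nullopt; // caller should recreate the swapchain
    }
}
```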
+    //=== VK_KHR_display_swapchain ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SwapchainKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SwapchainKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B0 = SwapchainKHRAllocator, typename std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::SwapchainKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename SwapchainKHRAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename SwapchainKHRAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>, typename B0 = SwapchainKHRAllocator, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_EXT_debug_marker ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
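VK_EXT_debug_marker is the older object-naming extension (VK_EXT_debug_utils, further below, supersedes it). A minimal sketch of naming a buffer for tools like RenderDoc; `device` and `buffer` are assumed, and the extension must be enabled:

```cpp
// Sketch: attach a human-readable name to a VkBuffer for debugger captures.
vk::DebugMarkerObjectNameInfoEXT nameInfo{};
nameInfo.objectType  = vk::DebugReportObjectTypeEXT::eBuffer;
nameInfo.object      = reinterpret_cast<uint64_t>( static_cast<VkBuffer>( buffer ) );
nameInfo.pObjectName = "mlx vertex buffer";
device.debugMarkerSetObjectNameEXT( nameInfo ); // no-op cost in release drivers without a debugger attached
```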
+    //=== VK_KHR_video_queue ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, uint32_t * pMemoryRequirementsCount, VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename VideoSessionMemoryRequirementsKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename VideoSessionMemoryRequirementsKHRAllocator = std::allocator<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = VideoSessionMemoryRequirementsKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
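A video session owns no memory of its own; the application queries its bind points and attaches allocations before recording any video commands. A hedged sketch of that step (`device` and `videoSession` assumed; `allocateFor` is a hypothetical helper that allocates `vk::DeviceMemory` matching the given `VkMemoryRequirements`):

```cpp
// Sketch: satisfy every memory bind point reported by a VK_KHR_video_queue session.
auto requirements = device.getVideoSessionMemoryRequirementsKHR( videoSession );
std::vector<vk::BindVideoSessionMemoryInfoKHR> binds;
binds.reserve( requirements.size() );
for ( const vk::VideoSessionMemoryRequirementsKHR & req : requirements )
{
    vk::BindVideoSessionMemoryInfoKHR bind{};
    bind.memoryBindIndex = req.memoryBindIndex;
    bind.memory          = allocateFor( req.memoryRequirements ); // hypothetical allocator helper
    bind.memoryOffset    = 0;
    bind.memorySize      = req.memoryRequirements.size;
    binds.push_back( bind );
}
device.bindVideoSessionMemoryKHR( videoSession, binds ); // must complete before using the session
```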
+    //=== VK_NVX_binary_import ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_NVX_image_view_handle ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_AMD_shader_info ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t * pInfoSize, void * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = Uint8_tAllocator, typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
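The templated `getShaderInfoAMD` overload performs the size query and the data fetch in one call. A small sketch dumping a fragment shader's disassembly on AMD hardware (`device` and `pipeline` assumed, extension enabled):

```cpp
// Sketch: fetch and print pipeline disassembly via VK_AMD_shader_info.
std::vector<uint8_t> disasm = device.getShaderInfoAMD( pipeline,
                                                       vk::ShaderStageFlagBits::eFragment,
                                                       vk::ShaderInfoTypeAMD::eDisassembly );
std::printf( "%.*s\n", static_cast<int>( disasm.size() ),
             reinterpret_cast<const char *>( disasm.data() ) );
```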
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_NV_external_memory_win32 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE * pHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<HANDLE>::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    //=== VK_KHR_device_group ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_KHR_maintenance1 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_KHR_external_memory_win32 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<HANDLE>::type getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    //=== VK_KHR_external_memory_fd ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<int>::type getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_KHR_external_semaphore_win32 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    //=== VK_KHR_external_semaphore_fd ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<int>::type getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
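These fd entry points are what makes zero-copy sharing with other APIs (or processes) possible on POSIX platforms. A minimal sketch of exporting an allocation as an opaque fd; `device` and `memory` are assumed, and the allocation must have been created with a `vk::ExportMemoryAllocateInfo` in its pNext chain:

```cpp
// Sketch: export a device allocation as a POSIX file descriptor (VK_KHR_external_memory_fd).
vk::MemoryGetFdInfoKHR getFdInfo{};
getFdInfo.memory     = memory;
getFdInfo.handleType = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;

int fd = device.getMemoryFdKHR( getFdInfo ); // ownership of fd transfers to the caller; close() it when done
```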
+    //=== VK_KHR_descriptor_update_template ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
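Update templates replace N individual `vkUpdateDescriptorSets` writes with one memcpy-like walk over an application struct whose member offsets were baked into the template at creation time. A sketch under those assumptions (`descriptorSet`, `updateTemplate`, `bufferInfo`, `imageInfo` assumed to exist; `MyDescriptors` is illustrative):

```cpp
// Sketch: one templated update instead of two vk::WriteDescriptorSet entries.
struct MyDescriptors
{
    VkDescriptorBufferInfo ubo; // template entry 0, offset 0
    VkDescriptorImageInfo  tex; // template entry 1, offset sizeof(VkDescriptorBufferInfo)
};

MyDescriptors data{ bufferInfo, imageInfo };
device.updateDescriptorSetWithTemplateKHR( descriptorSet, updateTemplate, data ); // DataType deduced
```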
+    //=== VK_EXT_display_control ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t * pCounterValue, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<uint64_t>::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_GOOGLE_display_timing ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t * pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PastPresentationTimingGOOGLEAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PastPresentationTimingGOOGLEAllocator = std::allocator<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PastPresentationTimingGOOGLEAllocator, typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_EXT_hdr_metadata ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_KHR_create_renderpass2 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_KHR_shared_presentable_image ===
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
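The display-timing queries above exist for frame pacing: the driver reports when past frames actually hit the screen, and the application compares that against what it asked for. A hedged sketch (`device` and `swapchain` assumed; `recordSlip` is a hypothetical bookkeeping helper):

```cpp
// Sketch: feed a frame pacer from VK_GOOGLE_display_timing statistics.
vk::RefreshCycleDurationGOOGLE refresh = device.getRefreshCycleDurationGOOGLE( swapchain ); // ns per vblank
std::vector<vk::PastPresentationTimingGOOGLE> timings = device.getPastPresentationTimingGOOGLE( swapchain );
for ( const vk::PastPresentationTimingGOOGLE & t : timings )
    recordSlip( t.actualPresentTime - t.desiredPresentTime ); // hypothetical: track how late frames landed
```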
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_KHR_external_fence_win32 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<HANDLE>::type getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    //=== VK_KHR_external_fence_fd ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<int>::type getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_KHR_performance_query ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void releaseProfilingLockKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_EXT_debug_utils ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+    //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<StructureChain<X, Y, Z...>>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, struct AHardwareBuffer ** pBuffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<struct AHardwareBuffer *>::type getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
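On Android, the `StructureChain` overload above answers both "which memory types can hold this AHardwareBuffer" and "what format does Vulkan see it as" in one call. A hedged sketch, platform-gated and assuming `device` and an `AHardwareBuffer * ahb`:

```cpp
// Sketch (Android only): query Vulkan properties of an AHardwareBuffer, chaining
// the format-properties struct onto the base properties struct.
auto chain = device.getAndroidHardwareBufferPropertiesANDROID<
    vk::AndroidHardwareBufferPropertiesANDROID,
    vk::AndroidHardwareBufferFormatPropertiesANDROID>( *ahb );

auto props  = chain.get<vk::AndroidHardwareBufferPropertiesANDROID>();       // allocationSize, memoryTypeBits
auto format = chain.get<vk::AndroidHardwareBufferFormatPropertiesANDROID>(); // format / external format
```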
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    //=== VK_AMDX_shader_enqueue ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#    ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createExecutionGraphPipelinesAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createExecutionGraphPipelinesAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createExecutionGraphPipelineAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#    endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::type getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, uint32_t * pNodeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<uint32_t>::type getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
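VK_AMDX_shader_enqueue is a provisional beta extension, so any usage is doubly hedged; the one step that is hard to get wrong is sizing the scratch buffer an execution graph needs before it can be dispatched (`device` and `executionGraph` assumed):

```cpp
// Sketch (beta extensions only): query scratch requirements for an execution graph pipeline.
vk::ExecutionGraphPipelineScratchSizeAMDX sizeInfo =
    device.getExecutionGraphPipelineScratchSizeAMDX( executionGraph );
// sizeInfo drives the size of the device-local scratch allocation bound at dispatch time.
```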
+    //=== VK_KHR_get_memory_requirements2 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = SparseImageMemoryRequirements2Allocator, typename std::enable_if<std::is_same<typename B1::value_type, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
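The "2" entry points exist so extra result structs can ride along in the pNext chain; the `StructureChain` overload makes that type-safe. A sketch asking whether an image prefers a dedicated allocation (`device` and `image` assumed):

```cpp
// Sketch: memory requirements plus dedicated-allocation advice in one query.
vk::ImageMemoryRequirementsInfo2 info{};
info.image = image;

auto chain = device.getImageMemoryRequirements2KHR<vk::MemoryRequirements2,
                                                   vk::MemoryDedicatedRequirements>( info );

const vk::MemoryRequirements & reqs = chain.get<vk::MemoryRequirements2>().memoryRequirements;
bool dedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;
```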
+    //=== VK_KHR_acceleration_structure ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void * pData, size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type writeAccelerationStructuresPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type writeAccelerationStructuresPropertyKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, const uint32_t * pMaxPrimitiveCounts, VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
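Acceleration-structure creation is a two-step dance: ask the driver how big the structure and its scratch buffer must be, then allocate and build. A sketch of the sizing step for a bottom-level structure (`device`, `geometry`, and `primitiveCount` assumed):

```cpp
// Sketch: size a BLAS and its scratch buffer before allocating either.
vk::AccelerationStructureBuildGeometryInfoKHR buildInfo{};
buildInfo.type          = vk::AccelerationStructureTypeKHR::eBottomLevel;
buildInfo.mode          = vk::BuildAccelerationStructureModeKHR::eBuild;
buildInfo.geometryCount = 1;
buildInfo.pGeometries   = &geometry;

vk::AccelerationStructureBuildSizesInfoKHR sizes =
    device.getAccelerationStructureBuildSizesKHR( vk::AccelerationStructureBuildTypeKHR::eDevice,
                                                  buildInfo, primitiveCount );
// sizes.accelerationStructureSize and sizes.buildScratchSize drive the two buffer allocations.
```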
+    //=== VK_KHR_ray_tracing_pipeline ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename PipelineAllocator = std::allocator<VULKAN_HPP_NAMESPACE::Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, VULKAN_HPP_NAMESPACE::Pipeline>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>, typename B0 = PipelineAllocator, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceSize getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_KHR_sampler_ycbcr_conversion ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_bind_memory2 === + + template + VULKAN_HPP_NODISCARD Result bindBufferMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result bindImageMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_image_drm_format_modifier === + + template + VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_validation_cache === + + template + VULKAN_HPP_NODISCARD Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + 
createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = Uint8_tAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT 
) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_ray_tracing === + + template + VULKAN_HPP_NODISCARD Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR + getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + 
template + VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindAccelerationStructureMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B0 = PipelineAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue + createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B0 = PipelineAllocator, + typename std::enable_if>::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + 
VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getRayTracingShaderGroupHandleNV( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getAccelerationStructureHandleNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t shader, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_maintenance3 === + + template + void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_external_memory_host === + + template + VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, 
+ const void * pHostPointer, + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_calibrated_timestamps === + + template + VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type + getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B0 = Uint64_tAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type + getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_timeline_semaphore === + + template + VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d 
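+
+    // --- Illustrative sketch (not part of the upstream header): how the
+    // VK_KHR_timeline_semaphore members above are typically called, assuming a
+    // hypothetical vk::Device `device` and a semaphore `sem` created with
+    // vk::SemaphoreType::eTimeline.
+    //
+    //   uint64_t waitValue = 1;
+    //   vk::SemaphoreWaitInfo waitInfo( {}, 1, &sem, &waitValue );
+    //   vk::Result r = device.waitSemaphoresKHR( waitInfo, UINT64_MAX );  // blocks until sem >= 1
+    //
+    //   vk::SemaphoreSignalInfo signalInfo( sem, 2 );
+    //   device.signalSemaphoreKHR( signalInfo );                          // raises sem to 2 from the host
+    //
+    //   uint64_t value = device.getSemaphoreCounterValueKHR( sem );       // reads the current counter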
+
+    //=== VK_INTEL_performance_query ===
+    // [initializePerformanceApiINTEL, uninitializePerformanceApiINTEL,
+    //  acquirePerformanceConfigurationINTEL / acquirePerformanceConfigurationINTELUnique,
+    //  releasePerformanceConfigurationINTEL, release, getPerformanceParameterINTEL]
+
+    //=== VK_AMD_display_native_hdr ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,
+                             VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_EXT_buffer_device_address ===
+    // [getBufferAddressEXT]
+
+    //=== VK_KHR_present_wait ===
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                                   uint64_t presentId,
+                                                   uint64_t timeout,
+                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                                                         uint64_t presentId,
+                                                                         uint64_t timeout,
+                                                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_EXT_full_screen_exclusive ===
+    // [acquireFullScreenExclusiveModeEXT, releaseFullScreenExclusiveModeEXT,
+    //  getGroupSurfacePresentModes2EXT]
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
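+
+    // --- Illustrative sketch (not part of the upstream header): blocking until a given
+    // present has landed on screen via VK_KHR_present_wait, assuming a hypothetical
+    // vk::SwapchainKHR `swapchain` and a `presentId` previously attached to the matching
+    // vkQueuePresentKHR call through a chained vk::PresentIdKHR.
+    //
+    //   vk::Result r = device.waitForPresentKHR( swapchain, presentId, /*timeout*/ UINT64_MAX );
+    //   // r == vk::Result::eSuccess once the identified present is visible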
+
+    //=== VK_KHR_buffer_device_address ===
+    // [getBufferAddressKHR, getBufferOpaqueCaptureAddressKHR, getMemoryOpaqueCaptureAddressKHR]
+
+    //=== VK_EXT_host_query_reset ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                            uint32_t firstQuery,
+                            uint32_t queryCount,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_KHR_deferred_host_operations ===
+    // [createDeferredOperationKHR / createDeferredOperationKHRUnique, destroyDeferredOperationKHR,
+    //  destroy, getDeferredOperationMaxConcurrencyKHR, getDeferredOperationResultKHR,
+    //  deferredOperationJoinKHR]
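+
+    // --- Illustrative sketch (not part of the upstream header): driving a deferred host
+    // operation to completion, assuming a hypothetical vk::Device `device` and the default
+    // exception-enabled configuration (createDeferredOperationKHR then returns the handle
+    // directly).
+    //
+    //   vk::DeferredOperationKHR op = device.createDeferredOperationKHR();
+    //   // ... issue a deferrable call (e.g. a KHR acceleration-structure build) with `op` ...
+    //   vk::Result r;
+    //   do
+    //     r = device.deferredOperationJoinKHR( op );     // eSuccess, eThreadDoneKHR or eThreadIdleKHR
+    //   while ( r == vk::Result::eThreadIdleKHR );
+    //   vk::Result status = device.getDeferredOperationResultKHR( op );  // outcome of the deferred call
+    //   device.destroyDeferredOperationKHR( op );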
+
+    //=== VK_KHR_pipeline_executable_properties ===
+    // [getPipelineExecutablePropertiesKHR, getPipelineExecutableStatisticsKHR,
+    //  getPipelineExecutableInternalRepresentationsKHR]
+
+    //=== VK_EXT_host_image_copy ===
+    // [copyMemoryToImageEXT, copyImageToMemoryEXT, copyImageToImageEXT,
+    //  transitionImageLayoutEXT, getImageSubresourceLayout2EXT]
+
+    //=== VK_KHR_map_memory2 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR * pMemoryMapInfo,
+                                               void ** ppData,
+                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<void *>::type mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo,
+                                                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo,
+                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_EXT_swapchain_maintenance1 ===
+    // [releaseSwapchainImagesEXT]
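+
+    // --- Illustrative sketch (not part of the upstream header): uploading data through the
+    // VK_KHR_map_memory2 members above, assuming a hypothetical host-visible
+    // vk::DeviceMemory `memory` of at least `size` bytes and a source buffer `src`.
+    //
+    //   vk::MemoryMapInfoKHR mapInfo( {}, memory, /*offset*/ 0, size );
+    //   void * ptr = device.mapMemory2KHR( mapInfo );
+    //   std::memcpy( ptr, src, size );
+    //   device.unmapMemory2KHR( vk::MemoryUnmapInfoKHR( {}, memory ) );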
+    //=== VK_NV_device_generated_commands ===
+    // [getGeneratedCommandsMemoryRequirementsNV (including the StructureChain overload),
+    //  createIndirectCommandsLayoutNV / createIndirectCommandsLayoutNVUnique,
+    //  destroyIndirectCommandsLayoutNV, destroy]
+
+    //=== VK_EXT_private_data ===
+    // [createPrivateDataSlotEXT / createPrivateDataSlotEXTUnique, destroyPrivateDataSlotEXT,
+    //  setPrivateDataEXT, getPrivateDataEXT]
+
+    //=== VK_KHR_video_encode_queue ===
+    // [getEncodedVideoSessionParametersKHR]
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    //=== VK_NV_cuda_kernel_launch ===
+    // [createCudaModuleNV / createCudaModuleNVUnique, getCudaModuleCacheNV,
+    //  createCudaFunctionNV / createCudaFunctionNVUnique, destroyCudaModuleNV,
+    //  destroyCudaFunctionNV, destroy]
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
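+
+    // --- Illustrative sketch (not part of the upstream header): tagging an object with a
+    // 64-bit payload via the VK_EXT_private_data members above, assuming a hypothetical
+    // existing vk::Image `image`.
+    //
+    //   vk::PrivateDataSlot slot = device.createPrivateDataSlotEXT( vk::PrivateDataSlotCreateInfo{} );
+    //   device.setPrivateDataEXT( vk::ObjectType::eImage,
+    //                             uint64_t( VkImage( image ) ), slot, /*data*/ 42 );
+    //   uint64_t tag = device.getPrivateDataEXT( vk::ObjectType::eImage,
+    //                                            uint64_t( VkImage( image ) ), slot );
+    //   device.destroyPrivateDataSlotEXT( slot );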
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+    //=== VK_EXT_metal_objects ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT
+      exportMetalObjectsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+      exportMetalObjectsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+    //=== VK_EXT_descriptor_buffer ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,
+                                        VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes,
+                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize
+      getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,
+                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,
+                                                 uint32_t binding,
+                                                 VULKAN_HPP_NAMESPACE::DeviceSize * pOffset,
+                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize
+      getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,
+                                              uint32_t binding,
+                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo,
+                           size_t dataSize,
+                           void * pDescriptor,
+                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,
+                           size_t dataSize,
+                           void * pDescriptor,
+                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    template <typename DescriptorType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD DescriptorType getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,
+                                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result
+      getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo,
+                                               void * pData,
+                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type
+      getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info,
+                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
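[Editor's note, not part of the patch: a sketch of the query side of VK_EXT_descriptor_buffer declared above. All names are illustrative; it assumes `layout` was created with vk::DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT, `mappedDescriptorBuffer` points at host-visible memory backing the descriptor buffer, and `sampledImageDescriptorSize` comes from PhysicalDeviceDescriptorBufferPropertiesEXT.]

#include <vulkan/vulkan.hpp>

void writeImageDescriptor( vk::Device device,
                           vk::DescriptorSetLayout layout,
                           vk::DescriptorImageInfo const & imageInfo,
                           void * mappedDescriptorBuffer,
                           size_t sampledImageDescriptorSize )
{
  // Total bytes needed for the set, and the byte offset of binding 0 within it.
  vk::DeviceSize layoutSize     = device.getDescriptorSetLayoutSizeEXT( layout );
  vk::DeviceSize binding0Offset = device.getDescriptorSetLayoutBindingOffsetEXT( layout, 0 );
  (void)layoutSize;

  // Write one sampled-image descriptor directly into the descriptor buffer.
  vk::DescriptorGetInfoEXT getInfo{};
  getInfo.type               = vk::DescriptorType::eSampledImage;
  getInfo.data.pSampledImage = &imageInfo;
  device.getDescriptorEXT( &getInfo,
                           sampledImageDescriptorSize,
                           static_cast<char *>( mappedDescriptorBuffer ) + binding0Offset );
}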
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result
+      getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo,
+                                              void * pData,
+                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type
+      getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info,
+                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result
+      getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo,
+                                                  void * pData,
+                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type
+      getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info,
+                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result
+      getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo,
+                                                void * pData,
+                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type
+      getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info,
+                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result
+      getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo,
+                                                              void * pData,
+                                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type
+      getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info,
+                                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_EXT_device_fault ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts,
+                                                 VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo,
+                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>
+      getFaultInfoEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    //=== VK_FUCHSIA_external_memory ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
+                                                              zx_handle_t * pZirconHandle,
+                                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<zx_handle_t>::type
+      getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result + getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + template + VULKAN_HPP_NODISCARD Result + importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + template + VULKAN_HPP_NODISCARD Result createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 
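[Editor's note, not part of the patch: a minimal Fuchsia-only sketch for VK_FUCHSIA_buffer_collection, whose creation entry points are declared just above. It assumes `device` has the extension enabled and `collectionToken` is a zx_handle_t for a sysmem buffer-collection token obtained elsewhere; in real code this lives behind VK_USE_PLATFORM_FUCHSIA.]

#include <vulkan/vulkan.hpp>
#include <zircon/types.h>

void importCollection( vk::Device device, zx_handle_t collectionToken )
{
  // Wrap the sysmem token in a Vulkan buffer-collection handle.
  vk::BufferCollectionCreateInfoFUCHSIA createInfo{};
  createInfo.collectionToken = collectionToken;
  vk::BufferCollectionFUCHSIA collection = device.createBufferCollectionFUCHSIA( createInfo );

  // ... impose image/buffer constraints and query the negotiated format with the
  //     set*ConstraintsFUCHSIA / getBufferCollectionPropertiesFUCHSIA calls declared below ...

  device.destroyBufferCollectionFUCHSIA( collection );
}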
+ + template + VULKAN_HPP_NODISCARD Result + setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result + setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + + template + VULKAN_HPP_NODISCARD Result + getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, + 
VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,
+                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>
+      getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_NV_external_memory_rdma ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo,
+                                                          VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress,
+                                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type
+      getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo,
+                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_EXT_pipeline_properties ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo,
+                                                          VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties,
+                                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::BaseOutStructure>::type
+      getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
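[Editor's note, not part of the patch: a sketch for the VK_EXT_pipeline_properties query above. The query writes through a BaseOutStructure pointer, so the caller picks the concrete output struct; names are illustrative.]

#include <vulkan/vulkan.hpp>

// On vk::Result::eSuccess, identifier.pipelineIdentifier holds the pipeline's UUID.
vk::Result queryPipelineIdentifier( vk::Device device, vk::Pipeline pipeline, vk::PipelinePropertiesIdentifierEXT & identifier )
{
  vk::PipelineInfoEXT pipelineInfo{};
  pipelineInfo.pipeline = pipeline;
  return device.getPipelinePropertiesEXT( &pipelineInfo,
                                          reinterpret_cast<vk::BaseOutStructure *>( &identifier ) );
}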
+
+    //=== VK_EXT_opacity_micromap ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo,
+                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                   VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap,
+                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::MicromapEXT>::type
+      createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
+                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>>::type
+      createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
+                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
+                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+                                                   uint32_t infoCount,
+                                                   const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
+                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
+      buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+                         VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
+                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+                                                 const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,
+                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+                                                                       const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,
+                                                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+                                                         const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
+                                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+                                                                               const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,
+                                                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+                                                         const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
+                                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
+                                                                               const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,
+                                                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result writeMicromapsPropertiesEXT( uint32_t micromapCount,
+                                                             const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
+                                                             VULKAN_HPP_NAMESPACE::QueryType queryType,
+                                                             size_t dataSize,
+                                                             void * pData,
+                                                             size_t stride,
+                                                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType>::type + writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo, + VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT + getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_pageable_device_local_memory === + + template + void setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + float priority, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance4 === + + template + void getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = SparseImageMemoryRequirements2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VALVE_descriptor_set_host_mapping === + + template + void getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE + getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + void ** ppData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD void * getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_device_generated_commands_compute === + + template + void getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getPipelineIndirectMemoryRequirementsNV( const 
VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + DeviceAddress getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::DeviceAddress getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_shader_module_identifier === + + template + void getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT + getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT + getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_optical_flow === + + template + VULKAN_HPP_NODISCARD Result createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyOpticalFlowSessionNV( 
VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_maintenance5 === + + template + void getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void 
getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR + getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_shader_object === + + template + VULKAN_HPP_NODISCARD Result createShadersEXT( uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B0 = ShaderEXTAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD typename ResultValueType, ShaderEXTAllocator>>::type + createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B0 = ShaderEXTAllocator, + typename std::enable_if>::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, ShaderEXTAllocator>>::type + createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + size_t * pDataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = Uint8_tAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getShaderBinaryDataEXT( + VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_QCOM_tile_properties === + + template + VULKAN_HPP_NODISCARD Result getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + uint32_t * pPropertiesCount, + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = TilePropertiesQCOMAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + Result getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM + getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_low_latency2 === + + template + VULKAN_HPP_NODISCARD Result setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV + getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + + template + VULKAN_HPP_NODISCARD Result getScreenBufferPropertiesQNX( const struct _screen_buffer * buffer, + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const;
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+    //=== VK_KHR_calibrated_timestamps ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getCalibratedTimestampsKHR( uint32_t timestampCount,
+                                                            const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos,
+                                                            uint64_t * pTimestamps,
+                                                            uint64_t * pMaxDeviation,
+                                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Uint64_tAllocator = std::allocator<uint64_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
+      getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Uint64_tAllocator = std::allocator<uint64_t>,
+              typename Dispatch          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+              typename B0                = Uint64_tAllocator,
+              typename std::enable_if<std::is_same<typename B0::value_type, uint64_t>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
+      getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos,
+                                  Uint64_tAllocator & uint64_tAllocator,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<uint64_t, uint64_t>>::type
+      getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo,
+                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    operator VkDevice() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDevice m_device = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDevice>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Device;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Device;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Device>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
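[Editor's note, not part of the patch: a minimal sketch for the VK_KHR_calibrated_timestamps overloads above, assuming `device` has the extension enabled and the default exception-based configuration of vulkan.hpp.]

#include <vulkan/vulkan.hpp>
#include <array>

void sampleClocks( vk::Device device )
{
  // Pair the GPU clock with CLOCK_MONOTONIC (POSIX time domain; Windows would
  // use eQueryPerformanceCounter instead).
  std::array<vk::CalibratedTimestampInfoKHR, 2> infos = { {
    vk::CalibratedTimestampInfoKHR( vk::TimeDomainKHR::eDevice ),
    vk::CalibratedTimestampInfoKHR( vk::TimeDomainKHR::eClockMonotonic ),
  } };

  // Enhanced-mode overload: returns the sampled timestamps plus the maximum
  // deviation, in nanoseconds, between the paired samples.
  auto [timestamps, maxDeviation] = device.getCalibratedTimestampsKHR( infos );
  (void)timestamps;
  (void)maxDeviation;
}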
+
+  class DisplayModeKHR
+  {
+  public:
+    using CType      = VkDisplayModeKHR;
+    using NativeType = VkDisplayModeKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DisplayModeKHR() = default;
+
+    VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT : m_displayModeKHR( displayModeKHR ) {}
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+    DisplayModeKHR & operator=( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayModeKHR = displayModeKHR;
+      return *this;
+    }
+#endif
+
+    DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayModeKHR = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayModeKHR const & ) const = default;
+#else
+    bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR == rhs.m_displayModeKHR;
+    }
+
+    bool operator!=( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR != rhs.m_displayModeKHR;
+    }
+
+    bool operator<( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR < rhs.m_displayModeKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDisplayModeKHR m_displayModeKHR = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
+  };
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class PhysicalDevice
+  {
+  public:
+    using CType      = VkPhysicalDevice;
+    using NativeType = VkPhysicalDevice;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PhysicalDevice() = default;
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+
+    PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT : m_physicalDevice( physicalDevice ) {}
+
+    PhysicalDevice & operator=( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT
+    {
+      m_physicalDevice = physicalDevice;
+      return *this;
+    }
+
+    PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_physicalDevice = {};
+      return *this;
+    }
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevice const & ) const = default;
+#else
+    bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice == rhs.m_physicalDevice;
+    }
+
+    bool operator!=( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice != rhs.m_physicalDevice;
+    }
+
+    bool operator<( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice < rhs.m_physicalDevice;
+    }
+#endif
+
+    //=== VK_VERSION_1_0 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures,
+                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures
+      getFeatures( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+                              VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties
+      getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
+                                                          VULKAN_HPP_NAMESPACE::ImageType type,
+                                                          VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+                                                          VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties + getProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = QueueFamilyPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties + getMemoryProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Device * pDevice, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* 
VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = ExtensionPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = LayerPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = SparseImageFormatPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + SparseImageFormatPropertiesAllocator & 
sparseImageFormatPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_1 === + + template + void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = QueueFamilyProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = StructureChainAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = SparseImageFormatProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties + getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties + getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_3 === + + template + VULKAN_HPP_NODISCARD Result getToolProperties( uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getToolProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = PhysicalDeviceToolPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_surface === + + template + VULKAN_HPP_NODISCARD Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Bool32 * pSupported, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getSurfaceSupportKHR( + uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = SurfaceFormatKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = PresentModeKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_swapchain === + + template + VULKAN_HPP_NODISCARD Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pRectCount, + VULKAN_HPP_NAMESPACE::Rect2D * pRects, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = Rect2DAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getPresentRectanglesKHR( + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_display === + + template + VULKAN_HPP_NODISCARD Result getDisplayPropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = DisplayPropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlanePropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = DisplayPlanePropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + uint32_t * pDisplayCount, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = DisplayKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( + uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = DisplayModePropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayModePropertiesKHR( 
VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getDisplayPlaneCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + template + Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display * dpy, + VisualID visualID, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, + Display & dpy, + VisualID visualID, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + template + Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t & connection, + xcb_visualid_t visual_id, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + template + Bool32 getWaylandPresentationSupportKHR( 
uint32_t queueFamilyIndex, + struct wl_display * display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display & display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + template + Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_video_queue === + + template + VULKAN_HPP_NODISCARD Result getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile, + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = VideoFormatPropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_external_memory_capabilities === + + template + VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_NODISCARD typename ResultValueType::type + getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_get_physical_device_properties2 === + + template + void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = QueueFamilyProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = StructureChainAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = SparseImageFormatProperties2Allocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_external_memory_capabilities === + + template + void getExternalBufferPropertiesKHR( const 
VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties + getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_external_semaphore_capabilities === + + template + void getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_direct_mode_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + void releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + template + VULKAN_HPP_NODISCARD Result acquireXlibDisplayEXT( Display * dpy, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getRandROutputDisplayEXT( Display * dpy, + RROutput rrOutput, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + 
VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_external_fence_capabilities === + + template + void getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties + getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_performance_query === + + template + VULKAN_HPP_NODISCARD Result + enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename PerformanceCounterDescriptionKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType, + std::vector>>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename PerformanceCounterDescriptionKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = PerformanceCounterKHRAllocator, + typename B2 = PerformanceCounterDescriptionKHRAllocator, + typename std::enable_if::value && + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType, + std::vector>>::type + enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, + PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, + PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD uint32_t + getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_get_surface_capabilities2 === + + template + VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR( const 
VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = SurfaceFormat2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = StructureChainAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_get_display_properties2 === + + template + VULKAN_HPP_NODISCARD Result getDisplayProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = DisplayProperties2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayProperties2KHR( DisplayProperties2KHRAllocator & 
displayProperties2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlaneProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = DisplayPlaneProperties2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = DisplayModeProperties2KHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_sample_locations === + + template + void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT + getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== 
VK_EXT_calibrated_timestamps === + + template + VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = TimeDomainKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsEXT( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_fragment_shading_rate === + + template + VULKAN_HPP_NODISCARD Result getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getFragmentShadingRatesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = PhysicalDeviceFragmentShadingRateKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_tooling_info === + + template + VULKAN_HPP_NODISCARD Result getToolPropertiesEXT( uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getToolPropertiesEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = PhysicalDeviceToolPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cooperative_matrix === + + template + VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesNV( 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = CooperativeMatrixPropertiesNVAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_coverage_reduction_mode === + + template + VULKAN_HPP_NODISCARD Result + getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t * pCombinationCount, + VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = FramebufferMixedSamplesCombinationNVAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getSupportedFramebufferMixedSamplesCombinationsNV( FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + template + VULKAN_HPP_NODISCARD Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = PresentModeKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_acquire_drm_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result acquireDrmDisplayEXT( int32_t drmFd, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type + acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result 
getDrmDisplayEXT( int32_t drmFd, + uint32_t connectorId, + VULKAN_HPP_NAMESPACE::DisplayKHR * display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_encode_queue === + + template + VULKAN_HPP_NODISCARD Result + getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result getWinrtDisplayNV( uint32_t deviceRelativeId, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + template + Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, + IDirectFB * dfb, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::Bool32 
getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, + IDirectFB & dfb, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + template + Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window * window, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window & window, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_NV_optical_flow === + + template + VULKAN_HPP_NODISCARD Result getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, + uint32_t * pFormatCount, + VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = OpticalFlowImageFormatPropertiesNVAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, + OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_cooperative_matrix === + + template + VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = CooperativeMatrixPropertiesKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_calibrated_timestamps === + + template + VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsKHR( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = TimeDomainKHRAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsKHR( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice == VK_NULL_HANDLE; + } + + private: + VkPhysicalDevice m_physicalDevice = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Instance + { + public: + using CType = VkInstance; + using NativeType = VkInstance; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eInstance; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance; + + public: + VULKAN_HPP_CONSTEXPR Instance() = default; + + VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT : m_instance( instance ) {} + + Instance & operator=( VkInstance instance ) VULKAN_HPP_NOEXCEPT + { + m_instance = instance; + return *this; + } + + Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_instance = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Instance const & ) const = default; +#else + bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_instance == rhs.m_instance; + } + + bool operator!=( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_instance != rhs.m_instance; + } + + bool operator<( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_instance < rhs.m_instance; + } +#endif + + //=== VK_VERSION_1_0 === + + template + void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, + VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD 
typename ResultValueType>::type + enumeratePhysicalDevices( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = PhysicalDeviceAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + PFN_vkVoidFunction getProcAddr( const char * pName, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_1 === + + template + VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroups( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = PhysicalDeviceGroupPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_surface === + + template + void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_display === + + template + VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; 
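// --- Editorial sketch (not part of the generated header) ---------------------
// The declarations above follow the vulkan.hpp convention of pairing each raw,
// C-style entry point (pointer parameters, vk::Result return) with enhanced
// overloads that return their output directly and report failure through
// exceptions (or a ResultValue when exceptions are disabled), plus *Unique
// variants returning RAII handles. A minimal usage sketch, assuming a valid
// vk::Instance named `instance` and the default dispatcher:
//
//   #include <vulkan/vulkan.hpp>
//   #include <vector>
//
//   // raw overload: two-call idiom, mirrors vkEnumeratePhysicalDevices
//   uint32_t count = 0;
//   (void)instance.enumeratePhysicalDevices( &count, nullptr );
//   std::vector<vk::PhysicalDevice> devices( count );
//   (void)instance.enumeratePhysicalDevices( &count, devices.data() );
//
//   // enhanced overload: the vector is sized and filled for you,
//   // and a vk::SystemError is thrown on failure
//   std::vector<vk::PhysicalDevice> devices2 = instance.enumeratePhysicalDevices();
// -----------------------------------------------------------------------------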
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + template + VULKAN_HPP_NODISCARD Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + template + VULKAN_HPP_NODISCARD Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + template + VULKAN_HPP_NODISCARD Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch 
const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + template + VULKAN_HPP_NODISCARD Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + template + VULKAN_HPP_NODISCARD Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + template + VULKAN_HPP_NODISCARD Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, + 
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + template + VULKAN_HPP_NODISCARD Result createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_NODISCARD typename ResultValueType::type + createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type createStreamDescriptorSurfaceGGPUnique( + const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_GGP*/ + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + template + VULKAN_HPP_NODISCARD Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_device_group_creation === + + template + VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroupsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = PhysicalDeviceGroupPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + template + VULKAN_HPP_NODISCARD Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename 
ResultValueType::type + createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + template + VULKAN_HPP_NODISCARD Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + template + VULKAN_HPP_NODISCARD Result createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger 
VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + template + VULKAN_HPP_NODISCARD Result createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + template + VULKAN_HPP_NODISCARD Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator 
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_headless_surface === + + template + VULKAN_HPP_NODISCARD Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + template + VULKAN_HPP_NODISCARD Result createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + template + VULKAN_HPP_NODISCARD Result createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + 
createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + operator VkInstance() const VULKAN_HPP_NOEXCEPT + { + return m_instance; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_instance != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_instance == VK_NULL_HANDLE; + } + + private: + VkInstance m_instance = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Instance; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Instance; + }; + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + //=== VK_VERSION_1_0 === + + template + VULKAN_HPP_NODISCARD Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Instance * pInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createInstanceUnique( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result enumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B1 = ExtensionPropertiesAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + enumerateInstanceExtensionProperties( Optional layerName, + ExtensionPropertiesAllocator & extensionPropertiesAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result enumerateInstanceLayerProperties( 
                                                                  uint32_t *                              pPropertyCount,
+                                                                 VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
+                                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename LayerPropertiesAllocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
+    enumerateInstanceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+  template <typename LayerPropertiesAllocator = std::allocator<LayerProperties>,
+            typename Dispatch                 = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+            typename B1                       = LayerPropertiesAllocator,
+            typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type = 0>
+  VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
+    enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_1 ===
+
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD Result enumerateInstanceVersion( uint32_t *       pApiVersion,
+                                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  VULKAN_HPP_NODISCARD typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+}  // namespace VULKAN_HPP_NAMESPACE
+
+// operators to compare vk::-handles with nullptr
+template <typename T>
+typename std::enable_if<VULKAN_HPP_NAMESPACE::isVulkanHandleType<T>::value, bool>::type operator==( const T & v, std::nullptr_t )
+{
+  return !v;
+}
+
+template <typename T>
+typename std::enable_if<VULKAN_HPP_NAMESPACE::isVulkanHandleType<T>::value, bool>::type operator==( std::nullptr_t, const T & v )
+{
+  return !v;
+}
+
+template <typename T>
+typename std::enable_if<VULKAN_HPP_NAMESPACE::isVulkanHandleType<T>::value, bool>::type operator!=( const T & v, std::nullptr_t )
+{
+  return v;
+}
+
+template <typename T>
+typename std::enable_if<VULKAN_HPP_NAMESPACE::isVulkanHandleType<T>::value, bool>::type operator!=( std::nullptr_t, const T & v )
+{
+  return v;
+}
+#endif
diff --git a/MacroLibX/third_party/vulkan/vulkan_hash.hpp b/MacroLibX/third_party/vulkan/vulkan_hash.hpp
new file mode 100644
index 0000000..d17c9e4
--- /dev/null
+++ b/MacroLibX/third_party/vulkan/vulkan_hash.hpp
@@ -0,0 +1,16469 @@
+// Copyright 2015-2023 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
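// --- Editorial sketch (not part of the generated headers) --------------------
// The operator==/operator!= templates that close vulkan.hpp above are
// SFINAE-constrained to Vulkan handle types via isVulkanHandleType, so every
// vk:: handle can be compared against nullptr directly, in either operand
// order. A minimal sketch, assuming only <vulkan/vulkan.hpp> and <cassert>:
//
//   vk::Instance instance;             // default-constructed == VK_NULL_HANDLE
//   assert( instance == nullptr );     // picks the enable_if'd operator==
//   assert( nullptr == instance );     // mirrored operand order also provided
//   assert( !( instance != nullptr ) );
// -----------------------------------------------------------------------------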
+
+#ifndef VULKAN_HASH_HPP
+#define VULKAN_HASH_HPP
+
+#include <vulkan/vulkan.hpp>
+
+namespace std
+{
+  //=======================================
+  //=== HASH structures for Flags types ===
+  //=======================================
+
+  template <typename BitType>
+  struct hash<VULKAN_HPP_NAMESPACE::Flags<BitType>>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Flags<BitType> const & flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<typename std::underlying_type<BitType>::type>{}( static_cast<typename std::underlying_type<BitType>::type>( flags ) );
+    }
+  };
+
+  //===================================
+  //=== HASH structures for handles ===
+  //===================================
+
+  //=== VK_VERSION_1_0 ===
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Instance>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Instance const & instance ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkInstance>{}( static_cast<VkInstance>( instance ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice const & physicalDevice ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPhysicalDevice>{}( static_cast<VkPhysicalDevice>( physicalDevice ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Device>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Device const & device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDevice>{}( static_cast<VkDevice>( device ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Queue>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Queue const & queue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkQueue>{}( static_cast<VkQueue>( queue ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DeviceMemory>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemory const & deviceMemory ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkDeviceMemory>{}( static_cast<VkDeviceMemory>( deviceMemory ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Fence>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Fence const & fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkFence>{}( static_cast<VkFence>( fence ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Semaphore>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Semaphore const & semaphore ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkSemaphore>{}( static_cast<VkSemaphore>( semaphore ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Event>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Event const & event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkEvent>{}( static_cast<VkEvent>( event ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::QueryPool>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPool const & queryPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkQueryPool>{}( static_cast<VkQueryPool>( queryPool ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Buffer>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Buffer const & buffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkBuffer>{}( static_cast<VkBuffer>( buffer ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferView>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferView const & bufferView ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkBufferView>{}( static_cast<VkBufferView>( bufferView ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Image>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Image const & image ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkImage>{}( static_cast<VkImage>( image ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageView>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageView const & imageView ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkImageView>{}( static_cast<VkImageView>( imageView ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ShaderModule>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModule const & shaderModule ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkShaderModule>{}( static_cast<VkShaderModule>( shaderModule ) );
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineCache>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCache const & pipelineCache ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::hash<VkPipelineCache>{}( static_cast<VkPipelineCache>( pipelineCache ) );
} + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Pipeline const & pipeline ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( pipeline ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLayout const & pipelineLayout ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( pipelineLayout ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Sampler const & sampler ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( sampler ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPool const & descriptorPool ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( descriptorPool ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSet const & descriptorSet ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( descriptorSet ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayout const & descriptorSetLayout ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( descriptorSetLayout ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Framebuffer const & framebuffer ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( framebuffer ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPass const & renderPass ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( renderPass ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandPool const & commandPool ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( commandPool ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBuffer const & commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( commandBuffer ) ); + } + }; + + //=== VK_VERSION_1_1 === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & samplerYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( samplerYcbcrConversion ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const & descriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( descriptorUpdateTemplate ) ); + } + }; + + //=== VK_VERSION_1_3 === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PrivateDataSlot const & privateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( privateDataSlot ) ); + } + }; + + //=== VK_KHR_surface === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceKHR const & surfaceKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( surfaceKHR ) ); + } + }; + + //=== VK_KHR_swapchain === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainKHR const & swapchainKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( swapchainKHR ) ); + } + }; + + //=== VK_KHR_display === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayKHR const & displayKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( 
displayKHR ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeKHR const & displayModeKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( displayModeKHR ) ); + } + }; + + //=== VK_EXT_debug_report === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & debugReportCallbackEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( debugReportCallbackEXT ) ); + } + }; + + //=== VK_KHR_video_queue === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionKHR const & videoSessionKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( videoSessionKHR ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & videoSessionParametersKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( videoSessionParametersKHR ) ); + } + }; + + //=== VK_NVX_binary_import === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuModuleNVX const & cuModuleNVX ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( cuModuleNVX ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuFunctionNVX const & cuFunctionNVX ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( cuFunctionNVX ) ); + } + }; + + //=== VK_EXT_debug_utils === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const & debugUtilsMessengerEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( debugUtilsMessengerEXT ) ); + } + }; + + //=== VK_KHR_acceleration_structure === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const & accelerationStructureKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( accelerationStructureKHR ) ); + } + }; + + //=== VK_EXT_validation_cache === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & validationCacheEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( validationCacheEXT ) ); + } + }; + + //=== VK_NV_ray_tracing === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureNV const & accelerationStructureNV ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( accelerationStructureNV ) ); + } + }; + + //=== VK_INTEL_performance_query === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & performanceConfigurationINTEL ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( performanceConfigurationINTEL ) ); + } + }; + + //=== VK_KHR_deferred_host_operations === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeferredOperationKHR const & deferredOperationKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( deferredOperationKHR ) ); + } + }; + + //=== VK_NV_device_generated_commands === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & indirectCommandsLayoutNV ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( indirectCommandsLayoutNV ) ); + } + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + template <> + struct 
hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaModuleNV const & cudaModuleNV ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( cudaModuleNV ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaFunctionNV const & cudaFunctionNV ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( cudaFunctionNV ) ); + } + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const & bufferCollectionFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( bufferCollectionFUCHSIA ) ); + } + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapEXT const & micromapEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( micromapEXT ) ); + } + }; + + //=== VK_NV_optical_flow === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV const & opticalFlowSessionNV ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( opticalFlowSessionNV ) ); + } + }; + + //=== VK_EXT_shader_object === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderEXT const & shaderEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( shaderEXT ) ); + } + }; + +#if 14 <= VULKAN_HPP_CPP_VERSION + //====================================== + //=== HASH structures for structures === + //====================================== + +# if !defined( VULKAN_HPP_HASH_COMBINE ) +# define VULKAN_HPP_HASH_COMBINE( seed, value ) \ + seed ^= std::hash::type>{}( value ) + 0x9e3779b9 + ( seed << 6 ) + ( seed >> 2 ) +# endif + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AabbPositionsKHR const & aabbPositionsKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.minX ); + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.minY ); + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.minZ ); + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.maxX ); + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.maxY ); + VULKAN_HPP_HASH_COMBINE( seed, aabbPositionsKHR.maxZ ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR const & accelerationStructureBuildRangeInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.primitiveCount ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.primitiveOffset ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.firstVertex ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildRangeInfoKHR.transformOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR const & accelerationStructureBuildSizesInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.accelerationStructureSize ); + 
VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.updateScratchSize ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureBuildSizesInfoKHR.buildScratchSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT const & accelerationStructureCaptureDescriptorDataInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.accelerationStructure ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.accelerationStructureNV ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & accelerationStructureCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.createFlags ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.offset ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.size ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.type ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoKHR.deviceAddress ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & geometryTrianglesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexData ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexOffset ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexCount ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexStride ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.vertexFormat ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexData ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexOffset ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexCount ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.indexType ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.transformData ); + VULKAN_HPP_HASH_COMBINE( seed, geometryTrianglesNV.transformOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & geometryAABBNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.aabbData ); + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.numAABBs ); + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.stride ); + VULKAN_HPP_HASH_COMBINE( seed, geometryAABBNV.offset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometryDataNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t 
seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, geometryDataNV.triangles ); + VULKAN_HPP_HASH_COMBINE( seed, geometryDataNV.aabbs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeometryNV const & geometryNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, geometryNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, geometryNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, geometryNV.geometryType ); + VULKAN_HPP_HASH_COMBINE( seed, geometryNV.geometry ); + VULKAN_HPP_HASH_COMBINE( seed, geometryNV.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & accelerationStructureInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.type ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.instanceCount ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.geometryCount ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInfoNV.pGeometries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & accelerationStructureCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.compactedSize ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCreateInfoNV.info ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const & accelerationStructureDeviceAddressInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureDeviceAddressInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureDeviceAddressInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureDeviceAddressInfoKHR.accelerationStructure ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformMatrixKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + for ( size_t i = 0; i < 3; ++i ) + { + for ( size_t j = 0; j < 4; ++j ) + { + VULKAN_HPP_HASH_COMBINE( seed, transformMatrixKHR.matrix[i][j] ); + } + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const & accelerationStructureInstanceKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.transform ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.instanceCustomIndex ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.mask ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.instanceShaderBindingTableRecordOffset ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureInstanceKHR.accelerationStructureReference ); + return seed; + } + }; + 
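// --- Editorial sketch (not part of the generated header) ---------------------
// VULKAN_HPP_HASH_COMBINE (defined above, gated on C++14) folds each member
// into a running seed with the boost-style recipe
//   seed ^= hash(value) + 0x9e3779b9 + (seed << 6) + (seed >> 2)
// and the std::hash specializations in this header make vk:: handles and,
// from C++14 on, vk:: structure types usable as unordered-container keys.
// A minimal sketch, assuming vulkan_hash.hpp is included:
//
//   #include <unordered_set>
//
//   std::unordered_set<vk::Image> seen;   // uses std::hash<vk::Image> from above
//   vk::Image image;                      // assume a valid handle in real code
//   seen.insert( image );
//   bool duplicate = !seen.insert( image ).second;   // true: already present
// -----------------------------------------------------------------------------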
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const & accelerationStructureMatrixMotionInstanceNV ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.transformT0 );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.transformT1 );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.instanceCustomIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.mask );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.instanceShaderBindingTableRecordOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMatrixMotionInstanceNV.accelerationStructureReference );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const & accelerationStructureMemoryRequirementsInfoNV ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.type );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMemoryRequirementsInfoNV.accelerationStructure );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV const & accelerationStructureMotionInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.maxInstances );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureMotionInfoNV.flags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SRTDataNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SRTDataNV const & sRTDataNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.sx );
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.a );
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.b );
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.pvx );
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.sy );
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.c );
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.pvy );
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.sz );
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.pvz );
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qx );
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qy );
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qz );
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.qw );
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.tx );
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.ty );
+      VULKAN_HPP_HASH_COMBINE( seed, sRTDataNV.tz );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const & accelerationStructureSRTMotionInstanceNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.transformT0 );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.transformT1 );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.instanceCustomIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.mask );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.instanceShaderBindingTableRecordOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureSRTMotionInstanceNV.accelerationStructureReference );
+      return seed;
+    }
+  };
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapUsageEXT const & micromapUsageEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.count );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.subdivisionLevel );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.format );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const & accelerationStructureVersionInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureVersionInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureVersionInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureVersionInfoKHR.pVersionData );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const & acquireNextImageInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.swapchain );
+      VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.timeout );
+      VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.semaphore );
+      VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.fence );
+      VULKAN_HPP_HASH_COMBINE( seed, acquireNextImageInfoKHR.deviceMask );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const & acquireProfilingLockInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, acquireProfilingLockInfoKHR.timeout );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AllocationCallbacks>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AllocationCallbacks const & allocationCallbacks ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pUserData );
+      VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnAllocation );
+      VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnReallocation );
+      VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnFree );
+      VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnInternalAllocation );
+      VULKAN_HPP_HASH_COMBINE( seed, allocationCallbacks.pfnInternalFree );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC const & amigoProfilingSubmitInfoSEC ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.firstDrawTimestamp );
+      VULKAN_HPP_HASH_COMBINE( seed, amigoProfilingSubmitInfoSEC.swapBufferTimestamp );
+      return seed;
+    }
+  };
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ComponentMapping>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ComponentMapping const & componentMapping ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, componentMapping.r );
+      VULKAN_HPP_HASH_COMBINE( seed, componentMapping.g );
+      VULKAN_HPP_HASH_COMBINE( seed, componentMapping.b );
+      VULKAN_HPP_HASH_COMBINE( seed, componentMapping.a );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID const & androidHardwareBufferFormatProperties2ANDROID ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.format );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.externalFormat );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.formatFeatures );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.samplerYcbcrConversionComponents );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedYcbcrModel );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedYcbcrRange );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedXChromaOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatProperties2ANDROID.suggestedYChromaOffset );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+# if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const & androidHardwareBufferFormatPropertiesANDROID ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.format );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.externalFormat );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.formatFeatures );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.samplerYcbcrConversionComponents );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedYcbcrModel );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedYcbcrRange );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedXChromaOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatPropertiesANDROID.suggestedYChromaOffset );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+# if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatResolvePropertiesANDROID>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatResolvePropertiesANDROID const & androidHardwareBufferFormatResolvePropertiesANDROID ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatResolvePropertiesANDROID.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatResolvePropertiesANDROID.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatResolvePropertiesANDROID.colorAttachmentFormat );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+# if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const & androidHardwareBufferPropertiesANDROID ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.allocationSize );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferPropertiesANDROID.memoryTypeBits );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+# if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const & androidHardwareBufferUsageANDROID ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferUsageANDROID.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferUsageANDROID.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferUsageANDROID.androidHardwareBufferUsage );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+# if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & androidSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, androidSurfaceCreateInfoKHR.window );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ApplicationInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ApplicationInfo const & applicationInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.pNext );
+      for ( const char * p = applicationInfo.pApplicationName; *p != '\0'; ++p )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, *p );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.applicationVersion );
+      for ( const char * p = applicationInfo.pEngineName; *p != '\0'; ++p )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, *p );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.engineVersion );
+      VULKAN_HPP_HASH_COMBINE( seed, applicationInfo.apiVersion );
+      return seed;
+    }
+  };
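+
+  // Note: the ApplicationInfo specialization above hashes pApplicationName and
+  // pEngineName character by character, so two infos holding equal strings at
+  // different addresses hash identically; both pointers are assumed to be non-null
+  // here.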
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AttachmentDescription>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentDescription const & attachmentDescription ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.format );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.samples );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.loadOp );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.storeOp );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.stencilLoadOp );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.stencilStoreOp );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.initialLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription.finalLayout );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AttachmentDescription2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentDescription2 const & attachmentDescription2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.format );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.samples );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.loadOp );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.storeOp );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.stencilLoadOp );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.stencilStoreOp );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.initialLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescription2.finalLayout );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const & attachmentDescriptionStencilLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.stencilInitialLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentDescriptionStencilLayout.stencilFinalLayout );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AttachmentReference>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentReference const & attachmentReference ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentReference.attachment );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentReference.layout );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AttachmentReference2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentReference2 const & attachmentReference2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.attachment );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.layout );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentReference2.aspectMask );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const & attachmentReferenceStencilLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentReferenceStencilLayout.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentReferenceStencilLayout.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentReferenceStencilLayout.stencilLayout );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD const & attachmentSampleCountInfoAMD ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.colorAttachmentCount );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.pColorAttachmentSamples );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleCountInfoAMD.depthStencilAttachmentSamples );
+      return seed;
+    }
+  };
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Extent2D>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Extent2D const & extent2D ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, extent2D.width );
+      VULKAN_HPP_HASH_COMBINE( seed, extent2D.height );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SampleLocationEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SampleLocationEXT const & sampleLocationEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, sampleLocationEXT.x );
+      VULKAN_HPP_HASH_COMBINE( seed, sampleLocationEXT.y );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sampleLocationsPerPixel );
+      VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sampleLocationGridSize );
+      VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.sampleLocationsCount );
+      VULKAN_HPP_HASH_COMBINE( seed, sampleLocationsInfoEXT.pSampleLocations );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const & attachmentSampleLocationsEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleLocationsEXT.attachmentIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, attachmentSampleLocationsEXT.sampleLocationsInfo );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BaseInStructure>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BaseInStructure const & baseInStructure ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, baseInStructure.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, baseInStructure.pNext );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BaseOutStructure>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BaseOutStructure const & baseOutStructure ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, baseOutStructure.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, baseOutStructure.pNext );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const & bindAccelerationStructureMemoryInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.accelerationStructure );
+      VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.memory );
+      VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.memoryOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.deviceIndexCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindAccelerationStructureMemoryInfoNV.pDeviceIndices );
+      return seed;
+    }
+  };
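+
+  // These specializations are what let Vulkan-Hpp structs act as keys in standard
+  // unordered containers. An illustrative (hypothetical) use:
+  //
+  //   std::size_t h = std::hash<VULKAN_HPP_NAMESPACE::Extent2D>{}( extent );
+  //   std::unordered_set<VULKAN_HPP_NAMESPACE::Extent2D> seenSizes;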
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const & bindBufferMemoryDeviceGroupInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.deviceIndexCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryDeviceGroupInfo.pDeviceIndices );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const & bindBufferMemoryInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.buffer );
+      VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.memory );
+      VULKAN_HPP_HASH_COMBINE( seed, bindBufferMemoryInfo.memoryOffset );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT const & bindDescriptorBufferEmbeddedSamplersInfoEXT ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorBufferEmbeddedSamplersInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorBufferEmbeddedSamplersInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorBufferEmbeddedSamplersInfoEXT.stageFlags );
+      VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorBufferEmbeddedSamplersInfoEXT.layout );
+      VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorBufferEmbeddedSamplersInfoEXT.set );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR const & bindDescriptorSetsInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.stageFlags );
+      VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.layout );
+      VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.firstSet );
+      VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.descriptorSetCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.pDescriptorSets );
+      VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.dynamicOffsetCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.pDynamicOffsets );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Offset2D>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Offset2D const & offset2D ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, offset2D.x );
+      VULKAN_HPP_HASH_COMBINE( seed, offset2D.y );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Rect2D>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Rect2D const & rect2D ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, rect2D.offset );
+      VULKAN_HPP_HASH_COMBINE( seed, rect2D.extent );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const & bindImageMemoryDeviceGroupInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.deviceIndexCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.pDeviceIndices );
+      VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.splitInstanceBindRegionCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryDeviceGroupInfo.pSplitInstanceBindRegions );
+      return seed;
+    }
+  };
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const & bindImageMemoryInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.image );
+      VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.memory );
+      VULKAN_HPP_HASH_COMBINE( seed, bindImageMemoryInfo.memoryOffset );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const & bindImageMemorySwapchainInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.swapchain );
+      VULKAN_HPP_HASH_COMBINE( seed, bindImageMemorySwapchainInfoKHR.imageIndex );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const & bindImagePlaneMemoryInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindImagePlaneMemoryInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bindImagePlaneMemoryInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bindImagePlaneMemoryInfo.planeAspect );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const & bindIndexBufferIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandNV.bufferAddress );
+      VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandNV.size );
+      VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandNV.indexType );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindMemoryStatusKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindMemoryStatusKHR const & bindMemoryStatusKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindMemoryStatusKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bindMemoryStatusKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bindMemoryStatusKHR.pResult );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV const & bindPipelineIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindPipelineIndirectCommandNV.pipelineAddress );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const & bindShaderGroupIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindShaderGroupIndirectCommandNV.groupIndex );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SparseMemoryBind>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseMemoryBind const & sparseMemoryBind ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.resourceOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.size );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.memory );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.memoryOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseMemoryBind.flags );
+      return seed;
+    }
+  };
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const & sparseBufferMemoryBindInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, sparseBufferMemoryBindInfo.buffer );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseBufferMemoryBindInfo.bindCount );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseBufferMemoryBindInfo.pBinds );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const & sparseImageOpaqueMemoryBindInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageOpaqueMemoryBindInfo.image );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageOpaqueMemoryBindInfo.bindCount );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageOpaqueMemoryBindInfo.pBinds );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageSubresource>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageSubresource.aspectMask );
+      VULKAN_HPP_HASH_COMBINE( seed, imageSubresource.mipLevel );
+      VULKAN_HPP_HASH_COMBINE( seed, imageSubresource.arrayLayer );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Offset3D>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Offset3D const & offset3D ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, offset3D.x );
+      VULKAN_HPP_HASH_COMBINE( seed, offset3D.y );
+      VULKAN_HPP_HASH_COMBINE( seed, offset3D.z );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Extent3D>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Extent3D const & extent3D ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, extent3D.width );
+      VULKAN_HPP_HASH_COMBINE( seed, extent3D.height );
+      VULKAN_HPP_HASH_COMBINE( seed, extent3D.depth );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const & sparseImageMemoryBind ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.subresource );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.offset );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.extent );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.memory );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.memoryOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBind.flags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const & sparseImageMemoryBindInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBindInfo.image );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBindInfo.bindCount );
+      VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryBindInfo.pBinds );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindSparseInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindSparseInfo const & bindSparseInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.waitSemaphoreCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pWaitSemaphores );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.bufferBindCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pBufferBinds );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.imageOpaqueBindCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pImageOpaqueBinds );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.imageBindCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pImageBinds );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.signalSemaphoreCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bindSparseInfo.pSignalSemaphores );
+      return seed;
+    }
+  };
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV const & bindVertexBufferIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandNV.bufferAddress );
+      VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandNV.size );
+      VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandNV.stride );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR const & bindVideoSessionMemoryInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memoryBindIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memory );
+      VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memoryOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, bindVideoSessionMemoryInfoKHR.memorySize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM const & blitImageCubicWeightsInfoQCOM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageCubicWeightsInfoQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageCubicWeightsInfoQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageCubicWeightsInfoQCOM.cubicWeights );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresourceLayers ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.aspectMask );
+      VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.mipLevel );
+      VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.baseArrayLayer );
+      VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceLayers.layerCount );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageBlit2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageBlit2 const & imageBlit2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.srcSubresource );
+      for ( size_t i = 0; i < 2; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.srcOffsets[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.dstSubresource );
+      for ( size_t i = 0; i < 2; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, imageBlit2.dstOffsets[i] );
+      }
+      return seed;
+    }
+  };
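+
+  // Unlike the string members of ApplicationInfo, array and chain pointers such as
+  // pNext or the pRegions of BlitImageInfo2 below are combined by pointer value,
+  // not by pointee contents, so structurally equal objects that reference
+  // different arrays produce different hashes.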
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BlitImageInfo2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BlitImageInfo2 const & blitImageInfo2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.srcImage );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.srcImageLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.dstImage );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.dstImageLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.regionCount );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.pRegions );
+      VULKAN_HPP_HASH_COMBINE( seed, blitImageInfo2.filter );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT const & bufferCaptureDescriptorDataInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCaptureDescriptorDataInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCaptureDescriptorDataInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCaptureDescriptorDataInfoEXT.buffer );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA const & bufferCollectionBufferCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.collection );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionBufferCreateInfoFUCHSIA.index );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.maxBufferCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCountForCamping );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCountForDedicatedSlack );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionConstraintsInfoFUCHSIA.minBufferCountForSharedSlack );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & bufferCollectionCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionCreateInfoFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionCreateInfoFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionCreateInfoFUCHSIA.collectionToken );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA const & bufferCollectionImageCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.collection );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionImageCreateInfoFUCHSIA.index );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const & sysmemColorSpaceFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, sysmemColorSpaceFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, sysmemColorSpaceFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, sysmemColorSpaceFUCHSIA.colorSpace );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA const & bufferCollectionPropertiesFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.memoryTypeBits );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.bufferCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.createInfoIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.sysmemPixelFormat );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.formatFeatures );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.sysmemColorSpaceIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.samplerYcbcrConversionComponents );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedYcbcrModel );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedYcbcrRange );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedXChromaOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCollectionPropertiesFUCHSIA.suggestedYChromaOffset );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & bufferCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.size );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.usage );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.sharingMode );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.queueFamilyIndexCount );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCreateInfo.pQueueFamilyIndices );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA const & bufferConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.createInfo );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.requiredFormatFeatures );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferConstraintsInfoFUCHSIA.bufferCollectionConstraints );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferCopy>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCopy const & bufferCopy ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCopy.srcOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCopy.dstOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCopy.size );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferCopy2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCopy2 const & bufferCopy2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.srcOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.dstOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferCopy2.size );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const & bufferDeviceAddressCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressCreateInfoEXT.deviceAddress );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const & bufferDeviceAddressInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferDeviceAddressInfo.buffer );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferImageCopy>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferImageCopy const & bufferImageCopy ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.bufferOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.bufferRowLength );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.bufferImageHeight );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.imageSubresource );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.imageOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy.imageExtent );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferImageCopy2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferImageCopy2 const & bufferImageCopy2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.bufferOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.bufferRowLength );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.bufferImageHeight );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.imageSubresource );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.imageOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferImageCopy2.imageExtent );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const & bufferMemoryBarrier ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.srcAccessMask );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.dstAccessMask );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.srcQueueFamilyIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.dstQueueFamilyIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.buffer );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.offset );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier.size );
+      return seed;
+    }
+  };
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 const & bufferMemoryBarrier2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.srcStageMask );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.srcAccessMask );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.dstStageMask );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.dstAccessMask );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.srcQueueFamilyIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.dstQueueFamilyIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.buffer );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.offset );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryBarrier2.size );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const & bufferMemoryRequirementsInfo2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryRequirementsInfo2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryRequirementsInfo2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferMemoryRequirementsInfo2.buffer );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const & bufferOpaqueCaptureAddressCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferOpaqueCaptureAddressCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferOpaqueCaptureAddressCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferOpaqueCaptureAddressCreateInfo.opaqueCaptureAddress );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR const & bufferUsageFlags2CreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfoKHR.usage );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & bufferViewCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.buffer );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.format );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.offset );
+      VULKAN_HPP_HASH_COMBINE( seed, bufferViewCreateInfo.range );
+      return seed;
+    }
+  };
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR const & calibratedTimestampInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoKHR.timeDomain );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CheckpointData2NV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::CheckpointData2NV const & checkpointData2NV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.stage );
+      VULKAN_HPP_HASH_COMBINE( seed, checkpointData2NV.pCheckpointMarker );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CheckpointDataNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::CheckpointDataNV const & checkpointDataNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.stage );
+      VULKAN_HPP_HASH_COMBINE( seed, checkpointDataNV.pCheckpointMarker );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & clearDepthStencilValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, clearDepthStencilValue.depth );
+      VULKAN_HPP_HASH_COMBINE( seed, clearDepthStencilValue.stencil );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ClearRect>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ClearRect const & clearRect ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, clearRect.rect );
+      VULKAN_HPP_HASH_COMBINE( seed, clearRect.baseArrayLayer );
+      VULKAN_HPP_HASH_COMBINE( seed, clearRect.layerCount );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const & coarseSampleLocationNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.pixelX );
+      VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.pixelY );
+      VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.sample );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const & coarseSampleOrderCustomNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.shadingRate );
+      VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.sampleCount );
+      VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.sampleLocationCount );
+      VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.pSampleLocations );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT const & colorBlendAdvancedEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.advancedBlendOp );
+      VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.srcPremultiplied );
+      VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.dstPremultiplied );
+      VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.blendOverlap );
+      VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.clampResults );
+      return seed;
+    }
+  };
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT const & colorBlendEquationEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.srcColorBlendFactor );
+      VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.dstColorBlendFactor );
+      VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.colorBlendOp );
+      VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.srcAlphaBlendFactor );
+      VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.dstAlphaBlendFactor );
+      VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.alphaBlendOp );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & commandBufferAllocateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.commandPool );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.level );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.commandBufferCount );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const & commandBufferInheritanceInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.renderPass );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.subpass );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.framebuffer );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.occlusionQueryEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.queryFlags );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.pipelineStatistics );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const & commandBufferBeginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.pInheritanceInfo );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const & commandBufferInheritanceConditionalRenderingInfoEXT ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.conditionalRenderingEnable );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const & commandBufferInheritanceRenderPassTransformInfoQCOM ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.transform );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.renderArea );
+      return seed;
+    }
+  };
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo const & commandBufferInheritanceRenderingInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.viewMask );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.colorAttachmentCount );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.pColorAttachmentFormats );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.depthAttachmentFormat );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.stencilAttachmentFormat );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderingInfo.rasterizationSamples );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::Viewport>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::Viewport const & viewport ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, viewport.x );
+      VULKAN_HPP_HASH_COMBINE( seed, viewport.y );
+      VULKAN_HPP_HASH_COMBINE( seed, viewport.width );
+      VULKAN_HPP_HASH_COMBINE( seed, viewport.height );
+      VULKAN_HPP_HASH_COMBINE( seed, viewport.minDepth );
+      VULKAN_HPP_HASH_COMBINE( seed, viewport.maxDepth );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV const & commandBufferInheritanceViewportScissorInfoNV ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.viewportScissor2D );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.viewportDepthCount );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceViewportScissorInfoNV.pViewportDepths );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo const & commandBufferSubmitInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.commandBuffer );
+      VULKAN_HPP_HASH_COMBINE( seed, commandBufferSubmitInfo.deviceMask );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & commandPoolCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.queueFamilyIndex );
+      return seed;
+    }
+  };
commandPoolCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, commandPoolCreateInfo.queueFamilyIndex ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::SpecializationMapEntry> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SpecializationMapEntry const & specializationMapEntry ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, specializationMapEntry.constantID ); + VULKAN_HPP_HASH_COMBINE( seed, specializationMapEntry.offset ); + VULKAN_HPP_HASH_COMBINE( seed, specializationMapEntry.size ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::SpecializationInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SpecializationInfo const & specializationInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.mapEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.pMapEntries ); + VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, specializationInfo.pData ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & pipelineShaderStageCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.stage ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.module ); + for ( const char * p = pipelineShaderStageCreateInfo.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageCreateInfo.pSpecializationInfo ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & computePipelineCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.stage ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.basePipelineHandle ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineCreateInfo.basePipelineIndex ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV const & computePipelineIndirectBufferInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.deviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.size ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.pipelineDeviceAddressCaptureReplay ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const & conditionalRenderingBeginInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed,
conditionalRenderingBeginInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.offset ); + VULKAN_HPP_HASH_COMBINE( seed, conditionalRenderingBeginInfoEXT.flags ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::ConformanceVersion> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ConformanceVersion const & conformanceVersion ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.major ); + VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.minor ); + VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.subminor ); + VULKAN_HPP_HASH_COMBINE( seed, conformanceVersion.patch ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR const & cooperativeMatrixPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.MSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.NSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.KSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.AType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.BType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.CType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.ResultType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.saturatingAccumulation ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.scope ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const & cooperativeMatrixPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.MSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.NSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.KSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.AType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.BType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.CType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.DType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesNV.scope ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR const & copyAccelerationStructureInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.src ); + VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.dst ); + VULKAN_HPP_HASH_COMBINE( seed, copyAccelerationStructureInfoKHR.mode ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CopyBufferInfo2> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyBufferInfo2 const & copyBufferInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; +
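+ // VULKAN_HPP_HASH_COMBINE (defined earlier in this header) folds each member's std::hash value into the running seed, boost::hash_combine style.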
VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.srcBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.dstBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferInfo2.pRegions ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 const & copyBufferToImageInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.srcBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyBufferToImageInfo2.pRegions ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const & copyCommandTransformInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyCommandTransformInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyCommandTransformInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyCommandTransformInfoQCOM.transform ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CopyDescriptorSet> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyDescriptorSet const & copyDescriptorSet ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.srcSet ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.srcBinding ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.srcArrayElement ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.dstSet ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.dstBinding ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.dstArrayElement ); + VULKAN_HPP_HASH_COMBINE( seed, copyDescriptorSet.descriptorCount ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::ImageCopy2> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCopy2 const & imageCopy2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.srcSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.srcOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.dstSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.dstOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageCopy2.extent ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CopyImageInfo2> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageInfo2 const & copyImageInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed,
copyImageInfo2.pRegions ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 const & copyImageToBufferInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.dstBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToBufferInfo2.pRegions ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT const & copyImageToImageInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.pRegions ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT const & imageToMemoryCopyEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.pHostPointer ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.memoryRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.memoryImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.imageExtent ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT const & copyImageToMemoryInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.pRegions ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV const & copyMemoryIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryIndirectCommandNV.srcAddress ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryIndirectCommandNV.dstAddress ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryIndirectCommandNV.size ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV> + { +
std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV const & copyMemoryToImageIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.srcAddress ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.bufferRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.bufferImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.imageExtent ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT const & memoryToImageCopyEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.pHostPointer ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.memoryRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.memoryImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.imageExtent ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT const & copyMemoryToImageInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.pRegions ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT const & copyMicromapInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.src ); + VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.dst ); + VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.mode ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & cuFunctionCreateInfoNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cuFunctionCreateInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cuFunctionCreateInfoNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cuFunctionCreateInfoNVX.module ); + for ( const char * p = cuFunctionCreateInfoNVX.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX const & cuLaunchInfoNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.pNext ); +
VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.function ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.gridDimX ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.gridDimY ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.gridDimZ ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.blockDimX ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.blockDimY ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.blockDimZ ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.sharedMemBytes ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.paramCount ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.pParams ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.extraCount ); + VULKAN_HPP_HASH_COMBINE( seed, cuLaunchInfoNVX.pExtras ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & cuModuleCreateInfoNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleCreateInfoNVX.pData ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash<VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & cudaFunctionCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cudaFunctionCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cudaFunctionCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cudaFunctionCreateInfoNV.module ); + for ( const char * p = cudaFunctionCreateInfoNV.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash<VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV const & cudaLaunchInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.function ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.gridDimX ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.gridDimY ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.gridDimZ ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.blockDimX ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.blockDimY ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.blockDimZ ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.sharedMemBytes ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.paramCount ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.pParams ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.extraCount ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.pExtras ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash<VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & cudaModuleCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.pData ); + return seed; + } + }; +# endif
/*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const & d3D12FenceSubmitInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.waitSemaphoreValuesCount ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pWaitSemaphoreValues ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.signalSemaphoreValuesCount ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pSignalSemaphoreValues ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const & debugMarkerMarkerInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerMarkerInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerMarkerInfoEXT.pNext ); + for ( const char * p = debugMarkerMarkerInfoEXT.pMarkerName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + for ( size_t i = 0; i < 4; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerMarkerInfoEXT.color[i] ); + } + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const & debugMarkerObjectNameInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.objectType ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectNameInfoEXT.object ); + for ( const char * p = debugMarkerObjectNameInfoEXT.pObjectName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const & debugMarkerObjectTagInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.objectType ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.object ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.tagName ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.tagSize ); + VULKAN_HPP_HASH_COMBINE( seed, debugMarkerObjectTagInfoEXT.pTag ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & debugReportCallbackCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.pfnCallback ); + VULKAN_HPP_HASH_COMBINE( seed, debugReportCallbackCreateInfoEXT.pUserData ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const & debugUtilsLabelEXT ) const VULKAN_HPP_NOEXCEPT + { +
std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsLabelEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsLabelEXT.pNext ); + for ( const char * p = debugUtilsLabelEXT.pLabelName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + for ( size_t i = 0; i < 4; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsLabelEXT.color[i] ); + } + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const & debugUtilsObjectNameInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.objectType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectNameInfoEXT.objectHandle ); + for ( const char * p = debugUtilsObjectNameInfoEXT.pObjectName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const & debugUtilsMessengerCallbackDataEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.flags ); + for ( const char * p = debugUtilsMessengerCallbackDataEXT.pMessageIdName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.messageIdNumber ); + for ( const char * p = debugUtilsMessengerCallbackDataEXT.pMessage; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.queueLabelCount ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pQueueLabels ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.cmdBufLabelCount ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pCmdBufLabels ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.objectCount ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCallbackDataEXT.pObjects ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & debugUtilsMessengerCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.messageSeverity ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.messageType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.pfnUserCallback ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsMessengerCreateInfoEXT.pUserData ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const & debugUtilsObjectTagInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed,
debugUtilsObjectTagInfoEXT.objectType ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.objectHandle ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.tagName ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.tagSize ); + VULKAN_HPP_HASH_COMBINE( seed, debugUtilsObjectTagInfoEXT.pTag ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV const & decompressMemoryRegionNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.srcAddress ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.dstAddress ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.compressedSize ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.decompressedSize ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.decompressionMethod ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV> + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const & dedicatedAllocationBufferCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationBufferCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationBufferCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationBufferCreateInfoNV.dedicatedAllocation ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const & dedicatedAllocationImageCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationImageCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationImageCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationImageCreateInfoNV.dedicatedAllocation ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV> + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const & dedicatedAllocationMemoryAllocateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.image ); + VULKAN_HPP_HASH_COMBINE( seed, dedicatedAllocationMemoryAllocateInfoNV.buffer ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::MemoryBarrier2> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryBarrier2 const & memoryBarrier2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.srcStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.dstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier2.dstAccessMask ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::ImageSubresourceRange> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & imageSubresourceRange ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.aspectMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.baseMipLevel ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.levelCount ); + VULKAN_HPP_HASH_COMBINE( seed,
imageSubresourceRange.baseArrayLayer ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresourceRange.layerCount ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 const & imageMemoryBarrier2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.srcStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.dstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.dstAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.oldLayout ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.newLayout ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.srcQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.dstQueueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.image ); + VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier2.subresourceRange ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DependencyInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DependencyInfo const & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.dependencyFlags ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.memoryBarrierCount ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pMemoryBarriers ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.bufferMemoryBarrierCount ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pBufferMemoryBarriers ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.imageMemoryBarrierCount ); + VULKAN_HPP_HASH_COMBINE( seed, dependencyInfo.pImageMemoryBarriers ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT const & depthBiasInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, depthBiasInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasInfoEXT.depthBiasConstantFactor ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasInfoEXT.depthBiasClamp ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasInfoEXT.depthBiasSlopeFactor ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT const & depthBiasRepresentationInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, depthBiasRepresentationInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasRepresentationInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasRepresentationInfoEXT.depthBiasRepresentation ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasRepresentationInfoEXT.depthBiasExact ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT const & descriptorAddressInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.address ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.range ); +
VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.format ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT const & descriptorBufferBindingInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.address ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.usage ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT const & + descriptorBufferBindingPushDescriptorBufferHandleEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingPushDescriptorBufferHandleEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingPushDescriptorBufferHandleEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingPushDescriptorBufferHandleEXT.buffer ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const & descriptorBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferInfo.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferInfo.offset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferInfo.range ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorImageInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorImageInfo const & descriptorImageInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorImageInfo.sampler ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorImageInfo.imageView ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorImageInfo.imageLayout ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorPoolSize> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPoolSize const & descriptorPoolSize ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolSize.type ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolSize.descriptorCount ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & descriptorPoolCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.maxSets ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.poolSizeCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolCreateInfo.pPoolSizes ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo const & descriptorPoolInlineUniformBlockCreateInfo ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolInlineUniformBlockCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolInlineUniformBlockCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorPoolInlineUniformBlockCreateInfo.maxInlineUniformBlockBindings ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo> + { + std::size_t
operator()( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & descriptorSetAllocateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.descriptorPool ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.descriptorSetCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetAllocateInfo.pSetLayouts ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE const & descriptorSetBindingReferenceVALVE ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.descriptorSetLayout ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetBindingReferenceVALVE.binding ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const & descriptorSetLayoutBinding ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.binding ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.descriptorType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.descriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBinding.pImmutableSamplers ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo> + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const & descriptorSetLayoutBindingFlagsCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.bindingCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutBindingFlagsCreateInfo.pBindingFlags ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & descriptorSetLayoutCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.bindingCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutCreateInfo.pBindings ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE> + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE const & descriptorSetLayoutHostMappingInfoVALVE ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.descriptorOffset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutHostMappingInfoVALVE.descriptorSize ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport> +
{ + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const & descriptorSetLayoutSupport ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutSupport.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutSupport.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetLayoutSupport.supported ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const & descriptorSetVariableDescriptorCountAllocateInfo ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.descriptorSetCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountAllocateInfo.pDescriptorCounts ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const & descriptorSetVariableDescriptorCountLayoutSupport ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountLayoutSupport.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountLayoutSupport.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorSetVariableDescriptorCountLayoutSupport.maxVariableDescriptorCount ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const & descriptorUpdateTemplateEntry ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.dstBinding ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.dstArrayElement ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.descriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.descriptorType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.offset ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateEntry.stride ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & descriptorUpdateTemplateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.descriptorUpdateEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pDescriptorUpdateEntries ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.templateType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.descriptorSetLayout ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.pipelineLayout ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorUpdateTemplateCreateInfo.set ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT const &
deviceAddressBindingCallbackDataEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.baseAddress ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.size ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.bindingType ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements const & deviceBufferMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceBufferMemoryRequirements.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceBufferMemoryRequirements.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceBufferMemoryRequirements.pCreateInfo ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const & deviceQueueCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.queueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.queueCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueCreateInfo.pQueuePriorities ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & physicalDeviceFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.robustBufferAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.fullDrawIndexUint32 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.imageCubeArray ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.independentBlend ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.geometryShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.tessellationShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sampleRateShading ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.dualSrcBlend ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.logicOp ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.multiDrawIndirect ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.drawIndirectFirstInstance ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.depthClamp ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.depthBiasClamp ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.fillModeNonSolid ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.depthBounds ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.wideLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.largePoints ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.alphaToOne ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.multiViewport ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.samplerAnisotropy ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.textureCompressionETC2 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.textureCompressionASTC_LDR ); + VULKAN_HPP_HASH_COMBINE(
seed, physicalDeviceFeatures.textureCompressionBC ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.occlusionQueryPrecise ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.pipelineStatisticsQuery ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.vertexPipelineStoresAndAtomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.fragmentStoresAndAtomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderTessellationAndGeometryPointSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderImageGatherExtended ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageExtendedFormats ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageMultisample ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageReadWithoutFormat ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageWriteWithoutFormat ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderUniformBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderSampledImageArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderStorageImageArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderClipDistance ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderCullDistance ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderFloat64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderInt64 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderInt16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderResourceResidency ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.shaderResourceMinLod ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseBinding ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyImage2D ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyImage3D ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency2Samples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency4Samples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency8Samples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidency16Samples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.sparseResidencyAliased ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.variableMultisampleRate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures.inheritedQueries ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceCreateInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & deviceCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.queueCreateInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.pQueueCreateInfos ); + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.enabledLayerCount ); + for ( size_t i = 0; i < deviceCreateInfo.enabledLayerCount; ++i ) + { + for ( const char * p = deviceCreateInfo.ppEnabledLayerNames[i]; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE(
seed, *p ); + } + } + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.enabledExtensionCount ); + for ( size_t i = 0; i < deviceCreateInfo.enabledExtensionCount; ++i ) + { + for ( const char * p = deviceCreateInfo.ppEnabledExtensionNames[i]; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + } + VULKAN_HPP_HASH_COMBINE( seed, deviceCreateInfo.pEnabledFeatures ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT> + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const & deviceDeviceMemoryReportCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.pfnUserCallback ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDeviceMemoryReportCreateInfoEXT.pUserData ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const & deviceDiagnosticsConfigCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceDiagnosticsConfigCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDiagnosticsConfigCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceDiagnosticsConfigCreateInfoNV.flags ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceEventInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceEventInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceEventInfoEXT.deviceEvent ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT const & deviceFaultAddressInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.addressType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.reportedAddress ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.addressPrecision ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT const & deviceFaultCountsEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.addressInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.vendorInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.vendorBinarySize ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT const & deviceFaultVendorInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.description[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.vendorFaultCode ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.vendorFaultData ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT> + { + std::size_t operator()(
VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT const & deviceFaultInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pNext ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.description[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pAddressInfos ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pVendorInfos ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pVendorBinaryData ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT const & deviceFaultVendorBinaryHeaderVersionOneEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.headerSize ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.headerVersion ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.vendorID ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.deviceID ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.driverVersion ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.pipelineCacheUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.applicationNameOffset ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.applicationVersion ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.engineNameOffset ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.engineVersion ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.apiVersion ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const & deviceGroupBindSparseInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.resourceDeviceIndex ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupBindSparseInfo.memoryDeviceIndex ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const & deviceGroupCommandBufferBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupCommandBufferBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupCommandBufferBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupCommandBufferBeginInfo.deviceMask ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const & deviceGroupDeviceCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.physicalDeviceCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupDeviceCreateInfo.pPhysicalDevices ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR> + { + std::size_t operator()(
VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const & deviceGroupPresentCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.pNext ); + for ( size_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.presentMask[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentCapabilitiesKHR.modes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const & deviceGroupPresentInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.pDeviceMasks ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupPresentInfoKHR.mode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const & deviceGroupRenderPassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.deviceMask ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.deviceRenderAreaCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupRenderPassBeginInfo.pDeviceRenderAreas ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const & deviceGroupSubmitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.waitSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pWaitSemaphoreDeviceIndices ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.commandBufferCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pCommandBufferDeviceMasks ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.signalSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSubmitInfo.pSignalSemaphoreDeviceIndices ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const & deviceGroupSwapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSwapchainCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSwapchainCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceGroupSwapchainCreateInfoKHR.modes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & imageCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.imageType ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.format ); + 
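+      // Note: pointer members such as pNext here and pQueueFamilyIndices below are
+      // folded in by address, not by pointee contents, so two ImageCreateInfo values
+      // describing the same image through different (but equal) arrays hash differently.
+      // That stays consistent with vulkan-hpp's memberwise operator==, which compares
+      // the pointers themselves as well.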
VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.extent ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.mipLevels ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.arrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.samples ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.tiling ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.usage ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.sharingMode ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.queueFamilyIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.pQueueFamilyIndices ); + VULKAN_HPP_HASH_COMBINE( seed, imageCreateInfo.initialLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements const & deviceImageMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.pCreateInfo ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageMemoryRequirements.planeAspect ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource2KHR const & imageSubresource2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2KHR.imageSubresource ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR const & deviceImageSubresourceInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.pCreateInfo ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.pSubresource ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const & deviceMemoryOpaqueCaptureAddressInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOpaqueCaptureAddressInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOpaqueCaptureAddressInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOpaqueCaptureAddressInfo.memory ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const & deviceMemoryOverallocationCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOverallocationCreateInfoAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOverallocationCreateInfoAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryOverallocationCreateInfoAMD.overallocationBehavior ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT const & deviceMemoryReportCallbackDataEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
deviceMemoryReportCallbackDataEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.type ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.memoryObjectId ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.size ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.objectType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.objectHandle ); + VULKAN_HPP_HASH_COMBINE( seed, deviceMemoryReportCallbackDataEXT.heapIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo const & devicePrivateDataCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, devicePrivateDataCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, devicePrivateDataCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, devicePrivateDataCreateInfo.privateDataSlotRequestCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR const & deviceQueueGlobalPriorityCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.globalPriority ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & deviceQueueInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.queueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueInfo2.queueIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM const & deviceQueueShaderCoreControlCreateInfoARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueShaderCoreControlCreateInfoARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueShaderCoreControlCreateInfoARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueShaderCoreControlCreateInfoARM.shaderCoreCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG const & directDriverLoadingInfoLUNARG ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.sType ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.flags ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.pfnGetInstanceProcAddr ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG const & directDriverLoadingListLUNARG ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.sType ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.mode ); + VULKAN_HPP_HASH_COMBINE( seed, 
directDriverLoadingListLUNARG.driverCount ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.pDrivers ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & directFBSurfaceCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.dfb ); + VULKAN_HPP_HASH_COMBINE( seed, directFBSurfaceCreateInfoEXT.surface ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const & dispatchIndirectCommand ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, dispatchIndirectCommand.x ); + VULKAN_HPP_HASH_COMBINE( seed, dispatchIndirectCommand.y ); + VULKAN_HPP_HASH_COMBINE( seed, dispatchIndirectCommand.z ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayEventInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayEventInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayEventInfoEXT.displayEvent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & displayModeParametersKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayModeParametersKHR.visibleRegion ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeParametersKHR.refreshRate ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & displayModeCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeCreateInfoKHR.parameters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const & displayModePropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayModePropertiesKHR.displayMode ); + VULKAN_HPP_HASH_COMBINE( seed, displayModePropertiesKHR.parameters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const & displayModeProperties2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayModeProperties2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeProperties2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeProperties2KHR.displayModeProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const & displayNativeHdrSurfaceCapabilitiesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
displayNativeHdrSurfaceCapabilitiesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayNativeHdrSurfaceCapabilitiesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayNativeHdrSurfaceCapabilitiesAMD.localDimmingSupport ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const & displayPlaneCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.supportedAlpha ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minSrcPosition ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxSrcPosition ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minSrcExtent ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxSrcExtent ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minDstPosition ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxDstPosition ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.minDstExtent ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilitiesKHR.maxDstExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const & displayPlaneCapabilities2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilities2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilities2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneCapabilities2KHR.capabilities ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const & displayPlaneInfo2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.mode ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneInfo2KHR.planeIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const & displayPlanePropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPlanePropertiesKHR.currentDisplay ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlanePropertiesKHR.currentStackIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const & displayPlaneProperties2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneProperties2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneProperties2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayPlaneProperties2KHR.displayPlaneProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const & displayPowerInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayPowerInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayPowerInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayPowerInfoEXT.powerState ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const & displayPresentInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
displayPresentInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.srcRect );
+      VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.dstRect );
+      VULKAN_HPP_HASH_COMBINE( seed, displayPresentInfoKHR.persistent );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const & displayPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.display );
+      for ( const char * p = displayPropertiesKHR.displayName; *p != '\0'; ++p )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, *p );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.physicalDimensions );
+      VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.physicalResolution );
+      VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.supportedTransforms );
+      VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.planeReorderPossible );
+      VULKAN_HPP_HASH_COMBINE( seed, displayPropertiesKHR.persistentContent );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const & displayProperties2KHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, displayProperties2KHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, displayProperties2KHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, displayProperties2KHR.displayProperties );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & displaySurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.displayMode );
+      VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.planeIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.planeStackIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.transform );
+      VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.globalAlpha );
+      VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.alphaMode );
+      VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceCreateInfoKHR.imageExtent );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const & drawIndexedIndirectCommand ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.indexCount );
+      VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.instanceCount );
+      VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.firstIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.vertexOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, drawIndexedIndirectCommand.firstInstance );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawIndirectCommand const & drawIndirectCommand ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.vertexCount );
+      VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.instanceCount );
+      VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.firstVertex );
+      VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCommand.firstInstance );
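+      // Note: C-string members, e.g. DisplayPropertiesKHR::displayName above or the
+      // ppEnabledLayerNames / ppEnabledExtensionNames arrays of DeviceCreateInfo, are
+      // hashed character by character up to the terminating '\0'. The loops dereference
+      // the pointer unconditionally, so hashing a struct whose string member is null is
+      // undefined behaviour; only fully populated structs should be hashed.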
return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT const & drawMeshTasksIndirectCommandEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandEXT.groupCountX ); + VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandEXT.groupCountY ); + VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandEXT.groupCountZ ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const & drawMeshTasksIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandNV.taskCount ); + VULKAN_HPP_HASH_COMBINE( seed, drawMeshTasksIndirectCommandNV.firstTask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT const & drmFormatModifierProperties2EXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierProperties2EXT.drmFormatModifier ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierProperties2EXT.drmFormatModifierPlaneCount ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierProperties2EXT.drmFormatModifierTilingFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const & drmFormatModifierPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesEXT.drmFormatModifier ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesEXT.drmFormatModifierPlaneCount ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesEXT.drmFormatModifierTilingFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT const & drmFormatModifierPropertiesList2EXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.drmFormatModifierCount ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesList2EXT.pDrmFormatModifierProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const & drmFormatModifierPropertiesListEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.drmFormatModifierCount ); + VULKAN_HPP_HASH_COMBINE( seed, drmFormatModifierPropertiesListEXT.pDrmFormatModifierProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::EventCreateInfo const & eventCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, eventCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, eventCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, eventCreateInfo.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & pipelineLibraryCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.libraryCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pLibraries ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & executionGraphPipelineCreateInfoAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.flags ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.stageCount ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pStages ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pLibraryInfo ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.layout ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.basePipelineHandle ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.basePipelineIndex ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX const & executionGraphPipelineScratchSizeAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.size ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const & exportFenceCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.handleTypes ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const & exportFenceWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.pAttributes ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.dwAccess ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.name ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const & exportMemoryAllocateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfo.pNext ); + 
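+      // Note: the surrounding #if defined( VK_USE_PLATFORM_WIN32_KHR ) /
+      // VK_USE_PLATFORM_METAL_EXT / VK_ENABLE_BETA_EXTENSIONS blocks mean these
+      // specializations only exist when the matching platform or beta headers were
+      // requested. A minimal sketch (hypothetical variable `info`):
+      //
+      //   #define VK_USE_PLATFORM_WIN32_KHR   // before including the vulkan headers
+      //   std::size_t h = std::hash<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR>{}( info );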
VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfo.handleTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const & exportMemoryAllocateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryAllocateInfoNV.handleTypes ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const & exportMemoryWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.pAttributes ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.dwAccess ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoKHR.name ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const & exportMemoryWin32HandleInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.pAttributes ); + VULKAN_HPP_HASH_COMBINE( seed, exportMemoryWin32HandleInfoNV.dwAccess ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT const & exportMetalBufferInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.memory ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalBufferInfoEXT.mtlBuffer ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT const & exportMetalCommandQueueInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.queue ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalCommandQueueInfoEXT.mtlCommandQueue ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT const & exportMetalDeviceInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalDeviceInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalDeviceInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalDeviceInfoEXT.mtlDevice ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( 
VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT const & exportMetalIOSurfaceInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.image ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalIOSurfaceInfoEXT.ioSurface ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT const & exportMetalObjectCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectCreateInfoEXT.exportObjectType ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT const & exportMetalObjectsInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectsInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalObjectsInfoEXT.pNext ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT const & exportMetalSharedEventInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.event ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalSharedEventInfoEXT.mtlSharedEvent ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT const & exportMetalTextureInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.image ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.imageView ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.bufferView ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.plane ); + VULKAN_HPP_HASH_COMBINE( seed, exportMetalTextureInfoEXT.mtlTexture ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const & exportSemaphoreCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreCreateInfo.handleTypes ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + 
std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const & exportSemaphoreWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.pAttributes ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.dwAccess ); + VULKAN_HPP_HASH_COMBINE( seed, exportSemaphoreWin32HandleInfoKHR.name ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExtensionProperties const & extensionProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, extensionProperties.extensionName[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, extensionProperties.specVersion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const & externalMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryProperties.externalMemoryFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryProperties.exportFromImportedHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalMemoryProperties.compatibleHandleTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalBufferProperties const & externalBufferProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalBufferProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalBufferProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalBufferProperties.externalMemoryProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalFenceProperties const & externalFenceProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.exportFromImportedHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.compatibleHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalFenceProperties.externalFenceFeatures ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const & externalFormatANDROID ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalFormatANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalFormatANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalFormatANDROID.externalFormat ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalFormatQNX const & externalFormatQNX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalFormatQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalFormatQNX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalFormatQNX.externalFormat ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + 
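+  // Note: fixed-size char arrays such as ExtensionProperties::extensionName above are
+  // hashed over their full VK_MAX_EXTENSION_NAME_SIZE extent, trailing padding included,
+  // unlike the '\0'-terminated heap strings. A minimal de-duplication sketch, assuming
+  // the memberwise operator== that vulkan.hpp defines for these structs (hypothetical
+  // names):
+  //
+  //   std::unordered_set<VULKAN_HPP_NAMESPACE::ExtensionProperties> seen;
+  //   for ( auto const & ep : physicalDevice.enumerateDeviceExtensionProperties() )
+  //     seen.insert( ep );   // keyed through these std::hash specializations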
template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const & externalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatProperties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatProperties.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatProperties.externalMemoryProperties );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageFormatProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatProperties const & imageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxExtent );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxMipLevels );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxArrayLayers );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.sampleCounts );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties.maxResourceSize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const & externalImageFormatPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.imageFormatProperties );
+      VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.externalMemoryFeatures );
+      VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.exportFromImportedHandleTypes );
+      VULKAN_HPP_HASH_COMBINE( seed, externalImageFormatPropertiesNV.compatibleHandleTypes );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT const & externalMemoryAcquireUnmodifiedEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, externalMemoryAcquireUnmodifiedEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, externalMemoryAcquireUnmodifiedEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, externalMemoryAcquireUnmodifiedEXT.acquireUnmodifiedMemory );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const & externalMemoryBufferCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, externalMemoryBufferCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, externalMemoryBufferCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, externalMemoryBufferCreateInfo.handleTypes );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const & externalMemoryImageCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfo.handleTypes );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const & externalMemoryImageCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, externalMemoryImageCreateInfoNV.handleTypes );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>
+  {
+    std::size_t operator()(
VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const & externalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.exportFromImportedHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.compatibleHandleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, externalSemaphoreProperties.externalSemaphoreFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & fenceCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, fenceCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, fenceCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, fenceCreateInfo.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const & fenceGetFdInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.fence ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetFdInfoKHR.handleType ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const & fenceGetWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.fence ); + VULKAN_HPP_HASH_COMBINE( seed, fenceGetWin32HandleInfoKHR.handleType ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const & filterCubicImageViewImageFormatPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.filterCubic ); + VULKAN_HPP_HASH_COMBINE( seed, filterCubicImageViewImageFormatPropertiesEXT.filterCubicMinmax ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FormatProperties const & formatProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, formatProperties.linearTilingFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties.optimalTilingFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties.bufferFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FormatProperties2 const & formatProperties2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, formatProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties2.formatProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FormatProperties3 const & 
formatProperties3 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.sType ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.linearTilingFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.optimalTilingFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, formatProperties3.bufferFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR const & fragmentShadingRateAttachmentInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.pFragmentShadingRateAttachment ); + VULKAN_HPP_HASH_COMBINE( seed, fragmentShadingRateAttachmentInfoKHR.shadingRateAttachmentTexelSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FrameBoundaryEXT const & frameBoundaryEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.frameID ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.imageCount ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.pImages ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.bufferCount ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.pBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.tagName ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.tagSize ); + VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.pTag ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const & framebufferAttachmentImageInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.usage ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.width ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.height ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.layerCount ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.viewFormatCount ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentImageInfo.pViewFormats ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const & framebufferAttachmentsCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.attachmentImageInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferAttachmentsCreateInfo.pAttachmentImageInfos ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & framebufferCreateInfo ) const 
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.renderPass ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.attachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.pAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.width ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.height ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferCreateInfo.layers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const & framebufferMixedSamplesCombinationNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.coverageReductionMode ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.rasterizationSamples ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.depthStencilSamples ); + VULKAN_HPP_HASH_COMBINE( seed, framebufferMixedSamplesCombinationNV.colorSamples ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const & indirectCommandsStreamNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsStreamNV.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsStreamNV.offset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const & generatedCommandsInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pipeline ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.indirectCommandsLayout ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.streamCount ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.pStreams ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesCount ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.preprocessBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.preprocessOffset ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.preprocessSize ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesCountBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesCountOffset ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesIndexBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoNV.sequencesIndexOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV const & generatedCommandsMemoryRequirementsInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.pNext ); + 
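+      // Note: VULKAN_HPP_HASH_COMBINE is the boost-style mixing step defined near the
+      // top of this header, roughly:
+      //
+      //   #define VULKAN_HPP_HASH_COMBINE( seed, value ) \
+      //     seed ^= std::hash<std::decay<decltype( value )>::type>{}( value ) + 0x9e3779b9 + ( seed << 6 ) + ( seed >> 2 )
+      //
+      // i.e. each member is run through std::hash and folded into the running seed.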
VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.pipeline ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.indirectCommandsLayout ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoNV.maxSequencesCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV const & latencyTimingsFrameReportNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.presentID ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.inputSampleTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.simStartTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.simEndTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.renderSubmitStartTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.renderSubmitEndTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.presentStartTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.presentEndTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.driverStartTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.driverEndTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.osRenderQueueStartTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.osRenderQueueEndTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.gpuRenderStartTimeUs ); + VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.gpuRenderEndTimeUs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV const & getLatencyMarkerInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, getLatencyMarkerInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, getLatencyMarkerInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, getLatencyMarkerInfoNV.timingCount ); + VULKAN_HPP_HASH_COMBINE( seed, getLatencyMarkerInfoNV.pTimings ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const & vertexInputBindingDescription ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription.stride ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription.inputRate ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const & vertexInputAttributeDescription ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.location ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.format ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription.offset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const & 
pipelineVertexInputStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.vertexBindingDescriptionCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.pVertexBindingDescriptions ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.vertexAttributeDescriptionCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputStateCreateInfo.pVertexAttributeDescriptions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const & pipelineInputAssemblyStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.topology ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInputAssemblyStateCreateInfo.primitiveRestartEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const & pipelineTessellationStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationStateCreateInfo.patchControlPoints ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const & pipelineViewportStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.viewportCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.pViewports ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.scissorCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportStateCreateInfo.pScissors ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const & pipelineRasterizationStateCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthClampEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.rasterizerDiscardEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.polygonMode ); + VULKAN_HPP_HASH_COMBINE( seed, 
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.frontFace );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasConstantFactor );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasClamp );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.depthBiasSlopeFactor );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateCreateInfo.lineWidth );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const & pipelineMultisampleStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.rasterizationSamples );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.sampleShadingEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.minSampleShading );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.pSampleMask );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.alphaToCoverageEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineMultisampleStateCreateInfo.alphaToOneEnable );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::StencilOpState>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::StencilOpState const & stencilOpState ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.failOp );
+      VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.passOp );
+      VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.depthFailOp );
+      VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.compareOp );
+      VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.compareMask );
+      VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.writeMask );
+      VULKAN_HPP_HASH_COMBINE( seed, stencilOpState.reference );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const & pipelineDepthStencilStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthTestEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthWriteEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthCompareOp );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.depthBoundsTestEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.stencilTestEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.front );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.back );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.minDepthBounds );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDepthStencilStateCreateInfo.maxDepthBounds );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const & pipelineColorBlendAttachmentState ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.blendEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.srcColorBlendFactor );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.dstColorBlendFactor );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.colorBlendOp );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.srcAlphaBlendFactor );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.dstAlphaBlendFactor );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.alphaBlendOp );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAttachmentState.colorWriteMask );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const & pipelineColorBlendStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.logicOpEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.logicOp );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.attachmentCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.pAttachments );
+      for ( size_t i = 0; i < 4; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendStateCreateInfo.blendConstants[i] );
+      }
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const & pipelineDynamicStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.dynamicStateCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDynamicStateCreateInfo.pDynamicStates );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & graphicsPipelineCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.stageCount );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pStages );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pVertexInputState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pInputAssemblyState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pTessellationState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pViewportState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pRasterizationState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pMultisampleState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pDepthStencilState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pColorBlendState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.pDynamicState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.layout );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.renderPass );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.subpass );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.basePipelineHandle );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineCreateInfo.basePipelineIndex );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT const & graphicsPipelineLibraryCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineLibraryCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineLibraryCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineLibraryCreateInfoEXT.flags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV const & graphicsShaderGroupCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.stageCount );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pStages );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pVertexInputState );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsShaderGroupCreateInfoNV.pTessellationState );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV const & graphicsPipelineShaderGroupsCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.groupCount );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pGroups );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pipelineCount );
+      VULKAN_HPP_HASH_COMBINE( seed, graphicsPipelineShaderGroupsCreateInfoNV.pPipelines );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::XYColorEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::XYColorEXT const & xYColorEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, xYColorEXT.x );
+      VULKAN_HPP_HASH_COMBINE( seed, xYColorEXT.y );
+      return seed;
+    }
+  };
+
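+  // Every specialization in this header follows the same recipe: start from seed = 0
+  // and fold each member in with VULKAN_HPP_HASH_COMBINE, a boost::hash_combine-style
+  // mixer that vulkan.hpp defines roughly as (sketch, not the verbatim macro):
+  //
+  //   seed ^= std::hash<std::decay<decltype( value )>::type>{}( value )
+  //           + 0x9e3779b9 + ( seed << 6 ) + ( seed >> 2 );
+  //
+  // Note that pointer members ( pNext, pStages, ... ) are hashed by address, not by
+  // pointee, so two logically identical create-info chains can still hash differently.
+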
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::HdrMetadataEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::HdrMetadataEXT const & hdrMetadataEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.displayPrimaryRed );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.displayPrimaryGreen );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.displayPrimaryBlue );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.whitePoint );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.maxLuminance );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.minLuminance );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.maxContentLightLevel );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrMetadataEXT.maxFrameAverageLightLevel );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & headlessSurfaceCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, headlessSurfaceCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, headlessSurfaceCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, headlessSurfaceCreateInfoEXT.flags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT const & hostImageCopyDevicePerformanceQueryEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.optimalDeviceAccess );
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.identicalMemoryLayout );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT const & hostImageLayoutTransitionInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.image );
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.oldLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.newLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.subresourceRange );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_IOS_MVK )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & iOSSurfaceCreateInfoMVK ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, iOSSurfaceCreateInfoMVK.pView );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageBlit>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageBlit const & imageBlit ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageBlit.srcSubresource );
+      for ( size_t i = 0; i < 2; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, imageBlit.srcOffsets[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, imageBlit.dstSubresource );
+      for ( size_t i = 0; i < 2; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, imageBlit.dstOffsets[i] );
+      }
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT const & imageCaptureDescriptorDataInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageCaptureDescriptorDataInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageCaptureDescriptorDataInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageCaptureDescriptorDataInfoEXT.image );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT const & imageCompressionControlEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.compressionControlPlaneCount );
+      VULKAN_HPP_HASH_COMBINE( seed, imageCompressionControlEXT.pFixedRateFlags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT const & imageCompressionPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.imageCompressionFlags );
+      VULKAN_HPP_HASH_COMBINE( seed, imageCompressionPropertiesEXT.imageCompressionFixedRateFlags );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA const & imageFormatConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.imageCreateInfo );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.requiredFormatFeatures );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.sysmemPixelFormat );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.colorSpaceCount );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatConstraintsInfoFUCHSIA.pColorSpaces );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA const & imageConstraintsInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.formatConstraintsCount );
+      VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.pFormatConstraints );
+      VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.bufferCollectionConstraints );
+      VULKAN_HPP_HASH_COMBINE( seed, imageConstraintsInfoFUCHSIA.flags );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageCopy>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCopy const & imageCopy ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageCopy.srcSubresource );
+      VULKAN_HPP_HASH_COMBINE( seed, imageCopy.srcOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, imageCopy.dstSubresource );
+      VULKAN_HPP_HASH_COMBINE( seed, imageCopy.dstOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, imageCopy.extent );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::SubresourceLayout>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout const & subresourceLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.offset );
+      VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.size );
+      VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.rowPitch );
+      VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.arrayPitch );
+      VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout.depthPitch );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const & imageDrmFormatModifierExplicitCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.drmFormatModifier );
+      VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.drmFormatModifierPlaneCount );
+      VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierExplicitCreateInfoEXT.pPlaneLayouts );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const & imageDrmFormatModifierListCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.drmFormatModifierCount );
+      VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierListCreateInfoEXT.pDrmFormatModifiers );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const & imageDrmFormatModifierPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageDrmFormatModifierPropertiesEXT.drmFormatModifier );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const & imageFormatListCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.viewFormatCount );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatListCreateInfo.pViewFormats );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const & imageFormatProperties2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageFormatProperties2.imageFormatProperties );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const & imageMemoryBarrier ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.srcAccessMask );
+      VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.dstAccessMask );
+      VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.oldLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.newLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.srcQueueFamilyIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.dstQueueFamilyIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.image );
+      VULKAN_HPP_HASH_COMBINE( seed, imageMemoryBarrier.subresourceRange );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const & imageMemoryRequirementsInfo2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageMemoryRequirementsInfo2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageMemoryRequirementsInfo2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageMemoryRequirementsInfo2.image );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & imagePipeSurfaceCreateInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, imagePipeSurfaceCreateInfoFUCHSIA.imagePipeHandle );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const & imagePlaneMemoryRequirementsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imagePlaneMemoryRequirementsInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imagePlaneMemoryRequirementsInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imagePlaneMemoryRequirementsInfo.planeAspect );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageResolve>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageResolve const & imageResolve ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageResolve.srcSubresource );
+      VULKAN_HPP_HASH_COMBINE( seed, imageResolve.srcOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, imageResolve.dstSubresource );
+      VULKAN_HPP_HASH_COMBINE( seed, imageResolve.dstOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, imageResolve.extent );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageResolve2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageResolve2 const & imageResolve2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.srcSubresource );
+      VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.srcOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.dstSubresource );
+      VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.dstOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, imageResolve2.extent );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const & imageSparseMemoryRequirementsInfo2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageSparseMemoryRequirementsInfo2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageSparseMemoryRequirementsInfo2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageSparseMemoryRequirementsInfo2.image );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const & imageStencilUsageCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageStencilUsageCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageStencilUsageCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageStencilUsageCreateInfo.stencilUsage );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const & imageSwapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageSwapchainCreateInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageSwapchainCreateInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageSwapchainCreateInfoKHR.swapchain );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const & imageViewASTCDecodeModeEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewASTCDecodeModeEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewASTCDecodeModeEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewASTCDecodeModeEXT.decodeMode );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX const & imageViewAddressPropertiesNVX ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.deviceAddress );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewAddressPropertiesNVX.size );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT const & imageViewCaptureDescriptorDataInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewCaptureDescriptorDataInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewCaptureDescriptorDataInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewCaptureDescriptorDataInfoEXT.imageView );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & imageViewCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.image );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.viewType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.format );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.components );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewCreateInfo.subresourceRange );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const & imageViewHandleInfoNVX ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.imageView );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.descriptorType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewHandleInfoNVX.sampler );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT const & imageViewMinLodCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewMinLodCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewMinLodCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewMinLodCreateInfoEXT.minLod );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM const & imageViewSampleWeightCreateInfoQCOM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.filterCenter );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.filterSize );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewSampleWeightCreateInfoQCOM.numPhases );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT const & imageViewSlicedCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewSlicedCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewSlicedCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewSlicedCreateInfoEXT.sliceOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewSlicedCreateInfoEXT.sliceCount );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const & imageViewUsageCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewUsageCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewUsageCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, imageViewUsageCreateInfo.usage );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const & importAndroidHardwareBufferInfoANDROID ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importAndroidHardwareBufferInfoANDROID.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importAndroidHardwareBufferInfoANDROID.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importAndroidHardwareBufferInfoANDROID.buffer );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const & importFenceFdInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.fence );
+      VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.handleType );
+      VULKAN_HPP_HASH_COMBINE( seed, importFenceFdInfoKHR.fd );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const & importFenceWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.fence );
+      VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.handleType );
+      VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.handle );
+      VULKAN_HPP_HASH_COMBINE( seed, importFenceWin32HandleInfoKHR.name );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA const & importMemoryBufferCollectionFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.collection );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryBufferCollectionFUCHSIA.index );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const & importMemoryFdInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.handleType );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryFdInfoKHR.fd );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const & importMemoryHostPointerInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.handleType );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryHostPointerInfoEXT.pHostPointer );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const & importMemoryWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.handleType );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.handle );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoKHR.name );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const & importMemoryWin32HandleInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.handleType );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryWin32HandleInfoNV.handle );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA const & importMemoryZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.handleType );
+      VULKAN_HPP_HASH_COMBINE( seed, importMemoryZirconHandleInfoFUCHSIA.handle );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT const & importMetalBufferInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importMetalBufferInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importMetalBufferInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importMetalBufferInfoEXT.mtlBuffer );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT const & importMetalIOSurfaceInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importMetalIOSurfaceInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importMetalIOSurfaceInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importMetalIOSurfaceInfoEXT.ioSurface );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT const & importMetalSharedEventInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importMetalSharedEventInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importMetalSharedEventInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importMetalSharedEventInfoEXT.mtlSharedEvent );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT const & importMetalTextureInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.plane );
+      VULKAN_HPP_HASH_COMBINE( seed, importMetalTextureInfoEXT.mtlTexture );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+# if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX const & importScreenBufferInfoQNX ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importScreenBufferInfoQNX.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importScreenBufferInfoQNX.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importScreenBufferInfoQNX.buffer );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const & importSemaphoreFdInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.semaphore );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.handleType );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreFdInfoKHR.fd );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const & importSemaphoreWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.semaphore );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.handleType );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.handle );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreWin32HandleInfoKHR.name );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA const & importSemaphoreZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.semaphore );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.handleType );
+      VULKAN_HPP_HASH_COMBINE( seed, importSemaphoreZirconHandleInfoFUCHSIA.zirconHandle );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV const & indirectCommandsLayoutTokenNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.tokenType );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.stream );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.offset );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.vertexBindingUnit );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.vertexDynamicStride );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantPipelineLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantShaderStageFlags );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pushconstantSize );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.indirectStateFlags );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.indexTypeCount );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pIndexTypes );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutTokenNV.pIndexTypeValues );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & indirectCommandsLayoutCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pipelineBindPoint );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.tokenCount );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pTokens );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.streamCount );
+      VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoNV.pStreamStrides );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const & initializePerformanceApiInfoINTEL ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.pUserData );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const & inputAttachmentAspectReference ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, inputAttachmentAspectReference.subpass );
+      VULKAN_HPP_HASH_COMBINE( seed, inputAttachmentAspectReference.inputAttachmentIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, inputAttachmentAspectReference.aspectMask );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::InstanceCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & instanceCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.pApplicationInfo );
+      VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.enabledLayerCount );
+      for ( size_t i = 0; i < instanceCreateInfo.enabledLayerCount; ++i )
+      {
+        for ( const char * p = instanceCreateInfo.ppEnabledLayerNames[i]; *p != '\0'; ++p )
+        {
+          VULKAN_HPP_HASH_COMBINE( seed, *p );
+        }
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, instanceCreateInfo.enabledExtensionCount );
+      for ( size_t i = 0; i < instanceCreateInfo.enabledExtensionCount; ++i )
+      {
+        for ( const char * p = instanceCreateInfo.ppEnabledExtensionNames[i]; *p != '\0'; ++p )
+        {
+          VULKAN_HPP_HASH_COMBINE( seed, *p );
+        }
+      }
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::LatencySleepInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::LatencySleepInfoNV const & latencySleepInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, latencySleepInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, latencySleepInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, latencySleepInfoNV.signalSemaphore );
+      VULKAN_HPP_HASH_COMBINE( seed, latencySleepInfoNV.value );
+      return seed;
+    }
+  };
+
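+  // Minimal usage sketch (assumption: this header is included and the structs compare
+  // with the operator== that Vulkan-Hpp provides for them): the specializations let
+  // create-info structs act as keys in unordered containers, e.g. to deduplicate
+  // image views:
+  //
+  //   std::unordered_map<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo, VULKAN_HPP_NAMESPACE::ImageView> viewCache;
+  //   auto it = viewCache.find( createInfo );
+  //   if ( it == viewCache.end() )
+  //     it = viewCache.emplace( createInfo, device.createImageView( createInfo ) ).first;
+  //
+  // For InstanceCreateInfo above, the layer and extension name arrays are hashed
+  // character by character, so equal strings at different addresses hash alike.
+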
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV const & latencySleepModeInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, latencySleepModeInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, latencySleepModeInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, latencySleepModeInfoNV.lowLatencyMode );
+      VULKAN_HPP_HASH_COMBINE( seed, latencySleepModeInfoNV.lowLatencyBoost );
+      VULKAN_HPP_HASH_COMBINE( seed, latencySleepModeInfoNV.minimumIntervalUs );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::LatencySubmissionPresentIdNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::LatencySubmissionPresentIdNV const & latencySubmissionPresentIdNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, latencySubmissionPresentIdNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, latencySubmissionPresentIdNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, latencySubmissionPresentIdNV.presentID );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::LatencySurfaceCapabilitiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::LatencySurfaceCapabilitiesNV const & latencySurfaceCapabilitiesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, latencySurfaceCapabilitiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, latencySurfaceCapabilitiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, latencySurfaceCapabilitiesNV.presentModeCount );
+      VULKAN_HPP_HASH_COMBINE( seed, latencySurfaceCapabilitiesNV.pPresentModes );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::LayerProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::LayerProperties const & layerProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, layerProperties.layerName[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, layerProperties.specVersion );
+      VULKAN_HPP_HASH_COMBINE( seed, layerProperties.implementationVersion );
+      for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, layerProperties.description[i] );
+      }
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::LayerSettingEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::LayerSettingEXT const & layerSettingEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      for ( const char * p = layerSettingEXT.pLayerName; *p != '\0'; ++p )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, *p );
+      }
+      for ( const char * p = layerSettingEXT.pSettingName; *p != '\0'; ++p )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, *p );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, layerSettingEXT.type );
+      VULKAN_HPP_HASH_COMBINE( seed, layerSettingEXT.valueCount );
+      VULKAN_HPP_HASH_COMBINE( seed, layerSettingEXT.pValues );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::LayerSettingsCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::LayerSettingsCreateInfoEXT const & layerSettingsCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, layerSettingsCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, layerSettingsCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, layerSettingsCreateInfoEXT.settingCount );
+      VULKAN_HPP_HASH_COMBINE( seed, layerSettingsCreateInfoEXT.pSettings );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_MACOS_MVK )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & macOSSurfaceCreateInfoMVK ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, macOSSurfaceCreateInfoMVK.pView );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MappedMemoryRange>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MappedMemoryRange const & mappedMemoryRange ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.memory );
+      VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.offset );
+      VULKAN_HPP_HASH_COMBINE( seed, mappedMemoryRange.size );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const & memoryAllocateFlagsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateFlagsInfo.deviceMask );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & memoryAllocateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.allocationSize );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryAllocateInfo.memoryTypeIndex );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryBarrier>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryBarrier const & memoryBarrier ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.srcAccessMask );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryBarrier.dstAccessMask );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const & memoryDedicatedAllocateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.image );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedAllocateInfo.buffer );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const & memoryDedicatedRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.prefersDedicatedAllocation );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryDedicatedRequirements.requiresDedicatedAllocation );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const & memoryFdPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryFdPropertiesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryFdPropertiesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryFdPropertiesKHR.memoryTypeBits );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const & memoryGetAndroidHardwareBufferInfoANDROID ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetAndroidHardwareBufferInfoANDROID.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetAndroidHardwareBufferInfoANDROID.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetAndroidHardwareBufferInfoANDROID.memory );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const & memoryGetFdInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.memory );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetFdInfoKHR.handleType );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV const & memoryGetRemoteAddressInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.memory );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetRemoteAddressInfoNV.handleType );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const & memoryGetWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.memory );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetWin32HandleInfoKHR.handleType );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA const & memoryGetZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.memory );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryGetZirconHandleInfoFUCHSIA.handleType );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryHeap>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryHeap const & memoryHeap ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryHeap.size );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryHeap.flags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const & memoryHostPointerPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryHostPointerPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryHostPointerPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryHostPointerPropertiesEXT.memoryTypeBits );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR const & memoryMapInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfoKHR.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfoKHR.memory );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfoKHR.offset );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfoKHR.size );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const & memoryOpaqueCaptureAddressAllocateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryOpaqueCaptureAddressAllocateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryOpaqueCaptureAddressAllocateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryOpaqueCaptureAddressAllocateInfo.opaqueCaptureAddress );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const & memoryPriorityAllocateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryPriorityAllocateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryPriorityAllocateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryPriorityAllocateInfoEXT.priority );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryRequirements>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryRequirements const & memoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements.size );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements.alignment );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements.memoryTypeBits );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryRequirements2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryRequirements2 const & memoryRequirements2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryRequirements2.memoryRequirements );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryType>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryType const & memoryType ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryType.propertyFlags );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryType.heapIndex );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR const & memoryUnmapInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfoKHR.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfoKHR.memory );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const & memoryWin32HandlePropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryWin32HandlePropertiesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryWin32HandlePropertiesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryWin32HandlePropertiesKHR.memoryTypeBits );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA const & memoryZirconHandlePropertiesFUCHSIA ) const VULKAN_HPP_NOEXCEPT
VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA const & memoryZirconHandlePropertiesFUCHSIA ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, memoryZirconHandlePropertiesFUCHSIA.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryZirconHandlePropertiesFUCHSIA.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, memoryZirconHandlePropertiesFUCHSIA.memoryTypeBits );
+      return seed;
+    }
+  };
+#  endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#  if defined( VK_USE_PLATFORM_METAL_EXT )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & metalSurfaceCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, metalSurfaceCreateInfoEXT.pLayer );
+      return seed;
+    }
+  };
+#  endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT const & micromapBuildSizesInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.micromapSize );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.buildScratchSize );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.discardable );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & micromapCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.createFlags );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.buffer );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.offset );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.size );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.type );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.deviceAddress );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapTriangleEXT const & micromapTriangleEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.dataOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.subdivisionLevel );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.format );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT const & micromapVersionInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.pVersionData );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT const & multiDrawIndexedInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, multiDrawIndexedInfoEXT.firstIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, multiDrawIndexedInfoEXT.indexCount );
+      VULKAN_HPP_HASH_COMBINE( seed, multiDrawIndexedInfoEXT.vertexOffset );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT const & multiDrawInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, multiDrawInfoEXT.firstVertex );
+      VULKAN_HPP_HASH_COMBINE( seed, multiDrawInfoEXT.vertexCount );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const & multisamplePropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, multisamplePropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, multisamplePropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, multisamplePropertiesEXT.maxSampleLocationGridSize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT const & multisampledRenderToSingleSampledInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.multisampledRenderToSingleSampledEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, multisampledRenderToSingleSampledInfoEXT.rasterizationSamples );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX const & multiviewPerViewAttributesInfoNVX ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.perViewAttributes );
+      VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewAttributesInfoNVX.perViewAttributesPositionXOnly );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & multiviewPerViewRenderAreasRenderPassBeginInfoQCOM ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewRenderAreasRenderPassBeginInfoQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewRenderAreasRenderPassBeginInfoQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewRenderAreasRenderPassBeginInfoQCOM.perViewRenderAreaCount );
+      VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewRenderAreasRenderPassBeginInfoQCOM.pPerViewRenderAreas );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT const & mutableDescriptorTypeListEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeListEXT.descriptorTypeCount );
+      VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeListEXT.pDescriptorTypes );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT const & mutableDescriptorTypeCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.mutableDescriptorTypeListCount );
+      VULKAN_HPP_HASH_COMBINE( seed, mutableDescriptorTypeCreateInfoEXT.pMutableDescriptorTypeLists );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT const & opaqueCaptureDescriptorDataCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, opaqueCaptureDescriptorDataCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, opaqueCaptureDescriptorDataCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, opaqueCaptureDescriptorDataCreateInfoEXT.opaqueCaptureDescriptorData );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV const & opticalFlowExecuteInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.regionCount );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.pRegions );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV const & opticalFlowImageFormatInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.usage );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV const & opticalFlowImageFormatPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.format );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & opticalFlowSessionCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.width );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.height );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.imageFormat );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.flowVectorFormat );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.costFormat );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.outputGridSize );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.hintGridSize );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.performanceLevel );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.flags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV const & opticalFlowSessionCreatePrivateDataInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.id );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.size );
+      VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.pPrivateData );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV const & outOfBandQueueTypeInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, outOfBandQueueTypeInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, outOfBandQueueTypeInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, outOfBandQueueTypeInfoNV.queueType );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const & pastPresentationTimingGOOGLE ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.presentID );
+      VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.desiredPresentTime );
+      VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.actualPresentTime );
+      VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.earliestPresentTime );
+      VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.presentMargin );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & performanceConfigurationAcquireInfoINTEL ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.type );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const & performanceCounterDescriptionKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.flags );
+      for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.name[i] );
+      }
+      for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.category[i] );
+      }
+      for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.description[i] );
+      }
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const & performanceCounterKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.unit );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.scope );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.storage );
+      for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.uuid[i] );
+      }
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const & performanceMarkerInfoINTEL ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.marker );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const & performanceOverrideInfoINTEL ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.type );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.enable );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.parameter );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const & performanceQuerySubmitInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.counterPassIndex );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const & performanceStreamMarkerInfoINTEL ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.marker );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const & physicalDevice16BitStorageFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storageBuffer16BitAccess );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.uniformAndStorageBuffer16BitAccess );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storagePushConstant16 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storageInputOutput16 );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const & physicalDevice4444FormatsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.formatA4R4G4B4 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.formatA4B4G4R4 );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const & physicalDevice8BitStorageFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.storageBuffer8BitAccess );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.uniformAndStorageBuffer8BitAccess );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevice8BitStorageFeatures.storagePushConstant8 );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const & physicalDeviceASTCDecodeFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceASTCDecodeFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceASTCDecodeFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceASTCDecodeFeaturesEXT.decodeModeSharedExponent );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR const & physicalDeviceAccelerationStructureFeaturesKHR ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructure );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureCaptureReplay );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureIndirectBuild );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.accelerationStructureHostCommands );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructureFeaturesKHR.descriptorBindingAccelerationStructureUpdateAfterBind );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR const & physicalDeviceAccelerationStructurePropertiesKHR )
+      const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxGeometryCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxInstanceCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxPrimitiveCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxPerStageDescriptorAccelerationStructures );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxPerStageDescriptorUpdateAfterBindAccelerationStructures );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxDescriptorSetAccelerationStructures );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.maxDescriptorSetUpdateAfterBindAccelerationStructures );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAccelerationStructurePropertiesKHR.minAccelerationStructureScratchOffsetAlignment );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT const &
physicalDeviceAddressBindingReportFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.reportAddressBinding ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC const & physicalDeviceAmigoProfilingFeaturesSEC ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAmigoProfilingFeaturesSEC.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAmigoProfilingFeaturesSEC.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAmigoProfilingFeaturesSEC.amigoProfiling ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & + physicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT.attachmentFeedbackLoopDynamicState ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & + physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.attachmentFeedbackLoopLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & physicalDeviceBlendOperationAdvancedFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedFeaturesEXT.advancedBlendCoherentOperations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & physicalDeviceBlendOperationAdvancedPropertiesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendMaxColorAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendIndependentBlend ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendNonPremultipliedSrcColor ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendNonPremultipliedDstColor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendCorrelatedOverlap ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBlendOperationAdvancedPropertiesEXT.advancedBlendAllOperations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT const & physicalDeviceBorderColorSwizzleFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.borderColorSwizzle ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBorderColorSwizzleFeaturesEXT.borderColorSwizzleFromImage ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const & physicalDeviceBufferDeviceAddressFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddressCaptureReplay ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddressMultiDevice ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const & physicalDeviceBufferDeviceAddressFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.bufferDeviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.bufferDeviceAddressCaptureReplay ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceBufferDeviceAddressFeaturesEXT.bufferDeviceAddressMultiDevice ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & physicalDeviceClusterCullingShaderFeaturesHUAWEI ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.clustercullingShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.multiviewClusterCullingShader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & physicalDeviceClusterCullingShaderPropertiesHUAWEI ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceClusterCullingShaderPropertiesHUAWEI.pNext ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.maxWorkGroupCount[i] ); + } + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.maxWorkGroupSize[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.maxOutputClusterCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.indirectBufferOffsetAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & physicalDeviceClusterCullingShaderVrsFeaturesHUAWEI ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderVrsFeaturesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderVrsFeaturesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderVrsFeaturesHUAWEI.clusterShadingRate ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const & physicalDeviceCoherentMemoryFeaturesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.deviceCoherentMemory ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const & physicalDeviceColorWriteEnableFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.colorWriteEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const & physicalDeviceComputeShaderDerivativesFeaturesNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.computeDerivativeGroupQuads ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.computeDerivativeGroupLinear ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const & physicalDeviceConditionalRenderingFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.conditionalRendering ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.inheritedConditionalRendering ); + return seed; + } + }; + + 
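+
+  // Illustrative sketch (editor's comment, not emitted by the Vulkan-Hpp generator):
+  // every specialization in this header follows the same recipe -- start from
+  // seed = 0 and fold each struct member into the seed with VULKAN_HPP_HASH_COMBINE,
+  // so VULKAN_HPP_NAMESPACE structs can key unordered containers. Assuming a
+  // populated VULKAN_HPP_NAMESPACE::MemoryHeap value `heap`, one could write:
+  //
+  //   std::size_t h = std::hash<VULKAN_HPP_NAMESPACE::MemoryHeap>{}( heap );
+  //   std::unordered_set<VULKAN_HPP_NAMESPACE::MemoryHeap> seen;  // needs <unordered_set>
+  //   seen.insert( heap );  // uses the struct's operator==, also provided by vulkan.hpp
+  //
+  // Two structs whose members compare equal therefore hash identically.
+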
template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const & + physicalDeviceConservativeRasterizationPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.primitiveOverestimationSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.maxExtraPrimitiveOverestimationSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.extraPrimitiveOverestimationSizeGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.primitiveUnderestimation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativePointAndLineRasterization ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.degenerateTrianglesRasterized ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.degenerateLinesRasterized ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.fullyCoveredFragmentShaderInputVariable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativeRasterizationPostDepthCoverage ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesKHR const & physicalDeviceCooperativeMatrixFeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesKHR.cooperativeMatrix ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesKHR.cooperativeMatrixRobustBufferAccess ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const & physicalDeviceCooperativeMatrixFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.cooperativeMatrix ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.cooperativeMatrixRobustBufferAccess ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesKHR const & physicalDeviceCooperativeMatrixPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesKHR.cooperativeMatrixSupportedStages ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const & 
physicalDeviceCooperativeMatrixPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.cooperativeMatrixSupportedStages ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV const & physicalDeviceCopyMemoryIndirectFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectFeaturesNV.indirectCopy ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV const & physicalDeviceCopyMemoryIndirectPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectPropertiesNV.supportedQueues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const & physicalDeviceCornerSampledImageFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCornerSampledImageFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCornerSampledImageFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCornerSampledImageFeaturesNV.cornerSampledImage ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const & physicalDeviceCoverageReductionModeFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoverageReductionModeFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoverageReductionModeFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoverageReductionModeFeaturesNV.coverageReductionMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM const & physicalDeviceCubicClampFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicClampFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicClampFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicClampFeaturesQCOM.cubicRangeClamp ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicWeightsFeaturesQCOM const & physicalDeviceCubicWeightsFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicWeightsFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicWeightsFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicWeightsFeaturesQCOM.selectableCubicWeights ); + return seed; + } + }; + +# if defined( 
VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV const & physicalDeviceCudaKernelLaunchFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchFeaturesNV.cudaKernelLaunchFeatures ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV const & physicalDeviceCudaKernelLaunchPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.computeCapabilityMinor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.computeCapabilityMajor ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const & physicalDeviceCustomBorderColorFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.customBorderColors ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorFeaturesEXT.customBorderColorWithoutFormat ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const & physicalDeviceCustomBorderColorPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCustomBorderColorPropertiesEXT.maxCustomBorderColorSamplers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & + physicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDedicatedAllocationImageAliasingFeaturesNV.dedicatedAllocationImageAliasing ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT const & physicalDeviceDepthBiasControlFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceDepthBiasControlFeaturesEXT.depthBiasControl ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.leastRepresentableValueForceUnormRepresentation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.floatRepresentation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.depthBiasExact ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT const & physicalDeviceDepthClampZeroOneFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesEXT.depthClampZeroOne ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT const & physicalDeviceDepthClipControlFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipControlFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipControlFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipControlFeaturesEXT.depthClipControl ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const & physicalDeviceDepthClipEnableFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipEnableFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipEnableFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClipEnableFeaturesEXT.depthClipEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const & physicalDeviceDepthStencilResolveProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.supportedDepthResolveModes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.supportedStencilResolveModes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.independentResolveNone ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthStencilResolveProperties.independentResolve ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & + physicalDeviceDescriptorBufferDensityMapPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferDensityMapPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferDensityMapPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferDensityMapPropertiesEXT.combinedImageSamplerDensityMapDescriptorSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT const & 
physicalDeviceDescriptorBufferFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBufferCaptureReplay ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBufferImageLayoutIgnored ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBufferPushDescriptors ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT const & physicalDeviceDescriptorBufferPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.combinedImageSamplerDescriptorSingleArray ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.bufferlessPushDescriptors ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.allowSamplerImageViewPostSubmitCreation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.descriptorBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxDescriptorBufferBindings ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxResourceDescriptorBufferBindings ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxSamplerDescriptorBufferBindings ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxEmbeddedImmutableSamplerBindings ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxEmbeddedImmutableSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.bufferCaptureReplayDescriptorDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.imageCaptureReplayDescriptorDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.imageViewCaptureReplayDescriptorDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.samplerCaptureReplayDescriptorDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.accelerationStructureCaptureReplayDescriptorDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.samplerDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.combinedImageSamplerDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.sampledImageDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.storageImageDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.uniformTexelBufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustUniformTexelBufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.storageTexelBufferDescriptorSize ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustStorageTexelBufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.uniformBufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustUniformBufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.storageBufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustStorageBufferDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.inputAttachmentDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.accelerationStructureDescriptorSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxSamplerDescriptorBufferRange ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxResourceDescriptorBufferRange ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.samplerDescriptorBufferAddressSpaceSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.resourceDescriptorBufferAddressSpaceSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.descriptorBufferAddressSpaceSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const & physicalDeviceDescriptorIndexingFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderInputAttachmentArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderUniformTexelBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageTexelBufferArrayDynamicIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderUniformBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderSampledImageArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageImageArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderInputAttachmentArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderUniformTexelBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.shaderStorageTexelBufferArrayNonUniformIndexing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingUniformBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingSampledImageUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageImageUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceDescriptorIndexingFeatures.descriptorBindingUniformTexelBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingStorageTexelBufferUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingUpdateUnusedWhilePending ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingPartiallyBound ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.descriptorBindingVariableDescriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingFeatures.runtimeDescriptorArray ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const & physicalDeviceDescriptorIndexingProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxUpdateAfterBindDescriptorsInAllPools ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderUniformBufferArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderSampledImageArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderStorageBufferArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderStorageImageArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.shaderInputAttachmentArrayNonUniformIndexingNative ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.robustBufferAccessUpdateAfterBind ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.quadDivergentImplicitLod ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindSampledImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindStorageImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageDescriptorUpdateAfterBindInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxPerStageUpdateAfterBindResources ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindSamplers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorIndexingProperties.maxDescriptorSetUpdateAfterBindStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, 
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & physicalDeviceDescriptorPoolOverallocationFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorPoolOverallocationFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorPoolOverallocationFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorPoolOverallocationFeaturesNV.descriptorPoolOverallocation );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & physicalDeviceDescriptorSetHostMappingFeaturesVALVE ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorSetHostMappingFeaturesVALVE.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorSetHostMappingFeaturesVALVE.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorSetHostMappingFeaturesVALVE.descriptorSetHostMapping );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.deviceGeneratedCompute );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.deviceGeneratedComputePipelines );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.deviceGeneratedComputeCaptureReplay );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & physicalDeviceDeviceGeneratedCommandsFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesNV.deviceGeneratedCommands );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & physicalDeviceDeviceGeneratedCommandsPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxGraphicsShaderGroupCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectSequenceCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsTokenCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsStreamCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsTokenOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.maxIndirectCommandsStreamStride );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minSequencesCountBufferOffsetAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minSequencesIndexBufferOffsetAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesNV.minIndirectCommandsBufferOffsetAlignment );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT const & physicalDeviceDeviceMemoryReportFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceMemoryReportFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceMemoryReportFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceMemoryReportFeaturesEXT.deviceMemoryReport );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const & physicalDeviceDiagnosticsConfigFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiagnosticsConfigFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiagnosticsConfigFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiagnosticsConfigFeaturesNV.diagnosticsConfig );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const & physicalDeviceDiscardRectanglePropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiscardRectanglePropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiscardRectanglePropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDiscardRectanglePropertiesEXT.maxDiscardRectangles );
+      return seed;
+    }
+  };
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV const & physicalDeviceDisplacementMicromapFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapFeaturesNV.displacementMicromap );
+      return seed;
+    }
+  };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
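+
+  // The displacement-micromap specializations around this point only exist when the
+  // build opts into provisional extensions, e.g. with a typical compiler invocation:
+  //
+  //   clang++ -std=c++17 -DVK_ENABLE_BETA_EXTENSIONS -c app.cpp
+  //
+  // Without the define, the preprocessor drops the guarded blocks entirely.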
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV const & physicalDeviceDisplacementMicromapPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapPropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapPropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapPropertiesNV.maxDisplacementMicromapSubdivisionLevel );
+      return seed;
+    }
+  };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const & physicalDeviceDriverProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.driverID );
+      for ( size_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.driverName[i] );
+      }
+      for ( size_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.driverInfo[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDriverProperties.conformanceVersion );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT const & physicalDeviceDrmPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.hasPrimary );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.hasRender );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.primaryMajor );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.primaryMinor );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.renderMajor );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDrmPropertiesEXT.renderMinor );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures const & physicalDeviceDynamicRenderingFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingFeatures.dynamicRendering );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & physicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT.dynamicRenderingUnusedAttachments );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const & physicalDeviceExclusiveScissorFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExclusiveScissorFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExclusiveScissorFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExclusiveScissorFeaturesNV.exclusiveScissor );
+      return seed;
+    }
+  };
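+
+  // Fixed-size C-array members have no std::hash of their own, so the generated
+  // code above walks them element by element (driverName, driverInfo). The loop
+  // bound is the full Vulkan array size such as VK_MAX_DRIVER_NAME_SIZE, not the
+  // string length, so bytes past the NUL terminator also feed the hash; two names
+  // that differ only in their padding bytes would hash differently.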
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT const & physicalDeviceExtendedDynamicState2FeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2LogicOp );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState2FeaturesEXT.extendedDynamicState2PatchControlPoints );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT const & physicalDeviceExtendedDynamicState3FeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3TessellationDomainOrigin );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClampEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3PolygonMode );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RasterizationSamples );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3SampleMask );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3AlphaToCoverageEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3AlphaToOneEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LogicOpEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendEquation );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorWriteMask );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RasterizationStream );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ConservativeRasterizationMode );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ExtraPrimitiveOverestimationSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClipEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3SampleLocationsEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendAdvanced );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ProvokingVertexMode );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LineRasterizationMode );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LineStippleEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClipNegativeOneToOne );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ViewportWScalingEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ViewportSwizzle );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageToColorEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageToColorLocation );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationMode );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationTableEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationTable );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageReductionMode );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RepresentativeFragmentTestEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ShadingRateImageEnable );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT const & physicalDeviceExtendedDynamicState3PropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.dynamicPrimitiveTopologyUnrestricted );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const & physicalDeviceExtendedDynamicStateFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicStateFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicStateFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicStateFeaturesEXT.extendedDynamicState );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & physicalDeviceExtendedSparseAddressSpaceFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpaceFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpaceFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpaceFeaturesNV.extendedSparseAddressSpace );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & physicalDeviceExtendedSparseAddressSpacePropertiesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpacePropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpacePropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpacePropertiesNV.extendedSparseAddressSpaceSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpacePropertiesNV.extendedSparseImageUsageFlags );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpacePropertiesNV.extendedSparseBufferUsageFlags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const & physicalDeviceExternalBufferInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.usage );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalBufferInfo.handleType );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const & physicalDeviceExternalFenceInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFenceInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFenceInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFenceInfo.handleType );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID const & physicalDeviceExternalFormatResolveFeaturesANDROID ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolveFeaturesANDROID.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolveFeaturesANDROID.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolveFeaturesANDROID.externalFormatResolve );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+# if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID const & physicalDeviceExternalFormatResolvePropertiesANDROID ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolvePropertiesANDROID.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolvePropertiesANDROID.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolvePropertiesANDROID.nullColorAttachmentWithExternalFormatResolve );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolvePropertiesANDROID.externalFormatResolveChromaOffsetX );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolvePropertiesANDROID.externalFormatResolveChromaOffsetY );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const & physicalDeviceExternalImageFormatInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalImageFormatInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalImageFormatInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalImageFormatInfo.handleType );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const & physicalDeviceExternalMemoryHostPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryHostPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryHostPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryHostPropertiesEXT.minImportedHostPointerAlignment );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV const & physicalDeviceExternalMemoryRDMAFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryRDMAFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryRDMAFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryRDMAFeaturesNV.externalMemoryRDMA );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & physicalDeviceExternalMemoryScreenBufferFeaturesQNX ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryScreenBufferFeaturesQNX.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryScreenBufferFeaturesQNX.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryScreenBufferFeaturesQNX.screenBufferImport );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const & physicalDeviceExternalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalSemaphoreInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalSemaphoreInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalSemaphoreInfo.handleType );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT const & physicalDeviceFaultFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.deviceFault );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.deviceFaultVendorBinary );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const & physicalDeviceFeatures2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFeatures2.features );
+      return seed;
+    }
+  };
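+
+  // A minimal usage sketch (illustrative only, assuming the default "vk" namespace
+  // alias): with the specializations in this header in scope, these structs can key
+  // unordered containers directly, e.g. remembering which external-buffer queries
+  // have already been issued:
+  //
+  //   std::unordered_map<vk::PhysicalDeviceExternalBufferInfo, bool> queried;
+  //   vk::PhysicalDeviceExternalBufferInfo info{};  // sType preset by vulkan.hpp
+  //   queried[info] = true;                         // hashes via the code above
+  //
+  // unordered_map additionally requires operator== on the key, which vulkan.hpp
+  // normally generates for its structs alongside these hash specializations.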
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const & physicalDeviceFloatControlsProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.denormBehaviorIndependence );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.roundingModeIndependence );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderSignedZeroInfNanPreserveFloat16 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderSignedZeroInfNanPreserveFloat32 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderSignedZeroInfNanPreserveFloat64 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormPreserveFloat16 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormPreserveFloat32 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormPreserveFloat64 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormFlushToZeroFloat16 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormFlushToZeroFloat32 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderDenormFlushToZeroFloat64 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTEFloat16 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTEFloat32 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTEFloat64 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTZFloat16 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTZFloat32 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFloatControlsProperties.shaderRoundingModeRTZFloat64 );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT const & physicalDeviceFragmentDensityMap2FeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2FeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2FeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2FeaturesEXT.fragmentDensityMapDeferred );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT const & physicalDeviceFragmentDensityMap2PropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.subsampledLoads );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.subsampledCoarseReconstructionEarlyAccess );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.maxSubsampledArrayLayers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMap2PropertiesEXT.maxDescriptorSetSubsampledSamplers );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const & physicalDeviceFragmentDensityMapFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMap );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMapDynamic );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapFeaturesEXT.fragmentDensityMapNonSubsampledImages );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & physicalDeviceFragmentDensityMapOffsetFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetFeaturesQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetFeaturesQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetFeaturesQCOM.fragmentDensityMapOffset );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & physicalDeviceFragmentDensityMapOffsetPropertiesQCOM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetPropertiesQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetPropertiesQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapOffsetPropertiesQCOM.fragmentDensityOffsetGranularity );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const & physicalDeviceFragmentDensityMapPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.minFragmentDensityTexelSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.maxFragmentDensityTexelSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentDensityMapPropertiesEXT.fragmentDensityInvocations );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & physicalDeviceFragmentShaderBarycentricFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricFeaturesKHR.fragmentShaderBarycentric );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & physicalDeviceFragmentShaderBarycentricPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricPropertiesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricPropertiesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderBarycentricPropertiesKHR.triStripVertexOrderIndependentOfProvokingVertex );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & physicalDeviceFragmentShaderInterlockFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderSampleInterlock );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderPixelInterlock );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShaderInterlockFeaturesEXT.fragmentShaderShadingRateInterlock );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & physicalDeviceFragmentShadingRateEnumsFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.fragmentShadingRateEnums );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.supersampleFragmentShadingRates );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsFeaturesNV.noInvocationFragmentShadingRates );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & physicalDeviceFragmentShadingRateEnumsPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateEnumsPropertiesNV.maxFragmentShadingRateInvocationCount );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR const & physicalDeviceFragmentShadingRateFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.pipelineFragmentShadingRate );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.primitiveFragmentShadingRate );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateFeaturesKHR.attachmentFragmentShadingRate );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR const & physicalDeviceFragmentShadingRateKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.sampleCounts );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRateKHR.fragmentSize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR const & physicalDeviceFragmentShadingRatePropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.minFragmentShadingRateAttachmentTexelSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateAttachmentTexelSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateAttachmentTexelSizeAspectRatio );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.primitiveFragmentShadingRateWithMultipleViewports );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.layeredShadingRateAttachments );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateNonTrivialCombinerOps );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentSizeAspectRatio );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateCoverageSamples );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.maxFragmentShadingRateRasterizationSamples );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithShaderDepthStencilWrites );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithSampleMask );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithShaderSampleMask );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithConservativeRasterization );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithFragmentShaderInterlock );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateWithCustomSampleLocations );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFragmentShadingRatePropertiesKHR.fragmentShadingRateStrictMultiplyCombiner );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT const & physicalDeviceFrameBoundaryFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFrameBoundaryFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFrameBoundaryFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFrameBoundaryFeaturesEXT.frameBoundary );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & physicalDeviceGlobalPriorityQueryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeaturesKHR.globalPriorityQuery );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & physicalDeviceGraphicsPipelineLibraryFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryFeaturesEXT.graphicsPipelineLibrary );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & physicalDeviceGraphicsPipelineLibraryPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.graphicsPipelineLibraryFastLinking );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGraphicsPipelineLibraryPropertiesEXT.graphicsPipelineLibraryIndependentInterpolationDecoration );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const & physicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.physicalDeviceCount );
+      for ( size_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.physicalDevices[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGroupProperties.subsetAllocation );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT const & physicalDeviceHostImageCopyFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeaturesEXT.hostImageCopy );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT const & physicalDeviceHostImageCopyPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.copySrcLayoutCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.pCopySrcLayouts );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.copyDstLayoutCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.pCopyDstLayouts );
+      for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.optimalTilingLayoutUUID[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.identicalMemoryTypeRequirements );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const & physicalDeviceHostQueryResetFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostQueryResetFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostQueryResetFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostQueryResetFeatures.hostQueryReset );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const & physicalDeviceIDProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.pNext );
+      for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceUUID[i] );
+      }
+      for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.driverUUID[i] );
+      }
+      for ( size_t i = 0; i < VK_LUID_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceLUID[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceNodeMask );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIDProperties.deviceLUIDValid );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT const & physicalDeviceImage2DViewOf3DFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.image2DViewOf3D );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImage2DViewOf3DFeaturesEXT.sampler2DViewOf3D );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT const & physicalDeviceImageCompressionControlFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlFeaturesEXT.imageCompressionControl );
+      return seed;
+    }
+  };
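+
+  // Note on pointer members: pCopySrcLayouts / pCopyDstLayouts in
+  // PhysicalDeviceHostImageCopyPropertiesEXT above are combined as raw pointer
+  // values, not by dereferencing the layout arrays they point at, so two otherwise
+  // identical property blobs backed by different storage hash differently.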
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & physicalDeviceImageCompressionControlSwapchainFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlSwapchainFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlSwapchainFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageCompressionControlSwapchainFeaturesEXT.imageCompressionControlSwapchain );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const & physicalDeviceImageDrmFormatModifierInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.drmFormatModifier );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.sharingMode );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.queueFamilyIndexCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageDrmFormatModifierInfoEXT.pQueueFamilyIndices );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const & physicalDeviceImageFormatInfo2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.format );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.type );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.tiling );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.usage );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageFormatInfo2.flags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2FeaturesQCOM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2FeaturesQCOM const & physicalDeviceImageProcessing2FeaturesQCOM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2FeaturesQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2FeaturesQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2FeaturesQCOM.textureBlockMatch2 );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM const & physicalDeviceImageProcessing2PropertiesQCOM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2PropertiesQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2PropertiesQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2PropertiesQCOM.maxBlockMatchWindow );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM const & physicalDeviceImageProcessingFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.textureSampleWeighted );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.textureBoxFilter );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingFeaturesQCOM.textureBlockMatch );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM const & physicalDeviceImageProcessingPropertiesQCOM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxWeightFilterPhases );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxWeightFilterDimension );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxBlockMatchRegion );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessingPropertiesQCOM.maxBoxFilterBlockSize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures const & physicalDeviceImageRobustnessFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageRobustnessFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageRobustnessFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageRobustnessFeatures.robustImageAccess );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & physicalDeviceImageSlicedViewOf3DFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageSlicedViewOf3DFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageSlicedViewOf3DFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageSlicedViewOf3DFeaturesEXT.imageSlicedViewOf3D );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const & physicalDeviceImageViewImageFormatInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewImageFormatInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewImageFormatInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewImageFormatInfoEXT.imageViewType );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT const & physicalDeviceImageViewMinLodFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewMinLodFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewMinLodFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageViewMinLodFeaturesEXT.minLod );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const & physicalDeviceImagelessFramebufferFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImagelessFramebufferFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImagelessFramebufferFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImagelessFramebufferFeatures.imagelessFramebuffer );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const & physicalDeviceIndexTypeUint8FeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesEXT.indexTypeUint8 );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV const & physicalDeviceInheritedViewportScissorFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInheritedViewportScissorFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInheritedViewportScissorFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInheritedViewportScissorFeaturesNV.inheritedViewportScissor2D );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures const & physicalDeviceInlineUniformBlockFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.inlineUniformBlock );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockFeatures.descriptorBindingInlineUniformBlockUpdateAfterBind );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties const & physicalDeviceInlineUniformBlockProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxInlineUniformBlockSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxPerStageDescriptorInlineUniformBlocks );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxDescriptorSetInlineUniformBlocks );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInlineUniformBlockProperties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI const & physicalDeviceInvocationMaskFeaturesHUAWEI ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInvocationMaskFeaturesHUAWEI.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInvocationMaskFeaturesHUAWEI.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceInvocationMaskFeaturesHUAWEI.invocationMask );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT const & physicalDeviceLayeredDriverPropertiesMSFT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredDriverPropertiesMSFT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredDriverPropertiesMSFT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredDriverPropertiesMSFT.underlyingAPI );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT const & physicalDeviceLegacyDitheringFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.legacyDithering );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const & physicalDeviceLimits ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimension1D );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimension2D );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimension3D );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageDimensionCube );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxImageArrayLayers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTexelBufferElements );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxUniformBufferRange );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxStorageBufferRange );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPushConstantsSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxMemoryAllocationCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxSamplerAllocationCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.bufferImageGranularity );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sparseAddressSpaceSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxBoundDescriptorSets );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorSamplers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorUniformBuffers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorStorageBuffers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorSampledImages );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorStorageImages );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageDescriptorInputAttachments );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxPerStageResources );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetSamplers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetUniformBuffers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetUniformBuffersDynamic );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetStorageBuffers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetStorageBuffersDynamic );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetSampledImages );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetStorageImages );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDescriptorSetInputAttachments );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputAttributes );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputBindings );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputAttributeOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexInputBindingStride );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxVertexOutputComponents );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationGenerationLevel );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationPatchSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlPerVertexInputComponents );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlPerVertexOutputComponents );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlPerPatchOutputComponents );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationControlTotalOutputComponents );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationEvaluationInputComponents );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTessellationEvaluationOutputComponents );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryShaderInvocations );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryInputComponents );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryOutputComponents );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryOutputVertices );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxGeometryTotalOutputComponents );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFragmentInputComponents );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFragmentOutputAttachments );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFragmentDualSrcAttachments );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFragmentCombinedOutputResources );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeSharedMemorySize );
+      for ( size_t i = 0; i < 3; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeWorkGroupCount[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeWorkGroupInvocations );
+      for ( size_t i = 0; i < 3; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxComputeWorkGroupSize[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.subPixelPrecisionBits );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.subTexelPrecisionBits );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.mipmapPrecisionBits );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDrawIndexedIndexValue );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxDrawIndirectCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxSamplerLodBias );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxSamplerAnisotropy );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxViewports );
+      for ( size_t i = 0; i < 2; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxViewportDimensions[i] );
+      }
+      for ( size_t i = 0; i < 2; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.viewportBoundsRange[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.viewportSubPixelBits );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minMemoryMapAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minTexelBufferOffsetAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minUniformBufferOffsetAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minStorageBufferOffsetAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minTexelOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTexelOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minTexelGatherOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxTexelGatherOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.minInterpolationOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxInterpolationOffset );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.subPixelInterpolationOffsetBits );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFramebufferWidth );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFramebufferHeight );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxFramebufferLayers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferColorSampleCounts );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferDepthSampleCounts );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferStencilSampleCounts );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.framebufferNoAttachmentsSampleCounts );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxColorAttachments );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageColorSampleCounts );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageIntegerSampleCounts );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageDepthSampleCounts );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.sampledImageStencilSampleCounts );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.storageImageSampleCounts );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxSampleMaskWords );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.timestampComputeAndGraphics );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.timestampPeriod );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxClipDistances );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxCullDistances );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.maxCombinedClipAndCullDistances );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.discreteQueuePriorities );
+      for ( size_t i = 0; i < 2; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.pointSizeRange[i] );
+      }
+      for ( size_t i = 0; i < 2; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.lineWidthRange[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.pointSizeGranularity );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.lineWidthGranularity );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.strictLines );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.standardSampleLocations );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.optimalBufferCopyOffsetAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.optimalBufferCopyRowPitchAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLimits.nonCoherentAtomSize );
+      return seed;
+    }
+  };
physicalDeviceLineRasterizationFeaturesEXT.smoothLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.stippledRectangularLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.stippledBresenhamLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.stippledSmoothLines ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const & physicalDeviceLineRasterizationPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesEXT.lineSubPixelPrecisionBits ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV const & physicalDeviceLinearColorAttachmentFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLinearColorAttachmentFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLinearColorAttachmentFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLinearColorAttachmentFeaturesNV.linearColorAttachment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const & physicalDeviceMaintenance3Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.maxPerSetDescriptors ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance3Properties.maxMemoryAllocationSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features const & physicalDeviceMaintenance4Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Features.maintenance4 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties const & physicalDeviceMaintenance4Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance4Properties.maxBufferSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR const & physicalDeviceMaintenance5FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5FeaturesKHR.maintenance5 ); + return seed; + } + }; + + template <> + struct 
hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR const & physicalDeviceMaintenance5PropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.earlyFragmentMultisampleCoverageAfterSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.earlyFragmentSampleMaskTestBeforeSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.depthStencilSwizzleOneSupport ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.polygonModePointSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.nonStrictSinglePixelWideLinesUseParallelogram ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.nonStrictWideLinesUseParallelogram ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6FeaturesKHR const & physicalDeviceMaintenance6FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6FeaturesKHR.maintenance6 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6PropertiesKHR const & physicalDeviceMaintenance6PropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6PropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6PropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6PropertiesKHR.blockTexelViewCompatibleMultipleLayers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6PropertiesKHR.maxCombinedImageSamplerDescriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6PropertiesKHR.fragmentShadingRateClampCombinerInputs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const & physicalDeviceMemoryBudgetPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.pNext ); + for ( size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.heapBudget[i] ); + } + for ( size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryBudgetPropertiesEXT.heapUsage[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV const & physicalDeviceMemoryDecompressionFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV const & physicalDeviceMemoryDecompressionPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.decompressionMethods );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.maxDecompressionIndirectCount );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const & physicalDeviceMemoryPriorityFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryPriorityFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryPriorityFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryPriorityFeaturesEXT.memoryPriority );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const & physicalDeviceMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryTypeCount );
+      for ( size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryTypes[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryHeapCount );
+      for ( size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties.memoryHeaps[i] );
+      }
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const & physicalDeviceMemoryProperties2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryProperties2.memoryProperties );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT const & physicalDeviceMeshShaderFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.taskShader );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.meshShader );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.multiviewMeshShader );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.primitiveFragmentShadingRateMeshShader );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesEXT.meshShaderQueries );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const & physicalDeviceMeshShaderFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.taskShader );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderFeaturesNV.meshShader );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT const & physicalDeviceMeshShaderPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupTotalCount );
+      for ( size_t i = 0; i < 3; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupCount[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupInvocations );
+      for ( size_t i = 0; i < 3; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskWorkGroupSize[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskPayloadSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskSharedMemorySize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxTaskPayloadAndSharedMemorySize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupTotalCount );
+      for ( size_t i = 0; i < 3; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupCount[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupInvocations );
+      for ( size_t i = 0; i < 3; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshWorkGroupSize[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshSharedMemorySize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshPayloadAndSharedMemorySize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputMemorySize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshPayloadAndOutputMemorySize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputComponents );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputVertices );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputPrimitives );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshOutputLayers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxMeshMultiviewViewCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.meshOutputPerVertexGranularity );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.meshOutputPerPrimitiveGranularity );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxPreferredTaskWorkGroupInvocations );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.maxPreferredMeshWorkGroupInvocations );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersLocalInvocationVertexOutput );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersLocalInvocationPrimitiveOutput );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersCompactVertexOutput );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesEXT.prefersCompactPrimitiveOutput );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const & physicalDeviceMeshShaderPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxDrawMeshTasksCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskWorkGroupInvocations );
+      for ( size_t i = 0; i < 3; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskWorkGroupSize[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskTotalMemorySize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxTaskOutputCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshWorkGroupInvocations );
+      for ( size_t i = 0; i < 3; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshWorkGroupSize[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshTotalMemorySize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshOutputVertices );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshOutputPrimitives );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.maxMeshMultiviewViewCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.meshOutputPerVertexGranularity );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMeshShaderPropertiesNV.meshOutputPerPrimitiveGranularity );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT const & physicalDeviceMultiDrawFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawFeaturesEXT.multiDraw );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT const & physicalDeviceMultiDrawPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiDrawPropertiesEXT.maxMultiDrawCount );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.multisampledRenderToSingleSampled );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const & physicalDeviceMultiviewFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.multiview );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.multiviewGeometryShader );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewFeatures.multiviewTessellationShader );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & physicalDeviceMultiviewPerViewAttributesPropertiesNVX ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewAttributesPropertiesNVX.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewAttributesPropertiesNVX.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewAttributesPropertiesNVX.perViewPositionAllComponents );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & physicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM.multiviewPerViewRenderAreas );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & physicalDeviceMultiviewPerViewViewportsFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewViewportsFeaturesQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewViewportsFeaturesQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewViewportsFeaturesQCOM.multiviewPerViewViewports );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const & physicalDeviceMultiviewProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.maxMultiviewViewCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewProperties.maxMultiviewInstanceIndex );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & physicalDeviceMutableDescriptorTypeFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMutableDescriptorTypeFeaturesEXT.mutableDescriptorType );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferFeaturesEXT const & physicalDeviceNestedCommandBufferFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferFeaturesEXT.nestedCommandBuffer );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferFeaturesEXT.nestedCommandBufferRendering );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferFeaturesEXT.nestedCommandBufferSimultaneousUse );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferPropertiesEXT const & physicalDeviceNestedCommandBufferPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferPropertiesEXT.maxCommandBufferNestingLevel );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & physicalDeviceNonSeamlessCubeMapFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.nonSeamlessCubeMap );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT const & physicalDeviceOpacityMicromapFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromap );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromapCaptureReplay );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromapHostCommands );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT const & physicalDeviceOpacityMicromapPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.maxOpacity2StateSubdivisionLevel );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.maxOpacity4StateSubdivisionLevel );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV const & physicalDeviceOpticalFlowFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.opticalFlow );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV const & physicalDeviceOpticalFlowPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.supportedOutputGridSizes );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.supportedHintGridSizes );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.hintSupported );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.costSupported );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.bidirectionalFlowSupported );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.globalFlowSupported );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.minWidth );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.minHeight );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxWidth );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxHeight );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxNumRegionsOfInterest );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const & physicalDevicePCIBusInfoPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciDomain );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciBus );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciDevice );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciFunction );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & physicalDevicePageableDeviceLocalMemoryFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.pageableDeviceLocalMemory );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePerStageDescriptorSetFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerStageDescriptorSetFeaturesNV const & physicalDevicePerStageDescriptorSetFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerStageDescriptorSetFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerStageDescriptorSetFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerStageDescriptorSetFeaturesNV.perStageDescriptorSet );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerStageDescriptorSetFeaturesNV.dynamicPipelineLayout );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const & physicalDevicePerformanceQueryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.performanceCounterQueryPools );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.performanceCounterMultipleQueryPools );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const & physicalDevicePerformanceQueryPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.allowCommandBufferQueryCopies );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures const & physicalDevicePipelineCreationCacheControlFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineCreationCacheControlFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineCreationCacheControlFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineCreationCacheControlFeatures.pipelineCreationCacheControl );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & physicalDevicePipelineExecutablePropertiesFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineExecutablePropertiesFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineExecutablePropertiesFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineExecutablePropertiesFeaturesKHR.pipelineExecutableInfo );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & physicalDevicePipelineLibraryGroupHandlesFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineLibraryGroupHandlesFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineLibraryGroupHandlesFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineLibraryGroupHandlesFeaturesEXT.pipelineLibraryGroupHandles );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT const & physicalDevicePipelinePropertiesFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelinePropertiesFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelinePropertiesFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelinePropertiesFeaturesEXT.pipelinePropertiesIdentifier );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT const & physicalDevicePipelineProtectedAccessFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeaturesEXT.pipelineProtectedAccess );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT const & physicalDevicePipelineRobustnessFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeaturesEXT.pipelineRobustness );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT const & physicalDevicePipelineRobustnessPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessStorageBuffers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessUniformBuffers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessVertexInputs );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessImages );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const & physicalDevicePointClippingProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePointClippingProperties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePointClippingProperties.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePointClippingProperties.pointClippingBehavior );
+      return seed;
+    }
+  };
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const & physicalDevicePortabilitySubsetFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.constantAlphaColorBlendFactors );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.events );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.imageViewFormatReinterpretation );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.imageViewFormatSwizzle );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.imageView2DOn3DImage );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.multisampleArrayImage );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.mutableComparisonSamplers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.pointPolygons );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.samplerMipLodBias );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.separateStencilMaskRef );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.shaderSampleRateInterpolationFunctions );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.tessellationIsolines );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.tessellationPointMode );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.triangleFans );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetFeaturesKHR.vertexAttributeAccessBeyondStride );
+      return seed;
+    }
+  };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const & physicalDevicePortabilitySubsetPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetPropertiesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetPropertiesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePortabilitySubsetPropertiesKHR.minVertexInputBindingStrideAlignment );
+      return seed;
+    }
+  };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV const & physicalDevicePresentBarrierFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.presentBarrier );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR const & physicalDevicePresentIdFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentIdFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentIdFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentIdFeaturesKHR.presentId );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR const & physicalDevicePresentWaitFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentWaitFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentWaitFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentWaitFeaturesKHR.presentWait );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & physicalDevicePrimitiveTopologyListRestartFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.primitiveTopologyListRestart );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitiveTopologyListRestartFeaturesEXT.primitiveTopologyPatchListRestart );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & physicalDevicePrimitivesGeneratedQueryFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.primitivesGeneratedQuery );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.primitivesGeneratedQueryWithRasterizerDiscard );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrimitivesGeneratedQueryFeaturesEXT.primitivesGeneratedQueryWithNonZeroStreams );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures const & physicalDevicePrivateDataFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrivateDataFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrivateDataFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePrivateDataFeatures.privateData );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const & physicalDeviceSparseProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DBlockShape );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DMultisampleBlockShape );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard3DBlockShape );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyAlignedMipSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyNonResidentStrict );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const & physicalDeviceProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.apiVersion );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.driverVersion );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.vendorID );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceID );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceType );
+      for ( size_t i = 0; i < VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceName[i] );
+      }
+      for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.pipelineCacheUUID[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.limits );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.sparseProperties );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const & physicalDeviceProperties2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.properties );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const & physicalDeviceProtectedMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryFeatures.protectedMemory );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const & physicalDeviceProtectedMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryProperties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryProperties.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProtectedMemoryProperties.protectedNoFault );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT const & physicalDeviceProvokingVertexFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.provokingVertexLast );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexFeaturesEXT.transformFeedbackPreservesProvokingVertex );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT const & physicalDeviceProvokingVertexPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.provokingVertexModePerPipeline );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProvokingVertexPropertiesEXT.transformFeedbackPreservesTriangleFanProvokingVertex );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const & physicalDevicePushDescriptorPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.maxPushDescriptors );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & physicalDeviceRGBA10X6FormatsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRGBA10X6FormatsFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRGBA10X6FormatsFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRGBA10X6FormatsFeaturesEXT.formatRgba10x6WithoutYCbCrSampler );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.rasterizationOrderColorAttachmentAccess );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.rasterizationOrderDepthAttachmentAccess );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.rasterizationOrderStencilAttachmentAccess );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR const & physicalDeviceRayQueryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayQueryFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayQueryFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayQueryFeaturesKHR.rayQuery );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & physicalDeviceRayTracingInvocationReorderFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderFeaturesNV.rayTracingInvocationReorder );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & physicalDeviceRayTracingInvocationReorderPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderPropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderPropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderPropertiesNV.rayTracingInvocationReorderReorderingHint );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & physicalDeviceRayTracingMaintenance1FeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.rayTracingMaintenance1 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMaintenance1FeaturesKHR.rayTracingPipelineTraceRaysIndirect2 );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV const & physicalDeviceRayTracingMotionBlurFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.rayTracingMotionBlur );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingMotionBlurFeaturesNV.rayTracingMotionBlurPipelineTraceRaysIndirect );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR const & physicalDeviceRayTracingPipelineFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipeline );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineShaderGroupHandleCaptureReplay );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineShaderGroupHandleCaptureReplayMixed );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTracingPipelineTraceRaysIndirect );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelineFeaturesKHR.rayTraversalPrimitiveCulling );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR const & physicalDeviceRayTracingPipelinePropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupHandleSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxRayRecursionDepth );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxShaderGroupStride );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupBaseAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupHandleCaptureReplaySize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxRayDispatchInvocationCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.shaderGroupHandleAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPipelinePropertiesKHR.maxRayHitAttributeSize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & physicalDeviceRayTracingPositionFetchFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPositionFetchFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPositionFetchFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPositionFetchFeaturesKHR.rayTracingPositionFetch );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const & physicalDeviceRayTracingPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.shaderGroupHandleSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxRecursionDepth );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxShaderGroupStride );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.shaderGroupBaseAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxGeometryCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxInstanceCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxTriangleCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPropertiesNV.maxDescriptorSetAccelerationStructures );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRelaxedLineRasterizationFeaturesIMG>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & physicalDeviceRelaxedLineRasterizationFeaturesIMG ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRelaxedLineRasterizationFeaturesIMG.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRelaxedLineRasterizationFeaturesIMG.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRelaxedLineRasterizationFeaturesIMG.relaxedLineRasterization );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedFeaturesARM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedFeaturesARM const & physicalDeviceRenderPassStripedFeaturesARM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedFeaturesARM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedFeaturesARM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedFeaturesARM.renderPassStriped );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedPropertiesARM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedPropertiesARM const & physicalDeviceRenderPassStripedPropertiesARM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedPropertiesARM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedPropertiesARM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedPropertiesARM.renderPassStripeGranularity );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedPropertiesARM.maxRenderPassStripes );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & physicalDeviceRepresentativeFragmentTestFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRepresentativeFragmentTestFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRepresentativeFragmentTestFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRepresentativeFragmentTestFeaturesNV.representativeFragmentTest );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT const & physicalDeviceRobustness2FeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.robustBufferAccess2 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.robustImageAccess2 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2FeaturesEXT.nullDescriptor );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const & physicalDeviceRobustness2PropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.robustStorageBufferAccessSizeAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRobustness2PropertiesEXT.robustUniformBufferAccessSizeAlignment );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const & physicalDeviceSampleLocationsPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sampleLocationSampleCounts );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.maxSampleLocationGridSize );
+      for ( size_t i = 0; i < 2; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sampleLocationCoordinateRange[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.sampleLocationSubPixelBits );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSampleLocationsPropertiesEXT.variableSampleLocations );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const & physicalDeviceSamplerFilterMinmaxProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.filterMinmaxSingleComponentFormats );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerFilterMinmaxProperties.filterMinmaxImageComponentMapping );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const & physicalDeviceSamplerYcbcrConversionFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerYcbcrConversionFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerYcbcrConversionFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSamplerYcbcrConversionFeatures.samplerYcbcrConversion );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const & physicalDeviceScalarBlockLayoutFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceScalarBlockLayoutFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceScalarBlockLayoutFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceScalarBlockLayoutFeatures.scalarBlockLayout );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM const & physicalDeviceSchedulingControlsFeaturesARM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsFeaturesARM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsFeaturesARM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsFeaturesARM.schedulingControls );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM const & physicalDeviceSchedulingControlsPropertiesARM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsPropertiesARM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsPropertiesARM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsPropertiesARM.schedulingControlsFlags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & physicalDeviceSeparateDepthStencilLayoutsFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSeparateDepthStencilLayoutsFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSeparateDepthStencilLayoutsFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSeparateDepthStencilLayoutsFeatures.separateDepthStencilLayouts );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & physicalDeviceShaderAtomicFloat2FeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat16Atomics );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat16AtomicAdd );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat16AtomicMinMax );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat32AtomicMinMax );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderBufferFloat64AtomicMinMax );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat16Atomics );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat16AtomicAdd );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat16AtomicMinMax );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat32AtomicMinMax );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderSharedFloat64AtomicMinMax );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.shaderImageFloat32AtomicMinMax );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat2FeaturesEXT.sparseImageFloat32AtomicMinMax );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash
+  {
+    std::size_t operator()(
VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const & physicalDeviceShaderAtomicFloatFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat32Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat32AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderBufferFloat64AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat32Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat32AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderSharedFloat64AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderImageFloat32Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.shaderImageFloat32AtomicAdd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.sparseImageFloat32Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloatFeaturesEXT.sparseImageFloat32AtomicAdd ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const & physicalDeviceShaderAtomicInt64Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.shaderBufferInt64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicInt64Features.shaderSharedInt64Atomics ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const & physicalDeviceShaderClockFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.shaderSubgroupClock ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderClockFeaturesKHR.shaderDeviceClock ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & physicalDeviceShaderCoreBuiltinsFeaturesARM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsFeaturesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsFeaturesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsFeaturesARM.shaderCoreBuiltins ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & 
physicalDeviceShaderCoreBuiltinsPropertiesARM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.shaderCoreMask ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.shaderCoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.shaderWarpsPerCore ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const & physicalDeviceShaderCoreProperties2AMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.shaderCoreFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreProperties2AMD.activeComputeUnitCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const & physicalDeviceShaderCorePropertiesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.shaderEngineCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.shaderArraysPerEngineCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.computeUnitsPerShaderArray ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.simdPerComputeUnit ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.wavefrontsPerSimd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.wavefrontSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.sgprsPerSimd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.minSgprAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.maxSgprAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.sgprAllocationGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.vgprsPerSimd ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.minVgprAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.maxVgprAllocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesAMD.vgprAllocationGranularity ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM const & physicalDeviceShaderCorePropertiesARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesARM.pixelRate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesARM.texelRate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesARM.fmaRate ); 
+ return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & + physicalDeviceShaderDemoteToHelperInvocationFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDemoteToHelperInvocationFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDemoteToHelperInvocationFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDemoteToHelperInvocationFeatures.shaderDemoteToHelperInvocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const & physicalDeviceShaderDrawParametersFeatures ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDrawParametersFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDrawParametersFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderDrawParametersFeatures.shaderDrawParameters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & + physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.shaderEarlyAndLateFragmentTests ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX const & physicalDeviceShaderEnqueueFeaturesAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.shaderEnqueue ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX const & physicalDeviceShaderEnqueuePropertiesAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphDepth ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderOutputNodes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.executionGraphDispatchAddressAlignment ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const & 
physicalDeviceShaderFloat16Int8Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.shaderFloat16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloat16Int8Features.shaderInt8 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & physicalDeviceShaderImageAtomicInt64FeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.shaderImageInt64Atomics ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageAtomicInt64FeaturesEXT.sparseImageInt64Atomics ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const & physicalDeviceShaderImageFootprintFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageFootprintFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageFootprintFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderImageFootprintFeaturesNV.imageFootprint ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures const & physicalDeviceShaderIntegerDotProductFeatures ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductFeatures.shaderIntegerDotProduct ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties const & physicalDeviceShaderIntegerDotProductProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct8BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct4x8BitPackedMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitUnsignedAccelerated ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct16BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct32BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProduct64BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating8BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, + physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating16BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating32BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating64BitSignedAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerDotProductProperties.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & physicalDeviceShaderIntegerFunctions2FeaturesINTEL ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t 
seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerFunctions2FeaturesINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerFunctions2FeaturesINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderIntegerFunctions2FeaturesINTEL.shaderIntegerFunctions2 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & physicalDeviceShaderModuleIdentifierFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierFeaturesEXT.shaderModuleIdentifier ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & physicalDeviceShaderModuleIdentifierPropertiesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierPropertiesEXT.pNext ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderModuleIdentifierPropertiesEXT.shaderModuleIdentifierAlgorithmUUID[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT const & physicalDeviceShaderObjectFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.shaderObject ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT const & physicalDeviceShaderObjectPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.pNext ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.shaderBinaryUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.shaderBinaryVersion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const & physicalDeviceShaderSMBuiltinsFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsFeaturesNV.shaderSMBuiltins ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const & physicalDeviceShaderSMBuiltinsPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceShaderSMBuiltinsPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.shaderSMCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSMBuiltinsPropertiesNV.shaderWarpsPerSM ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & physicalDeviceShaderSubgroupExtendedTypesFeatures ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupExtendedTypesFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupExtendedTypesFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupExtendedTypesFeatures.shaderSubgroupExtendedTypes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & + physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.shaderSubgroupUniformControlFlow ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures const & physicalDeviceShaderTerminateInvocationFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTerminateInvocationFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTerminateInvocationFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTerminateInvocationFeatures.shaderTerminateInvocation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT const & physicalDeviceShaderTileImageFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageColorReadAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageDepthReadAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageStencilReadAccess ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT const & physicalDeviceShaderTileImagePropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageCoherentReadAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageReadSampleFromPixelRateInvocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageReadFromHelperInvocation ); + return seed; + } + }; + + 
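+    // -----------------------------------------------------------------------
+    // Editor's annotation (illustrative, not part of the generated header):
+    // every specialization in this file follows the same generated pattern --
+    // start from seed = 0, fold sType, pNext and each remaining member into
+    // the seed via VULKAN_HPP_HASH_COMBINE (a boost-style hash combine defined
+    // near the top of this header), and return the seed. This makes the
+    // structs usable as keys of standard unordered containers. A minimal
+    // sketch, assuming the struct's operator== from vulkan_structs.hpp is
+    // available ('seen' and 'props' are hypothetical names):
+    //
+    //   #include <unordered_set>
+    //   std::unordered_set<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT> seen;
+    //   seen.insert( props ); // picks up the std::hash specialization above
+    // -----------------------------------------------------------------------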
template <> + struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV> + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const & physicalDeviceShadingRateImageFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.shadingRateImage ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImageFeaturesNV.shadingRateCoarseSampleOrder ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const & physicalDeviceShadingRateImagePropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.shadingRateTexelSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.shadingRatePaletteSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShadingRateImagePropertiesNV.shadingRateMaxCoarseSamples ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const & physicalDeviceSparseImageFormatInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.format ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.type ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.samples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.usage ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseImageFormatInfo2.tiling ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const & physicalDeviceSubgroupProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.subgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.supportedStages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.supportedOperations ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupProperties.quadOperationsInAllStages ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures> + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures const & physicalDeviceSubgroupSizeControlFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.subgroupSizeControl ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlFeatures.computeFullSubgroups ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties const & physicalDeviceSubgroupSizeControlProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.minSubgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.maxSubgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.maxComputeWorkgroupSubgroups ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubgroupSizeControlProperties.requiredSubgroupSizeStages ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & physicalDeviceSubpassMergeFeedbackFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassMergeFeedbackFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassMergeFeedbackFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassMergeFeedbackFeaturesEXT.subpassMergeFeedback ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI const & physicalDeviceSubpassShadingFeaturesHUAWEI ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingFeaturesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingFeaturesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingFeaturesHUAWEI.subpassShading ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI const & physicalDeviceSubpassShadingPropertiesHUAWEI ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingPropertiesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingPropertiesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSubpassShadingPropertiesHUAWEI.maxSubpassShadingWorkgroupSizeAspectRatio ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const & physicalDeviceSurfaceInfo2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSurfaceInfo2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSurfaceInfo2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSurfaceInfo2KHR.surface ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & physicalDeviceSwapchainMaintenance1FeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSwapchainMaintenance1FeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSwapchainMaintenance1FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSwapchainMaintenance1FeaturesEXT.swapchainMaintenance1 ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features> + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features const
& physicalDeviceSynchronization2Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSynchronization2Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSynchronization2Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSynchronization2Features.synchronization2 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & physicalDeviceTexelBufferAlignmentFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentFeaturesEXT.texelBufferAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties const & physicalDeviceTexelBufferAlignmentProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.storageTexelBufferOffsetAlignmentBytes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.storageTexelBufferOffsetSingleTexelAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.uniformTexelBufferOffsetAlignmentBytes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTexelBufferAlignmentProperties.uniformTexelBufferOffsetSingleTexelAlignment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures const & physicalDeviceTextureCompressionASTCHDRFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTextureCompressionASTCHDRFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTextureCompressionASTCHDRFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTextureCompressionASTCHDRFeatures.textureCompressionASTC_HDR ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM const & physicalDeviceTilePropertiesFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTilePropertiesFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTilePropertiesFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTilePropertiesFeaturesQCOM.tileProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const & physicalDeviceTimelineSemaphoreFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreFeatures.timelineSemaphore ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties 
const & physicalDeviceTimelineSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTimelineSemaphoreProperties.maxTimelineSemaphoreValueDifference ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties const & physicalDeviceToolProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.pNext ); + for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.name[i] ); + } + for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.version[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.purposes ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.description[i] ); + } + for ( size_t i = 0; i < VK_MAX_EXTENSION_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceToolProperties.layer[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const & physicalDeviceTransformFeedbackFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.transformFeedback ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackFeaturesEXT.geometryStreams ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const & physicalDeviceTransformFeedbackPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackStreams ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackStreamDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.maxTransformFeedbackBufferDataStride ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackQueries ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackStreamsLinesTriangles ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackRasterizationStreamSelect ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceTransformFeedbackPropertiesEXT.transformFeedbackDraw ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const & physicalDeviceUniformBufferStandardLayoutFeatures ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceUniformBufferStandardLayoutFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceUniformBufferStandardLayoutFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceUniformBufferStandardLayoutFeatures.uniformBufferStandardLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const & physicalDeviceVariablePointersFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.variablePointersStorageBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVariablePointersFeatures.variablePointers ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesKHR const & physicalDeviceVertexAttributeDivisorFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesKHR.vertexAttributeInstanceRateDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesKHR.vertexAttributeInstanceRateZeroDivisor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & physicalDeviceVertexAttributeDivisorPropertiesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesEXT.maxVertexAttribDivisor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesKHR const & physicalDeviceVertexAttributeDivisorPropertiesKHR ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesKHR.maxVertexAttribDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesKHR.supportsNonZeroFirstInstance ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & physicalDeviceVertexInputDynamicStateFeaturesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.vertexInputDynamicState ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR const & videoProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.videoCodecOperation ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.chromaSubsampling ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.lumaBitDepth ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.chromaBitDepth ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & physicalDeviceVideoEncodeQualityLevelInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQualityLevelInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQualityLevelInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQualityLevelInfoKHR.pVideoProfile ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQualityLevelInfoKHR.qualityLevel ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR const & physicalDeviceVideoFormatInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoFormatInfoKHR.imageUsage ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR const & physicalDeviceVideoMaintenance1FeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoMaintenance1FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoMaintenance1FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoMaintenance1FeaturesKHR.videoMaintenance1 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const & physicalDeviceVulkan11Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.storageBuffer16BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.uniformAndStorageBuffer16BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.storagePushConstant16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.storageInputOutput16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.multiview ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.multiviewGeometryShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.multiviewTessellationShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.variablePointersStorageBuffer ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.variablePointers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.protectedMemory );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.samplerYcbcrConversion );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Features.shaderDrawParameters );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const & physicalDeviceVulkan11Properties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.pNext );
+      for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceUUID[i] );
+      }
+      for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.driverUUID[i] );
+      }
+      for ( size_t i = 0; i < VK_LUID_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceLUID[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceNodeMask );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.deviceLUIDValid );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupSupportedStages );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupSupportedOperations );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.subgroupQuadOperationsInAllStages );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.pointClippingBehavior );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxMultiviewViewCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxMultiviewInstanceIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.protectedNoFault );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxPerSetDescriptors );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan11Properties.maxMemoryAllocationSize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const & physicalDeviceVulkan12Features ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.samplerMirrorClampToEdge );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.drawIndirectCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.storageBuffer8BitAccess );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.uniformAndStorageBuffer8BitAccess );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.storagePushConstant8 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderBufferInt64Atomics );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderSharedInt64Atomics );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderFloat16 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderInt8 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorIndexing );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderInputAttachmentArrayDynamicIndexing );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderUniformTexelBufferArrayDynamicIndexing );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageTexelBufferArrayDynamicIndexing );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderUniformBufferArrayNonUniformIndexing );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderSampledImageArrayNonUniformIndexing );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageBufferArrayNonUniformIndexing );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageImageArrayNonUniformIndexing );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderInputAttachmentArrayNonUniformIndexing );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderUniformTexelBufferArrayNonUniformIndexing );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderStorageTexelBufferArrayNonUniformIndexing );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingUniformBufferUpdateAfterBind );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingSampledImageUpdateAfterBind );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingStorageImageUpdateAfterBind );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingStorageBufferUpdateAfterBind );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingUniformTexelBufferUpdateAfterBind );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingStorageTexelBufferUpdateAfterBind );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingUpdateUnusedWhilePending );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingPartiallyBound );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.descriptorBindingVariableDescriptorCount );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.runtimeDescriptorArray );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.samplerFilterMinmax );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.scalarBlockLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.imagelessFramebuffer );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.uniformBufferStandardLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderSubgroupExtendedTypes );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.separateDepthStencilLayouts );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.hostQueryReset );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.timelineSemaphore );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.bufferDeviceAddress );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.bufferDeviceAddressCaptureReplay );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.bufferDeviceAddressMultiDevice );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.vulkanMemoryModel );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.vulkanMemoryModelDeviceScope );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.vulkanMemoryModelAvailabilityVisibilityChains );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderOutputViewportIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.shaderOutputLayer );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Features.subgroupBroadcastDynamicId );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const & physicalDeviceVulkan12Properties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.driverID );
+      for ( size_t i = 0; i < VK_MAX_DRIVER_NAME_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.driverName[i] );
+      }
+      for ( size_t i = 0; i < VK_MAX_DRIVER_INFO_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.driverInfo[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.conformanceVersion );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.denormBehaviorIndependence );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.roundingModeIndependence );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSignedZeroInfNanPreserveFloat16 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSignedZeroInfNanPreserveFloat32 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSignedZeroInfNanPreserveFloat64 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormPreserveFloat16 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormPreserveFloat32 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormPreserveFloat64 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormFlushToZeroFloat16 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormFlushToZeroFloat32 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderDenormFlushToZeroFloat64 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTEFloat16 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTEFloat32 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTEFloat64 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTZFloat16 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTZFloat32 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderRoundingModeRTZFloat64 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxUpdateAfterBindDescriptorsInAllPools );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderUniformBufferArrayNonUniformIndexingNative );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderSampledImageArrayNonUniformIndexingNative );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderStorageBufferArrayNonUniformIndexingNative );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderStorageImageArrayNonUniformIndexingNative );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.shaderInputAttachmentArrayNonUniformIndexingNative );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.robustBufferAccessUpdateAfterBind );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.quadDivergentImplicitLod );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindSamplers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindUniformBuffers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindStorageBuffers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindSampledImages );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindStorageImages );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageDescriptorUpdateAfterBindInputAttachments );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxPerStageUpdateAfterBindResources );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindSamplers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindUniformBuffers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageBuffers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindSampledImages );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindStorageImages );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxDescriptorSetUpdateAfterBindInputAttachments );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.supportedDepthResolveModes );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.supportedStencilResolveModes );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.independentResolveNone );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.independentResolve );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.filterMinmaxSingleComponentFormats );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.filterMinmaxImageComponentMapping );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.maxTimelineSemaphoreValueDifference );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan12Properties.framebufferIntegerColorSampleCounts );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features const & physicalDeviceVulkan13Features ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.robustImageAccess );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.inlineUniformBlock );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.descriptorBindingInlineUniformBlockUpdateAfterBind );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.pipelineCreationCacheControl );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.privateData );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderDemoteToHelperInvocation );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderTerminateInvocation );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.subgroupSizeControl );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.computeFullSubgroups );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.synchronization2 );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.textureCompressionASTC_HDR );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderZeroInitializeWorkgroupMemory );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.dynamicRendering );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.shaderIntegerDotProduct );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Features.maintenance4 );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties const & physicalDeviceVulkan13Properties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.minSubgroupSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxSubgroupSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxComputeWorkgroupSubgroups );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.requiredSubgroupSizeStages );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxInlineUniformBlockSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxPerStageDescriptorInlineUniformBlocks );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxDescriptorSetInlineUniformBlocks );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxInlineUniformTotalSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct8BitUnsignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct8BitSignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct8BitMixedSignednessAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedUnsignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedSignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct4x8BitPackedMixedSignednessAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct16BitUnsignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct16BitSignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct16BitMixedSignednessAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct32BitUnsignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct32BitSignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct32BitMixedSignednessAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct64BitUnsignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct64BitSignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProduct64BitMixedSignednessAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitSignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitSignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitSignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitSignedAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.storageTexelBufferOffsetAlignmentBytes );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.storageTexelBufferOffsetSingleTexelAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.uniformTexelBufferOffsetAlignmentBytes );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.uniformTexelBufferOffsetSingleTexelAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan13Properties.maxBufferSize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const & physicalDeviceVulkanMemoryModelFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModel );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModelDeviceScope );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkanMemoryModelFeatures.vulkanMemoryModelAvailabilityVisibilityChains );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &
+                              physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayoutScalarBlockLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout8BitAccess );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.workgroupMemoryExplicitLayout16BitAccess );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & physicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcr2Plane444FormatsFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcr2Plane444FormatsFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcr2Plane444FormatsFeaturesEXT.ycbcr2plane444Formats );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM const & physicalDeviceYcbcrDegammaFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrDegammaFeaturesQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrDegammaFeaturesQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrDegammaFeaturesQCOM.ycbcrDegamma );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const & physicalDeviceYcbcrImageArraysFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrImageArraysFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrImageArraysFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrImageArraysFeaturesEXT.ycbcrImageArrays );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & physicalDeviceZeroInitializeWorkgroupMemoryFeatures ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceZeroInitializeWorkgroupMemoryFeatures.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceZeroInitializeWorkgroupMemoryFeatures.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceZeroInitializeWorkgroupMemoryFeatures.shaderZeroInitializeWorkgroupMemory );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & pipelineCacheCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.initialDataSize );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheCreateInfo.pInitialData );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne const & pipelineCacheHeaderVersionOne ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.headerSize );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.headerVersion );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.vendorID );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.deviceID );
+      for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, pipelineCacheHeaderVersionOne.pipelineCacheUUID[i] );
+      }
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const & pipelineColorBlendAdvancedStateCreateInfoEXT ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.srcPremultiplied );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.dstPremultiplied );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorBlendAdvancedStateCreateInfoEXT.blendOverlap );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT const & pipelineColorWriteCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.attachmentCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineColorWriteCreateInfoEXT.pColorWriteEnables );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const & pipelineCompilerControlCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCompilerControlCreateInfoAMD.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCompilerControlCreateInfoAMD.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCompilerControlCreateInfoAMD.compilerControlFlags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const & pipelineCoverageModulationStateCreateInfoNV ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.coverageModulationMode );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.coverageModulationTableEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.coverageModulationTableCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageModulationStateCreateInfoNV.pCoverageModulationTable );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const & pipelineCoverageReductionStateCreateInfoNV ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageReductionStateCreateInfoNV.coverageReductionMode );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const & pipelineCoverageToColorStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.coverageToColorEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCoverageToColorStateCreateInfoNV.coverageToColorLocation );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR const & pipelineCreateFlags2CreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfoKHR.flags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback const & pipelineCreationFeedback ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedback.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedback.duration );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo const & pipelineCreationFeedbackCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pPipelineCreationFeedback );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pipelineStageCreationFeedbackCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineCreationFeedbackCreateInfo.pPipelineStageCreationFeedbacks );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const & pipelineDiscardRectangleStateCreateInfoEXT ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.discardRectangleMode );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.discardRectangleCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineDiscardRectangleStateCreateInfoEXT.pDiscardRectangles );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const & pipelineExecutableInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.pipeline );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInfoKHR.executableIndex );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const & pipelineExecutableInternalRepresentationKHR ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.pNext );
+      for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.name[i] );
+      }
+      for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.description[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.isText );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.dataSize );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutableInternalRepresentationKHR.pData );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const & pipelineExecutablePropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.stages );
+      for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.name[i] );
+      }
+      for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.description[i] );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineExecutablePropertiesKHR.subgroupSize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV const & pipelineFragmentShadingRateEnumStateCreateInfoNV )
+      const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.shadingRateType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.shadingRate );
+      for ( size_t i = 0; i < 2; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateEnumStateCreateInfoNV.combinerOps[i] );
+      }
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR const & pipelineFragmentShadingRateStateCreateInfoKHR ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.fragmentSize );
+      for ( size_t i = 0; i < 2; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, pipelineFragmentShadingRateStateCreateInfoKHR.combinerOps[i] );
+      }
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV const & pipelineIndirectDeviceAddressInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.pipelineBindPoint );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.pipeline );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineInfoKHR const & pipelineInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.pipeline );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PushConstantRange>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PushConstantRange const & pushConstantRange ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.stageFlags );
+      VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.offset );
+      VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.size );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & pipelineLayoutCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.setLayoutCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pSetLayouts );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pushConstantRangeCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineLayoutCreateInfo.pPushConstantRanges );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT const & pipelinePropertiesIdentifierEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelinePropertiesIdentifierEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelinePropertiesIdentifierEXT.pNext );
+      for ( size_t i = 0; i < VK_UUID_SIZE; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, pipelinePropertiesIdentifierEXT.pipelineIdentifier[i] );
+      }
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const & pipelineRasterizationConservativeStateCreateInfoEXT ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.conservativeRasterizationMode );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationConservativeStateCreateInfoEXT.extraPrimitiveOverestimationSize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const & pipelineRasterizationDepthClipStateCreateInfoEXT )
+      const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationDepthClipStateCreateInfoEXT.depthClipEnable );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const & pipelineRasterizationLineStateCreateInfoEXT ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.lineRasterizationMode );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.stippledLineEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.lineStippleFactor );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.lineStipplePattern );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT const &
+                              pipelineRasterizationProvokingVertexStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationProvokingVertexStateCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationProvokingVertexStateCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationProvokingVertexStateCreateInfoEXT.provokingVertexMode );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD>
+  {
+    std::size_t operator()(
+      VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const & pipelineRasterizationStateRasterizationOrderAMD ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateRasterizationOrderAMD.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateRasterizationOrderAMD.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateRasterizationOrderAMD.rasterizationOrder );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const & pipelineRasterizationStateStreamCreateInfoEXT ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationStateStreamCreateInfoEXT.rasterizationStream );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo const & pipelineRenderingCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.viewMask );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.colorAttachmentCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.pColorAttachmentFormats );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.depthAttachmentFormat );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRenderingCreateInfo.stencilAttachmentFormat );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const & pipelineRepresentativeFragmentTestStateCreateInfoNV ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRepresentativeFragmentTestStateCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRepresentativeFragmentTestStateCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRepresentativeFragmentTestStateCreateInfoNV.representativeFragmentTestEnable );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT const & pipelineRobustnessCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.storageBuffers );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.uniformBuffers );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.vertexInputs );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.images );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const & pipelineSampleLocationsStateCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.sampleLocationsEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineSampleLocationsStateCreateInfoEXT.sampleLocationsInfo );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT const & pipelineShaderStageModuleIdentifierCreateInfoEXT )
+      const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.identifierSize );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageModuleIdentifierCreateInfoEXT.pIdentifier );
+      return seed;
+    }
+  };
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX const & pipelineShaderStageNodeCreateInfoAMDX ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.pNext );
+      for ( const char * p = pipelineShaderStageNodeCreateInfoAMDX.pName; *p != '\0'; ++p )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, *p );
+      }
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.index );
+      return seed;
+    }
+  };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo const & pipelineShaderStageRequiredSubgroupSizeCreateInfo )
+      const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageRequiredSubgroupSizeCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageRequiredSubgroupSizeCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageRequiredSubgroupSizeCreateInfo.requiredSubgroupSize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo>
+  {
+    std::size_t operator()(
+      VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const & pipelineTessellationDomainOriginStateCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationDomainOriginStateCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationDomainOriginStateCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineTessellationDomainOriginStateCreateInfo.domainOrigin );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR const & vertexInputBindingDivisorDescriptionKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescriptionKHR.binding );
+      VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescriptionKHR.divisor );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoKHR const & pipelineVertexInputDivisorStateCreateInfoKHR ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoKHR.vertexBindingDivisorCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoKHR.pVertexBindingDivisors );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const & pipelineViewportCoarseSampleOrderStateCreateInfoNV ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.sampleOrderType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.customSampleOrderCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportCoarseSampleOrderStateCreateInfoNV.pCustomSampleOrders );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT const & pipelineViewportDepthClipControlCreateInfoEXT ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClipControlCreateInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClipControlCreateInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClipControlCreateInfoEXT.negativeOneToOne );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const & pipelineViewportExclusiveScissorStateCreateInfoNV )
+      const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.exclusiveScissorCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportExclusiveScissorStateCreateInfoNV.pExclusiveScissors );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const & shadingRatePaletteNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, shadingRatePaletteNV.shadingRatePaletteEntryCount );
+      VULKAN_HPP_HASH_COMBINE( seed, shadingRatePaletteNV.pShadingRatePaletteEntries );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const & pipelineViewportShadingRateImageStateCreateInfoNV )
+      const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.shadingRateImageEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.viewportCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportShadingRateImageStateCreateInfoNV.pShadingRatePalettes );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const & viewportSwizzleNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.x );
+      VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.y );
+      VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.z );
+      VULKAN_HPP_HASH_COMBINE( seed, viewportSwizzleNV.w );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const & pipelineViewportSwizzleStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.viewportCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportSwizzleStateCreateInfoNV.pViewportSwizzles );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ViewportWScalingNV const & viewportWScalingNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, viewportWScalingNV.xcoeff );
+      VULKAN_HPP_HASH_COMBINE( seed, viewportWScalingNV.ycoeff );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const & pipelineViewportWScalingStateCreateInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.viewportWScalingEnable );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.viewportCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportWScalingStateCreateInfoNV.pViewportWScalings );
+      return seed;
+    }
+  };
+
+# if defined( VK_USE_PLATFORM_GGP )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const & presentFrameTokenGGP ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, presentFrameTokenGGP.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, presentFrameTokenGGP.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, presentFrameTokenGGP.frameToken );
+      return seed;
+    }
+  };
+# endif /*VK_USE_PLATFORM_GGP*/
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PresentIdKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentIdKHR const & presentIdKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.swapchainCount );
+      VULKAN_HPP_HASH_COMBINE( seed, presentIdKHR.pPresentIds );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PresentInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentInfoKHR const & presentInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.waitSemaphoreCount );
+      VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pWaitSemaphores );
+      VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.swapchainCount );
+      VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pSwapchains );
+      VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pImageIndices );
+      VULKAN_HPP_HASH_COMBINE( seed, presentInfoKHR.pResults );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::RectLayerKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::RectLayerKHR const & rectLayerKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, rectLayerKHR.offset );
+      VULKAN_HPP_HASH_COMBINE( seed, rectLayerKHR.extent );
+      VULKAN_HPP_HASH_COMBINE( seed, rectLayerKHR.layer );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PresentRegionKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentRegionKHR const & presentRegionKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, presentRegionKHR.rectangleCount );
+      VULKAN_HPP_HASH_COMBINE( seed, presentRegionKHR.pRectangles );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PresentRegionsKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentRegionsKHR const & presentRegionsKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.swapchainCount );
+      VULKAN_HPP_HASH_COMBINE( seed, presentRegionsKHR.pRegions );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const & presentTimeGOOGLE ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, presentTimeGOOGLE.presentID );
+      VULKAN_HPP_HASH_COMBINE( seed, presentTimeGOOGLE.desiredPresentTime );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const & presentTimesInfoGOOGLE ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.swapchainCount );
+      VULKAN_HPP_HASH_COMBINE( seed, presentTimesInfoGOOGLE.pTimes );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & privateDataSlotCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, privateDataSlotCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, privateDataSlotCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, privateDataSlotCreateInfo.flags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const & protectedSubmitInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, protectedSubmitInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, protectedSubmitInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, protectedSubmitInfo.protectedSubmit );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR const & pushConstantsInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfoKHR.layout );
+      VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfoKHR.stageFlags );
+      VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfoKHR.offset );
+      VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfoKHR.size );
+      VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfoKHR.pValues );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::WriteDescriptorSet>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSet const & writeDescriptorSet ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstSet );
+      VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstBinding );
+      VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstArrayElement );
+      VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.descriptorCount );
+      VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.descriptorType );
+      VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pImageInfo );
+      VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pBufferInfo );
+      VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pTexelBufferView );
+      return seed;
+    }
+  };
+
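+  // Editorial note: the specializations above and below are generated. As a
+  // minimal, hypothetical usage sketch (assuming the generated operator== for
+  // these structs is available, which vulkan.hpp provides by default), they
+  // let Vulkan-Hpp structs serve as keys in unordered containers, e.g. with
+  // <unordered_map> included:
+  //
+  //   std::unordered_map<VULKAN_HPP_NAMESPACE::PushConstantRange, uint32_t> slots;
+  //   VULKAN_HPP_NAMESPACE::PushConstantRange range;
+  //   range.stageFlags = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
+  //   range.offset     = 0;
+  //   range.size       = 16;
+  //   slots[range]     = 0;  // uses hash<PushConstantRange> defined above
+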
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR const & pushDescriptorSetInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfoKHR.stageFlags );
+      VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfoKHR.layout );
+      VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfoKHR.set );
+      VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfoKHR.descriptorWriteCount );
+      VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfoKHR.pDescriptorWrites );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR const & pushDescriptorSetWithTemplateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfoKHR.descriptorUpdateTemplate );
+      VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfoKHR.layout );
+      VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfoKHR.set );
+      VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfoKHR.pData );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV const & queryLowLatencySupportNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, queryLowLatencySupportNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, queryLowLatencySupportNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, queryLowLatencySupportNV.pQueriedLowLatencyData );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & queryPoolCreateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.queryType );
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.queryCount );
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.pipelineStatistics );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const & queryPoolPerformanceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.queueFamilyIndex );
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.counterIndexCount );
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.pCounterIndices );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const & queryPoolPerformanceQueryCreateInfoINTEL ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.performanceCountersSampling );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR const & queryPoolVideoEncodeFeedbackCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolVideoEncodeFeedbackCreateInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolVideoEncodeFeedbackCreateInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, queryPoolVideoEncodeFeedbackCreateInfoKHR.encodeFeedbackFlags );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV const & queueFamilyCheckpointProperties2NV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointProperties2NV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointProperties2NV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointProperties2NV.checkpointExecutionStageMask );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const & queueFamilyCheckpointPropertiesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointPropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointPropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyCheckpointPropertiesNV.checkpointExecutionStageMask );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR const & queueFamilyGlobalPriorityPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.priorityCount );
+      for ( size_t i = 0; i < VK_MAX_GLOBAL_PRIORITY_SIZE_KHR; ++i )
+      {
+        VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.priorities[i] );
+      }
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyProperties const & queueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.queueFlags );
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.queueCount );
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.timestampValidBits );
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties.minImageTransferGranularity );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const & queueFamilyProperties2 ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties2.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties2.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyProperties2.queueFamilyProperties );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR const & queueFamilyQueryResultStatusPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusPropertiesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusPropertiesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyQueryResultStatusPropertiesKHR.queryResultStatusSupport );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR const & queueFamilyVideoPropertiesKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyVideoPropertiesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyVideoPropertiesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, queueFamilyVideoPropertiesKHR.videoCodecOperations );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR const & rayTracingShaderGroupCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.type );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.generalShader );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.closestHitShader );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.anyHitShader );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.intersectionShader );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoKHR.pShaderGroupCaptureReplayHandle );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR const & rayTracingPipelineInterfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.maxPipelineRayPayloadSize );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineInterfaceCreateInfoKHR.maxPipelineRayHitAttributeSize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & rayTracingPipelineCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.stageCount );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pStages );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.groupCount );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pGroups );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.maxPipelineRayRecursionDepth );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pLibraryInfo );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pLibraryInterface );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.pDynamicState );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.layout );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.basePipelineHandle );
+      VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoKHR.basePipelineIndex
); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const & rayTracingShaderGroupCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.type ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.generalShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.closestHitShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.anyHitShader ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingShaderGroupCreateInfoNV.intersectionShader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & rayTracingPipelineCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.stageCount ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.pStages ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.groupCount ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.pGroups ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.maxRecursionDepth ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.layout ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.basePipelineHandle ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineCreateInfoNV.basePipelineIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const & refreshCycleDurationGOOGLE ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, refreshCycleDurationGOOGLE.refreshDuration ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT const & releaseSwapchainImagesInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.swapchain ); + VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.imageIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.pImageIndices ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const & renderPassAttachmentBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.attachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassAttachmentBeginInfo.pAttachments ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const & renderPassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 
0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.renderPass ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.framebuffer ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.renderArea ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.clearValueCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassBeginInfo.pClearValues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassDescription const & subpassDescription ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.flags ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.inputAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pColorAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pResolveAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pDepthStencilAttachment ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.preserveAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription.pPreserveAttachments ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassDependency const & subpassDependency ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.srcSubpass ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dstSubpass ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.srcStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dstAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency.dependencyFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & renderPassCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.attachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.subpassCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pSubpasses ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.dependencyCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo.pDependencies ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassDescription2 const & subpassDescription2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.flags ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.viewMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.inputAttachmentCount ); + 
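+      // Note on the pointer members hashed below (pInputAttachments, pColorAttachments, ...):
+      // VULKAN_HPP_HASH_COMBINE folds them into the seed by pointer value, not by walking the
+      // arrays they point to, so two structurally identical descriptions hash (and compare)
+      // equal only when they reference the same attachment arrays. The macro itself is defined
+      // near the top of this header in the classic boost-style hash_combine shape, roughly:
+      //
+      //   seed ^= std::hash<T>{}( value ) + 0x9e3779b9 + ( seed << 6 ) + ( seed >> 2 );
+      //
+      // (a sketch only; see the actual VULKAN_HPP_HASH_COMBINE definition for the exact form).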
VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pInputAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pColorAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pResolveAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pDepthStencilAttachment ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.preserveAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescription2.pPreserveAttachments ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassDependency2 const & subpassDependency2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.srcSubpass ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dstSubpass ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.srcStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.srcAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dstAccessMask ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.dependencyFlags ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDependency2.viewOffset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & renderPassCreateInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.flags ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.attachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.subpassCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pSubpasses ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.dependencyCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pDependencies ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.correlatedViewMaskCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreateInfo2.pCorrelatedViewMasks ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT const & renderPassCreationControlEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationControlEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationControlEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationControlEXT.disallowMerging ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT const & renderPassCreationFeedbackInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationFeedbackInfoEXT.postMergeSubpassCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT const & renderPassCreationFeedbackCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationFeedbackCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
renderPassCreationFeedbackCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassCreationFeedbackCreateInfoEXT.pRenderPassFeedback ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const & renderPassFragmentDensityMapCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassFragmentDensityMapCreateInfoEXT.fragmentDensityMapAttachment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const & renderPassInputAttachmentAspectCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.aspectReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassInputAttachmentAspectCreateInfo.pAspectReferences ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const & renderPassMultiviewCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.subpassCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pViewMasks ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.dependencyCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pViewOffsets ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.correlationMaskCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassMultiviewCreateInfo.pCorrelationMasks ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const & subpassSampleLocationsEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassSampleLocationsEXT.subpassIndex ); + VULKAN_HPP_HASH_COMBINE( seed, subpassSampleLocationsEXT.sampleLocationsInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const & renderPassSampleLocationsBeginInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.attachmentInitialSampleLocationsCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.pAttachmentInitialSampleLocations ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.postSubpassSampleLocationsCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSampleLocationsBeginInfoEXT.pPostSubpassSampleLocations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM const & renderPassStripeInfoARM ) const 
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeInfoARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeInfoARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeInfoARM.stripeArea ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassStripeBeginInfoARM const & renderPassStripeBeginInfoARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeBeginInfoARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeBeginInfoARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeBeginInfoARM.stripeInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeBeginInfoARM.pStripeInfos ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo const & semaphoreSubmitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.value ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.stageMask ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.deviceIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassStripeSubmitInfoARM const & renderPassStripeSubmitInfoARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeSubmitInfoARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeSubmitInfoARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeSubmitInfoARM.stripeSemaphoreInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeSubmitInfoARM.pStripeSemaphoreInfos ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT const & renderPassSubpassFeedbackInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackInfoEXT.subpassMergeStatus ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackInfoEXT.description[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackInfoEXT.postMergeIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT const & renderPassSubpassFeedbackCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassSubpassFeedbackCreateInfoEXT.pSubpassFeedback ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM const & renderPassTransformBeginInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassTransformBeginInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassTransformBeginInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassTransformBeginInfoQCOM.transform ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR const & renderingAreaInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.viewMask ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.pColorAttachmentFormats ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.depthAttachmentFormat ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.stencilAttachmentFormat ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT const & renderingFragmentDensityMapAttachmentInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.imageView ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentDensityMapAttachmentInfoEXT.imageLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR const & renderingFragmentShadingRateAttachmentInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.imageView ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.imageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, renderingFragmentShadingRateAttachmentInfoKHR.shadingRateAttachmentTexelSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingInfo const & renderingInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.renderArea ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.layerCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.viewMask ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pColorAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pDepthAttachment ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInfo.pStencilAttachment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ResolveImageInfo2 const & resolveImageInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, resolveImageInfo2.pRegions ); + return seed; 
+ } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SamplerBlockMatchWindowCreateInfoQCOM const & samplerBlockMatchWindowCreateInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerBlockMatchWindowCreateInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBlockMatchWindowCreateInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBlockMatchWindowCreateInfoQCOM.windowExtent ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBlockMatchWindowCreateInfoQCOM.windowCompareMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT const & samplerBorderColorComponentMappingCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.components ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBorderColorComponentMappingCreateInfoEXT.srgb ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT const & samplerCaptureDescriptorDataInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerCaptureDescriptorDataInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCaptureDescriptorDataInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCaptureDescriptorDataInfoEXT.sampler ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & samplerCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.magFilter ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.minFilter ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.mipmapMode ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeU ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeV ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.addressModeW ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.mipLodBias ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.anisotropyEnable ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.maxAnisotropy ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.compareEnable ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.compareOp ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.minLod ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.maxLod ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.borderColor ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCreateInfo.unnormalizedCoordinates ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM const & samplerCubicWeightsCreateInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerCubicWeightsCreateInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCubicWeightsCreateInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCubicWeightsCreateInfoQCOM.cubicWeights ); + return seed; + 
} + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const & samplerReductionModeCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerReductionModeCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerReductionModeCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerReductionModeCreateInfo.reductionMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & samplerYcbcrConversionCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.format ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.ycbcrModel ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.ycbcrRange ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.components ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.xChromaOffset ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.yChromaOffset ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.chromaFilter ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionCreateInfo.forceExplicitReconstruction ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const & samplerYcbcrConversionImageFormatProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionImageFormatProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionImageFormatProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionImageFormatProperties.combinedImageSamplerDescriptorCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const & samplerYcbcrConversionInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionInfo.conversion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & samplerYcbcrConversionYcbcrDegammaCreateInfoQCOM ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionYcbcrDegammaCreateInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionYcbcrDegammaCreateInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionYcbcrDegammaCreateInfoQCOM.enableYDegamma ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionYcbcrDegammaCreateInfoQCOM.enableCbCrDegamma ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX const & screenBufferFormatPropertiesQNX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.pNext ); 
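+      // This specialization sits inside the VK_USE_PLATFORM_SCREEN_QNX guard opened just above;
+      // like the Win32, Fuchsia and GGP blocks further down, it only exists when the matching
+      // platform macro is defined before this header is included, typically by the build system,
+      // e.g. (an assumed CMake example):
+      //
+      //   add_definitions(-DVK_USE_PLATFORM_SCREEN_QNX)
+      //
+      // so code that hashes these structs must be compiled under the same definition.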
+ VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.format ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.externalFormat ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.screenUsage ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.formatFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.samplerYcbcrConversionComponents ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.suggestedYcbcrModel ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.suggestedYcbcrRange ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.suggestedXChromaOffset ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.suggestedYChromaOffset ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX const & screenBufferPropertiesQNX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, screenBufferPropertiesQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferPropertiesQNX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferPropertiesQNX.allocationSize ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferPropertiesQNX.memoryTypeBits ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & screenSurfaceCreateInfoQNX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.flags ); + VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.context ); + VULKAN_HPP_HASH_COMBINE( seed, screenSurfaceCreateInfoQNX.window ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & semaphoreCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreCreateInfo.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const & semaphoreGetFdInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetFdInfoKHR.handleType ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const & semaphoreGetWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetWin32HandleInfoKHR.handleType ); + return seed; + } + }; +# 
endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA const & semaphoreGetZirconHandleInfoFUCHSIA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreGetZirconHandleInfoFUCHSIA.handleType ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const & semaphoreSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSignalInfo.value ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const & semaphoreTypeCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.semaphoreType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreTypeCreateInfo.initialValue ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const & semaphoreWaitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.semaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.pSemaphores ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreWaitInfo.pValues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT const & setDescriptorBufferOffsetsInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.layout ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.firstSet ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.setCount ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.pBufferIndices ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.pOffsets ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV const & setLatencyMarkerInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, setLatencyMarkerInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, setLatencyMarkerInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, setLatencyMarkerInfoNV.presentID ); + VULKAN_HPP_HASH_COMBINE( seed, setLatencyMarkerInfoNV.marker ); + return 
seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const & setStateFlagsIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, setStateFlagsIndirectCommandNV.data ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & shaderCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.stage ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.nextStage ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.codeType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.codeSize ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pCode ); + for ( const char * p = shaderCreateInfoEXT.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.setLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pSetLayouts ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pushConstantRangeCount ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pPushConstantRanges ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pSpecializationInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & shaderModuleCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.codeSize ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.pCode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT const & shaderModuleIdentifierEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.identifierSize ); + for ( size_t i = 0; i < VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.identifier[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const & shaderModuleValidationCacheCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleValidationCacheCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleValidationCacheCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleValidationCacheCreateInfoEXT.validationCache ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const & shaderResourceUsageAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.numUsedVgprs ); + VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.numUsedSgprs ); + VULKAN_HPP_HASH_COMBINE( seed, 
shaderResourceUsageAMD.ldsSizePerLocalWorkGroup ); + VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.ldsUsageSizeInBytes ); + VULKAN_HPP_HASH_COMBINE( seed, shaderResourceUsageAMD.scratchMemUsageInBytes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const & shaderStatisticsInfoAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.shaderStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.resourceUsage ); + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numPhysicalVgprs ); + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numPhysicalSgprs ); + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numAvailableVgprs ); + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.numAvailableSgprs ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, shaderStatisticsInfoAMD.computeWorkGroupSize[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const & sharedPresentSurfaceCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sharedPresentSurfaceCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, sharedPresentSurfaceCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, sharedPresentSurfaceCapabilitiesKHR.sharedPresentSupportedUsageFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const & sparseImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties.aspectMask ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties.imageGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const & sparseImageFormatProperties2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageFormatProperties2.properties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const & sparseImageMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.formatProperties ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailFirstLod ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailSize ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailOffset ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements.imageMipTailStride ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const & sparseImageMemoryRequirements2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, sparseImageMemoryRequirements2.memoryRequirements ); + return seed; + } 
+ }; + +# if defined( VK_USE_PLATFORM_GGP ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & streamDescriptorSurfaceCreateInfoGGP ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.sType ); + VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.flags ); + VULKAN_HPP_HASH_COMBINE( seed, streamDescriptorSurfaceCreateInfoGGP.streamDescriptor ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_GGP*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & stridedDeviceAddressRegionKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.deviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.stride ); + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubmitInfo const & submitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.waitSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pWaitSemaphores ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pWaitDstStageMask ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.commandBufferCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pCommandBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.signalSemaphoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo.pSignalSemaphores ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubmitInfo2 const & submitInfo2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.flags ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.waitSemaphoreInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pWaitSemaphoreInfos ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.commandBufferInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pCommandBufferInfos ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.signalSemaphoreInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, submitInfo2.pSignalSemaphoreInfos ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassBeginInfo const & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassBeginInfo.contents ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const & subpassDescriptionDepthStencilResolve ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.depthResolveMode ); + VULKAN_HPP_HASH_COMBINE( seed, 
subpassDescriptionDepthStencilResolve.stencilResolveMode ); + VULKAN_HPP_HASH_COMBINE( seed, subpassDescriptionDepthStencilResolve.pDepthStencilResolveAttachment ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassEndInfo const & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassEndInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassEndInfo.pNext ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM const & subpassFragmentDensityMapOffsetEndInfoQCOM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassFragmentDensityMapOffsetEndInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassFragmentDensityMapOffsetEndInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassFragmentDensityMapOffsetEndInfoQCOM.fragmentDensityOffsetCount ); + VULKAN_HPP_HASH_COMBINE( seed, subpassFragmentDensityMapOffsetEndInfoQCOM.pFragmentDensityOffsets ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT const & subpassResolvePerformanceQueryEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassResolvePerformanceQueryEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassResolvePerformanceQueryEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassResolvePerformanceQueryEXT.optimal ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI const & subpassShadingPipelineCreateInfoHUAWEI ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.renderPass ); + VULKAN_HPP_HASH_COMBINE( seed, subpassShadingPipelineCreateInfoHUAWEI.subpass ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT const & subresourceHostMemcpySizeEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySizeEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySizeEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySizeEXT.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR const & subresourceLayout2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2KHR.subresourceLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const & surfaceCapabilities2EXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.minImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.maxImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, 
surfaceCapabilities2EXT.currentExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.minImageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.maxImageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.maxImageArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.supportedTransforms ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.currentTransform ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.supportedCompositeAlpha ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.supportedUsageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2EXT.supportedSurfaceCounters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const & surfaceCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.minImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.maxImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.currentExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.minImageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.maxImageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.maxImageArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.supportedTransforms ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.currentTransform ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.supportedCompositeAlpha ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesKHR.supportedUsageFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const & surfaceCapabilities2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilities2KHR.surfaceCapabilities ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const & surfaceCapabilitiesFullScreenExclusiveEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesFullScreenExclusiveEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesFullScreenExclusiveEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesFullScreenExclusiveEXT.fullScreenExclusiveSupported ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV const & surfaceCapabilitiesPresentBarrierNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.presentBarrierSupported ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const & surfaceFormatKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormatKHR.format ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormatKHR.colorSpace ); + return seed; + } + }; + + 
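+
+  // Taken together, these std::hash specializations (plus the memberwise operator== that
+  // Vulkan-Hpp structs already provide) let the structs key unordered containers directly.
+  // A minimal sketch, assuming the default `vk` VULKAN_HPP_NAMESPACE and a caller that has
+  // already queried its surface formats -- illustrative only, not part of the upstream header:
+  //
+  //   #include <unordered_set>
+  //   #include <vector>
+  //   #include <vulkan/vulkan_hash.hpp>   // this header; brings in the specializations above
+  //
+  //   bool supportsFormat( std::vector<vk::SurfaceFormatKHR> const & formats,
+  //                        vk::SurfaceFormatKHR const & wanted )
+  //   {
+  //     // SurfaceFormatKHR hashes format + colorSpace (see its specialization just above),
+  //     // so membership testing becomes an O(1) lookup instead of a linear scan.
+  //     std::unordered_set<vk::SurfaceFormatKHR> available( formats.begin(), formats.end() );
+  //     return available.count( wanted ) != 0;
+  //   }
+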
template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const & surfaceFormat2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.surfaceFormat ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const & surfaceFullScreenExclusiveInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.fullScreenExclusive ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const & surfaceFullScreenExclusiveWin32InfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.hmonitor ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT const & surfacePresentModeCompatibilityEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.presentModeCount ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.pPresentModes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT const & surfacePresentModeEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeEXT.presentMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT const & surfacePresentScalingCapabilitiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.supportedPresentScaling ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.supportedPresentGravityX ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.supportedPresentGravityY ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.minScaledImageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.maxScaledImageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const & surfaceProtectedCapabilitiesKHR ) const 
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.supportsProtected ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const & swapchainCounterCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.surfaceCounters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & swapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.surface ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.minImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageFormat ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageColorSpace ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageUsage ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageSharingMode ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.queueFamilyIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.pQueueFamilyIndices ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.preTransform ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.compositeAlpha ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.presentMode ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.clipped ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.oldSwapchain ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const & swapchainDisplayNativeHdrCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.localDimmingEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainLatencyCreateInfoNV const & swapchainLatencyCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainLatencyCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainLatencyCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainLatencyCreateInfoNV.latencyModeEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV const & swapchainPresentBarrierCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( 
seed, swapchainPresentBarrierCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.presentBarrierEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT const & swapchainPresentFenceInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentFenceInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentFenceInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentFenceInfoEXT.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentFenceInfoEXT.pFences ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT const & swapchainPresentModeInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.pPresentModes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT const & swapchainPresentModesCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModesCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModesCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModesCreateInfoEXT.presentModeCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModesCreateInfoEXT.pPresentModes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT const & swapchainPresentScalingCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.scalingBehavior ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.presentGravityX ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.presentGravityY ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const & textureLODGatherFormatPropertiesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.supportsTextureGatherLODBiasAMD ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TilePropertiesQCOM const & tilePropertiesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.tileSize ); + VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.apronSize ); + VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.origin ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const & timelineSemaphoreSubmitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.waitSemaphoreValueCount ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pWaitSemaphoreValues ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.signalSemaphoreValueCount ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pSignalSemaphoreValues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR const & traceRaysIndirectCommand2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.raygenShaderRecordAddress ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.raygenShaderRecordSize ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableAddress ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableSize ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableStride ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableAddress ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableSize ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableStride ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableAddress ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableSize ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableStride ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.width ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.height ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.depth ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const & traceRaysIndirectCommandKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.width ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.height ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.depth ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & validationCacheCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.initialDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.pInitialData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const & validationFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
validationFeaturesEXT.enabledValidationFeatureCount ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pEnabledValidationFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.disabledValidationFeatureCount ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pDisabledValidationFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const & validationFlagsEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.disabledValidationCheckCount ); + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.pDisabledValidationChecks ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT const & vertexInputAttributeDescription2EXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.location ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.format ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.offset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT const & vertexInputBindingDescription2EXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.stride ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.inputRate ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.divisor ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_VI_NN ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & viSurfaceCreateInfoNN ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.sType ); + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.flags ); + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.window ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_VI_NN*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & videoPictureResourceInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.codedOffset ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.codedExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.baseArrayLayer ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.imageViewBinding ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR const & videoReferenceSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.slotIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.pPictureResource ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR const & videoBeginCodingInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.videoSession ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.videoSessionParameters ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.referenceSlotCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.pReferenceSlots ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR const & videoCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minBitstreamBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minBitstreamBufferSizeAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.pictureAccessGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minCodedExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxCodedExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxDpbSlots ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxActiveReferencePictures ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.stdHeaderVersion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR const & videoCodingControlInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR const & videoDecodeCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR const & videoDecodeH264CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesKHR.maxLevelIdc ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoDecodeH264CapabilitiesKHR.fieldOffsetGranularity ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR const & videoDecodeH264DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoKHR.pStdReferenceInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR const & videoDecodeH264PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.pStdPictureInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.sliceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.pSliceOffsets ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR const & videoDecodeH264ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.stdProfileIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.pictureLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR const & videoDecodeH264SessionParametersAddInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.stdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.pStdSPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.stdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.pStdPPSs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR const & videoDecodeH264SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.maxStdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.maxStdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.pParametersAddInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR const & videoDecodeH265CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoDecodeH265CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesKHR.maxLevelIdc ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR const & videoDecodeH265DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoKHR.pStdReferenceInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR const & videoDecodeH265PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.pStdPictureInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.sliceSegmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.pSliceSegmentOffsets ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR const & videoDecodeH265ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoKHR.stdProfileIdc ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR const & videoDecodeH265SessionParametersAddInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.stdVPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pStdVPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.stdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pStdSPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.stdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pStdPPSs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR const & videoDecodeH265SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.maxStdVPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.maxStdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.maxStdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.pParametersAddInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR const & videoDecodeInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBufferOffset ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBufferRange ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.dstPictureResource ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pSetupReferenceSlot ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.referenceSlotCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pReferenceSlots ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR const & videoDecodeUsageInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.videoUsageHints ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR const & videoEncodeCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.rateControlModes ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.maxRateControlLayers ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.maxBitrate ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.maxQualityLevels ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.encodeInputPictureGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.supportedEncodeFeedbackFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesKHR const & videoEncodeH264CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxLevelIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxSliceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxPPictureL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxBPictureL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxL1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxTemporalLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.expectDyadicTemporalLayerPattern ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.minQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.prefersGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeH264CapabilitiesKHR.requiresGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.stdSyntaxFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoKHR const & videoEncodeH264DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoKHR.pStdReferenceInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR const & videoEncodeH264FrameSizeKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeKHR.frameISize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeKHR.framePSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeKHR.frameBSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoKHR const & videoEncodeH264GopRemainingFrameInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.useGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.gopRemainingI ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.gopRemainingP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.gopRemainingB ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR const & videoEncodeH264NaluSliceInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoKHR.constantQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoKHR.pStdSliceHeader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoKHR const & videoEncodeH264PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.naluSliceEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.pNaluSliceEntries ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.pStdPictureInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.generatePrefixNalu ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoKHR const & videoEncodeH264ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoKHR.stdProfileIdc ); + return 
seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const & videoEncodeH264QpKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpKHR.qpI ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpKHR.qpP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpKHR.qpB ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesKHR const & videoEncodeH264QualityLevelPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredRateControlFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredGopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredIdrPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredConsecutiveBFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredTemporalLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredConstantQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredMaxL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredMaxL1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredStdEntropyCodingModeFlag ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoKHR const & videoEncodeH264RateControlInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.gopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.idrPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.consecutiveBFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.temporalLayerCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoKHR const & videoEncodeH264RateControlLayerInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.useMinQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.minQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.useMaxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.maxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.useMaxFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.maxFrameSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t 
operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoKHR const & videoEncodeH264SessionCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoKHR.useMaxLevelIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoKHR.maxLevelIdc ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR const & videoEncodeH264SessionParametersAddInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.stdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.pStdSPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.stdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.pStdPPSs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoKHR const & videoEncodeH264SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoKHR.maxStdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoKHR.maxStdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoKHR.pParametersAddInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoKHR const & videoEncodeH264SessionParametersFeedbackInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersFeedbackInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersFeedbackInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersFeedbackInfoKHR.hasStdSPSOverrides ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersFeedbackInfoKHR.hasStdPPSOverrides ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoKHR const & videoEncodeH264SessionParametersGetInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.writeStdSPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.writeStdPPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.stdSPSId ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.stdPPSId ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesKHR const & videoEncodeH265CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxLevelIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxSliceSegmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxTiles ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.ctbSizes ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.transformBlockSizes ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxPPictureL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxBPictureL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxL1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxSubLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.expectDyadicTemporalSubLayerPattern ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.minQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.prefersGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.requiresGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.stdSyntaxFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoKHR const & videoEncodeH265DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoKHR.pStdReferenceInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR const & videoEncodeH265FrameSizeKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeKHR.frameISize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeKHR.framePSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeKHR.frameBSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoKHR const & videoEncodeH265GopRemainingFrameInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.useGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.gopRemainingI ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.gopRemainingP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.gopRemainingB ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR const & videoEncodeH265NaluSliceSegmentInfoKHR ) const 
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoKHR.constantQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoKHR.pStdSliceSegmentHeader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoKHR const & videoEncodeH265PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265PictureInfoKHR.naluSliceSegmentEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265PictureInfoKHR.pNaluSliceSegmentEntries ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265PictureInfoKHR.pStdPictureInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoKHR const & videoEncodeH265ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoKHR.stdProfileIdc ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const & videoEncodeH265QpKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpKHR.qpI ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpKHR.qpP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpKHR.qpB ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesKHR const & videoEncodeH265QualityLevelPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredRateControlFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredGopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredIdrPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredConsecutiveBFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredSubLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredConstantQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredMaxL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredMaxL1ReferenceCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoKHR const & videoEncodeH265RateControlInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeH265RateControlInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.gopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.idrPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.consecutiveBFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.subLayerCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoKHR const & videoEncodeH265RateControlLayerInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.useMinQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.minQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.useMaxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.maxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.useMaxFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.maxFrameSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoKHR const & videoEncodeH265SessionCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoKHR.useMaxLevelIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoKHR.maxLevelIdc ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR const & videoEncodeH265SessionParametersAddInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.stdVPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.pStdVPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.stdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.pStdSPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.stdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.pStdPPSs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoKHR const & videoEncodeH265SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.maxStdVPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.maxStdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeH265SessionParametersCreateInfoKHR.maxStdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.pParametersAddInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoKHR const & videoEncodeH265SessionParametersFeedbackInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersFeedbackInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersFeedbackInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersFeedbackInfoKHR.hasStdVPSOverrides ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersFeedbackInfoKHR.hasStdSPSOverrides ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersFeedbackInfoKHR.hasStdPPSOverrides ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoKHR const & videoEncodeH265SessionParametersGetInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.writeStdVPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.writeStdSPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.writeStdPPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.stdVPSId ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.stdSPSId ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.stdPPSId ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR const & videoEncodeInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBufferOffset ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBufferRange ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.srcPictureResource ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pSetupReferenceSlot ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.referenceSlotCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pReferenceSlots ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.precedingExternallyEncodedBytes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR const & videoEncodeQualityLevelInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelInfoKHR.qualityLevel ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR const & videoEncodeQualityLevelPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + 
std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelPropertiesKHR.preferredRateControlMode ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelPropertiesKHR.preferredRateControlLayerCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR const & videoEncodeRateControlLayerInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.averageBitrate ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.maxBitrate ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.frameRateNumerator ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.frameRateDenominator ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR const & videoEncodeRateControlInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.rateControlMode ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.layerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.pLayers ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.virtualBufferSizeInMs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.initialVirtualBufferSizeInMs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR const & videoEncodeSessionParametersFeedbackInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersFeedbackInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersFeedbackInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersFeedbackInfoKHR.hasOverrides ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR const & videoEncodeSessionParametersGetInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersGetInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersGetInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersGetInfoKHR.videoSessionParameters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR const & videoEncodeUsageInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.videoUsageHints ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeUsageInfoKHR.videoContentHints ); + VULKAN_HPP_HASH_COMBINE( 
seed, videoEncodeUsageInfoKHR.tuningMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR const & videoEndCodingInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEndCodingInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEndCodingInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEndCodingInfoKHR.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR const & videoFormatPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.format ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.componentMapping ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageCreateFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageType ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageTiling ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatPropertiesKHR.imageUsageFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoInlineQueryInfoKHR const & videoInlineQueryInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoInlineQueryInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoInlineQueryInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoInlineQueryInfoKHR.queryPool ); + VULKAN_HPP_HASH_COMBINE( seed, videoInlineQueryInfoKHR.firstQuery ); + VULKAN_HPP_HASH_COMBINE( seed, videoInlineQueryInfoKHR.queryCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR const & videoProfileListInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.profileCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileListInfoKHR.pProfiles ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & videoSessionCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.queueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pVideoProfile ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pictureFormat ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxCodedExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.referencePictureFormat ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxDpbSlots ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxActiveReferencePictures ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pStdHeaderVersion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR const & videoSessionMemoryRequirementsKHR ) 
const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.memoryBindIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionMemoryRequirementsKHR.memoryRequirements ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & videoSessionParametersCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.videoSessionParametersTemplate ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersCreateInfoKHR.videoSession ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR const & videoSessionParametersUpdateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersUpdateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersUpdateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionParametersUpdateInfoKHR.updateSequenceCount ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + template <> + struct hash<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & waylandSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.display ); + VULKAN_HPP_HASH_COMBINE( seed, waylandSurfaceCreateInfoKHR.surface ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const & win32KeyedMutexAcquireReleaseInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.acquireCount ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pAcquireSyncs ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pAcquireKeys ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pAcquireTimeouts ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.releaseCount ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pReleaseSyncs ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoKHR.pReleaseKeys ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const & win32KeyedMutexAcquireReleaseInfoNV ) const VULKAN_HPP_NOEXCEPT + { 
+ std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.acquireCount ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pAcquireSyncs ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pAcquireKeys ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pAcquireTimeoutMilliseconds ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.releaseCount ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pReleaseSyncs ); + VULKAN_HPP_HASH_COMBINE( seed, win32KeyedMutexAcquireReleaseInfoNV.pReleaseKeys ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & win32SurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.hinstance ); + VULKAN_HPP_HASH_COMBINE( seed, win32SurfaceCreateInfoKHR.hwnd ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const & writeDescriptorSetAccelerationStructureKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.accelerationStructureCount ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureKHR.pAccelerationStructures ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV> + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const & writeDescriptorSetAccelerationStructureNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.accelerationStructureCount ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetAccelerationStructureNV.pAccelerationStructures ); + return seed; + } + }; + + template <> + struct hash<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock const & writeDescriptorSetInlineUniformBlock ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetInlineUniformBlock.pData ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + template <> + struct hash<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & xcbSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + 
std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.connection ); + VULKAN_HPP_HASH_COMBINE( seed, xcbSurfaceCreateInfoKHR.window ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + template <> + struct hash<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR> + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & xlibSurfaceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.dpy ); + VULKAN_HPP_HASH_COMBINE( seed, xlibSurfaceCreateInfoKHR.window ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#endif // 14 <= VULKAN_HPP_CPP_VERSION + +} // namespace std +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_hpp_macros.hpp b/MacroLibX/third_party/vulkan/vulkan_hpp_macros.hpp new file mode 100644 index 0000000..9526a76 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_hpp_macros.hpp @@ -0,0 +1,280 @@ +// Copyright 2015-2023 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#ifndef VULKAN_HPP_MACROS_HPP +#define VULKAN_HPP_MACROS_HPP + +#if defined( _MSVC_LANG ) +# define VULKAN_HPP_CPLUSPLUS _MSVC_LANG +#else +# define VULKAN_HPP_CPLUSPLUS __cplusplus +#endif + +#if 202002L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 23 +#elif 201703L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 20 +#elif 201402L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 17 +#elif 201103L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 14 +#elif 199711L < VULKAN_HPP_CPLUSPLUS +# define VULKAN_HPP_CPP_VERSION 11 +#else +# error "vulkan.hpp needs at least c++ standard version 11" +#endif + +// include headers holding feature-test macros +#if 20 <= VULKAN_HPP_CPP_VERSION +# include <version> +#else +# include <ciso646> +#endif + +#if defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) +# if !defined( VULKAN_HPP_NO_SMART_HANDLE ) +# define VULKAN_HPP_NO_SMART_HANDLE +# endif +#endif + +#if defined( VULKAN_HPP_NO_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# define VULKAN_HPP_NO_STRUCT_CONSTRUCTORS +# endif +# if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) +# define VULKAN_HPP_NO_UNION_CONSTRUCTORS +# endif +#endif + +#if defined( VULKAN_HPP_NO_SETTERS ) +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# define VULKAN_HPP_NO_STRUCT_SETTERS +# endif +# if !defined( VULKAN_HPP_NO_UNION_SETTERS ) +# define VULKAN_HPP_NO_UNION_SETTERS +# endif +#endif + +#if !defined( VULKAN_HPP_ASSERT ) +# define VULKAN_HPP_ASSERT assert +#endif + +#if !defined( VULKAN_HPP_ASSERT_ON_RESULT ) +# define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT +#endif + +#if !defined( VULKAN_HPP_STATIC_ASSERT ) +# define VULKAN_HPP_STATIC_ASSERT static_assert +#endif + +#if !defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL ) +# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1 +#endif + +#if !defined( __has_include ) +# define __has_include( x ) false +#endif + +#if ( 201907 <= __cpp_lib_three_way_comparison ) && __has_include( <compare> ) && !defined( 
VULKAN_HPP_NO_SPACESHIP_OPERATOR ) +# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR +#endif + +#if ( 201803 <= __cpp_lib_span ) +# define VULKAN_HPP_SUPPORT_SPAN +#endif + +// 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default. +// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) +# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +# define VULKAN_HPP_TYPESAFE_CONVERSION +# endif +#endif + +#if defined( __GNUC__ ) +# define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ ) +#endif + +#if !defined( VULKAN_HPP_HAS_UNRESTRICTED_UNIONS ) +# if defined( __clang__ ) +# if __has_feature( cxx_unrestricted_unions ) +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined( __GNUC__ ) +# if 40600 <= GCC_VERSION +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined( _MSC_VER ) +# if 1900 <= _MSC_VER +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# endif +#endif + +#if !defined( VULKAN_HPP_INLINE ) +# if defined( __clang__ ) +# if __has_attribute( always_inline ) +# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ +# else +# define VULKAN_HPP_INLINE inline +# endif +# elif defined( __GNUC__ ) +# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ +# elif defined( _MSC_VER ) +# define VULKAN_HPP_INLINE inline +# else +# define VULKAN_HPP_INLINE inline +# endif +#endif + +#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +# define VULKAN_HPP_TYPESAFE_EXPLICIT +#else +# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit +#endif + +#if defined( __cpp_constexpr ) +# define VULKAN_HPP_CONSTEXPR constexpr +# if 201304 <= __cpp_constexpr +# define VULKAN_HPP_CONSTEXPR_14 constexpr +# else +# define VULKAN_HPP_CONSTEXPR_14 +# endif +# if ( 201907 <= __cpp_constexpr ) && ( !defined( __GNUC__ ) || ( 110400 < GCC_VERSION ) ) +# define VULKAN_HPP_CONSTEXPR_20 constexpr +# else +# define VULKAN_HPP_CONSTEXPR_20 +# endif +# define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr +#else +# define VULKAN_HPP_CONSTEXPR +# define VULKAN_HPP_CONSTEXPR_14 +# define VULKAN_HPP_CONST_OR_CONSTEXPR const +#endif + +#if !defined( VULKAN_HPP_CONSTEXPR_INLINE ) +# if 201606L <= __cpp_inline_variables +# define VULKAN_HPP_CONSTEXPR_INLINE VULKAN_HPP_CONSTEXPR inline +# else +# define VULKAN_HPP_CONSTEXPR_INLINE VULKAN_HPP_CONSTEXPR +# endif +#endif + +#if !defined( VULKAN_HPP_NOEXCEPT ) +# if defined( _MSC_VER ) && ( _MSC_VER <= 1800 ) +# define VULKAN_HPP_NOEXCEPT +# else +# define VULKAN_HPP_NOEXCEPT noexcept +# define VULKAN_HPP_HAS_NOEXCEPT 1 +# if defined( VULKAN_HPP_NO_EXCEPTIONS ) +# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS noexcept +# else +# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS +# endif +# endif +#endif + +#if 14 <= VULKAN_HPP_CPP_VERSION +# define VULKAN_HPP_DEPRECATED( msg ) [[deprecated( msg )]] +#else +# define VULKAN_HPP_DEPRECATED( msg ) +#endif + +#if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS ) +# define VULKAN_HPP_NODISCARD [[nodiscard]] +# if defined( VULKAN_HPP_NO_EXCEPTIONS ) +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]] +# else +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS +# endif +#else +# define VULKAN_HPP_NODISCARD +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS +#endif + +#if !defined( VULKAN_HPP_NAMESPACE ) +# define VULKAN_HPP_NAMESPACE vk +#endif + +#define VULKAN_HPP_STRINGIFY2( text ) #text +#define 
VULKAN_HPP_STRINGIFY( text ) VULKAN_HPP_STRINGIFY2( text ) +#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY( VULKAN_HPP_NAMESPACE ) + +#if !defined( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC ) +# if defined( VK_NO_PROTOTYPES ) +# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1 +# else +# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0 +# endif +#endif + +#if !defined( VULKAN_HPP_STORAGE_API ) +# if defined( VULKAN_HPP_STORAGE_SHARED ) +# if defined( _MSC_VER ) +# if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT ) +# define VULKAN_HPP_STORAGE_API __declspec( dllexport ) +# else +# define VULKAN_HPP_STORAGE_API __declspec( dllimport ) +# endif +# elif defined( __clang__ ) || defined( __GNUC__ ) +# if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT ) +# define VULKAN_HPP_STORAGE_API __attribute__( ( visibility( "default" ) ) ) +# else +# define VULKAN_HPP_STORAGE_API +# endif +# else +# define VULKAN_HPP_STORAGE_API +# pragma warning Unknown import / export semantics +# endif +# else +# define VULKAN_HPP_STORAGE_API +# endif +#endif + +namespace VULKAN_HPP_NAMESPACE +{ + class DispatchLoaderDynamic; +} // namespace VULKAN_HPP_NAMESPACE + +#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER ) +# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic +# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE \ + namespace VULKAN_HPP_NAMESPACE \ + { \ + VULKAN_HPP_STORAGE_API ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic defaultDispatchLoaderDynamic; \ + } + +namespace VULKAN_HPP_NAMESPACE +{ + extern VULKAN_HPP_STORAGE_API VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic defaultDispatchLoaderDynamic; +} // namespace VULKAN_HPP_NAMESPACE +# else +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::getDispatchLoaderStatic() +# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE +# endif +#endif + +#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ) +# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 +# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic +# else +# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic +# endif +#endif + +#if defined( VULKAN_HPP_NO_DEFAULT_DISPATCHER ) +# define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT +# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT +# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT +#else +# define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT = {} +# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT = nullptr +# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT = VULKAN_HPP_DEFAULT_DISPATCHER +#endif + +#endif \ No newline at end of file diff --git a/MacroLibX/third_party/vulkan/vulkan_ios.h b/MacroLibX/third_party/vulkan/vulkan_ios.h new file mode 100644 index 0000000..211429f --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_ios.h @@ -0,0 +1,48 @@ +#ifndef VULKAN_IOS_H_ +#define VULKAN_IOS_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_MVK_ios_surface is a preprocessor guard. Do not pass it to API calls. 
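+/*
+ * Editor's sketch (added commentary, not part of the generated header): the
+ * MoltenVK surface declarations that follow all share one pattern -- fill the
+ * platform create-info struct, then call the matching vkCreate*Surface entry
+ * point. `instance` and `view` are hypothetical, caller-provided handles; on
+ * iOS, pView is expected to reference a UIView or CAMetalLayer.
+ *
+ *   VkIOSSurfaceCreateInfoMVK info = {0};
+ *   info.sType = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK;
+ *   info.pView = view;
+ *   VkSurfaceKHR surface = VK_NULL_HANDLE;
+ *   VkResult res = vkCreateIOSSurfaceMVK(instance, &info, NULL, &surface);
+ */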
+#define VK_MVK_ios_surface 1 +#define VK_MVK_IOS_SURFACE_SPEC_VERSION 3 +#define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface" +typedef VkFlags VkIOSSurfaceCreateFlagsMVK; +typedef struct VkIOSSurfaceCreateInfoMVK { + VkStructureType sType; + const void* pNext; + VkIOSSurfaceCreateFlagsMVK flags; + const void* pView; +} VkIOSSurfaceCreateInfoMVK; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateIOSSurfaceMVK)(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK( + VkInstance instance, + const VkIOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_macos.h b/MacroLibX/third_party/vulkan/vulkan_macos.h new file mode 100644 index 0000000..c6509cc --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_macos.h @@ -0,0 +1,48 @@ +#ifndef VULKAN_MACOS_H_ +#define VULKAN_MACOS_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_MVK_macos_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_MVK_macos_surface 1 +#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 3 +#define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface" +typedef VkFlags VkMacOSSurfaceCreateFlagsMVK; +typedef struct VkMacOSSurfaceCreateInfoMVK { + VkStructureType sType; + const void* pNext; + VkMacOSSurfaceCreateFlagsMVK flags; + const void* pView; +} VkMacOSSurfaceCreateInfoMVK; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMacOSSurfaceMVK)(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK( + VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_metal.h b/MacroLibX/third_party/vulkan/vulkan_metal.h new file mode 100644 index 0000000..94563a0 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_metal.h @@ -0,0 +1,195 @@ +#ifndef VULKAN_METAL_H_ +#define VULKAN_METAL_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_EXT_metal_surface is a preprocessor guard. Do not pass it to API calls. 
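+/*
+ * Editor's sketch (added commentary, not part of the generated header):
+ * VK_EXT_metal_surface is the cross-vendor replacement for the deprecated
+ * VK_MVK_ios_surface and VK_MVK_macos_surface extensions above; instead of a
+ * view pointer it takes the CAMetalLayer directly. `instance` and `layer` are
+ * hypothetical, caller-provided handles.
+ *
+ *   VkMetalSurfaceCreateInfoEXT info = {0};
+ *   info.sType  = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT;
+ *   info.pLayer = layer;
+ *   VkSurfaceKHR surface = VK_NULL_HANDLE;
+ *   VkResult res = vkCreateMetalSurfaceEXT(instance, &info, NULL, &surface);
+ */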
+#define VK_EXT_metal_surface 1 +#ifdef __OBJC__ +@class CAMetalLayer; +#else +typedef void CAMetalLayer; +#endif + +#define VK_EXT_METAL_SURFACE_SPEC_VERSION 1 +#define VK_EXT_METAL_SURFACE_EXTENSION_NAME "VK_EXT_metal_surface" +typedef VkFlags VkMetalSurfaceCreateFlagsEXT; +typedef struct VkMetalSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkMetalSurfaceCreateFlagsEXT flags; + const CAMetalLayer* pLayer; +} VkMetalSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMetalSurfaceEXT)(VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT( + VkInstance instance, + const VkMetalSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_EXT_metal_objects is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_metal_objects 1 +#ifdef __OBJC__ +@protocol MTLDevice; +typedef id<MTLDevice> MTLDevice_id; +#else +typedef void* MTLDevice_id; +#endif + +#ifdef __OBJC__ +@protocol MTLCommandQueue; +typedef id<MTLCommandQueue> MTLCommandQueue_id; +#else +typedef void* MTLCommandQueue_id; +#endif + +#ifdef __OBJC__ +@protocol MTLBuffer; +typedef id<MTLBuffer> MTLBuffer_id; +#else +typedef void* MTLBuffer_id; +#endif + +#ifdef __OBJC__ +@protocol MTLTexture; +typedef id<MTLTexture> MTLTexture_id; +#else +typedef void* MTLTexture_id; +#endif + +typedef struct __IOSurface* IOSurfaceRef; +#ifdef __OBJC__ +@protocol MTLSharedEvent; +typedef id<MTLSharedEvent> MTLSharedEvent_id; +#else +typedef void* MTLSharedEvent_id; +#endif + +#define VK_EXT_METAL_OBJECTS_SPEC_VERSION 1 +#define VK_EXT_METAL_OBJECTS_EXTENSION_NAME "VK_EXT_metal_objects" + +typedef enum VkExportMetalObjectTypeFlagBitsEXT { + VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT = 0x00000001, + VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT = 0x00000002, + VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT = 0x00000004, + VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT = 0x00000008, + VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT = 0x00000010, + VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT = 0x00000020, + VK_EXPORT_METAL_OBJECT_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkExportMetalObjectTypeFlagBitsEXT; +typedef VkFlags VkExportMetalObjectTypeFlagsEXT; +typedef struct VkExportMetalObjectCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkExportMetalObjectTypeFlagBitsEXT exportObjectType; +} VkExportMetalObjectCreateInfoEXT; + +typedef struct VkExportMetalObjectsInfoEXT { + VkStructureType sType; + const void* pNext; +} VkExportMetalObjectsInfoEXT; + +typedef struct VkExportMetalDeviceInfoEXT { + VkStructureType sType; + const void* pNext; + MTLDevice_id mtlDevice; +} VkExportMetalDeviceInfoEXT; + +typedef struct VkExportMetalCommandQueueInfoEXT { + VkStructureType sType; + const void* pNext; + VkQueue queue; + MTLCommandQueue_id mtlCommandQueue; +} VkExportMetalCommandQueueInfoEXT; + +typedef struct VkExportMetalBufferInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + MTLBuffer_id mtlBuffer; +} VkExportMetalBufferInfoEXT; + +typedef struct VkImportMetalBufferInfoEXT { + VkStructureType sType; + const void* pNext; + MTLBuffer_id mtlBuffer; +} VkImportMetalBufferInfoEXT; + +typedef struct VkExportMetalTextureInfoEXT { + VkStructureType sType; + const void* pNext; + VkImage image; + VkImageView imageView; + VkBufferView bufferView; + VkImageAspectFlagBits plane; + 
MTLTexture_id mtlTexture; +} VkExportMetalTextureInfoEXT; + +typedef struct VkImportMetalTextureInfoEXT { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits plane; + MTLTexture_id mtlTexture; +} VkImportMetalTextureInfoEXT; + +typedef struct VkExportMetalIOSurfaceInfoEXT { + VkStructureType sType; + const void* pNext; + VkImage image; + IOSurfaceRef ioSurface; +} VkExportMetalIOSurfaceInfoEXT; + +typedef struct VkImportMetalIOSurfaceInfoEXT { + VkStructureType sType; + const void* pNext; + IOSurfaceRef ioSurface; +} VkImportMetalIOSurfaceInfoEXT; + +typedef struct VkExportMetalSharedEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkEvent event; + MTLSharedEvent_id mtlSharedEvent; +} VkExportMetalSharedEventInfoEXT; + +typedef struct VkImportMetalSharedEventInfoEXT { + VkStructureType sType; + const void* pNext; + MTLSharedEvent_id mtlSharedEvent; +} VkImportMetalSharedEventInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkExportMetalObjectsEXT)(VkDevice device, VkExportMetalObjectsInfoEXT* pMetalObjectsInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkExportMetalObjectsEXT( + VkDevice device, + VkExportMetalObjectsInfoEXT* pMetalObjectsInfo); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_raii.hpp b/MacroLibX/third_party/vulkan/vulkan_raii.hpp new file mode 100644 index 0000000..e745cdf --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_raii.hpp @@ -0,0 +1,21558 @@ +// Copyright 2015-2023 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#ifndef VULKAN_RAII_HPP +#define VULKAN_RAII_HPP + +#include <memory> +#include <utility> // std::exchange, std::forward +#include <vulkan/vulkan.hpp> + +#if !defined( VULKAN_HPP_RAII_NAMESPACE ) +# define VULKAN_HPP_RAII_NAMESPACE raii +#endif + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) && !defined( VULKAN_HPP_NO_EXCEPTIONS ) +namespace VULKAN_HPP_NAMESPACE +{ + namespace VULKAN_HPP_RAII_NAMESPACE + { + template <typename T, typename U> + VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_INLINE T exchange( T & obj, U && newValue ) + { +# if ( 14 <= VULKAN_HPP_CPP_VERSION ) + return std::exchange( obj, std::forward<U>( newValue ) ); +# else + T oldValue = std::move( obj ); + obj = std::forward<U>( newValue ); + return oldValue; +# endif + } + + class ContextDispatcher : public DispatchLoaderBase + { + public: + ContextDispatcher( PFN_vkGetInstanceProcAddr getProcAddr ) + : vkGetInstanceProcAddr( getProcAddr ) + //=== VK_VERSION_1_0 === + , vkCreateInstance( PFN_vkCreateInstance( getProcAddr( NULL, "vkCreateInstance" ) ) ) + , vkEnumerateInstanceExtensionProperties( PFN_vkEnumerateInstanceExtensionProperties( getProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ) ) + , vkEnumerateInstanceLayerProperties( PFN_vkEnumerateInstanceLayerProperties( getProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ) ) + //=== VK_VERSION_1_1 === + , vkEnumerateInstanceVersion( PFN_vkEnumerateInstanceVersion( getProcAddr( NULL, "vkEnumerateInstanceVersion" ) ) ) + { + } + + public: + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; + + //=== VK_VERSION_1_0 === + PFN_vkCreateInstance vkCreateInstance = 0; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; + + //=== VK_VERSION_1_1 === + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + }; + + class InstanceDispatcher : public 
DispatchLoaderBase + { + public: + InstanceDispatcher( PFN_vkGetInstanceProcAddr getProcAddr, VkInstance instance ) : vkGetInstanceProcAddr( getProcAddr ) + { + //=== VK_VERSION_1_0 === + vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); + vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); + vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); + vkGetPhysicalDeviceFormatProperties = + PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); + vkGetPhysicalDeviceImageFormatProperties = + PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); + vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); + vkGetPhysicalDeviceQueueFamilyProperties = + PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); + vkGetPhysicalDeviceMemoryProperties = + PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); + vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); + vkEnumerateDeviceExtensionProperties = + PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); + vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + + //=== VK_VERSION_1_1 === + vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); + vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + vkGetPhysicalDeviceFormatProperties2 = + PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = + PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2 = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + vkGetPhysicalDeviceMemoryProperties2 = + PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + vkGetPhysicalDeviceExternalBufferProperties = + PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + vkGetPhysicalDeviceExternalFenceProperties = + 
PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + + //=== VK_VERSION_1_3 === + vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); + + //=== VK_KHR_surface === + vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); + vkGetPhysicalDeviceSurfaceSupportKHR = + PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); + vkGetPhysicalDeviceSurfaceFormatsKHR = + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); + vkGetPhysicalDeviceSurfacePresentModesKHR = + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + + //=== VK_KHR_swapchain === + vkGetPhysicalDevicePresentRectanglesKHR = + PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + + //=== VK_KHR_display === + vkGetPhysicalDeviceDisplayPropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); + vkGetDisplayPlaneSupportedDisplaysKHR = + PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); + vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); + vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); + vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); + vkGetPhysicalDeviceXlibPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); + vkGetPhysicalDeviceXcbPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + 
vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); + vkGetPhysicalDeviceWaylandPresentationSupportKHR = + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); + vkGetPhysicalDeviceWin32PresentationSupportKHR = + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); + vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); + vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + + //=== VK_KHR_video_queue === + vkGetPhysicalDeviceVideoCapabilitiesKHR = + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); + vkGetPhysicalDeviceVideoFormatPropertiesKHR = + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); +# endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + + //=== VK_KHR_get_physical_device_properties2 === + vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); + if ( !vkGetPhysicalDeviceFeatures2 ) + vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; + vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceProperties2 ) + vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; + vkGetPhysicalDeviceFormatProperties2KHR = + PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceFormatProperties2 ) + vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; + vkGetPhysicalDeviceImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceImageFormatProperties2 ) + vkGetPhysicalDeviceImageFormatProperties2 = 
vkGetPhysicalDeviceImageFormatProperties2KHR; + vkGetPhysicalDeviceQueueFamilyProperties2KHR = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) + vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; + vkGetPhysicalDeviceMemoryProperties2KHR = + PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceMemoryProperties2 ) + vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) + vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_device_group_creation === + vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + if ( !vkEnumeratePhysicalDeviceGroups ) + vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + + //=== VK_KHR_external_memory_capabilities === + vkGetPhysicalDeviceExternalBufferPropertiesKHR = + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalBufferProperties ) + vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + + //=== VK_KHR_external_semaphore_capabilities === + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) + vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + + //=== VK_EXT_direct_mode_display === + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); + vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + vkGetPhysicalDeviceSurfaceCapabilities2EXT = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + + //=== VK_KHR_external_fence_capabilities === + vkGetPhysicalDeviceExternalFencePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalFenceProperties ) + vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + + //=== VK_KHR_performance_query 
=== + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + + //=== VK_KHR_get_surface_capabilities2 === + vkGetPhysicalDeviceSurfaceCapabilities2KHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); + vkGetPhysicalDeviceSurfaceFormats2KHR = + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + + //=== VK_KHR_get_display_properties2 === + vkGetPhysicalDeviceDisplayProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); + vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); + vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); + vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); + vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + + //=== VK_EXT_sample_locations === + vkGetPhysicalDeviceMultisamplePropertiesEXT = + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); + if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR ) + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) 
); +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + vkGetPhysicalDeviceFragmentShadingRatesKHR = + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + + //=== VK_EXT_tooling_info === + vkGetPhysicalDeviceToolPropertiesEXT = + PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); + if ( !vkGetPhysicalDeviceToolProperties ) + vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + + //=== VK_NV_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + + //=== VK_NV_coverage_reduction_mode === + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkGetPhysicalDeviceSurfacePresentModes2EXT = + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); + + //=== VK_EXT_acquire_drm_display === + vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); + vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + + //=== VK_KHR_video_encode_queue === + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); + vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); + vkGetPhysicalDeviceScreenPresentationSupportQNX = + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_NV_optical_flow === + vkGetPhysicalDeviceOpticalFlowImageFormatsNV = + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) ); + + //=== VK_KHR_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) ); + + //=== VK_KHR_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) ); + + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); + } + + public: + //=== VK_VERSION_1_0 === + PFN_vkDestroyInstance vkDestroyInstance = 0; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; + PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; + PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; + PFN_vkCreateDevice vkCreateDevice = 0; + PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; + PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + + //=== VK_VERSION_1_1 === + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + + //=== VK_VERSION_1_3 === + PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; + + //=== VK_KHR_surface === + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; + + //=== VK_KHR_swapchain === + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + + //=== VK_KHR_display === + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + 
PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0;
+      PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0;
+      PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0;
+      PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0;
+      PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0;
+
+# if defined( VK_USE_PLATFORM_XLIB_KHR )
+      //=== VK_KHR_xlib_surface ===
+      PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0;
+      PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0;
+# else
+      PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0;
+      PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0;
+# endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+# if defined( VK_USE_PLATFORM_XCB_KHR )
+      //=== VK_KHR_xcb_surface ===
+      PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0;
+      PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0;
+# else
+      PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0;
+      PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0;
+# endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+# if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+      //=== VK_KHR_wayland_surface ===
+      PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0;
+      PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
+# else
+      PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0;
+      PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0;
+# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+# if defined( VK_USE_PLATFORM_ANDROID_KHR )
+      //=== VK_KHR_android_surface ===
+      PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0;
+# else
+      PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0;
+# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+      //=== VK_KHR_win32_surface ===
+      PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0;
+      PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0;
+# else
+      PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0;
+      PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0;
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+      //=== VK_EXT_debug_report ===
+      PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0;
+      PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0;
+      PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0;
+
+      //=== VK_KHR_video_queue ===
+      PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0;
+      PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0;
+
+# if defined( VK_USE_PLATFORM_GGP )
+      //=== VK_GGP_stream_descriptor_surface ===
+      PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0;
+# else
+      PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0;
+# endif /*VK_USE_PLATFORM_GGP*/
+
+      //=== VK_NV_external_memory_capabilities ===
+      PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0;
+
+      //=== VK_KHR_get_physical_device_properties2 ===
+      PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0;
+      PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0;
+      PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0;
+      PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0;
+      PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0;
+      PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0;
+      PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0;
+
+# if defined( VK_USE_PLATFORM_VI_NN )
+      //=== VK_NN_vi_surface ===
+      PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0;
+# else
+      PFN_dummy vkCreateViSurfaceNN_placeholder = 0;
+# endif /*VK_USE_PLATFORM_VI_NN*/
+
+      //=== VK_KHR_device_group_creation ===
+      PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0;
+
+      //=== VK_KHR_external_memory_capabilities ===
+      PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0;
+
+      //=== VK_KHR_external_semaphore_capabilities ===
+      PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0;
+
+      //=== VK_EXT_direct_mode_display ===
+      PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0;
+
+# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+      //=== VK_EXT_acquire_xlib_display ===
+      PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0;
+      PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0;
+# else
+      PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0;
+      PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0;
+# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+      //=== VK_EXT_display_surface_counter ===
+      PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0;
+
+      //=== VK_KHR_external_fence_capabilities ===
+      PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0;
+
+      //=== VK_KHR_performance_query ===
+      PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
+      PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
+
+      //=== VK_KHR_get_surface_capabilities2 ===
+      PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0;
+      PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0;
+
+      //=== VK_KHR_get_display_properties2 ===
+      PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0;
+      PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0;
+      PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0;
+      PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0;
+
+# if defined( VK_USE_PLATFORM_IOS_MVK )
+      //=== VK_MVK_ios_surface ===
+      PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0;
+# else
+      PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0;
+# endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+# if defined( VK_USE_PLATFORM_MACOS_MVK )
+      //=== VK_MVK_macos_surface ===
+      PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0;
+# else
+      PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0;
+# endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+      //=== VK_EXT_debug_utils ===
+      PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0;
+      PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0;
+      PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0;
+
+      //=== VK_EXT_sample_locations ===
+      PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0;
+
+      //=== VK_EXT_calibrated_timestamps ===
+      PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0;
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+      //=== VK_FUCHSIA_imagepipe_surface ===
+      PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0;
+# else
+      PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0;
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+      //=== VK_EXT_metal_surface ===
+      PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0;
+# else
+      PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0;
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+      //=== VK_KHR_fragment_shading_rate ===
+      PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0;
+
+      //=== VK_EXT_tooling_info ===
+      PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0;
+
+      //=== VK_NV_cooperative_matrix ===
+      PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0;
+
+      //=== VK_NV_coverage_reduction_mode ===
+      PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+      //=== VK_EXT_full_screen_exclusive ===
+      PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0;
+# else
+      PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0;
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+      //=== VK_EXT_headless_surface ===
+      PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0;
+
+      //=== VK_EXT_acquire_drm_display ===
+      PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0;
+      PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0;
+
+      //=== VK_KHR_video_encode_queue ===
+      PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0;
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+      //=== VK_NV_acquire_winrt_display ===
+      PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0;
+      PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0;
+# else
+      PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0;
+      PFN_dummy vkGetWinrtDisplayNV_placeholder = 0;
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+      //=== VK_EXT_directfb_surface ===
+      PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0;
+      PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0;
+# else
+      PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0;
+      PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0;
+# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+# if defined( VK_USE_PLATFORM_SCREEN_QNX )
+      //=== VK_QNX_screen_surface ===
+      PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0;
+      PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0;
+# else
+      PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0;
+      PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0;
+# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+      //=== VK_NV_optical_flow ===
+      PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0;
+
+      //=== VK_KHR_cooperative_matrix ===
+      PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0;
+
+      //=== VK_KHR_calibrated_timestamps ===
+      PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0;
+
+      PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
+    };
+
+    class DeviceDispatcher : public DispatchLoaderBase
+    {
+    public:
+      DeviceDispatcher( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device ) : vkGetDeviceProcAddr( getProcAddr )
+      {
+        //=== VK_VERSION_1_0 ===
+        vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
+        vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
+        vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
+        vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
+        vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
+        vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
+        vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
+        vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
+        vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
+        vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
+        vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
+        vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
+        vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
+        vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
+        vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
+        vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
+        vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
+        vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
+        vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
+        vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
+        vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
+        vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
+        vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
+        vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
+        vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
+        vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
+        vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
+        vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
+        vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
+        vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
+        vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
+        vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
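        // The constructor bootstraps itself: the vkGetDeviceProcAddr member is first re-resolved
        // through the loader-supplied getProcAddr, and every subsequent device-level entry point is
        // then fetched directly against this VkDevice, bypassing the loader's per-call dispatch
        // trampoline. A minimal sketch of the same pattern outside this generated header follows
        // (DeviceFns and makeDeviceFns are illustrative names, not vulkan.hpp API):
        //
        //   #include <vulkan/vulkan.h>
        //
        //   struct DeviceFns
        //   {
        //     PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = nullptr;
        //     PFN_vkQueueSubmit       vkQueueSubmit       = nullptr;
        //   };
        //
        //   inline DeviceFns makeDeviceFns( PFN_vkGetDeviceProcAddr gdpa, VkDevice device )
        //   {
        //     DeviceFns fns;
        //     // Re-fetch the resolver through itself, exactly as the constructor above does.
        //     fns.vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( gdpa( device, "vkGetDeviceProcAddr" ) );
        //     fns.vkQueueSubmit       = PFN_vkQueueSubmit( fns.vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
        //     return fns;
        //   }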
vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( 
device, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdBlitImage = 
PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" 
) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = + PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + + //=== VK_VERSION_1_3 === + vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); + 
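        // Note on the VK_VERSION_1_1/1_2/1_3 sections above: vkGetDeviceProcAddr may return null
        // for commands from a core version newer than the one this device was created for, so the
        // core pointers loaded here can legitimately be null. The extension sections further down
        // compensate by aliasing the suffixed spelling onto the empty core slot (for example
        // vkCmdBeginRenderingKHR onto vkCmdBeginRendering). Call sites should still guard, as in
        // this sketch (dispatch is assumed to be a populated DeviceDispatcher):
        //
        //   if ( dispatch.vkCmdBeginRendering )
        //     dispatch.vkCmdBeginRendering( commandBuffer, &renderingInfo );
        //   else
        //     ; // neither Vulkan 1.3 nor VK_KHR_dynamic_rendering is available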
vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = + PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); + + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) 
); + vkGetDeviceGroupPresentCapabilitiesKHR = + PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = + PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + + //=== VK_KHR_video_queue === + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = + PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); + + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = + PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = 
PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +# endif 
/*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = + PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = + PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = + PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_EXT_discard_rectangles === + 
vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); + vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + +# if defined( 
VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = + PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = + PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = + PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = + PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = 
PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = + PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = + PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = + PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = + PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = + PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = + PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = + PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = + PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = + PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = + PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = 
PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = + PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = + PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = + PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = + PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = 
PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + if ( !vkGetCalibratedTimestampsKHR ) + vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = + PFN_vkCmdSetPerformanceStreamMarkerINTEL( 
vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = + PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = + PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = + PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + + //=== VK_KHR_fragment_shading_rate === + vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = + PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) 
+ vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); + if ( !vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = + PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = + PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = + PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); + 
vkGetPipelineExecutableInternalRepresentationsKHR = + PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2KHR ) + vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; + + //=== VK_KHR_map_memory2 === + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); + vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); + + //=== VK_EXT_swapchain_maintenance1 === + vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); + + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = + PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + + //=== VK_EXT_depth_bias_control === + vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + + //=== VK_KHR_video_encode_queue === + vkGetEncodedVideoSessionParametersKHR = + PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) ); + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, 
"vkCmdEncodeVideoKHR" ) ); + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) ); + vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) ); + vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) ); + vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); + vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); + vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_EXT_descriptor_buffer === + vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); + vkGetDescriptorSetLayoutBindingOffsetEXT = + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); + vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); + vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); + vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); + vkGetBufferOpaqueCaptureDescriptorDataEXT = + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); + 
vkGetImageOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageViewOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); + vkGetSamplerOpaqueCaptureDescriptorDataEXT = + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); + vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + + //=== VK_EXT_mesh_shader === + vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); + vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); + vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; + + //=== VK_EXT_device_fault === + vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); + + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = + PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = 
PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + vkSetBufferCollectionBufferConstraintsFUCHSIA = + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = + PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); + + //=== VK_EXT_pipeline_properties === + vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); + + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + + //=== VK_KHR_ray_tracing_maintenance1 === + vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); + + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) 
+      );
+
+      //=== VK_EXT_opacity_micromap ===
+      vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) );
+      vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) );
+      vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) );
+      vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) );
+      vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) );
+      vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) );
+      vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) );
+      vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) );
+      vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) );
+      vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) );
+      vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) );
+      vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) );
+      vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) );
+      vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) );
+
+      //=== VK_HUAWEI_cluster_culling_shader ===
+      vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) );
+      vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) );
+
+      //=== VK_EXT_pageable_device_local_memory ===
+      vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) );
+
+      //=== VK_KHR_maintenance4 ===
+      vkGetDeviceBufferMemoryRequirementsKHR =
+        PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
+      if ( !vkGetDeviceBufferMemoryRequirements )
+        vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR;
+      vkGetDeviceImageMemoryRequirementsKHR =
+        PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) );
+      if ( !vkGetDeviceImageMemoryRequirements )
+        vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR;
+      vkGetDeviceImageSparseMemoryRequirementsKHR =
+        PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
+      if ( !vkGetDeviceImageSparseMemoryRequirements )
+        vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR;
+
+      //=== VK_VALVE_descriptor_set_host_mapping ===
+      vkGetDescriptorSetLayoutHostMappingInfoVALVE =
+        PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) );
+      vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) );
+
+      //=== VK_NV_copy_memory_indirect ===
+      vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) );
+      vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) );
+
+      //=== VK_NV_memory_decompression ===
+      vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) );
+      vkCmdDecompressMemoryIndirectCountNV =
+        PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) );
+
+      //=== VK_NV_device_generated_commands_compute ===
+      vkGetPipelineIndirectMemoryRequirementsNV =
+        PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) );
+      vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) );
+      vkGetPipelineIndirectDeviceAddressNV =
+        PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) );
+
+      //=== VK_EXT_extended_dynamic_state3 ===
+      vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) );
+      vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) );
+      vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) );
+      vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) );
+      vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) );
+      vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) );
+      vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) );
+      vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) );
+      vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) );
+      vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) );
+      vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) );
+      vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) );
+      vkCmdSetConservativeRasterizationModeEXT =
+        PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) );
+      vkCmdSetExtraPrimitiveOverestimationSizeEXT =
+        PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) );
+      vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) );
+      vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) );
+      vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) );
+      vkCmdSetProvokingVertexModeEXT =
+        PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) );
+      vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) );
+      vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) );
+      vkCmdSetDepthClipNegativeOneToOneEXT =
+        PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) );
+      vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) );
+      vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) );
+      vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) );
+      vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) );
+      vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) );
+      vkCmdSetCoverageModulationTableEnableNV =
+        PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) );
+      vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) );
+      vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) );
+      vkCmdSetRepresentativeFragmentTestEnableNV =
+        PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
+      vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) );
+
+      //=== VK_EXT_shader_module_identifier ===
+      vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) );
+      vkGetShaderModuleCreateInfoIdentifierEXT =
+        PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
+
+      //=== VK_NV_optical_flow ===
+      vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) );
+      vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) );
+      vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
+      vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
+
+      //=== VK_KHR_maintenance5 ===
+      vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) );
+      vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) );
+      vkGetDeviceImageSubresourceLayoutKHR =
+        PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) );
+      vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) );
+
+      //=== VK_EXT_shader_object ===
+      vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) );
+      vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) );
+      vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) );
+      vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) );
+
+      //=== VK_QCOM_tile_properties ===
+      vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) );
+      vkGetDynamicRenderingTilePropertiesQCOM =
+        PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) );
+
+      //=== VK_NV_low_latency2 ===
+      vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) );
+      vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) );
+      vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) );
+      vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) );
+      vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) );
+
+      //=== VK_EXT_attachment_feedback_loop_dynamic_state ===
+      vkCmdSetAttachmentFeedbackLoopEnableEXT =
+        PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) );
+
+# if defined( VK_USE_PLATFORM_SCREEN_QNX )
+      //=== VK_QNX_external_memory_screen_buffer ===
+      vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) );
+# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+      //=== VK_KHR_calibrated_timestamps ===
+      vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) );
+
+      //=== VK_KHR_maintenance6 ===
+      vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) );
+      vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) );
+      vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) );
+      vkCmdPushDescriptorSetWithTemplate2KHR =
+        PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) );
+      vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) );
+      vkCmdBindDescriptorBufferEmbeddedSamplers2EXT =
+        PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) );
+    }
+
+  public:
+    //=== VK_VERSION_1_0 ===
+    PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
+    PFN_vkDestroyDevice vkDestroyDevice = 0;
+    PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
+    PFN_vkQueueSubmit vkQueueSubmit = 0;
+    PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
+    PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
+    PFN_vkAllocateMemory vkAllocateMemory = 0;
+    PFN_vkFreeMemory vkFreeMemory = 0;
+    PFN_vkMapMemory vkMapMemory = 0;
+    PFN_vkUnmapMemory vkUnmapMemory = 0;
+    PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
+    PFN_vkInvalidateMappedMemoryRanges
vkInvalidateMappedMemoryRanges = 0; + PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; + PFN_vkBindBufferMemory vkBindBufferMemory = 0; + PFN_vkBindImageMemory vkBindImageMemory = 0; + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; + PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; + PFN_vkQueueBindSparse vkQueueBindSparse = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkDestroyFence vkDestroyFence = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkGetFenceStatus vkGetFenceStatus = 0; + PFN_vkWaitForFences vkWaitForFences = 0; + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkCreateImage vkCreateImage = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkDestroyImageView vkDestroyImageView = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; 
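+    // Every PFN member of this table is value-initialized to 0, so a command
+    // that did not resolve (extension not enabled or not supported by the
+    // driver) can be detected with a simple null check before it is called.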
+ PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; + + //=== VK_VERSION_1_1 === + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; + + //=== VK_VERSION_1_2 === + PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; + PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; + PFN_vkResetQueryPool vkResetQueryPool = 0; + 
PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; + PFN_vkWaitSemaphores vkWaitSemaphores = 0; + PFN_vkSignalSemaphore vkSignalSemaphore = 0; + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; + PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; + + //=== VK_VERSION_1_3 === + PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; + PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; + PFN_vkSetPrivateData vkSetPrivateData = 0; + PFN_vkGetPrivateData vkGetPrivateData = 0; + PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; + PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; + PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; + PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; + PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; + PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; + PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; + PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; + PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; + PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; + PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; + PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; + PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; + PFN_vkCmdEndRendering vkCmdEndRendering = 0; + PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; + PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; + PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; + PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; + PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; + PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; + PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; + PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; + PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; + PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; + PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; + PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; + PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; + PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; + PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; + PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; + PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; + PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; + + //=== VK_KHR_swapchain === + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; + PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; + + //=== VK_KHR_display_swapchain === + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; + + //=== VK_EXT_debug_marker === + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; + + //=== VK_KHR_video_queue === + PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; + 
PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; + PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; + PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; + PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; + PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; + PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; + PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; + PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; + PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; + + //=== VK_KHR_video_decode_queue === + PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; + + //=== VK_EXT_transform_feedback === + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; + PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; + + //=== VK_NVX_binary_import === + PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; + PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; + PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; + PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; + PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; + + //=== VK_NVX_image_view_handle === + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + + //=== VK_AMD_draw_indirect_count === + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; + + //=== VK_AMD_shader_info === + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; + + //=== VK_KHR_dynamic_rendering === + PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; + PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +# else + PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_device_group === + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; + + //=== VK_KHR_maintenance1 === + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +# else + PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +# else + PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; +# endif 
/*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; + + //=== VK_KHR_push_descriptor === + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; + + //=== VK_EXT_conditional_rendering === + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; + + //=== VK_KHR_descriptor_update_template === + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; + + //=== VK_NV_clip_space_w_scaling === + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; + + //=== VK_EXT_display_control === + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; + + //=== VK_GOOGLE_display_timing === + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; + + //=== VK_EXT_discard_rectangles === + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; + PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; + + //=== VK_EXT_hdr_metadata === + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; + + //=== VK_KHR_create_renderpass2 === + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; + + //=== VK_KHR_shared_presentable_image === + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +# else + PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; + + //=== VK_KHR_performance_query === + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; + PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; + + //=== VK_EXT_debug_utils === + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + 
PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +# else + PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; + PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; + PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; + PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; + PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; + PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; +# else + PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; + PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; + + //=== VK_KHR_get_memory_requirements2 === + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; + + //=== VK_KHR_acceleration_structure === + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; + PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; + PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; + PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; + PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; + PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; + + //=== VK_KHR_ray_tracing_pipeline === + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; + PFN_vkCreateRayTracingPipelinesKHR 
+    vkCreateRayTracingPipelinesKHR = 0;
+    PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0;
+    PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0;
+    PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0;
+    PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0;
+    PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0;
+
+    //=== VK_KHR_sampler_ycbcr_conversion ===
+    PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0;
+    PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0;
+
+    //=== VK_KHR_bind_memory2 ===
+    PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0;
+    PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0;
+
+    //=== VK_EXT_image_drm_format_modifier ===
+    PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0;
+
+    //=== VK_EXT_validation_cache ===
+    PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0;
+    PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0;
+    PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0;
+    PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0;
+
+    //=== VK_NV_shading_rate_image ===
+    PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0;
+    PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0;
+    PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0;
+
+    //=== VK_NV_ray_tracing ===
+    PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0;
+    PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0;
+    PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0;
+    PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0;
+    PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0;
+    PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0;
+    PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0;
+    PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0;
+    PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0;
+    PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0;
+    PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0;
+    PFN_vkCompileDeferredNV vkCompileDeferredNV = 0;
+
+    //=== VK_KHR_maintenance3 ===
+    PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0;
+
+    //=== VK_KHR_draw_indirect_count ===
+    PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0;
+    PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0;
+
+    //=== VK_EXT_external_memory_host ===
+    PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
+
+    //=== VK_AMD_buffer_marker ===
+    PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0;
+
+    //=== VK_EXT_calibrated_timestamps ===
+    PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0;
+
+    //=== VK_NV_mesh_shader ===
+    PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0;
+    PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0;
+    PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
+
+    //=== VK_NV_scissor_exclusive ===
+    PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0;
+    PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0;
+
+    //=== VK_NV_device_diagnostic_checkpoints ===
+    PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0;
+    PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0;
+
+    //=== VK_KHR_timeline_semaphore ===
+    PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0;
+    PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0;
+    PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0;
+
+    //=== VK_INTEL_performance_query ===
+    PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0;
+    PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0;
+    PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0;
+    PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0;
+    PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0;
+    PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0;
+    PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0;
+    PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
+    PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0;
+
+    //=== VK_AMD_display_native_hdr ===
+    PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0;
+
+    //=== VK_KHR_fragment_shading_rate ===
+    PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0;
+
+    //=== VK_EXT_buffer_device_address ===
+    PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0;
+
+    //=== VK_KHR_present_wait ===
+    PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0;
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_EXT_full_screen_exclusive ===
+    PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0;
+    PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
+    PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0;
+# else
+    PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0;
+    PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0;
+    PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0;
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    //=== VK_KHR_buffer_device_address ===
+    PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0;
+    PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0;
+    PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0;
+
+    //=== VK_EXT_line_rasterization ===
+    PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0;
+
+    //=== VK_EXT_host_query_reset ===
+    PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
+
+    //=== VK_EXT_extended_dynamic_state ===
+    PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0;
+    PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0;
+    PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0;
+    PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0;
+    PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0;
+    PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0;
+    PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0;
+    PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0;
+    PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0;
+    PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0;
+    PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0;
+    PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0;
+
+    //=== VK_KHR_deferred_host_operations ===
+    PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0;
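+    // The PFN_dummy *_placeholder members in the #else branches (as in the
+    // VK_EXT_full_screen_exclusive block above) appear to keep the size and
+    // member offsets of this dispatch table identical whether or not the
+    // platform-specific defines are set.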
PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; + PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; + PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; + PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; + + //=== VK_KHR_pipeline_executable_properties === + PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; + PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; + PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; + + //=== VK_EXT_host_image_copy === + PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; + PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; + PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; + PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; + PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; + + //=== VK_KHR_map_memory2 === + PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; + PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; + + //=== VK_EXT_swapchain_maintenance1 === + PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0; + + //=== VK_NV_device_generated_commands === + PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; + PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; + PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; + + //=== VK_EXT_depth_bias_control === + PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0; + + //=== VK_EXT_private_data === + PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; + PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; + PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; + PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; + + //=== VK_KHR_video_encode_queue === + PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0; + PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0; + PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0; + PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0; + PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0; + PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0; + PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0; +# else + PFN_dummy vkCreateCudaModuleNV_placeholder = 0; + PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0; + PFN_dummy vkCreateCudaFunctionNV_placeholder = 0; + PFN_dummy vkDestroyCudaModuleNV_placeholder = 0; + PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0; + PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0; +# else + PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; + PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; + PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; + PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; + 
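+    // Usage sketch (illustrative only; `loader`, `queue` and `submitInfo` are
+    // assumed to exist in the caller's scope): once init( device ) has run,
+    // call through the table guarded by a null check, e.g.
+    //   if ( loader.vkQueueSubmit2KHR )
+    //     loader.vkQueueSubmit2KHR( queue, 1, &submitInfo, VK_NULL_HANDLE );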
PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; + PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; + PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; + PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; + + //=== VK_EXT_descriptor_buffer === + PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0; + PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0; + PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0; + PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0; + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0; + + //=== VK_NV_fragment_shading_rate_enums === + PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; + + //=== VK_EXT_mesh_shader === + PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; + + //=== VK_KHR_copy_commands2 === + PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; + PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; + PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; + PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; + PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; + PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; + + //=== VK_EXT_device_fault === + PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; + + //=== VK_EXT_vertex_input_dynamic_state === + PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; +# else + PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; + PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; +# else + PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; + PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; + PFN_vkGetBufferCollectionPropertiesFUCHSIA 
vkGetBufferCollectionPropertiesFUCHSIA = 0; +# else + PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; + PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; + + //=== VK_HUAWEI_invocation_mask === + PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; + + //=== VK_NV_external_memory_rdma === + PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; + + //=== VK_EXT_pipeline_properties === + PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; + + //=== VK_EXT_extended_dynamic_state2 === + PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; + PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; + PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; + PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; + PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; + + //=== VK_EXT_color_write_enable === + PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; + + //=== VK_KHR_ray_tracing_maintenance1 === + PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; + + //=== VK_EXT_multi_draw === + PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; + PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; + + //=== VK_EXT_opacity_micromap === + PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0; + PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0; + PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0; + PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0; + PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0; + PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0; + PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0; + PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0; + PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0; + PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0; + PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0; + PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0; + PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0; + PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0; + + //=== VK_HUAWEI_cluster_culling_shader === + PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0; + PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0; + + //=== VK_EXT_pageable_device_local_memory === + PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; + + //=== VK_KHR_maintenance4 === + PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; + + //=== VK_VALVE_descriptor_set_host_mapping === + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; + PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; + + //=== VK_NV_copy_memory_indirect === + PFN_vkCmdCopyMemoryIndirectNV 
vkCmdCopyMemoryIndirectNV = 0; + PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; + + //=== VK_NV_memory_decompression === + PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; + PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; + + //=== VK_NV_device_generated_commands_compute === + PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; + PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; + PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; + + //=== VK_EXT_extended_dynamic_state3 === + PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; + PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; + PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0; + PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0; + PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0; + PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0; + PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0; + PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0; + PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0; + PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0; + PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0; + PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0; + PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0; + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0; + PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0; + PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0; + PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0; + PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0; + PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0; + PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0; + PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0; + PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0; + PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0; + PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0; + PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0; + PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0; + PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0; + PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0; + PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0; + PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0; + PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0; + + //=== VK_EXT_shader_module_identifier === + PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; + PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; + + //=== VK_NV_optical_flow === + PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0; + PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0; + PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; + PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; + + //=== VK_KHR_maintenance5 === + 
PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0; + PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; + PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; + PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; + + //=== VK_EXT_shader_object === + PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; + PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; + PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; + PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; + + //=== VK_QCOM_tile_properties === + PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; + PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; + + //=== VK_NV_low_latency2 === + PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0; + PFN_vkLatencySleepNV vkLatencySleepNV = 0; + PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0; + PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0; + PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0; + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0; +# else + PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_calibrated_timestamps === + PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0; + + //=== VK_KHR_maintenance6 === + PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0; + PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0; + PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0; + PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0; + }; + + //======================================== + //=== RAII HANDLE forward declarations === + //======================================== + + //=== VK_VERSION_1_0 === + class Instance; + class PhysicalDevice; + class Device; + class Queue; + class DeviceMemory; + class Fence; + class Semaphore; + class Event; + class QueryPool; + class Buffer; + class BufferView; + class Image; + class ImageView; + class ShaderModule; + class PipelineCache; + class Pipeline; + class PipelineLayout; + class Sampler; + class DescriptorPool; + class DescriptorSet; + class DescriptorSetLayout; + class Framebuffer; + class RenderPass; + class CommandPool; + class CommandBuffer; + + //=== VK_VERSION_1_1 === + class SamplerYcbcrConversion; + class DescriptorUpdateTemplate; + + //=== VK_VERSION_1_3 === + class PrivateDataSlot; + + //=== VK_KHR_surface === + class SurfaceKHR; + + //=== VK_KHR_swapchain === + class SwapchainKHR; + + //=== VK_KHR_display === + class DisplayKHR; + class DisplayModeKHR; + + //=== VK_EXT_debug_report === + class DebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + class VideoSessionKHR; + class VideoSessionParametersKHR; + + //=== VK_NVX_binary_import === + class CuModuleNVX; + class CuFunctionNVX; + + //=== VK_EXT_debug_utils === + class DebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + class AccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + class ValidationCacheEXT; + + //=== 
VK_NV_ray_tracing ===
+    class AccelerationStructureNV;
+
+    //=== VK_INTEL_performance_query ===
+    class PerformanceConfigurationINTEL;
+
+    //=== VK_KHR_deferred_host_operations ===
+    class DeferredOperationKHR;
+
+    //=== VK_NV_device_generated_commands ===
+    class IndirectCommandsLayoutNV;
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+    //=== VK_NV_cuda_kernel_launch ===
+    class CudaModuleNV;
+    class CudaFunctionNV;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+    //=== VK_FUCHSIA_buffer_collection ===
+    class BufferCollectionFUCHSIA;
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+    //=== VK_EXT_opacity_micromap ===
+    class MicromapEXT;
+
+    //=== VK_NV_optical_flow ===
+    class OpticalFlowSessionNV;
+
+    //=== VK_EXT_shader_object ===
+    class ShaderEXT;
+
+    //====================
+    //=== RAII HANDLES ===
+    //====================
+
+    class Context
+    {
+    public:
+# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+      Context()
+        : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher(
+            m_dynamicLoader.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" ) ) )
+# else
+      Context( PFN_vkGetInstanceProcAddr getInstanceProcAddr )
+        : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher( getInstanceProcAddr ) )
+# endif
+      {
+      }
+
+      ~Context() = default;
+
+      Context( Context const & ) = delete;
+      Context( Context && rhs ) VULKAN_HPP_NOEXCEPT
+# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+        : m_dynamicLoader( std::move( rhs.m_dynamicLoader ) )
+        , m_dispatcher( rhs.m_dispatcher.release() )
+# else
+        : m_dispatcher( rhs.m_dispatcher.release() )
+# endif
+      {
+      }
+      Context & operator=( Context const & ) = delete;
+
+      Context & operator=( Context && rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        if ( this != &rhs )
+        {
+# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+          m_dynamicLoader = std::move( rhs.m_dynamicLoader );
+# endif
+          m_dispatcher.reset( rhs.m_dispatcher.release() );
+        }
+        return *this;
+      }
+
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher const * getDispatcher() const
+      {
+        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+        return &*m_dispatcher;
+      }
+
+      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context & rhs )
+      {
+# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+        std::swap( m_dynamicLoader, rhs.m_dynamicLoader );
+# endif
+        m_dispatcher.swap( rhs.m_dispatcher );
+      }
+
+      //=== VK_VERSION_1_0 ===
+
+      VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Instance
+        createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo,
+                        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties>
+        enumerateInstanceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> enumerateInstanceLayerProperties() const;
+
+      //=== VK_VERSION_1_1 ===
+
+      VULKAN_HPP_NODISCARD uint32_t enumerateInstanceVersion() const;
+
+    private:
+# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+      VULKAN_HPP_NAMESPACE::DynamicLoader m_dynamicLoader;
+# endif
+      std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher> m_dispatcher;
+    };
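+    // A minimal usage sketch for the RAII wrappers declared here (identifiers
+    // such as `appInfo` are illustrative, not part of this header):
+    //
+    //   vk::raii::Context      context;                       // loads vkGetInstanceProcAddr
+    //   vk::ApplicationInfo    appInfo( "demo", 1, nullptr, 0, VK_API_VERSION_1_3 );
+    //   vk::InstanceCreateInfo createInfo( {}, &appInfo );
+    //   vk::raii::Instance     instance( context, createInfo ); // vkDestroyInstance on scope exit
+    //
+    // Each wrapper owns its Vulkan handle: the destructor calls the matching
+    // destroy entry point, copying is deleted, and moving transfers ownership,
+    // so handle lifetime follows ordinary C++ scoping rules.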
+    class Instance
+    {
+    public:
+      using CType   = VkInstance;
+      using CppType = vk::Instance;
+
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eInstance;
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance;
+
+    public:
+      Instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const &                context,
+                VULKAN_HPP_NAMESPACE::InstanceCreateInfo const &                                createInfo,
+                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+        : m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+      {
+        VULKAN_HPP_NAMESPACE::Result result =
+          static_cast<VULKAN_HPP_NAMESPACE::Result>( context.getDispatcher()->vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
+                                                                                                reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+                                                                                                reinterpret_cast<VkInstance *>( &m_instance ) ) );
+        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+        {
+          detail::throwResultException( result, "vkCreateInstance" );
+        }
+        m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr,
+                                                                                                     static_cast<VkInstance>( m_instance ) ) );
+      }
+
+      Instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const &                context,
+                VkInstance                                                                      instance,
+                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+        : m_instance( instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+      {
+        m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr,
+                                                                                                     static_cast<VkInstance>( m_instance ) ) );
+      }
+
+      Instance( std::nullptr_t ) {}
+
+      ~Instance()
+      {
+        clear();
+      }
+
+      Instance() = delete;
+      Instance( Instance const & ) = delete;
+
+      Instance( Instance && rhs ) VULKAN_HPP_NOEXCEPT
+        : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) )
+        , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
+        , m_dispatcher( rhs.m_dispatcher.release() )
+      {
+      }
+
+      Instance & operator=( Instance const & ) = delete;
+
+      Instance & operator=( Instance && rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        if ( this != &rhs )
+        {
+          std::swap( m_instance, rhs.m_instance );
+          std::swap( m_allocator, rhs.m_allocator );
+          std::swap( m_dispatcher, rhs.m_dispatcher );
+        }
+        return *this;
+      }
+
+      VULKAN_HPP_NAMESPACE::Instance const & operator*() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_instance;
+      }
+
+      void clear() VULKAN_HPP_NOEXCEPT
+      {
+        if ( m_instance )
+        {
+          getDispatcher()->vkDestroyInstance( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+        m_instance   = nullptr;
+        m_allocator  = nullptr;
+        m_dispatcher = nullptr;
+      }
+
+      VULKAN_HPP_NAMESPACE::Instance release()
+      {
+        m_allocator  = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_instance, nullptr );
+      }
+
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
+      {
+        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+        return &*m_dispatcher;
+      }
+
+      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance & rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        std::swap( m_instance, rhs.m_instance );
+        std::swap( m_allocator, rhs.m_allocator );
+        std::swap( m_dispatcher, rhs.m_dispatcher );
+      }
+
+      //=== VK_VERSION_1_0 ===
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice> enumeratePhysicalDevices() const;
+
+      VULKAN_HPP_NODISCARD PFN_vkVoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT;
+
+      //=== VK_VERSION_1_1 ===
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> enumeratePhysicalDeviceGroups() const;
+
+      //=== VK_KHR_display ===
+
+      VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+        createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo,
+                                      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+# if defined( VK_USE_PLATFORM_XLIB_KHR )
+      //===
VK_KHR_xlib_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT + createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_GGP*/ + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_device_group_creation === + + VULKAN_HPP_NODISCARD std::vector enumeratePhysicalDeviceGroupsKHR() const; + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & 
createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT + createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_headless_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + private: + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + std::unique_ptr m_dispatcher; + }; + + class PhysicalDevice + { + public: + using CType = VkPhysicalDevice; + using CppType = vk::PhysicalDevice; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice; + + public: + PhysicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VkPhysicalDevice physicalDevice ) + : m_physicalDevice( physicalDevice ), m_dispatcher( instance.getDispatcher() ) + { + } + + PhysicalDevice( std::nullptr_t ) {} + + ~PhysicalDevice() + { + clear(); + } + + PhysicalDevice() = delete; + + PhysicalDevice( PhysicalDevice const & rhs ) : m_physicalDevice( rhs.m_physicalDevice ), m_dispatcher( rhs.m_dispatcher ) {} + + PhysicalDevice( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT + : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, 
{} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + PhysicalDevice & operator=( PhysicalDevice const & rhs ) + { + m_physicalDevice = rhs.m_physicalDevice; + m_dispatcher = rhs.m_dispatcher; + return *this; + } + + PhysicalDevice & operator=( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::PhysicalDevice const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::PhysicalDevice release() + { + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_physicalDevice, nullptr ); + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties + getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties() const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Device + createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD std::vector + enumerateDeviceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + VULKAN_HPP_NODISCARD std::vector enumerateDeviceLayerProperties() const; + + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const; + + //=== VK_VERSION_1_1 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2() const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getFeatures2() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2() const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getProperties2() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties2 + getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; + + VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2() const; + + template + VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2() const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2() const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getMemoryProperties2() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties + getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties + getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_3 === + + VULKAN_HPP_NODISCARD std::vector getToolProperties() const; + + //=== VK_KHR_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const; + + VULKAN_HPP_NODISCARD std::vector + getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + VULKAN_HPP_NODISCARD std::vector + getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_KHR_swapchain === + + VULKAN_HPP_NODISCARD std::vector getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const; + + //=== VK_KHR_display === + + VULKAN_HPP_NODISCARD std::vector getDisplayPropertiesKHR() const; + + VULKAN_HPP_NODISCARD std::vector getDisplayPlanePropertiesKHR() const; + + VULKAN_HPP_NODISCARD std::vector getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const; + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 + getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 + getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t 
visual_id ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display & display ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_video_queue === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const; + + VULKAN_HPP_NODISCARD std::vector + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const; + + //=== VK_NV_external_memory_capabilities === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV getExternalImageFormatPropertiesNV( + VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_KHR_get_physical_device_properties2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR() const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getFeatures2KHR() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR() const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getProperties2KHR() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties2 + getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; + + VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2KHR() const; + + template + VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2KHR() const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD std::vector + getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const; + + //=== 
VK_KHR_external_memory_capabilities === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties + getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_external_semaphore_capabilities === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties + getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + void acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const; +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const; + + //=== VK_KHR_external_fence_capabilities === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties + getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_performance_query === + + VULKAN_HPP_NODISCARD + std::pair, std::vector> + enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const; + + VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_get_surface_capabilities2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR + getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; + + VULKAN_HPP_NODISCARD std::vector + getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; + + template + VULKAN_HPP_NODISCARD std::vector getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; + + //=== VK_KHR_get_display_properties2 === + + VULKAN_HPP_NODISCARD std::vector getDisplayProperties2KHR() const; + + VULKAN_HPP_NODISCARD std::vector getDisplayPlaneProperties2KHR() const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR + getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const; + + //=== VK_EXT_sample_locations === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT + getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_calibrated_timestamps === + + VULKAN_HPP_NODISCARD std::vector getCalibrateableTimeDomainsEXT() const; + + //=== VK_KHR_fragment_shading_rate === + + VULKAN_HPP_NODISCARD std::vector getFragmentShadingRatesKHR() const; + + //=== VK_EXT_tooling_info === + + VULKAN_HPP_NODISCARD std::vector getToolPropertiesEXT() const; + + //=== VK_NV_cooperative_matrix === + + VULKAN_HPP_NODISCARD std::vector getCooperativeMatrixPropertiesNV() const; + + //=== VK_NV_coverage_reduction_mode === + + 
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV> getSupportedFramebufferMixedSamplesCombinationsNV() const;
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+      //=== VK_EXT_full_screen_exclusive ===
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR>
+        getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+      //=== VK_EXT_acquire_drm_display ===
+
+      void acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const;
+
+      VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const;
+
+      //=== VK_KHR_video_encode_queue ===
+
+      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR
+        getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const;
+
+      template <typename X, typename Y, typename... Z>
+      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+        getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const;
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+      //=== VK_NV_acquire_winrt_display ===
+
+      VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getWinrtDisplayNV( uint32_t deviceRelativeId ) const;
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+      //=== VK_EXT_directfb_surface ===
+
+      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getDirectFBPresentationSupportEXT( uint32_t    queueFamilyIndex,
+                                                                                           IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+# if defined( VK_USE_PLATFORM_SCREEN_QNX )
+      //=== VK_QNX_screen_surface ===
+
+      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX( uint32_t                queueFamilyIndex,
+                                                                                         struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+      //=== VK_NV_optical_flow ===
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>
+        getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const;
+
+      //=== VK_KHR_cooperative_matrix ===
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR> getCooperativeMatrixPropertiesKHR() const;
+
+      //=== VK_KHR_calibrated_timestamps ===
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR> getCalibrateableTimeDomainsKHR() const;
+
+    private:
+      VULKAN_HPP_NAMESPACE::PhysicalDevice                                        m_physicalDevice = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher     = nullptr;
+    };
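+    // Enumeration sketch (assuming `instance` is a valid vk::raii::Instance):
+    //
+    //   vk::raii::PhysicalDevices gpus( instance );  // wraps vkEnumeratePhysicalDevices
+    //   for ( auto const & gpu : gpus )
+    //     std::cout << gpu.getProperties().deviceName << '\n';
+    //
+    // The constructor below re-queries while vkEnumeratePhysicalDevices returns
+    // VK_INCOMPLETE, so the resulting vector stays consistent even if the set
+    // of devices changes between the count query and the fill query.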
+    class PhysicalDevices : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice>
+    {
+    public:
+      PhysicalDevices( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance )
+      {
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher = instance.getDispatcher();
+        std::vector<VkPhysicalDevice> physicalDevices;
+        uint32_t                      physicalDeviceCount;
+        VULKAN_HPP_NAMESPACE::Result  result;
+        do
+        {
+          result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+            dispatcher->vkEnumeratePhysicalDevices( static_cast<VkInstance>( *instance ), &physicalDeviceCount, nullptr ) );
+          if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount )
+          {
+            physicalDevices.resize( physicalDeviceCount );
+            result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+              dispatcher->vkEnumeratePhysicalDevices( static_cast<VkInstance>( *instance ), &physicalDeviceCount, physicalDevices.data() ) );
+          }
+        } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+        if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+        {
+          VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+          this->reserve( physicalDeviceCount );
+          for ( auto const & physicalDevice : physicalDevices )
+          {
+            this->emplace_back( instance, physicalDevice );
+          }
+        }
+        else
+        {
+          detail::throwResultException( result, "vkEnumeratePhysicalDevices" );
+        }
+      }
+
+      PhysicalDevices( std::nullptr_t ) {}
+
+      PhysicalDevices()                                      = delete;
+      PhysicalDevices( PhysicalDevices const & )             = delete;
+      PhysicalDevices( PhysicalDevices && rhs )              = default;
+      PhysicalDevices & operator=( PhysicalDevices const & ) = delete;
+      PhysicalDevices & operator=( PhysicalDevices && rhs )  = default;
+    };
+
+    class Device
+    {
+    public:
+      using CType   = VkDevice;
+      using CppType = vk::Device;
+
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice;
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice;
+
+    public:
+      Device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const &         physicalDevice,
+              VULKAN_HPP_NAMESPACE::DeviceCreateInfo const &                                  createInfo,
+              VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+        : m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+      {
+        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+          physicalDevice.getDispatcher()->vkCreateDevice( static_cast<VkPhysicalDevice>( *physicalDevice ),
+                                                          reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
+                                                          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+                                                          reinterpret_cast<VkDevice *>( &m_device ) ) );
+        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+        {
+          detail::throwResultException( result, "vkCreateDevice" );
+        }
+        m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr,
+                                                                                                   static_cast<VkDevice>( m_device ) ) );
+      }
+
+      Device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const &         physicalDevice,
+              VkDevice                                                                        device,
+              VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+        : m_device( device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+      {
+        m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr,
+                                                                                                   static_cast<VkDevice>( m_device ) ) );
+      }
+
+      Device( std::nullptr_t ) {}
+
+      ~Device()
+      {
+        clear();
+      }
+
+      Device() = delete;
+      Device( Device const & ) = delete;
+
+      Device( Device && rhs ) VULKAN_HPP_NOEXCEPT
+        : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
+        , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
+        , m_dispatcher( rhs.m_dispatcher.release() )
+      {
+      }
+
+      Device & operator=( Device const & ) = delete;
+
+      Device & operator=( Device && rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        if ( this != &rhs )
+        {
+          std::swap( m_device, rhs.m_device );
+          std::swap( m_allocator, rhs.m_allocator );
+          std::swap( m_dispatcher, rhs.m_dispatcher );
+        }
+        return *this;
+      }
+
+      VULKAN_HPP_NAMESPACE::Device const & operator*() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_device;
+      }
+
+      void clear() VULKAN_HPP_NOEXCEPT
+      {
+        if ( m_device )
+        {
+          getDispatcher()->vkDestroyDevice( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+        }
+        m_device     = nullptr;
+        m_allocator  = nullptr;
+        m_dispatcher = nullptr;
+      }
+
+      VULKAN_HPP_NAMESPACE::Device release()
+      {
+        m_allocator  = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_device, nullptr );
+      }
+
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher()
const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return &*m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD PFN_vkVoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const; + + void waitIdle() const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DeviceMemory + allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const; + + void invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence + createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Semaphore + createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Event + createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::QueryPool + createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Buffer + createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::BufferView + createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Image + createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ImageView + createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ShaderModule + createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PipelineCache + createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD std::vector + createGraphicsPipelines( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + 
VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline + createGraphicsPipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD std::vector + createComputePipelines( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline + createComputePipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PipelineLayout + createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Sampler + createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout + createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorPool + createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD std::vector + allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const; + + void updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorCopies ) const + VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Framebuffer + createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass + createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CommandPool + createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD std::vector + allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const; + + //=== VK_VERSION_1_1 === + + void bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const; + + void bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirements2( const 
VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Queue getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion + createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate + createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass + createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const; + + void signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress + getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD uint64_t + getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_3 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot + createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data ) const; + + VULKAN_HPP_NODISCARD uint64_t getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain 
+      getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
+      getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+      getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
+
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
+      getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const;
+
+    //=== VK_KHR_swapchain ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR
+      createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const &                            createInfo,
+                          VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR getGroupPresentCapabilitiesKHR() const;
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR
+      getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
+
+    VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t>
+      acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const;
+
+    //=== VK_KHR_display_swapchain ===
+
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>
+      createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+                                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>        allocator = nullptr ) const;
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR
+      createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const &                            createInfo,
+                                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+    //=== VK_EXT_debug_marker ===
+
+    void debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const;
+
+    void debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const;
+
+    //=== VK_KHR_video_queue ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR
+      createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const &                         createInfo,
+                             VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR
+      createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const &               createInfo,
+                                       VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+    //=== VK_NVX_binary_import ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX
+      createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const &                             createInfo,
+                         VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX
+      createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const &                           createInfo,
+                           VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+    //=== VK_NVX_image_view_handle ===
+
+    VULKAN_HPP_NODISCARD uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT;
+
+    //=== VK_KHR_device_group ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
+      getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT;
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_KHR_external_memory_win32 ===
+
+    VULKAN_HPP_NODISCARD HANDLE
getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR + getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + VULKAN_HPP_NODISCARD int getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR + getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + void importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const; + + VULKAN_HPP_NODISCARD HANDLE getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + void importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const; + + VULKAN_HPP_NODISCARD int getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const; + + //=== VK_KHR_descriptor_update_template === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate + createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_display_control === + + void displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence + registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence + registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, + VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + //=== VK_EXT_hdr_metadata === + + void setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & swapchains, + VULKAN_HPP_NAMESPACE::ArrayProxy const & metadata ) const; + + //=== VK_KHR_create_renderpass2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass + createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + void importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const; + + VULKAN_HPP_NODISCARD HANDLE getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + void importFenceFdKHR( const 
VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const; + + VULKAN_HPP_NODISCARD int getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const; + + //=== VK_KHR_performance_query === + + void acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const; + + void releaseProfilingLockKHR() const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_debug_utils === + + void setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const; + + void setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const; + + VULKAN_HPP_NODISCARD struct AHardwareBuffer * + getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + VULKAN_HPP_NODISCARD std::vector createExecutionGraphPipelinesAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createExecutionGraphPipelineAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_get_memory_requirements2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const; + + //=== VK_KHR_acceleration_structure === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR + createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos ) const; + + 
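+    // Editor's note (illustrative, not part of the generated header): the RAII
+    // creators above return owning handle objects and translate VkResult failures
+    // into exceptions, while deferrable operations such as
+    // buildAccelerationStructuresKHR return the Result so the caller can observe
+    // eOperationDeferredKHR. A minimal usage sketch, assuming an already-created
+    // vk::raii::Device `device` and filled-in info structures (names here are
+    // hypothetical):
+    //
+    //   vk::raii::AccelerationStructureKHR blas =
+    //     device.createAccelerationStructureKHR( accelCreateInfo );  // throws on failure
+    //   vk::Result result = device.buildAccelerationStructuresKHR(
+    //     deferredOp, buildGeometryInfos, buildRangeInfos );         // may defer
+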
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const; + + template + VULKAN_HPP_NODISCARD std::vector writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride ) const; + + template + VULKAN_HPP_NODISCARD DataType writeAccelerationStructuresPropertyKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress + getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, + VULKAN_HPP_NAMESPACE::ArrayProxy const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + //=== VK_KHR_ray_tracing_pipeline === + + VULKAN_HPP_NODISCARD std::vector createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createRayTracingPipelineKHR( + VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + //=== VK_KHR_sampler_ycbcr_conversion === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion + createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_bind_memory2 === + + void bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const; + + void bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const; + + //=== VK_EXT_validation_cache === + + VULKAN_HPP_NODISCARD 
VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT + createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + //=== VK_NV_ray_tracing === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV + createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + void bindAccelerationStructureMemoryNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const; + + VULKAN_HPP_NODISCARD std::vector + createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline + createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + //=== VK_KHR_maintenance3 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_external_memory_host === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT + getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer ) const; + + //=== VK_EXT_calibrated_timestamps === + + VULKAN_HPP_NODISCARD std::pair, uint64_t> + getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos ) const; + + VULKAN_HPP_NODISCARD std::pair + getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const; + + //=== VK_KHR_timeline_semaphore === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const; + + void signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const; + + //=== VK_INTEL_performance_query === + + void initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const; + + void uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL + acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PerformanceValueINTEL + getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const; + + //=== 
VK_EXT_buffer_device_address === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress + getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR + getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_buffer_device_address === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress + getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD uint64_t + getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_deferred_host_operations === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR + createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + //=== VK_KHR_pipeline_executable_properties === + + VULKAN_HPP_NODISCARD std::vector + getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const; + + VULKAN_HPP_NODISCARD std::vector + getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const; + + VULKAN_HPP_NODISCARD std::vector + getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const; + + //=== VK_EXT_host_image_copy === + + void copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo ) const; + + void copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo ) const; + + void copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo ) const; + + void transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const; + + //=== VK_KHR_map_memory2 === + + VULKAN_HPP_NODISCARD void * mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo ) const; + + void unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_swapchain_maintenance1 === + + void releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo ) const; + + //=== VK_NV_device_generated_commands === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV + createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + //=== VK_EXT_private_data === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot + 
createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + void setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data ) const; + + VULKAN_HPP_NODISCARD uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_video_encode_queue === + + VULKAN_HPP_NODISCARD std::pair> + getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const; + + template + VULKAN_HPP_NODISCARD std::pair, std::vector> + getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV + createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV + createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_descriptor_buffer === + + void getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD DescriptorType getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD DataType getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info ) const; + + template + VULKAN_HPP_NODISCARD DataType getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info ) const; + + template + VULKAN_HPP_NODISCARD DataType + getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info ) const; + + template + VULKAN_HPP_NODISCARD DataType getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info ) const; + + template + VULKAN_HPP_NODISCARD DataType + getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info ) const; + + //=== VK_EXT_device_fault === + + VULKAN_HPP_NODISCARD + std::pair> + getFaultInfoEXT() const; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + VULKAN_HPP_NODISCARD 
zx_handle_t getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA + getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle ) const; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + void importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const; + + VULKAN_HPP_NODISCARD zx_handle_t + getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA + createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_NV_external_memory_rdma === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::RemoteAddressNV + getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const; + + //=== VK_EXT_pipeline_properties === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BaseOutStructure getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const; + + //=== VK_EXT_opacity_micromap === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::MicromapEXT + createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const; + + template + VULKAN_HPP_NODISCARD std::vector + writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride ) const; + + template + VULKAN_HPP_NODISCARD DataType writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT + getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & 
buildInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance4 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const; + + //=== VK_VALVE_descriptor_set_host_mapping === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE getDescriptorSetLayoutHostMappingInfoVALVE( + const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_generated_commands_compute === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress + getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_shader_module_identifier === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT + getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_optical_flow === + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV + createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + //=== VK_KHR_maintenance5 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_shader_object === + + VULKAN_HPP_NODISCARD std::vector + createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ShaderEXT + createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + //=== 
VK_QCOM_tile_properties === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM + getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_calibrated_timestamps === + + VULKAN_HPP_NODISCARD std::pair, uint64_t> + getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos ) const; + + VULKAN_HPP_NODISCARD std::pair + getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + std::unique_ptr m_dispatcher; + }; + + class AccelerationStructureKHR + { + public: + using CType = VkAccelerationStructureKHR; + using CppType = vk::AccelerationStructureKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; + + public: + AccelerationStructureKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateAccelerationStructureKHR( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_accelerationStructure ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateAccelerationStructureKHR" ); + } + } + + AccelerationStructureKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkAccelerationStructureKHR accelerationStructure, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_accelerationStructure( accelerationStructure ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + AccelerationStructureKHR( std::nullptr_t ) {} + + ~AccelerationStructureKHR() + { + clear(); + } + + AccelerationStructureKHR() = delete; + AccelerationStructureKHR( AccelerationStructureKHR const & ) = delete; + + AccelerationStructureKHR( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_accelerationStructure( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + AccelerationStructureKHR & operator=( 
AccelerationStructureKHR const & ) = delete; + + AccelerationStructureKHR & operator=( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_accelerationStructure, rhs.m_accelerationStructure ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructure; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_accelerationStructure ) + { + getDispatcher()->vkDestroyAccelerationStructureKHR( static_cast( m_device ), + static_cast( m_accelerationStructure ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_accelerationStructure = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_accelerationStructure, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_accelerationStructure, rhs.m_accelerationStructure ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR m_accelerationStructure = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class AccelerationStructureNV + { + public: + using CType = VkAccelerationStructureNV; + using CppType = vk::AccelerationStructureNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV; + + public: + AccelerationStructureNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateAccelerationStructureNV( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_accelerationStructure ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateAccelerationStructureNV" ); + } + } + + AccelerationStructureNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkAccelerationStructureNV accelerationStructure, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , 
m_accelerationStructure( accelerationStructure ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + AccelerationStructureNV( std::nullptr_t ) {} + + ~AccelerationStructureNV() + { + clear(); + } + + AccelerationStructureNV() = delete; + AccelerationStructureNV( AccelerationStructureNV const & ) = delete; + + AccelerationStructureNV( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_accelerationStructure( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + AccelerationStructureNV & operator=( AccelerationStructureNV const & ) = delete; + + AccelerationStructureNV & operator=( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_accelerationStructure, rhs.m_accelerationStructure ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::AccelerationStructureNV const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructure; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_accelerationStructure ) + { + getDispatcher()->vkDestroyAccelerationStructureNV( static_cast( m_device ), + static_cast( m_accelerationStructure ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_accelerationStructure = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::AccelerationStructureNV release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_accelerationStructure, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_accelerationStructure, rhs.m_accelerationStructure ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_NV_ray_tracing === + + template + VULKAN_HPP_NODISCARD std::vector getHandle( size_t dataSize ) const; + + template + VULKAN_HPP_NODISCARD DataType getHandle() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV m_accelerationStructure = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class Buffer + { + public: + using CType = VkBuffer; + using CppType = vk::Buffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer; + + public: + Buffer( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateBuffer( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_buffer ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateBuffer" ); + } + } + + Buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkBuffer buffer, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_buffer( buffer ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + Buffer( std::nullptr_t ) {} + + ~Buffer() + { + clear(); + } + + Buffer() = delete; + Buffer( Buffer const & ) = delete; + + Buffer( Buffer && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_buffer, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + Buffer & operator=( Buffer const & ) = delete; + + Buffer & operator=( Buffer && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_buffer, rhs.m_buffer ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Buffer const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_buffer; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_buffer ) + { + getDispatcher()->vkDestroyBuffer( + static_cast( m_device ), static_cast( m_buffer ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_buffer = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Buffer release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_buffer, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Buffer & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_buffer, rhs.m_buffer ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + void bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements() const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Buffer m_buffer = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher 
const * m_dispatcher = nullptr; + }; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + class BufferCollectionFUCHSIA + { + public: + using CType = VkBufferCollectionFUCHSIA; + using CppType = vk::BufferCollectionFUCHSIA; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA; + + public: + BufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateBufferCollectionFUCHSIA( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_collection ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateBufferCollectionFUCHSIA" ); + } + } + + BufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkBufferCollectionFUCHSIA collection, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_collection( collection ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + BufferCollectionFUCHSIA( std::nullptr_t ) {} + + ~BufferCollectionFUCHSIA() + { + clear(); + } + + BufferCollectionFUCHSIA() = delete; + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA const & ) = delete; + + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_collection( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_collection, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA const & ) = delete; + + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_collection, rhs.m_collection ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_collection; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_collection ) + { + getDispatcher()->vkDestroyBufferCollectionFUCHSIA( static_cast( m_device ), + static_cast( m_collection ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_collection = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_collection, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + 
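+    // Editor's note (illustrative only): like every RAII handle class in this
+    // header, this type is move-only; the wrapped Vulkan object is destroyed in
+    // clear() (invoked by the destructor) unless ownership is given up via
+    // release(). A minimal sketch of the ownership transfer, assuming a valid
+    // vk::raii::Device `device` and `createInfo` (hypothetical names):
+    //
+    //   vk::raii::Buffer staging = device.createBuffer( createInfo );
+    //   vk::raii::Buffer moved   = std::move( staging );  // `staging` is now empty
+    //   vk::Buffer raw           = moved.release();       // caller owns the VkBuffer
+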
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_collection, rhs.m_collection ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_FUCHSIA_buffer_collection === + + void setImageConstraints( const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const; + + void setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA getProperties() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA m_collection = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + class BufferView + { + public: + using CType = VkBufferView; + using CppType = vk::BufferView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; + + public: + BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateBufferView( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_bufferView ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateBufferView" ); + } + } + + BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkBufferView bufferView, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_bufferView( bufferView ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + BufferView( std::nullptr_t ) {} + + ~BufferView() + { + clear(); + } + + BufferView() = delete; + BufferView( BufferView const & ) = delete; + + BufferView( BufferView && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_bufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_bufferView, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + BufferView & operator=( BufferView const & ) = delete; + + BufferView & operator=( BufferView && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_bufferView, rhs.m_bufferView ); + std::swap( m_allocator, 
rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::BufferView const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_bufferView ) + { + getDispatcher()->vkDestroyBufferView( + static_cast( m_device ), static_cast( m_bufferView ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_bufferView = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::BufferView release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_bufferView, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferView & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_bufferView, rhs.m_bufferView ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::BufferView m_bufferView = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class CommandPool + { + public: + using CType = VkCommandPool; + using CppType = vk::CommandPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; + + public: + CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateCommandPool( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_commandPool ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateCommandPool" ); + } + } + + CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkCommandPool commandPool, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_commandPool( commandPool ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + CommandPool( std::nullptr_t ) {} + + ~CommandPool() + { + clear(); + } + + CommandPool() = delete; + CommandPool( CommandPool const & ) = delete; + + CommandPool( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_commandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , 
m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + CommandPool & operator=( CommandPool const & ) = delete; + + CommandPool & operator=( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_commandPool, rhs.m_commandPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::CommandPool const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_commandPool ) + { + getDispatcher()->vkDestroyCommandPool( + static_cast( m_device ), static_cast( m_commandPool ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_commandPool = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::CommandPool release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_commandPool, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandPool & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_commandPool, rhs.m_commandPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + void reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_VERSION_1_1 === + + void trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance1 === + + void trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class CommandBuffer + { + public: + using CType = VkCommandBuffer; + using CppType = vk::CommandBuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer; + + public: + CommandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkCommandBuffer commandBuffer, VkCommandPool commandPool ) + : m_device( *device ), m_commandPool( commandPool ), m_commandBuffer( commandBuffer ), m_dispatcher( device.getDispatcher() ) + { + } + + CommandBuffer( std::nullptr_t ) {} + + ~CommandBuffer() + { + clear(); + } + + CommandBuffer() = delete; + CommandBuffer( CommandBuffer const & ) = delete; + + CommandBuffer( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_commandPool( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ) )
+        , m_commandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandBuffer, {} ) )
+        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+      {
+      }
+
+      CommandBuffer & operator=( CommandBuffer const & ) = delete;
+
+      CommandBuffer & operator=( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        if ( this != &rhs )
+        {
+          std::swap( m_device, rhs.m_device );
+          std::swap( m_commandPool, rhs.m_commandPool );
+          std::swap( m_commandBuffer, rhs.m_commandBuffer );
+          std::swap( m_dispatcher, rhs.m_dispatcher );
+        }
+        return *this;
+      }
+
+      VULKAN_HPP_NAMESPACE::CommandBuffer const & operator*() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_commandBuffer;
+      }
+
+      void clear() VULKAN_HPP_NOEXCEPT
+      {
+        if ( m_commandBuffer )
+        {
+          getDispatcher()->vkFreeCommandBuffers(
+            static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), 1, reinterpret_cast<VkCommandBuffer const *>( &m_commandBuffer ) );
+        }
+        m_device        = nullptr;
+        m_commandPool   = nullptr;
+        m_commandBuffer = nullptr;
+        m_dispatcher    = nullptr;
+      }
+
+      VULKAN_HPP_NAMESPACE::CommandBuffer release()
+      {
+        m_device      = nullptr;
+        m_commandPool = nullptr;
+        m_dispatcher  = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_commandBuffer, nullptr );
+      }
+
+      VULKAN_HPP_NAMESPACE::Device getDevice() const
+      {
+        return m_device;
+      }
+
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+      {
+        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+        return m_dispatcher;
+      }
+
+      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer & rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        std::swap( m_device, rhs.m_device );
+        std::swap( m_commandPool, rhs.m_commandPool );
+        std::swap( m_commandBuffer, rhs.m_commandBuffer );
+        std::swap( m_dispatcher, rhs.m_dispatcher );
+      }
+
+      //=== VK_VERSION_1_0 ===
+
+      void begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const;
+
+      void end() const;
+
+      void reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+
+      void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT;
+
+      void setViewport( uint32_t firstViewport,
+                        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT;
+
+      void setScissor( uint32_t firstScissor,
+                       VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT;
+
+      void setLineWidth( float lineWidth ) const VULKAN_HPP_NOEXCEPT;
+
+      void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT;
+
+      void setBlendConstants( const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT;
+
+      void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT;
+
+      void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT;
+
+      void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT;
+
+      void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT;
+
+      void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+                               VULKAN_HPP_NAMESPACE::PipelineLayout    layout,
+                               uint32_t                                firstSet,
+                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+                               VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT;
+
+      void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer     buffer,
+                            VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                            VULKAN_HPP_NAMESPACE::IndexType  indexType ) const VULKAN_HPP_NOEXCEPT;
+
+      void bindVertexBuffers( uint32_t firstBinding,
+                              VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     buffers,
+                              VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const;
+
+      void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT;
+
+      void
+        drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT;
+
+      void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
+                         VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                         uint32_t                         drawCount,
+                         uint32_t                         stride ) const VULKAN_HPP_NOEXCEPT;
+
+      void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
+                                VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                uint32_t                         drawCount,
+                                uint32_t                         stride ) const VULKAN_HPP_NOEXCEPT;
+
+      void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT;
+
+      void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT;
+
+      void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
+                       VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
+                       VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
+
+      void copyImage( VULKAN_HPP_NAMESPACE::Image       srcImage,
+                      VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+                      VULKAN_HPP_NAMESPACE::Image       dstImage,
+                      VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+                      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
+
+      void blitImage( VULKAN_HPP_NAMESPACE::Image       srcImage,
+                      VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+                      VULKAN_HPP_NAMESPACE::Image       dstImage,
+                      VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+                      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
+                      VULKAN_HPP_NAMESPACE::Filter filter ) const VULKAN_HPP_NOEXCEPT;
+
+      void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer      srcBuffer,
+                              VULKAN_HPP_NAMESPACE::Image       dstImage,
+                              VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
+                              VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
+
+      void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image       srcImage,
+                              VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
+                              VULKAN_HPP_NAMESPACE::Buffer      dstBuffer,
+                              VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
+
+      template <typename DataType>
+      void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer     dstBuffer,
+                         VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+                         VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data ) const VULKAN_HPP_NOEXCEPT;
+
+      void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer     dstBuffer,
+                       VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
+                       VULKAN_HPP_NAMESPACE::DeviceSize size,
+                       uint32_t                         data ) const VULKAN_HPP_NOEXCEPT;
+
+      void clearColorImage( VULKAN_HPP_NAMESPACE::Image                   image,
+                            VULKAN_HPP_NAMESPACE::ImageLayout             imageLayout,
+                            const VULKAN_HPP_NAMESPACE::ClearColorValue & color,
+                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT;
+
+      void
+        clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image                          image,
+                                VULKAN_HPP_NAMESPACE::ImageLayout                    imageLayout,
+                                const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
+                                VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT;
+
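+      // --------------------------------------------------------------------
+      // Editor's note: illustrative usage sketch, not part of the generated
+      // header. It assumes an already-constructed vk::raii::Device `device`,
+      // a queue family index `queueFamilyIndex`, and a vk::Pipeline
+      // `pipeline` (all hypothetical names). The VK_VERSION_1_0 commands
+      // declared above are typically recorded like this:
+      //
+      //   vk::raii::CommandPool pool( device,
+      //     vk::CommandPoolCreateInfo{ vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex } );
+      //   vk::raii::CommandBuffers buffers( device,
+      //     vk::CommandBufferAllocateInfo{ *pool, vk::CommandBufferLevel::ePrimary, 1 } );
+      //   vk::raii::CommandBuffer & cmd = buffers.front();
+      //   cmd.begin( vk::CommandBufferBeginInfo{ vk::CommandBufferUsageFlagBits::eOneTimeSubmit } );
+      //   cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, pipeline );
+      //   cmd.draw( 3, 1, 0, 0 );   // single triangle, one instance
+      //   cmd.end();
+      //   // `cmd` frees itself via vkFreeCommandBuffers and `pool` destroys
+      //   // itself via vkDestroyCommandPool when they go out of scope.
+      // --------------------------------------------------------------------
+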
void clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy const & attachments, + VULKAN_HPP_NAMESPACE::ArrayProxy const & rects ) const VULKAN_HPP_NOEXCEPT; + + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT; + + void setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + void resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + void waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT; + + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers ) const + VULKAN_HPP_NOEXCEPT; + + void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT; + + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT; + + void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT; + + void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & values ) const VULKAN_HPP_NOEXCEPT; + + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT; + + void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT; + + void endRenderPass() const VULKAN_HPP_NOEXCEPT; + + void executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_1 === + + void setDeviceMask( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT; + + void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const + VULKAN_HPP_NOEXCEPT; + + //=== 
VK_VERSION_1_2 === + + void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT; + + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT; + + void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_VERSION_1_3 === + + void setEvent2( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT; + + void resetEvent2( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + void waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos ) const; + + void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT; + + void + writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT; + + void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT; + + void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT; + + void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT; + + void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT; + + void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT; + + void endRendering() const VULKAN_HPP_NOEXCEPT; + + void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + void setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT; + + void setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT; + + void setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT; + + void setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT; + + void bindVertexBuffers2( + uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + 
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
+
+      void setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT;
+
+      void setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT;
+
+      void setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT;
+
+      void setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT;
+
+      void setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT;
+
+      void setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+                         VULKAN_HPP_NAMESPACE::StencilOp        failOp,
+                         VULKAN_HPP_NAMESPACE::StencilOp        passOp,
+                         VULKAN_HPP_NAMESPACE::StencilOp        depthFailOp,
+                         VULKAN_HPP_NAMESPACE::CompareOp        compareOp ) const VULKAN_HPP_NOEXCEPT;
+
+      void setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT;
+
+      void setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT;
+
+      void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT;
+
+      //=== VK_EXT_debug_marker ===
+
+      void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT;
+
+      void debugMarkerEndEXT() const VULKAN_HPP_NOEXCEPT;
+
+      void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT;
+
+      //=== VK_KHR_video_queue ===
+
+      void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT;
+
+      void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const VULKAN_HPP_NOEXCEPT;
+
+      void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const VULKAN_HPP_NOEXCEPT;
+
+      //=== VK_KHR_video_decode_queue ===
+
+      void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo ) const VULKAN_HPP_NOEXCEPT;
+
+      //=== VK_EXT_transform_feedback ===
+
+      void bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
+                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     buffers,
+                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes
+                                              VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
+
+      void beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
+                                      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     counterBuffers,
+                                      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
+                                        VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
+
+      void endTransformFeedbackEXT( uint32_t firstCounterBuffer,
+                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     counterBuffers,
+                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
+                                      VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
+
+      void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool         queryPool,
+                                 uint32_t                                query,
+                                 VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
+                                 uint32_t                                index ) const VULKAN_HPP_NOEXCEPT;
+
+      void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT;
+
+      void drawIndirectByteCountEXT( uint32_t                         instanceCount,
+                                     uint32_t                         firstInstance,
+                                     VULKAN_HPP_NAMESPACE::Buffer     counterBuffer,
+                                     VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
+                                     uint32_t                         counterOffset,
+                                     uint32_t                         vertexStride ) const VULKAN_HPP_NOEXCEPT;
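+
+      // --------------------------------------------------------------------
+      // Editor's note: illustrative sketch, not generated API. With the
+      // VK_VERSION_1_3 dynamic-rendering and dynamic-state entry points
+      // declared above, a VkRenderPass object can be skipped entirely;
+      // `cmd`, `colorView` and `extent` are hypothetical, pre-existing
+      // objects:
+      //
+      //   vk::RenderingAttachmentInfo color{};
+      //   color.imageView   = colorView;
+      //   color.imageLayout = vk::ImageLayout::eColorAttachmentOptimal;
+      //   color.loadOp      = vk::AttachmentLoadOp::eClear;
+      //   color.storeOp     = vk::AttachmentStoreOp::eStore;
+      //
+      //   vk::RenderingInfo info{};
+      //   info.renderArea           = vk::Rect2D{ { 0, 0 }, extent };
+      //   info.layerCount           = 1;
+      //   info.colorAttachmentCount = 1;
+      //   info.pColorAttachments    = &color;
+      //
+      //   cmd.beginRendering( info );
+      //   cmd.setViewportWithCount( vk::Viewport( 0.f, 0.f, float( extent.width ), float( extent.height ), 0.f, 1.f ) );
+      //   cmd.setScissorWithCount( vk::Rect2D{ { 0, 0 }, extent } );
+      //   cmd.setCullMode( vk::CullModeFlagBits::eBack );
+      //   // ... bind a pipeline built with the matching dynamic states, draw ...
+      //   cmd.endRendering();
+      // --------------------------------------------------------------------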
+ + //=== VK_NVX_binary_import === + + void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_AMD_draw_indirect_count === + + void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_dynamic_rendering === + + void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT; + + void endRenderingKHR() const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_device_group === + + void setDeviceMaskKHR( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT; + + void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) + const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_push_descriptor === + + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites ) const + VULKAN_HPP_NOEXCEPT; + + template + void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_conditional_rendering === + + void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT; + + void endConditionalRenderingEXT() const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_clip_space_w_scaling === + + void setViewportWScalingNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportWScalings ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_discard_rectangles === + + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + VULKAN_HPP_NAMESPACE::ArrayProxy const & discardRectangles ) const VULKAN_HPP_NOEXCEPT; + + void setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT; + + void setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_create_renderpass2 === + + void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT; + + void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT; + + void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_debug_utils === + + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT; + + void endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT; + + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const 
VULKAN_HPP_NOEXCEPT;
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+      //=== VK_AMDX_shader_enqueue ===
+
+      void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT;
+
+      void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress                      scratch,
+                              const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT;
+
+      void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress                      scratch,
+                                      const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT;
+
+      void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
+                                           VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT;
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+      //=== VK_EXT_sample_locations ===
+
+      void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT;
+
+      //=== VK_KHR_acceleration_structure ===
+
+      void buildAccelerationStructuresKHR(
+        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const &      infos,
+        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const;
+
+      void buildAccelerationStructuresIndirectKHR(
+        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
+        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const &                             indirectDeviceAddresses,
+        VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const &                                                        indirectStrides,
+        VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const &                                                pMaxPrimitiveCounts ) const;
+
+      void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
+
+      void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
+
+      void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
+
+      void writeAccelerationStructuresPropertiesKHR(
+        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
+        VULKAN_HPP_NAMESPACE::QueryType                                                                queryType,
+        VULKAN_HPP_NAMESPACE::QueryPool                                                                queryPool,
+        uint32_t                                                                                       firstQuery ) const VULKAN_HPP_NOEXCEPT;
+
+      //=== VK_KHR_ray_tracing_pipeline ===
+
+      void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
+                         const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
+                         const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
+                         const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
+                         uint32_t                                                    width,
+                         uint32_t                                                    height,
+                         uint32_t                                                    depth ) const VULKAN_HPP_NOEXCEPT;
+
+      void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
+                                 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
+                                 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
+                                 const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
+                                 VULKAN_HPP_NAMESPACE::DeviceAddress                         indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT;
+
+      void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT;
+
+      //=== VK_NV_shading_rate_image ===
+
+      void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT;
+
+      void setViewportShadingRatePaletteNV(
+        uint32_t firstViewport,
+        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes ) const VULKAN_HPP_NOEXCEPT;
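+
+      // --------------------------------------------------------------------
+      // Editor's note: illustrative sketch, not generated API. traceRaysKHR,
+      // declared above, takes one StridedDeviceAddressRegionKHR per shader
+      // binding table section; `raygenRegion`, `missRegion` and `hitRegion`
+      // (hypothetical names) would be filled from buffer device addresses
+      // obtained when the SBT was built, and an unused section passes an
+      // empty region:
+      //
+      //   vk::StridedDeviceAddressRegionKHR callableRegion{};  // no callable shaders
+      //   cmd.traceRaysKHR( raygenRegion, missRegion, hitRegion, callableRegion,
+      //                     width, height, 1 );
+      // --------------------------------------------------------------------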
+ + void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + VULKAN_HPP_NAMESPACE::ArrayProxy const & customSampleOrders ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_ray_tracing === + + void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT; + + void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT; + + void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT; + + void writeAccelerationStructuresPropertiesNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_draw_indirect_count === + + void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_AMD_buffer_marker === + + void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_mesh_shader === + + void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT; + + void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_scissor_exclusive === + + void setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, + 
VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissorEnables ) const + VULKAN_HPP_NOEXCEPT; + + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissors ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_diagnostic_checkpoints === + + template + void setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_INTEL_performance_query === + + void setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const; + + void setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const; + + void setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const; + + //=== VK_KHR_fragment_shading_rate === + + void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_line_rasterization === + + void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state === + + void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT; + + void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT; + + void setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT; + + void setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT; + + void bindVertexBuffers2EXT( + uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT; + + void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT; + + void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT; + + void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT; + + void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT; + + void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_generated_commands === + + void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT; + + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT; + + void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t 
groupIndex ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_depth_bias_control === + + void setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_video_encode_queue === + + void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_synchronization2 === + + void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT; + + void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + void waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos ) const; + + void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT; + + void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT; + + void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_descriptor_buffer === + + void bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindingInfos ) const + VULKAN_HPP_NOEXCEPT; + + void setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferIndices, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets ) const; + + void bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_fragment_shading_rate_enums === + + void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_mesh_shader === + + void drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT; + + void drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_copy_commands2 === + + void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT; + + void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT; + + void copyImageToBuffer2KHR( 
const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT; + + void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT; + + void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_vertex_input_dynamic_state === + + void setVertexInputEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexBindingDescriptions, + VULKAN_HPP_NAMESPACE::ArrayProxy const & + vertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_HUAWEI_subpass_shading === + + void subpassShadingHUAWEI() const VULKAN_HPP_NOEXCEPT; + + //=== VK_HUAWEI_invocation_mask === + + void bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state2 === + + void setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT; + + void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT; + + void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT; + + void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT; + + void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_color_write_enable === + + void setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteEnables ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_ray_tracing_maintenance1 === + + void traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_multi_draw === + + void drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & vertexInfo, + uint32_t instanceCount, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT; + + void drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & indexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + Optional vertexOffset VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_opacity_micromap === + + void buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const VULKAN_HPP_NOEXCEPT; + + void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; + + void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; + + void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; + + void writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_HUAWEI_cluster_culling_shader === + + void drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT; + + void drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_copy_memory_indirect === + + void copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t 
stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageSubresources ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_memory_decompression === + + void decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & decompressMemoryRegions ) const + VULKAN_HPP_NOEXCEPT; + + void decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_generated_commands_compute === + + void updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state3 === + + void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT; + + void setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT; + + void setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT; + + void setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT; + + void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sampleMask ) const; + + void setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT; + + void setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT; + + void setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT; + + void setColorBlendEnableEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT; + + void setColorBlendEquationEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEquations ) const + VULKAN_HPP_NOEXCEPT; + + void setColorWriteMaskEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteMasks ) const + VULKAN_HPP_NOEXCEPT; + + void setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT; + + void + setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT; + + void setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT; + + void setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT; + + void setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT; + + void setColorBlendAdvancedEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendAdvanced ) const + VULKAN_HPP_NOEXCEPT; + + void setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT; + + void setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT; + + void setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT; + + void setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT; + + void setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 
viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT; + + void setViewportSwizzleNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportSwizzles ) const + VULKAN_HPP_NOEXCEPT; + + void setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT; + + void setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT; + + void setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT; + + void setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT; + + void setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT; + + void setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT; + + void setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT; + + void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_optical_flow === + + void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance5 === + + void bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_shader_object === + + void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders ) const; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + + void setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance6 === + + void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT; + + void pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT; + + void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT; + + void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR & pushDescriptorSetWithTemplateInfo ) const + VULKAN_HPP_NOEXCEPT; + + void setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo ) const + VULKAN_HPP_NOEXCEPT; + + void bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; + VULKAN_HPP_NAMESPACE::CommandBuffer m_commandBuffer = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class CommandBuffers : public std::vector + { + public: + CommandBuffers( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & 
allocateInfo ) + { + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); + std::vector commandBuffers( allocateInfo.commandBufferCount ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( dispatcher->vkAllocateCommandBuffers( + static_cast( *device ), reinterpret_cast( &allocateInfo ), commandBuffers.data() ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + this->reserve( allocateInfo.commandBufferCount ); + for ( auto const & commandBuffer : commandBuffers ) + { + this->emplace_back( device, commandBuffer, static_cast( allocateInfo.commandPool ) ); + } + } + else + { + detail::throwResultException( result, "vkAllocateCommandBuffers" ); + } + } + + CommandBuffers( std::nullptr_t ) {} + + CommandBuffers() = delete; + CommandBuffers( CommandBuffers const & ) = delete; + CommandBuffers( CommandBuffers && rhs ) = default; + CommandBuffers & operator=( CommandBuffers const & ) = delete; + CommandBuffers & operator=( CommandBuffers && rhs ) = default; + }; + + class CuFunctionNVX + { + public: + using CType = VkCuFunctionNVX; + using CppType = vk::CuFunctionNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX; + + public: + CuFunctionNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateCuFunctionNVX( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_function ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateCuFunctionNVX" ); + } + } + + CuFunctionNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkCuFunctionNVX function, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_function( function ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + CuFunctionNVX( std::nullptr_t ) {} + + ~CuFunctionNVX() + { + clear(); + } + + CuFunctionNVX() = delete; + CuFunctionNVX( CuFunctionNVX const & ) = delete; + + CuFunctionNVX( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_function( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + CuFunctionNVX & operator=( CuFunctionNVX const & ) = delete; + + CuFunctionNVX & operator=( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_function, rhs.m_function ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::CuFunctionNVX const & operator*() 
const VULKAN_HPP_NOEXCEPT + { + return m_function; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_function ) + { + getDispatcher()->vkDestroyCuFunctionNVX( + static_cast( m_device ), static_cast( m_function ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_function = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::CuFunctionNVX release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_function, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_function, rhs.m_function ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CuFunctionNVX m_function = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class CuModuleNVX + { + public: + using CType = VkCuModuleNVX; + using CppType = vk::CuModuleNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX; + + public: + CuModuleNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateCuModuleNVX( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_module ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateCuModuleNVX" ); + } + } + + CuModuleNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkCuModuleNVX module, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_module( module ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + CuModuleNVX( std::nullptr_t ) {} + + ~CuModuleNVX() + { + clear(); + } + + CuModuleNVX() = delete; + CuModuleNVX( CuModuleNVX const & ) = delete; + + CuModuleNVX( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_module( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + CuModuleNVX & operator=( CuModuleNVX const & ) = delete; + + CuModuleNVX 
& operator=( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_module, rhs.m_module ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::CuModuleNVX const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_module; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_module ) + { + getDispatcher()->vkDestroyCuModuleNVX( + static_cast( m_device ), static_cast( m_module ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_module = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::CuModuleNVX release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_module, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_module, rhs.m_module ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CuModuleNVX m_module = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + class CudaFunctionNV + { + public: + using CType = VkCudaFunctionNV; + using CppType = vk::CudaFunctionNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaFunctionNV; + + public: + CudaFunctionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateCudaFunctionNV( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_function ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateCudaFunctionNV" ); + } + } + + CudaFunctionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkCudaFunctionNV function, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_function( function ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + CudaFunctionNV( std::nullptr_t ) {} + + ~CudaFunctionNV() + { + clear(); + } + + CudaFunctionNV() = delete; + CudaFunctionNV( CudaFunctionNV const & ) = delete; + + CudaFunctionNV( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_function( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + CudaFunctionNV & operator=( CudaFunctionNV const & ) = delete; + + CudaFunctionNV & operator=( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_function, rhs.m_function ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::CudaFunctionNV const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_function; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_function ) + { + getDispatcher()->vkDestroyCudaFunctionNV( + static_cast( m_device ), static_cast( m_function ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_function = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::CudaFunctionNV release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_function, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_function, rhs.m_function ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CudaFunctionNV m_function = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + class CudaModuleNV + { + public: + using CType = VkCudaModuleNV; + using CppType = vk::CudaModuleNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaModuleNV; + + public: + CudaModuleNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateCudaModuleNV( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_module ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateCudaModuleNV" ); + } + } + + CudaModuleNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device 
const & device, + VkCudaModuleNV module, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_module( module ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + CudaModuleNV( std::nullptr_t ) {} + + ~CudaModuleNV() + { + clear(); + } + + CudaModuleNV() = delete; + CudaModuleNV( CudaModuleNV const & ) = delete; + + CudaModuleNV( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_module( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + CudaModuleNV & operator=( CudaModuleNV const & ) = delete; + + CudaModuleNV & operator=( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_module, rhs.m_module ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::CudaModuleNV const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_module; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_module ) + { + getDispatcher()->vkDestroyCudaModuleNV( + static_cast( m_device ), static_cast( m_module ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_module = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::CudaModuleNV release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_module, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_module, rhs.m_module ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_NV_cuda_kernel_launch === + + VULKAN_HPP_NODISCARD std::vector getCache() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CudaModuleNV m_module = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + class DebugReportCallbackEXT + { + public: + using CType = VkDebugReportCallbackEXT; + using CppType = vk::DebugReportCallbackEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT; + + public: + DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : 
m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateDebugReportCallbackEXT( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_callback ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateDebugReportCallbackEXT" ); + } + } + + DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VkDebugReportCallbackEXT callback, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_callback( callback ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + } + + DebugReportCallbackEXT( std::nullptr_t ) {} + + ~DebugReportCallbackEXT() + { + clear(); + } + + DebugReportCallbackEXT() = delete; + DebugReportCallbackEXT( DebugReportCallbackEXT const & ) = delete; + + DebugReportCallbackEXT( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_callback( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_callback, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT const & ) = delete; + + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_callback, rhs.m_callback ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_callback; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_callback ) + { + getDispatcher()->vkDestroyDebugReportCallbackEXT( static_cast( m_instance ), + static_cast( m_callback ), + reinterpret_cast( m_allocator ) ); + } + m_instance = nullptr; + m_callback = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT release() + { + m_instance = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_callback, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Instance getInstance() const + { + return m_instance; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_callback, rhs.m_callback ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT m_callback = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + class 
DebugUtilsMessengerEXT + { + public: + using CType = VkDebugUtilsMessengerEXT; + using CppType = vk::DebugUtilsMessengerEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateDebugUtilsMessengerEXT( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_messenger ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateDebugUtilsMessengerEXT" ); + } + } + + DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VkDebugUtilsMessengerEXT messenger, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_messenger( messenger ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + } + + DebugUtilsMessengerEXT( std::nullptr_t ) {} + + ~DebugUtilsMessengerEXT() + { + clear(); + } + + DebugUtilsMessengerEXT() = delete; + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT const & ) = delete; + + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_messenger( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_messenger, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT const & ) = delete; + + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_messenger, rhs.m_messenger ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_messenger; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_messenger ) + { + getDispatcher()->vkDestroyDebugUtilsMessengerEXT( static_cast( m_instance ), + static_cast( m_messenger ), + reinterpret_cast( m_allocator ) ); + } + m_instance = nullptr; + m_messenger = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT release() + { + m_instance = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_messenger, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Instance getInstance() const + { + return m_instance; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( 
m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_messenger, rhs.m_messenger ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT m_messenger = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + class DeferredOperationKHR + { + public: + using CType = VkDeferredOperationKHR; + using CppType = vk::DeferredOperationKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + DeferredOperationKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateDeferredOperationKHR( static_cast( *device ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_operation ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateDeferredOperationKHR" ); + } + } + + DeferredOperationKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDeferredOperationKHR operation, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_operation( operation ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + DeferredOperationKHR( std::nullptr_t ) {} + + ~DeferredOperationKHR() + { + clear(); + } + + DeferredOperationKHR() = delete; + DeferredOperationKHR( DeferredOperationKHR const & ) = delete; + + DeferredOperationKHR( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_operation( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_operation, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DeferredOperationKHR & operator=( DeferredOperationKHR const & ) = delete; + + DeferredOperationKHR & operator=( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_operation, rhs.m_operation ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DeferredOperationKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_operation; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_operation ) + { + getDispatcher()->vkDestroyDeferredOperationKHR( static_cast( m_device ), + static_cast( m_operation ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_operation = 
nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DeferredOperationKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_operation, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_operation, rhs.m_operation ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_KHR_deferred_host_operations === + + VULKAN_HPP_NODISCARD uint32_t getMaxConcurrency() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getResult() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result join() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DeferredOperationKHR m_operation = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class DescriptorPool + { + public: + using CType = VkDescriptorPool; + using CppType = vk::DescriptorPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool; + + public: + DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateDescriptorPool( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_descriptorPool ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateDescriptorPool" ); + } + } + + DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_descriptorPool( descriptorPool ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + DescriptorPool( std::nullptr_t ) {} + + ~DescriptorPool() + { + clear(); + } + + DescriptorPool() = delete; + DescriptorPool( DescriptorPool const & ) = delete; + + DescriptorPool( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DescriptorPool & operator=( DescriptorPool const & ) = delete; + + DescriptorPool & operator=( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorPool, rhs.m_descriptorPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DescriptorPool const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_descriptorPool ) + { + getDispatcher()->vkDestroyDescriptorPool( static_cast( m_device ), + static_cast( m_descriptorPool ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_descriptorPool = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DescriptorPool release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorPool, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorPool & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorPool, rhs.m_descriptorPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + void reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class DescriptorSet + { + public: + using CType = VkDescriptorSet; + using CppType = vk::DescriptorSet; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet; + + public: + DescriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorSet descriptorSet, VkDescriptorPool descriptorPool ) + : m_device( *device ), m_descriptorPool( descriptorPool ), m_descriptorSet( descriptorSet ), m_dispatcher( device.getDispatcher() ) + { + } + + DescriptorSet( std::nullptr_t ) {} + + ~DescriptorSet() + { + clear(); + } + + DescriptorSet() = delete; + DescriptorSet( DescriptorSet const & ) = delete; + + DescriptorSet( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) + , m_descriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSet, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( 
rhs.m_dispatcher, nullptr ) )
+      {
+      }
+
+      DescriptorSet & operator=( DescriptorSet const & ) = delete;
+
+      DescriptorSet & operator=( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        if ( this != &rhs )
+        {
+          std::swap( m_device, rhs.m_device );
+          std::swap( m_descriptorPool, rhs.m_descriptorPool );
+          std::swap( m_descriptorSet, rhs.m_descriptorSet );
+          std::swap( m_dispatcher, rhs.m_dispatcher );
+        }
+        return *this;
+      }
+
+      VULKAN_HPP_NAMESPACE::DescriptorSet const & operator*() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_descriptorSet;
+      }
+
+      void clear() VULKAN_HPP_NOEXCEPT
+      {
+        if ( m_descriptorSet )
+        {
+          getDispatcher()->vkFreeDescriptorSets( static_cast<VkDevice>( m_device ),
+                                                 static_cast<VkDescriptorPool>( m_descriptorPool ),
+                                                 1,
+                                                 reinterpret_cast<VkDescriptorSet const *>( &m_descriptorSet ) );
+        }
+        m_device         = nullptr;
+        m_descriptorPool = nullptr;
+        m_descriptorSet  = nullptr;
+        m_dispatcher     = nullptr;
+      }
+
+      VULKAN_HPP_NAMESPACE::DescriptorSet release()
+      {
+        m_device         = nullptr;
+        m_descriptorPool = nullptr;
+        m_dispatcher     = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorSet, nullptr );
+      }
+
+      VULKAN_HPP_NAMESPACE::Device getDevice() const
+      {
+        return m_device;
+      }
+
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+      {
+        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+        return m_dispatcher;
+      }
+
+      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet & rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        std::swap( m_device, rhs.m_device );
+        std::swap( m_descriptorPool, rhs.m_descriptorPool );
+        std::swap( m_descriptorSet, rhs.m_descriptorSet );
+        std::swap( m_dispatcher, rhs.m_dispatcher );
+      }
+
+      //=== VK_VERSION_1_1 ===
+
+      template <typename DataType>
+      void updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data ) const VULKAN_HPP_NOEXCEPT;
+
+      //=== VK_KHR_descriptor_update_template ===
+
+      template <typename DataType>
+      void updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data ) const VULKAN_HPP_NOEXCEPT;
+
+      //=== VK_VALVE_descriptor_set_host_mapping ===
+
+      VULKAN_HPP_NODISCARD void * getHostMappingVALVE() const VULKAN_HPP_NOEXCEPT;
+
+    private:
+      VULKAN_HPP_NAMESPACE::Device m_device = {};
+      VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {};
+      VULKAN_HPP_NAMESPACE::DescriptorSet m_descriptorSet = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+    };
+
+    class DescriptorSets : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet>
+    {
+    public:
+      DescriptorSets( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+                      VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo )
+      {
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+        std::vector<VkDescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkAllocateDescriptorSets(
+          static_cast<VkDevice>( *device ), reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), descriptorSets.data() ) );
+        if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+        {
+          this->reserve( allocateInfo.descriptorSetCount );
+          for ( auto const & descriptorSet : descriptorSets )
+          {
+            this->emplace_back( device, descriptorSet, static_cast<VkDescriptorPool>( allocateInfo.descriptorPool ) );
+          }
+        }
+        else
+        {
+          detail::throwResultException( result, "vkAllocateDescriptorSets" );
+        }
+      }
+
+      DescriptorSets( std::nullptr_t ) {}
+
+      DescriptorSets() = delete;
+
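+      // Illustrative usage sketch (not part of the generated header), assuming
+      // previously created vk::raii::Device `device`, vk::raii::DescriptorPool `pool`
+      // and vk::raii::DescriptorSetLayout `layout`:
+      //
+      //   vk::DescriptorSetAllocateInfo allocateInfo( *pool, *layout );
+      //   vk::raii::DescriptorSets sets( device, allocateInfo );
+      //   // each element frees itself through vkFreeDescriptorSets on destruction
+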
DescriptorSets( DescriptorSets const & ) = delete; + DescriptorSets( DescriptorSets && rhs ) = default; + DescriptorSets & operator=( DescriptorSets const & ) = delete; + DescriptorSets & operator=( DescriptorSets && rhs ) = default; + }; + + class DescriptorSetLayout + { + public: + using CType = VkDescriptorSetLayout; + using CppType = vk::DescriptorSetLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; + + public: + DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateDescriptorSetLayout( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_descriptorSetLayout ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateDescriptorSetLayout" ); + } + } + + DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDescriptorSetLayout descriptorSetLayout, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_descriptorSetLayout( descriptorSetLayout ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + DescriptorSetLayout( std::nullptr_t ) {} + + ~DescriptorSetLayout() + { + clear(); + } + + DescriptorSetLayout() = delete; + DescriptorSetLayout( DescriptorSetLayout const & ) = delete; + + DescriptorSetLayout( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DescriptorSetLayout & operator=( DescriptorSetLayout const & ) = delete; + + DescriptorSetLayout & operator=( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorSetLayout, rhs.m_descriptorSetLayout ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DescriptorSetLayout const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_descriptorSetLayout ) + { + getDispatcher()->vkDestroyDescriptorSetLayout( static_cast( m_device ), + static_cast( m_descriptorSetLayout ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_descriptorSetLayout = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DescriptorSetLayout release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorSetLayout, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorSetLayout, rhs.m_descriptorSetLayout ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_EXT_descriptor_buffer === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getSizeEXT() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getBindingOffsetEXT( uint32_t binding ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayout m_descriptorSetLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class DescriptorUpdateTemplate + { + public: + using CType = VkDescriptorUpdateTemplate; + using CppType = vk::DescriptorUpdateTemplate; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate; + + public: + DescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateDescriptorUpdateTemplate( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_descriptorUpdateTemplate ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateDescriptorUpdateTemplate" ); + } + } + + DescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_descriptorUpdateTemplate( descriptorUpdateTemplate ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + DescriptorUpdateTemplate( std::nullptr_t ) {} + + ~DescriptorUpdateTemplate() + { + clear(); + } + + DescriptorUpdateTemplate() = delete; + DescriptorUpdateTemplate( DescriptorUpdateTemplate const & ) = delete; + + DescriptorUpdateTemplate( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate const & ) = delete; + + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorUpdateTemplate, rhs.m_descriptorUpdateTemplate ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_descriptorUpdateTemplate ) + { + getDispatcher()->vkDestroyDescriptorUpdateTemplate( static_cast( m_device ), + static_cast( m_descriptorUpdateTemplate ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_descriptorUpdateTemplate = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorUpdateTemplate, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorUpdateTemplate, rhs.m_descriptorUpdateTemplate ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate m_descriptorUpdateTemplate = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class DeviceMemory + { + public: + using CType = VkDeviceMemory; + using CppType = vk::DeviceMemory; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory; + + public: + DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkAllocateMemory( static_cast( *device ), + reinterpret_cast( &allocateInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_memory ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkAllocateMemory" ); + } + } + + DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkDeviceMemory memory, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_memory( 
memory ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + DeviceMemory( std::nullptr_t ) {} + + ~DeviceMemory() + { + clear(); + } + + DeviceMemory() = delete; + DeviceMemory( DeviceMemory const & ) = delete; + + DeviceMemory( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_memory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_memory, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DeviceMemory & operator=( DeviceMemory const & ) = delete; + + DeviceMemory & operator=( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_memory, rhs.m_memory ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DeviceMemory const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_memory; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_memory ) + { + getDispatcher()->vkFreeMemory( + static_cast( m_device ), static_cast( m_memory ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_memory = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DeviceMemory release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_memory, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceMemory & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_memory, rhs.m_memory ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD void * mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + void unmapMemory() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getCommitment() const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + VULKAN_HPP_NODISCARD HANDLE getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_pageable_device_local_memory === + + void setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory m_memory = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class DisplayKHR + { + public: + using CType = VkDisplayKHR; + using CppType = vk::DisplayKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = 
VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; + + public: + DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, int32_t drmFd, uint32_t connectorId ) + : m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( physicalDevice.getDispatcher()->vkGetDrmDisplayEXT( + static_cast( *physicalDevice ), drmFd, connectorId, reinterpret_cast( &m_display ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkGetDrmDisplayEXT" ); + } + } + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, Display & dpy, RROutput rrOutput ) + : m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( physicalDevice.getDispatcher()->vkGetRandROutputDisplayEXT( + static_cast( *physicalDevice ), &dpy, rrOutput, reinterpret_cast( &m_display ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkGetRandROutputDisplayEXT" ); + } + } +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t deviceRelativeId ) + : m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( physicalDevice.getDispatcher()->vkGetWinrtDisplayNV( + static_cast( *physicalDevice ), deviceRelativeId, reinterpret_cast( &m_display ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkGetWinrtDisplayNV" ); + } + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, VkDisplayKHR display ) + : m_physicalDevice( *physicalDevice ), m_display( display ), m_dispatcher( physicalDevice.getDispatcher() ) + { + } + + DisplayKHR( std::nullptr_t ) {} + + ~DisplayKHR() + { + clear(); + } + + DisplayKHR() = delete; + DisplayKHR( DisplayKHR const & ) = delete; + + DisplayKHR( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) + , m_display( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_display, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DisplayKHR & operator=( DisplayKHR const & ) = delete; + + DisplayKHR & operator=( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_display, rhs.m_display ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DisplayKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_display; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_display ) + { + getDispatcher()->vkReleaseDisplayEXT( static_cast( m_physicalDevice ), static_cast( m_display ) ); + } + m_physicalDevice = nullptr; + m_display = nullptr; + m_dispatcher = nullptr; + } + + 
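+      // Note: clear() releases the display via vkReleaseDisplayEXT rather than a
+      // vkDestroy* call (displays are acquired from the physical device, not
+      // created), which is also why this wrapper carries no allocator member.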
VULKAN_HPP_NAMESPACE::DisplayKHR release()
+      {
+        m_physicalDevice = nullptr;
+        m_dispatcher     = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_display, nullptr );
+      }
+
+      VULKAN_HPP_NAMESPACE::PhysicalDevice getPhysicalDevice() const
+      {
+        return m_physicalDevice;
+      }
+
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
+      {
+        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+        return m_dispatcher;
+      }
+
+      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR & rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        std::swap( m_physicalDevice, rhs.m_physicalDevice );
+        std::swap( m_display, rhs.m_display );
+        std::swap( m_dispatcher, rhs.m_dispatcher );
+      }
+
+      //=== VK_KHR_display ===
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> getModeProperties() const;
+
+      VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR
+        createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
+                    VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
+
+      //=== VK_KHR_get_display_properties2 ===
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> getModeProperties2() const;
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+      //=== VK_NV_acquire_winrt_display ===
+
+      void acquireWinrtNV() const;
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    private:
+      VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {};
+      VULKAN_HPP_NAMESPACE::DisplayKHR m_display = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
+    };
+
+    class DisplayKHRs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>
+    {
+    public:
+      DisplayKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t planeIndex )
+      {
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher = physicalDevice.getDispatcher();
+        std::vector<VkDisplayKHR> displays;
+        uint32_t displayCount;
+        VULKAN_HPP_NAMESPACE::Result result;
+        do
+        {
+          result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+            dispatcher->vkGetDisplayPlaneSupportedDisplaysKHR( static_cast<VkPhysicalDevice>( *physicalDevice ), planeIndex, &displayCount, nullptr ) );
+          if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount )
+          {
+            displays.resize( displayCount );
+            result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkGetDisplayPlaneSupportedDisplaysKHR(
+              static_cast<VkPhysicalDevice>( *physicalDevice ), planeIndex, &displayCount, displays.data() ) );
+          }
+        } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+        if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+        {
+          VULKAN_HPP_ASSERT( displayCount <= displays.size() );
+          this->reserve( displayCount );
+          for ( auto const & displayKHR : displays )
+          {
+            this->emplace_back( physicalDevice, displayKHR );
+          }
+        }
+        else
+        {
+          detail::throwResultException( result, "vkGetDisplayPlaneSupportedDisplaysKHR" );
+        }
+      }
+
+      DisplayKHRs( std::nullptr_t ) {}
+
+      DisplayKHRs() = delete;
+      DisplayKHRs( DisplayKHRs const & ) = delete;
+      DisplayKHRs( DisplayKHRs && rhs ) = default;
+      DisplayKHRs & operator=( DisplayKHRs const & ) = delete;
+      DisplayKHRs & operator=( DisplayKHRs && rhs ) = default;
+    };
+
+    class DisplayModeKHR
+    {
+    public:
+      using CType   = VkDisplayModeKHR;
+      using CppType = vk::DisplayModeKHR;
+
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR;
+
+    public:
+
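+      // Display modes are not destroyed through a vkDestroy* call and outlive the
+      // wrapper, which is why this class, unlike the other wrappers here, is
+      // copyable and its clear() only resets the handles.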
DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, + VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_physicalDevice( display.getPhysicalDevice() ), m_dispatcher( display.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( display.getDispatcher()->vkCreateDisplayModeKHR( + static_cast( display.getPhysicalDevice() ), + static_cast( *display ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &m_displayModeKHR ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateDisplayModeKHR" ); + } + } + + DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VkDisplayModeKHR displayModeKHR ) + : m_physicalDevice( display.getPhysicalDevice() ), m_displayModeKHR( displayModeKHR ), m_dispatcher( display.getDispatcher() ) + { + } + + DisplayModeKHR( std::nullptr_t ) {} + + ~DisplayModeKHR() + { + clear(); + } + + DisplayModeKHR() = delete; + + DisplayModeKHR( DisplayModeKHR const & rhs ) : m_displayModeKHR( rhs.m_displayModeKHR ), m_dispatcher( rhs.m_dispatcher ) {} + + DisplayModeKHR( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) + , m_displayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DisplayModeKHR & operator=( DisplayModeKHR const & rhs ) + { + m_displayModeKHR = rhs.m_displayModeKHR; + m_dispatcher = rhs.m_dispatcher; + return *this; + } + + DisplayModeKHR & operator=( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_displayModeKHR, rhs.m_displayModeKHR ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DisplayModeKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = nullptr; + m_displayModeKHR = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DisplayModeKHR release() + { + m_physicalDevice = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_displayModeKHR, nullptr ); + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_displayModeKHR, rhs.m_displayModeKHR ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_KHR_display === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR getDisplayPlaneCapabilities( uint32_t planeIndex ) const; + + private: + VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR m_displayModeKHR = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + class Event + { + public: + using CType = VkEvent; + using CppType = vk::Event; + 
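+      // Illustrative usage sketch (not part of the generated header), assuming an
+      // existing vk::raii::Device `device`:
+      //
+      //   vk::raii::Event event( device, vk::EventCreateInfo{} );
+      //   event.set();                                        // vkSetEvent
+      //   if ( event.getStatus() == vk::Result::eEventSet ) { /* signaled */ }
+      //   event.reset();                                      // vkResetEvent
+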
+ static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent; + + public: + Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateEvent( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_event ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateEvent" ); + } + } + + Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkEvent event, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_event( event ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + Event( std::nullptr_t ) {} + + ~Event() + { + clear(); + } + + Event() = delete; + Event( Event const & ) = delete; + + Event( Event && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_event, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + Event & operator=( Event const & ) = delete; + + Event & operator=( Event && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_event, rhs.m_event ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Event const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_event; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_event ) + { + getDispatcher()->vkDestroyEvent( + static_cast( m_device ), static_cast( m_event ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_event = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Event release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_event, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Event & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_event, rhs.m_event ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const; + + void set() const; + + void reset() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + 
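+      // (the owned VkEvent handle, allocator and dispatcher members that follow
+      //  mirror the layout shared by every device-owned wrapper in this header)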
VULKAN_HPP_NAMESPACE::Event m_event = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class Fence + { + public: + using CType = VkFence; + using CppType = vk::Fence; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; + + public: + Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateFence( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_fence ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateFence" ); + } + } + + Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkRegisterDeviceEventEXT( static_cast( *device ), + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_fence ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkRegisterDeviceEventEXT" ); + } + } + + Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, + VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkRegisterDisplayEventEXT( static_cast( *device ), + static_cast( *display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_fence ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkRegisterDisplayEventEXT" ); + } + } + + Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkFence fence, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_fence( fence ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + Fence( std::nullptr_t ) {} + + ~Fence() + { + clear(); + } + + Fence() = delete; + Fence( Fence const & ) = delete; + + Fence( Fence && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_fence, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + Fence & operator=( Fence const & ) = delete; + + Fence & operator=( Fence && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_fence, rhs.m_fence ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Fence const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_fence; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_fence ) + { + getDispatcher()->vkDestroyFence( + static_cast( m_device ), static_cast( m_fence ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_fence = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Fence release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_fence, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_fence, rhs.m_fence ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Fence m_fence = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class Framebuffer + { + public: + using CType = VkFramebuffer; + using CppType = vk::Framebuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; + + public: + Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateFramebuffer( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_framebuffer ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateFramebuffer" ); + } + } + + Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkFramebuffer framebuffer, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_framebuffer( framebuffer ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + Framebuffer( std::nullptr_t ) {} + + ~Framebuffer() + { + clear(); + } + + Framebuffer() = delete; + Framebuffer( Framebuffer const & ) = 
    class Framebuffer
    {
    public:
      using CType = VkFramebuffer;
      using CppType = vk::Framebuffer;

      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer;
      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer;

    public:
      Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                   VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo,
                   VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateFramebuffer(
          static_cast<VkDevice>( *device ),
          reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
          reinterpret_cast<VkFramebuffer *>( &m_framebuffer ) ) );
        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
        {
          detail::throwResultException( result, "vkCreateFramebuffer" );
        }
      }

      Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                   VkFramebuffer framebuffer,
                   VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_framebuffer( framebuffer )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {}

      Framebuffer( std::nullptr_t ) {}

      ~Framebuffer() { clear(); }

      Framebuffer() = delete;
      Framebuffer( Framebuffer const & ) = delete;

      Framebuffer( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT
        : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
        , m_framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_framebuffer, {} ) )
        , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
      {}

      Framebuffer & operator=( Framebuffer const & ) = delete;

      Framebuffer & operator=( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT
      {
        if ( this != &rhs )
        {
          std::swap( m_device, rhs.m_device );
          std::swap( m_framebuffer, rhs.m_framebuffer );
          std::swap( m_allocator, rhs.m_allocator );
          std::swap( m_dispatcher, rhs.m_dispatcher );
        }
        return *this;
      }

      VULKAN_HPP_NAMESPACE::Framebuffer const & operator*() const VULKAN_HPP_NOEXCEPT { return m_framebuffer; }

      void clear() VULKAN_HPP_NOEXCEPT
      {
        if ( m_framebuffer )
        {
          getDispatcher()->vkDestroyFramebuffer( static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( m_framebuffer ),
                                                 reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
        }
        m_device = nullptr;
        m_framebuffer = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
      }

      VULKAN_HPP_NAMESPACE::Framebuffer release()
      {
        m_device = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_framebuffer, nullptr );
      }

      VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; }

      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
      {
        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
        return m_dispatcher;
      }

      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Framebuffer & rhs ) VULKAN_HPP_NOEXCEPT
      {
        std::swap( m_device, rhs.m_device );
        std::swap( m_framebuffer, rhs.m_framebuffer );
        std::swap( m_allocator, rhs.m_allocator );
        std::swap( m_dispatcher, rhs.m_dispatcher );
      }

      //=== VK_QCOM_tile_properties ===

      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> getTilePropertiesQCOM() const;

    private:
      VULKAN_HPP_NAMESPACE::Device m_device = {};
      VULKAN_HPP_NAMESPACE::Framebuffer m_framebuffer = {};
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
    };

    class Image
    {
    public:
      using CType = VkImage;
      using CppType = vk::Image;

      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage;
      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage;

    public:
      Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
             VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo,
             VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateImage(
          static_cast<VkDevice>( *device ),
          reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
          reinterpret_cast<VkImage *>( &m_image ) ) );
        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
        {
          detail::throwResultException( result, "vkCreateImage" );
        }
      }
      Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
             VkImage image,
             VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_image( image )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {}

      Image( std::nullptr_t ) {}

      ~Image() { clear(); }

      Image() = delete;
      Image( Image const & ) = delete;

      Image( Image && rhs ) VULKAN_HPP_NOEXCEPT
        : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
        , m_image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_image, {} ) )
        , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
      {}

      Image & operator=( Image const & ) = delete;

      Image & operator=( Image && rhs ) VULKAN_HPP_NOEXCEPT
      {
        if ( this != &rhs )
        {
          std::swap( m_device, rhs.m_device );
          std::swap( m_image, rhs.m_image );
          std::swap( m_allocator, rhs.m_allocator );
          std::swap( m_dispatcher, rhs.m_dispatcher );
        }
        return *this;
      }

      VULKAN_HPP_NAMESPACE::Image const & operator*() const VULKAN_HPP_NOEXCEPT { return m_image; }

      void clear() VULKAN_HPP_NOEXCEPT
      {
        if ( m_image )
        {
          getDispatcher()->vkDestroyImage( static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ),
                                           reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
        }
        m_device = nullptr;
        m_image = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
      }

      VULKAN_HPP_NAMESPACE::Image release()
      {
        m_device = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_image, nullptr );
      }

      VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; }

      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
      {
        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
        return m_dispatcher;
      }

      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Image & rhs ) VULKAN_HPP_NOEXCEPT
      {
        std::swap( m_device, rhs.m_device );
        std::swap( m_image, rhs.m_image );
        std::swap( m_allocator, rhs.m_allocator );
        std::swap( m_dispatcher, rhs.m_dispatcher );
      }

      //=== VK_VERSION_1_0 ===

      void bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const;

      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements() const VULKAN_HPP_NOEXCEPT;

      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements> getSparseMemoryRequirements() const;

      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout
        getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT;

      //=== VK_EXT_image_drm_format_modifier ===

      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT getDrmFormatModifierPropertiesEXT() const;

      //=== VK_EXT_host_image_copy ===

      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
        getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;

      template <typename X, typename Y, typename... Z>
      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
        getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
      //=== VK_KHR_maintenance5 ===

      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
        getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;

      template <typename X, typename Y, typename... Z>
      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
        getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;

    private:
      VULKAN_HPP_NAMESPACE::Device m_device = {};
      VULKAN_HPP_NAMESPACE::Image m_image = {};
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
    };

    class ImageView
    {
    public:
      using CType = VkImageView;
      using CppType = vk::ImageView;

      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView;

    public:
      ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                 VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateImageView(
          static_cast<VkDevice>( *device ),
          reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
          reinterpret_cast<VkImageView *>( &m_imageView ) ) );
        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
        {
          detail::throwResultException( result, "vkCreateImageView" );
        }
      }

      ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                 VkImageView imageView,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_imageView( imageView )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {}

      ImageView( std::nullptr_t ) {}

      ~ImageView() { clear(); }

      ImageView() = delete;
      ImageView( ImageView const & ) = delete;

      ImageView( ImageView && rhs ) VULKAN_HPP_NOEXCEPT
        : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
        , m_imageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_imageView, {} ) )
        , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
      {}

      ImageView & operator=( ImageView const & ) = delete;

      ImageView & operator=( ImageView && rhs ) VULKAN_HPP_NOEXCEPT
      {
        if ( this != &rhs )
        {
          std::swap( m_device, rhs.m_device );
          std::swap( m_imageView, rhs.m_imageView );
          std::swap( m_allocator, rhs.m_allocator );
          std::swap( m_dispatcher, rhs.m_dispatcher );
        }
        return *this;
      }

      VULKAN_HPP_NAMESPACE::ImageView const & operator*() const VULKAN_HPP_NOEXCEPT { return m_imageView; }

      void clear() VULKAN_HPP_NOEXCEPT
      {
        if ( m_imageView )
        {
          getDispatcher()->vkDestroyImageView( static_cast<VkDevice>( m_device ), static_cast<VkImageView>( m_imageView ),
                                               reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
        }
        m_device = nullptr;
        m_imageView = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
      }
      VULKAN_HPP_NAMESPACE::ImageView release()
      {
        m_device = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_imageView, nullptr );
      }

      VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; }

      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
      {
        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
        return m_dispatcher;
      }

      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ImageView & rhs ) VULKAN_HPP_NOEXCEPT
      {
        std::swap( m_device, rhs.m_device );
        std::swap( m_imageView, rhs.m_imageView );
        std::swap( m_allocator, rhs.m_allocator );
        std::swap( m_dispatcher, rhs.m_dispatcher );
      }

      //=== VK_NVX_image_view_handle ===

      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX getAddressNVX() const;

    private:
      VULKAN_HPP_NAMESPACE::Device m_device = {};
      VULKAN_HPP_NAMESPACE::ImageView m_imageView = {};
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
    };
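    // [editor note, not part of the generated upstream header] Beyond the destructor,
    // each wrapper offers three ownership operations: clear() destroys the handle
    // immediately, release() relinquishes ownership and returns the raw handle, and
    // swap() exchanges state with another wrapper. A hedged sketch, assuming an
    // existing vk::raii::ImageView named `view`:
    //
    //   vk::ImageView raw = view.release();  // `view` will no longer destroy it
    //   // ... the caller is now responsible for destroying `raw` ...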
    class IndirectCommandsLayoutNV
    {
    public:
      using CType = VkIndirectCommandsLayoutNV;
      using CppType = vk::IndirectCommandsLayoutNV;

      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;

    public:
      IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                                VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo,
                                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateIndirectCommandsLayoutNV(
          static_cast<VkDevice>( *device ),
          reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
          reinterpret_cast<VkIndirectCommandsLayoutNV *>( &m_indirectCommandsLayout ) ) );
        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
        {
          detail::throwResultException( result, "vkCreateIndirectCommandsLayoutNV" );
        }
      }

      IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                                VkIndirectCommandsLayoutNV indirectCommandsLayout,
                                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_indirectCommandsLayout( indirectCommandsLayout )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {}

      IndirectCommandsLayoutNV( std::nullptr_t ) {}

      ~IndirectCommandsLayoutNV() { clear(); }

      IndirectCommandsLayoutNV() = delete;
      IndirectCommandsLayoutNV( IndirectCommandsLayoutNV const & ) = delete;

      IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT
        : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
        , m_indirectCommandsLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ) )
        , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
      {}

      IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV const & ) = delete;

      IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT
      {
        if ( this != &rhs )
        {
          std::swap( m_device, rhs.m_device );
          std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout );
          std::swap( m_allocator, rhs.m_allocator );
          std::swap( m_dispatcher, rhs.m_dispatcher );
        }
        return *this;
      }

      VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & operator*() const VULKAN_HPP_NOEXCEPT { return m_indirectCommandsLayout; }

      void clear() VULKAN_HPP_NOEXCEPT
      {
        if ( m_indirectCommandsLayout )
        {
          getDispatcher()->vkDestroyIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ),
                                                              static_cast<VkIndirectCommandsLayoutNV>( m_indirectCommandsLayout ),
                                                              reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
        }
        m_device = nullptr;
        m_indirectCommandsLayout = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
      }

      VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV release()
      {
        m_device = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_indirectCommandsLayout, nullptr );
      }

      VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; }

      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
      {
        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
        return m_dispatcher;
      }

      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV & rhs ) VULKAN_HPP_NOEXCEPT
      {
        std::swap( m_device, rhs.m_device );
        std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout );
        std::swap( m_allocator, rhs.m_allocator );
        std::swap( m_dispatcher, rhs.m_dispatcher );
      }

    private:
      VULKAN_HPP_NAMESPACE::Device m_device = {};
      VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV m_indirectCommandsLayout = {};
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
    };

    class MicromapEXT
    {
    public:
      using CType = VkMicromapEXT;
      using CppType = vk::MicromapEXT;

      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT;
      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;

    public:
      MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                   VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo,
                   VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateMicromapEXT(
          static_cast<VkDevice>( *device ),
          reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
          reinterpret_cast<VkMicromapEXT *>( &m_micromap ) ) );
        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
        {
          detail::throwResultException( result, "vkCreateMicromapEXT" );
        }
      }

      MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                   VkMicromapEXT micromap,
                   VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_micromap( micromap )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {}

      MicromapEXT( std::nullptr_t ) {}

      ~MicromapEXT() { clear(); }

      MicromapEXT() = delete;
      MicromapEXT( MicromapEXT const & ) = delete;

      MicromapEXT( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT
        : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
        , m_micromap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_micromap, {} ) )
        , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
      {}

      MicromapEXT & operator=( MicromapEXT const & ) = delete;

      MicromapEXT & operator=( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT
      {
        if ( this != &rhs )
        {
          std::swap( m_device, rhs.m_device );
          std::swap( m_micromap, rhs.m_micromap );
          std::swap( m_allocator, rhs.m_allocator );
          std::swap( m_dispatcher, rhs.m_dispatcher );
        }
        return *this;
      }

      VULKAN_HPP_NAMESPACE::MicromapEXT const & operator*() const VULKAN_HPP_NOEXCEPT { return m_micromap; }

      void clear() VULKAN_HPP_NOEXCEPT
      {
        if ( m_micromap )
        {
          getDispatcher()->vkDestroyMicromapEXT( static_cast<VkDevice>( m_device ), static_cast<VkMicromapEXT>( m_micromap ),
                                                 reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
        }
        m_device = nullptr;
        m_micromap = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
      }

      VULKAN_HPP_NAMESPACE::MicromapEXT release()
      {
        m_device = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_micromap, nullptr );
      }

      VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; }

      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
      {
        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
        return m_dispatcher;
      }

      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::MicromapEXT & rhs ) VULKAN_HPP_NOEXCEPT
      {
        std::swap( m_device, rhs.m_device );
        std::swap( m_micromap, rhs.m_micromap );
        std::swap( m_allocator, rhs.m_allocator );
        std::swap( m_dispatcher, rhs.m_dispatcher );
      }

    private:
      VULKAN_HPP_NAMESPACE::Device m_device = {};
      VULKAN_HPP_NAMESPACE::MicromapEXT m_micromap = {};
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
    };

    class OpticalFlowSessionNV
    {
    public:
      using CType = VkOpticalFlowSessionNV;
      using CppType = vk::OpticalFlowSessionNV;

      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV;
      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;

    public:
      OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                            VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
                            VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateOpticalFlowSessionNV(
          static_cast<VkDevice>( *device ),
          reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
          reinterpret_cast<VkOpticalFlowSessionNV *>( &m_session ) ) );
        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
        {
          detail::throwResultException( result, "vkCreateOpticalFlowSessionNV" );
        }
      }
      OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                            VkOpticalFlowSessionNV session,
                            VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_session( session )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {}

      OpticalFlowSessionNV( std::nullptr_t ) {}

      ~OpticalFlowSessionNV() { clear(); }

      OpticalFlowSessionNV() = delete;
      OpticalFlowSessionNV( OpticalFlowSessionNV const & ) = delete;

      OpticalFlowSessionNV( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT
        : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
        , m_session( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_session, {} ) )
        , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
      {}

      OpticalFlowSessionNV & operator=( OpticalFlowSessionNV const & ) = delete;

      OpticalFlowSessionNV & operator=( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT
      {
        if ( this != &rhs )
        {
          std::swap( m_device, rhs.m_device );
          std::swap( m_session, rhs.m_session );
          std::swap( m_allocator, rhs.m_allocator );
          std::swap( m_dispatcher, rhs.m_dispatcher );
        }
        return *this;
      }

      VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV const & operator*() const VULKAN_HPP_NOEXCEPT { return m_session; }

      void clear() VULKAN_HPP_NOEXCEPT
      {
        if ( m_session )
        {
          getDispatcher()->vkDestroyOpticalFlowSessionNV( static_cast<VkDevice>( m_device ), static_cast<VkOpticalFlowSessionNV>( m_session ),
                                                          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
        }
        m_device = nullptr;
        m_session = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
      }

      VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV release()
      {
        m_device = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_session, nullptr );
      }

      VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; }

      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
      {
        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
        return m_dispatcher;
      }

      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV & rhs ) VULKAN_HPP_NOEXCEPT
      {
        std::swap( m_device, rhs.m_device );
        std::swap( m_session, rhs.m_session );
        std::swap( m_allocator, rhs.m_allocator );
        std::swap( m_dispatcher, rhs.m_dispatcher );
      }

      //=== VK_NV_optical_flow ===

      void bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
                      VULKAN_HPP_NAMESPACE::ImageView view,
                      VULKAN_HPP_NAMESPACE::ImageLayout layout ) const;

    private:
      VULKAN_HPP_NAMESPACE::Device m_device = {};
      VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV m_session = {};
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
    };
    class PerformanceConfigurationINTEL
    {
    public:
      using CType = VkPerformanceConfigurationINTEL;
      using CppType = vk::PerformanceConfigurationINTEL;

      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL;
      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;

    public:
      PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                                     VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo )
        : m_device( *device ), m_dispatcher( device.getDispatcher() )
      {
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkAcquirePerformanceConfigurationINTEL(
          static_cast<VkDevice>( *device ),
          reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
          reinterpret_cast<VkPerformanceConfigurationINTEL *>( &m_configuration ) ) );
        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
        {
          detail::throwResultException( result, "vkAcquirePerformanceConfigurationINTEL" );
        }
      }

      PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkPerformanceConfigurationINTEL configuration )
        : m_device( *device ), m_configuration( configuration ), m_dispatcher( device.getDispatcher() )
      {}

      PerformanceConfigurationINTEL( std::nullptr_t ) {}

      ~PerformanceConfigurationINTEL() { clear(); }

      PerformanceConfigurationINTEL() = delete;
      PerformanceConfigurationINTEL( PerformanceConfigurationINTEL const & ) = delete;

      PerformanceConfigurationINTEL( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT
        : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
        , m_configuration( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_configuration, {} ) )
        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
      {}

      PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL const & ) = delete;

      PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT
      {
        if ( this != &rhs )
        {
          std::swap( m_device, rhs.m_device );
          std::swap( m_configuration, rhs.m_configuration );
          std::swap( m_dispatcher, rhs.m_dispatcher );
        }
        return *this;
      }

      VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & operator*() const VULKAN_HPP_NOEXCEPT { return m_configuration; }

      void clear() VULKAN_HPP_NOEXCEPT
      {
        if ( m_configuration )
        {
          getDispatcher()->vkReleasePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ),
                                                                   static_cast<VkPerformanceConfigurationINTEL>( m_configuration ) );
        }
        m_device = nullptr;
        m_configuration = nullptr;
        m_dispatcher = nullptr;
      }

      VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL release()
      {
        m_device = nullptr;
        m_dispatcher = nullptr;
        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_configuration, nullptr );
      }

      VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; }

      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
      {
        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
        return m_dispatcher;
      }

      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL & rhs ) VULKAN_HPP_NOEXCEPT
      {
        std::swap( m_device, rhs.m_device );
        std::swap( m_configuration, rhs.m_configuration );
        std::swap( m_dispatcher, rhs.m_dispatcher );
      }

    private:
      VULKAN_HPP_NAMESPACE::Device m_device = {};
      VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL m_configuration = {};
      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
    };
    class PipelineCache
    {
    public:
      using CType = VkPipelineCache;
      using CppType = vk::PipelineCache;

      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache;
      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache;

    public:
      PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                     VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo,
                     VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreatePipelineCache(
          static_cast<VkDevice>( *device ),
          reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
          reinterpret_cast<VkPipelineCache *>( &m_pipelineCache ) ) );
        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
        {
          detail::throwResultException( result, "vkCreatePipelineCache" );
        }
      }

      PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                     VkPipelineCache pipelineCache,
                     VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_pipelineCache( pipelineCache )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {}

      PipelineCache( std::nullptr_t ) {}

      ~PipelineCache() { clear(); }

      PipelineCache() = delete;
      PipelineCache( PipelineCache const & ) = delete;

      PipelineCache( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT
        : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
        , m_pipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineCache, {} ) )
        , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
      {}

      PipelineCache & operator=( PipelineCache const & ) = delete;

      PipelineCache & operator=( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT
      {
        if ( this != &rhs )
        {
          std::swap( m_device, rhs.m_device );
          std::swap( m_pipelineCache, rhs.m_pipelineCache );
          std::swap( m_allocator, rhs.m_allocator );
          std::swap( m_dispatcher, rhs.m_dispatcher );
        }
        return *this;
      }

      VULKAN_HPP_NAMESPACE::PipelineCache const & operator*() const VULKAN_HPP_NOEXCEPT { return m_pipelineCache; }

      void clear() VULKAN_HPP_NOEXCEPT
      {
        if ( m_pipelineCache )
        {
          getDispatcher()->vkDestroyPipelineCache( static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( m_pipelineCache ),
                                                   reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
        }
        m_device = nullptr;
        m_pipelineCache = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
      }

      VULKAN_HPP_NAMESPACE::PipelineCache release()
      {
        m_device = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_pipelineCache, nullptr );
      }

      VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; }

      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
      {
        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
        return m_dispatcher;
      }

      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache & rhs ) VULKAN_HPP_NOEXCEPT
      {
        std::swap( m_device, rhs.m_device );
        std::swap( m_pipelineCache, rhs.m_pipelineCache );
        std::swap( m_allocator, rhs.m_allocator );
        std::swap( m_dispatcher, rhs.m_dispatcher );
      }
      //=== VK_VERSION_1_0 ===

      VULKAN_HPP_NODISCARD std::vector<uint8_t> getData() const;

      void merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches ) const;

    private:
      VULKAN_HPP_NAMESPACE::Device m_device = {};
      VULKAN_HPP_NAMESPACE::PipelineCache m_pipelineCache = {};
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
    };
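    // [editor note, not part of the generated upstream header] getData() returns the
    // driver's cache blob as std::vector<uint8_t>; it can be persisted and fed back
    // through vk::PipelineCacheCreateInfo on a later run, and merge() folds other
    // caches into this one. A hedged sketch (file I/O and error handling omitted):
    //
    //   std::vector<uint8_t> blob = cache.getData();
    //   // ...write `blob` to disk; next run:
    //   // vk::PipelineCacheCreateInfo info( {}, blob.size(), blob.data() );
    //   // vk::raii::PipelineCache warmCache( device, info );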
    class Pipeline
    {
    public:
      using CType = VkPipeline;
      using CppType = vk::Pipeline;

      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline;

    public:
      Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
                VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo,
                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {
        m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateComputePipelines(
          static_cast<VkDevice>( *device ),
          pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
          1,
          reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
          reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
        if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
             ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
        {
          detail::throwResultException( m_constructorSuccessCode, "vkCreateComputePipelines" );
        }
      }

# if defined( VK_ENABLE_BETA_EXTENSIONS )
      Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
                VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo,
                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {
        m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateExecutionGraphPipelinesAMDX(
          static_cast<VkDevice>( *device ),
          pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
          1,
          reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
          reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
        if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
             ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
        {
          detail::throwResultException( m_constructorSuccessCode, "vkCreateExecutionGraphPipelinesAMDX" );
        }
      }
# endif /*VK_ENABLE_BETA_EXTENSIONS*/

      Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
                VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo,
                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {
        m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateGraphicsPipelines(
          static_cast<VkDevice>( *device ),
          pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
          1,
          reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
          reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
        if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
             ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
        {
          detail::throwResultException( m_constructorSuccessCode, "vkCreateGraphicsPipelines" );
        }
      }

      Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
                VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo,
                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {
        m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateRayTracingPipelinesKHR(
          static_cast<VkDevice>( *device ),
          deferredOperation ? static_cast<VkDeferredOperationKHR>( **deferredOperation ) : 0,
          pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
          1,
          reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
          reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
        if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
             ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) &&
             ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) &&
             ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
        {
          detail::throwResultException( m_constructorSuccessCode, "vkCreateRayTracingPipelinesKHR" );
        }
      }
      Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
                VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo,
                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {
        m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateRayTracingPipelinesNV(
          static_cast<VkDevice>( *device ),
          pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
          1,
          reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
          reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
        if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
             ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
        {
          detail::throwResultException( m_constructorSuccessCode, "vkCreateRayTracingPipelinesNV" );
        }
      }

      Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                VkPipeline pipeline,
                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr,
                VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess )
        : m_device( *device )
        , m_pipeline( pipeline )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_constructorSuccessCode( successCode )
        , m_dispatcher( device.getDispatcher() )
      {}

      Pipeline( std::nullptr_t ) {}

      ~Pipeline() { clear(); }

      Pipeline() = delete;
      Pipeline( Pipeline const & ) = delete;

      Pipeline( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT
        : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
        , m_pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipeline, {} ) )
        , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
        , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) )
        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
      {}

      Pipeline & operator=( Pipeline const & ) = delete;

      Pipeline & operator=( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT
      {
        if ( this != &rhs )
        {
          std::swap( m_device, rhs.m_device );
          std::swap( m_pipeline, rhs.m_pipeline );
          std::swap( m_allocator, rhs.m_allocator );
          std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode );
          std::swap( m_dispatcher, rhs.m_dispatcher );
        }
        return *this;
      }

      VULKAN_HPP_NAMESPACE::Pipeline const & operator*() const VULKAN_HPP_NOEXCEPT { return m_pipeline; }

      void clear() VULKAN_HPP_NOEXCEPT
      {
        if ( m_pipeline )
        {
          getDispatcher()->vkDestroyPipeline( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ),
                                              reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
        }
        m_device = nullptr;
        m_pipeline = nullptr;
        m_allocator = nullptr;
        m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
        m_dispatcher = nullptr;
      }

      VULKAN_HPP_NAMESPACE::Pipeline release()
      {
        m_device = nullptr;
        m_allocator = nullptr;
        m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
        m_dispatcher = nullptr;
        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_pipeline, nullptr );
      }

      VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const { return m_constructorSuccessCode; }

      VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; }

      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
      {
        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
        return m_dispatcher;
      }

      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline & rhs ) VULKAN_HPP_NOEXCEPT
      {
        std::swap( m_device, rhs.m_device );
        std::swap( m_pipeline, rhs.m_pipeline );
        std::swap( m_allocator, rhs.m_allocator );
        std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode );
        std::swap( m_dispatcher, rhs.m_dispatcher );
      }
      //=== VK_AMD_shader_info ===

      VULKAN_HPP_NODISCARD std::vector<uint8_t> getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                                                                  VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const;

# if defined( VK_ENABLE_BETA_EXTENSIONS )
      //=== VK_AMDX_shader_enqueue ===

      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX getExecutionGraphScratchSizeAMDX() const;

      VULKAN_HPP_NODISCARD uint32_t getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/

      //=== VK_KHR_ray_tracing_pipeline ===

      template <typename DataType>
      VULKAN_HPP_NODISCARD std::vector<DataType> getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const;

      template <typename DataType>
      VULKAN_HPP_NODISCARD DataType getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const;

      template <typename DataType>
      VULKAN_HPP_NODISCARD std::vector<DataType>
        getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const;

      template <typename DataType>
      VULKAN_HPP_NODISCARD DataType getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const;

      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize
        getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT;

      //=== VK_NV_ray_tracing ===

      template <typename DataType>
      VULKAN_HPP_NODISCARD std::vector<DataType> getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const;

      template <typename DataType>
      VULKAN_HPP_NODISCARD DataType getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const;

      void compileDeferredNV( uint32_t shader ) const;

    private:
      VULKAN_HPP_NAMESPACE::Device m_device = {};
      VULKAN_HPP_NAMESPACE::Pipeline m_pipeline = {};
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
      VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
    };
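    // [editor note, not part of the generated upstream header] Unlike the other
    // wrappers, Pipeline construction tolerates more than one success code (for
    // example ePipelineCompileRequiredEXT), so the constructor stores the result
    // instead of always throwing. Callers can inspect it after construction:
    //
    //   vk::raii::Pipeline pipeline( device, cache, computeCreateInfo );
    //   if ( pipeline.getConstructorSuccessCode() == vk::Result::ePipelineCompileRequiredEXT )
    //   {
    //     // creation succeeded without a full compile; handle as appropriate
    //   }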
    class Pipelines : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>
    {
    public:
      Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
                 VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
      {
        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
        std::vector<VkPipeline> pipelines( createInfos.size() );
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateComputePipelines(
          static_cast<VkDevice>( *device ),
          pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
          createInfos.size(),
          reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
          pipelines.data() ) );
        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
        {
          this->reserve( createInfos.size() );
          for ( auto const & pipeline : pipelines )
          {
            this->emplace_back( device, pipeline, allocator, result );
          }
        }
        else
        {
          detail::throwResultException( result, "vkCreateComputePipelines" );
        }
      }

# if defined( VK_ENABLE_BETA_EXTENSIONS )
      Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
                 VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
      {
        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
        std::vector<VkPipeline> pipelines( createInfos.size() );
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateExecutionGraphPipelinesAMDX(
          static_cast<VkDevice>( *device ),
          pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
          createInfos.size(),
          reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
          pipelines.data() ) );
        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
        {
          this->reserve( createInfos.size() );
          for ( auto const & pipeline : pipelines )
          {
            this->emplace_back( device, pipeline, allocator, result );
          }
        }
        else
        {
          detail::throwResultException( result, "vkCreateExecutionGraphPipelinesAMDX" );
        }
      }
# endif /*VK_ENABLE_BETA_EXTENSIONS*/

      Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
                 VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
      {
        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
        std::vector<VkPipeline> pipelines( createInfos.size() );
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateGraphicsPipelines(
          static_cast<VkDevice>( *device ),
          pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
          createInfos.size(),
          reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
          pipelines.data() ) );
        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
        {
          this->reserve( createInfos.size() );
          for ( auto const & pipeline : pipelines )
          {
            this->emplace_back( device, pipeline, allocator, result );
          }
        }
        else
        {
          detail::throwResultException( result, "vkCreateGraphicsPipelines" );
        }
      }
      Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
                 VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
      {
        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
        std::vector<VkPipeline> pipelines( createInfos.size() );
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateRayTracingPipelinesKHR(
          static_cast<VkDevice>( *device ),
          deferredOperation ? static_cast<VkDeferredOperationKHR>( **deferredOperation ) : 0,
          pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
          createInfos.size(),
          reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
          pipelines.data() ) );
        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) ||
             ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
        {
          this->reserve( createInfos.size() );
          for ( auto const & pipeline : pipelines )
          {
            this->emplace_back( device, pipeline, allocator, result );
          }
        }
        else
        {
          detail::throwResultException( result, "vkCreateRayTracingPipelinesKHR" );
        }
      }

      Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
                 VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
      {
        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
        std::vector<VkPipeline> pipelines( createInfos.size() );
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateRayTracingPipelinesNV(
          static_cast<VkDevice>( *device ),
          pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
          createInfos.size(),
          reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
          pipelines.data() ) );
        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
        {
          this->reserve( createInfos.size() );
          for ( auto const & pipeline : pipelines )
          {
            this->emplace_back( device, pipeline, allocator, result );
          }
        }
        else
        {
          detail::throwResultException( result, "vkCreateRayTracingPipelinesNV" );
        }
      }

      Pipelines( std::nullptr_t ) {}

      Pipelines() = delete;
      Pipelines( Pipelines const & ) = delete;
      Pipelines( Pipelines && rhs ) = default;
      Pipelines & operator=( Pipelines const & ) = delete;
      Pipelines & operator=( Pipelines && rhs ) = default;
    };
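    // [editor note, not part of the generated upstream header] Pipelines is a thin
    // std::vector of Pipeline wrappers: it issues a single batched
    // vkCreate*Pipelines call for N create infos and propagates the shared result
    // code into every element. A hedged sketch, assuming `createInfos` is an
    // ArrayProxy of vk::GraphicsPipelineCreateInfo:
    //
    //   vk::raii::Pipelines pipelines( device, cache, createInfos );
    //   for ( auto const & p : pipelines ) { /* each p owns one VkPipeline */ }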
    class PipelineLayout
    {
    public:
      using CType = VkPipelineLayout;
      using CppType = vk::PipelineLayout;

      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout;
      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout;

    public:
      PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                      VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo,
                      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreatePipelineLayout(
          static_cast<VkDevice>( *device ),
          reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
          reinterpret_cast<VkPipelineLayout *>( &m_pipelineLayout ) ) );
        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
        {
          detail::throwResultException( result, "vkCreatePipelineLayout" );
        }
      }

      PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                      VkPipelineLayout pipelineLayout,
                      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_pipelineLayout( pipelineLayout )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {}

      PipelineLayout( std::nullptr_t ) {}

      ~PipelineLayout() { clear(); }

      PipelineLayout() = delete;
      PipelineLayout( PipelineLayout const & ) = delete;

      PipelineLayout( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT
        : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
        , m_pipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ) )
        , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
      {}

      PipelineLayout & operator=( PipelineLayout const & ) = delete;

      PipelineLayout & operator=( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT
      {
        if ( this != &rhs )
        {
          std::swap( m_device, rhs.m_device );
          std::swap( m_pipelineLayout, rhs.m_pipelineLayout );
          std::swap( m_allocator, rhs.m_allocator );
          std::swap( m_dispatcher, rhs.m_dispatcher );
        }
        return *this;
      }

      VULKAN_HPP_NAMESPACE::PipelineLayout const & operator*() const VULKAN_HPP_NOEXCEPT { return m_pipelineLayout; }

      void clear() VULKAN_HPP_NOEXCEPT
      {
        if ( m_pipelineLayout )
        {
          getDispatcher()->vkDestroyPipelineLayout( static_cast<VkDevice>( m_device ), static_cast<VkPipelineLayout>( m_pipelineLayout ),
                                                    reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
        }
        m_device = nullptr;
        m_pipelineLayout = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
      }

      VULKAN_HPP_NAMESPACE::PipelineLayout release()
      {
        m_device = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_pipelineLayout, nullptr );
      }

      VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; }

      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
      {
        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
        return m_dispatcher;
      }

      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout & rhs ) VULKAN_HPP_NOEXCEPT
      {
        std::swap( m_device, rhs.m_device );
        std::swap( m_pipelineLayout, rhs.m_pipelineLayout );
        std::swap( m_allocator, rhs.m_allocator );
        std::swap( m_dispatcher, rhs.m_dispatcher );
      }

    private:
      VULKAN_HPP_NAMESPACE::Device m_device = {};
      VULKAN_HPP_NAMESPACE::PipelineLayout m_pipelineLayout = {};
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
    };
    class PrivateDataSlot
    {
    public:
      using CType = VkPrivateDataSlot;
      using CppType = vk::PrivateDataSlot;

      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot;
      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;

    public:
      PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                       VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
                       VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {
        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreatePrivateDataSlot(
          static_cast<VkDevice>( *device ),
          reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
          reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
          reinterpret_cast<VkPrivateDataSlot *>( &m_privateDataSlot ) ) );
        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
        {
          detail::throwResultException( result, "vkCreatePrivateDataSlot" );
        }
      }

      PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
                       VkPrivateDataSlot privateDataSlot,
                       VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
        : m_device( *device )
        , m_privateDataSlot( privateDataSlot )
        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
        , m_dispatcher( device.getDispatcher() )
      {}

      PrivateDataSlot( std::nullptr_t ) {}

      ~PrivateDataSlot() { clear(); }

      PrivateDataSlot() = delete;
      PrivateDataSlot( PrivateDataSlot const & ) = delete;

      PrivateDataSlot( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT
        : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
        , m_privateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ) )
        , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
      {}

      PrivateDataSlot & operator=( PrivateDataSlot const & ) = delete;

      PrivateDataSlot & operator=( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT
      {
        if ( this != &rhs )
        {
          std::swap( m_device, rhs.m_device );
          std::swap( m_privateDataSlot, rhs.m_privateDataSlot );
          std::swap( m_allocator, rhs.m_allocator );
          std::swap( m_dispatcher, rhs.m_dispatcher );
        }
        return *this;
      }

      VULKAN_HPP_NAMESPACE::PrivateDataSlot const & operator*() const VULKAN_HPP_NOEXCEPT { return m_privateDataSlot; }

      void clear() VULKAN_HPP_NOEXCEPT
      {
        if ( m_privateDataSlot )
        {
          getDispatcher()->vkDestroyPrivateDataSlot( static_cast<VkDevice>( m_device ), static_cast<VkPrivateDataSlot>( m_privateDataSlot ),
                                                     reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
        }
        m_device = nullptr;
        m_privateDataSlot = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
      }

      VULKAN_HPP_NAMESPACE::PrivateDataSlot release()
      {
        m_device = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_privateDataSlot, nullptr );
      }

      VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; }

      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
      {
        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
        return m_dispatcher;
      }

      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot & rhs ) VULKAN_HPP_NOEXCEPT
      {
        std::swap( m_device, rhs.m_device );
        std::swap( m_privateDataSlot, rhs.m_privateDataSlot );
        std::swap( m_allocator, rhs.m_allocator );
        std::swap( m_dispatcher, rhs.m_dispatcher );
      }

    private:
      VULKAN_HPP_NAMESPACE::Device m_device = {};
      VULKAN_HPP_NAMESPACE::PrivateDataSlot m_privateDataSlot = {};
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
    };
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; + + public: + QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateQueryPool( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_queryPool ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateQueryPool" ); + } + } + + QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkQueryPool queryPool, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_queryPool( queryPool ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + QueryPool( std::nullptr_t ) {} + + ~QueryPool() + { + clear(); + } + + QueryPool() = delete; + QueryPool( QueryPool const & ) = delete; + + QueryPool( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_queryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queryPool, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + QueryPool & operator=( QueryPool const & ) = delete; + + QueryPool & operator=( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_queryPool, rhs.m_queryPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::QueryPool const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_queryPool ) + { + getDispatcher()->vkDestroyQueryPool( + static_cast( m_device ), static_cast( m_queryPool ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_queryPool = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::QueryPool release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_queryPool, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::QueryPool & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_queryPool, rhs.m_queryPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + template + VULKAN_HPP_NODISCARD std::pair> + getResults( uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags 
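+      // Usage sketch (editorial comment, not generated code): reading two timestamp queries back as
+      // 64-bit values via getResults, assuming a vk::raii::Device `device` and queries already
+      // written on the GPU:
+      //   vk::raii::QueryPool pool( device, vk::QueryPoolCreateInfo( {}, vk::QueryType::eTimestamp, 2 ) );
+      //   auto [result, values] = pool.getResults<uint64_t>( 0, 2, 2 * sizeof( uint64_t ), sizeof( uint64_t ),
+      //                                                      vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );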
VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+
+      template <typename DataType>
+      VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, DataType>
+        getResult( uint32_t firstQuery,
+                   uint32_t queryCount,
+                   VULKAN_HPP_NAMESPACE::DeviceSize stride,
+                   VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+
+      //=== VK_VERSION_1_2 ===
+
+      void reset( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT;
+
+      //=== VK_EXT_host_query_reset ===
+
+      void resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT;
+
+    private:
+      VULKAN_HPP_NAMESPACE::Device m_device = {};
+      VULKAN_HPP_NAMESPACE::QueryPool m_queryPool = {};
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+    };
+
+    class Queue
+    {
+    public:
+      using CType = VkQueue;
+      using CppType = vk::Queue;
+
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue;
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue;
+
+    public:
+      Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, uint32_t queueFamilyIndex, uint32_t queueIndex )
+        : m_dispatcher( device.getDispatcher() )
+      {
+        getDispatcher()->vkGetDeviceQueue( static_cast<VkDevice>( *device ), queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &m_queue ) );
+      }
+
+      Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo )
+        : m_dispatcher( device.getDispatcher() )
+      {
+        getDispatcher()->vkGetDeviceQueue2(
+          static_cast<VkDevice>( *device ), reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &m_queue ) );
+      }
+
+      Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkQueue queue ) : m_queue( queue ), m_dispatcher( device.getDispatcher() )
+      {
+      }
+
+      Queue( std::nullptr_t ) {}
+
+      ~Queue()
+      {
+        clear();
+      }
+
+      Queue() = delete;
+
+      Queue( Queue const & rhs ) : m_queue( rhs.m_queue ), m_dispatcher( rhs.m_dispatcher ) {}
+
+      Queue( Queue && rhs ) VULKAN_HPP_NOEXCEPT
+        : m_queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queue, {} ) )
+        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+      {
+      }
+
+      Queue & operator=( Queue const & rhs )
+      {
+        m_queue      = rhs.m_queue;
+        m_dispatcher = rhs.m_dispatcher;
+        return *this;
+      }
+
+      Queue & operator=( Queue && rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        if ( this != &rhs )
+        {
+          std::swap( m_queue, rhs.m_queue );
+          std::swap( m_dispatcher, rhs.m_dispatcher );
+        }
+        return *this;
+      }
+
+      VULKAN_HPP_NAMESPACE::Queue const & operator*() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_queue;
+      }
+
+      void clear() VULKAN_HPP_NOEXCEPT
+      {
+        m_queue      = nullptr;
+        m_dispatcher = nullptr;
+      }
+
+      VULKAN_HPP_NAMESPACE::Queue release()
+      {
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_queue, nullptr );
+      }
+
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+      {
+        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+        return m_dispatcher;
+      }
+
+      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue & rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        std::swap( m_queue, rhs.m_queue );
+        std::swap( m_dispatcher, rhs.m_dispatcher );
+      }
+
+      //=== VK_VERSION_1_0 ===
+
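+      // Usage sketch (editorial comment, not generated code): submitting a recorded command buffer,
+      // assuming `device` is a vk::raii::Device and `commandBuffer` a recorded vk::raii::CommandBuffer:
+      //   vk::raii::Queue queue( device, /*queueFamilyIndex=*/0, /*queueIndex=*/0 );
+      //   vk::CommandBuffer rawCommandBuffer = *commandBuffer;
+      //   vk::SubmitInfo submitInfo{};
+      //   submitInfo.commandBufferCount = 1;
+      //   submitInfo.pCommandBuffers    = &rawCommandBuffer;
+      //   queue.submit( submitInfo );  // a single SubmitInfo converts to the ArrayProxy parameter below
+      //   queue.waitIdle();            // coarse CPU-side wait; prefer fences in real frame loops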
+      void submit( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
+                   VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+
+      void waitIdle() const;
+
+      void bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
+                       VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+
+      //=== VK_VERSION_1_3 ===
+
+      void submit2( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
+                    VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+
+      //=== VK_KHR_swapchain ===
+
+      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const;
+
+      //=== VK_EXT_debug_utils ===
+
+      void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
+
+      void endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT;
+
+      void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
+
+      //=== VK_NV_device_diagnostic_checkpoints ===
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> getCheckpointDataNV() const;
+
+      //=== VK_INTEL_performance_query ===
+
+      void setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const;
+
+      //=== VK_KHR_synchronization2 ===
+
+      void submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
+                       VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> getCheckpointData2NV() const;
+
+      //=== VK_NV_low_latency2 ===
+
+      void notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo ) const VULKAN_HPP_NOEXCEPT;
+
+    private:
+      VULKAN_HPP_NAMESPACE::Queue m_queue = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+    };
+
+    class RenderPass
+    {
+    public:
+      using CType = VkRenderPass;
+      using CppType = vk::RenderPass;
+
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass;
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass;
+
+    public:
+      RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+                  VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo,
+                  VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+        : m_device( *device )
+        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+        , m_dispatcher( device.getDispatcher() )
+      {
+        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+          device.getDispatcher()->vkCreateRenderPass( static_cast<VkDevice>( *device ),
+                                                      reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
+                                                      reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+                                                      reinterpret_cast<VkRenderPass *>( &m_renderPass ) ) );
+        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+        {
+          detail::throwResultException( result, "vkCreateRenderPass" );
+        }
+      }
+
+      RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+                  VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
+                  VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+        : m_device( *device )
+        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
+        , m_dispatcher( device.getDispatcher() )
+      {
+        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+          device.getDispatcher()->vkCreateRenderPass2( static_cast<VkDevice>( *device ),
+                                                       reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
+                                                       reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
+                                                       reinterpret_cast<VkRenderPass *>( &m_renderPass ) ) );
+        if ( result !=
VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateRenderPass2" ); + } + } + + RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkRenderPass renderPass, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_renderPass( renderPass ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + RenderPass( std::nullptr_t ) {} + + ~RenderPass() + { + clear(); + } + + RenderPass() = delete; + RenderPass( RenderPass const & ) = delete; + + RenderPass( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_renderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_renderPass, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + RenderPass & operator=( RenderPass const & ) = delete; + + RenderPass & operator=( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_renderPass, rhs.m_renderPass ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::RenderPass const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_renderPass; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_renderPass ) + { + getDispatcher()->vkDestroyRenderPass( + static_cast( m_device ), static_cast( m_renderPass ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_renderPass = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::RenderPass release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_renderPass, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_renderPass, rhs.m_renderPass ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT; + + //=== VK_HUAWEI_subpass_shading === + + VULKAN_HPP_NODISCARD std::pair getSubpassShadingMaxWorkgroupSizeHUAWEI() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::RenderPass m_renderPass = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class Sampler + { + public: + using CType = VkSampler; + using CppType = vk::Sampler; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + 
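+      // Usage sketch (editorial comment, not generated code) for the RenderPass wrapper declared
+      // above, assuming a vk::raii::Device `device` and a filled vk::RenderPassCreateInfo
+      // `renderPassInfo`:
+      //   vk::raii::RenderPass renderPass( device, renderPassInfo );         // throws vk::SystemError on failure
+      //   vk::Extent2D granularity = renderPass.getRenderAreaGranularity();  // optimal render-area alignment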
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; + + public: + Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateSampler( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_sampler ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateSampler" ); + } + } + + Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkSampler sampler, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_sampler( sampler ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + Sampler( std::nullptr_t ) {} + + ~Sampler() + { + clear(); + } + + Sampler() = delete; + Sampler( Sampler const & ) = delete; + + Sampler( Sampler && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_sampler, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + Sampler & operator=( Sampler const & ) = delete; + + Sampler & operator=( Sampler && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_sampler, rhs.m_sampler ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Sampler const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_sampler; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_sampler ) + { + getDispatcher()->vkDestroySampler( + static_cast( m_device ), static_cast( m_sampler ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_sampler = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Sampler release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_sampler, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_sampler, rhs.m_sampler ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Sampler m_sampler = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class SamplerYcbcrConversion + { + public: + using CType = VkSamplerYcbcrConversion; + using CppType = 
vk::SamplerYcbcrConversion; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; + + public: + SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateSamplerYcbcrConversion( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_ycbcrConversion ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateSamplerYcbcrConversion" ); + } + } + + SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkSamplerYcbcrConversion ycbcrConversion, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_ycbcrConversion( ycbcrConversion ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + SamplerYcbcrConversion( std::nullptr_t ) {} + + ~SamplerYcbcrConversion() + { + clear(); + } + + SamplerYcbcrConversion() = delete; + SamplerYcbcrConversion( SamplerYcbcrConversion const & ) = delete; + + SamplerYcbcrConversion( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_ycbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_ycbcrConversion, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion const & ) = delete; + + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_ycbcrConversion, rhs.m_ycbcrConversion ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_ycbcrConversion; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_ycbcrConversion ) + { + getDispatcher()->vkDestroySamplerYcbcrConversion( static_cast( m_device ), + static_cast( m_ycbcrConversion ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_ycbcrConversion = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_ycbcrConversion, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == 
VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_ycbcrConversion, rhs.m_ycbcrConversion ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion m_ycbcrConversion = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class Semaphore + { + public: + using CType = VkSemaphore; + using CppType = vk::Semaphore; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; + + public: + Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( device.getDispatcher()->vkCreateSemaphore( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_semaphore ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateSemaphore" ); + } + } + + Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkSemaphore semaphore, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_semaphore( semaphore ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + Semaphore( std::nullptr_t ) {} + + ~Semaphore() + { + clear(); + } + + Semaphore() = delete; + Semaphore( Semaphore const & ) = delete; + + Semaphore( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_semaphore, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + Semaphore & operator=( Semaphore const & ) = delete; + + Semaphore & operator=( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_semaphore, rhs.m_semaphore ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::Semaphore const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_semaphore ) + { + getDispatcher()->vkDestroySemaphore( + static_cast( m_device ), static_cast( m_semaphore ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_semaphore = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Semaphore release() + { + m_device = nullptr; + m_allocator = nullptr; + 
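+        // Usage sketch (editorial comment, not generated code): getCounterValue() is only valid on
+        // timeline semaphores (Vulkan 1.2); assuming a vk::raii::Device `device`:
+        //   vk::SemaphoreTypeCreateInfo typeInfo( vk::SemaphoreType::eTimeline, /*initialValue=*/0 );
+        //   vk::SemaphoreCreateInfo createInfo{};
+        //   createInfo.pNext = &typeInfo;
+        //   vk::raii::Semaphore timeline( device, createInfo );
+        //   uint64_t current = timeline.getCounterValue();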
m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_semaphore, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Semaphore & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_semaphore, rhs.m_semaphore ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_VERSION_1_2 === + + VULKAN_HPP_NODISCARD uint64_t getCounterValue() const; + + //=== VK_KHR_timeline_semaphore === + + VULKAN_HPP_NODISCARD uint64_t getCounterValueKHR() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Semaphore m_semaphore = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class ShaderEXT + { + public: + using CType = VkShaderEXT; + using CppType = vk::ShaderEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkCreateShadersEXT( static_cast( *device ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_shader ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateShadersEXT" ); + } + } + + ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkShaderEXT shader, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_shader( shader ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + ShaderEXT( std::nullptr_t ) {} + + ~ShaderEXT() + { + clear(); + } + + ShaderEXT() = delete; + ShaderEXT( ShaderEXT const & ) = delete; + + ShaderEXT( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_shader( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shader, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + ShaderEXT & operator=( ShaderEXT const & ) = delete; + + ShaderEXT & operator=( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_shader, rhs.m_shader ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::ShaderEXT const 
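+      // Usage sketch (editorial comment, not generated code): building a shader object, assuming a
+      // vk::raii::Device `device` with VK_EXT_shader_object enabled and `spirv` a
+      // std::vector<uint32_t> holding valid SPIR-V:
+      //   vk::ShaderCreateInfoEXT shaderInfo{};
+      //   shaderInfo.stage    = vk::ShaderStageFlagBits::eVertex;
+      //   shaderInfo.codeType = vk::ShaderCodeTypeEXT::eSpirv;
+      //   shaderInfo.codeSize = spirv.size() * sizeof( uint32_t );
+      //   shaderInfo.pCode    = spirv.data();
+      //   shaderInfo.pName    = "main";
+      //   vk::raii::ShaderEXT shader( device, shaderInfo );
+      //   std::vector<uint8_t> binary = shader.getBinaryData();  // driver binary, cacheable across runs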
& operator*() const VULKAN_HPP_NOEXCEPT + { + return m_shader; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_shader ) + { + getDispatcher()->vkDestroyShaderEXT( + static_cast( m_device ), static_cast( m_shader ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_shader = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::ShaderEXT release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_shader, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_shader, rhs.m_shader ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_EXT_shader_object === + + VULKAN_HPP_NODISCARD std::vector getBinaryData() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ShaderEXT m_shader = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class ShaderEXTs : public std::vector + { + public: + ShaderEXTs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); + std::vector shaders( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( dispatcher->vkCreateShadersEXT( + static_cast( *device ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + shaders.data() ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + this->reserve( createInfos.size() ); + for ( auto const & shaderEXT : shaders ) + { + this->emplace_back( device, shaderEXT, allocator ); + } + } + else + { + detail::throwResultException( result, "vkCreateShadersEXT" ); + } + } + + ShaderEXTs( std::nullptr_t ) {} + + ShaderEXTs() = delete; + ShaderEXTs( ShaderEXTs const & ) = delete; + ShaderEXTs( ShaderEXTs && rhs ) = default; + ShaderEXTs & operator=( ShaderEXTs const & ) = delete; + ShaderEXTs & operator=( ShaderEXTs && rhs ) = default; + }; + + class ShaderModule + { + public: + using CType = VkShaderModule; + using CppType = vk::ShaderModule; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule; + + public: + ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + 
device.getDispatcher()->vkCreateShaderModule( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_shaderModule ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateShaderModule" ); + } + } + + ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkShaderModule shaderModule, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_shaderModule( shaderModule ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + ShaderModule( std::nullptr_t ) {} + + ~ShaderModule() + { + clear(); + } + + ShaderModule() = delete; + ShaderModule( ShaderModule const & ) = delete; + + ShaderModule( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_shaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shaderModule, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + ShaderModule & operator=( ShaderModule const & ) = delete; + + ShaderModule & operator=( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_shaderModule, rhs.m_shaderModule ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::ShaderModule const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_shaderModule ) + { + getDispatcher()->vkDestroyShaderModule( + static_cast( m_device ), static_cast( m_shaderModule ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_shaderModule = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::ShaderModule release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_shaderModule, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_shaderModule, rhs.m_shaderModule ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_EXT_shader_module_identifier === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getIdentifierEXT() const VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ShaderModule m_shaderModule = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class SurfaceKHR + { + public: + using CType = VkSurfaceKHR; + using CppType = vk::SurfaceKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = 
VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR; + + public: +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateAndroidSurfaceKHR( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateAndroidSurfaceKHR" ); + } + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateDirectFBSurfaceEXT( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateDirectFBSurfaceEXT" ); + } + } +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateDisplayPlaneSurfaceKHR( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateDisplayPlaneSurfaceKHR" ); + } + } + + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateHeadlessSurfaceEXT( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateHeadlessSurfaceEXT" ); + } + } + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional 
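+      // Usage sketch (editorial comment, not generated code): of the constructors in this class, only
+      // the headless one needs no window system; assumes a vk::raii::Instance `instance` created with
+      // VK_EXT_headless_surface enabled:
+      //   vk::raii::SurfaceKHR surface( instance, vk::HeadlessSurfaceCreateInfoEXT{} );
+      //   // vkDestroySurfaceKHR runs automatically in ~SurfaceKHR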
allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateIOSSurfaceMVK( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateIOSSurfaceMVK" ); + } + } +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateImagePipeSurfaceFUCHSIA( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateImagePipeSurfaceFUCHSIA" ); + } + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateMacOSSurfaceMVK( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateMacOSSurfaceMVK" ); + } + } +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateMetalSurfaceEXT( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateMetalSurfaceEXT" ); + } + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateScreenSurfaceQNX( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator 
), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateScreenSurfaceQNX" ); + } + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +# if defined( VK_USE_PLATFORM_GGP ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateStreamDescriptorSurfaceGGP( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateStreamDescriptorSurfaceGGP" ); + } + } +# endif /*VK_USE_PLATFORM_GGP*/ + +# if defined( VK_USE_PLATFORM_VI_NN ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateViSurfaceNN( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateViSurfaceNN" ); + } + } +# endif /*VK_USE_PLATFORM_VI_NN*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateWaylandSurfaceKHR( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateWaylandSurfaceKHR" ); + } + } +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateWin32SurfaceKHR( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateWin32SurfaceKHR" ); + } + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & 
instance, + VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateXcbSurfaceKHR( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateXcbSurfaceKHR" ); + } + } +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + instance.getDispatcher()->vkCreateXlibSurfaceKHR( static_cast( *instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateXlibSurfaceKHR" ); + } + } +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VkSurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( *instance ) + , m_surface( surface ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) + { + } + + SurfaceKHR( std::nullptr_t ) {} + + ~SurfaceKHR() + { + clear(); + } + + SurfaceKHR() = delete; + SurfaceKHR( SurfaceKHR const & ) = delete; + + SurfaceKHR( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_surface( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_surface, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + SurfaceKHR & operator=( SurfaceKHR const & ) = delete; + + SurfaceKHR & operator=( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_surface, rhs.m_surface ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::SurfaceKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_surface; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_surface ) + { + getDispatcher()->vkDestroySurfaceKHR( + static_cast( m_instance ), static_cast( m_surface ), reinterpret_cast( m_allocator ) ); + } + m_instance = nullptr; + m_surface = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::SurfaceKHR release() + { + m_instance = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_surface, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Instance getInstance() const + { + return m_instance; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * 
getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_surface, rhs.m_surface ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR m_surface = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + class SwapchainKHR + { + public: + using CType = VkSwapchainKHR; + using CppType = vk::SwapchainKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; + + public: + SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateSwapchainKHR( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_swapchain ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateSwapchainKHR" ); + } + } + + SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkSwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_swapchain( swapchain ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + SwapchainKHR( std::nullptr_t ) {} + + ~SwapchainKHR() + { + clear(); + } + + SwapchainKHR() = delete; + SwapchainKHR( SwapchainKHR const & ) = delete; + + SwapchainKHR( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_swapchain( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_swapchain, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + SwapchainKHR & operator=( SwapchainKHR const & ) = delete; + + SwapchainKHR & operator=( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_swapchain, rhs.m_swapchain ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::SwapchainKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_swapchain; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_swapchain ) + { + getDispatcher()->vkDestroySwapchainKHR( + static_cast( m_device ), static_cast( m_swapchain ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_swapchain = nullptr; + m_allocator = nullptr; + m_dispatcher = 
nullptr;
+      }
+
+      VULKAN_HPP_NAMESPACE::SwapchainKHR release()
+      {
+        m_device     = nullptr;
+        m_allocator  = nullptr;
+        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_swapchain, nullptr );
+      }
+
+      VULKAN_HPP_NAMESPACE::Device getDevice() const
+      {
+        return m_device;
+      }
+
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+      {
+        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
+        return m_dispatcher;
+      }
+
+      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR & rhs ) VULKAN_HPP_NOEXCEPT
+      {
+        std::swap( m_device, rhs.m_device );
+        std::swap( m_swapchain, rhs.m_swapchain );
+        std::swap( m_allocator, rhs.m_allocator );
+        std::swap( m_dispatcher, rhs.m_dispatcher );
+      }
+
+      //=== VK_KHR_swapchain ===
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::Image> getImages() const;
+
+      VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t>
+        acquireNextImage( uint64_t timeout,
+                          VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+                          VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+
+      //=== VK_EXT_display_control ===
+
+      VULKAN_HPP_NODISCARD uint64_t getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const;
+
+      //=== VK_GOOGLE_display_timing ===
+
+      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE getRefreshCycleDurationGOOGLE() const;
+
+      VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> getPastPresentationTimingGOOGLE() const;
+
+      //=== VK_KHR_shared_presentable_image ===
+
+      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const;
+
+      //=== VK_AMD_display_native_hdr ===
+
+      void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT;
+
+      //=== VK_KHR_present_wait ===
+
+      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresent( uint64_t presentId, uint64_t timeout ) const;
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+      //=== VK_EXT_full_screen_exclusive ===
+
+      void acquireFullScreenExclusiveModeEXT() const;
+
+      void releaseFullScreenExclusiveModeEXT() const;
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+      //=== VK_NV_low_latency2 ===
+
+      void setLatencySleepModeNV( const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo ) const;
+
+      void latencySleepNV( const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo ) const;
+
+      void setLatencyMarkerNV( const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT;
+
+      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV getLatencyTimingsNV() const VULKAN_HPP_NOEXCEPT;
+
+    private:
+      VULKAN_HPP_NAMESPACE::Device m_device = {};
+      VULKAN_HPP_NAMESPACE::SwapchainKHR m_swapchain = {};
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+    };
+
+    class SwapchainKHRs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>
+    {
+    public:
+      SwapchainKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
+                     VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+                     VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
+      {
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
+        std::vector<VkSwapchainKHR> swapchains( createInfos.size() );
+        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateSharedSwapchainsKHR(
+          static_cast<VkDevice>( *device ),
+          createInfos.size(),
+          reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
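+          // Usage sketch (editorial comment, not generated code): the per-frame acquire step, assuming
+          // a vk::raii::SwapchainKHR `swapchain` and a vk::raii::Semaphore `imageAvailable`:
+          //   auto [acquireResult, imageIndex] = swapchain.acquireNextImage( UINT64_MAX, *imageAvailable );
+          //   // eSuboptimalKHR is returned (not thrown); eErrorOutOfDateKHR throws vk::OutOfDateKHRError
+          //   std::vector<vk::Image> images = swapchain.getImages();  // non-owning handles, freed with the swapchain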
reinterpret_cast( static_cast( allocator ) ), + swapchains.data() ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + this->reserve( createInfos.size() ); + for ( auto const & swapchainKHR : swapchains ) + { + this->emplace_back( device, swapchainKHR, allocator ); + } + } + else + { + detail::throwResultException( result, "vkCreateSharedSwapchainsKHR" ); + } + } + + SwapchainKHRs( std::nullptr_t ) {} + + SwapchainKHRs() = delete; + SwapchainKHRs( SwapchainKHRs const & ) = delete; + SwapchainKHRs( SwapchainKHRs && rhs ) = default; + SwapchainKHRs & operator=( SwapchainKHRs const & ) = delete; + SwapchainKHRs & operator=( SwapchainKHRs && rhs ) = default; + }; + + class ValidationCacheEXT + { + public: + using CType = VkValidationCacheEXT; + using CppType = vk::ValidationCacheEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; + + public: + ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateValidationCacheEXT( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_validationCache ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateValidationCacheEXT" ); + } + } + + ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkValidationCacheEXT validationCache, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_validationCache( validationCache ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + ValidationCacheEXT( std::nullptr_t ) {} + + ~ValidationCacheEXT() + { + clear(); + } + + ValidationCacheEXT() = delete; + ValidationCacheEXT( ValidationCacheEXT const & ) = delete; + + ValidationCacheEXT( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_validationCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_validationCache, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + ValidationCacheEXT & operator=( ValidationCacheEXT const & ) = delete; + + ValidationCacheEXT & operator=( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_validationCache, rhs.m_validationCache ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_validationCache; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_validationCache ) + { + getDispatcher()->vkDestroyValidationCacheEXT( 
static_cast( m_device ), + static_cast( m_validationCache ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_validationCache = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::ValidationCacheEXT release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_validationCache, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_validationCache, rhs.m_validationCache ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_EXT_validation_cache === + + void merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const; + + VULKAN_HPP_NODISCARD std::vector getData() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheEXT m_validationCache = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class VideoSessionKHR + { + public: + using CType = VkVideoSessionKHR; + using CppType = vk::VideoSessionKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateVideoSessionKHR( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_videoSession ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateVideoSessionKHR" ); + } + } + + VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkVideoSessionKHR videoSession, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_videoSession( videoSession ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + VideoSessionKHR( std::nullptr_t ) {} + + ~VideoSessionKHR() + { + clear(); + } + + VideoSessionKHR() = delete; + VideoSessionKHR( VideoSessionKHR const & ) = delete; + + VideoSessionKHR( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_videoSession( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSession, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + VideoSessionKHR & operator=( VideoSessionKHR const & ) = delete; + + VideoSessionKHR & operator=( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_videoSession, rhs.m_videoSession ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::VideoSessionKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_videoSession; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_videoSession ) + { + getDispatcher()->vkDestroyVideoSessionKHR( static_cast( m_device ), + static_cast( m_videoSession ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_videoSession = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::VideoSessionKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_videoSession, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_videoSession, rhs.m_videoSession ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_KHR_video_queue === + + VULKAN_HPP_NODISCARD std::vector getMemoryRequirements() const; + + void bindMemory( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos ) const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR m_videoSession = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class VideoSessionParametersKHR + { + public: + using CType = VkVideoSessionParametersKHR; + using CppType = vk::VideoSessionParametersKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + VULKAN_HPP_NAMESPACE::Result result = static_cast( + device.getDispatcher()->vkCreateVideoSessionParametersKHR( static_cast( *device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_videoSessionParameters ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + detail::throwResultException( result, "vkCreateVideoSessionParametersKHR" ); + } + } + + VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & 
device, + VkVideoSessionParametersKHR videoSessionParameters, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_videoSessionParameters( videoSessionParameters ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + VideoSessionParametersKHR( std::nullptr_t ) {} + + ~VideoSessionParametersKHR() + { + clear(); + } + + VideoSessionParametersKHR() = delete; + VideoSessionParametersKHR( VideoSessionParametersKHR const & ) = delete; + + VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_videoSessionParameters( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR const & ) = delete; + + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_videoSessionParameters, rhs.m_videoSessionParameters ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParameters; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_videoSessionParameters ) + { + getDispatcher()->vkDestroyVideoSessionParametersKHR( static_cast( m_device ), + static_cast( m_videoSessionParameters ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_videoSessionParameters = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_videoSessionParameters, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_videoSessionParameters, rhs.m_videoSessionParameters ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_KHR_video_queue === + + void update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR m_videoSessionParameters = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + //=========================== + //=== COMMAND Definitions === + //=========================== + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Instance + Context::createInstance( 
VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Instance( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Instance::enumeratePhysicalDevices() const + { + return VULKAN_HPP_RAII_NAMESPACE::PhysicalDevices( *this ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures && "Function requires " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; + getDispatcher()->vkGetPhysicalDeviceFeatures( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + + return features; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties + PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties && "Function requires " ); + + VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; + getDispatcher()->vkGetPhysicalDeviceFormatProperties( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return formatProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties + PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; + VkResult result = getDispatcher()->vkGetPhysicalDeviceImageFormatProperties( static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + reinterpret_cast( &imageFormatProperties ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); + + return imageFormatProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties && "Function requires " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; + getDispatcher()->vkGetPhysicalDeviceProperties( static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties && + "Function requires " ); + + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < 
queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties && "Function requires " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; + getDispatcher()->vkGetPhysicalDeviceMemoryProperties( static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + + return memoryProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetInstanceProcAddr && "Function requires " ); + + PFN_vkVoidFunction result = getDispatcher()->vkGetInstanceProcAddr( static_cast( m_instance ), name.c_str() ); + + return result; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceProcAddr && "Function requires " ); + + PFN_vkVoidFunction result = getDispatcher()->vkGetDeviceProcAddr( static_cast( m_device ), name.c_str() ); + + return result; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Device + PhysicalDevice::createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Device( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Context::enumerateInstanceExtensionProperties( Optional layerName ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateInstanceExtensionProperties && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VkResult result; + do + { + result = getDispatcher()->vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = getDispatcher()->vkEnumerateInstanceExtensionProperties( + layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateDeviceExtensionProperties && "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VkResult result; + do + { + result = getDispatcher()->vkEnumerateDeviceExtensionProperties( + static_cast( m_physicalDevice ), layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = getDispatcher()->vkEnumerateDeviceExtensionProperties( static_cast( m_physicalDevice ), + layerName ? 
layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Context::enumerateInstanceLayerProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateInstanceLayerProperties && "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VkResult result; + do + { + result = getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::enumerateDeviceLayerProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateDeviceLayerProperties && "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VkResult result; + do + { + result = getDispatcher()->vkEnumerateDeviceLayerProperties( static_cast( m_physicalDevice ), &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = getDispatcher()->vkEnumerateDeviceLayerProperties( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Queue( *this, queueFamilyIndex, queueIndex ); + } + + VULKAN_HPP_INLINE void Queue::submit( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit && "Function requires " ); + + VkResult result = getDispatcher()->vkQueueSubmit( + static_cast( m_queue ), submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); + } + + VULKAN_HPP_INLINE void Queue::waitIdle() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueWaitIdle && "Function requires " ); + + VkResult result = getDispatcher()->vkQueueWaitIdle( static_cast( m_queue ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); + } + + VULKAN_HPP_INLINE void Device::waitIdle() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDeviceWaitIdle && "Function requires " ); + + VkResult result = 
getDispatcher()->vkDeviceWaitIdle( static_cast( m_device ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DeviceMemory + Device::allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DeviceMemory( *this, allocateInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * DeviceMemory::mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkMapMemory && "Function requires " ); + + void * pData; + VkResult result = getDispatcher()->vkMapMemory( static_cast( m_device ), + static_cast( m_memory ), + static_cast( offset ), + static_cast( size ), + static_cast( flags ), + &pData ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::mapMemory" ); + + return pData; + } + + VULKAN_HPP_INLINE void DeviceMemory::unmapMemory() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory && "Function requires " ); + + getDispatcher()->vkUnmapMemory( static_cast( m_device ), static_cast( m_memory ) ); + } + + VULKAN_HPP_INLINE void + Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkFlushMappedMemoryRanges && "Function requires " ); + + VkResult result = getDispatcher()->vkFlushMappedMemoryRanges( + static_cast( m_device ), memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); + } + + VULKAN_HPP_INLINE void + Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkInvalidateMappedMemoryRanges && "Function requires " ); + + VkResult result = getDispatcher()->vkInvalidateMappedMemoryRanges( + static_cast( m_device ), memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DeviceMemory::getCommitment() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryCommitment && "Function requires " ); + + VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; + getDispatcher()->vkGetDeviceMemoryCommitment( + static_cast( m_device ), static_cast( m_memory ), reinterpret_cast( &committedMemoryInBytes ) ); + + return committedMemoryInBytes; + } + + VULKAN_HPP_INLINE void Buffer::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory && "Function requires " ); + + VkResult result = getDispatcher()->vkBindBufferMemory( static_cast( m_device ), + static_cast( m_buffer ), + static_cast( memory ), + static_cast( memoryOffset ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Buffer::bindMemory" ); + } + + VULKAN_HPP_INLINE void Image::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory && "Function requires " ); + + VkResult result 
= getDispatcher()->vkBindImageMemory( + static_cast( m_device ), static_cast( m_image ), static_cast( memory ), static_cast( memoryOffset ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Image::bindMemory" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Buffer::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements && "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + getDispatcher()->vkGetBufferMemoryRequirements( + static_cast( m_device ), static_cast( m_buffer ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Image::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements && "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + getDispatcher()->vkGetImageMemoryRequirements( + static_cast( m_device ), static_cast( m_image ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Image::getSparseMemoryRequirements() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements && "Function requires " ); + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetImageSparseMemoryRequirements( + static_cast( m_device ), static_cast( m_image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + getDispatcher()->vkGetImageSparseMemoryRequirements( static_cast( m_device ), + static_cast( m_image ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + VULKAN_HPP_INLINE void Queue::bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfo, + 
VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueBindSparse && "Function requires " ); + + VkResult result = getDispatcher()->vkQueueBindSparse( + static_cast( m_queue ), bindInfo.size(), reinterpret_cast( bindInfo.data() ), static_cast( fence ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence + Device::createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetFences && "Function requires " ); + + VkResult result = getDispatcher()->vkResetFences( static_cast( m_device ), fences.size(), reinterpret_cast( fences.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Fence::getStatus() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceStatus && "Function requires " ); + + VkResult result = getDispatcher()->vkGetFenceStatus( static_cast( m_device ), static_cast( m_fence ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Fence::getStatus", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + + return static_cast( result ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitForFences( + VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForFences && "Function requires " ); + + VkResult result = getDispatcher()->vkWaitForFences( + static_cast( m_device ), fences.size(), reinterpret_cast( fences.data() ), static_cast( waitAll ), timeout ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + + return static_cast( result ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Semaphore + Device::createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Semaphore( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Event + Device::createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Event( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Event::getStatus() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetEventStatus && "Function requires " ); + + VkResult result = getDispatcher()->vkGetEventStatus( static_cast( m_device ), static_cast( m_event ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Event::getStatus", + { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); + + return static_cast( result ); + } + + VULKAN_HPP_INLINE void Event::set() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetEvent && "Function requires " ); + + VkResult result = 
getDispatcher()->vkSetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Event::set" );
+    }
+
+    VULKAN_HPP_INLINE void Event::reset() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkResetEvent && "Function requires <VK_VERSION_1_0>" );
+
+      VkResult result = getDispatcher()->vkResetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Event::reset" );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::QueryPool
+      Device::createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const &                               createInfo,
+                               VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::QueryPool( *this, createInfo, allocator );
+    }
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, std::vector<DataType>> QueryPool::getResults(
+      uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueryPoolResults && "Function requires <VK_VERSION_1_0>" );
+
+      VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+      std::vector<DataType> data( dataSize / sizeof( DataType ) );
+      VkResult result = getDispatcher()->vkGetQueryPoolResults( static_cast<VkDevice>( m_device ),
+                                                                static_cast<VkQueryPool>( m_queryPool ),
+                                                                firstQuery,
+                                                                queryCount,
+                                                                data.size() * sizeof( DataType ),
+                                                                reinterpret_cast<void *>( data.data() ),
+                                                                static_cast<VkDeviceSize>( stride ),
+                                                                static_cast<VkQueryResultFlags>( flags ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                   VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResults",
+                   { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+
+      return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+    }
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, DataType> QueryPool::getResult(
+      uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueryPoolResults && "Function requires <VK_VERSION_1_0>" );
+
+      DataType data;
+      VkResult result = getDispatcher()->vkGetQueryPoolResults( static_cast<VkDevice>( m_device ),
+                                                                static_cast<VkQueryPool>( m_queryPool ),
+                                                                firstQuery,
+                                                                queryCount,
+                                                                sizeof( DataType ),
+                                                                reinterpret_cast<void *>( &data ),
+                                                                static_cast<VkDeviceSize>( stride ),
+                                                                static_cast<VkQueryResultFlags>( flags ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                   VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResult",
+                   { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+
+      return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Buffer
+      Device::createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const &                                  createInfo,
+                            VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::Buffer( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::BufferView
+      Device::createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const &                              createInfo,
+                                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::BufferView( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Image
+      Device::createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const &                                   createInfo,
+                           VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::Image( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::SubresourceLayout + Image::getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout && "Function requires " ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout layout; + getDispatcher()->vkGetImageSubresourceLayout( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return layout; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ImageView + Device::createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::ImageView( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ShaderModule + Device::createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::ShaderModule( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PipelineCache + Device::createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::PipelineCache( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PipelineCache::getData() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineCacheData && "Function requires " ); + + std::vector data; + size_t dataSize; + VkResult result; + do + { + result = + getDispatcher()->vkGetPipelineCacheData( static_cast( m_device ), static_cast( m_pipelineCache ), &dataSize, nullptr ); + if ( ( result == VK_SUCCESS ) && dataSize ) + { + data.resize( dataSize ); + result = getDispatcher()->vkGetPipelineCacheData( + static_cast( m_device ), static_cast( m_pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::getData" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return data; + } + + VULKAN_HPP_INLINE void PipelineCache::merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkMergePipelineCaches && "Function requires " ); + + VkResult result = getDispatcher()->vkMergePipelineCaches( static_cast( m_device ), + static_cast( m_pipelineCache ), + srcCaches.size(), + reinterpret_cast( srcCaches.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::merge" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::createGraphicsPipelines( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createGraphicsPipeline( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator ); 
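+      // Usage sketch (illustrative only; `device` and `createInfo` are assumed to be a
+      // vk::raii::Device and a filled-in vk::GraphicsPipelineCreateInfo at the call site):
+      //   vk::raii::Pipeline pipeline = device.createGraphicsPipeline( nullptr, createInfo );
+      // The nullptr selects "no pipeline cache"; the pipeline is destroyed when it leaves scope.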
+ } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::createComputePipelines( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline + Device::createComputePipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PipelineLayout + Device::createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::PipelineLayout( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Sampler + Device::createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Sampler( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout + Device::createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorPool + Device::createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DescriptorPool( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void DescriptorPool::reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetDescriptorPool && "Function requires " ); + + getDispatcher()->vkResetDescriptorPool( + static_cast( m_device ), static_cast( m_descriptorPool ), static_cast( flags ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DescriptorSets( *this, allocateInfo ); + } + + VULKAN_HPP_INLINE void Device::updateDescriptorSets( + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorCopies ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSets && "Function requires " ); + + getDispatcher()->vkUpdateDescriptorSets( static_cast( m_device ), + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ), + descriptorCopies.size(), + reinterpret_cast( descriptorCopies.data() ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Framebuffer + Device::createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Framebuffer( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_RAII_NAMESPACE::RenderPass
+      Device::createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const &                              createInfo,
+                                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D RenderPass::getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderAreaGranularity && "Function requires <VK_VERSION_1_0>" );
+
+      VULKAN_HPP_NAMESPACE::Extent2D granularity;
+      getDispatcher()->vkGetRenderAreaGranularity(
+        static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( m_renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
+
+      return granularity;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CommandPool
+      Device::createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const &                             createInfo,
+                                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::CommandPool( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_INLINE void CommandPool::reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkResetCommandPool && "Function requires <VK_VERSION_1_0>" );
+
+      VkResult result = getDispatcher()->vkResetCommandPool(
+        static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandPool::reset" );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::CommandBuffer>
+      Device::allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::CommandBuffers( *this, allocateInfo );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkBeginCommandBuffer && "Function requires <VK_VERSION_1_0>" );
+
+      VkResult result = getDispatcher()->vkBeginCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                               reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::end() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkEndCommandBuffer && "Function requires <VK_VERSION_1_0>" );
+
+      VkResult result = getDispatcher()->vkEndCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkResetCommandBuffer && "Function requires <VK_VERSION_1_0>" );
+
+      VkResult result =
+        getDispatcher()->vkResetCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCommandBufferResetFlags>( flags ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+                                                        VULKAN_HPP_NAMESPACE::Pipeline          pipeline ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindPipeline && "Function requires <VK_VERSION_1_0>" );
+
+      getDispatcher()->vkCmdBindPipeline(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+    }
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::setViewport( uint32_t                                                               firstViewport,
+                                  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewport && "Function requires <VK_VERSION_1_0>" );
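+      // the ArrayProxy carries both pointer and count, so the viewport array is
+      // forwarded to vkCmdSetViewport below without any copying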
+ + getDispatcher()->vkCmdSetViewport( + static_cast( m_commandBuffer ), firstViewport, viewports.size(), reinterpret_cast( viewports.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setScissor( uint32_t firstScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissor && "Function requires " ); + + getDispatcher()->vkCmdSetScissor( + static_cast( m_commandBuffer ), firstScissor, scissors.size(), reinterpret_cast( scissors.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineWidth && "Function requires " ); + + getDispatcher()->vkCmdSetLineWidth( static_cast( m_commandBuffer ), lineWidth ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBias && "Function requires " ); + + getDispatcher()->vkCmdSetDepthBias( static_cast( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetBlendConstants && "Function requires " ); + + getDispatcher()->vkCmdSetBlendConstants( static_cast( m_commandBuffer ), blendConstants ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBounds && "Function requires " ); + + getDispatcher()->vkCmdSetDepthBounds( static_cast( m_commandBuffer ), minDepthBounds, maxDepthBounds ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilCompareMask && "Function requires " ); + + getDispatcher()->vkCmdSetStencilCompareMask( static_cast( m_commandBuffer ), static_cast( faceMask ), compareMask ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilWriteMask && "Function requires " ); + + getDispatcher()->vkCmdSetStencilWriteMask( static_cast( m_commandBuffer ), static_cast( faceMask ), writeMask ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilReference && "Function requires " ); + + getDispatcher()->vkCmdSetStencilReference( static_cast( m_commandBuffer ), static_cast( faceMask ), reference ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorSets && "Function requires " ); + + getDispatcher()->vkCmdBindDescriptorSets( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + 
firstSet, + descriptorSets.size(), + reinterpret_cast( descriptorSets.data() ), + dynamicOffsets.size(), + dynamicOffsets.data() ); + } + + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer && "Function requires " ); + + getDispatcher()->vkCmdBindIndexBuffer( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( indexType ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers && "Function requires " ); + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); + } + + getDispatcher()->vkCmdBindVertexBuffers( static_cast( m_commandBuffer ), + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDraw && "Function requires " ); + + getDispatcher()->vkCmdDraw( static_cast( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( + uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexed && "Function requires " ); + + getDispatcher()->vkCmdDrawIndexed( static_cast( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirect && "Function requires " ); + + getDispatcher()->vkCmdDrawIndirect( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirect && "Function requires " ); + + getDispatcher()->vkCmdDrawIndexedIndirect( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatch && "Function requires " ); + + getDispatcher()->vkCmdDispatch( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + } + + VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchIndirect && "Function requires " ); + + 
getDispatcher()->vkCmdDispatchIndirect( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer && "Function requires " ); + + getDispatcher()->vkCmdCopyBuffer( static_cast( m_commandBuffer ), + static_cast( srcBuffer ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage && "Function requires " ); + + getDispatcher()->vkCmdCopyImage( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage && "Function requires " ); + + getDispatcher()->vkCmdBlitImage( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ), + static_cast( filter ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( + VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage && "Function requires " ); + + getDispatcher()->vkCmdCopyBufferToImage( static_cast( m_commandBuffer ), + static_cast( srcBuffer ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( + VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer && "Function requires " ); + + getDispatcher()->vkCmdCopyImageToBuffer( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdUpdateBuffer && "Function requires " ); + + getDispatcher()->vkCmdUpdateBuffer( static_cast( m_commandBuffer ), 
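+                                        // the byte size passed below is data.size() * sizeof( DataType ),
+                                        // derived from the typed ArrayProxy, not a raw caller-supplied size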
+ static_cast( dstBuffer ), + static_cast( dstOffset ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdFillBuffer && "Function requires " ); + + getDispatcher()->vkCmdFillBuffer( static_cast( m_commandBuffer ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( size ), + data ); + } + + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( + VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue & color, + VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdClearColorImage && "Function requires " ); + + getDispatcher()->vkCmdClearColorImage( static_cast( m_commandBuffer ), + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &color ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( + VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, + VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdClearDepthStencilImage && "Function requires " ); + + getDispatcher()->vkCmdClearDepthStencilImage( static_cast( m_commandBuffer ), + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &depthStencil ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy const & attachments, + VULKAN_HPP_NAMESPACE::ArrayProxy const & rects ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdClearAttachments && "Function requires " ); + + getDispatcher()->vkCmdClearAttachments( static_cast( m_commandBuffer ), + attachments.size(), + reinterpret_cast( attachments.data() ), + rects.size(), + reinterpret_cast( rects.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage && "Function requires " ); + + getDispatcher()->vkCmdResolveImage( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent && "Function requires " ); + + getDispatcher()->vkCmdSetEvent( + static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
getDispatcher()->vkCmdResetEvent && "Function requires " ); + + getDispatcher()->vkCmdResetEvent( + static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::waitEvents( + VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents && "Function requires " ); + + getDispatcher()->vkCmdWaitEvents( static_cast( m_commandBuffer ), + events.size(), + reinterpret_cast( events.data() ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier && "Function requires " ); + + getDispatcher()->vkCmdPipelineBarrier( static_cast( m_commandBuffer ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + static_cast( dependencyFlags ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginQuery && "Function requires " ); + + getDispatcher()->vkCmdBeginQuery( + static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndQuery && "Function requires " ); + + getDispatcher()->vkCmdEndQuery( static_cast( m_commandBuffer ), static_cast( queryPool ), query ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetQueryPool && "Function requires " ); + + getDispatcher()->vkCmdResetQueryPool( static_cast( m_commandBuffer ), static_cast( queryPool ), firstQuery, queryCount ); + } + + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp && "Function requires " ); + + getDispatcher()->vkCmdWriteTimestamp( + static_cast( 
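// pipelineBarrier is the synchronization workhorse of this block. A commented sketch of
// the common undefined -> transfer-dst image layout transition (hypothetical `cmd`/`image`):
//
//   vk::ImageMemoryBarrier barrier( {}, vk::AccessFlagBits::eTransferWrite,
//                                   vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal,
//                                   VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, image,
//                                   { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
//   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
//                        {}, nullptr, nullptr, barrier );   // no memory/buffer barriers needed here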
m_commandBuffer ), static_cast( pipelineStage ), static_cast( queryPool ), query ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyQueryPoolResults && "Function requires " ); + + getDispatcher()->vkCmdCopyQueryPoolResults( static_cast( m_commandBuffer ), + static_cast( queryPool ), + firstQuery, + queryCount, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( stride ), + static_cast( flags ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & values ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushConstants && "Function requires " ); + + getDispatcher()->vkCmdPushConstants( static_cast( m_commandBuffer ), + static_cast( layout ), + static_cast( stageFlags ), + offset, + values.size() * sizeof( ValuesType ), + reinterpret_cast( values.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass && "Function requires " ); + + getDispatcher()->vkCmdBeginRenderPass( static_cast( m_commandBuffer ), + reinterpret_cast( &renderPassBegin ), + static_cast( contents ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass && "Function requires " ); + + getDispatcher()->vkCmdNextSubpass( static_cast( m_commandBuffer ), static_cast( contents ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass && "Function requires " ); + + getDispatcher()->vkCmdEndRenderPass( static_cast( m_commandBuffer ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( + VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteCommands && "Function requires " ); + + getDispatcher()->vkCmdExecuteCommands( + static_cast( m_commandBuffer ), commandBuffers.size(), reinterpret_cast( commandBuffers.data() ) ); + } + + //=== VK_VERSION_1_1 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t Context::enumerateInstanceVersion() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateInstanceVersion && "Function requires " ); + + uint32_t apiVersion; + VkResult result = getDispatcher()->vkEnumerateInstanceVersion( &apiVersion ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceVersion" ); + + return apiVersion; + } + + VULKAN_HPP_INLINE void + Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory2 && "Function requires or " ); + + VkResult result = getDispatcher()->vkBindBufferMemory2( + static_cast( m_device ), bindInfos.size(), reinterpret_cast( 
bindInfos.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); + } + + VULKAN_HPP_INLINE void Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory2 && "Function requires or " ); + + VkResult result = getDispatcher()->vkBindImageMemory2( + static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPeerMemoryFeatures && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + getDispatcher()->vkGetDeviceGroupPeerMemoryFeatures( static_cast( m_device ), + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( &peerMemoryFeatures ) ); + + return peerMemoryFeatures; + } + + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDeviceMask && "Function requires or " ); + + getDispatcher()->vkCmdSetDeviceMask( static_cast( m_commandBuffer ), deviceMask ); + } + + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchBase && "Function requires or " ); + + getDispatcher()->vkCmdDispatchBase( + static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Instance::enumeratePhysicalDeviceGroups() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceGroups && + "Function requires or " ); + + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + VkResult result; + do + { + result = getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast( m_instance ), &physicalDeviceGroupCount, nullptr ); + if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = + getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast( m_instance ), + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return physicalDeviceGroupProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + 
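// The Vulkan 1.1 "2" entry points take extensible structs instead of loose parameters,
// so the same call works with pNext chains. Commented query-then-bind sketch
// (hypothetical `device`, `image`, `memory`):
//
//   vk::MemoryRequirements2 req =
//     device.getImageMemoryRequirements2( vk::ImageMemoryRequirementsInfo2( image ) );
//   vk::BindImageMemoryInfo bind( image, memory, 0 /*memoryOffset*/ );
//   device.bindImageMemory2( bind );   // ArrayProxy: one or many binds in a single call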
getDispatcher()->vkGetImageMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetImageMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetBufferMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetBufferMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements2 && + "Function requires or " ); + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetImageSparseMemoryRequirements2( + static_cast( m_device ), reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + getDispatcher()->vkGetImageSparseMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast( m_physicalDevice ), + 
reinterpret_cast( &features ) ); + + return features; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + + return properties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + getDispatcher()->vkGetPhysicalDeviceFormatProperties2( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return formatProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceFormatProperties2( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2 + PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + VkResult result = + getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + + 
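// The templated overloads return a StructureChain, letting extension feature structs be
// queried in one call. Commented sketch (assumes a Vulkan 1.2-capable driver):
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceVulkan12Features>();
//   bool timeline = chain.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore;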
return imageFormatProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2 && + "Function requires or " ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get(); + VkResult result = + getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function requires or " ); + + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function requires or " ); + + std::vector structureChains; + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + structureChains.resize( queueFamilyPropertyCount ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = structureChains[i].template get().pNext; + } + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + structureChains.resize( queueFamilyPropertyCount ); + } + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + structureChains[i].template get() = queueFamilyProperties[i]; + } + return structureChains; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + + return 
memoryProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2 && + "Function requires or " ); + + std::vector properties; + uint32_t propertyCount; + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + VULKAN_HPP_INLINE void CommandPool::trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkTrimCommandPool && "Function requires or " ); + + getDispatcher()->vkTrimCommandPool( + static_cast( m_device ), static_cast( m_commandPool ), static_cast( flags ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Queue Device::getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Queue( *this, queueInfo ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion + Device::createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate + Device::createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( *this, createInfo, allocator ); + } + + template + VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSetWithTemplate && + "Function requires or " ); + + getDispatcher()->vkUpdateDescriptorSetWithTemplate( static_cast( m_device ), + static_cast( m_descriptorSet ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( &data ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties + PhysicalDevice::getExternalBufferProperties( const 
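// updateWithTemplate reinterprets `data` through the offsets baked into the descriptor
// update template at creation time. Commented sketch; the entry struct, `ubo`, `Params`,
// and `updateTemplate` are hypothetical and must match the template's stride/offsets:
//
//   struct Entry { vk::DescriptorBufferInfo buffer; };
//   Entry entry{ vk::DescriptorBufferInfo( ubo, 0, sizeof( Params ) ) };
//   descriptorSet.updateWithTemplate( *updateTemplate, entry );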
VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalBufferProperties && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + getDispatcher()->vkGetPhysicalDeviceExternalBufferProperties( static_cast( m_physicalDevice ), + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); + + return externalBufferProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties + PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalFenceProperties && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; + getDispatcher()->vkGetPhysicalDeviceExternalFenceProperties( static_cast( m_physicalDevice ), + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); + + return externalFenceProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalSemaphoreProperties && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + getDispatcher()->vkGetPhysicalDeviceExternalSemaphoreProperties( + static_cast( m_physicalDevice ), + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); + + return externalSemaphoreProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupport && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + + return support; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupport && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get(); + getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + + return structureChain; + } + + //=== VK_VERSION_1_2 === + + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCount && + 
"Function requires or or " ); + + getDispatcher()->vkCmdDrawIndirectCount( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirectCount && + "Function requires or or " ); + + getDispatcher()->vkCmdDrawIndexedIndirectCount( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass + Device::createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass2 && + "Function requires or " ); + + getDispatcher()->vkCmdBeginRenderPass2( static_cast( m_commandBuffer ), + reinterpret_cast( &renderPassBegin ), + reinterpret_cast( &subpassBeginInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass2 && "Function requires or " ); + + getDispatcher()->vkCmdNextSubpass2( static_cast( m_commandBuffer ), + reinterpret_cast( &subpassBeginInfo ), + reinterpret_cast( &subpassEndInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass2 && "Function requires or " ); + + getDispatcher()->vkCmdEndRenderPass2( static_cast( m_commandBuffer ), reinterpret_cast( &subpassEndInfo ) ); + } + + VULKAN_HPP_INLINE void QueryPool::reset( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetQueryPool && "Function requires or " ); + + getDispatcher()->vkResetQueryPool( static_cast( m_device ), static_cast( m_queryPool ), firstQuery, queryCount ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValue() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreCounterValue && + "Function requires or " ); + + uint64_t value; + VkResult result = getDispatcher()->vkGetSemaphoreCounterValue( static_cast( m_device ), static_cast( m_semaphore ), &value ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValue" ); + + return value; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, + uint64_t timeout ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWaitSemaphores && "Function 
requires or " ); + + VkResult result = + getDispatcher()->vkWaitSemaphores( static_cast( m_device ), reinterpret_cast( &waitInfo ), timeout ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + + return static_cast( result ); + } + + VULKAN_HPP_INLINE void Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSignalSemaphore && "Function requires or " ); + + VkResult result = getDispatcher()->vkSignalSemaphore( static_cast( m_device ), reinterpret_cast( &signalInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferDeviceAddress && + "Function requires or or " ); + + VkDeviceAddress result = + getDispatcher()->vkGetBufferDeviceAddress( static_cast( m_device ), reinterpret_cast( &info ) ); + + return static_cast( result ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureAddress && + "Function requires or " ); + + uint64_t result = + getDispatcher()->vkGetBufferOpaqueCaptureAddress( static_cast( m_device ), reinterpret_cast( &info ) ); + + return result; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddress && + "Function requires or " ); + + uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddress( static_cast( m_device ), + reinterpret_cast( &info ) ); + + return result; + } + + //=== VK_VERSION_1_3 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getToolProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceToolProperties && + "Function requires or " ); + + std::vector toolProperties; + uint32_t toolCount; + VkResult result; + do + { + result = getDispatcher()->vkGetPhysicalDeviceToolProperties( static_cast( m_physicalDevice ), &toolCount, nullptr ); + if ( ( result == VK_SUCCESS ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = getDispatcher()->vkGetPhysicalDeviceToolProperties( + static_cast( m_physicalDevice ), &toolCount, reinterpret_cast( toolProperties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + return toolProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot + Device::createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void Device::setPrivateData( 
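// waitSemaphores returns eSuccess or eTimeout instead of throwing on a timeout, so a
// timeline wait can be polled. Commented sketch (hypothetical `timelineSemaphore`):
//
//   uint64_t value = 42;
//   vk::Semaphore sem = *timelineSemaphore;            // raii operator* yields the handle
//   vk::SemaphoreWaitInfo waitInfo;
//   waitInfo.setSemaphores( sem ).setValues( value );  // parallel arrays, one entry each
//   if ( device.waitSemaphores( waitInfo, UINT64_MAX ) == vk::Result::eSuccess )
//   {
//     // the timeline semaphore reached `value`
//   }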
VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetPrivateData && "Function requires or " ); + + VkResult result = getDispatcher()->vkSetPrivateData( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPrivateData && "Function requires or " ); + + uint64_t data; + getDispatcher()->vkGetPrivateData( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), &data ); + + return data; + } + + VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent2 && "Function requires or " ); + + getDispatcher()->vkCmdSetEvent2( + static_cast( m_commandBuffer ), static_cast( event ), reinterpret_cast( &dependencyInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent2 && "Function requires or " ); + + getDispatcher()->vkCmdResetEvent2( + static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents2 && "Function requires or " ); + if ( events.size() != dependencyInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" ); + } + + getDispatcher()->vkCmdWaitEvents2( static_cast( m_commandBuffer ), + events.size(), + reinterpret_cast( events.data() ), + reinterpret_cast( dependencyInfos.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier2 && "Function requires or " ); + + getDispatcher()->vkCmdPipelineBarrier2( static_cast( m_commandBuffer ), reinterpret_cast( &dependencyInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp2 && "Function requires or " ); + + getDispatcher()->vkCmdWriteTimestamp2( + static_cast( m_commandBuffer ), static_cast( stage ), static_cast( queryPool ), query ); + } + + VULKAN_HPP_INLINE void Queue::submit2( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit2 && "Function requires or " ); + + VkResult result = getDispatcher()->vkQueueSubmit2( + static_cast( m_queue ), submits.size(), reinterpret_cast( 
submits.data() ), static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer2 && "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdCopyBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage2 && "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdCopyImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void
+    CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage2 &&
+                       "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdCopyBufferToImage2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                              reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void
+    CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer2 &&
+                       "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdCopyImageToBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                              reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage2 && "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdBlitImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage2 && "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdResolveImage2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                         reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRendering && "Function requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdBeginRendering( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::endRendering() const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRendering && "Function requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdEndRendering( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullMode &&
+                       "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdSetCullMode( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFace &&
+                       "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+    getDispatcher()->vkCmdSetFrontFace( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
+  }
+
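// beginRendering/endRendering (Vulkan 1.3 dynamic rendering) replace render-pass objects
// for straightforward cases. Commented sketch drawing into one color attachment;
// `cmd`, `colorView`, and `extent` are hypothetical, layout transitions elided:
//
//   vk::RenderingAttachmentInfo color( colorView, vk::ImageLayout::eColorAttachmentOptimal );
//   color.setLoadOp( vk::AttachmentLoadOp::eClear ).setStoreOp( vk::AttachmentStoreOp::eStore );
//   vk::RenderingInfo info( {}, { { 0, 0 }, extent }, 1 /*layerCount*/, 0 /*viewMask*/, color );
//   cmd.beginRendering( info );
//   // ... bind pipeline, draw ...
//   cmd.endRendering();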
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopology && + "Function requires or or " ); + + getDispatcher()->vkCmdSetPrimitiveTopology( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCount && + "Function requires or or " ); + + getDispatcher()->vkCmdSetViewportWithCount( + static_cast( m_commandBuffer ), viewports.size(), reinterpret_cast( viewports.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCount && + "Function requires or or " ); + + getDispatcher()->vkCmdSetScissorWithCount( + static_cast( m_commandBuffer ), scissors.size(), reinterpret_cast( scissors.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2 && + "Function requires or or " ); + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" ); + } + + getDispatcher()->vkCmdBindVertexBuffers2( static_cast( m_commandBuffer ), + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ), + reinterpret_cast( strides.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthTestEnable( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthWriteEnable( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOp && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthCompareOp( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthBoundsTestEnable( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetStencilTestEnable( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOp && + "Function requires or or " ); + + getDispatcher()->vkCmdSetStencilOp( static_cast( m_commandBuffer ), + static_cast( faceMask ), + static_cast( failOp ), + static_cast( passOp ), + static_cast( depthFailOp ), + static_cast( compareOp ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetRasterizerDiscardEnable( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthBiasEnable( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetPrimitiveRestartEnable( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirements && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirements && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast( m_device ), + reinterpret_cast( &info ), + 
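// getBufferMemoryRequirements( DeviceBufferMemoryRequirements ) answers "what would this
// buffer need?" without creating the buffer first. Commented sketch (hypothetical `device`):
//
//   vk::BufferCreateInfo bufferInfo( {}, 1024, vk::BufferUsageFlagBits::eStorageBuffer );
//   vk::DeviceBufferMemoryRequirements query( &bufferInfo );
//   vk::MemoryRequirements2 req = device.getBufferMemoryRequirements( query );
//   // req.memoryRequirements.{size, alignment, memoryTypeBits} can now drive allocation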
reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirements && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirements && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSparseMemoryRequirements && + "Function requires or " ); + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetDeviceImageSparseMemoryRequirements( + static_cast( m_device ), reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + getDispatcher()->vkGetDeviceImageSparseMemoryRequirements( static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + //=== VK_KHR_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Bool32 supported; + VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR( + static_cast( m_physicalDevice ), queueFamilyIndex, static_cast( surface ), reinterpret_cast( &supported ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); + + return supported; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR + PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; + VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast( 
m_physicalDevice ), + static_cast( surface ), + reinterpret_cast( &surfaceCapabilities ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + + return surfaceCapabilities; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR && "Function requires " ); + + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VkResult result; + do + { + result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( + static_cast( m_physicalDevice ), static_cast( surface ), &surfaceFormatCount, nullptr ); + if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return surfaceFormats; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR && + "Function requires " ); + + std::vector presentModes; + uint32_t presentModeCount; + VkResult result; + do + { + result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( + static_cast( m_physicalDevice ), static_cast( surface ), &presentModeCount, nullptr ); + if ( ( result == VK_SUCCESS ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + return presentModes; + } + + //=== VK_KHR_swapchain === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR + Device::createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector SwapchainKHR::getImages() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainImagesKHR && "Function requires " ); + + std::vector swapchainImages; + uint32_t swapchainImageCount; + VkResult result; + do + { + result = getDispatcher()->vkGetSwapchainImagesKHR( + static_cast( m_device ), static_cast( m_swapchain ), &swapchainImageCount, nullptr ); + if ( ( result == VK_SUCCESS ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = getDispatcher()->vkGetSwapchainImagesKHR( static_cast( m_device ), + static_cast( m_swapchain ), + 
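// acquireNextImage returns { result, imageIndex } instead of throwing on eSuboptimalKHR,
// so the per-frame loop can react. Commented sketch; the sync objects and queue are
// hypothetical and the submit that signals `renderFinished` is elided:
//
//   auto [res, imageIndex] = swapchain.acquireNextImage( UINT64_MAX, *imageAvailable );
//   // record and submit work that signals `renderFinished` ...
//   vk::PresentInfoKHR presentInfo( *renderFinished, *swapchain, imageIndex );
//   res = queue.presentKHR( presentInfo );
//   if ( res == vk::Result::eSuboptimalKHR ) { /* recreate the swapchain */ }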
&swapchainImageCount, + reinterpret_cast( swapchainImages.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getImages" ); + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + if ( swapchainImageCount < swapchainImages.size() ) + { + swapchainImages.resize( swapchainImageCount ); + } + return swapchainImages; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + SwapchainKHR::acquireNextImage( uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImageKHR && "Function requires " ); + + uint32_t imageIndex; + VkResult result = getDispatcher()->vkAcquireNextImageKHR( static_cast( m_device ), + static_cast( m_swapchain ), + timeout, + static_cast( semaphore ), + static_cast( fence ), + &imageIndex ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireNextImage", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return std::make_pair( static_cast( result ), imageIndex ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueuePresentKHR && "Function requires " ); + + VkResult result = getDispatcher()->vkQueuePresentKHR( static_cast( m_queue ), reinterpret_cast( &presentInfo ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return static_cast( result ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR Device::getGroupPresentCapabilitiesKHR() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; + VkResult result = getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR( + static_cast( m_device ), reinterpret_cast( &deviceGroupPresentCapabilities ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); + + return deviceGroupPresentCapabilities; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR + Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + VkResult result = getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR( + static_cast( m_device ), static_cast( surface ), reinterpret_cast( &modes ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); + + return modes; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR && + "Function requires or " ); + + std::vector rects; + uint32_t rectCount; + VkResult result; + do + { + result = 
getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR( + static_cast( m_physicalDevice ), static_cast( surface ), &rectCount, nullptr ); + if ( ( result == VK_SUCCESS ) && rectCount ) + { + rects.resize( rectCount ); + result = getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR( + static_cast( m_physicalDevice ), static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + if ( rectCount < rects.size() ) + { + rects.resize( rectCount ); + } + return rects; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImage2KHR && "Function requires or " ); + + uint32_t imageIndex; + VkResult result = getDispatcher()->vkAcquireNextImage2KHR( + static_cast( m_device ), reinterpret_cast( &acquireInfo ), &imageIndex ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return std::make_pair( static_cast( result ), imageIndex ); + } + + //=== VK_KHR_display === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getDisplayPropertiesKHR() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VkResult result; + do + { + result = getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR( static_cast( m_physicalDevice ), &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getDisplayPlanePropertiesKHR() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VkResult result; + do + { + result = getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR( static_cast( m_physicalDevice ), &propertyCount, nullptr ); + if ( ( result == VK_SUCCESS ) && propertyCount ) + { + properties.resize( propertyCount ); + result = getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + VULKAN_HPP_NODISCARD 
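+
+    // Usage sketch (illustrative only, not part of the generated header): a minimal
+    // acquire/present loop over the RAII swapchain wrappers above. The objects
+    // `device`, `queue`, `swapchain` and the two semaphores are assumed to exist.
+    //
+    //   auto [acquireResult, imageIndex] = swapchain.acquireNextImage( UINT64_MAX, *imageAvailable, {} );
+    //   // ... record and submit work rendering to swapchain image `imageIndex` ...
+    //   vk::PresentInfoKHR presentInfo( *renderFinished, *swapchain, imageIndex );
+    //   vk::Result presentResult = queue.presentKHR( presentInfo );
+    //   if ( acquireResult == vk::Result::eSuboptimalKHR || presentResult == vk::Result::eSuboptimalKHR )
+    //   {
+    //     // recreate the swapchain, e.g. after a window resize
+    //   }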
+    //=== VK_KHR_display ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR> PhysicalDevice::getDisplayPropertiesKHR() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR && "Function requires <VK_KHR_display>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR> properties;
+      uint32_t propertyCount;
+      VkResult result;
+      do
+      {
+        result = getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && propertyCount )
+        {
+          properties.resize( propertyCount );
+          result = getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR(
+            static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      if ( propertyCount < properties.size() )
+      {
+        properties.resize( propertyCount );
+      }
+      return properties;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> PhysicalDevice::getDisplayPlanePropertiesKHR() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function requires <VK_KHR_display>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> properties;
+      uint32_t propertyCount;
+      VkResult result;
+      do
+      {
+        result = getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && propertyCount )
+        {
+          properties.resize( propertyCount );
+          result = getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+            static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      if ( propertyCount < properties.size() )
+      {
+        properties.resize( propertyCount );
+      }
+      return properties;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>
+      PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::DisplayKHRs( *this, planeIndex );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> DisplayKHR::getModeProperties() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModePropertiesKHR && "Function requires <VK_KHR_display>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> properties;
+      uint32_t propertyCount;
+      VkResult result;
+      do
+      {
+        result = getDispatcher()->vkGetDisplayModePropertiesKHR(
+          static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ), &propertyCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && propertyCount )
+        {
+          properties.resize( propertyCount );
+          result = getDispatcher()->vkGetDisplayModePropertiesKHR(
+            static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ),
+            &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties" );
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      if ( propertyCount < properties.size() )
+      {
+        properties.resize( propertyCount );
+      }
+      return properties;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR
+      DisplayKHR::createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
+                              VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR
+      DisplayModeKHR::getDisplayPlaneCapabilities( uint32_t planeIndex ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR && "Function requires <VK_KHR_display>" );
+
+      VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
+      VkResult result = getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayModeKHR>( m_displayModeKHR ),
+        planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayModeKHR::getDisplayPlaneCapabilities" );
+
+      return capabilities;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+      Instance::createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo,
+                                              VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+    }
+
+    //=== VK_KHR_display_swapchain ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>
+      Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
+                                         VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHRs( *this, createInfos, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR
+      Device::createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
+                                        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, createInfo, allocator );
+    }
+
+#  if defined( VK_USE_PLATFORM_XLIB_KHR )
+    //=== VK_KHR_xlib_surface ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+      Instance::createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo,
+                                      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
+      PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR && "Function requires <VK_KHR_xlib_surface>" );
+
+      VkBool32 result = getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &dpy, visualID );
+
+      return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+    }
+#  endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#  if defined( VK_USE_PLATFORM_XCB_KHR )
+    //=== VK_KHR_xcb_surface ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+      Instance::createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo,
+                                     VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR(
+      uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR && "Function requires <VK_KHR_xcb_surface>" );
+
+      VkBool32 result = getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &connection, visual_id );
+
+      return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+    }
+#  endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#  if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+    //=== VK_KHR_wayland_surface ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+      Instance::createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo,
+                                         VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
+      PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR && "Function requires <VK_KHR_wayland_surface>" );
+
+      VkBool32 result = getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &display );
+
+      return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+    }
+#  endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#  if defined( VK_USE_PLATFORM_ANDROID_KHR )
+    //=== VK_KHR_android_surface ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+      Instance::createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo,
+                                         VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+    }
+#  endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#  if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_KHR_win32_surface ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+      Instance::createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo,
+                                       VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
+      PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR && "Function requires <VK_KHR_win32_surface>" );
+
+      VkBool32 result = getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex );
+
+      return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
+    }
+#  endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    //=== VK_EXT_debug_report ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT
+      Instance::createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo,
+                                              VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
+                                                            VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,
+                                                            uint64_t object,
+                                                            size_t location,
+                                                            int32_t messageCode,
+                                                            const std::string & layerPrefix,
+                                                            const std::string & message ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkDebugReportMessageEXT && "Function requires <VK_EXT_debug_report>" );
+
+      getDispatcher()->vkDebugReportMessageEXT( static_cast<VkInstance>( m_instance ),
+                                                static_cast<VkDebugReportFlagsEXT>( flags ),
+                                                static_cast<VkDebugReportObjectTypeEXT>( objectType_ ),
+                                                object, location, messageCode, layerPrefix.c_str(), message.c_str() );
+    }
+
+    //=== VK_EXT_debug_marker ===
+
+    VULKAN_HPP_INLINE void Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectTagEXT && "Function requires <VK_EXT_debug_marker>" );
+
+      VkResult result = getDispatcher()->vkDebugMarkerSetObjectTagEXT(
+        static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
+    }
+
+    VULKAN_HPP_INLINE void Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectNameEXT && "Function requires <VK_EXT_debug_marker>" );
+
+      VkResult result = getDispatcher()->vkDebugMarkerSetObjectNameEXT(
+        static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerBeginEXT && "Function requires <VK_EXT_debug_marker>" );
+
+      getDispatcher()->vkCmdDebugMarkerBeginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                 reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerEndEXT && "Function requires <VK_EXT_debug_marker>" );
+
+      getDispatcher()->vkCmdDebugMarkerEndEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerInsertEXT && "Function requires <VK_EXT_debug_marker>" );
+
+      getDispatcher()->vkCmdDebugMarkerInsertEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                  reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
+    }
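+
+    // Usage sketch (illustrative only, not part of the generated header): labelling
+    // an object for debuggers such as RenderDoc via VK_EXT_debug_marker. `device`
+    // and `buffer` are assumed to exist and the extension must have been enabled.
+    //
+    //   vk::DebugMarkerObjectNameInfoEXT nameInfo( vk::DebugReportObjectTypeEXT::eBuffer,
+    //                                              reinterpret_cast<uint64_t>( static_cast<VkBuffer>( *buffer ) ),
+    //                                              "vertex buffer" );
+    //   device.debugMarkerSetObjectNameEXT( nameInfo );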
+    //=== VK_KHR_video_queue ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR
+      PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function requires <VK_KHR_video_queue>" );
+
+      VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
+      VkResult result = getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ),
+        reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ),
+        reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+
+      return capabilities;
+    }
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+      PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function requires <VK_KHR_video_queue>" );
+
+      StructureChain<X, Y, Z...> structureChain;
+      VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
+      VkResult result = getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ),
+        reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ),
+        reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
+
+      return structureChain;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>
+      PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR && "Function requires <VK_KHR_video_queue>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR> videoFormatProperties;
+      uint32_t videoFormatPropertyCount;
+      VkResult result;
+      do
+      {
+        result = getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+          static_cast<VkPhysicalDevice>( m_physicalDevice ),
+          reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
+          &videoFormatPropertyCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount )
+        {
+          videoFormatProperties.resize( videoFormatPropertyCount );
+          result = getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+            static_cast<VkPhysicalDevice>( m_physicalDevice ),
+            reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
+            &videoFormatPropertyCount, reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
+      VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
+      if ( videoFormatPropertyCount < videoFormatProperties.size() )
+      {
+        videoFormatProperties.resize( videoFormatPropertyCount );
+      }
+      return videoFormatProperties;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR
+      Device::createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo,
+                                     VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> VideoSessionKHR::getMemoryRequirements() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR && "Function requires <VK_KHR_video_queue>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> memoryRequirements;
+      uint32_t memoryRequirementsCount;
+      VkResult result;
+      do
+      {
+        result = getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR(
+          static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( m_videoSession ), &memoryRequirementsCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && memoryRequirementsCount )
+        {
+          memoryRequirements.resize( memoryRequirementsCount );
+          result = getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR(
+            static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( m_videoSession ),
+            &memoryRequirementsCount, reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+
+      VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
+      if ( memoryRequirementsCount < memoryRequirements.size() )
+      {
+        memoryRequirements.resize( memoryRequirementsCount );
+      }
+      return memoryRequirements;
+    }
+
+    VULKAN_HPP_INLINE void VideoSessionKHR::bindMemory(
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkBindVideoSessionMemoryKHR && "Function requires <VK_KHR_video_queue>" );
+
+      VkResult result = getDispatcher()->vkBindVideoSessionMemoryKHR(
+        static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( m_videoSession ),
+        bindSessionMemoryInfos.size(), reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::bindMemory" );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR
+      Device::createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo,
+                                               VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_INLINE void VideoSessionParametersKHR::update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateVideoSessionParametersKHR && "Function requires <VK_KHR_video_queue>" );
+
+      VkResult result = getDispatcher()->vkUpdateVideoSessionParametersKHR(
+        static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionParametersKHR>( m_videoSessionParameters ),
+        reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::VideoSessionParametersKHR::update" );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginVideoCodingKHR && "Function requires <VK_KHR_video_queue>" );
+
+      getDispatcher()->vkCmdBeginVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                 reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndVideoCodingKHR && "Function requires <VK_KHR_video_queue>" );
+
+      getDispatcher()->vkCmdEndVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                               reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdControlVideoCodingKHR && "Function requires <VK_KHR_video_queue>" );
+
+      getDispatcher()->vkCmdControlVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                   reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
+    }
+
+    //=== VK_KHR_video_decode_queue ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecodeVideoKHR && "Function requires <VK_KHR_video_decode_queue>" );
+
+      getDispatcher()->vkCmdDecodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) );
+    }
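+
+    // Note (illustrative, not part of the generated header): the vector-returning
+    // wrappers above all implement Vulkan's two-call enumeration idiom, looping
+    // while the second call reports VK_INCOMPLETE in case the count changed
+    // between the calls. From the C API the same pattern looks like:
+    //
+    //   uint32_t count;
+    //   vkGetSwapchainImagesKHR( device, swapchain, &count, nullptr );        // 1st call: query the count
+    //   std::vector<VkImage> images( count );
+    //   vkGetSwapchainImagesKHR( device, swapchain, &count, images.data() );  // 2nd call: fill the data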
+    //=== VK_EXT_transform_feedback ===
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
+                                                      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
+                                                      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+                                                      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT && "Function requires <VK_EXT_transform_feedback>" );
+      if ( buffers.size() != offsets.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
+      }
+      if ( !sizes.empty() && buffers.size() != sizes.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
+      }
+
+      getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                             firstBinding,
+                                                             buffers.size(),
+                                                             reinterpret_cast<const VkBuffer *>( buffers.data() ),
+                                                             reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
+                                                             reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
+                                                VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+                                                VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginTransformFeedbackEXT && "Function requires <VK_EXT_transform_feedback>" );
+      if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+      }
+
+      getDispatcher()->vkCmdBeginTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                       firstCounterBuffer,
+                                                       counterBuffers.size(),
+                                                       reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
+                                                       reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
+                                              VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
+                                              VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndTransformFeedbackEXT && "Function requires <VK_EXT_transform_feedback>" );
+      if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+      }
+
+      getDispatcher()->vkCmdEndTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                     firstCounterBuffer,
+                                                     counterBuffers.size(),
+                                                     reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
+                                                     reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                                                uint32_t query,
+                                                                VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
+                                                                uint32_t index ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginQueryIndexedEXT && "Function requires <VK_EXT_transform_feedback>" );
+
+      getDispatcher()->vkCmdBeginQueryIndexedEXT(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
+    }
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndQueryIndexedEXT && "Function requires <VK_EXT_transform_feedback>" );
+
+      getDispatcher()->vkCmdEndQueryIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, index );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount,
+                                                                    uint32_t firstInstance,
+                                                                    VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
+                                                                    VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
+                                                                    uint32_t counterOffset,
+                                                                    uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectByteCountEXT && "Function requires <VK_EXT_transform_feedback>" );
+
+      getDispatcher()->vkCmdDrawIndirectByteCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                      instanceCount,
+                                                      firstInstance,
+                                                      static_cast<VkBuffer>( counterBuffer ),
+                                                      static_cast<VkDeviceSize>( counterBufferOffset ),
+                                                      counterOffset,
+                                                      vertexStride );
+    }
+
+    //=== VK_NVX_binary_import ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX
+      Device::createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo,
+                                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX
+      Device::createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo,
+                                   VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCuLaunchKernelNVX && "Function requires <VK_NVX_binary_import>" );
+
+      getDispatcher()->vkCmdCuLaunchKernelNVX( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
+    }
+
+    //=== VK_NVX_image_view_handle ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t
+      Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewHandleNVX && "Function requires <VK_NVX_image_view_handle>" );
+
+      uint32_t result =
+        getDispatcher()->vkGetImageViewHandleNVX( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
+
+      return result;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ImageView::getAddressNVX() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewAddressNVX && "Function requires <VK_NVX_image_view_handle>" );
+
+      VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
+      VkResult result = getDispatcher()->vkGetImageViewAddressNVX(
+        static_cast<VkDevice>( m_device ), static_cast<VkImageView>( m_imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ImageView::getAddressNVX" );
+
+      return properties;
+    }
+
+    //=== VK_AMD_draw_indirect_count ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                                VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+                                                                VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                                                uint32_t maxDrawCount,
+                                                                uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountAMD &&
+                         "Function requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
+
+      getDispatcher()->vkCmdDrawIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                  static_cast<VkBuffer>( buffer ),
+                                                  static_cast<VkDeviceSize>( offset ),
+                                                  static_cast<VkBuffer>( countBuffer ),
+                                                  static_cast<VkDeviceSize>( countBufferOffset ),
+                                                  maxDrawCount,
+                                                  stride );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                                       VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                       VULKAN_HPP_NAMESPACE::Buffer countBuffer,
+                                                                       VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                                                       uint32_t maxDrawCount,
+                                                                       uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirectCountAMD &&
+                         "Function requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
+
+      getDispatcher()->vkCmdDrawIndexedIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                         static_cast<VkBuffer>( buffer ),
+                                                         static_cast<VkDeviceSize>( offset ),
+                                                         static_cast<VkBuffer>( countBuffer ),
+                                                         static_cast<VkDeviceSize>( countBufferOffset ),
+                                                         maxDrawCount,
+                                                         stride );
+    }
+
+    //=== VK_AMD_shader_info ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> Pipeline::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
+                                                                                            VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderInfoAMD && "Function requires <VK_AMD_shader_info>" );
+
+      std::vector<uint8_t> info;
+      size_t infoSize;
+      VkResult result;
+      do
+      {
+        result = getDispatcher()->vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ),
+                                                      static_cast<VkPipeline>( m_pipeline ),
+                                                      static_cast<VkShaderStageFlagBits>( shaderStage ),
+                                                      static_cast<VkShaderInfoTypeAMD>( infoType ),
+                                                      &infoSize,
+                                                      nullptr );
+        if ( ( result == VK_SUCCESS ) && infoSize )
+        {
+          info.resize( infoSize );
+          result = getDispatcher()->vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ),
+                                                        static_cast<VkPipeline>( m_pipeline ),
+                                                        static_cast<VkShaderStageFlagBits>( shaderStage ),
+                                                        static_cast<VkShaderInfoTypeAMD>( infoType ),
+                                                        &infoSize,
+                                                        reinterpret_cast<void *>( info.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getShaderInfoAMD" );
+      VULKAN_HPP_ASSERT( infoSize <= info.size() );
+      if ( infoSize < info.size() )
+      {
+        info.resize( infoSize );
+      }
+      return info;
+    }
+
+    //=== VK_KHR_dynamic_rendering ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderingKHR && "Function requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdBeginRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderingKHR && "Function requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdEndRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+    }
+
+#  if defined( VK_USE_PLATFORM_GGP )
+    //=== VK_GGP_stream_descriptor_surface ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+      Instance::createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo,
+                                                  VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+    }
+#  endif /*VK_USE_PLATFORM_GGP*/
+    //=== VK_NV_external_memory_capabilities ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV
+      PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format,
+                                                          VULKAN_HPP_NAMESPACE::ImageType type,
+                                                          VULKAN_HPP_NAMESPACE::ImageTiling tiling,
+                                                          VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
+                                                          VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
+                                                          VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV &&
+                         "Function requires <VK_NV_external_memory_capabilities>" );
+
+      VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
+      VkResult result = getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ),
+        static_cast<VkFormat>( format ),
+        static_cast<VkImageType>( type ),
+        static_cast<VkImageTiling>( tiling ),
+        static_cast<VkImageUsageFlags>( usage ),
+        static_cast<VkImageCreateFlags>( flags ),
+        static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
+        reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
+
+      return externalImageFormatProperties;
+    }
+
+#  if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_NV_external_memory_win32 ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE DeviceMemory::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleNV && "Function requires <VK_NV_external_memory_win32>" );
+
+      HANDLE handle;
+      VkResult result = getDispatcher()->vkGetMemoryWin32HandleNV(
+        static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::getMemoryWin32HandleNV" );
+
+      return handle;
+    }
+#  endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    //=== VK_KHR_get_physical_device_properties2 ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR &&
+                         "Function requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
+
+      VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+      getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                        reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+
+      return features;
+    }
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR &&
+                         "Function requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
+
+      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+      VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+      getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                        reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+
+      return structureChain;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR &&
+                         "Function requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
+
+      VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
+      getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                          reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+
+      return properties;
+    }
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR &&
+                         "Function requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
+
+      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+      VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+      getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                          reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+
+      return structureChain;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
+      PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR &&
+                         "Function requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
+
+      VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+      getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+
+      return formatProperties;
+    }
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+      PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR &&
+                         "Function requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
+
+      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+      VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+      getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+
+      return structureChain;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2
+      PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR &&
+                         "Function requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
+
+      VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+      VkResult result = getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ),
+        reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+        reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+
+      return imageFormatProperties;
+    }
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+      PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR &&
+                         "Function requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
+
+      StructureChain<X, Y, Z...> structureChain;
+      VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+      VkResult result = getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ),
+        reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
+        reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
+
+      return structureChain;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> PhysicalDevice::getQueueFamilyProperties2KHR() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR &&
+                         "Function requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+      uint32_t queueFamilyPropertyCount;
+      getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+      getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount,
+        reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
+      VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+      if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+      {
+        queueFamilyProperties.resize( queueFamilyPropertyCount );
+      }
+      return queueFamilyProperties;
+    }
+
+    template <typename StructureChain>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain> PhysicalDevice::getQueueFamilyProperties2KHR() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR &&
+                         "Function requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
+
+      std::vector<StructureChain> structureChains;
+      std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+      uint32_t queueFamilyPropertyCount;
+      getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
+      structureChains.resize( queueFamilyPropertyCount );
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+      for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+      {
+        queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+      }
+      getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount,
+        reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+
+      VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+      if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+      {
+        structureChains.resize( queueFamilyPropertyCount );
+      }
+      for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+      {
+        structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+      }
+      return structureChains;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
+      PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR &&
+                         "Function requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
+
+      VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+      getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                                reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+
+      return memoryProperties;
+    }
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR &&
+                         "Function requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
+
+      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+      VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
+        structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+      getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                                reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+
+      return structureChain;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>
+      PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR &&
+                         "Function requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> properties;
+      uint32_t propertyCount;
+      getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ),
+        reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+        &propertyCount, nullptr );
+      properties.resize( propertyCount );
+      getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ),
+        reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
+        &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      if ( propertyCount < properties.size() )
+      {
+        properties.resize( propertyCount );
+      }
+      return properties;
+    }
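+
+    // Usage sketch (illustrative only, not part of the generated header): querying
+    // chained feature structs through getFeatures2KHR. `physicalDevice` is assumed
+    // to be a vk::raii::PhysicalDevice and the extension (or Vulkan 1.1) available.
+    //
+    //   auto chain    = physicalDevice.getFeatures2KHR<vk::PhysicalDeviceFeatures2,
+    //                                                  vk::PhysicalDevice16BitStorageFeatures>();
+    //   auto features = chain.get<vk::PhysicalDevice16BitStorageFeatures>();
+    //   bool ok       = features.storageBuffer16BitAccess;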
+    //=== VK_KHR_device_group ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
+      Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR &&
+                         "Function requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
+
+      VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
+      getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR( static_cast<VkDevice>( m_device ),
+                                                              heapIndex,
+                                                              localDeviceIndex,
+                                                              remoteDeviceIndex,
+                                                              reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+
+      return peerMemoryFeatures;
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDeviceMaskKHR && "Function requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
+
+      getDispatcher()->vkCmdSetDeviceMaskKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX,
+                                                           uint32_t baseGroupY,
+                                                           uint32_t baseGroupZ,
+                                                           uint32_t groupCountX,
+                                                           uint32_t groupCountY,
+                                                           uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchBaseKHR && "Function requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
+
+      getDispatcher()->vkCmdDispatchBaseKHR(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+    }
+
+#  if defined( VK_USE_PLATFORM_VI_NN )
+    //=== VK_NN_vi_surface ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+      Instance::createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo,
+                                   VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+    }
+#  endif /*VK_USE_PLATFORM_VI_NN*/
+
+    //=== VK_KHR_maintenance1 ===
+
+    VULKAN_HPP_INLINE void CommandPool::trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkTrimCommandPoolKHR && "Function requires <VK_KHR_maintenance1> or <VK_VERSION_1_1>" );
+
+      getDispatcher()->vkTrimCommandPoolKHR(
+        static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+    }
+
+    //=== VK_KHR_device_group_creation ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> Instance::enumeratePhysicalDeviceGroupsKHR() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR &&
+                         "Function requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> physicalDeviceGroupProperties;
+      uint32_t physicalDeviceGroupCount;
+      VkResult result;
+      do
+      {
+        result = getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( static_cast<VkInstance>( m_instance ), &physicalDeviceGroupCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
+        {
+          physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+          result = getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR(
+            static_cast<VkInstance>( m_instance ), &physicalDeviceGroupCount,
+            reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
+      VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+      if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+      }
+      return physicalDeviceGroupProperties;
+    }
+
+    //=== VK_KHR_external_memory_capabilities ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR(
+      const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR &&
+                         "Function requires <VK_KHR_external_memory_capabilities> or <VK_VERSION_1_1>" );
+
+      VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
+      getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ),
+        reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
+        reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+
+      return externalBufferProperties;
+    }
+
+#  if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_KHR_external_memory_win32 ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE
+      Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleKHR && "Function requires <VK_KHR_external_memory_win32>" );
+
+      HANDLE handle;
+      VkResult result = getDispatcher()->vkGetMemoryWin32HandleKHR(
+        static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
+
+      return handle;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR
+      Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR && "Function requires <VK_KHR_external_memory_win32>" );
+
+      VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
+      VkResult result = getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR(
+        static_cast<VkDevice>( m_device ), static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+        handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
+
+      return memoryWin32HandleProperties;
+    }
+#  endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    //=== VK_KHR_external_memory_fd ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdKHR && "Function requires <VK_KHR_external_memory_fd>" );
+
+      int fd;
+      VkResult result =
+        getDispatcher()->vkGetMemoryFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
+
+      return fd;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR
+      Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdPropertiesKHR && "Function requires <VK_KHR_external_memory_fd>" );
+
+      VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
+      VkResult result = getDispatcher()->vkGetMemoryFdPropertiesKHR(
+        static_cast<VkDevice>( m_device ), static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+        fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
+
+      return memoryFdProperties;
+    }
+
+    //=== VK_KHR_external_semaphore_capabilities ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR(
+      const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR &&
+                         "Function requires <VK_KHR_external_semaphore_capabilities> or <VK_VERSION_1_1>" );
+
+      VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
+      getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ),
+        reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
+        reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+
+      return externalSemaphoreProperties;
+    }
+
+#  if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_KHR_external_semaphore_win32 ===
+
+    VULKAN_HPP_INLINE void
+      Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreWin32HandleKHR && "Function requires <VK_KHR_external_semaphore_win32>" );
+
+      VkResult result = getDispatcher()->vkImportSemaphoreWin32HandleKHR(
+        static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE
+      Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreWin32HandleKHR && "Function requires <VK_KHR_external_semaphore_win32>" );
+
+      HANDLE handle;
+      VkResult result = getDispatcher()->vkGetSemaphoreWin32HandleKHR(
+        static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
+
+      return handle;
+    }
+#  endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    //=== VK_KHR_external_semaphore_fd ===
+
+    VULKAN_HPP_INLINE void Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreFdKHR && "Function requires <VK_KHR_external_semaphore_fd>" );
+
+      VkResult result = getDispatcher()->vkImportSemaphoreFdKHR(
+        static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreFdKHR && "Function requires <VK_KHR_external_semaphore_fd>" );
+
+      int fd;
+      VkResult result =
+        getDispatcher()->vkGetSemaphoreFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
+
+      return fd;
+    }
+
+    //=== VK_KHR_push_descriptor ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR(
+      VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+      VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+      uint32_t set,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetKHR && "Function requires <VK_KHR_push_descriptor>" );
+
+      getDispatcher()->vkCmdPushDescriptorSetKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                  static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
+                                                  static_cast<VkPipelineLayout>( layout ),
+                                                  set,
+                                                  descriptorWrites.size(),
+                                                  reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
+    }
+    template <typename DataType>
+    VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+                                                                            VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+                                                                            uint32_t set,
+                                                                            DataType const & data ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR &&
+                         "Function requires <VK_KHR_push_descriptor> or <VK_KHR_descriptor_update_template>" );
+
+      getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                              static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+                                                              static_cast<VkPipelineLayout>( layout ),
+                                                              set,
+                                                              reinterpret_cast<const void *>( &data ) );
+    }
+
+    //=== VK_EXT_conditional_rendering ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT(
+      const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginConditionalRenderingEXT && "Function requires <VK_EXT_conditional_rendering>" );
+
+      getDispatcher()->vkCmdBeginConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                          reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndConditionalRenderingEXT && "Function requires <VK_EXT_conditional_rendering>" );
+
+      getDispatcher()->vkCmdEndConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+    }
+
+    //=== VK_KHR_descriptor_update_template ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate
+      Device::createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
+                                                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_INLINE void
+      Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR &&
+                         "Function requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
+
+      getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR(
+        static_cast<VkDevice>( m_device ),
+        static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    }
+
+    template <typename DataType>
+    VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
+                                                                 DataType const & data ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR &&
+                         "Function requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
+
+      getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR( static_cast<VkDevice>( m_device ),
+                                                             static_cast<VkDescriptorSet>( m_descriptorSet ),
+                                                             static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
+                                                             reinterpret_cast<const void *>( &data ) );
+    }
+
+    //=== VK_NV_clip_space_w_scaling ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV(
+      uint32_t firstViewport,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingNV && "Function requires <VK_NV_clip_space_w_scaling>" );
+
+      getDispatcher()->vkCmdSetViewportWScalingNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                   firstViewport,
+                                                   viewportWScalings.size(),
+                                                   reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
+    }
+
+#  if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+    //=== VK_EXT_acquire_xlib_display ===
+
+    VULKAN_HPP_INLINE void PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireXlibDisplayEXT && "Function requires <VK_EXT_acquire_xlib_display>" );
+
+      VkResult result =
+        getDispatcher()->vkAcquireXlibDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &dpy, static_cast<VkDisplayKHR>( display ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR PhysicalDevice::getRandROutputDisplayEXT( Display & dpy,
+                                                                                                                           RROutput rrOutput ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, dpy, rrOutput );
+    }
+#  endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+    //=== VK_EXT_display_surface_counter ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT
+      PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT && "Function requires <VK_EXT_display_surface_counter>" );
+
+      VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
+      VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ),
+        reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
+
+      return surfaceCapabilities;
+    }
+
+    //=== VK_EXT_display_control ===
+
+    VULKAN_HPP_INLINE void Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
+                                                           const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkDisplayPowerControlEXT && "Function requires <VK_EXT_display_control>" );
+
+      VkResult result = getDispatcher()->vkDisplayPowerControlEXT(
+        static_cast<VkDevice>( m_device ), static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence
+      Device::registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo,
+                                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, deviceEventInfo, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence
+      Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
+                                       VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo,
+                                       VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, display, displayEventInfo, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t SwapchainKHR::getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainCounterEXT && "Function requires <VK_EXT_display_control>" );
+
+      uint64_t counterValue;
+      VkResult result = getDispatcher()->vkGetSwapchainCounterEXT(
+        static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getCounterEXT" );
+
+      return counterValue;
+    }
+
+    //=== VK_GOOGLE_display_timing ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE SwapchainKHR::getRefreshCycleDurationGOOGLE() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetRefreshCycleDurationGOOGLE && "Function requires <VK_GOOGLE_display_timing>" );
+
+      VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
+      VkResult result = getDispatcher()->vkGetRefreshCycleDurationGOOGLE(
+        static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ),
+        reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getRefreshCycleDurationGOOGLE" );
+
+      return displayTimingProperties;
+    }
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> SwapchainKHR::getPastPresentationTimingGOOGLE() const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetPastPresentationTimingGOOGLE && "Function requires <VK_GOOGLE_display_timing>" );
+    std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> presentationTimings;
+    uint32_t presentationTimingCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPastPresentationTimingGOOGLE( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), &presentationTimingCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && presentationTimingCount )
+      {
+        presentationTimings.resize( presentationTimingCount );
+        result = getDispatcher()->vkGetPastPresentationTimingGOOGLE( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getPastPresentationTimingGOOGLE" );
+    VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+    if ( presentationTimingCount < presentationTimings.size() )
+    {
+      presentationTimings.resize( presentationTimingCount );
+    }
+    return presentationTimings;
+  }
+
+  //=== VK_EXT_discard_rectangles ===
+
+  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleEXT && "Function requires <VK_EXT_discard_rectangles>" );
+    getDispatcher()->vkCmdSetDiscardRectangleEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleEnableEXT && "Function requires <VK_EXT_discard_rectangles>" );
+    getDispatcher()->vkCmdSetDiscardRectangleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( discardRectangleEnable ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleModeEXT && "Function requires <VK_EXT_discard_rectangles>" );
+    getDispatcher()->vkCmdSetDiscardRectangleModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) );
+  }
+
+  //=== VK_EXT_hdr_metadata ===
+
+  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkSetHdrMetadataEXT && "Function requires <VK_EXT_hdr_metadata>" );
+    if ( swapchains.size() != metadata.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
+    }
+    getDispatcher()->vkSetHdrMetadataEXT( static_cast<VkDevice>( m_device ), swapchains.size(), reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ), reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
+  }
+
+  //=== VK_KHR_create_renderpass2 ===
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass Device::createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator );
+  }
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass2KHR && "Function requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
+    getDispatcher()->vkCmdBeginRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass2KHR && "Function requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
+    getDispatcher()->vkCmdNextSubpass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass2KHR && "Function requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
+    getDispatcher()->vkCmdEndRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+  }
+
+  //=== VK_KHR_shared_presentable_image ===
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::getStatus() const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainStatusKHR && "Function requires <VK_KHR_shared_presentable_image>" );
+    VkResult result = getDispatcher()->vkGetSwapchainStatusKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  //=== VK_KHR_external_fence_capabilities ===
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR && "Function requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" );
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
+    getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+    return externalFenceProperties;
+  }
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+
+  VULKAN_HPP_INLINE void Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceWin32HandleKHR && "Function requires <VK_KHR_external_fence_win32>" );
+    VkResult result = getDispatcher()->vkImportFenceWin32HandleKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
+  }
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceWin32HandleKHR && "Function requires <VK_KHR_external_fence_win32>" );
+    HANDLE handle;
+    VkResult result = getDispatcher()->vkGetFenceWin32HandleKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
+    return handle;
+  }
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+
+  VULKAN_HPP_INLINE void Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceFdKHR && "Function requires <VK_KHR_external_fence_fd>" );
+    VkResult result = getDispatcher()->vkImportFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceFdKHR && "Function requires <VK_KHR_external_fence_fd>" );
+    int fd;
+    VkResult result = getDispatcher()->vkGetFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
+    return fd;
+  }
+
+  //=== VK_KHR_performance_query ===
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>> PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && "Function requires <VK_KHR_performance_query>" );
+    std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>> data_;
+    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> & counters = data_.first;
+    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR> & counterDescriptions = data_.second;
+    uint32_t counterCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &counterCount, nullptr, nullptr );
+      if ( ( result == VK_SUCCESS ) && counterCount )
+      {
+        counters.resize( counterCount );
+        counterDescriptions.resize( counterCount );
+        result = getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+    VULKAN_HPP_ASSERT( counterCount <= counters.size() );
+    if ( counterCount < counters.size() )
+    {
+      counters.resize( counterCount );
+      counterDescriptions.resize( counterCount );
+    }
+    return data_;
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR && "Function requires <VK_KHR_performance_query>" );
+    uint32_t numPasses;
+    getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses );
+    return numPasses;
+  }
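+  // Usage sketch (illustrative comment only, not part of the generated API): the typical
+  // VK_KHR_performance_query flow with these wrappers is enumerate -> query passes -> lock.
+  // `vk` is assumed to alias VULKAN_HPP_NAMESPACE, `physicalDevice`/`device` to be valid RAII
+  // handles, and `queueFamilyIndex`/`counterIndices` to be chosen by the application:
+  //   auto [counters, descriptions] = physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
+  //   vk::QueryPoolPerformanceCreateInfoKHR perfInfo( {}, queueFamilyIndex, counterIndices );
+  //   uint32_t numPasses = physicalDevice.getQueueFamilyPerformanceQueryPassesKHR( perfInfo );  // submissions needed per query
+  //   device.acquireProfilingLockKHR( vk::AcquireProfilingLockInfoKHR{} );                      // pair with releaseProfilingLockKHR()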
+  VULKAN_HPP_INLINE void Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireProfilingLockKHR && "Function requires <VK_KHR_performance_query>" );
+    VkResult result = getDispatcher()->vkAcquireProfilingLockKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
+  }
+
+  VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR() const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseProfilingLockKHR && "Function requires <VK_KHR_performance_query>" );
+    getDispatcher()->vkReleaseProfilingLockKHR( static_cast<VkDevice>( m_device ) );
+  }
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR && "Function requires <VK_KHR_get_surface_capabilities2>" );
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
+    VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
+    return surfaceCapabilities;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR && "Function requires <VK_KHR_get_surface_capabilities2>" );
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
+    VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
+    return structureChain;
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR && "Function requires <VK_KHR_get_surface_capabilities2>" );
+    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+    if ( surfaceFormatCount < surfaceFormats.size() )
+    {
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return surfaceFormats;
+  }
+  template <typename StructureChain>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain> PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR && "Function requires <VK_KHR_get_surface_capabilities2>" );
+    std::vector<StructureChain> structureChains;
+    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
+      {
+        structureChains.resize( surfaceFormatCount );
+        surfaceFormats.resize( surfaceFormatCount );
+        for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
+        {
+          surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
+        }
+        result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
+    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+    if ( surfaceFormatCount < surfaceFormats.size() )
+    {
+      structureChains.resize( surfaceFormatCount );
+    }
+    for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
+    {
+      structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
+    }
+    return structureChains;
+  }
+
+  //=== VK_KHR_get_display_properties2 ===
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR> PhysicalDevice::getDisplayProperties2KHR() const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR && "Function requires <VK_KHR_get_display_properties2>" );
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR> PhysicalDevice::getDisplayPlaneProperties2KHR() const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR && "Function requires <VK_KHR_get_display_properties2>" );
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
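+  // Note (illustrative comment, not generated code): the array getters above all follow the
+  // standard Vulkan two-call idiom -- query the count with a null pointer, size the vector,
+  // then fetch, looping while VK_INCOMPLETE is returned since the count can change between
+  // the two calls. Typical use, assuming `vk` aliases VULKAN_HPP_NAMESPACE and `gpu` is a
+  // valid raii::PhysicalDevice:
+  //   std::vector<vk::DisplayProperties2KHR> displays = gpu.getDisplayProperties2KHR();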
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> DisplayKHR::getModeProperties2() const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModeProperties2KHR && "Function requires <VK_KHR_get_display_properties2>" );
+    std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetDisplayModeProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ), &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = getDispatcher()->vkGetDisplayModeProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties2" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilities2KHR && "Function requires <VK_KHR_get_display_properties2>" );
+    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
+    VkResult result = getDispatcher()->vkGetDisplayPlaneCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
+    return capabilities;
+  }
+
+# if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+# endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+# if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR Instance::createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+  }
+# endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+
+  VULKAN_HPP_INLINE void Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectNameEXT && "Function requires <VK_EXT_debug_utils>" );
+    VkResult result = getDispatcher()->vkSetDebugUtilsObjectNameEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
+  }
+
+  VULKAN_HPP_INLINE void Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectTagEXT && "Function requires <VK_EXT_debug_utils>" );
+    VkResult result = getDispatcher()->vkSetDebugUtilsObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
+  }
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkQueueBeginDebugUtilsLabelEXT && "Function requires <VK_EXT_debug_utils>" );
+    getDispatcher()->vkQueueBeginDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkQueueEndDebugUtilsLabelEXT && "Function requires <VK_EXT_debug_utils>" );
+    getDispatcher()->vkQueueEndDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ) );
+  }
+
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkQueueInsertDebugUtilsLabelEXT && "Function requires <VK_EXT_debug_utils>" );
+    getDispatcher()->vkQueueInsertDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginDebugUtilsLabelEXT && "Function requires <VK_EXT_debug_utils>" );
+    getDispatcher()->vkCmdBeginDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndDebugUtilsLabelEXT && "Function requires <VK_EXT_debug_utils>" );
+    getDispatcher()->vkCmdEndDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInsertDebugUtilsLabelEXT && "Function requires <VK_EXT_debug_utils>" );
+    getDispatcher()->vkCmdInsertDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT Instance::createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT( *this, createInfo, allocator );
+  }
+
+  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkSubmitDebugUtilsMessageEXT && "Function requires <VK_EXT_debug_utils>" );
+    getDispatcher()->vkSubmitDebugUtilsMessageEXT( static_cast<VkInstance>( m_instance ), static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
+  }
"::Device::getAndroidHardwareBufferPropertiesANDROID" ); + + return properties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID && + "Function requires " ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = + structureChain.template get(); + VkResult result = getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID( + static_cast( m_device ), &buffer, reinterpret_cast( &properties ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE struct AHardwareBuffer * + Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID && + "Function requires " ); + + struct AHardwareBuffer * buffer; + VkResult result = getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID( + static_cast( m_device ), reinterpret_cast( &info ), &buffer ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); + + return buffer; + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::createExecutionGraphPipelinesAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createExecutionGraphPipelineAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX Pipeline::getExecutionGraphScratchSizeAMDX() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo; + VkResult result = getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX( + static_cast( m_device ), static_cast( m_pipeline ), reinterpret_cast( &sizeInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphScratchSizeAMDX" ); + + return sizeInfo; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t + Pipeline::getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX && + "Function requires " ); + + uint32_t nodeIndex; + VkResult result = + getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast( m_device ), + static_cast( m_pipeline ), + reinterpret_cast( &nodeInfo ), + &nodeIndex ); + resultCheck( static_cast( 
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t Pipeline::getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX && "Function requires <VK_AMDX_shader_enqueue>" );
+    uint32_t nodeIndex;
+    VkResult result = getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( &nodeInfo ), &nodeIndex );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphNodeIndexAMDX" );
+    return nodeIndex;
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX && "Function requires <VK_AMDX_shader_enqueue>" );
+    getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( scratch ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphAMDX && "Function requires <VK_AMDX_shader_enqueue>" );
+    getDispatcher()->vkCmdDispatchGraphAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectAMDX && "Function requires <VK_AMDX_shader_enqueue>" );
+    getDispatcher()->vkCmdDispatchGraphIndirectAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX && "Function requires <VK_AMDX_shader_enqueue>" );
+    getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( scratch ), static_cast<VkDeviceAddress>( countInfo ) );
+  }
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_EXT_sample_locations ===
+
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEXT && "Function requires <VK_EXT_sample_locations>" );
+    getDispatcher()->vkCmdSetSampleLocationsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT && "Function requires <VK_EXT_sample_locations>" );
+    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
+    getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
+    return multisampleProperties;
+  }
+
+  //=== VK_KHR_get_memory_requirements2 ===
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR && "Function requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR && "Function requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return structureChain;
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR && "Function requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR && "Function requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    return structureChain;
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements2KHR && "Function requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    getDispatcher()->vkGetImageSparseMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    getDispatcher()->vkGetImageSparseMemoryRequirements2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  //=== VK_KHR_acceleration_structure ===
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR Device::createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR( *this, createInfo, allocator );
+  }
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresKHR && "Function requires <VK_KHR_acceleration_structure>" );
+    if ( infos.size() != pBuildRangeInfos.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
+    }
+    getDispatcher()->vkCmdBuildAccelerationStructuresKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), infos.size(), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR && "Function requires <VK_KHR_acceleration_structure>" );
+    if ( infos.size() != indirectDeviceAddresses.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
+    }
+    if ( infos.size() != indirectStrides.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
+    }
+    if ( infos.size() != pMaxPrimitiveCounts.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
+    }
+    getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), infos.size(), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ), indirectStrides.data(), pMaxPrimitiveCounts.data() );
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkBuildAccelerationStructuresKHR && "Function requires <VK_KHR_acceleration_structure>" );
+    if ( infos.size() != pBuildRangeInfos.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
+    }
+    VkResult result = getDispatcher()->vkBuildAccelerationStructuresKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureKHR && "Function requires <VK_KHR_acceleration_structure>" );
+    VkResult result = getDispatcher()->vkCopyAccelerationStructureKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureToMemoryKHR && "Function requires <VK_KHR_acceleration_structure>" );
+    VkResult result = getDispatcher()->vkCopyAccelerationStructureToMemoryKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToAccelerationStructureKHR && "Function requires <VK_KHR_acceleration_structure>" );
+    VkResult result = getDispatcher()->vkCopyMemoryToAccelerationStructureKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType> Device::writeAccelerationStructuresPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, size_t stride ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR && "Function requires <VK_KHR_acceleration_structure>" );
+    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType> data( dataSize / sizeof( DataType ) );
+    VkResult result = getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast<VkDevice>( m_device ), accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ), stride );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
+    return data;
+  }
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Device::writeAccelerationStructuresPropertyKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR && "Function requires <VK_KHR_acceleration_structure>" );
+    DataType data;
+    VkResult result = getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast<VkDevice>( m_device ), accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), sizeof( DataType ), reinterpret_cast<void *>( &data ), stride );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
+    return data;
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureKHR && "Function requires <VK_KHR_acceleration_structure>" );
+    getDispatcher()->vkCmdCopyAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR && "Function requires <VK_KHR_acceleration_structure>" );
+    getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR && "Function requires <VK_KHR_acceleration_structure>" );
+    getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR && "Function requires <VK_KHR_acceleration_structure>" );
+    VkDeviceAddress result = getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
+    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR && "Function requires <VK_KHR_acceleration_structure>" );
+    getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR && "Function requires <VK_KHR_acceleration_structure>" );
+    VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
+    getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ), reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
+    return compatibility;
+  }
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureBuildSizesKHR && "Function requires <VK_KHR_acceleration_structure>" );
+    if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
+    }
+    VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
+    getDispatcher()->vkGetAccelerationStructureBuildSizesKHR( static_cast<VkDevice>( m_device ), static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ), maxPrimitiveCounts.data(), reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
+    return sizeInfo;
+  }
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysKHR && "Function requires <VK_KHR_ray_tracing_pipeline>" );
+    getDispatcher()->vkCmdTraceRaysKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), width, height, depth );
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, deferredOperation, pipelineCache, createInfos, allocator );
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, deferredOperation, pipelineCache, createInfo, allocator );
+  }
+
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType> Pipeline::getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR && "Function requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
+    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType> data( dataSize / sizeof( DataType ) );
+    VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesKHR" );
+    return data;
+  }
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR && "Function requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
+    DataType data;
+    VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleKHR" );
+    return data;
+  }
+
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType> Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && "Function requires <VK_KHR_ray_tracing_pipeline>" );
+    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType> data( dataSize / sizeof( DataType ) );
+    VkResult result = getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+    return data;
+  }
+
+  template <typename DataType>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && "Function requires <VK_KHR_ray_tracing_pipeline>" );
+    DataType data;
+    VkResult result = getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR" );
+    return data;
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirectKHR && "Function requires <VK_KHR_ray_tracing_pipeline>" );
+    getDispatcher()->vkCmdTraceRaysIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Pipeline::getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR && "Function requires <VK_KHR_ray_tracing_pipeline>" );
+    VkDeviceSize result = getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) );
+    return static_cast<VULKAN_HPP_NAMESPACE::DeviceSize>( result );
+  }
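+  // Usage sketch (illustrative comment only; `props` stands for the application's queried
+  // PhysicalDeviceRayTracingPipelinePropertiesKHR and `rtPipeline` for a valid raii::Pipeline):
+  //   uint32_t handleSize = props.shaderGroupHandleSize;
+  //   std::vector<uint8_t> handles = rtPipeline.getRayTracingShaderGroupHandlesKHR<uint8_t>( 0, groupCount, size_t( groupCount ) * handleSize );
+  // The returned bytes are then copied into the shader binding table buffer at offsets aligned
+  // to props.shaderGroupBaseAlignment before recording traceRaysKHR.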
+  VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR && "Function requires <VK_KHR_ray_tracing_pipeline>" );
+    getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), pipelineStackSize );
+  }
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion Device::createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( *this, createInfo, allocator );
+  }
+
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkDestroySamplerYcbcrConversionKHR && "Function requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
+    getDispatcher()->vkDestroySamplerYcbcrConversionKHR( static_cast<VkDevice>( m_device ), static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+
+  //=== VK_KHR_bind_memory2 ===
+
+  VULKAN_HPP_INLINE void Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory2KHR && "Function requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
+    VkResult result = getDispatcher()->vkBindBufferMemory2KHR( static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
+  }
+
+  VULKAN_HPP_INLINE void Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory2KHR && "Function requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
+    VkResult result = getDispatcher()->vkBindImageMemory2KHR( static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
+  }
+
+  //=== VK_EXT_image_drm_format_modifier ===
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT Image::getDrmFormatModifierPropertiesEXT() const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT && "Function requires <VK_EXT_image_drm_format_modifier>" );
+    VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
+    VkResult result = getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT( static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Image::getDrmFormatModifierPropertiesEXT" );
+    return properties;
+  }
+
+  //=== VK_EXT_validation_cache ===
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT Device::createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT( *this, createInfo, allocator );
+  }
+  VULKAN_HPP_INLINE void ValidationCacheEXT::merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches ) const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkMergeValidationCachesEXT && "Function requires <VK_EXT_validation_cache>" );
+    VkResult result = getDispatcher()->vkMergeValidationCachesEXT( static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( m_validationCache ), srcCaches.size(), reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::merge" );
+  }
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> ValidationCacheEXT::getData() const
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkGetValidationCacheDataEXT && "Function requires <VK_EXT_validation_cache>" );
+    std::vector<uint8_t> data;
+    size_t dataSize;
+    VkResult result;
+    do
+    {
+      result = getDispatcher()->vkGetValidationCacheDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( m_validationCache ), &dataSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = getDispatcher()->vkGetValidationCacheDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( m_validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::getData" );
+    VULKAN_HPP_ASSERT( dataSize <= data.size() );
+    if ( dataSize < data.size() )
+    {
+      data.resize( dataSize );
+    }
+    return data;
+  }
+
+  //=== VK_NV_shading_rate_image ===
+
+  VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadingRateImageNV && "Function requires <VK_NV_shading_rate_image>" );
+    getDispatcher()->vkCmdBindShadingRateImageNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportShadingRatePaletteNV && "Function requires <VK_NV_shading_rate_image>" );
+    getDispatcher()->vkCmdSetViewportShadingRatePaletteNV( static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, shadingRatePalettes.size(), reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
+  }
+
+  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoarseSampleOrderNV && "Function requires <VK_NV_shading_rate_image>" );
+    getDispatcher()->vkCmdSetCoarseSampleOrderNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrders.size(), reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
+  }
+
+  //=== VK_NV_ray_tracing ===
+
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV Device::createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+  {
+    return VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV( *this, createInfo, allocator );
+  }
+      getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
+                                                                       reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
+                                                                       reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+
+      return memoryRequirements;
+    }
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getAccelerationStructureMemoryRequirementsNV(
+      const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV &&
+                         "Function requires <VK_NV_ray_tracing>" );
+
+      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+      VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR &   memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
+      getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
+                                                                       reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
+                                                                       reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
+
+      return structureChain;
+    }
+
+    VULKAN_HPP_INLINE void Device::bindAccelerationStructureMemoryNV(
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkBindAccelerationStructureMemoryNV &&
+                         "Function requires <VK_NV_ray_tracing>" );
+
+      VkResult result = getDispatcher()->vkBindAccelerationStructureMemoryNV(
+        static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
+                                                                        VULKAN_HPP_NAMESPACE::Buffer                  instanceData,
+                                                                        VULKAN_HPP_NAMESPACE::DeviceSize              instanceOffset,
+                                                                        VULKAN_HPP_NAMESPACE::Bool32                  update,
+                                                                        VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
+                                                                        VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
+                                                                        VULKAN_HPP_NAMESPACE::Buffer                  scratch,
+                                                                        VULKAN_HPP_NAMESPACE::DeviceSize              scratchOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructureNV && "Function requires <VK_NV_ray_tracing>" );
+
+      getDispatcher()->vkCmdBuildAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                          reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
+                                                          static_cast<VkBuffer>( instanceData ),
+                                                          static_cast<VkDeviceSize>( instanceOffset ),
+                                                          static_cast<VkBool32>( update ),
+                                                          static_cast<VkAccelerationStructureNV>( dst ),
+                                                          static_cast<VkAccelerationStructureNV>( src ),
+                                                          static_cast<VkBuffer>( scratch ),
+                                                          static_cast<VkDeviceSize>( scratchOffset ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV          dst,
+                                                                       VULKAN_HPP_NAMESPACE::AccelerationStructureNV          src,
+                                                                       VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureNV && "Function requires <VK_NV_ray_tracing>" );
+
+      getDispatcher()->vkCmdCopyAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                         static_cast<VkAccelerationStructureNV>( dst ),
+                                                         static_cast<VkAccelerationStructureNV>( src ),
+                                                         static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer     raygenShaderBindingTableBuffer,
+                                                       VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
+                                                       VULKAN_HPP_NAMESPACE::Buffer     missShaderBindingTableBuffer,
+                                                       VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
+                                                       VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
+                                                       VULKAN_HPP_NAMESPACE::Buffer     hitShaderBindingTableBuffer,
+                                                       VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
+                                                       VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
+                                                       VULKAN_HPP_NAMESPACE::Buffer     callableShaderBindingTableBuffer,
+                                                       VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
+                                                       VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
+                                                       uint32_t                         width,
+                                                       uint32_t                         height,
+                                                       uint32_t                         depth ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysNV && "Function requires <VK_NV_ray_tracing>" );
+
+      getDispatcher()->vkCmdTraceRaysNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                         static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
+                                         static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
+                                         static_cast<VkBuffer>( missShaderBindingTableBuffer ),
+                                         static_cast<VkDeviceSize>( missShaderBindingOffset ),
+                                         static_cast<VkDeviceSize>( missShaderBindingStride ),
+                                         static_cast<VkBuffer>( hitShaderBindingTableBuffer ),
+                                         static_cast<VkDeviceSize>( hitShaderBindingOffset ),
+                                         static_cast<VkDeviceSize>( hitShaderBindingStride ),
+                                         static_cast<VkBuffer>( callableShaderBindingTableBuffer ),
+                                         static_cast<VkDeviceSize>( callableShaderBindingOffset ),
+                                         static_cast<VkDeviceSize>( callableShaderBindingStride ),
+                                         width,
+                                         height,
+                                         depth );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createRayTracingPipelinesNV(
+      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const &        pipelineCache,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
+      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>               allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createRayTracingPipelineNV(
+      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
+      VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const &                           createInfo,
+      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>        allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator );
+    }
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType>
+      Pipeline::getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV &&
+                         "Function requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
+
+      VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+      std::vector<DataType> data( dataSize / sizeof( DataType ) );
+      VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast<VkDevice>( m_device ),
+                                                                              static_cast<VkPipeline>( m_pipeline ),
+                                                                              firstGroup,
+                                                                              groupCount,
+                                                                              data.size() * sizeof( DataType ),
+                                                                              reinterpret_cast<void *>( data.data() ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesNV" );
+
+      return data;
+    }
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV &&
+                         "Function requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
+
+      DataType data;
+      VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast<VkDevice>( m_device ),
+                                                                              static_cast<VkPipeline>( m_pipeline ),
+                                                                              firstGroup,
+                                                                              groupCount,
+                                                                              sizeof( DataType ),
+                                                                              reinterpret_cast<void *>( &data ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleNV" );
+
+      return data;
+    }
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType> AccelerationStructureNV::getHandle( size_t dataSize ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV && "Function requires <VK_NV_ray_tracing>" );
+
+      VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+      std::vector<DataType> data( dataSize / sizeof( DataType ) );
+      VkResult result = getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast<VkDevice>( m_device ),
+                                                                             static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
+                                                                             data.size() * sizeof( DataType ),
+                                                                             reinterpret_cast<void *>( data.data() ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" );
+
+      return data;
+    }
+
+    template <typename DataType>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType AccelerationStructureNV::getHandle() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV && "Function requires <VK_NV_ray_tracing>" );
+
+      DataType data;
+      VkResult result = getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast<VkDevice>( m_device ),
+                                                                             static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
+                                                                             sizeof( DataType ),
+                                                                             reinterpret_cast<void *>( &data ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" );
+
+      return data;
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
+      VULKAN_HPP_NAMESPACE::QueryType                                                               queryType,
+      VULKAN_HPP_NAMESPACE::QueryPool                                                               queryPool,
+      uint32_t                                                                                      firstQuery ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV &&
+                         "Function requires <VK_NV_ray_tracing>" );
+
+      getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                                     accelerationStructures.size(),
+                                                                     reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
+                                                                     static_cast<VkQueryType>( queryType ),
+                                                                     static_cast<VkQueryPool>( queryPool ),
+                                                                     firstQuery );
+    }
+
+    VULKAN_HPP_INLINE void Pipeline::compileDeferredNV( uint32_t shader ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCompileDeferredNV && "Function requires <VK_NV_ray_tracing>" );
+
+      VkResult result = getDispatcher()->vkCompileDeferredNV( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), shader );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::compileDeferredNV" );
+    }
+
+    //=== VK_KHR_maintenance3 ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
+      Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR &&
+                         "Function requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" );
+
+      VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
+      getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast<VkDevice>( m_device ),
+                                                           reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+                                                           reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+
+      return support;
+    }
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+      Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR &&
+                         "Function requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" );
+
+      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>   structureChain;
+      VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+      getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast<VkDevice>( m_device ),
+                                                           reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
+                                                           reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+
+      return structureChain;
+    }
+
+    //=== VK_KHR_draw_indirect_count ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer     buffer,
+                                                                VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
+                                                                VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                                                uint32_t                         maxDrawCount,
+                                                                uint32_t                         stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountKHR &&
+                         "Function requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
+
+      getDispatcher()->vkCmdDrawIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                  static_cast<VkBuffer>( buffer ),
+                                                  static_cast<VkDeviceSize>( offset ),
+                                                  static_cast<VkBuffer>( countBuffer ),
+                                                  static_cast<VkDeviceSize>( countBufferOffset ),
+                                                  maxDrawCount,
+                                                  stride );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer     buffer,
+                                                                       VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                       VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
+                                                                       VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                                                       uint32_t                         maxDrawCount,
+                                                                       uint32_t                         stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(
+        getDispatcher()->vkCmdDrawIndexedIndirectCountKHR &&
+        "Function requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
+
+      getDispatcher()->vkCmdDrawIndexedIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                         static_cast<VkBuffer>( buffer ),
+                                                         static_cast<VkDeviceSize>( offset ),
+                                                         static_cast<VkBuffer>( countBuffer ),
+                                                         static_cast<VkDeviceSize>( countBufferOffset ),
+                                                         maxDrawCount,
+                                                         stride );
+    }
+
+    //=== VK_EXT_external_memory_host ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT
+      Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryHostPointerPropertiesEXT &&
+                         "Function requires <VK_EXT_external_memory_host>" );
+
+      VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
+      VkResult result =
+        getDispatcher()->vkGetMemoryHostPointerPropertiesEXT( static_cast<VkDevice>( m_device ),
+                                                              static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+                                                              pHostPointer,
+                                                              reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
+
+      return memoryHostPointerProperties;
+    }
+
+    //=== VK_AMD_buffer_marker ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
+                                                                VULKAN_HPP_NAMESPACE::Buffer                dstBuffer,
+                                                                VULKAN_HPP_NAMESPACE::DeviceSize            dstOffset,
+                                                                uint32_t                                    marker ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarkerAMD && "Function requires <VK_AMD_buffer_marker>" );
+
+      getDispatcher()->vkCmdWriteBufferMarkerAMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                  static_cast<VkPipelineStageFlagBits>( pipelineStage ),
+                                                  static_cast<VkBuffer>( dstBuffer ),
+                                                  static_cast<VkDeviceSize>( dstOffset ),
+                                                  marker );
+    }
+
+    //=== VK_EXT_calibrated_timestamps ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR> PhysicalDevice::getCalibrateableTimeDomainsEXT() const
+    {
+      VULKAN_HPP_ASSERT(
+        getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT &&
+        "Function requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR> timeDomains;
+      uint32_t                                         timeDomainCount;
+      VkResult                                         result;
+      do
+      {
+        result =
+          getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &timeDomainCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && timeDomainCount )
+        {
+          timeDomains.resize( timeDomainCount );
+          result = getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
+            static_cast<VkPhysicalDevice>( m_physicalDevice ), &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
+      VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
+      if ( timeDomainCount < timeDomains.size() )
+      {
+        timeDomains.resize( timeDomainCount );
+      }
+      return timeDomains;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<std::vector<uint64_t>, uint64_t> Device::getCalibratedTimestampsEXT(
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT &&
+                         "Function requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
+
+      std::pair<std::vector<uint64_t>, uint64_t> data_(
+        std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
+      std::vector<uint64_t> & timestamps   = data_.first;
+      uint64_t &              maxDeviation = data_.second;
+      VkResult result = getDispatcher()->vkGetCalibratedTimestampsEXT( static_cast<VkDevice>( m_device ),
+                                                                       timestampInfos.size(),
+                                                                       reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ),
+                                                                       timestamps.data(),
+                                                                       &maxDeviation );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
+
+      return data_;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<uint64_t, uint64_t>
+      Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT &&
+                         "Function requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
+
+      std::pair<uint64_t, uint64_t> data_;
+      uint64_t & timestamp    = data_.first;
+      uint64_t & maxDeviation = data_.second;
+      VkResult result = getDispatcher()->vkGetCalibratedTimestampsEXT(
+        static_cast<VkDevice>( m_device ), 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( &timestampInfo ), &timestamp, &maxDeviation );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
+
+      return data_;
+    }
+
+    //=== VK_NV_mesh_shader ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksNV && "Function requires <VK_NV_mesh_shader>" );
+
+      getDispatcher()->vkCmdDrawMeshTasksNV( static_cast<VkCommandBuffer>( m_commandBuffer ), taskCount, firstTask );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer     buffer,
+                                                                   VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                   uint32_t                         drawCount,
+                                                                   uint32_t                         stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectNV && "Function requires <VK_NV_mesh_shader>" );
+
+      getDispatcher()->vkCmdDrawMeshTasksIndirectNV(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer     buffer,
+                                                                        VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                        VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
+                                                                        VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
+                                                                        uint32_t                         maxDrawCount,
+                                                                        uint32_t                         stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV && "Function requires <VK_NV_mesh_shader>" );
+
+      getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                          static_cast<VkBuffer>( buffer ),
+                                                          static_cast<VkDeviceSize>( offset ),
+                                                          static_cast<VkBuffer>( countBuffer ),
+                                                          static_cast<VkDeviceSize>( countBufferOffset ),
+                                                          maxDrawCount,
+                                                          stride );
+    }
+
+    //=== VK_NV_scissor_exclusive ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV(
+      uint32_t firstExclusiveScissor,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExclusiveScissorEnableNV &&
+                         "Function requires <VK_NV_scissor_exclusive>" );
+
+      getDispatcher()->vkCmdSetExclusiveScissorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                         firstExclusiveScissor,
+                                                         exclusiveScissorEnables.size(),
+                                                         reinterpret_cast<const VkBool32 *>( exclusiveScissorEnables.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV(
+      uint32_t firstExclusiveScissor,
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExclusiveScissorNV && "Function requires <VK_NV_scissor_exclusive>" );
+
+      getDispatcher()->vkCmdSetExclusiveScissorNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                   firstExclusiveScissor,
+                                                   exclusiveScissors.size(),
+                                                   reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
+    }
+
+    //=== VK_NV_device_diagnostic_checkpoints ===
+
+    template <typename CheckpointMarkerType>
+    VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCheckpointNV && "Function requires <VK_NV_device_diagnostic_checkpoints>" );
+
+      getDispatcher()->vkCmdSetCheckpointNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const void *>( &checkpointMarker ) );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> Queue::getCheckpointDataNV() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointDataNV &&
+                         "Function requires <VK_NV_device_diagnostic_checkpoints>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> checkpointData;
+      uint32_t                                            checkpointDataCount;
+      getDispatcher()->vkGetQueueCheckpointDataNV( static_cast<VkQueue>( m_queue ), &checkpointDataCount, nullptr );
+      checkpointData.resize( checkpointDataCount );
+      getDispatcher()->vkGetQueueCheckpointDataNV(
+        static_cast<VkQueue>( m_queue ), &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
+
+      VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
+      if ( checkpointDataCount < checkpointData.size() )
+      {
+        checkpointData.resize( checkpointDataCount );
+      }
+      return checkpointData;
+    }
+
+    //=== VK_KHR_timeline_semaphore ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValueKHR() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreCounterValueKHR &&
+                         "Function requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
+
+      uint64_t value;
+      VkResult result = getDispatcher()->vkGetSemaphoreCounterValueKHR( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), &value );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValueKHR" );
+
+      return value;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,
+                                                                                                   uint64_t timeout ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkWaitSemaphoresKHR && "Function requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
+
+      VkResult result =
+        getDispatcher()->vkWaitSemaphoresKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                   VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR",
+                   { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+
+      return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+    }
+
+    VULKAN_HPP_INLINE void Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkSignalSemaphoreKHR && "Function requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
+
+      VkResult result =
+        getDispatcher()->vkSignalSemaphoreKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
+    }
+
+    //=== VK_INTEL_performance_query ===
+
+    VULKAN_HPP_INLINE void Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkInitializePerformanceApiINTEL &&
+                         "Function requires <VK_INTEL_performance_query>" );
+
+      VkResult result = getDispatcher()->vkInitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ),
+                                                                          reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
+    }
+
+    VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkUninitializePerformanceApiINTEL &&
+                         "Function requires <VK_INTEL_performance_query>" );
+
+      getDispatcher()->vkUninitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceMarkerINTEL && "Function requires <VK_INTEL_performance_query>" );
+
+      VkResult result = getDispatcher()->vkCmdSetPerformanceMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                                         reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL &&
+                         "Function requires <VK_INTEL_performance_query>" );
+
+      VkResult result = getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                                               reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceOverrideINTEL &&
+                         "Function requires <VK_INTEL_performance_query>" );
+
+      VkResult result = getDispatcher()->vkCmdSetPerformanceOverrideINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                                           reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL
+      Device::acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL( *this, acquireInfo );
+    }
+
+    VULKAN_HPP_INLINE void Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSetPerformanceConfigurationINTEL &&
+                         "Function requires <VK_INTEL_performance_query>" );
+
+      VkResult result = getDispatcher()->vkQueueSetPerformanceConfigurationINTEL( static_cast<VkQueue>( m_queue ),
+                                                                                  static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PerformanceValueINTEL
+      Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPerformanceParameterINTEL && "Function requires <VK_INTEL_performance_query>" );
+
+      VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
+      VkResult result = getDispatcher()->vkGetPerformanceParameterINTEL(
+        static_cast<VkDevice>( m_device ), static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
+
+      return value;
+    }
+
+    //=== VK_AMD_display_native_hdr ===
+
+    VULKAN_HPP_INLINE void SwapchainKHR::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkSetLocalDimmingAMD && "Function requires <VK_AMD_display_native_hdr>" );
+
+      getDispatcher()->vkSetLocalDimmingAMD(
+        static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), static_cast<VkBool32>( localDimmingEnable ) );
+    }
+
+# if defined( VK_USE_PLATFORM_FUCHSIA )
+    //=== VK_FUCHSIA_imagepipe_surface ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+      Instance::createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo,
+                                               VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+    }
+# endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+# if defined( VK_USE_PLATFORM_METAL_EXT )
+    //=== VK_EXT_metal_surface ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+      Instance::createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo,
+                                       VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+    }
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+    //=== VK_KHR_fragment_shading_rate ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>
+      PhysicalDevice::getFragmentShadingRatesKHR() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR &&
+                         "Function requires <VK_KHR_fragment_shading_rate>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR> fragmentShadingRates;
+      uint32_t                                                                fragmentShadingRateCount;
+      VkResult                                                                result;
+      do
+      {
+        result =
+          getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &fragmentShadingRateCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
+        {
+          fragmentShadingRates.resize( fragmentShadingRateCount );
+          result = getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR(
+            static_cast<VkPhysicalDevice>( m_physicalDevice ),
+            &fragmentShadingRateCount,
+            reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
+      VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
+      if ( fragmentShadingRateCount < fragmentShadingRates.size() )
+      {
+        fragmentShadingRates.resize( fragmentShadingRateCount );
+      }
+      return fragmentShadingRates;
+    }
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D &                       fragmentSize,
+                                                const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateKHR &&
+                         "Function requires <VK_KHR_fragment_shading_rate>" );
+
+      getDispatcher()->vkCmdSetFragmentShadingRateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                       reinterpret_cast<const VkExtent2D *>( &fragmentSize ),
+                                                       reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
+    }
+
+    //=== VK_EXT_buffer_device_address ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
+      Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(
+        getDispatcher()->vkGetBufferDeviceAddressEXT &&
+        "Function requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
+
+      VkDeviceAddress result =
+        getDispatcher()->vkGetBufferDeviceAddressEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+      return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+    }
+
+    //=== VK_EXT_tooling_info ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> PhysicalDevice::getToolPropertiesEXT() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT &&
+                         "Function requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> toolProperties;
+      uint32_t                                                        toolCount;
+      VkResult                                                        result;
+      do
+      {
+        result = getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && toolCount )
+        {
+          toolProperties.resize( toolCount );
+          result = getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT(
+            static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
+      VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+      if ( toolCount < toolProperties.size() )
+      {
+        toolProperties.resize( toolCount );
+      }
+      return toolProperties;
+    }
+
+    //=== VK_KHR_present_wait ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::waitForPresent( uint64_t presentId, uint64_t timeout ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForPresentKHR && "Function requires <VK_KHR_present_wait>" );
+
+      VkResult result =
+        getDispatcher()->vkWaitForPresentKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), presentId, timeout );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                   VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::waitForPresent",
+                   { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+      return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+    }
+
+    //=== VK_NV_cooperative_matrix ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>
+      PhysicalDevice::getCooperativeMatrixPropertiesNV() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV &&
+                         "Function requires <VK_NV_cooperative_matrix>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV> properties;
+      uint32_t                                                         propertyCount;
+      VkResult                                                         result;
+      do
+      {
+        result =
+          getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && propertyCount )
+        {
+          properties.resize( propertyCount );
+          result = getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+            static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      if ( propertyCount < properties.size() )
+      {
+        properties.resize( propertyCount );
+      }
+      return properties;
+    }
+
+    //=== VK_NV_coverage_reduction_mode ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>
+      PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV &&
+                         "Function requires <VK_NV_coverage_reduction_mode>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV> combinations;
+      uint32_t                                                                combinationCount;
+      VkResult                                                                result;
+      do
+      {
+        result = getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+          static_cast<VkPhysicalDevice>( m_physicalDevice ), &combinationCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && combinationCount )
+        {
+          combinations.resize( combinationCount );
+          result = getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+            static_cast<VkPhysicalDevice>( m_physicalDevice ),
+            &combinationCount,
+            reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                   VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+      VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+      if ( combinationCount < combinations.size() )
+      {
+        combinations.resize( combinationCount );
+      }
+      return combinations;
+    }
+
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+    //=== VK_EXT_full_screen_exclusive ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR>
+      PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT &&
+                         "Function requires <VK_EXT_full_screen_exclusive>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR> presentModes;
+      uint32_t                                          presentModeCount;
+      VkResult                                          result;
+      do
+      {
+        result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                                              reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+                                                                              &presentModeCount,
+                                                                              nullptr );
+        if ( ( result == VK_SUCCESS ) && presentModeCount )
+        {
+          presentModes.resize( presentModeCount );
+          result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                                                reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+                                                                                &presentModeCount,
+                                                                                reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
+      VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      if ( presentModeCount < presentModes.size() )
+      {
+        presentModes.resize( presentModeCount );
+      }
+      return presentModes;
+    }
+
+    VULKAN_HPP_INLINE void SwapchainKHR::acquireFullScreenExclusiveModeEXT() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireFullScreenExclusiveModeEXT &&
+                         "Function requires <VK_EXT_full_screen_exclusive>" );
+
+      VkResult result = getDispatcher()->vkAcquireFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireFullScreenExclusiveModeEXT" );
+    }
+
+    VULKAN_HPP_INLINE void SwapchainKHR::releaseFullScreenExclusiveModeEXT() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseFullScreenExclusiveModeEXT &&
+                         "Function requires <VK_EXT_full_screen_exclusive>" );
+
+      VkResult result = getDispatcher()->vkReleaseFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::releaseFullScreenExclusiveModeEXT" );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR
+      Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT &&
+                         "Function requires <VK_EXT_full_screen_exclusive>" );
+
+      VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
+      VkResult result = getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT( static_cast<VkDevice>( m_device ),
+                                                                                  reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
+                                                                                  reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
+
+      return modes;
+    }
+# endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    //=== VK_EXT_headless_surface ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
+      Instance::createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo,
+                                          VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
+    }
+
+    //=== VK_KHR_buffer_device_address ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
+      Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(
+        getDispatcher()->vkGetBufferDeviceAddressKHR &&
+        "Function requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
+
+      VkDeviceAddress result =
+        getDispatcher()->vkGetBufferDeviceAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+      return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
+      Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR &&
+                         "Function requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
+
+      uint64_t result =
+        getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+
+      return result;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
+      Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR &&
+                         "Function requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
+
+      uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ),
+                                                                                   reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+
+      return result;
+    }
+
+    //=== VK_EXT_line_rasterization ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEXT && "Function requires <VK_EXT_line_rasterization>" );
+
+      getDispatcher()->vkCmdSetLineStippleEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern );
+    }
+
+    //=== VK_EXT_host_query_reset ===
+
+    VULKAN_HPP_INLINE void QueryPool::resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkResetQueryPoolEXT && "Function requires <VK_EXT_host_query_reset> or <VK_VERSION_1_2>" );
+
+      getDispatcher()->vkResetQueryPoolEXT( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), firstQuery, queryCount );
+    }
+
+    //=== VK_EXT_extended_dynamic_state ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullModeEXT &&
+                         "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdSetCullModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFaceEXT &&
+                         "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdSetFrontFaceEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopologyEXT &&
+                         "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdSetPrimitiveTopologyEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT(
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCountEXT &&
+                         "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdSetViewportWithCountEXT(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCountEXT &&
+                         "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdSetScissorWithCountEXT(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::bindVertexBuffers2EXT( uint32_t                                                                         firstBinding,
+                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     buffers,
+                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
+                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
+                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2EXT &&
+                         "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+      if ( buffers.size() != offsets.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
+      }
+      if ( !sizes.empty() && buffers.size() != sizes.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
+      }
+      if ( !strides.empty() && buffers.size() != strides.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
+      }
+
+      getDispatcher()->vkCmdBindVertexBuffers2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                   firstBinding,
+                                                   buffers.size(),
+                                                   reinterpret_cast<const VkBuffer *>( buffers.data() ),
+                                                   reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
+                                                   reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
+                                                   reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnableEXT &&
+                         "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdSetDepthTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnableEXT &&
+                         "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdSetDepthWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOpEXT &&
+                         "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdSetDepthCompareOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT &&
+                         "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnableEXT &&
+                         "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdSetStencilTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+                                                           VULKAN_HPP_NAMESPACE::StencilOp        failOp,
+                                                           VULKAN_HPP_NAMESPACE::StencilOp        passOp,
+                                                           VULKAN_HPP_NAMESPACE::StencilOp        depthFailOp,
+                                                           VULKAN_HPP_NAMESPACE::CompareOp        compareOp ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOpEXT &&
+                         "Function requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdSetStencilOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                             static_cast<VkStencilFaceFlags>( faceMask ),
+                                             static_cast<VkStencilOp>( failOp ),
+                                             static_cast<VkStencilOp>( passOp ),
+                                             static_cast<VkStencilOp>( depthFailOp ),
+                                             static_cast<VkCompareOp>( compareOp ) );
+    }
+
+    //=== VK_KHR_deferred_host_operations ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR
+      Device::createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR( *this, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t DeferredOperationKHR::getMaxConcurrency() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR &&
+                         "Function requires <VK_KHR_deferred_host_operations>" );
+
+      uint32_t result =
+        getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) );
+
+      return result;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::getResult() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationResultKHR &&
+                         "Function requires <VK_KHR_deferred_host_operations>" );
+
+      VkResult result =
+        getDispatcher()->vkGetDeferredOperationResultKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) );
+
+      return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::join() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkDeferredOperationJoinKHR && "Function requires <VK_KHR_deferred_host_operations>" );
+
+      VkResult result = getDispatcher()->vkDeferredOperationJoinKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                   VULKAN_HPP_NAMESPACE_STRING "::DeferredOperationKHR::join",
+                   { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
+
+      return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+    }
+
+    //=== VK_KHR_pipeline_executable_properties ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>
+      Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutablePropertiesKHR &&
+                         "Function requires <VK_KHR_pipeline_executable_properties>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR> properties;
+      uint32_t                                                           executableCount;
+      VkResult                                                           result;
+      do
+      {
+        result = getDispatcher()->vkGetPipelineExecutablePropertiesKHR(
+          static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && executableCount )
+        {
+          properties.resize( executableCount );
+          result = getDispatcher()->vkGetPipelineExecutablePropertiesKHR( static_cast<VkDevice>( m_device ),
+                                                                          reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
+                                                                          &executableCount,
+                                                                          reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
+      VULKAN_HPP_ASSERT( executableCount <= properties.size() );
+      if ( executableCount < properties.size() )
+      {
+        properties.resize( executableCount );
+      }
+      return properties;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>
+      Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableStatisticsKHR &&
+                         "Function requires <VK_KHR_pipeline_executable_properties>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR> statistics;
+      uint32_t                                                          statisticCount;
+      VkResult                                                          result;
+      do
+      {
+        result = getDispatcher()->vkGetPipelineExecutableStatisticsKHR(
+          static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && statisticCount )
+        {
+          statistics.resize( statisticCount );
+          result = getDispatcher()->vkGetPipelineExecutableStatisticsKHR( static_cast<VkDevice>( m_device ),
+                                                                          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+                                                                          &statisticCount,
+                                                                          reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
+      VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
+      if ( statisticCount < statistics.size() )
+      {
+        statistics.resize( statisticCount );
+      }
+      return statistics;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>
+      Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR &&
+                         "Function requires <VK_KHR_pipeline_executable_properties>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR> internalRepresentations;
+      uint32_t                                                                       internalRepresentationCount;
+      VkResult                                                                       result;
+      do
+      {
+        result = getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR(
+          static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
+        {
+          internalRepresentations.resize( internalRepresentationCount );
+          result = getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR(
+            static_cast<VkDevice>( m_device ),
+            reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
+            &internalRepresentationCount,
+            reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                   VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
+      VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
+      if ( internalRepresentationCount < internalRepresentations.size() )
+      {
+        internalRepresentations.resize( internalRepresentationCount );
+      }
+      return internalRepresentations;
+    }
+
+    //=== VK_EXT_host_image_copy ===
+
+    VULKAN_HPP_INLINE void Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToImageEXT && "Function requires <VK_EXT_host_image_copy>" );
+
+      VkResult result = getDispatcher()->vkCopyMemoryToImageEXT( static_cast<VkDevice>( m_device ),
+                                                                 reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( &copyMemoryToImageInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" );
+    }
+
+    VULKAN_HPP_INLINE void Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToMemoryEXT && "Function requires <VK_EXT_host_image_copy>" );
+
+      VkResult result = getDispatcher()->vkCopyImageToMemoryEXT( static_cast<VkDevice>( m_device ),
+                                                                 reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( &copyImageToMemoryInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" );
+    }
+
+    VULKAN_HPP_INLINE void Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToImageEXT && "Function requires <VK_EXT_host_image_copy>" );
+
+      VkResult result = getDispatcher()->vkCopyImageToImageEXT( static_cast<VkDevice>( m_device ),
+                                                                reinterpret_cast<const VkCopyImageToImageInfoEXT *>( &copyImageToImageInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" );
+    }
+
+    VULKAN_HPP_INLINE void Device::transitionImageLayoutEXT(
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkTransitionImageLayoutEXT && "Function requires <VK_EXT_host_image_copy>" );
+
+      VkResult result = getDispatcher()->vkTransitionImageLayoutEXT(
+        static_cast<VkDevice>( m_device ), transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( transitions.data() ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
+      Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(
+        getDispatcher()->vkGetImageSubresourceLayout2EXT &&
+        "Function requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
+
+      VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
+      getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
+                                                        static_cast<VkImage>( m_image ),
+                                                        reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
+                                                        reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+      return layout;
+    }
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+      Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(
+        getDispatcher()->vkGetImageSubresourceLayout2EXT &&
+        "Function requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
+
+      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+      VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR &    layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
+      getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
+                                                        static_cast<VkImage>( m_image ),
+                                                        reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
+                                                        reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+      return structureChain;
+    }
+
+    //=== VK_KHR_map_memory2 ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkMapMemory2KHR && "Function requires <VK_KHR_map_memory2>" );
+
+      void *   pData;
+      VkResult result =
+        getDispatcher()->vkMapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryMapInfoKHR *>( &memoryMapInfo ), &pData );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" );
+
+      return pData;
+    }
+
+    VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory2KHR && "Function requires <VK_KHR_map_memory2>" );
+
+      getDispatcher()->vkUnmapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryUnmapInfoKHR *>( &memoryUnmapInfo ) );
+    }
+
+    //=== VK_EXT_swapchain_maintenance1 ===
+
+    VULKAN_HPP_INLINE void Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseSwapchainImagesEXT && "Function requires <VK_EXT_swapchain_maintenance1>" );
+
+      VkResult result = getDispatcher()->vkReleaseSwapchainImagesEXT( static_cast<VkDevice>( m_device ),
+                                                                      reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( &releaseInfo ) );
"::Device::releaseSwapchainImagesEXT" ); + } + + //=== VK_NV_device_generated_commands === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + VULKAN_HPP_INLINE void + CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPreprocessGeneratedCommandsNV && + "Function requires " ); + + getDispatcher()->vkCmdPreprocessGeneratedCommandsNV( static_cast( m_commandBuffer ), + reinterpret_cast( &generatedCommandsInfo ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteGeneratedCommandsNV && + "Function requires " ); + + getDispatcher()->vkCmdExecuteGeneratedCommandsNV( static_cast( m_commandBuffer ), + static_cast( isPreprocessed ), + reinterpret_cast( &generatedCommandsInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindPipelineShaderGroupNV && + "Function requires " ); + + getDispatcher()->vkCmdBindPipelineShaderGroupNV( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( pipeline ), + groupIndex ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV + Device::createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV( *this, createInfo, allocator ); + } + + //=== VK_EXT_depth_bias_control === + + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBias2EXT && "Function requires " ); + + getDispatcher()->vkCmdSetDepthBias2EXT( static_cast( m_commandBuffer 
+      getDispatcher()->vkCmdSetDepthBias2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                              reinterpret_cast<const VkDepthBiasInfoEXT *>( &depthBiasInfo ) );
+    }
+
+    //=== VK_EXT_acquire_drm_display ===
+
+    VULKAN_HPP_INLINE void PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireDrmDisplayEXT && "Function requires <VK_EXT_acquire_drm_display>" );
+
+      VkResult result =
+        getDispatcher()->vkAcquireDrmDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), drmFd, static_cast<VkDisplayKHR>( display ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, drmFd, connectorId );
+    }
+
+    //=== VK_EXT_private_data ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot
+      Device::createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
+                                        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+                                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyPrivateDataSlotEXT &&
+                         "Function requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkDestroyPrivateDataSlotEXT(
+        static_cast<VkDevice>( m_device ),
+        static_cast<VkPrivateDataSlot>( privateDataSlot ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    }
+
+    VULKAN_HPP_INLINE void Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType      objectType_,
+                                                      uint64_t                              objectHandle,
+                                                      VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+                                                      uint64_t                              data ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkSetPrivateDataEXT && "Function requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
+
+      VkResult result = getDispatcher()->vkSetPrivateDataEXT(
+        static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType      objectType_,
+                                                                               uint64_t                              objectHandle,
+                                                                               VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPrivateDataEXT && "Function requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
+
+      uint64_t data;
+      getDispatcher()->vkGetPrivateDataEXT(
+        static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
+
+      return data;
+    }
+
+    //=== VK_KHR_video_encode_queue ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR(
+      const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR &&
+                         "Function requires <VK_KHR_video_encode_queue>" );
+
+      VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR qualityLevelProperties;
+      VkResult result = getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ),
+        reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ),
+        reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
+                   VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" );
+
return qualityLevelProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR && + "Function requires " ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR & qualityLevelProperties = + structureChain.template get(); + VkResult result = getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &qualityLevelInfo ), + reinterpret_cast( &qualityLevelProperties ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair> + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetEncodedVideoSessionParametersKHR && + "Function requires " ); + + std::pair> data_; + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first; + std::vector & data = data_.second; + size_t dataSize; + VkResult result; + do + { + result = getDispatcher()->vkGetEncodedVideoSessionParametersKHR( + static_cast( m_device ), + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ); + if ( ( result == VK_SUCCESS ) && dataSize ) + { + data.resize( dataSize ); + result = getDispatcher()->vkGetEncodedVideoSessionParametersKHR( + static_cast( m_device ), + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return data_; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair, std::vector> + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetEncodedVideoSessionParametersKHR && + "Function requires " ); + + std::pair, std::vector> data_; + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = + data_.first.template get(); + std::vector & data = data_.second; + size_t dataSize; + VkResult result; + do + { + result = getDispatcher()->vkGetEncodedVideoSessionParametersKHR( + static_cast( m_device ), + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ); + if ( ( result == VK_SUCCESS ) && dataSize ) + { + data.resize( dataSize ); + result = getDispatcher()->vkGetEncodedVideoSessionParametersKHR( + static_cast( m_device ), + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return data_; + } + + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo 
) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEncodeVideoKHR && "Function requires " ); + + getDispatcher()->vkCmdEncodeVideoKHR( static_cast( m_commandBuffer ), reinterpret_cast( &encodeInfo ) ); + } + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV + Device::createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector CudaModuleNV::getCache() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetCudaModuleCacheNV && "Function requires " ); + + std::vector cacheData; + size_t cacheSize; + VkResult result; + do + { + result = getDispatcher()->vkGetCudaModuleCacheNV( static_cast( m_device ), static_cast( m_module ), &cacheSize, nullptr ); + if ( ( result == VK_SUCCESS ) && cacheSize ) + { + cacheData.resize( cacheSize ); + result = getDispatcher()->vkGetCudaModuleCacheNV( + static_cast( m_device ), static_cast( m_module ), &cacheSize, reinterpret_cast( cacheData.data() ) ); + } + } while ( result == VK_INCOMPLETE ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CudaModuleNV::getCache" ); + VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); + if ( cacheSize < cacheData.size() ) + { + cacheData.resize( cacheSize ); + } + return cacheData; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV + Device::createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCudaLaunchKernelNV && "Function requires " ); + + getDispatcher()->vkCmdCudaLaunchKernelNV( static_cast( m_commandBuffer ), reinterpret_cast( &launchInfo ) ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT Device::exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo; + getDispatcher()->vkExportMetalObjectsEXT( static_cast( m_device ), reinterpret_cast( &metalObjectsInfo ) ); + + return metalObjectsInfo; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get(); + getDispatcher()->vkExportMetalObjectsEXT( static_cast( m_device ), reinterpret_cast( &metalObjectsInfo ) ); + + return structureChain; + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const 
VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent2KHR && "Function <vkCmdSetEvent2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdSetEvent2KHR(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event               event,
+                                                          VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent2KHR && "Function <vkCmdResetEvent2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdResetEvent2KHR(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
+    }
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const &          events,
+                                     VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents2KHR && "Function <vkCmdWaitEvents2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
+      if ( events.size() != dependencyInfos.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
+      }
+
+      getDispatcher()->vkCmdWaitEvents2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                            events.size(),
+                                            reinterpret_cast<const VkEvent *>( events.data() ),
+                                            reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier2KHR &&
+                         "Function <vkCmdPipelineBarrier2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdPipelineBarrier2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                 reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
+                                                              VULKAN_HPP_NAMESPACE::QueryPool           queryPool,
+                                                              uint32_t                                  query ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp2KHR &&
+                         "Function <vkCmdWriteTimestamp2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
+
+      getDispatcher()->vkCmdWriteTimestamp2KHR(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
+    }
+
+    VULKAN_HPP_INLINE void Queue::submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
+                                              VULKAN_HPP_NAMESPACE::Fence                                                       fence ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit2KHR && "Function <vkQueueSubmit2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
+
+      VkResult result = getDispatcher()->vkQueueSubmit2KHR(
+        static_cast<VkQueue>( m_queue ), submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
+                                                                 VULKAN_HPP_NAMESPACE::Buffer              dstBuffer,
+                                                                 VULKAN_HPP_NAMESPACE::DeviceSize          dstOffset,
+                                                                 uint32_t                                  marker ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarker2AMD && "Function <vkCmdWriteBufferMarker2AMD> requires <VK_AMD_buffer_marker>" );
+
+      getDispatcher()->vkCmdWriteBufferMarker2AMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                   static_cast<VkPipelineStageFlags2>( stage ),
+                                                   static_cast<VkBuffer>( dstBuffer ),
+                                                   static_cast<VkDeviceSize>( dstOffset ),
+                                                   marker );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> Queue::getCheckpointData2NV() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointData2NV &&
+                         "Function <vkGetQueueCheckpointData2NV> requires <VK_NV_device_diagnostic_checkpoints>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> checkpointData;
+      uint32_t                                              checkpointDataCount;
+      getDispatcher()->vkGetQueueCheckpointData2NV( static_cast<VkQueue>( m_queue ), &checkpointDataCount, nullptr );
+      checkpointData.resize(
checkpointDataCount ); + getDispatcher()->vkGetQueueCheckpointData2NV( + static_cast( m_queue ), &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } + + //=== VK_EXT_descriptor_buffer === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DescriptorSetLayout::getSizeEXT() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSizeEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes; + getDispatcher()->vkGetDescriptorSetLayoutSizeEXT( static_cast( m_device ), + static_cast( m_descriptorSetLayout ), + reinterpret_cast( &layoutSizeInBytes ) ); + + return layoutSizeInBytes; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize + DescriptorSetLayout::getBindingOffsetEXT( uint32_t binding ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutBindingOffsetEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::DeviceSize offset; + getDispatcher()->vkGetDescriptorSetLayoutBindingOffsetEXT( + static_cast( m_device ), static_cast( m_descriptorSetLayout ), binding, reinterpret_cast( &offset ) ); + + return offset; + } + + VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, + size_t dataSize, + void * pDescriptor ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorEXT && "Function requires " ); + + getDispatcher()->vkGetDescriptorEXT( + static_cast( m_device ), reinterpret_cast( &descriptorInfo ), dataSize, pDescriptor ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType + Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorEXT && "Function requires " ); + + DescriptorType descriptor; + getDispatcher()->vkGetDescriptorEXT( static_cast( m_device ), + reinterpret_cast( &descriptorInfo ), + sizeof( DescriptorType ), + reinterpret_cast( &descriptor ) ); + + return descriptor; + } + + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindingInfos ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBuffersEXT && "Function requires " ); + + getDispatcher()->vkCmdBindDescriptorBuffersEXT( static_cast( m_commandBuffer ), + bindingInfos.size(), + reinterpret_cast( bindingInfos.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferIndices, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDescriptorBufferOffsetsEXT && + "Function requires " ); + if ( bufferIndices.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" ); + } + + getDispatcher()->vkCmdSetDescriptorBufferOffsetsEXT( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + bufferIndices.size(), + 
bufferIndices.data(), + reinterpret_cast( offsets.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplersEXT && + "Function requires " ); + + getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplersEXT( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( layout ), set ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType + Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureDescriptorDataEXT && + "Function requires " ); + + DataType data; + VkResult result = getDispatcher()->vkGetBufferOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( &info ), &data ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" ); + + return data; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType + Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageOpaqueCaptureDescriptorDataEXT && + "Function requires " ); + + DataType data; + VkResult result = getDispatcher()->vkGetImageOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( &info ), &data ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" ); + + return data; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType + Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewOpaqueCaptureDescriptorDataEXT && + "Function requires " ); + + DataType data; + VkResult result = getDispatcher()->vkGetImageViewOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( &info ), &data ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" ); + + return data; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType + Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSamplerOpaqueCaptureDescriptorDataEXT && + "Function requires " ); + + DataType data; + VkResult result = getDispatcher()->vkGetSamplerOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( &info ), &data ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" ); + + return data; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT && + "Function requires " ); + + DataType data; + VkResult result = getDispatcher()->vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( &info ), 
&data ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" ); + + return data; + } + + //=== VK_NV_fragment_shading_rate_enums === + + VULKAN_HPP_INLINE void + CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateEnumNV && + "Function requires " ); + + getDispatcher()->vkCmdSetFragmentShadingRateEnumNV( static_cast( m_commandBuffer ), + static_cast( shadingRate ), + reinterpret_cast( combinerOps ) ); + } + + //=== VK_EXT_mesh_shader === + + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksEXT && "Function requires " ); + + getDispatcher()->vkCmdDrawMeshTasksEXT( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectEXT && "Function requires " ); + + getDispatcher()->vkCmdDrawMeshTasksIndirectEXT( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT && "Function requires " ); + + getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + //=== VK_KHR_copy_commands2 === + + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer2KHR && "Function requires or " ); + + getDispatcher()->vkCmdCopyBuffer2KHR( static_cast( m_commandBuffer ), reinterpret_cast( ©BufferInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage2KHR && "Function requires or " ); + + getDispatcher()->vkCmdCopyImage2KHR( static_cast( m_commandBuffer ), reinterpret_cast( ©ImageInfo ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage2KHR && + "Function requires or " ); + + getDispatcher()->vkCmdCopyBufferToImage2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( ©BufferToImageInfo ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT 
+ { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer2KHR && + "Function requires or " ); + + getDispatcher()->vkCmdCopyImageToBuffer2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( ©ImageToBufferInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage2KHR && "Function requires or " ); + + getDispatcher()->vkCmdBlitImage2KHR( static_cast( m_commandBuffer ), reinterpret_cast( &blitImageInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage2KHR && "Function requires or " ); + + getDispatcher()->vkCmdResolveImage2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( &resolveImageInfo ) ); + } + + //=== VK_EXT_device_fault === + + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::pair> + Device::getFaultInfoEXT() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceFaultInfoEXT && "Function requires " ); + + std::pair data_; + VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data_.first; + VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data_.second; + VkResult result = getDispatcher()->vkGetDeviceFaultInfoEXT( + static_cast( m_device ), reinterpret_cast( &faultCounts ), reinterpret_cast( &faultInfo ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); + + return std::make_pair( static_cast( result ), data_ ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + + VULKAN_HPP_INLINE void DisplayKHR::acquireWinrtNV() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireWinrtDisplayNV && "Function requires " ); + + VkResult result = getDispatcher()->vkAcquireWinrtDisplayNV( static_cast( m_physicalDevice ), static_cast( m_display ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::acquireWinrtNV" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId ) const + { + return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, deviceRelativeId ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + Instance::createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT && + "Function requires " ); + + VkBool32 result = + getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT( static_cast( m_physicalDevice ), queueFamilyIndex, &dfb ); + + return static_cast( result ); + } +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( + 
VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexBindingDescriptions, + VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexAttributeDescriptions ) const + VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetVertexInputEXT( static_cast( m_commandBuffer ), + vertexBindingDescriptions.size(), + reinterpret_cast( vertexBindingDescriptions.data() ), + vertexAttributeDescriptions.size(), + reinterpret_cast( vertexAttributeDescriptions.data() ) ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t + Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandleFUCHSIA && "Function requires " ); + + zx_handle_t zirconHandle; + VkResult result = getDispatcher()->vkGetMemoryZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); + + return zirconHandle; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA + Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties; + VkResult result = + getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA( static_cast( m_device ), + static_cast( handleType ), + zirconHandle, + reinterpret_cast( &memoryZirconHandleProperties ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); + + return memoryZirconHandleProperties; + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + VULKAN_HPP_INLINE void + Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA && + "Function requires " ); + + VkResult result = getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( &importSemaphoreZirconHandleInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t + Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA && + "Function requires " ); + + zx_handle_t zirconHandle; + VkResult result = getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); + + return zirconHandle; + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA + Device::createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void BufferCollectionFUCHSIA::setImageConstraints( const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA && + "Function requires " ); + + VkResult result = + getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast( m_device ), + static_cast( m_collection ), + reinterpret_cast( &imageConstraintsInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setImageConstraints" ); + } + + VULKAN_HPP_INLINE void + BufferCollectionFUCHSIA::setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA && + "Function requires " ); + + VkResult result = + getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast( m_device ), + static_cast( m_collection ), + reinterpret_cast( &bufferConstraintsInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setBufferConstraints" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA BufferCollectionFUCHSIA::getProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties; + VkResult result = getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA( static_cast( m_device ), + static_cast( m_collection ), + reinterpret_cast( &properties ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::getProperties" ); + + return properties; + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize; + VkResult result = getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + static_cast( m_device ), static_cast( m_renderPass ), reinterpret_cast( &maxWorkgroupSize ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); + + return std::make_pair( static_cast( result ), maxWorkgroupSize ); + } + + VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSubpassShadingHUAWEI && "Function requires " ); + + getDispatcher()->vkCmdSubpassShadingHUAWEI( static_cast( m_commandBuffer ) ); + } + + //=== VK_HUAWEI_invocation_mask === + + VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindInvocationMaskHUAWEI && 
"Function requires " ); + + getDispatcher()->vkCmdBindInvocationMaskHUAWEI( + static_cast( m_commandBuffer ), static_cast( imageView ), static_cast( imageLayout ) ); + } + + //=== VK_NV_external_memory_rdma === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RemoteAddressNV + Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryRemoteAddressNV && "Function requires " ); + + VULKAN_HPP_NAMESPACE::RemoteAddressNV address; + VkResult result = getDispatcher()->vkGetMemoryRemoteAddressNV( static_cast( m_device ), + reinterpret_cast( &memoryGetRemoteAddressInfo ), + reinterpret_cast( &address ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); + + return address; + } + + //=== VK_EXT_pipeline_properties === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BaseOutStructure + Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelinePropertiesEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties; + VkResult result = getDispatcher()->vkGetPipelinePropertiesEXT( static_cast( m_device ), + reinterpret_cast( &pipelineInfo ), + reinterpret_cast( &pipelineProperties ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" ); + + return pipelineProperties; + } + + //=== VK_EXT_extended_dynamic_state2 === + + VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPatchControlPointsEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetPatchControlPointsEXT( static_cast( m_commandBuffer ), patchControlPoints ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnableEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthBiasEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetLogicOpEXT( static_cast( m_commandBuffer ), static_cast( logicOp ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); + } + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR + Instance::createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX && + "Function requires " ); + + VkBool32 result = + getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX( static_cast( m_physicalDevice ), queueFamilyIndex, &window ); + + return static_cast( result ); + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteEnables ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteEnableEXT && "Function requires " ); + + getDispatcher()->vkCmdSetColorWriteEnableEXT( + static_cast( m_commandBuffer ), colorWriteEnables.size(), reinterpret_cast( colorWriteEnables.data() ) ); + } + + //=== VK_KHR_ray_tracing_maintenance1 === + + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirect2KHR && "Function requires " ); + + getDispatcher()->vkCmdTraceRaysIndirect2KHR( static_cast( m_commandBuffer ), static_cast( indirectDeviceAddress ) ); + } + + //=== VK_EXT_multi_draw === + + VULKAN_HPP_INLINE void + CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & vertexInfo, + uint32_t instanceCount, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiEXT && "Function requires " ); + + getDispatcher()->vkCmdDrawMultiEXT( static_cast( m_commandBuffer ), + vertexInfo.size(), + reinterpret_cast( vertexInfo.data() ), + instanceCount, + firstInstance, + vertexInfo.stride() ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & indexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + Optional vertexOffset ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiIndexedEXT && "Function requires " ); + + getDispatcher()->vkCmdDrawMultiIndexedEXT( static_cast( m_commandBuffer ), + indexInfo.size(), + reinterpret_cast( indexInfo.data() ), + instanceCount, + firstInstance, + indexInfo.stride(), + static_cast( vertexOffset ) ); + } + + //=== VK_EXT_opacity_micromap === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::MicromapEXT + Device::createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::MicromapEXT( *this, createInfo, allocator ); + } + + VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildMicromapsEXT && "Function requires " ); + + getDispatcher()->vkCmdBuildMicromapsEXT( + static_cast( m_commandBuffer ), infos.size(), reinterpret_cast( infos.data() ) ); + } + + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBuildMicromapsEXT && "Function requires " ); + + VkResult result = getDispatcher()->vkBuildMicromapsEXT( static_cast( m_device ), + static_cast( deferredOperation ), + infos.size(), + reinterpret_cast( infos.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapEXT && "Function requires " ); + + VkResult result = getDispatcher()->vkCopyMicromapEXT( + static_cast( m_device ), static_cast( deferredOperation ), reinterpret_cast( &info ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapToMemoryEXT && "Function requires " ); + + VkResult result = getDispatcher()->vkCopyMicromapToMemoryEXT( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToMicromapEXT && "Function requires " ); + + VkResult result = getDispatcher()->vkCopyMemoryToMicromapEXT( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT && "Function requires " ); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( 
DataType ) ); + VkResult result = getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast( m_device ), + micromaps.size(), + reinterpret_cast( micromaps.data() ), + static_cast( queryType ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + stride ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" ); + + return data; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType + Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT && "Function requires " ); + + DataType data; + VkResult result = getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast( m_device ), + micromaps.size(), + reinterpret_cast( micromaps.data() ), + static_cast( queryType ), + sizeof( DataType ), + reinterpret_cast( &data ), + stride ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" ); + + return data; + } + + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapEXT && "Function requires " ); + + getDispatcher()->vkCmdCopyMicromapEXT( static_cast( m_commandBuffer ), reinterpret_cast( &info ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapToMemoryEXT && "Function requires " ); + + getDispatcher()->vkCmdCopyMicromapToMemoryEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &info ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToMicromapEXT && "Function requires " ); + + getDispatcher()->vkCmdCopyMemoryToMicromapEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &info ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteMicromapsPropertiesEXT && + "Function requires " ); + + getDispatcher()->vkCmdWriteMicromapsPropertiesEXT( static_cast( m_commandBuffer ), + micromaps.size(), + reinterpret_cast( micromaps.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMicromapCompatibilityEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; + getDispatcher()->vkGetDeviceMicromapCompatibilityEXT( static_cast( m_device ), + reinterpret_cast( &versionInfo ), + reinterpret_cast( &compatibility ) ); + + return compatibility; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT + Device::getMicromapBuildSizesEXT( 
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMicromapBuildSizesEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo; + getDispatcher()->vkGetMicromapBuildSizesEXT( static_cast( m_device ), + static_cast( buildType ), + reinterpret_cast( &buildInfo ), + reinterpret_cast( &sizeInfo ) ); + + return sizeInfo; + } + + //=== VK_HUAWEI_cluster_culling_shader === + + VULKAN_HPP_INLINE void CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawClusterHUAWEI && "Function requires " ); + + getDispatcher()->vkCmdDrawClusterHUAWEI( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawClusterIndirectHUAWEI && + "Function requires " ); + + getDispatcher()->vkCmdDrawClusterIndirectHUAWEI( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); + } + + //=== VK_EXT_pageable_device_local_memory === + + VULKAN_HPP_INLINE void DeviceMemory::setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetDeviceMemoryPriorityEXT && + "Function requires " ); + + getDispatcher()->vkSetDeviceMemoryPriorityEXT( static_cast( m_device ), static_cast( m_memory ), priority ); + } + + //=== VK_KHR_maintenance4 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( 
&memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR && + "Function requires or " ); + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR( + static_cast( m_device ), reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + //=== VK_VALVE_descriptor_set_host_mapping === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE Device::getDescriptorSetLayoutHostMappingInfoVALVE( + const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping; + getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast( m_device ), + reinterpret_cast( &bindingReference ), + reinterpret_cast( &hostMapping ) ); + + return hostMapping; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * DescriptorSet::getHostMappingVALVE() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetHostMappingVALVE && + "Function requires " ); + + void * pData; + getDispatcher()->vkGetDescriptorSetHostMappingVALVE( static_cast( m_device ), static_cast( m_descriptorSet ), &pData ); + + return pData; + } + + //=== VK_NV_copy_memory_indirect === + + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryIndirectNV && "Function requires " ); + + getDispatcher()->vkCmdCopyMemoryIndirectNV( + static_cast( m_commandBuffer ), static_cast( copyBufferAddress ), copyCount, stride ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( + VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t stride, + 
VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageSubresources ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToImageIndirectNV && + "Function requires " ); + + getDispatcher()->vkCmdCopyMemoryToImageIndirectNV( static_cast( m_commandBuffer ), + static_cast( copyBufferAddress ), + imageSubresources.size(), + stride, + static_cast( dstImage ), + static_cast( dstImageLayout ), + reinterpret_cast( imageSubresources.data() ) ); + } + + //=== VK_NV_memory_decompression === + + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & decompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecompressMemoryNV && "Function requires " ); + + getDispatcher()->vkCmdDecompressMemoryNV( static_cast( m_commandBuffer ), + decompressMemoryRegions.size(), + reinterpret_cast( decompressMemoryRegions.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecompressMemoryIndirectCountNV && + "Function requires " ); + + getDispatcher()->vkCmdDecompressMemoryIndirectCountNV( static_cast( m_commandBuffer ), + static_cast( indirectCommandsAddress ), + static_cast( indirectCommandsCountAddress ), + stride ); + } + + //=== VK_NV_device_generated_commands_compute === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdUpdatePipelineIndirectBufferNV && + "Function requires " ); + + getDispatcher()->vkCmdUpdatePipelineIndirectBufferNV( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getPipelineIndirectAddressNV( const 
VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectDeviceAddressNV && "Function requires <VK_NV_device_generated_commands_compute>" );
+
+      VkDeviceAddress result = getDispatcher()->vkGetPipelineIndirectDeviceAddressNV(
+        static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( &info ) );
+
+      return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+    }
+
+    //=== VK_EXT_extended_dynamic_state3 ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetTessellationDomainOriginEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetTessellationDomainOriginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampEnableEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetDepthClampEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClampEnable ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPolygonModeEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetPolygonModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPolygonMode>( polygonMode ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationSamplesEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetRasterizationSamplesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
+                                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleMaskEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+      if ( sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32 )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+                          "::CommandBuffer::setSampleMaskEXT: sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32" );
+      }
+
+      getDispatcher()->vkCmdSetSampleMaskEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                              static_cast<VkSampleCountFlagBits>( samples ),
+                                              reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToCoverageEnable ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToOneEnableEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetAlphaToOneEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToOneEnable ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEnableEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetLogicOpEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( logicOpEnable ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT(
+      uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEnableEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetColorBlendEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                    firstAttachment,
+                                                    colorBlendEnables.size(),
+                                                    reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT(
+      uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEquationEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetColorBlendEquationEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                      firstAttachment,
+                                                      colorBlendEquations.size(),
+                                                      reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT(
+      uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteMaskEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetColorWriteMaskEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                  firstAttachment,
+                                                  colorWriteMasks.size(),
+                                                  reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationStreamEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetRasterizationStreamEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), rasterizationStream );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setConservativeRasterizationModeEXT(
+      VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetConservativeRasterizationModeEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetConservativeRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                                 static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), extraPrimitiveOverestimationSize );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipEnableEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetDepthClipEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClipEnable ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEnableEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetSampleLocationsEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( sampleLocationsEnable ) );
+    }
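> Editorial note, not part of the vendored header: `setSampleMaskEXT` above is the one setter in this block that validates its input, throwing `LogicError` unless the mask holds exactly one 32-bit word per 32 samples, rounded up. A minimal sketch of a conforming call, assuming an already-recording `vk::raii::CommandBuffer` and a device with `VK_EXT_extended_dynamic_state3` enabled:

```cpp
#include <vulkan/vulkan_raii.hpp>
#include <vector>

void setFullSampleMask( vk::raii::CommandBuffer const & cmd, vk::SampleCountFlagBits samples )
{
  // One word per 32 samples, rounded up: the same (samples + 31) / 32 the wrapper checks.
  uint32_t wordCount = ( static_cast<uint32_t>( samples ) + 31 ) / 32;
  std::vector<vk::SampleMask> mask( wordCount, ~0u );  // enable every sample
  cmd.setSampleMaskEXT( samples, mask );
}
```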
+    VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT(
+      uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendAdvancedEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetColorBlendAdvancedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                      firstAttachment,
+                                                      colorBlendAdvanced.size(),
+                                                      reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetProvokingVertexModeEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetProvokingVertexModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineRasterizationModeEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetLineRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEnableEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetLineStippleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stippledLineEnable ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( negativeOneToOne ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingEnableNV && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetViewportWScalingEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( viewportWScalingEnable ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV(
+      uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportSwizzleNV && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetViewportSwizzleNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                  firstViewport,
+                                                  viewportSwizzles.size(),
+                                                  reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorEnableNV && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetCoverageToColorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageToColorEnable ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorLocationNV && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetCoverageToColorLocationNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageToColorLocation );
+    }
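> Editorial note, not part of the vendored header: the `setColorBlend*EXT` family above takes a `firstAttachment` index plus a parallel array, so per-attachment blend state can be flipped at record time without pipeline variants. A minimal sketch enabling standard alpha blending on attachment 0; the single values rely on `ArrayProxy`'s one-element constructor, and the command buffer is assumed to be recording with the extension enabled:

```cpp
#include <vulkan/vulkan_raii.hpp>

void enableAlphaBlend( vk::raii::CommandBuffer const & cmd )
{
  vk::Bool32 enable = VK_TRUE;
  vk::ColorBlendEquationEXT eq{ vk::BlendFactor::eSrcAlpha, vk::BlendFactor::eOneMinusSrcAlpha, vk::BlendOp::eAdd,
                                vk::BlendFactor::eOne,      vk::BlendFactor::eZero,             vk::BlendOp::eAdd };
  vk::ColorComponentFlags mask = vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG |
                                 vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA;

  cmd.setColorBlendEnableEXT( 0, enable );    // ArrayProxy accepts a single element
  cmd.setColorBlendEquationEXT( 0, eq );
  cmd.setColorWriteMaskEXT( 0, mask );
}
```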
}
+
+    VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationModeNV && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetCoverageModulationModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableEnableNV && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetCoverageModulationTableEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageModulationTableEnable ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableNV && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetCoverageModulationTableNV(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), coverageModulationTable.size(), coverageModulationTable.data() );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetShadingRateImageEnableNV && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetShadingRateImageEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( shadingRateImageEnable ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                                   static_cast<VkBool32>( representativeFragmentTestEnable ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageReductionModeNV && "Function requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetCoverageReductionModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
+    }
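> Editorial note, not part of the vendored header: every wrapper in this section only *asserts* that the dispatcher loaded the entry point, so the matching extension must be requested at device creation or the call crashes in release builds. A minimal sketch of creating a device with `VK_EXT_extended_dynamic_state3` enabled; `gpu` and `queueFamily` are assumed to come from the usual instance and queue-family queries:

```cpp
#include <vulkan/vulkan_raii.hpp>

vk::raii::Device makeDevice( vk::raii::PhysicalDevice const & gpu, uint32_t queueFamily )
{
  float priority = 1.0f;
  vk::DeviceQueueCreateInfo queueInfo{ {}, queueFamily, 1, &priority };
  const char * extensions[] = { VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME };
  vk::DeviceCreateInfo info{ {}, queueInfo, {}, extensions };
  return vk::raii::Device( gpu, info );  // throws on failure in the RAII binding
}
```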
+    //=== VK_EXT_shader_module_identifier ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT ShaderModule::getIdentifierEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleIdentifierEXT && "Function requires <VK_EXT_shader_module_identifier>" );
+
+      VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
+      getDispatcher()->vkGetShaderModuleIdentifierEXT(
+        static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( m_shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
+
+      return identifier;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
+      Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT && "Function requires <VK_EXT_shader_module_identifier>" );
+
+      VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
+      getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT( static_cast<VkDevice>( m_device ),
+                                                                 reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
+                                                                 reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
+
+      return identifier;
+    }
+
+    //=== VK_NV_optical_flow ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>
+      PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV && "Function requires <VK_NV_optical_flow>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV> imageFormatProperties;
+      uint32_t formatCount;
+      VkResult result;
+      do
+      {
+        result = getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
+          static_cast<VkPhysicalDevice>( m_physicalDevice ),
+          reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
+          &formatCount,
+          nullptr );
+        if ( ( result == VK_SUCCESS ) && formatCount )
+        {
+          imageFormatProperties.resize( formatCount );
+          result = getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
+            static_cast<VkPhysicalDevice>( m_physicalDevice ),
+            reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
+            &formatCount,
+            reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
+      VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
+      if ( formatCount < imageFormatProperties.size() )
+      {
+        imageFormatProperties.resize( formatCount );
+      }
+      return imageFormatProperties;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV
+      Device::createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
+                                          VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV( *this, createInfo, allocator );
+    }
+
+    VULKAN_HPP_INLINE void OpticalFlowSessionNV::bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
+                                                            VULKAN_HPP_NAMESPACE::ImageView view,
+                                                            VULKAN_HPP_NAMESPACE::ImageLayout layout ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkBindOpticalFlowSessionImageNV && "Function requires <VK_NV_optical_flow>" );
+
+      VkResult result = getDispatcher()->vkBindOpticalFlowSessionImageNV( static_cast<VkDevice>( m_device ),
+                                                                          static_cast<VkOpticalFlowSessionNV>( m_session ),
+                                                                          static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
+                                                                          static_cast<VkImageView>( view ),
+                                                                          static_cast<VkImageLayout>( layout ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::OpticalFlowSessionNV::bindImage" );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
+                                                                const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdOpticalFlowExecuteNV && "Function requires <VK_NV_optical_flow>" );
+
+      getDispatcher()->vkCmdOpticalFlowExecuteNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                  static_cast<VkOpticalFlowSessionNV>( session ),
+                                                  reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
+    }
+
+    //=== VK_KHR_maintenance5 ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                               VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                               VULKAN_HPP_NAMESPACE::DeviceSize size,
+                                                               VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer2KHR && "Function requires <VK_KHR_maintenance5>" );
+
+      getDispatcher()->vkCmdBindIndexBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                 static_cast<VkBuffer>( buffer ),
+                                                 static_cast<VkDeviceSize>( offset ),
+                                                 static_cast<VkDeviceSize>( size ),
+                                                 static_cast<VkIndexType>( indexType ) );
+    }
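> Editorial note, not part of the vendored header: `getOpticalFlowImageFormatsNV` above is one of several wrappers built on Vulkan's standard two-call enumeration idiom: query the count, size the buffer, fetch, and retry while the driver reports `VK_INCOMPLETE` because the count changed in between. A standalone sketch of the same idiom against the core C API, using `vkEnumeratePhysicalDevices` for brevity:

```cpp
#include <vulkan/vulkan.h>
#include <vector>

std::vector<VkPhysicalDevice> enumerateDevices( VkInstance instance )
{
  std::vector<VkPhysicalDevice> devices;
  uint32_t count = 0;
  VkResult result;
  do
  {
    result = vkEnumeratePhysicalDevices( instance, &count, nullptr );
    if ( result == VK_SUCCESS && count )
    {
      devices.resize( count );
      result = vkEnumeratePhysicalDevices( instance, &count, devices.data() );
    }
  } while ( result == VK_INCOMPLETE );  // count changed between the two calls: go again
  devices.resize( count );              // the fetch may have written fewer entries
  return devices;
}
```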
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
+      Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderingAreaGranularityKHR && "Function requires <VK_KHR_maintenance5>" );
+
+      VULKAN_HPP_NAMESPACE::Extent2D granularity;
+      getDispatcher()->vkGetRenderingAreaGranularityKHR( static_cast<VkDevice>( m_device ),
+                                                         reinterpret_cast<const VkRenderingAreaInfoKHR *>( &renderingAreaInfo ),
+                                                         reinterpret_cast<VkExtent2D *>( &granularity ) );
+
+      return granularity;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
+      Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR && "Function requires <VK_KHR_maintenance5>" );
+
+      VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
+      getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast<VkDevice>( m_device ),
+                                                             reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ),
+                                                             reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+      return layout;
+    }
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+      Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR && "Function requires <VK_KHR_maintenance5>" );
+
+      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+      VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
+      getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast<VkDevice>( m_device ),
+                                                             reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ),
+                                                             reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+      return structureChain;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
+      Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2KHR &&
+                         "Function requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
+
+      VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
+      getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ),
+                                                        static_cast<VkImage>( m_image ),
+                                                        reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
+                                                        reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+      return layout;
+    }
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+      Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2KHR &&
+                         "Function requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
+
+      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+      VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
+      getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ),
+                                                        static_cast<VkImage>( m_image ),
+                                                        reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
+                                                        reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+      return structureChain;
+    }
+
+    //=== VK_EXT_shader_object ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
+      Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
+                                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::ShaderEXTs( *this, createInfos, allocator );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ShaderEXT
+      Device::createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo,
+                               VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    {
+      return VULKAN_HPP_RAII_NAMESPACE::ShaderEXT( *this, createInfo, allocator );
+    }
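> Editorial note, not part of the vendored header: the templated overloads above return a `vk::StructureChain`, letting a caller pull extension structs that chain off `SubresourceLayout2KHR` in a single query. A sketch reading `VK_EXT_image_compression_control` data for mip 0, layer 0 of an image; it assumes that extension (or an equivalent provider of `vkGetImageSubresourceLayout2KHR`) is enabled on the device:

```cpp
#include <vulkan/vulkan_raii.hpp>

vk::ImageCompressionPropertiesEXT queryCompression( vk::raii::Image const & image )
{
  vk::ImageSubresource2KHR subresource{ vk::ImageSubresource{ vk::ImageAspectFlagBits::eColor, 0, 0 } };
  auto chain = image.getSubresourceLayout2KHR<vk::SubresourceLayout2KHR, vk::ImageCompressionPropertiesEXT>( subresource );
  return chain.get<vk::ImageCompressionPropertiesEXT>();  // pNext was wired up by StructureChain
}
```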
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> ShaderEXT::getBinaryData() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderBinaryDataEXT && "Function requires <VK_EXT_shader_object>" );
+
+      std::vector<uint8_t> data;
+      size_t dataSize;
+      VkResult result;
+      do
+      {
+        result = getDispatcher()->vkGetShaderBinaryDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( m_shader ), &dataSize, nullptr );
+        if ( ( result == VK_SUCCESS ) && dataSize )
+        {
+          data.resize( dataSize );
+          result = getDispatcher()->vkGetShaderBinaryDataEXT(
+            static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( m_shader ), &dataSize, reinterpret_cast<void *>( data.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ShaderEXT::getBinaryData" );
+      VULKAN_HPP_ASSERT( dataSize <= data.size() );
+      if ( dataSize < data.size() )
+      {
+        data.resize( dataSize );
+      }
+      return data;
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
+                                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadersEXT && "Function requires <VK_EXT_shader_object>" );
+      if ( stages.size() != shaders.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" );
+      }
+
+      getDispatcher()->vkCmdBindShadersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                            stages.size(),
+                                            reinterpret_cast<const VkShaderStageFlagBits *>( stages.data() ),
+                                            reinterpret_cast<const VkShaderEXT *>( shaders.data() ) );
+    }
+
+    //=== VK_QCOM_tile_properties ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> Framebuffer::getTilePropertiesQCOM() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetFramebufferTilePropertiesQCOM && "Function requires <VK_QCOM_tile_properties>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> properties;
+      uint32_t propertiesCount;
+      VkResult result;
+      do
+      {
+        result = getDispatcher()->vkGetFramebufferTilePropertiesQCOM(
+          static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( m_framebuffer ), &propertiesCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && propertiesCount )
+        {
+          properties.resize( propertiesCount );
+          result = getDispatcher()->vkGetFramebufferTilePropertiesQCOM( static_cast<VkDevice>( m_device ),
+                                                                        static_cast<VkFramebuffer>( m_framebuffer ),
+                                                                        &propertiesCount,
+                                                                        reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+
+      VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
+      if ( propertiesCount < properties.size() )
+      {
+        properties.resize( propertiesCount );
+      }
+      return properties;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM
+      Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM && "Function requires <VK_QCOM_tile_properties>" );
+
+      VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties;
+      getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM( static_cast<VkDevice>( m_device ),
+                                                                reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ),
+                                                                reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) );
+
+      return properties;
+    }
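> Editorial note, not part of the vendored header: `bindShadersEXT` above pairs each pipeline stage with exactly one shader object and throws `LogicError` when the two arrays disagree in length. A minimal sketch binding a vertex/fragment pair; the `vk::ShaderEXT` handles are assumed to come from `createShadersEXT` on a device with `VK_EXT_shader_object` enabled:

```cpp
#include <vulkan/vulkan_raii.hpp>
#include <array>

void bindGraphicsShaders( vk::raii::CommandBuffer const & cmd, vk::ShaderEXT vert, vk::ShaderEXT frag )
{
  std::array<vk::ShaderStageFlagBits, 2> stages  = { vk::ShaderStageFlagBits::eVertex, vk::ShaderStageFlagBits::eFragment };
  std::array<vk::ShaderEXT, 2>           shaders = { vert, frag };
  cmd.bindShadersEXT( stages, shaders );  // sizes match, so no LogicError
}
```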
+    //=== VK_NV_low_latency2 ===
+
+    VULKAN_HPP_INLINE void SwapchainKHR::setLatencySleepModeNV( const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkSetLatencySleepModeNV && "Function requires <VK_NV_low_latency2>" );
+
+      VkResult result = getDispatcher()->vkSetLatencySleepModeNV(
+        static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( &sleepModeInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::setLatencySleepModeNV" );
+    }
+
+    VULKAN_HPP_INLINE void SwapchainKHR::latencySleepNV( const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkLatencySleepNV && "Function requires <VK_NV_low_latency2>" );
+
+      VkResult result = getDispatcher()->vkLatencySleepNV(
+        static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( &sleepInfo ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::latencySleepNV" );
+    }
+
+    VULKAN_HPP_INLINE void SwapchainKHR::setLatencyMarkerNV( const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkSetLatencyMarkerNV && "Function requires <VK_NV_low_latency2>" );
+
+      getDispatcher()->vkSetLatencyMarkerNV( static_cast<VkDevice>( m_device ),
+                                             static_cast<VkSwapchainKHR>( m_swapchain ),
+                                             reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV SwapchainKHR::getLatencyTimingsNV() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetLatencyTimingsNV && "Function requires <VK_NV_low_latency2>" );
+
+      VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo;
+      getDispatcher()->vkGetLatencyTimingsNV(
+        static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) );
+
+      return latencyMarkerInfo;
+    }
+
+    VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkQueueNotifyOutOfBandNV && "Function requires <VK_NV_low_latency2>" );
+
+      getDispatcher()->vkQueueNotifyOutOfBandNV( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( &queueTypeInfo ) );
+    }
+
+    //=== VK_KHR_cooperative_matrix ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR>
+      PhysicalDevice::getCooperativeMatrixPropertiesKHR() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR && "Function requires <VK_KHR_cooperative_matrix>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR> properties;
+      uint32_t propertyCount;
+      VkResult result;
+      do
+      {
+        result = getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && propertyCount )
+        {
+          properties.resize( propertyCount );
+          result = getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(
+            static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" );
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      if ( propertyCount < properties.size() )
+      {
+        properties.resize( propertyCount );
+      }
+      return properties;
+    }
+
+    //=== VK_EXT_attachment_feedback_loop_dynamic_state ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAttachmentFeedbackLoopEnableEXT && "Function requires <VK_EXT_attachment_feedback_loop_dynamic_state>" );
+
+      getDispatcher()->vkCmdSetAttachmentFeedbackLoopEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageAspectFlags>( aspectMask ) );
+    }
+
+# if defined( VK_USE_PLATFORM_SCREEN_QNX )
+    //=== VK_QNX_external_memory_screen_buffer ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX
+      Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetScreenBufferPropertiesQNX && "Function requires <VK_QNX_external_memory_screen_buffer>" );
+
+      VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties;
+      VkResult result = getDispatcher()->vkGetScreenBufferPropertiesQNX(
+        static_cast<VkDevice>( m_device ), &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" );
+
+      return properties;
+    }
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+      Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetScreenBufferPropertiesQNX && "Function requires <VK_QNX_external_memory_screen_buffer>" );
+
+      StructureChain<X, Y, Z...> structureChain;
+      VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>();
+      VkResult result = getDispatcher()->vkGetScreenBufferPropertiesQNX(
+        static_cast<VkDevice>( m_device ), &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" );
+
+      return structureChain;
+    }
+# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+    //=== VK_KHR_calibrated_timestamps ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR> PhysicalDevice::getCalibrateableTimeDomainsKHR() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsKHR &&
+                         "Function requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::TimeDomainKHR> timeDomains;
+      uint32_t timeDomainCount;
+      VkResult result;
+      do
+      {
+        result = getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &timeDomainCount, nullptr );
+        if ( ( result == VK_SUCCESS ) && timeDomainCount )
+        {
+          timeDomains.resize( timeDomainCount );
+          result = getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsKHR(
+            static_cast<VkPhysicalDevice>( m_physicalDevice ), &timeDomainCount, reinterpret_cast<VkTimeDomainKHR *>( timeDomains.data() ) );
+        }
+      } while ( result == VK_INCOMPLETE );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" );
+      VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
+      if ( timeDomainCount < timeDomains.size() )
+      {
+        timeDomains.resize( timeDomainCount );
+      }
+      return timeDomains;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<std::vector<uint64_t>, uint64_t> Device::getCalibratedTimestampsKHR(
+      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR> const & timestampInfos ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsKHR &&
+                         "Function requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
+
+      std::pair<std::vector<uint64_t>, uint64_t> data_( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
+      std::vector<uint64_t> & timestamps   = data_.first;
+      uint64_t &              maxDeviation = data_.second;
+      VkResult result = getDispatcher()->vkGetCalibratedTimestampsKHR( static_cast<VkDevice>( m_device ),
+                                                                       timestampInfos.size(),
+                                                                       reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( timestampInfos.data() ),
+                                                                       timestamps.data(),
+                                                                       &maxDeviation );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" );
+
+      return data_;
+    }
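> Editorial note, not part of the vendored header: `getCalibratedTimestampsKHR` above samples several time domains in one call and reports the maximum deviation between them, which is how GPU timestamps get correlated with a host clock. A minimal sketch, assuming the calibrated-timestamps extension is enabled and the platform exposes the monotonic clock domain:

```cpp
#include <vulkan/vulkan_raii.hpp>
#include <array>
#include <utility>

std::pair<uint64_t, uint64_t> sampleClocks( vk::raii::Device const & device )
{
  std::array<vk::CalibratedTimestampInfoKHR, 2> infos = {
    vk::CalibratedTimestampInfoKHR{ vk::TimeDomainKHR::eDevice },
    vk::CalibratedTimestampInfoKHR{ vk::TimeDomainKHR::eClockMonotonic }  // POSIX; use eQueryPerformanceCounter on Windows
  };
  auto [timestamps, maxDeviation] = device.getCalibratedTimestampsKHR( infos );
  return { timestamps[0], timestamps[1] };  // device ticks, host nanoseconds
}
```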
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<uint64_t, uint64_t>
+      Device::getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsKHR &&
+                         "Function requires <VK_EXT_calibrated_timestamps> or <VK_KHR_calibrated_timestamps>" );
+
+      std::pair<uint64_t, uint64_t> data_;
+      uint64_t & timestamp    = data_.first;
+      uint64_t & maxDeviation = data_.second;
+      VkResult result = getDispatcher()->vkGetCalibratedTimestampsKHR(
+        static_cast<VkDevice>( m_device ), 1, reinterpret_cast<const VkCalibratedTimestampInfoKHR *>( &timestampInfo ), &timestamp, &maxDeviation );
+      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" );
+
+      return data_;
+    }
+
+    //=== VK_KHR_maintenance6 ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorSets2KHR && "Function requires <VK_KHR_maintenance6>" );
+
+      getDispatcher()->vkCmdBindDescriptorSets2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                    reinterpret_cast<const VkBindDescriptorSetsInfoKHR *>( &bindDescriptorSetsInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushConstants2KHR && "Function requires <VK_KHR_maintenance6>" );
+
+      getDispatcher()->vkCmdPushConstants2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                               reinterpret_cast<const VkPushConstantsInfoKHR *>( &pushConstantsInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSet2KHR && "Function requires <VK_KHR_maintenance6>" );
+
+      getDispatcher()->vkCmdPushDescriptorSet2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                   reinterpret_cast<const VkPushDescriptorSetInfoKHR *>( &pushDescriptorSetInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate2KHR(
+      const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR & pushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetWithTemplate2KHR && "Function requires <VK_KHR_maintenance6>" );
+
+      getDispatcher()->vkCmdPushDescriptorSetWithTemplate2KHR(
+        static_cast<VkCommandBuffer>( m_commandBuffer ),
+        reinterpret_cast<const VkPushDescriptorSetWithTemplateInfoKHR *>( &pushDescriptorSetWithTemplateInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsets2EXT(
+      const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDescriptorBufferOffsets2EXT && "Function requires <VK_KHR_maintenance6>" );
+
+      getDispatcher()->vkCmdSetDescriptorBufferOffsets2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                            reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( &setDescriptorBufferOffsetsInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT(
+      const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplers2EXT && "Function requires <VK_KHR_maintenance6>" );
+
+      getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplers2EXT(
+        static_cast<VkCommandBuffer>( m_commandBuffer ),
+        reinterpret_cast<const VkBindDescriptorBufferEmbeddedSamplersInfoEXT *>( &bindDescriptorBufferEmbeddedSamplersInfo ) );
+    }
+
+    //====================
+    //=== RAII Helpers ===
+    //====================
+
+    template <typename RAIIType>
+    std::vector<typename RAIIType::CppType> filterCppTypes( std::vector<RAIIType> const & raiiTypes )
+    {
+      std::vector<typename RAIIType::CppType> cppTypes( raiiTypes.size() );
+      std::transform( raiiTypes.begin(), raiiTypes.end(), cppTypes.begin(), []( RAIIType const & d ) { return *d; } );
+      return cppTypes;
+    }
+
+    template <typename RAIIType, class UnaryPredicate>
+    std::vector<typename RAIIType::CppType> filterCppTypes( std::vector<RAIIType> const & raiiTypes, UnaryPredicate p )
+    {
+      std::vector<typename RAIIType::CppType> cppTypes;
+      for ( auto const & t : raiiTypes )
+      {
+        if ( p( t ) )
+        {
+          cppTypes.push_back( *t );
+        }
+      }
+      return cppTypes;
+    }
+
+  }  // namespace VULKAN_HPP_RAII_NAMESPACE
+}  // namespace VULKAN_HPP_NAMESPACE
+#endif
+#endif
diff --git
a/MacroLibX/third_party/vulkan/vulkan_screen.h b/MacroLibX/third_party/vulkan/vulkan_screen.h new file mode 100644 index 0000000..981738f --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_screen.h @@ -0,0 +1,108 @@ +#ifndef VULKAN_SCREEN_H_ +#define VULKAN_SCREEN_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_QNX_screen_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_QNX_screen_surface 1 +#define VK_QNX_SCREEN_SURFACE_SPEC_VERSION 1 +#define VK_QNX_SCREEN_SURFACE_EXTENSION_NAME "VK_QNX_screen_surface" +typedef VkFlags VkScreenSurfaceCreateFlagsQNX; +typedef struct VkScreenSurfaceCreateInfoQNX { + VkStructureType sType; + const void* pNext; + VkScreenSurfaceCreateFlagsQNX flags; + struct _screen_context* context; + struct _screen_window* window; +} VkScreenSurfaceCreateInfoQNX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateScreenSurfaceQNX)(VkInstance instance, const VkScreenSurfaceCreateInfoQNX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct _screen_window* window); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateScreenSurfaceQNX( + VkInstance instance, + const VkScreenSurfaceCreateInfoQNX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceScreenPresentationSupportQNX( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct _screen_window* window); +#endif + + +// VK_QNX_external_memory_screen_buffer is a preprocessor guard. Do not pass it to API calls. 
+#define VK_QNX_external_memory_screen_buffer 1 +#define VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_SPEC_VERSION 1 +#define VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME "VK_QNX_external_memory_screen_buffer" +typedef struct VkScreenBufferPropertiesQNX { + VkStructureType sType; + void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeBits; +} VkScreenBufferPropertiesQNX; + +typedef struct VkScreenBufferFormatPropertiesQNX { + VkStructureType sType; + void* pNext; + VkFormat format; + uint64_t externalFormat; + uint64_t screenUsage; + VkFormatFeatureFlags formatFeatures; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkScreenBufferFormatPropertiesQNX; + +typedef struct VkImportScreenBufferInfoQNX { + VkStructureType sType; + const void* pNext; + struct _screen_buffer* buffer; +} VkImportScreenBufferInfoQNX; + +typedef struct VkExternalFormatQNX { + VkStructureType sType; + void* pNext; + uint64_t externalFormat; +} VkExternalFormatQNX; + +typedef struct VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX { + VkStructureType sType; + void* pNext; + VkBool32 screenBufferImport; +} VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + +typedef VkResult (VKAPI_PTR *PFN_vkGetScreenBufferPropertiesQNX)(VkDevice device, const struct _screen_buffer* buffer, VkScreenBufferPropertiesQNX* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetScreenBufferPropertiesQNX( + VkDevice device, + const struct _screen_buffer* buffer, + VkScreenBufferPropertiesQNX* pProperties); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_shared.hpp b/MacroLibX/third_party/vulkan/vulkan_shared.hpp new file mode 100644 index 0000000..1ca5286 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_shared.hpp @@ -0,0 +1,1080 @@ +// Copyright 2015-2023 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
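> Editorial note, not part of the vendored header: the following file, vulkan_shared.hpp, provides reference-counted `vk::SharedHandle` wrappers in which a child handle keeps its parent alive. A minimal usage sketch, assuming `VULKAN_HPP_DEFAULT_DISPATCHER` has been initialized and `rawView` was created on `device`:

```cpp
#include <vulkan/vulkan_shared.hpp>

vk::SharedImageView makeSharedView( vk::SharedDevice device, vk::ImageView rawView )
{
  // The view's control block holds a reference to the device, so the device
  // outlives the view regardless of the scopes the two handles escape to.
  return vk::SharedImageView( rawView, device );
}
```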
+
+#ifndef VULKAN_SHARED_HPP
+#define VULKAN_SHARED_HPP
+
+#include <atomic>  // std::atomic_size_t
+#include <vulkan/vulkan.hpp>
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+
+  template <typename HandleType>
+  class SharedHandleTraits;
+
+  class NoDestructor
+  {
+  };
+
+  template <typename HandleType, typename = void>
+  struct HasDestructorType : std::false_type
+  {
+  };
+
+  template <typename HandleType>
+  struct HasDestructorType<HandleType, decltype( ( void )typename SharedHandleTraits<HandleType>::DestructorType() )> : std::true_type
+  {
+  };
+
+  template <typename HandleType, typename Enable = void>
+  struct GetDestructorType
+  {
+    using type = NoDestructor;
+  };
+
+  template <typename HandleType>
+  struct GetDestructorType<HandleType, typename std::enable_if<HasDestructorType<HandleType>::value>::type>
+  {
+    using type = typename SharedHandleTraits<HandleType>::DestructorType;
+  };
+
+  template <class HandleType>
+  using DestructorTypeOf = typename GetDestructorType<HandleType>::type;
+
+  template <class HandleType>
+  struct HasDestructor : std::integral_constant<bool, !std::is_same<DestructorTypeOf<HandleType>, NoDestructor>::value>
+  {
+  };
+
+  //=====================================================================================================================
+
+  template <typename HandleType>
+  class SharedHandle;
+
+  template <typename DestructorType, typename Deleter>
+  struct SharedHeader
+  {
+    SharedHeader( SharedHandle<DestructorType> parent, Deleter deleter = Deleter() ) VULKAN_HPP_NOEXCEPT
+      : parent( std::move( parent ) )
+      , deleter( std::move( deleter ) )
+    {
+    }
+
+    SharedHandle<DestructorType> parent;
+    Deleter                      deleter;
+  };
+
+  template <typename Deleter>
+  struct SharedHeader<NoDestructor, Deleter>
+  {
+    SharedHeader( Deleter deleter = Deleter() ) VULKAN_HPP_NOEXCEPT : deleter( std::move( deleter ) ) {}
+
+    Deleter deleter;
+  };
+
+  //=====================================================================================================================
+
+  template <typename HeaderType>
+  class ReferenceCounter
+  {
+  public:
+    template <typename... Args>
+    ReferenceCounter( Args &&... control_args ) : m_header( std::forward<Args>( control_args )... )
+    {
+    }
+
+    ReferenceCounter( const ReferenceCounter & )             = delete;
+    ReferenceCounter & operator=( const ReferenceCounter & ) = delete;
+
+  public:
+    size_t addRef() VULKAN_HPP_NOEXCEPT
+    {
+      // Relaxed memory order is sufficient since this does not impose any ordering on other operations
+      return m_ref_cnt.fetch_add( 1, std::memory_order_relaxed );
+    }
+
+    size_t release() VULKAN_HPP_NOEXCEPT
+    {
+      // A release memory order to ensure that all releases are ordered
+      return m_ref_cnt.fetch_sub( 1, std::memory_order_release );
+    }
+
+  public:
+    std::atomic_size_t m_ref_cnt{ 1 };
+    HeaderType         m_header{};
+  };
+
+  //=====================================================================================================================
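> Editorial note, not part of the vendored header: `ReferenceCounter` above, together with the destructor below, implements the classic lock-free ref-count idiom also used by `std::shared_ptr`: decrements use `memory_order_release`, and the thread that drops the count to zero issues an acquire fence before tearing anything down, so every owner's prior writes are visible to the deleter. A standalone sketch of just that idiom:

```cpp
#include <atomic>

struct ControlBlock
{
  std::atomic_size_t refs{ 1 };
};

void release( ControlBlock * cb )
{
  // fetch_sub returns the previous value, so 1 means "we were the last owner".
  if ( cb->refs.fetch_sub( 1, std::memory_order_release ) == 1 )
  {
    // Pairs with every release-decrement above; a no-op on x86 but a real
    // synchronization point on weakly ordered CPUs such as ARM.
    std::atomic_thread_fence( std::memory_order_acquire );
    delete cb;  // safe: no other owner can still touch *cb
  }
}
```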
+  template <typename HandleType, typename HeaderType, typename ForwardType = SharedHandle<HandleType>>
+  class SharedHandleBase
+  {
+  public:
+    SharedHandleBase() = default;
+
+    template <typename... Args>
+    SharedHandleBase( HandleType handle, Args &&... control_args )
+      : m_control( new ReferenceCounter<HeaderType>( std::forward<Args>( control_args )... ) ), m_handle( handle )
+    {
+    }
+
+    SharedHandleBase( const SharedHandleBase & o ) VULKAN_HPP_NOEXCEPT
+    {
+      o.addRef();
+      m_handle  = o.m_handle;
+      m_control = o.m_control;
+    }
+
+    SharedHandleBase( SharedHandleBase && o ) VULKAN_HPP_NOEXCEPT
+      : m_control( o.m_control )
+      , m_handle( o.m_handle )
+    {
+      o.m_handle  = nullptr;
+      o.m_control = nullptr;
+    }
+
+    SharedHandleBase & operator=( const SharedHandleBase & o ) VULKAN_HPP_NOEXCEPT
+    {
+      SharedHandleBase( o ).swap( *this );
+      return *this;
+    }
+
+    SharedHandleBase & operator=( SharedHandleBase && o ) VULKAN_HPP_NOEXCEPT
+    {
+      SharedHandleBase( std::move( o ) ).swap( *this );
+      return *this;
+    }
+
+    ~SharedHandleBase()
+    {
+      // only this function owns the last reference to the control block
+      // the same principle is used in the default deleter of std::shared_ptr
+      if ( m_control && ( m_control->release() == 1 ) )
+      {
+        // noop in x86, but does thread synchronization in ARM
+        // it is required to ensure that last thread is getting to destroy the control block
+        // by ordering all atomic operations before this fence
+        std::atomic_thread_fence( std::memory_order_acquire );
+        ForwardType::internalDestroy( getHeader(), m_handle );
+        delete m_control;
+      }
+    }
+
+  public:
+    HandleType get() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_handle;
+    }
+
+    HandleType operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_handle;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return bool( m_handle );
+    }
+
+    const HandleType * operator->() const VULKAN_HPP_NOEXCEPT
+    {
+      return &m_handle;
+    }
+
+    HandleType * operator->() VULKAN_HPP_NOEXCEPT
+    {
+      return &m_handle;
+    }
+
+    void reset() VULKAN_HPP_NOEXCEPT
+    {
+      SharedHandleBase().swap( *this );
+    }
+
+    void swap( SharedHandleBase & o ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap( m_handle, o.m_handle );
+      std::swap( m_control, o.m_control );
+    }
+
+    template <typename T = HandleType>
+    typename std::enable_if<HasDestructor<T>::value, const SharedHandle<DestructorTypeOf<HandleType>> &>::type getDestructorType() const VULKAN_HPP_NOEXCEPT
+    {
+      return getHeader().parent;
+    }
+
+  protected:
+    template <typename T = HandleType>
+    static typename std::enable_if<!HasDestructor<T>::value, void>::type internalDestroy( const HeaderType & control, HandleType handle ) VULKAN_HPP_NOEXCEPT
+    {
+      control.deleter.destroy( handle );
+    }
+
+    template <typename T = HandleType>
+    static typename std::enable_if<HasDestructor<T>::value, void>::type internalDestroy( const HeaderType & control, HandleType handle ) VULKAN_HPP_NOEXCEPT
+    {
+      control.deleter.destroy( control.parent.get(), handle );
+    }
+
+    const HeaderType & getHeader() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_control->m_header;
+    }
+
+  private:
+    void addRef() const VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_control )
+        m_control->addRef();
+    }
+
+  protected:
+    ReferenceCounter<HeaderType> * m_control = nullptr;
+    HandleType                     m_handle{};
+  };
+
+  template <typename HandleType>
+  class SharedHandle : public SharedHandleBase<HandleType, SharedHeader<DestructorTypeOf<HandleType>, typename SharedHandleTraits<HandleType>::deleter>>
+  {
+  private:
+    using BaseType    = SharedHandleBase<HandleType, SharedHeader<DestructorTypeOf<HandleType>, typename SharedHandleTraits<HandleType>::deleter>>;
+    using DeleterType = typename SharedHandleTraits<HandleType>::deleter;
+    friend BaseType;
+
+  public:
+    SharedHandle() = default;
+
+    template <typename T = HandleType, typename = typename std::enable_if<HasDestructor<T>::value>::type>
+    explicit SharedHandle( HandleType handle, SharedHandle<DestructorTypeOf<HandleType>> parent, DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT
+      : BaseType( handle, std::move( parent ), std::move( deleter ) )
+    {
+    }
+
+    template <typename T = HandleType, typename = typename std::enable_if<!HasDestructor<T>::value>::type>
+    explicit SharedHandle( HandleType handle, DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT : BaseType( handle, std::move( deleter ) )
+    {
+    }
+
+  protected:
+    using BaseType::internalDestroy;
+  };
+
+  template <typename HandleType>
+  class
SharedHandleTraits; + +// Silence the function cast warnings. +# if defined( __GNUC__ ) && !defined( __clang__ ) && !defined( __INTEL_COMPILER ) +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + + template + class ObjectDestroyShared + { + public: + using DestructorType = typename SharedHandleTraits::DestructorType; + + template + using DestroyFunctionPointerType = + typename std::conditional::value, + void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const, + void ( HandleType::* )( const AllocationCallbacks *, const Dispatcher & ) const>::type; + + using SelectorType = typename std::conditional::value, DestructorType, HandleType>::type; + + template + ObjectDestroyShared( Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &SelectorType::destroy ) ) ) + , m_dispatch( &dispatch ) + , m_allocationCallbacks( allocationCallbacks ) + { + } + + public: + template + typename std::enable_if::value, void>::type destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch ); + } + + template + typename std::enable_if::value, void>::type destroy( HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( handle.*m_destroy )( m_allocationCallbacks, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const DispatchLoaderBase * m_dispatch = nullptr; + Optional m_allocationCallbacks = nullptr; + }; + + template + class ObjectFreeShared + { + public: + using DestructorType = typename SharedHandleTraits::DestructorType; + + template + using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const; + + template + ObjectFreeShared( Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &DestructorType::free ) ) ) + , m_dispatch( &dispatch ) + , m_allocationCallbacks( allocationCallbacks ) + { + } + + public: + void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const DispatchLoaderBase * m_dispatch = nullptr; + Optional m_allocationCallbacks = nullptr; + }; + + template + class ObjectReleaseShared + { + public: + using DestructorType = typename SharedHandleTraits::DestructorType; + + template + using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const Dispatcher & ) const; + + template + ObjectReleaseShared( const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &DestructorType::release ) ) ) + , m_dispatch( &dispatch ) + { + } + + public: + void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( handle, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const DispatchLoaderBase * m_dispatch = nullptr; + }; + + template + class 
PoolFreeShared + { + public: + using DestructorType = typename SharedHandleTraits::DestructorType; + + template + using ReturnType = decltype( std::declval().free( PoolType(), 0u, nullptr, Dispatcher() ) ); + + template + using DestroyFunctionPointerType = ReturnType ( DestructorType::* )( PoolType, uint32_t, const HandleType *, const Dispatcher & ) const; + + PoolFreeShared() = default; + + template + PoolFreeShared( SharedHandle pool, const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &DestructorType::free ) ) ) + , m_dispatch( &dispatch ) + , m_pool( std::move( pool ) ) + { + } + + public: + void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( m_pool.get(), 1u, &handle, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const DispatchLoaderBase * m_dispatch = nullptr; + SharedHandle m_pool{}; + }; + +# if defined( __GNUC__ ) && !defined( __clang__ ) && !defined( __INTEL_COMPILER ) +# pragma GCC diagnostic pop +# endif + + //====================== + //=== SHARED HANDLEs === + //====================== + + //=== VK_VERSION_1_0 === + template <> + class SharedHandleTraits + { + public: + using DestructorType = NoDestructor; + using deleter = ObjectDestroyShared; + }; + + using SharedInstance = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = NoDestructor; + using deleter = ObjectDestroyShared; + }; + + using SharedDevice = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectFreeShared; + }; + + using SharedDeviceMemory = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedFence = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedSemaphore = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedEvent = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedQueryPool = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedBuffer = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedBufferView = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedImage = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedImageView = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedShaderModule = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedPipelineCache = SharedHandle; + + template 
<> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedPipeline = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedPipelineLayout = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedSampler = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedDescriptorPool = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = PoolFreeShared; + }; + + using SharedDescriptorSet = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedDescriptorSetLayout = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedFramebuffer = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedRenderPass = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedCommandPool = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = PoolFreeShared; + }; + + using SharedCommandBuffer = SharedHandle; + + //=== VK_VERSION_1_1 === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedSamplerYcbcrConversion = SharedHandle; + using SharedSamplerYcbcrConversionKHR = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedDescriptorUpdateTemplate = SharedHandle; + using SharedDescriptorUpdateTemplateKHR = SharedHandle; + + //=== VK_VERSION_1_3 === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedPrivateDataSlot = SharedHandle; + using SharedPrivateDataSlotEXT = SharedHandle; + + //=== VK_KHR_surface === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Instance; + using deleter = ObjectDestroyShared; + }; + + using SharedSurfaceKHR = SharedHandle; + + //=== VK_KHR_swapchain === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedSwapchainKHR = SharedHandle; + + //=== VK_KHR_display === + template <> + class SharedHandleTraits + { + public: + using DestructorType = PhysicalDevice; + using deleter = ObjectDestroyShared; + }; + + using SharedDisplayKHR = SharedHandle; + + //=== VK_EXT_debug_report === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Instance; + using deleter = ObjectDestroyShared; + }; + + using SharedDebugReportCallbackEXT = SharedHandle; + + //=== VK_KHR_video_queue === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; 
+ }; + + using SharedVideoSessionKHR = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedVideoSessionParametersKHR = SharedHandle; + + //=== VK_NVX_binary_import === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedCuModuleNVX = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedCuFunctionNVX = SharedHandle; + + //=== VK_EXT_debug_utils === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Instance; + using deleter = ObjectDestroyShared; + }; + + using SharedDebugUtilsMessengerEXT = SharedHandle; + + //=== VK_KHR_acceleration_structure === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedAccelerationStructureKHR = SharedHandle; + + //=== VK_EXT_validation_cache === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedValidationCacheEXT = SharedHandle; + + //=== VK_NV_ray_tracing === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedAccelerationStructureNV = SharedHandle; + + //=== VK_INTEL_performance_query === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedPerformanceConfigurationINTEL = SharedHandle; + + //=== VK_KHR_deferred_host_operations === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedDeferredOperationKHR = SharedHandle; + + //=== VK_NV_device_generated_commands === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedIndirectCommandsLayoutNV = SharedHandle; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedCudaModuleNV = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedCudaFunctionNV = SharedHandle; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedBufferCollectionFUCHSIA = SharedHandle; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedMicromapEXT = SharedHandle; + + //=== VK_NV_optical_flow === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedOpticalFlowSessionNV = SharedHandle; + + //=== VK_EXT_shader_object === + template <> + class SharedHandleTraits + { + public: + using 
DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedShaderEXT = SharedHandle; + + enum class SwapchainOwns + { + no, + yes, + }; + + struct ImageHeader : SharedHeader, typename SharedHandleTraits::deleter> + { + ImageHeader( + SharedHandle> parent, + typename SharedHandleTraits::deleter deleter = typename SharedHandleTraits::deleter(), + SwapchainOwns swapchainOwned = SwapchainOwns::no ) VULKAN_HPP_NOEXCEPT + : SharedHeader, typename SharedHandleTraits::deleter>( std::move( parent ), + std::move( deleter ) ) + , swapchainOwned( swapchainOwned ) + { + } + + SwapchainOwns swapchainOwned = SwapchainOwns::no; + }; + + template <> + class SharedHandle : public SharedHandleBase + { + using BaseType = SharedHandleBase; + using DeleterType = typename SharedHandleTraits::deleter; + friend BaseType; + + public: + SharedHandle() = default; + + explicit SharedHandle( VULKAN_HPP_NAMESPACE::Image handle, + SharedHandle> parent, + SwapchainOwns swapchain_owned = SwapchainOwns::no, + DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT + : BaseType( handle, std::move( parent ), std::move( deleter ), swapchain_owned ) + { + } + + protected: + static void internalDestroy( const ImageHeader & control, VULKAN_HPP_NAMESPACE::Image handle ) VULKAN_HPP_NOEXCEPT + { + if ( control.swapchainOwned == SwapchainOwns::no ) + { + control.deleter.destroy( control.parent.get(), handle ); + } + } + }; + + struct SwapchainHeader + { + SwapchainHeader( SharedHandle surface, + SharedHandle> parent, + typename SharedHandleTraits::deleter deleter = + typename SharedHandleTraits::deleter() ) VULKAN_HPP_NOEXCEPT + : surface( std::move( surface ) ) + , parent( std::move( parent ) ) + , deleter( std::move( deleter ) ) + { + } + + SharedHandle surface{}; + SharedHandle> parent{}; + typename SharedHandleTraits::deleter deleter{}; + }; + + template <> + class SharedHandle : public SharedHandleBase + { + using BaseType = SharedHandleBase; + using DeleterType = typename SharedHandleTraits::deleter; + friend BaseType; + + public: + SharedHandle() = default; + + explicit SharedHandle( VULKAN_HPP_NAMESPACE::SwapchainKHR handle, + SharedHandle> parent, + SharedHandle surface, + DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT + : BaseType( handle, std::move( surface ), std::move( parent ), std::move( deleter ) ) + { + } + + public: + const SharedHandle & getSurface() const VULKAN_HPP_NOEXCEPT + { + return getHeader().surface; + } + + protected: + using BaseType::internalDestroy; + }; + + template + class SharedHandleBaseNoDestroy : public SharedHandleBase + { + public: + using SharedHandleBase::SharedHandleBase; + + const DestructorType & getDestructorType() const VULKAN_HPP_NOEXCEPT + { + return SharedHandleBase::getHeader(); + } + + protected: + static void internalDestroy( const DestructorType &, HandleType ) VULKAN_HPP_NOEXCEPT {} + }; + + //=== VK_VERSION_1_0 === + + template <> + class SharedHandle : public SharedHandleBaseNoDestroy + { + friend SharedHandleBase; + + public: + SharedHandle() = default; + + explicit SharedHandle( PhysicalDevice handle, SharedInstance parent ) noexcept + : SharedHandleBaseNoDestroy( handle, std::move( parent ) ) + { + } + }; + + using SharedPhysicalDevice = SharedHandle; + + template <> + class SharedHandle : public SharedHandleBaseNoDestroy + { + friend SharedHandleBase; + + public: + SharedHandle() = default; + + explicit SharedHandle( Queue handle, SharedDevice parent ) noexcept : SharedHandleBaseNoDestroy( handle, std::move( parent ) ) {} + }; + + 
+
+  //=== VK_KHR_display ===
+
+  template <>
+  class SharedHandle<DisplayModeKHR> : public SharedHandleBaseNoDestroy<DisplayModeKHR, SharedDisplayKHR>
+  {
+    friend SharedHandleBase<DisplayModeKHR, SharedDisplayKHR>;
+
+  public:
+    SharedHandle() = default;
+
+    explicit SharedHandle( DisplayModeKHR handle, SharedDisplayKHR parent ) noexcept
+      : SharedHandleBaseNoDestroy<DisplayModeKHR, SharedDisplayKHR>( handle, std::move( parent ) )
+    {
+    }
+  };
+
+  using SharedDisplayModeKHR = SharedHandle<DisplayModeKHR>;
+#endif // !VULKAN_HPP_NO_SMART_HANDLE
+} // namespace VULKAN_HPP_NAMESPACE
+#endif // VULKAN_SHARED_HPP
diff --git a/MacroLibX/third_party/vulkan/vulkan_static_assertions.hpp b/MacroLibX/third_party/vulkan/vulkan_static_assertions.hpp
new file mode 100644
index 0000000..7a8b034
--- /dev/null
+++ b/MacroLibX/third_party/vulkan/vulkan_static_assertions.hpp
@@ -0,0 +1,7348 @@
+// Copyright 2015-2023 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_STATIC_ASSERTIONS_HPP
+#define VULKAN_STATIC_ASSERTIONS_HPP
+
+#include <vulkan/vulkan.hpp>
+
+//=========================
+//=== static_assertions ===
+//=========================
+
+//=== VK_VERSION_1_0 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Extent2D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Extent2D>::value, "Extent2D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Extent3D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Extent3D>::value, "Extent3D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Offset2D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Offset2D>::value, "Offset2D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Offset3D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Offset3D>::value, "Offset3D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Rect2D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Rect2D>::value, "Rect2D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BaseInStructure>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BaseInStructure>::value, "BaseInStructure is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BaseOutStructure>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BaseOutStructure>::value, "BaseOutStructure is not nothrow_move_constructible!" );
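[Editor's note] These generated assertions pin down that each vulkan.hpp wrapper is bit-identical in size and layout to its C counterpart and cheap to move. That is what lets the C++ bindings pass wrapper objects and arrays straight to the C API without conversion. A sketch of the interop the checks guarantee; `useAsC` is illustrative, not from the header:

```cpp
// Sketch: because vk::Extent2D passes the size and standard-layout
// assertions above, a wrapper object can be handed to C code by
// pointer-casting, with no copy or conversion.
#include <vulkan/vulkan.hpp>

void useAsC( vk::Extent2D & extent )
{
  VkExtent2D * raw = reinterpret_cast<VkExtent2D *>( &extent );  // layout-compatible by assertion
  raw->width  = 640;
  raw->height = 480;  // the write is visible through the C++ wrapper as well
}
```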
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier>::value, "BufferMemoryBarrier is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>::value, "DispatchIndirectCommand is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>::value, "DrawIndexedIndirectCommand is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>::value, "DrawIndirectCommand is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>::value, "ImageMemoryBarrier is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryBarrier>::value, "MemoryBarrier is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne ) == sizeof( VkPipelineCacheHeaderVersionOne ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>::value, "PipelineCacheHeaderVersionOne is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AllocationCallbacks>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AllocationCallbacks>::value, "AllocationCallbacks is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value, "ApplicationInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties>::value, "FormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::value, "ImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Instance>::value, "Instance is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InstanceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InstanceCreateInfo>::value, "InstanceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryHeap>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryHeap>::value, "MemoryHeap is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryType>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryType>::value, "MemoryType is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice>::value, "PhysicalDevice is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures>::value, "PhysicalDeviceFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>::value, "PhysicalDeviceLimits is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties>::value, "PhysicalDeviceMemoryProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties>::value, "PhysicalDeviceProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties>::value, "PhysicalDeviceSparseProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value, "QueueFamilyProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Device>::value, "Device is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>::value, "DeviceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo>::value, "DeviceQueueCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, "ExtensionProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::LayerProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::LayerProperties>::value, "LayerProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Queue>::value, "Queue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubmitInfo>::value, "SubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MappedMemoryRange>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MappedMemoryRange>::value, "MappedMemoryRange is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>::value, "MemoryAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemory>::value, "DeviceMemory is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryRequirements>::value, "MemoryRequirements is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindSparseInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindSparseInfo>::value, "BindSparseInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource>::value, "ImageSubresource is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo>::value, "SparseBufferMemoryBindInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value, "SparseImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>::value, "SparseImageMemoryBind is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>::value, "SparseImageMemoryBindInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value, "SparseImageMemoryRequirements is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo>::value, "SparseImageOpaqueMemoryBindInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseMemoryBind>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseMemoryBind>::value, "SparseMemoryBind is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Fence>::value, "Fence is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceCreateInfo>::value, "FenceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Semaphore>::value, "Semaphore is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo>::value, "SemaphoreCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Event>::value, "Event is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::EventCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::EventCreateInfo>::value, "EventCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPool>::value, "QueryPool is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo>::value, "QueryPoolCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Buffer>::value, "Buffer is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCreateInfo>::value, "BufferCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferView>::value, "BufferView is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>::value, "BufferViewCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Image ) == sizeof( VkImage ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Image>::value, "Image is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCreateInfo>::value, "ImageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout>::value, "SubresourceLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComponentMapping>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ComponentMapping>::value, "ComponentMapping is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresourceRange>::value, "ImageSubresourceRange is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageView>::value, "ImageView is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo>::value, "ImageViewCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModule>::value, "ShaderModule is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>::value, "ShaderModuleCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCache>::value, "PipelineCache is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo>::value, "PipelineCacheCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo>::value, "ComputePipelineCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>::value, "GraphicsPipelineCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Pipeline>::value, "Pipeline is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>::value, "PipelineColorBlendAttachmentState is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>::value, "PipelineColorBlendStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>::value, "PipelineDepthStencilStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>::value, "PipelineDynamicStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo>::value, "PipelineInputAssemblyStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo>::value, "PipelineMultisampleStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo>::value, "PipelineRasterizationStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo>::value, "PipelineShaderStageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo>::value, "PipelineTessellationStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo>::value, "PipelineVertexInputStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo>::value, "PipelineViewportStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SpecializationInfo>::value, "SpecializationInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SpecializationMapEntry>::value, "SpecializationMapEntry is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StencilOpState>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StencilOpState>::value, "StencilOpState is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription>::value, "VertexInputAttributeDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription>::value, "VertexInputBindingDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Viewport>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Viewport>::value, "Viewport is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLayout>::value, "PipelineLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo>::value, "PipelineLayoutCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PushConstantRange>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PushConstantRange>::value, "PushConstantRange is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Sampler>::value, "Sampler is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCreateInfo>::value, "SamplerCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>::value, "CopyDescriptorSet is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo>::value, "DescriptorBufferInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorImageInfo>::value, "DescriptorImageInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPool>::value, "DescriptorPool is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo>::value, "DescriptorPoolCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolSize>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolSize>::value, "DescriptorPoolSize is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSet>::value, "DescriptorSet is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo>::value, "DescriptorSetAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::value, "DescriptorSetLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding>::value, "DescriptorSetLayoutBinding is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo>::value, "DescriptorSetLayoutCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSet>::value, "WriteDescriptorSet is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescription>::value, "AttachmentDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReference>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReference>::value, "AttachmentReference is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Framebuffer>::value, "Framebuffer is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo>::value, "FramebufferCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPass>::value, "RenderPass is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo>::value, "RenderPassCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDependency>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDependency>::value, "SubpassDependency is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescription>::value, "SubpassDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandPool>::value, "CommandPool is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo>::value, "CommandPoolCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBuffer>::value, "CommandBuffer is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>::value, "CommandBufferAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo>::value, "CommandBufferBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>::value, "CommandBufferInheritanceInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCopy>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCopy>::value, "BufferCopy is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferImageCopy>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferImageCopy>::value, "BufferImageCopy is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearAttachment>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearAttachment>::value, "ClearAttachment is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) == sizeof( VkClearColorValue ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearColorValue>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearColorValue>::value, "ClearColorValue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue>::value, "ClearDepthStencilValue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearRect>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearRect>::value, "ClearRect is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) == sizeof( VkClearValue ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearValue>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearValue>::value, "ClearValue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageBlit>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageBlit>::value, "ImageBlit is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCopy>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCopy>::value, "ImageCopy is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageResolve>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageResolve>::value, "ImageResolve is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>::value, "ImageSubresourceLayers is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>::value, "RenderPassBeginInfo is not nothrow_move_constructible!" );
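[Editor's note] The Vulkan 1.1 structures asserted in the next block are pNext-extensible: you chain extension structs onto a base struct, and the same layout guarantees make the whole chain consumable by the C loader. A sketch (not from the header) assuming a valid `vk::PhysicalDevice` and the pointer overload of `getFeatures2` from vulkan.hpp:

```cpp
// Sketch: chain a 1.1 feature struct onto PhysicalDeviceFeatures2 and let
// one query fill both. Both structs are covered by the assertions below.
#include <vulkan/vulkan.hpp>

bool has16BitStorage( vk::PhysicalDevice gpu )
{
  vk::PhysicalDevice16BitStorageFeatures storage16{};
  vk::PhysicalDeviceFeatures2            features2{};
  features2.pNext = &storage16;     // extension struct joins the pNext chain
  gpu.getFeatures2( &features2 );   // driver walks the chain and fills both
  return static_cast<bool>( storage16.storageBuffer16BitAccess );
}
```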
); + +//=== VK_VERSION_1_1 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceSubgroupProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindBufferMemoryInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindImageMemoryInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevice16BitStorageFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryDedicatedRequirements is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryDedicatedAllocateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryAllocateFlagsInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupRenderPassBeginInfo is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupCommandBufferBeginInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupSubmitInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupBindSparseInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindBufferMemoryDeviceGroupInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindImageMemoryDeviceGroupInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceGroupProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceGroupDeviceCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferMemoryRequirementsInfo2 is not nothrow_move_constructible!" 
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2>::value, "ImageMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2>::value, "ImageSparseMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryRequirements2>::value, "MemoryRequirements2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, "SparseImageMemoryRequirements2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>::value, "PhysicalDeviceFeatures2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>::value, "PhysicalDeviceProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties2>::value, "FormatProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::value, "ImageFormatProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>::value, "PhysicalDeviceImageFormatInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, "QueueFamilyProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>::value, "PhysicalDeviceMemoryProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, "SparseImageFormatProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2>::value, "PhysicalDeviceSparseImageFormatInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties>::value, "PhysicalDevicePointClippingProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo>::value, "RenderPassInputAttachmentAspectCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>::value, "InputAttachmentAspectReference is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>::value, "ImageViewUsageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo>::value, "PipelineTessellationDomainOriginStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo>::value, "RenderPassMultiviewCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>::value, "PhysicalDeviceMultiviewFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties>::value, "PhysicalDeviceMultiviewProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures>::value, "PhysicalDeviceVariablePointersFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures>::value, "PhysicalDeviceProtectedMemoryFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>::value, "PhysicalDeviceProtectedMemoryProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2>::value, "DeviceQueueInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>::value, "ProtectedSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo>::value, "SamplerYcbcrConversionCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>::value, "SamplerYcbcrConversionInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo>::value, "BindImagePlaneMemoryInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo>::value, "ImagePlaneMemoryRequirementsInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>::value, "PhysicalDeviceSamplerYcbcrConversionFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties>::value, "SamplerYcbcrConversionImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::value, "SamplerYcbcrConversion is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::value, "DescriptorUpdateTemplate is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry>::value, "DescriptorUpdateTemplateEntry is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo>::value, "DescriptorUpdateTemplateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties>::value, "ExternalMemoryProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo>::value, "PhysicalDeviceExternalImageFormatInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>::value, "ExternalImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>::value, "PhysicalDeviceExternalBufferInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalBufferProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalBufferProperties>::value, "ExternalBufferProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>::value, "PhysicalDeviceIDProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo>::value, "ExternalMemoryImageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo>::value, "ExternalMemoryBufferCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo>::value, "ExportMemoryAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo>::value, "PhysicalDeviceExternalFenceInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalFenceProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalFenceProperties>::value, "ExternalFenceProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>::value, "ExportFenceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo>::value, "ExportSemaphoreCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo>::value, "PhysicalDeviceExternalSemaphoreInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>::value, "ExternalSemaphoreProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>::value, "PhysicalDeviceMaintenance3Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>::value, "DescriptorSetLayoutSupport is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>::value, "PhysicalDeviceShaderDrawParametersFeatures is not nothrow_move_constructible!" );
+
+//=== VK_VERSION_1_2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features>::value, "PhysicalDeviceVulkan11Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties>::value, "PhysicalDeviceVulkan11Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>::value, "PhysicalDeviceVulkan12Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>::value, "PhysicalDeviceVulkan12Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo>::value, "ImageFormatListCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2>::value, "RenderPassCreateInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescription2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescription2>::value, "AttachmentDescription2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReference2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReference2>::value, "AttachmentReference2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescription2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescription2>::value, "SubpassDescription2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDependency2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDependency2>::value, "SubpassDependency2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassBeginInfo>::value, "SubpassBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassEndInfo>::value, "SubpassEndInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures>::value, "PhysicalDevice8BitStorageFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ConformanceVersion>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ConformanceVersion>::value, "ConformanceVersion is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>::value, "PhysicalDeviceDriverProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features ) == sizeof( VkPhysicalDeviceShaderAtomicInt64Features ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features>::value, "PhysicalDeviceShaderAtomicInt64Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features ) == sizeof( VkPhysicalDeviceShaderFloat16Int8Features ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>::value, "PhysicalDeviceShaderFloat16Int8Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>::value, "PhysicalDeviceFloatControlsProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo>::value, "DescriptorSetLayoutBindingFlagsCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures>::value, "PhysicalDeviceDescriptorIndexingFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties ) == sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties>::value, "PhysicalDeviceDescriptorIndexingProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo>::value, "DescriptorSetVariableDescriptorCountAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>::value, "DescriptorSetVariableDescriptorCountLayoutSupport is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve>::value, "SubpassDescriptionDepthStencilResolve is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties ) == sizeof( VkPhysicalDeviceDepthStencilResolveProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties>::value, "PhysicalDeviceDepthStencilResolveProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures>::value, "PhysicalDeviceScalarBlockLayoutFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo>::value, "ImageStencilUsageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo>::value, "SamplerReductionModeCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>::value, "PhysicalDeviceSamplerFilterMinmaxProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures>::value, "PhysicalDeviceVulkanMemoryModelFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures>::value, "PhysicalDeviceImagelessFramebufferFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo>::value, "FramebufferAttachmentsCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo>::value, "FramebufferAttachmentImageInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo>::value, "RenderPassAttachmentBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures>::value, "PhysicalDeviceUniformBufferStandardLayoutFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) == sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value, "PhysicalDeviceShaderSubgroupExtendedTypesFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) == sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value, "PhysicalDeviceSeparateDepthStencilLayoutsFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout>::value, "AttachmentReferenceStencilLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout>::value, "AttachmentDescriptionStencilLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>::value, "PhysicalDeviceHostQueryResetFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures>::value, "PhysicalDeviceTimelineSemaphoreFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties ) == sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties>::value, "PhysicalDeviceTimelineSemaphoreProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo>::value, "SemaphoreTypeCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo>::value, "TimelineSemaphoreSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value, "SemaphoreWaitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo>::value, "SemaphoreSignalInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>::value, "PhysicalDeviceBufferDeviceAddressFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo>::value, "BufferDeviceAddressInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>::value, "BufferOpaqueCaptureAddressCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>::value, "MemoryOpaqueCaptureAddressAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>::value, "DeviceMemoryOpaqueCaptureAddressInfo is not nothrow_move_constructible!" );
+
+//=== VK_VERSION_1_3 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features ) == sizeof( VkPhysicalDeviceVulkan13Features ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value, "PhysicalDeviceVulkan13Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties ) == sizeof( VkPhysicalDeviceVulkan13Properties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>::value, "PhysicalDeviceVulkan13Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo ) == sizeof( VkPipelineCreationFeedbackCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo>::value, "PipelineCreationFeedbackCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback ) == sizeof( VkPipelineCreationFeedback ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>::value, "PipelineCreationFeedback is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures ) == sizeof( VkPhysicalDeviceShaderTerminateInvocationFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures>::value, "PhysicalDeviceShaderTerminateInvocationFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties ) == sizeof( VkPhysicalDeviceToolProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, "PhysicalDeviceToolProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures ) == sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures>::value, "PhysicalDeviceShaderDemoteToHelperInvocationFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures ) == sizeof( VkPhysicalDevicePrivateDataFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures>::value, "PhysicalDevicePrivateDataFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo ) == sizeof( VkDevicePrivateDataCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo>::value, "DevicePrivateDataCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo ) == sizeof( VkPrivateDataSlotCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>::value, "PrivateDataSlotCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlot ) == sizeof( VkPrivateDataSlot ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::value, "PrivateDataSlot is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures ) == sizeof( VkPhysicalDevicePipelineCreationCacheControlFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>::value, "PhysicalDevicePipelineCreationCacheControlFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier2 ) == sizeof( VkMemoryBarrier2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryBarrier2>::value, "MemoryBarrier2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 ) == sizeof( VkBufferMemoryBarrier2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>::value, "BufferMemoryBarrier2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 ) == sizeof( VkImageMemoryBarrier2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>::value, "ImageMemoryBarrier2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DependencyInfo ) == sizeof( VkDependencyInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DependencyInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DependencyInfo>::value, "DependencyInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo2 ) == sizeof( VkSubmitInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubmitInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubmitInfo2>::value, "SubmitInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo ) == sizeof( VkSemaphoreSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>::value, "SemaphoreSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo ) == sizeof( VkCommandBufferSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value, "CommandBufferSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features ) == sizeof( VkPhysicalDeviceSynchronization2Features ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value, "PhysicalDeviceSynchronization2Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ) == sizeof( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>::value, "PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures ) == sizeof( VkPhysicalDeviceImageRobustnessFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>::value, "PhysicalDeviceImageRobustnessFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferInfo2 ) == sizeof( VkCopyBufferInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyBufferInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyBufferInfo2>::value, "CopyBufferInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageInfo2 ) == sizeof( VkCopyImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageInfo2>::value, "CopyImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 ) == sizeof( VkCopyBufferToImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value, "CopyBufferToImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 ) == sizeof( VkCopyImageToBufferInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value, "CopyImageToBufferInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BlitImageInfo2 ) == sizeof( VkBlitImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BlitImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BlitImageInfo2>::value, "BlitImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ResolveImageInfo2 ) == sizeof( VkResolveImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>::value, "ResolveImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy2 ) == sizeof( VkBufferCopy2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCopy2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCopy2>::value, "BufferCopy2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy2 ) == sizeof( VkImageCopy2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCopy2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCopy2>::value, "ImageCopy2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit2 ) == sizeof( VkImageBlit2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageBlit2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageBlit2>::value, "ImageBlit2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy2 ) == sizeof( VkBufferImageCopy2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferImageCopy2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferImageCopy2>::value, "BufferImageCopy2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve2 ) == sizeof( VkImageResolve2 ), "struct and wrapper have different size!" );
);
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageResolve2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageResolve2>::value, "ImageResolve2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>::value, "PhysicalDeviceSubgroupSizeControlFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties ) == sizeof( VkPhysicalDeviceSubgroupSizeControlProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>::value, "PhysicalDeviceSubgroupSizeControlProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo ) == sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>::value, "PipelineShaderStageRequiredSubgroupSizeCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures>::value, "PhysicalDeviceInlineUniformBlockFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties ) == sizeof( VkPhysicalDeviceInlineUniformBlockProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties>::value, "PhysicalDeviceInlineUniformBlockProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock ) == sizeof( VkWriteDescriptorSetInlineUniformBlock ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock>::value, "WriteDescriptorSetInlineUniformBlock is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo>::value, "DescriptorPoolInlineUniformBlockCreateInfo is not nothrow_move_constructible!"
);
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures ) == sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures>::value, "PhysicalDeviceTextureCompressionASTCHDRFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingInfo ) == sizeof( VkRenderingInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingInfo>::value, "RenderingInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo ) == sizeof( VkRenderingAttachmentInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo>::value, "RenderingAttachmentInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo ) == sizeof( VkPipelineRenderingCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo>::value, "PipelineRenderingCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures ) == sizeof( VkPhysicalDeviceDynamicRenderingFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures>::value, "PhysicalDeviceDynamicRenderingFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo ) == sizeof( VkCommandBufferInheritanceRenderingInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>::value, "CommandBufferInheritanceRenderingInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures ) == sizeof( VkPhysicalDeviceShaderIntegerDotProductFeatures ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures>::value, "PhysicalDeviceShaderIntegerDotProductFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties ) == sizeof( VkPhysicalDeviceShaderIntegerDotProductProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>::value, "struct wrapper is not a standard layout!"
);
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>::value, "PhysicalDeviceShaderIntegerDotProductProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties>::value, "PhysicalDeviceTexelBufferAlignmentProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties3 ) == sizeof( VkFormatProperties3 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties3>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties3>::value, "FormatProperties3 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features ) == sizeof( VkPhysicalDeviceMaintenance4Features ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features>::value, "PhysicalDeviceMaintenance4Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties ) == sizeof( VkPhysicalDeviceMaintenance4Properties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties>::value, "PhysicalDeviceMaintenance4Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements ) == sizeof( VkDeviceBufferMemoryRequirements ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements>::value, "DeviceBufferMemoryRequirements is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements ) == sizeof( VkDeviceImageMemoryRequirements ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements>::value, "DeviceImageMemoryRequirements is not nothrow_move_constructible!" );
+
+//=== VK_KHR_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceKHR>::value, "SurfaceKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::value, "SurfaceCapabilitiesKHR is not nothrow_move_constructible!"
);
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value, "SurfaceFormatKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_swapchain ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR>::value, "SwapchainCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainKHR>::value, "SwapchainKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentInfoKHR>::value, "PresentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>::value, "ImageSwapchainCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR>::value, "BindImageMemorySwapchainInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR>::value, "AcquireNextImageInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::value, "DeviceGroupPresentCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR>::value, "struct wrapper is not a standard layout!"
);
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR>::value, "DeviceGroupPresentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>::value, "DeviceGroupSwapchainCreateInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_display ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayKHR>::value, "DisplayKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR>::value, "DisplayModeCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::value, "DisplayModeKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>::value, "DisplayModeParametersKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value, "DisplayModePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::value, "DisplayPlaneCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value, "DisplayPlanePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value, "struct wrapper is not a standard layout!"
);
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value, "DisplayPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>::value, "DisplaySurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_display_swapchain ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR>::value, "DisplayPresentInfoKHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+//=== VK_KHR_xlib_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR>::value, "XlibSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+//=== VK_KHR_xcb_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR>::value, "XcbSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+//=== VK_KHR_wayland_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR>::value, "WaylandSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+//=== VK_KHR_android_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>::value, "AndroidSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_KHR_win32_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!"
);
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR>::value, "Win32SurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_EXT_debug_report ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::value, "DebugReportCallbackEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>::value, "DebugReportCallbackCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_AMD_rasterization_order ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD>::value, "PipelineRasterizationStateRasterizationOrderAMD is not nothrow_move_constructible!" );
+
+//=== VK_EXT_debug_marker ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT>::value, "DebugMarkerObjectNameInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT>::value, "DebugMarkerObjectTagInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT>::value, "DebugMarkerMarkerInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_video_queue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionKHR ) == sizeof( VkVideoSessionKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::value, "VideoSessionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR ) == sizeof( VkVideoSessionParametersKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::value, "VideoSessionParametersKHR is not nothrow_move_constructible!"
);
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR ) == sizeof( VkQueueFamilyQueryResultStatusPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR>::value, "QueueFamilyQueryResultStatusPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR ) == sizeof( VkQueueFamilyVideoPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR>::value, "QueueFamilyVideoPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR ) == sizeof( VkVideoProfileInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR>::value, "VideoProfileInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR ) == sizeof( VkVideoProfileListInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR>::value, "VideoProfileListInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR ) == sizeof( VkVideoCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::value, "VideoCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR ) == sizeof( VkPhysicalDeviceVideoFormatInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR>::value, "PhysicalDeviceVideoFormatInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR ) == sizeof( VkVideoFormatPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value, "VideoFormatPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR ) == sizeof( VkVideoPictureResourceInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR>::value, "VideoPictureResourceInfoKHR is not nothrow_move_constructible!"
);
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR ) == sizeof( VkVideoReferenceSlotInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR>::value, "VideoReferenceSlotInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR ) == sizeof( VkVideoSessionMemoryRequirementsKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value, "VideoSessionMemoryRequirementsKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR ) == sizeof( VkBindVideoSessionMemoryInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR>::value, "BindVideoSessionMemoryInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR ) == sizeof( VkVideoSessionCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>::value, "VideoSessionCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR ) == sizeof( VkVideoSessionParametersCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>::value, "VideoSessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR ) == sizeof( VkVideoSessionParametersUpdateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value, "VideoSessionParametersUpdateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR ) == sizeof( VkVideoBeginCodingInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>::value, "VideoBeginCodingInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR ) == sizeof( VkVideoEndCodingInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value, "VideoEndCodingInfoKHR is not nothrow_move_constructible!"
);
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR ) == sizeof( VkVideoCodingControlInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value, "VideoCodingControlInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_video_decode_queue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR ) == sizeof( VkVideoDecodeCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR>::value, "VideoDecodeCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR ) == sizeof( VkVideoDecodeUsageInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR>::value, "VideoDecodeUsageInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR ) == sizeof( VkVideoDecodeInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>::value, "VideoDecodeInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_dedicated_allocation ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>::value, "DedicatedAllocationImageCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value, "DedicatedAllocationBufferCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>::value, "DedicatedAllocationMemoryAllocateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_transform_feedback ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>::value, "struct wrapper is not a standard layout!"
);
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>::value, "PhysicalDeviceTransformFeedbackFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>::value, "PhysicalDeviceTransformFeedbackPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>::value, "PipelineRasterizationStateStreamCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_NVX_binary_import ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleNVX ) == sizeof( VkCuModuleNVX ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuModuleNVX>::value, "CuModuleNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionNVX ) == sizeof( VkCuFunctionNVX ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::value, "CuFunctionNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX ) == sizeof( VkCuModuleCreateInfoNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value, "CuModuleCreateInfoNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX ) == sizeof( VkCuFunctionCreateInfoNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>::value, "CuFunctionCreateInfoNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX ) == sizeof( VkCuLaunchInfoNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX>::value, "CuLaunchInfoNVX is not nothrow_move_constructible!" );
+
+//=== VK_NVX_image_view_handle ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX>::value, "ImageViewHandleInfoNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ), "struct and wrapper have different size!"
);
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::value, "ImageViewAddressPropertiesNVX is not nothrow_move_constructible!" );
+
+//=== VK_KHR_video_encode_h264 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesKHR ) == sizeof( VkVideoEncodeH264CapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesKHR>::value, "VideoEncodeH264CapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesKHR ) == sizeof( VkVideoEncodeH264QualityLevelPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesKHR>::value, "VideoEncodeH264QualityLevelPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoKHR ) == sizeof( VkVideoEncodeH264SessionCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoKHR>::value, "VideoEncodeH264SessionCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoKHR ) == sizeof( VkVideoEncodeH264SessionParametersCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoKHR>::value, "VideoEncodeH264SessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR ) == sizeof( VkVideoEncodeH264SessionParametersAddInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR>::value, "VideoEncodeH264SessionParametersAddInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoKHR ) == sizeof( VkVideoEncodeH264SessionParametersGetInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoKHR>::value, "VideoEncodeH264SessionParametersGetInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoKHR ) == sizeof( VkVideoEncodeH264SessionParametersFeedbackInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoKHR>::value, "VideoEncodeH264SessionParametersFeedbackInfoKHR is not nothrow_move_constructible!"
);
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoKHR ) == sizeof( VkVideoEncodeH264PictureInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoKHR>::value, "VideoEncodeH264PictureInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoKHR ) == sizeof( VkVideoEncodeH264DpbSlotInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoKHR>::value, "VideoEncodeH264DpbSlotInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR ) == sizeof( VkVideoEncodeH264NaluSliceInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR>::value, "VideoEncodeH264NaluSliceInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoKHR ) == sizeof( VkVideoEncodeH264ProfileInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoKHR>::value, "VideoEncodeH264ProfileInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoKHR ) == sizeof( VkVideoEncodeH264RateControlInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoKHR>::value, "VideoEncodeH264RateControlInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoKHR ) == sizeof( VkVideoEncodeH264RateControlLayerInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoKHR>::value, "VideoEncodeH264RateControlLayerInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR ) == sizeof( VkVideoEncodeH264QpKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR>::value, "VideoEncodeH264QpKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR ) == sizeof( VkVideoEncodeH264FrameSizeKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR>::value, "VideoEncodeH264FrameSizeKHR is not nothrow_move_constructible!"
);
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoKHR ) == sizeof( VkVideoEncodeH264GopRemainingFrameInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoKHR>::value, "VideoEncodeH264GopRemainingFrameInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_video_encode_h265 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesKHR ) == sizeof( VkVideoEncodeH265CapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesKHR>::value, "VideoEncodeH265CapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoKHR ) == sizeof( VkVideoEncodeH265SessionCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoKHR>::value, "VideoEncodeH265SessionCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesKHR ) == sizeof( VkVideoEncodeH265QualityLevelPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesKHR>::value, "VideoEncodeH265QualityLevelPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoKHR ) == sizeof( VkVideoEncodeH265SessionParametersCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoKHR>::value, "VideoEncodeH265SessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR ) == sizeof( VkVideoEncodeH265SessionParametersAddInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR>::value, "VideoEncodeH265SessionParametersAddInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoKHR ) == sizeof( VkVideoEncodeH265SessionParametersGetInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoKHR>::value, "VideoEncodeH265SessionParametersGetInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoKHR ) == sizeof( VkVideoEncodeH265SessionParametersFeedbackInfoKHR ), "struct and wrapper have different size!"
);
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoKHR>::value, "VideoEncodeH265SessionParametersFeedbackInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoKHR ) == sizeof( VkVideoEncodeH265PictureInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoKHR>::value, "VideoEncodeH265PictureInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoKHR ) == sizeof( VkVideoEncodeH265DpbSlotInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoKHR>::value, "VideoEncodeH265DpbSlotInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR ) == sizeof( VkVideoEncodeH265NaluSliceSegmentInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR>::value, "VideoEncodeH265NaluSliceSegmentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoKHR ) == sizeof( VkVideoEncodeH265ProfileInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoKHR>::value, "VideoEncodeH265ProfileInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoKHR ) == sizeof( VkVideoEncodeH265RateControlInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoKHR>::value, "VideoEncodeH265RateControlInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoKHR ) == sizeof( VkVideoEncodeH265RateControlLayerInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoKHR>::value, "VideoEncodeH265RateControlLayerInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR ) == sizeof( VkVideoEncodeH265QpKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR>::value, "VideoEncodeH265QpKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR ) == sizeof( VkVideoEncodeH265FrameSizeKHR ), "struct and wrapper have different size!"
);
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR>::value, "VideoEncodeH265FrameSizeKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoKHR ) == sizeof( VkVideoEncodeH265GopRemainingFrameInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoKHR>::value, "VideoEncodeH265GopRemainingFrameInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_video_decode_h264 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR ) == sizeof( VkVideoDecodeH264ProfileInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR>::value, "VideoDecodeH264ProfileInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR ) == sizeof( VkVideoDecodeH264CapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR>::value, "VideoDecodeH264CapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR ) == sizeof( VkVideoDecodeH264SessionParametersCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR>::value, "VideoDecodeH264SessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR ) == sizeof( VkVideoDecodeH264SessionParametersAddInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR>::value, "VideoDecodeH264SessionParametersAddInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR ) == sizeof( VkVideoDecodeH264PictureInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR>::value, "VideoDecodeH264PictureInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR ) == sizeof( VkVideoDecodeH264DpbSlotInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR>::value, "VideoDecodeH264DpbSlotInfoKHR is not nothrow_move_constructible!"
);
+
+//=== VK_AMD_texture_gather_bias_lod ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>::value, "TextureLODGatherFormatPropertiesAMD is not nothrow_move_constructible!" );
+
+//=== VK_AMD_shader_info ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD>::value, "ShaderResourceUsageAMD is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD>::value, "ShaderStatisticsInfoAMD is not nothrow_move_constructible!" );
+
+//=== VK_KHR_dynamic_rendering ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR ) == sizeof( VkRenderingFragmentShadingRateAttachmentInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR>::value, "RenderingFragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT ) == sizeof( VkRenderingFragmentDensityMapAttachmentInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>::value, "RenderingFragmentDensityMapAttachmentInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD ) == sizeof( VkAttachmentSampleCountInfoAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD>::value, "AttachmentSampleCountInfoAMD is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX ) == sizeof( VkMultiviewPerViewAttributesInfoNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX>::value, "MultiviewPerViewAttributesInfoNVX is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_GGP )
+//=== VK_GGP_stream_descriptor_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), "struct and wrapper have different size!"
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP>::value, "StreamDescriptorSurfaceCreateInfoGGP is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_GGP*/
+
+//=== VK_NV_corner_sampled_image ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>::value, "PhysicalDeviceCornerSampledImageFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_external_memory_capabilities ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::value, "ExternalImageFormatPropertiesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_external_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV>::value, "ExternalMemoryImageCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV>::value, "ExportMemoryAllocateInfoNV is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_NV_external_memory_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV>::value, "ImportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV>::value, "ExportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_NV_win32_keyed_mutex ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV>::value, "Win32KeyedMutexAcquireReleaseInfoNV is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_EXT_validation_flags ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT>::value, "ValidationFlagsEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+//=== VK_NN_vi_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN>::value, "ViSurfaceCreateInfoNN is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+//=== VK_EXT_astc_decode_mode ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>::value, "ImageViewASTCDecodeModeEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT>::value, "PhysicalDeviceASTCDecodeFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pipeline_robustness ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT ) == sizeof( VkPhysicalDevicePipelineRobustnessFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT>::value, "PhysicalDevicePipelineRobustnessFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT ) == sizeof( VkPhysicalDevicePipelineRobustnessPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT>::value, "PhysicalDevicePipelineRobustnessPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT ) == sizeof( VkPipelineRobustnessCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT>::value, "PipelineRobustnessCreateInfoEXT is not nothrow_move_constructible!" );
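+
+// NOTE (editor's sketch, not part of the generated header): the size and
+// standard-layout assertions above are what make it well-defined to hand a
+// C++ wrapper straight to the C API by pointer cast. A minimal illustration,
+// assuming <vulkan/vulkan.hpp> is included:
+#if 0
+void interopSketch()
+{
+  VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT decodeMode{};
+  // Same size and layout, so the wrapper can be reinterpreted as the C struct.
+  auto const * cStruct = reinterpret_cast<const VkImageViewASTCDecodeModeEXT *>( &decodeMode );
+  (void)cStruct;
+}
+#endif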
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_KHR_external_memory_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR>::value, "ImportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR>::value, "ExportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::value, "MemoryWin32HandlePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR>::value, "MemoryGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_KHR_external_memory_fd ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR>::value, "ImportMemoryFdInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::value, "MemoryFdPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR>::value, "MemoryGetFdInfoKHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_KHR_win32_keyed_mutex ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR>::value, "Win32KeyedMutexAcquireReleaseInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_KHR_external_semaphore_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR>::value, "ImportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>::value, "ExportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>::value, "D3D12FenceSubmitInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR>::value, "SemaphoreGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_KHR_external_semaphore_fd ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR>::value, "ImportSemaphoreFdInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR>::value, "SemaphoreGetFdInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_push_descriptor ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR>::value, "PhysicalDevicePushDescriptorPropertiesKHR is not nothrow_move_constructible!" );
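+
+// NOTE (editor's sketch, not part of the generated header): the three checks
+// repeated for every struct could be folded into a single variable template;
+// the helper name below is hypothetical, not vulkan.hpp API:
+#if 0
+#include <type_traits>
+
+template <typename Wrapper, typename CStruct>
+constexpr bool isCCompatibleV = ( sizeof( Wrapper ) == sizeof( CStruct ) ) &&
+                                std::is_standard_layout<Wrapper>::value &&
+                                std::is_nothrow_move_constructible<Wrapper>::value;
+
+static_assert( isCCompatibleV<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR, VkSemaphoreGetFdInfoKHR>, "wrapper/C struct mismatch!" );
+#endif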
+
+//=== VK_EXT_conditional_rendering ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT>::value, "ConditionalRenderingBeginInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>::value, "PhysicalDeviceConditionalRenderingFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>::value, "CommandBufferInheritanceConditionalRenderingInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_incremental_present ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentRegionsKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentRegionsKHR>::value, "PresentRegionsKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentRegionKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentRegionKHR>::value, "PresentRegionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RectLayerKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RectLayerKHR>::value, "RectLayerKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_clip_space_w_scaling ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>::value, "ViewportWScalingNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV>::value, "PipelineViewportWScalingStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_display_surface_counter ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::value, "SurfaceCapabilities2EXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_display_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT>::value, "DisplayPowerInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>::value, "DeviceEventInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT>::value, "DisplayEventInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT>::value, "SwapchainCounterCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_GOOGLE_display_timing ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::value, "RefreshCycleDurationGOOGLE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value, "PastPresentationTimingGOOGLE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE>::value, "PresentTimesInfoGOOGLE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>::value, "PresentTimeGOOGLE is not nothrow_move_constructible!" );
+
+//=== VK_NVX_multiview_per_view_attributes ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value, "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX is not nothrow_move_constructible!" );
+
+//=== VK_NV_viewport_swizzle ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV>::value, "ViewportSwizzleNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV>::value, "PipelineViewportSwizzleStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_discard_rectangles ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>::value, "PhysicalDeviceDiscardRectanglePropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>::value, "PipelineDiscardRectangleStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_conservative_rasterization ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>::value, "PhysicalDeviceConservativeRasterizationPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>::value, "PipelineRasterizationConservativeStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_depth_clip_enable ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT>::value, "PhysicalDeviceDepthClipEnableFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT>::value, "PipelineRasterizationDepthClipStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_hdr_metadata ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HdrMetadataEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HdrMetadataEXT>::value, "HdrMetadataEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XYColorEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XYColorEXT>::value, "XYColorEXT is not nothrow_move_constructible!" );
+
+//=== VK_IMG_relaxed_line_rasterization ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRelaxedLineRasterizationFeaturesIMG ) == sizeof( VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRelaxedLineRasterizationFeaturesIMG>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRelaxedLineRasterizationFeaturesIMG>::value, "PhysicalDeviceRelaxedLineRasterizationFeaturesIMG is not nothrow_move_constructible!" );
+
+//=== VK_KHR_shared_presentable_image ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>::value, "SharedPresentSurfaceCapabilitiesKHR is not nothrow_move_constructible!" );
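+
+// NOTE (editor's note, not part of the generated header): the nothrow-move
+// assertions also matter for container use, since std::vector relocates its
+// elements by move during growth only when the move constructor is noexcept.
+// A minimal illustration, assuming <vector> and <vulkan/vulkan.hpp>:
+#if 0
+void containerSketch()
+{
+  std::vector<VULKAN_HPP_NAMESPACE::XYColorEXT> primaries;
+  primaries.resize( 3 );  // reallocation moves elements instead of copying
+}
+#endif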
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_KHR_external_fence_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR>::value, "ImportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR>::value, "ExportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR>::value, "FenceGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_KHR_external_fence_fd ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR>::value, "ImportFenceFdInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR>::value, "FenceGetFdInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_performance_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value, "PhysicalDevicePerformanceQueryFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>::value, "PhysicalDevicePerformanceQueryPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value, "PerformanceCounterKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value, "PerformanceCounterDescriptionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>::value, "QueryPoolPerformanceCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) == sizeof( VkPerformanceCounterResultKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR>::value, "PerformanceCounterResultKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>::value, "AcquireProfilingLockInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>::value, "PerformanceQuerySubmitInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_get_surface_capabilities2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>::value, "PhysicalDeviceSurfaceInfo2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value, "SurfaceCapabilities2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, "SurfaceFormat2KHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_get_display_properties2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, "DisplayProperties2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value, "DisplayPlaneProperties2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value, "DisplayModeProperties2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>::value, "DisplayPlaneInfo2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::value, "DisplayPlaneCapabilities2KHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+//=== VK_MVK_ios_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>::value, "IOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+//=== VK_MVK_macos_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>::value, "MacOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+//=== VK_EXT_debug_utils ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>::value, "DebugUtilsLabelEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>::value, "DebugUtilsMessengerCallbackDataEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>::value, "DebugUtilsMessengerCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::value, "DebugUtilsMessengerEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>::value, "DebugUtilsObjectNameInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>::value, "DebugUtilsObjectTagInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+//=== VK_ANDROID_external_memory_android_hardware_buffer ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>::value, "AndroidHardwareBufferUsageANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::value, "AndroidHardwareBufferPropertiesANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value, "AndroidHardwareBufferFormatPropertiesANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>::value, "ImportAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>::value, "MemoryGetAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>::value, "ExternalFormatANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID ) == sizeof( VkAndroidHardwareBufferFormatProperties2ANDROID ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value, "AndroidHardwareBufferFormatProperties2ANDROID is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_AMDX_shader_enqueue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX ) == sizeof( VkPhysicalDeviceShaderEnqueueFeaturesAMDX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX>::value, "PhysicalDeviceShaderEnqueueFeaturesAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX ) == sizeof( VkPhysicalDeviceShaderEnqueuePropertiesAMDX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX>::value, "PhysicalDeviceShaderEnqueuePropertiesAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX ) == sizeof( VkExecutionGraphPipelineScratchSizeAMDX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::value, "ExecutionGraphPipelineScratchSizeAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX ) == sizeof( VkExecutionGraphPipelineCreateInfoAMDX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX>::value, "ExecutionGraphPipelineCreateInfoAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX ) == sizeof( VkDispatchGraphInfoAMDX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX>::value, "DispatchGraphInfoAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX ) == sizeof( VkDispatchGraphCountInfoAMDX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX>::value, "DispatchGraphCountInfoAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX ) == sizeof( VkPipelineShaderStageNodeCreateInfoAMDX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX>::value, "PipelineShaderStageNodeCreateInfoAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX ) == sizeof( VkDeviceOrHostAddressConstAMDX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX>::value, "DeviceOrHostAddressConstAMDX is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+//=== VK_EXT_sample_locations ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SampleLocationEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SampleLocationEXT>::value, "SampleLocationEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>::value, "SampleLocationsInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT>::value, "AttachmentSampleLocationsEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT>::value, "SubpassSampleLocationsEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT>::value, "RenderPassSampleLocationsBeginInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT>::value, "PipelineSampleLocationsStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>::value, "PhysicalDeviceSampleLocationsPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>::value, "MultisamplePropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_blend_operation_advanced ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value, "PhysicalDeviceBlendOperationAdvancedFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value, "PhysicalDeviceBlendOperationAdvancedPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT>::value, "PipelineColorBlendAdvancedStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_NV_fragment_coverage_to_color ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV>::value, "PipelineCoverageToColorStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_KHR_acceleration_structure ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) == sizeof( VkDeviceOrHostAddressKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR>::value, "DeviceOrHostAddressKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) == sizeof( VkDeviceOrHostAddressConstKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR>::value, "DeviceOrHostAddressConstKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR ) == sizeof( VkAccelerationStructureBuildRangeInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR>::value, "AccelerationStructureBuildRangeInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AabbPositionsKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AabbPositionsKHR>::value, "AabbPositionsKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR ) == sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR>::value, "AccelerationStructureGeometryTrianglesDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TransformMatrixKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TransformMatrixKHR>::value, "TransformMatrixKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR ) == sizeof( VkAccelerationStructureBuildGeometryInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR>::value, "AccelerationStructureBuildGeometryInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR ) == sizeof( VkAccelerationStructureGeometryAabbsDataKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR>::value, "AccelerationStructureGeometryAabbsDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR>::value, "AccelerationStructureInstanceKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR ) == sizeof( VkAccelerationStructureGeometryInstancesDataKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR>::value, "AccelerationStructureGeometryInstancesDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) == sizeof( VkAccelerationStructureGeometryDataKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR>::value, "AccelerationStructureGeometryDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR>::value, "AccelerationStructureGeometryKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR>::value, "AccelerationStructureCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ), "handle and wrapper have different size!" );
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR ) == sizeof( VkWriteDescriptorSetAccelerationStructureKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "WriteDescriptorSetAccelerationStructureKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR ) == + sizeof( VkPhysicalDeviceAccelerationStructureFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceAccelerationStructureFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR ) == + sizeof( VkPhysicalDeviceAccelerationStructurePropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceAccelerationStructurePropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR ) == sizeof( VkAccelerationStructureDeviceAddressInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureDeviceAddressInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR ) == sizeof( VkAccelerationStructureVersionInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureVersionInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR ) == sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyAccelerationStructureToMemoryInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyMemoryToAccelerationStructureInfoKHR is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyAccelerationStructureInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR ) == sizeof( VkAccelerationStructureBuildSizesInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureBuildSizesInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_ray_tracing_pipeline === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RayTracingShaderGroupCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RayTracingPipelineCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR ) == + sizeof( VkPhysicalDeviceRayTracingPipelineFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingPipelineFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR ) == + sizeof( VkPhysicalDeviceRayTracingPipelinePropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingPipelinePropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "StridedDeviceAddressRegionKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
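+
+// NOTE (editorial): a minimal sketch of what the assertions above guarantee in
+// practice. Because each wrapper is size-compatible and standard-layout with its
+// C counterpart, a vulkan.hpp struct can be handed to a C entry point by pointer
+// reinterpretation. The function below is illustrative only and not part of this
+// generated header:
+//
+//   void querySizesExample( VkDevice device, VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR const & buildInfo )
+//   {
+//     uint32_t const                                               maxPrimitiveCount = 1;
+//     VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizesInfo;
+//     // Safe only because of the sizeof/standard-layout assertions above:
+//     vkGetAccelerationStructureBuildSizesKHR( device,
+//                                              VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR,
+//                                              reinterpret_cast<VkAccelerationStructureBuildGeometryInfoKHR const *>( &buildInfo ),
+//                                              &maxPrimitiveCount,
+//                                              reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizesInfo ) );
+//   }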
+
+//=== VK_KHR_ray_tracing_pipeline ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR>::value, "RayTracingShaderGroupCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>::value, "RayTracingPipelineCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR ) == sizeof( VkPhysicalDeviceRayTracingPipelineFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>::value, "PhysicalDeviceRayTracingPipelineFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR ) == sizeof( VkPhysicalDeviceRayTracingPipelinePropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>::value, "PhysicalDeviceRayTracingPipelinePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR>::value, "StridedDeviceAddressRegionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR>::value, "TraceRaysIndirectCommandKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR ) == sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR>::value, "RayTracingPipelineInterfaceCreateInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_ray_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR ) == sizeof( VkPhysicalDeviceRayQueryFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>::value, "PhysicalDeviceRayQueryFeaturesKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_framebuffer_mixed_samples ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV>::value, "PipelineCoverageModulationStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_shader_sm_builtins ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value, "PhysicalDeviceShaderSMBuiltinsPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value, "PhysicalDeviceShaderSMBuiltinsFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_drm_format_modifier ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>::value, "DrmFormatModifierPropertiesListEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value, "DrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>::value, "PhysicalDeviceImageDrmFormatModifierInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT>::value, "ImageDrmFormatModifierListCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT>::value, "ImageDrmFormatModifierExplicitCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::value, "ImageDrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT ) == sizeof( VkDrmFormatModifierPropertiesList2EXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>::value, "DrmFormatModifierPropertiesList2EXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT ) == sizeof( VkDrmFormatModifierProperties2EXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT>::value, "DrmFormatModifierProperties2EXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_validation_cache ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::value, "ValidationCacheEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT>::value, "ValidationCacheCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT>::value, "ShaderModuleValidationCacheCreateInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_KHR_portability_subset ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR>::value, "PhysicalDevicePortabilitySubsetFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>::value, "PhysicalDevicePortabilitySubsetPropertiesKHR is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+//=== VK_NV_shading_rate_image ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV>::value, "ShadingRatePaletteNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV>::value, "PipelineViewportShadingRateImageStateCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>::value, "PhysicalDeviceShadingRateImageFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV>::value, "PhysicalDeviceShadingRateImagePropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV>::value, "CoarseSampleLocationNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV>::value, "CoarseSampleOrderCustomNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value, "PipelineViewportCoarseSampleOrderStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_ray_tracing ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV>::value, "RayTracingShaderGroupCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV>::value, "RayTracingPipelineCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV>::value, "GeometryTrianglesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryAABBNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryAABBNV>::value, "GeometryAABBNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryDataNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryDataNV>::value, "GeometryDataNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryNV>::value, "GeometryNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV>::value, "AccelerationStructureInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV>::value, "AccelerationStructureCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::value, "AccelerationStructureNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV>::value, "BindAccelerationStructureMemoryInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV>::value, "WriteDescriptorSetAccelerationStructureNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>::value, "AccelerationStructureMemoryRequirementsInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV>::value, "PhysicalDeviceRayTracingPropertiesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_representative_fragment_test ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value, "PhysicalDeviceRepresentativeFragmentTestFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV ) == sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>::value, "PipelineRepresentativeFragmentTestStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_filter_cubic ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>::value, "PhysicalDeviceImageViewImageFormatInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>::value, "FilterCubicImageViewImageFormatPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_external_memory_host ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT>::value, "ImportMemoryHostPointerInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::value, "MemoryHostPointerPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT>::value, "PhysicalDeviceExternalMemoryHostPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_shader_clock ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR>::value, "PhysicalDeviceShaderClockFeaturesKHR is not nothrow_move_constructible!" );
+
+//=== VK_AMD_pipeline_compiler_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>::value, "PipelineCompilerControlCreateInfoAMD is not nothrow_move_constructible!" );
+
+//=== VK_AMD_shader_core_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>::value, "PhysicalDeviceShaderCorePropertiesAMD is not nothrow_move_constructible!" );
+
+//=== VK_KHR_video_decode_h265 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR ) == sizeof( VkVideoDecodeH265ProfileInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR>::value, "VideoDecodeH265ProfileInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR ) == sizeof( VkVideoDecodeH265CapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR>::value, "VideoDecodeH265CapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR ) == sizeof( VkVideoDecodeH265SessionParametersCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR>::value, "VideoDecodeH265SessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR ) == sizeof( VkVideoDecodeH265SessionParametersAddInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR>::value, "VideoDecodeH265SessionParametersAddInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR ) == sizeof( VkVideoDecodeH265PictureInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR>::value, "VideoDecodeH265PictureInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR ) == sizeof( VkVideoDecodeH265DpbSlotInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR>::value, "VideoDecodeH265DpbSlotInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_global_priority ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>::value, "DeviceQueueGlobalPriorityCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR ) == sizeof( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR>::value, "PhysicalDeviceGlobalPriorityQueryFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR ) == sizeof( VkQueueFamilyGlobalPriorityPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>::value, "QueueFamilyGlobalPriorityPropertiesKHR is not nothrow_move_constructible!" );
+
+//=== VK_AMD_memory_overallocation_behavior ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>::value, "DeviceMemoryOverallocationCreateInfoAMD is not nothrow_move_constructible!" );
+
+//=== VK_EXT_vertex_attribute_divisor ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value, "PhysicalDeviceVertexAttributeDivisorPropertiesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_GGP )
+//=== VK_GGP_frame_token ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>::value, "PresentFrameTokenGGP is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_GGP*/
+
+//=== VK_NV_compute_shader_derivatives ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value, "PhysicalDeviceComputeShaderDerivativesFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_mesh_shader ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>::value, "PhysicalDeviceMeshShaderFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>::value, "PhysicalDeviceMeshShaderPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>::value, "DrawMeshTasksIndirectCommandNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_shader_image_footprint ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>::value, "PhysicalDeviceShaderImageFootprintFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_scissor_exclusive ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV>::value, "PipelineViewportExclusiveScissorStateCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV>::value, "PhysicalDeviceExclusiveScissorFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_device_diagnostic_checkpoints ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>::value, "QueueFamilyCheckpointPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, "CheckpointDataNV is not nothrow_move_constructible!" );
+
+//=== VK_INTEL_shader_integer_functions2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value, "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL is not nothrow_move_constructible!" );
+
+//=== VK_INTEL_performance_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) == sizeof( VkPerformanceValueDataINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL>::value, "PerformanceValueDataINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::value, "PerformanceValueINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>::value, "InitializePerformanceApiInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL ) == sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>::value, "QueryPoolPerformanceQueryCreateInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value, "PerformanceMarkerInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL>::value, "PerformanceStreamMarkerInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL>::value, "PerformanceOverrideInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL>::value, "PerformanceConfigurationAcquireInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::value, "PerformanceConfigurationINTEL is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pci_bus_info ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>::value, "PhysicalDevicePCIBusInfoPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_AMD_display_native_hdr ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD>::value, "DisplayNativeHdrSurfaceCapabilitiesAMD is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD>::value, "SwapchainDisplayNativeHdrCreateInfoAMD is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+//=== VK_FUCHSIA_imagepipe_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA>::value, "ImagePipeSurfaceCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+//=== VK_EXT_metal_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT>::value, "MetalSurfaceCreateInfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+//=== VK_EXT_fragment_density_map ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT>::value, "PhysicalDeviceFragmentDensityMapFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>::value, "PhysicalDeviceFragmentDensityMapPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT>::value, "RenderPassFragmentDensityMapCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_fragment_shading_rate ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR ) == sizeof( VkFragmentShadingRateAttachmentInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>::value, "FragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR ) == sizeof( VkPipelineFragmentShadingRateStateCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR>::value, "PipelineFragmentShadingRateStateCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR>::value, "PhysicalDeviceFragmentShadingRateFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRatePropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>::value, "PhysicalDeviceFragmentShadingRatePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value, "PhysicalDeviceFragmentShadingRateKHR is not nothrow_move_constructible!" );
+
+//=== VK_AMD_shader_core_properties2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD>::value, "PhysicalDeviceShaderCoreProperties2AMD is not nothrow_move_constructible!" );
+
+//=== VK_AMD_device_coherent_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value, "PhysicalDeviceCoherentMemoryFeaturesAMD is not nothrow_move_constructible!" );
+
+//=== VK_EXT_shader_image_atomic_int64 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) == sizeof( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value, "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_memory_budget ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>::value, "PhysicalDeviceMemoryBudgetPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_memory_priority ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT>::value, "PhysicalDeviceMemoryPriorityFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>::value, "MemoryPriorityAllocateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_surface_protected_capabilities ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR>::value, "SurfaceProtectedCapabilitiesKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_dedicated_allocation_image_aliasing ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_buffer_device_address ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value, "PhysicalDeviceBufferDeviceAddressFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT>::value, "BufferDeviceAddressCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_validation_features ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT>::value, "ValidationFeaturesEXT is not nothrow_move_constructible!" );
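+
+// NOTE (editorial): a small illustrative sketch (not part of the generated header)
+// of why the layout guarantee for ValidationFeaturesEXT matters: the wrapper can
+// sit directly in a pNext chain that the C loader and layers walk byte-for-byte
+// as a VkValidationFeaturesEXT:
+//
+//   VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT enables[] = { VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT::eBestPractices };
+//   VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT      validationFeatures;
+//   validationFeatures.enabledValidationFeatureCount = 1;
+//   validationFeatures.pEnabledValidationFeatures    = enables;
+//   VULKAN_HPP_NAMESPACE::InstanceCreateInfo createInfo;
+//   createInfo.pNext = &validationFeatures;  // consumed by C code as VkValidationFeaturesEXT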
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePresentWaitFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_NV_cooperative_matrix === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CooperativeMatrixPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrixFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV ) == + sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrixPropertiesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_coverage_reduction_mode === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV ) == + sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCoverageReductionModeFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCoverageReductionStateCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FramebufferMixedSamplesCombinationNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_fragment_shader_interlock === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == + sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShaderInterlockFeaturesEXT is not nothrow_move_constructible!" 
+
+//=== VK_EXT_ycbcr_image_arrays ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value, "PhysicalDeviceYcbcrImageArraysFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_provoking_vertex ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT ) == sizeof( VkPhysicalDeviceProvokingVertexFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>::value, "PhysicalDeviceProvokingVertexFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT ) == sizeof( VkPhysicalDeviceProvokingVertexPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>::value, "PhysicalDeviceProvokingVertexPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value, "PipelineRasterizationProvokingVertexStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_EXT_full_screen_exclusive ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT>::value, "SurfaceFullScreenExclusiveInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>::value, "SurfaceCapabilitiesFullScreenExclusiveEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT>::value, "SurfaceFullScreenExclusiveWin32InfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_EXT_headless_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>::value, "HeadlessSurfaceCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_line_rasterization ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT>::value, "PhysicalDeviceLineRasterizationFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>::value, "PhysicalDeviceLineRasterizationPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT>::value, "PipelineRasterizationLineStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_shader_atomic_float ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value, "PhysicalDeviceShaderAtomicFloatFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_index_type_uint8 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value, "PhysicalDeviceIndexTypeUint8FeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_extended_dynamic_state ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT ) == sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value, "PhysicalDeviceExtendedDynamicStateFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_deferred_host_operations ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::value, "DeferredOperationKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_pipeline_executable_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value, "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>::value, "PipelineInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value, "PipelineExecutablePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR>::value, "PipelineExecutableInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) == sizeof( VkPipelineExecutableStatisticValueKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR>::value, "PipelineExecutableStatisticValueKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value, "PipelineExecutableStatisticKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR ) == sizeof( VkPipelineExecutableInternalRepresentationKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value, "PipelineExecutableInternalRepresentationKHR is not nothrow_move_constructible!" );
+
+//=== VK_EXT_host_image_copy ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT ) == sizeof( VkPhysicalDeviceHostImageCopyFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT>::value, "PhysicalDeviceHostImageCopyFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT ) == sizeof( VkPhysicalDeviceHostImageCopyPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT>::value, "PhysicalDeviceHostImageCopyPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT ) == sizeof( VkMemoryToImageCopyEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT>::value, "MemoryToImageCopyEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT ) == sizeof( VkImageToMemoryCopyEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT>::value, "ImageToMemoryCopyEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT ) == sizeof( VkCopyMemoryToImageInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT>::value, "CopyMemoryToImageInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT ) == sizeof( VkCopyImageToMemoryInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT>::value, "CopyImageToMemoryInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT ) == sizeof( VkCopyImageToImageInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT>::value, "CopyImageToImageInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT ) == sizeof( VkHostImageLayoutTransitionInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT>::value, "HostImageLayoutTransitionInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT ) == sizeof( VkSubresourceHostMemcpySizeEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT>::value, "SubresourceHostMemcpySizeEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT ) == sizeof( VkHostImageCopyDevicePerformanceQueryEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT>::value, "HostImageCopyDevicePerformanceQueryEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_map_memory2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR ) == sizeof( VkMemoryMapInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR>::value, "MemoryMapInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR ) == sizeof( VkMemoryUnmapInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR>::value, "MemoryUnmapInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_EXT_shader_atomic_float2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT ) == sizeof( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>::value, "PhysicalDeviceShaderAtomicFloat2FeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_surface_maintenance1 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT ) == sizeof( VkSurfacePresentModeEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT>::value, "SurfacePresentModeEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT ) == sizeof( VkSurfacePresentScalingCapabilitiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT>::value, "SurfacePresentScalingCapabilitiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT ) == sizeof( VkSurfacePresentModeCompatibilityEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT>::value, "SurfacePresentModeCompatibilityEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_swapchain_maintenance1 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT ) == sizeof( VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT>::value, "PhysicalDeviceSwapchainMaintenance1FeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT ) == sizeof( VkSwapchainPresentFenceInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT>::value, "SwapchainPresentFenceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT ) == sizeof( VkSwapchainPresentModesCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT>::value, "SwapchainPresentModesCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT ) == sizeof( VkSwapchainPresentModeInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT>::value, "SwapchainPresentModeInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT ) == sizeof( VkSwapchainPresentScalingCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT>::value, "SwapchainPresentScalingCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT ) == sizeof( VkReleaseSwapchainImagesInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT>::value, "ReleaseSwapchainImagesInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_NV_device_generated_commands ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value, "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value, "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>::value, "GraphicsShaderGroupCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV ) == sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>::value, "GraphicsPipelineShaderGroupsCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>::value, "BindShaderGroupIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV>::value, "BindIndexBufferIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>::value, "BindVertexBufferIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>::value, "SetStateFlagsIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::value, "IndirectCommandsLayoutNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV>::value, "IndirectCommandsStreamNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>::value, "IndirectCommandsLayoutTokenNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>::value, "IndirectCommandsLayoutCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV>::value, "GeneratedCommandsInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV ) == sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV>::value, "GeneratedCommandsMemoryRequirementsInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_inherited_viewport_scissor ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV ) == sizeof( VkPhysicalDeviceInheritedViewportScissorFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV>::value, "PhysicalDeviceInheritedViewportScissorFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV ) == sizeof( VkCommandBufferInheritanceViewportScissorInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>::value, "CommandBufferInheritanceViewportScissorInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_texel_buffer_alignment ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value, "PhysicalDeviceTexelBufferAlignmentFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_render_pass_transform ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM>::value, "RenderPassTransformBeginInfoQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM ) == sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>::value, "CommandBufferInheritanceRenderPassTransformInfoQCOM is not nothrow_move_constructible!" );
+
+//=== VK_EXT_depth_bias_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthBiasControlFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT>::value, "PhysicalDeviceDepthBiasControlFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT ) == sizeof( VkDepthBiasInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT>::value, "DepthBiasInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT ) == sizeof( VkDepthBiasRepresentationInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT>::value, "DepthBiasRepresentationInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_device_memory_report ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT ) == sizeof( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT>::value, "PhysicalDeviceDeviceMemoryReportFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT ) == sizeof( VkDeviceDeviceMemoryReportCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT>::value, "DeviceDeviceMemoryReportCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT ) == sizeof( VkDeviceMemoryReportCallbackDataEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT>::value, "DeviceMemoryReportCallbackDataEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_robustness2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT ) == sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT>::value, "PhysicalDeviceRobustness2FeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT ) == sizeof( VkPhysicalDeviceRobustness2PropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT>::value, "PhysicalDeviceRobustness2PropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_custom_border_color ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT ) == sizeof( VkSamplerCustomBorderColorCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT>::value, "SamplerCustomBorderColorCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>::value, "PhysicalDeviceCustomBorderColorPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value, "PhysicalDeviceCustomBorderColorFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_pipeline_library ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>::value, "PipelineLibraryCreateInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_present_barrier ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV ) == sizeof( VkPhysicalDevicePresentBarrierFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>::value, "PhysicalDevicePresentBarrierFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV ) == sizeof( VkSurfaceCapabilitiesPresentBarrierNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV>::value, "SurfaceCapabilitiesPresentBarrierNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV ) == sizeof( VkSwapchainPresentBarrierCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>::value, "SwapchainPresentBarrierCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_KHR_present_id ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentIdKHR ) == sizeof( VkPresentIdKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value, "PresentIdKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR ) == sizeof( VkPhysicalDevicePresentIdFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>::value, "PhysicalDevicePresentIdFeaturesKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_video_encode_queue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR ) == sizeof( VkVideoEncodeInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR>::value, "VideoEncodeInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR ) == sizeof( VkVideoEncodeCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR>::value, "VideoEncodeCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR ) == sizeof( VkQueryPoolVideoEncodeFeedbackCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR>::value, "QueryPoolVideoEncodeFeedbackCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR ) == sizeof( VkVideoEncodeUsageInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR>::value, "VideoEncodeUsageInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR ) == sizeof( VkVideoEncodeRateControlInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR>::value, "VideoEncodeRateControlInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR ) == sizeof( VkVideoEncodeRateControlLayerInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR>::value, "VideoEncodeRateControlLayerInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR ) == sizeof( VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR>::value, "PhysicalDeviceVideoEncodeQualityLevelInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR ) == sizeof( VkVideoEncodeQualityLevelPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>::value, "VideoEncodeQualityLevelPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR ) == sizeof( VkVideoEncodeQualityLevelInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR>::value, "VideoEncodeQualityLevelInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR ) == sizeof( VkVideoEncodeSessionParametersGetInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR>::value, "VideoEncodeSessionParametersGetInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR ) == sizeof( VkVideoEncodeSessionParametersFeedbackInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>::value, "VideoEncodeSessionParametersFeedbackInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_device_diagnostics_config ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV ) == sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV>::value, "PhysicalDeviceDiagnosticsConfigFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value, "DeviceDiagnosticsConfigCreateInfoNV is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_NV_cuda_kernel_launch ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaModuleNV ) == sizeof( VkCudaModuleNV ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CudaModuleNV>::value, "CudaModuleNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaFunctionNV ) == sizeof( VkCudaFunctionNV ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CudaFunctionNV>::value, "CudaFunctionNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV ) == sizeof( VkCudaModuleCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV>::value, "CudaModuleCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV ) == sizeof( VkCudaFunctionCreateInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV>::value, "CudaFunctionCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV ) == sizeof( VkCudaLaunchInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV>::value, "CudaLaunchInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV ) == sizeof( VkPhysicalDeviceCudaKernelLaunchFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV>::value, "PhysicalDeviceCudaKernelLaunchFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV ) == sizeof( VkPhysicalDeviceCudaKernelLaunchPropertiesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV>::value, "PhysicalDeviceCudaKernelLaunchPropertiesNV is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+//=== VK_NV_low_latency ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV ) == sizeof( VkQueryLowLatencySupportNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV>::value, "QueryLowLatencySupportNV is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+//=== VK_EXT_metal_objects ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT ) == sizeof( VkExportMetalObjectCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT>::value, "ExportMetalObjectCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT ) == sizeof( VkExportMetalObjectsInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>::value, "ExportMetalObjectsInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT ) == sizeof( VkExportMetalDeviceInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT>::value, "ExportMetalDeviceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT ) == sizeof( VkExportMetalCommandQueueInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT>::value, "ExportMetalCommandQueueInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT ) == sizeof( VkExportMetalBufferInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT>::value, "ExportMetalBufferInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT ) == sizeof( VkImportMetalBufferInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT>::value, "ImportMetalBufferInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT ) == sizeof( VkExportMetalTextureInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT>::value, "ExportMetalTextureInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT ) == sizeof( VkImportMetalTextureInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT>::value, "ImportMetalTextureInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT ) == sizeof( VkExportMetalIOSurfaceInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT>::value, "ExportMetalIOSurfaceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT ) == sizeof( VkImportMetalIOSurfaceInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT>::value, "ImportMetalIOSurfaceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT ) == sizeof( VkExportMetalSharedEventInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT>::value, "ExportMetalSharedEventInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT ) == sizeof( VkImportMetalSharedEventInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT>::value, "ImportMetalSharedEventInfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+//=== VK_KHR_synchronization2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV ) == sizeof( VkQueueFamilyCheckpointProperties2NV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>::value, "QueueFamilyCheckpointProperties2NV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, "CheckpointData2NV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_descriptor_buffer ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorBufferPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT>::value, "PhysicalDeviceDescriptorBufferPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT>::value, "PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorBufferFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT>::value, "PhysicalDeviceDescriptorBufferFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT ) == sizeof( VkDescriptorAddressInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT>::value, "DescriptorAddressInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT ) == sizeof( VkDescriptorBufferBindingInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT>::value, "DescriptorBufferBindingInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT ) == sizeof( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT>::value, "DescriptorBufferBindingPushDescriptorBufferHandleEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorDataEXT ) == sizeof( VkDescriptorDataEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorDataEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorDataEXT>::value, "DescriptorDataEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT ) == sizeof( VkDescriptorGetInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT>::value, "DescriptorGetInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT ) == sizeof( VkBufferCaptureDescriptorDataInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT>::value, "BufferCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT ) == sizeof( VkImageCaptureDescriptorDataInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT>::value, "ImageCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT ) == sizeof( VkImageViewCaptureDescriptorDataInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT>::value, "ImageViewCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT ) == sizeof( VkSamplerCaptureDescriptorDataInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT>::value, "SamplerCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT ) == sizeof( VkOpaqueCaptureDescriptorDataCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT>::value, "OpaqueCaptureDescriptorDataCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT ) == sizeof( VkAccelerationStructureCaptureDescriptorDataInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT>::value, "AccelerationStructureCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_graphics_pipeline_library ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT ) == sizeof( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>::value, "PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT is not nothrow_move_constructible!" );
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT ) == + sizeof( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT ) == sizeof( VkGraphicsPipelineLibraryCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GraphicsPipelineLibraryCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_AMD_shader_early_and_late_fragment_tests === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ) == + sizeof( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD is not nothrow_move_constructible!" ); + +//=== VK_KHR_fragment_shader_barycentric === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR ) == + sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShaderBarycentricFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR ) == + sizeof( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShaderBarycentricPropertiesKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_shader_subgroup_uniform_control_flow === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ) == + sizeof( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_NV_fragment_shading_rate_enums === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV ) == + sizeof( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV ) == + sizeof( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV ) == + sizeof( VkPipelineFragmentShadingRateEnumStateCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineFragmentShadingRateEnumStateCreateInfoNV is not nothrow_move_constructible!" ); + +//=== VK_NV_ray_tracing_motion_blur === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV ) == + sizeof( VkAccelerationStructureGeometryMotionTrianglesDataNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureGeometryMotionTrianglesDataNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV ) == sizeof( VkAccelerationStructureMotionInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureMotionInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV ) == sizeof( VkAccelerationStructureMotionInstanceNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureMotionInstanceNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV ) == sizeof( VkAccelerationStructureMotionInstanceDataNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureMotionInstanceDataNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV ) == + sizeof( VkAccelerationStructureMatrixMotionInstanceNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureMatrixMotionInstanceNV is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV ) == sizeof( VkAccelerationStructureSRTMotionInstanceNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureSRTMotionInstanceNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SRTDataNV ) == sizeof( VkSRTDataNV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "SRTDataNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV ) == + sizeof( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingMotionBlurFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_mesh_shader === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMeshShaderFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMeshShaderPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT ) == sizeof( VkDrawMeshTasksIndirectCommandEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DrawMeshTasksIndirectCommandEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_ycbcr_2plane_444_formats === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) == + sizeof( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_fragment_density_map2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentDensityMap2FeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT ) == + sizeof( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFragmentDensityMap2PropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_QCOM_rotated_copy_commands === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM ) == sizeof( VkCopyCommandTransformInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyCommandTransformInfoQCOM is not nothrow_move_constructible!" ); + +//=== VK_KHR_workgroup_memory_explicit_layout === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ) == + sizeof( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_EXT_image_compression_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT ) == + sizeof( VkPhysicalDeviceImageCompressionControlFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageCompressionControlFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT ) == sizeof( VkImageCompressionControlEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageCompressionControlEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT ) == sizeof( VkImageCompressionPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageCompressionPropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_attachment_feedback_loop_layout === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT ) == + sizeof( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_4444_formats === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT ) == sizeof( VkPhysicalDevice4444FormatsFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevice4444FormatsFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_device_fault === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT ) == sizeof( VkPhysicalDeviceFaultFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFaultFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT ) == sizeof( VkDeviceFaultCountsEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceFaultCountsEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT ) == sizeof( VkDeviceFaultInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceFaultInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT ) == sizeof( VkDeviceFaultAddressInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceFaultAddressInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT ) == sizeof( VkDeviceFaultVendorInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceFaultVendorInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT ) == sizeof( VkDeviceFaultVendorBinaryHeaderVersionOneEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceFaultVendorBinaryHeaderVersionOneEXT is not nothrow_move_constructible!" 
); + +//=== VK_EXT_rgba10x6_formats === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT ) == sizeof( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRGBA10X6FormatsFeaturesEXT is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) +//=== VK_EXT_directfb_surface === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT ) == sizeof( VkDirectFBSurfaceCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DirectFBSurfaceCreateInfoEXT is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +//=== VK_EXT_vertex_input_dynamic_state === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT ) == + sizeof( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVertexInputDynamicStateFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT ) == sizeof( VkVertexInputBindingDescription2EXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VertexInputBindingDescription2EXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT ) == sizeof( VkVertexInputAttributeDescription2EXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VertexInputAttributeDescription2EXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_physical_device_drm === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT ) == sizeof( VkPhysicalDeviceDrmPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDrmPropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_device_address_binding_report === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT ) == + sizeof( VkPhysicalDeviceAddressBindingReportFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceAddressBindingReportFeaturesEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT ) == sizeof( VkDeviceAddressBindingCallbackDataEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceAddressBindingCallbackDataEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_depth_clip_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipControlFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDepthClipControlFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT ) == + sizeof( VkPipelineViewportDepthClipControlCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineViewportDepthClipControlCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_primitive_topology_list_restart === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ) == + sizeof( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_FUCHSIA ) +//=== VK_FUCHSIA_external_memory === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA ) == sizeof( VkImportMemoryZirconHandleInfoFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportMemoryZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA ) == sizeof( VkMemoryZirconHandlePropertiesFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryZirconHandlePropertiesFUCHSIA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA ) == sizeof( VkMemoryGetZirconHandleInfoFUCHSIA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" 
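+// [editorial sketch, not generated code] These guarantees are what make it well-defined
+// in practice to hand a C++ wrapper to an API expecting the matching C struct: equal
+// size plus standard layout means the object representations line up, so the pointer
+// can simply be reinterpreted. A hypothetical helper (not part of this header) showing
+// the idea:
+namespace doc_sketch_cast
+{
+  template <typename CType, typename Wrapper>
+  CType * as_c_ptr( Wrapper * wrapper )
+  {
+    static_assert( sizeof( Wrapper ) == sizeof( CType ), "size mismatch" );
+    static_assert( std::is_standard_layout<Wrapper>::value, "wrapper must be standard layout" );
+    return reinterpret_cast<CType *>( wrapper );  // safe only because of the checks above
+  }
+}  // namespace doc_sketch_cast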
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+//=== VK_FUCHSIA_external_semaphore ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA ) == sizeof( VkImportSemaphoreZirconHandleInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA>::value,
+ "ImportSemaphoreZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA ) == sizeof( VkSemaphoreGetZirconHandleInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA>::value,
+ "SemaphoreGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+//=== VK_FUCHSIA_buffer_collection ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA ) == sizeof( VkBufferCollectionFUCHSIA ),
+ "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::value,
+ "BufferCollectionFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionCreateInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA>::value,
+ "BufferCollectionCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA ) == sizeof( VkImportMemoryBufferCollectionFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA>::value,
+ "ImportMemoryBufferCollectionFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionImageCreateInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA>::value,
+ "BufferCollectionImageCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA ) == sizeof( VkBufferConstraintsInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>::value,
+ "BufferConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionBufferCreateInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>::value,
+ "BufferCollectionBufferCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA ) == sizeof( VkBufferCollectionPropertiesFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::value,
+ "BufferCollectionPropertiesFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA ) == sizeof( VkSysmemColorSpaceFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA>::value,
+ "SysmemColorSpaceFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA ) == sizeof( VkImageConstraintsInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA>::value,
+ "ImageConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA ) == sizeof( VkImageFormatConstraintsInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA>::value,
+ "ImageFormatConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA ) == sizeof( VkBufferCollectionConstraintsInfoFUCHSIA ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>::value,
+ "BufferCollectionConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+//=== VK_HUAWEI_subpass_shading ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI ) == sizeof( VkSubpassShadingPipelineCreateInfoHUAWEI ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI>::value,
+ "SubpassShadingPipelineCreateInfoHUAWEI is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>::value,
+ "PhysicalDeviceSubpassShadingFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI ) ==
+ sizeof( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>::value,
+ "PhysicalDeviceSubpassShadingPropertiesHUAWEI is not nothrow_move_constructible!" );
+
+//=== VK_HUAWEI_invocation_mask ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value,
+ "PhysicalDeviceInvocationMaskFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+//=== VK_NV_external_memory_rdma ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV ) == sizeof( VkMemoryGetRemoteAddressInfoNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>::value,
+ "MemoryGetRemoteAddressInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV ) == sizeof( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>::value,
+ "PhysicalDeviceExternalMemoryRDMAFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pipeline_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT ) == sizeof( VkPipelinePropertiesIdentifierEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT>::value,
+ "PipelinePropertiesIdentifierEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT ) ==
+ sizeof( VkPhysicalDevicePipelinePropertiesFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT>::value,
+ "PhysicalDevicePipelinePropertiesFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_frame_boundary ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT ) == sizeof( VkPhysicalDeviceFrameBoundaryFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT>::value,
+ "PhysicalDeviceFrameBoundaryFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FrameBoundaryEXT ) == sizeof( VkFrameBoundaryEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FrameBoundaryEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FrameBoundaryEXT>::value,
+ "FrameBoundaryEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_multisampled_render_to_single_sampled ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>::value,
+ "PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT ) == sizeof( VkSubpassResolvePerformanceQueryEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT>::value,
+ "SubpassResolvePerformanceQueryEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT ) == sizeof( VkMultisampledRenderToSingleSampledInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT>::value,
+ "MultisampledRenderToSingleSampledInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_extended_dynamic_state2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>::value,
+ "PhysicalDeviceExtendedDynamicState2FeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+//=== VK_QNX_screen_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX ) == sizeof( VkScreenSurfaceCreateInfoQNX ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>::value,
+ "ScreenSurfaceCreateInfoQNX is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+//=== VK_EXT_color_write_enable ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceColorWriteEnableFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>::value,
+ "PhysicalDeviceColorWriteEnableFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT ) == sizeof( VkPipelineColorWriteCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>::value,
+ "PipelineColorWriteCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_primitives_generated_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT ) ==
+ sizeof( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>::value,
+ "PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_ray_tracing_maintenance1 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR ) ==
+ sizeof( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR>::value,
+ "PhysicalDeviceRayTracingMaintenance1FeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR ) == sizeof( VkTraceRaysIndirectCommand2KHR ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR>::value,
+ "TraceRaysIndirectCommand2KHR is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_view_min_lod ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT ) == sizeof( VkPhysicalDeviceImageViewMinLodFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>::value,
+ "PhysicalDeviceImageViewMinLodFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT ) == sizeof( VkImageViewMinLodCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value,
+ "ImageViewMinLodCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_multi_draw ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT ) == sizeof( VkPhysicalDeviceMultiDrawFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value,
+ "PhysicalDeviceMultiDrawFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT ) == sizeof( VkPhysicalDeviceMultiDrawPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>::value,
+ "PhysicalDeviceMultiDrawPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT ) == sizeof( VkMultiDrawInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value,
+ "MultiDrawInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT ) == sizeof( VkMultiDrawIndexedInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>::value,
+ "MultiDrawIndexedInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_2d_view_of_3d ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT ) == sizeof( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>::value,
+ "PhysicalDeviceImage2DViewOf3DFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_shader_tile_image ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderTileImageFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT>::value,
+ "PhysicalDeviceShaderTileImageFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT ) == sizeof( VkPhysicalDeviceShaderTileImagePropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT>::value,
+ "PhysicalDeviceShaderTileImagePropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_opacity_micromap ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT ) == sizeof( VkMicromapBuildInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT>::value,
+ "MicromapBuildInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapUsageEXT ) == sizeof( VkMicromapUsageEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>::value,
+ "MicromapUsageEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT ) == sizeof( VkMicromapCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>::value,
+ "MicromapCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapEXT ) == sizeof( VkMicromapEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapEXT>::value, "MicromapEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT ) == sizeof( VkPhysicalDeviceOpacityMicromapFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>::value,
+ "PhysicalDeviceOpacityMicromapFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT ) == sizeof( VkPhysicalDeviceOpacityMicromapPropertiesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>::value,
+ "PhysicalDeviceOpacityMicromapPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT ) == sizeof( VkMicromapVersionInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>::value,
+ "MicromapVersionInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT ) == sizeof( VkCopyMicromapToMemoryInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT>::value,
+ "CopyMicromapToMemoryInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT ) == sizeof( VkCopyMemoryToMicromapInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT>::value,
+ "CopyMemoryToMicromapInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT ) == sizeof( VkCopyMicromapInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>::value,
+ "CopyMicromapInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT ) == sizeof( VkMicromapBuildSizesInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>::value,
+ "MicromapBuildSizesInfoEXT is not nothrow_move_constructible!" );
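+// [editorial sketch, not generated code] Handle types such as MicromapEXT above (and
+// BufferCollectionFUCHSIA earlier) only get the size and nothrow-move checks; no
+// standard-layout assertion is emitted for them. The general shape of such a handle
+// wrapper, on a hypothetical Thing handle that is not part of Vulkan:
+namespace doc_sketch_handle
+{
+  using VkThing = struct VkThing_T *;  // opaque C handle, as the C API declares them
+  class Thing
+  {
+  public:
+    Thing() = default;
+  private:
+    VkThing m_thing = nullptr;  // the wrapper stores exactly the C handle, nothing more
+  };
+  static_assert( sizeof( Thing ) == sizeof( VkThing ), "handle and wrapper have different size!" );
+  static_assert( std::is_nothrow_move_constructible<Thing>::value, "Thing is not nothrow_move_constructible!" );
+}  // namespace doc_sketch_handle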
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT ) ==
+ sizeof( VkAccelerationStructureTrianglesOpacityMicromapEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT>::value,
+ "AccelerationStructureTrianglesOpacityMicromapEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapTriangleEXT ) == sizeof( VkMicromapTriangleEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>::value,
+ "MicromapTriangleEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_NV_displacement_micromap ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV ) ==
+ sizeof( VkPhysicalDeviceDisplacementMicromapFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV>::value,
+ "PhysicalDeviceDisplacementMicromapFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV ) ==
+ sizeof( VkPhysicalDeviceDisplacementMicromapPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV>::value,
+ "PhysicalDeviceDisplacementMicromapPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV ) ==
+ sizeof( VkAccelerationStructureTrianglesDisplacementMicromapNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV>::value,
+ "AccelerationStructureTrianglesDisplacementMicromapNV is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+//=== VK_HUAWEI_cluster_culling_shader ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI ) ==
+ sizeof( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI>::value,
+ "PhysicalDeviceClusterCullingShaderFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI ) ==
+ sizeof( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI>::value,
+ "PhysicalDeviceClusterCullingShaderPropertiesHUAWEI is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI ) ==
+ sizeof( VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI>::value,
+ "PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+//=== VK_EXT_border_color_swizzle ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>::value,
+ "PhysicalDeviceBorderColorSwizzleFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT ) ==
+ sizeof( VkSamplerBorderColorComponentMappingCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>::value,
+ "SamplerBorderColorComponentMappingCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pageable_device_local_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ) ==
+ sizeof( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>::value,
+ "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_ARM_shader_core_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM ) == sizeof( VkPhysicalDeviceShaderCorePropertiesARM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM>::value,
+ "PhysicalDeviceShaderCorePropertiesARM is not nothrow_move_constructible!" );
+
+//=== VK_ARM_scheduling_controls ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM ) == sizeof( VkDeviceQueueShaderCoreControlCreateInfoARM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM>::value,
+ "DeviceQueueShaderCoreControlCreateInfoARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM ) ==
+ sizeof( VkPhysicalDeviceSchedulingControlsFeaturesARM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM>::value,
+ "PhysicalDeviceSchedulingControlsFeaturesARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM ) ==
+ sizeof( VkPhysicalDeviceSchedulingControlsPropertiesARM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM>::value,
+ "PhysicalDeviceSchedulingControlsPropertiesARM is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_sliced_view_of_3d ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT>::value,
+ "PhysicalDeviceImageSlicedViewOf3DFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT ) == sizeof( VkImageViewSlicedCreateInfoEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT>::value,
+ "ImageViewSlicedCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_VALVE_descriptor_set_host_mapping ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE ) ==
+ sizeof( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>::value,
+ "PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE ) == sizeof( VkDescriptorSetBindingReferenceVALVE ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE>::value,
+ "DescriptorSetBindingReferenceVALVE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE ) == sizeof( VkDescriptorSetLayoutHostMappingInfoVALVE ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE>::value,
+ "DescriptorSetLayoutHostMappingInfoVALVE is not nothrow_move_constructible!" );
+
+//=== VK_EXT_depth_clamp_zero_one ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClampZeroOneFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT>::value,
+ "PhysicalDeviceDepthClampZeroOneFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_non_seamless_cube_map ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT ) ==
+ sizeof( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT>::value,
+ "PhysicalDeviceNonSeamlessCubeMapFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_ARM_render_pass_striped ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedFeaturesARM ) == sizeof( VkPhysicalDeviceRenderPassStripedFeaturesARM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedFeaturesARM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedFeaturesARM>::value,
+ "PhysicalDeviceRenderPassStripedFeaturesARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedPropertiesARM ) ==
+ sizeof( VkPhysicalDeviceRenderPassStripedPropertiesARM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedPropertiesARM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedPropertiesARM>::value,
+ "PhysicalDeviceRenderPassStripedPropertiesARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassStripeBeginInfoARM ) == sizeof( VkRenderPassStripeBeginInfoARM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassStripeBeginInfoARM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassStripeBeginInfoARM>::value,
+ "RenderPassStripeBeginInfoARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM ) == sizeof( VkRenderPassStripeInfoARM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM>::value,
+ "RenderPassStripeInfoARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassStripeSubmitInfoARM ) == sizeof( VkRenderPassStripeSubmitInfoARM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassStripeSubmitInfoARM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassStripeSubmitInfoARM>::value,
+ "RenderPassStripeSubmitInfoARM is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_fragment_density_map_offset ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM ) ==
+ sizeof( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>::value,
+ "PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM ) ==
+ sizeof( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>::value,
+ "PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM ) == sizeof( VkSubpassFragmentDensityMapOffsetEndInfoQCOM ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM>::value,
+ "SubpassFragmentDensityMapOffsetEndInfoQCOM is not nothrow_move_constructible!" );
+
+//=== VK_NV_copy_memory_indirect ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV ) == sizeof( VkCopyMemoryIndirectCommandNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV>::value,
+ "CopyMemoryIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV ) == sizeof( VkCopyMemoryToImageIndirectCommandNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV>::value,
+ "CopyMemoryToImageIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV ) == sizeof( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV>::value,
+ "PhysicalDeviceCopyMemoryIndirectFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV ) ==
+ sizeof( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV>::value,
+ "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV>::value,
+ "PhysicalDeviceCopyMemoryIndirectPropertiesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_memory_decompression ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV ) == sizeof( VkDecompressMemoryRegionNV ),
+ "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV>::value,
+ "DecompressMemoryRegionNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV ) ==
+ sizeof( VkPhysicalDeviceMemoryDecompressionFeaturesNV ),
+ "struct and wrapper have different size!"
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMemoryDecompressionFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV ) == + sizeof( VkPhysicalDeviceMemoryDecompressionPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMemoryDecompressionPropertiesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_device_generated_commands_compute === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV ) == sizeof( VkComputePipelineIndirectBufferInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ComputePipelineIndirectBufferInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV ) == sizeof( VkPipelineIndirectDeviceAddressInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineIndirectDeviceAddressInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV ) == sizeof( VkBindPipelineIndirectCommandNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindPipelineIndirectCommandNV is not nothrow_move_constructible!" ); + +//=== VK_NV_linear_color_attachment === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV ) == + sizeof( VkPhysicalDeviceLinearColorAttachmentFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLinearColorAttachmentFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_image_compression_control_swapchain === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT ) == + sizeof( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_QCOM_image_processing === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM ) == sizeof( VkImageViewSampleWeightCreateInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageViewSampleWeightCreateInfoQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM ) == sizeof( VkPhysicalDeviceImageProcessingFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageProcessingFeaturesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM ) == + sizeof( VkPhysicalDeviceImageProcessingPropertiesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageProcessingPropertiesQCOM is not nothrow_move_constructible!" ); + +//=== VK_EXT_nested_command_buffer === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferFeaturesEXT ) == + sizeof( VkPhysicalDeviceNestedCommandBufferFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceNestedCommandBufferFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferPropertiesEXT ) == + sizeof( VkPhysicalDeviceNestedCommandBufferPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceNestedCommandBufferPropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_external_memory_acquire_unmodified === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT ) == sizeof( VkExternalMemoryAcquireUnmodifiedEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalMemoryAcquireUnmodifiedEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_extended_dynamic_state3 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT ) == + sizeof( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExtendedDynamicState3FeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT ) == + sizeof( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExtendedDynamicState3PropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT ) == sizeof( VkColorBlendEquationEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ColorBlendEquationEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT ) == sizeof( VkColorBlendAdvancedEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ColorBlendAdvancedEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_subpass_merge_feedback === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT ) == + sizeof( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceSubpassMergeFeedbackFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT ) == sizeof( VkRenderPassCreationControlEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassCreationControlEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT ) == sizeof( VkRenderPassCreationFeedbackInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassCreationFeedbackInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT ) == sizeof( VkRenderPassCreationFeedbackCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassCreationFeedbackCreateInfoEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT ) == sizeof( VkRenderPassSubpassFeedbackInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassSubpassFeedbackInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT ) == sizeof( VkRenderPassSubpassFeedbackCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassSubpassFeedbackCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_LUNARG_direct_driver_loading === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG ) == sizeof( VkDirectDriverLoadingInfoLUNARG ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DirectDriverLoadingInfoLUNARG is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG ) == sizeof( VkDirectDriverLoadingListLUNARG ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DirectDriverLoadingListLUNARG is not nothrow_move_constructible!" ); + +//=== VK_EXT_shader_module_identifier === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderModuleIdentifierFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT ) == + sizeof( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderModuleIdentifierPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT ) == + sizeof( VkPipelineShaderStageModuleIdentifierCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineShaderStageModuleIdentifierCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT ) == sizeof( VkShaderModuleIdentifierEXT ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShaderModuleIdentifierEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_rasterization_order_attachment_access === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT ) == + sizeof( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_NV_optical_flow === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV ) == sizeof( VkPhysicalDeviceOpticalFlowFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceOpticalFlowFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV ) == sizeof( VkPhysicalDeviceOpticalFlowPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceOpticalFlowPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV ) == sizeof( VkOpticalFlowImageFormatInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowImageFormatInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV ) == sizeof( VkOpticalFlowImageFormatPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowImageFormatPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV ) == sizeof( VkOpticalFlowSessionNV ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowSessionNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV ) == sizeof( VkOpticalFlowSessionCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowSessionCreateInfoNV is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV ) == sizeof( VkOpticalFlowSessionCreatePrivateDataInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowSessionCreatePrivateDataInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV ) == sizeof( VkOpticalFlowExecuteInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowExecuteInfoNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_legacy_dithering === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT ) == sizeof( VkPhysicalDeviceLegacyDitheringFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLegacyDitheringFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_pipeline_protected_access === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT ) == + sizeof( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineProtectedAccessFeaturesEXT is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) +//=== VK_ANDROID_external_format_resolve === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID ) == + sizeof( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalFormatResolveFeaturesANDROID is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID ) == + sizeof( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalFormatResolvePropertiesANDROID is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatResolvePropertiesANDROID ) == + sizeof( VkAndroidHardwareBufferFormatResolvePropertiesANDROID ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AndroidHardwareBufferFormatResolvePropertiesANDROID is not nothrow_move_constructible!" 
); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +//=== VK_KHR_maintenance5 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR ) == sizeof( VkPhysicalDeviceMaintenance5FeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance5FeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR ) == sizeof( VkPhysicalDeviceMaintenance5PropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance5PropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR ) == sizeof( VkRenderingAreaInfoKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingAreaInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR ) == sizeof( VkDeviceImageSubresourceInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceImageSubresourceInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2KHR ) == sizeof( VkImageSubresource2KHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageSubresource2KHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR ) == sizeof( VkSubresourceLayout2KHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubresourceLayout2KHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR ) == sizeof( VkPipelineCreateFlags2CreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCreateFlags2CreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR ) == sizeof( VkBufferUsageFlags2CreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferUsageFlags2CreateInfoKHR is not nothrow_move_constructible!" 
); + +//=== VK_KHR_ray_tracing_position_fetch === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR ) == + sizeof( VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingPositionFetchFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_EXT_shader_object === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderEXT ) == sizeof( VkShaderEXT ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ShaderEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderObjectFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderObjectFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT ) == sizeof( VkPhysicalDeviceShaderObjectPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderObjectPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT ) == sizeof( VkShaderCreateInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShaderCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_QCOM_tile_properties === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM ) == sizeof( VkPhysicalDeviceTilePropertiesFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceTilePropertiesFeaturesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TilePropertiesQCOM ) == sizeof( VkTilePropertiesQCOM ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "TilePropertiesQCOM is not nothrow_move_constructible!" ); + +//=== VK_SEC_amigo_profiling === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC ) == sizeof( VkPhysicalDeviceAmigoProfilingFeaturesSEC ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceAmigoProfilingFeaturesSEC is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC ) == sizeof( VkAmigoProfilingSubmitInfoSEC ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AmigoProfilingSubmitInfoSEC is not nothrow_move_constructible!" ); + +//=== VK_QCOM_multiview_per_view_viewports === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM ) == + sizeof( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM is not nothrow_move_constructible!" ); + +//=== VK_NV_ray_tracing_invocation_reorder === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV ) == + sizeof( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingInvocationReorderPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV ) == + sizeof( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingInvocationReorderFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_extended_sparse_address_space === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV ) == + sizeof( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV ) == + sizeof( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExtendedSparseAddressSpacePropertiesNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_mutable_descriptor_type === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT ) == + sizeof( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMutableDescriptorTypeFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT ) == sizeof( VkMutableDescriptorTypeListEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MutableDescriptorTypeListEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT ) == sizeof( VkMutableDescriptorTypeCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MutableDescriptorTypeCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_layer_settings === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LayerSettingsCreateInfoEXT ) == sizeof( VkLayerSettingsCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LayerSettingsCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LayerSettingEXT ) == sizeof( VkLayerSettingEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LayerSettingEXT is not nothrow_move_constructible!" ); + +//=== VK_ARM_shader_core_builtins === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM ) == + sizeof( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderCoreBuiltinsFeaturesARM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM ) == + sizeof( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderCoreBuiltinsPropertiesARM is not nothrow_move_constructible!" ); + +//=== VK_EXT_pipeline_library_group_handles === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT ) == + sizeof( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT is not nothrow_move_constructible!" 
); + +//=== VK_EXT_dynamic_rendering_unused_attachments === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT ) == + sizeof( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_NV_low_latency2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV ) == sizeof( VkLatencySleepModeInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LatencySleepModeInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySleepInfoNV ) == sizeof( VkLatencySleepInfoNV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LatencySleepInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV ) == sizeof( VkSetLatencyMarkerInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SetLatencyMarkerInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV ) == sizeof( VkGetLatencyMarkerInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GetLatencyMarkerInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV ) == sizeof( VkLatencyTimingsFrameReportNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LatencyTimingsFrameReportNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySubmissionPresentIdNV ) == sizeof( VkLatencySubmissionPresentIdNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LatencySubmissionPresentIdNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainLatencyCreateInfoNV ) == sizeof( VkSwapchainLatencyCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainLatencyCreateInfoNV is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV ) == sizeof( VkOutOfBandQueueTypeInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OutOfBandQueueTypeInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySurfaceCapabilitiesNV ) == sizeof( VkLatencySurfaceCapabilitiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LatencySurfaceCapabilitiesNV is not nothrow_move_constructible!" ); + +//=== VK_KHR_cooperative_matrix === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR ) == sizeof( VkCooperativeMatrixPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CooperativeMatrixPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesKHR ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrixFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesKHR ) == + sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrixPropertiesKHR is not nothrow_move_constructible!" ); + +//=== VK_QCOM_multiview_per_view_render_areas === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM ) == + sizeof( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM ) == + sizeof( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM is not nothrow_move_constructible!" 
); + +//=== VK_KHR_video_maintenance1 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR ) == sizeof( VkPhysicalDeviceVideoMaintenance1FeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVideoMaintenance1FeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoInlineQueryInfoKHR ) == sizeof( VkVideoInlineQueryInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoInlineQueryInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_NV_per_stage_descriptor_set === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerStageDescriptorSetFeaturesNV ) == + sizeof( VkPhysicalDevicePerStageDescriptorSetFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePerStageDescriptorSetFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_QCOM_image_processing2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2FeaturesQCOM ) == sizeof( VkPhysicalDeviceImageProcessing2FeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageProcessing2FeaturesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM ) == + sizeof( VkPhysicalDeviceImageProcessing2PropertiesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageProcessing2PropertiesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerBlockMatchWindowCreateInfoQCOM ) == sizeof( VkSamplerBlockMatchWindowCreateInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerBlockMatchWindowCreateInfoQCOM is not nothrow_move_constructible!" ); + +//=== VK_QCOM_filter_cubic_weights === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicWeightsFeaturesQCOM ) == sizeof( VkPhysicalDeviceCubicWeightsFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCubicWeightsFeaturesQCOM is not nothrow_move_constructible!" 
);
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM ) == sizeof( VkSamplerCubicWeightsCreateInfoQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM>::value, "SamplerCubicWeightsCreateInfoQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM ) == sizeof( VkBlitImageCubicWeightsInfoQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM>::value, "BlitImageCubicWeightsInfoQCOM is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_ycbcr_degamma ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM ) == sizeof( VkPhysicalDeviceYcbcrDegammaFeaturesQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM>::value, "PhysicalDeviceYcbcrDegammaFeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM ) == sizeof( VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM>::value, "SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_filter_cubic_clamp ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM ) == sizeof( VkPhysicalDeviceCubicClampFeaturesQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM>::value, "PhysicalDeviceCubicClampFeaturesQCOM is not nothrow_move_constructible!" );
+
+//=== VK_EXT_attachment_feedback_loop_dynamic_state ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT ) == sizeof( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT>::value, "PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_vertex_attribute_divisor ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesKHR ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesKHR>::value, "PhysicalDeviceVertexAttributeDivisorPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR ) == sizeof( VkVertexInputBindingDivisorDescriptionKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR>::value, "VertexInputBindingDivisorDescriptionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoKHR ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoKHR>::value, "PipelineVertexInputDivisorStateCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesKHR ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesKHR>::value, "PhysicalDeviceVertexAttributeDivisorFeaturesKHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+//=== VK_QNX_external_memory_screen_buffer ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX ) == sizeof( VkScreenBufferPropertiesQNX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>::value, "ScreenBufferPropertiesQNX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX ) == sizeof( VkScreenBufferFormatPropertiesQNX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX>::value, "ScreenBufferFormatPropertiesQNX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX ) == sizeof( VkImportScreenBufferInfoQNX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX>::value, "ImportScreenBufferInfoQNX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatQNX ) == sizeof( VkExternalFormatQNX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalFormatQNX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalFormatQNX>::value, "ExternalFormatQNX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX ) == sizeof( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX>::value, "PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+//=== VK_MSFT_layered_driver ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT ) == sizeof( VkPhysicalDeviceLayeredDriverPropertiesMSFT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT>::value, "PhysicalDeviceLayeredDriverPropertiesMSFT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_calibrated_timestamps ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR ) == sizeof( VkCalibratedTimestampInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR>::value, "CalibratedTimestampInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_maintenance6 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6FeaturesKHR ) == sizeof( VkPhysicalDeviceMaintenance6FeaturesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6FeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6FeaturesKHR>::value, "PhysicalDeviceMaintenance6FeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6PropertiesKHR ) == sizeof( VkPhysicalDeviceMaintenance6PropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6PropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6PropertiesKHR>::value, "PhysicalDeviceMaintenance6PropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindMemoryStatusKHR ) == sizeof( VkBindMemoryStatusKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindMemoryStatusKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindMemoryStatusKHR>::value, "BindMemoryStatusKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR ) == sizeof( VkBindDescriptorSetsInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR>::value, "BindDescriptorSetsInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR ) == sizeof( VkPushConstantsInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR>::value, "PushConstantsInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR ) == sizeof( VkPushDescriptorSetInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR>::value, "PushDescriptorSetInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR ) == sizeof( VkPushDescriptorSetWithTemplateInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR>::value, "PushDescriptorSetWithTemplateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT ) == sizeof( VkSetDescriptorBufferOffsetsInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT>::value, "SetDescriptorBufferOffsetsInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT ) == sizeof( VkBindDescriptorBufferEmbeddedSamplersInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT>::value, "BindDescriptorBufferEmbeddedSamplersInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_NV_descriptor_pool_overallocation ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV ) == sizeof( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV>::value, "PhysicalDeviceDescriptorPoolOverallocationFeaturesNV is not nothrow_move_constructible!" );
+
+#endif
diff --git a/MacroLibX/third_party/vulkan/vulkan_structs.hpp b/MacroLibX/third_party/vulkan/vulkan_structs.hpp
new file mode 100644
index 0000000..3327025
--- /dev/null
+++ b/MacroLibX/third_party/vulkan/vulkan_structs.hpp
@@ -0,0 +1,123800 @@
+// Copyright 2015-2023 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
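+
+// Illustrative note, hedged (this comment is a sketch, not part of the generated
+// registry output): every wrapper struct in this header is layout-compatible with
+// its C counterpart, setters return *this so calls can be chained, and the
+// conversion operators reinterpret_cast to the native Vk type at zero cost.
+// For example, with the AabbPositionsKHR wrapper defined below:
+//
+//   VULKAN_HPP_NAMESPACE::AabbPositionsKHR aabb;
+//   aabb.setMinX( -1.0f ).setMinY( -1.0f ).setMinZ( -1.0f )
+//       .setMaxX( 1.0f ).setMaxY( 1.0f ).setMaxZ( 1.0f );
+//   VkAabbPositionsKHR const & native = aabb;  // no copy, same memory layout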
+
+#ifndef VULKAN_STRUCTS_HPP
+#define VULKAN_STRUCTS_HPP
+
+#include <cstring>  // strcmp
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+  //===============
+  //=== STRUCTS ===
+  //===============
+
+  struct AabbPositionsKHR
+  {
+    using NativeType = VkAabbPositionsKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      AabbPositionsKHR( float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {} ) VULKAN_HPP_NOEXCEPT
+      : minX( minX_ )
+      , minY( minY_ )
+      , minZ( minZ_ )
+      , maxX( maxX_ )
+      , maxY( maxY_ )
+      , maxZ( maxZ_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AabbPositionsKHR( *reinterpret_cast<AabbPositionsKHR const *>( &rhs ) ) {}
+
+    AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AabbPositionsKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minX = minX_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minY = minY_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minZ = minZ_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxX = maxX_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxY = maxY_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxZ = maxZ_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAabbPositionsKHR *>( this );
+    }
+
+    operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAabbPositionsKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<float const &, float const &, float const &, float const &, float const &, float const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( minX, minY, minZ, maxX, maxY, maxZ );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AabbPositionsKHR const & ) const = default;
+#else
+    bool operator==( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( minX == rhs.minX ) && ( minY == rhs.minY ) && ( minZ == rhs.minZ ) && ( maxX == rhs.maxX ) && ( maxY == rhs.maxY ) && ( maxZ == rhs.maxZ );
+# endif
+    }
+
+    bool operator!=( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    float minX = {};
+    float minY = {};
+    float minZ = {};
+    float maxX = {};
+    float maxY = {};
+    float maxZ = {};
+  };
+
+  using AabbPositionsNV = AabbPositionsKHR;
+
+  union DeviceOrHostAddressConstKHR
+  {
+    using NativeType = VkDeviceOrHostAddressConstKHR;
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( const void * hostAddress_
) : hostAddress( hostAddress_ ) {} +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT + { + hostAddress = hostAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkDeviceOrHostAddressConstKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkDeviceOrHostAddressConstKHR &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; + const void * hostAddress; +#else + VkDeviceAddress deviceAddress; + const void * hostAddress; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct AccelerationStructureGeometryTrianglesDataKHR + { + using NativeType = VkAccelerationStructureGeometryTrianglesDataKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureGeometryTrianglesDataKHR( VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, + uint32_t maxVertex_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , vertexFormat( vertexFormat_ ) + , vertexData( vertexData_ ) + , vertexStride( vertexStride_ ) + , maxVertex( maxVertex_ ) + , indexType( indexType_ ) + , indexData( indexData_ ) + , transformData( transformData_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryTrianglesDataKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureGeometryTrianglesDataKHR & operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setVertexData( 
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexStride = vertexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setMaxVertex( uint32_t maxVertex_ ) VULKAN_HPP_NOEXCEPT + { + maxVertex = maxVertex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT + { + indexData = indexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & + setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT + { + transformData = transformData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vertexFormat, vertexData, vertexStride, maxVertex, indexType, indexData, transformData ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + uint32_t maxVertex = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryTrianglesDataKHR; + }; + + struct AccelerationStructureGeometryAabbsDataKHR + { + using NativeType = VkAccelerationStructureGeometryAabbsDataKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , data( data_ ) + , stride( stride_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryAabbsDataKHR( 
*reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureGeometryAabbsDataKHR & operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & + setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, data, stride ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryAabbsDataKHR; + }; + + struct AccelerationStructureGeometryInstancesDataKHR + { + using NativeType = VkAccelerationStructureGeometryInstancesDataKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , arrayOfPointers( arrayOfPointers_ ) + , data( data_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryInstancesDataKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureGeometryInstancesDataKHR & operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & + setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT + { + arrayOfPointers = arrayOfPointers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & + setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, arrayOfPointers, data ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryInstancesDataKHR; + }; + + union AccelerationStructureGeometryDataKHR + { + using NativeType = VkAccelerationStructureGeometryDataKHR; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} ) + : triangles( triangles_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ ) : aabbs( aabbs_ ) {} + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ ) + : instances( instances_ ) + { + } +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & + setTriangles( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT + { + triangles = triangles_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & + setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT + { + aabbs = aabbs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & + setInstances( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT + { + instances = instances_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkAccelerationStructureGeometryDataKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryDataKHR &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances; +#else + VkAccelerationStructureGeometryTrianglesDataKHR triangles; + VkAccelerationStructureGeometryAabbsDataKHR aabbs; 
+ VkAccelerationStructureGeometryInstancesDataKHR instances; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct AccelerationStructureGeometryKHR + { + using NativeType = VkAccelerationStructureGeometryKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureGeometryKHR( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , geometryType( geometryType_ ) + , geometry( geometry_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT + { + geometryType = geometryType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & + setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT + { + geometry = geometry_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, geometryType, geometry, flags ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; + VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {}; + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryKHR; + }; + + union DeviceOrHostAddressKHR + { + using NativeType = VkDeviceOrHostAddressKHR; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 
DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {} + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( void * hostAddress_ ) : hostAddress( hostAddress_ ) {} +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setHostAddress( void * hostAddress_ ) VULKAN_HPP_NOEXCEPT + { + hostAddress = hostAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkDeviceOrHostAddressKHR const &() const + { + return *reinterpret_cast( this ); + } + + operator VkDeviceOrHostAddressKHR &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; + void * hostAddress; +#else + VkDeviceAddress deviceAddress; + void * hostAddress; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct AccelerationStructureBuildGeometryInfoKHR + { + using NativeType = VkAccelerationStructureBuildGeometryInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, + uint32_t geometryCount_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ = {}, + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , type( type_ ) + , flags( flags_ ) + , mode( mode_ ) + , srcAccelerationStructure( srcAccelerationStructure_ ) + , dstAccelerationStructure( dstAccelerationStructure_ ) + , geometryCount( geometryCount_ ) + , pGeometries( pGeometries_ ) + , ppGeometries( ppGeometries_ ) + , scratchData( scratchData_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureBuildGeometryInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureBuildGeometryInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_, + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pGeometries_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , type( type_ ) + , flags( flags_ ) + , mode( mode_ ) + , srcAccelerationStructure( srcAccelerationStructure_ ) + , dstAccelerationStructure( dstAccelerationStructure_ ) + , geometryCount( static_cast( !geometries_.empty() ? geometries_.size() : pGeometries_.size() ) ) + , pGeometries( geometries_.data() ) + , ppGeometries( pGeometries_.data() ) + , scratchData( scratchData_ ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !geometries_.empty() + !pGeometries_.empty() ) <= 1 ); +# else + if ( 1 < ( !geometries_.empty() + !pGeometries_.empty() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::AccelerationStructureBuildGeometryInfoKHR::AccelerationStructureBuildGeometryInfoKHR: 1 < ( !geometries_.empty() + !pGeometries_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureBuildGeometryInfoKHR & operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + srcAccelerationStructure = srcAccelerationStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setDstAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + dstAccelerationStructure = dstAccelerationStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = geometryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setPGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ ) VULKAN_HPP_NOEXCEPT + { + pGeometries = pGeometries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureBuildGeometryInfoKHR & setGeometries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometries_ ) VULKAN_HPP_NOEXCEPT + { + 
geometryCount = static_cast( geometries_.size() ); + pGeometries = geometries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setPpGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT + { + ppGeometries = ppGeometries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureBuildGeometryInfoKHR & + setPGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pGeometries_ ) + VULKAN_HPP_NOEXCEPT + { + geometryCount = static_cast( pGeometries_.size() ); + ppGeometries = pGeometries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & + setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT + { + scratchData = scratchData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, type, flags, mode, srcAccelerationStructure, dstAccelerationStructure, geometryCount, pGeometries, ppGeometries, scratchData ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {}; + uint32_t geometryCount = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries = {}; + const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureBuildGeometryInfoKHR; + }; + + struct AccelerationStructureBuildRangeInfoKHR + { + using NativeType = VkAccelerationStructureBuildRangeInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( uint32_t primitiveCount_ = {}, + uint32_t primitiveOffset_ = {}, + uint32_t firstVertex_ = {}, + uint32_t transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : primitiveCount( primitiveCount_ ) + , primitiveOffset( primitiveOffset_ ) + , firstVertex( firstVertex_ ) + , transformOffset( transformOffset_ ) + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureBuildRangeInfoKHR( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureBuildRangeInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + 
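   // Hedged usage sketch (illustrative only, not part of the generated header):
+    // for an indexed triangle mesh a build range is typically filled as below,
+    // where `indexCount` is an assumed caller-side value:
+    //
+    //   VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR range;
+    //   range.setPrimitiveCount( indexCount / 3 )  // one triangle per three indices
+    //        .setPrimitiveOffset( 0 )              // byte offset into the index data
+    //        .setFirstVertex( 0 )
+    //        .setTransformOffset( 0 );
+
+    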
AccelerationStructureBuildRangeInfoKHR & operator=( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureBuildRangeInfoKHR & operator=( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT + { + primitiveCount = primitiveCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT + { + primitiveOffset = primitiveOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + { + firstVertex = firstVertex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT + { + transformOffset = transformOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureBuildRangeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildRangeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( primitiveCount, primitiveOffset, firstVertex, transformOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureBuildRangeInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( primitiveCount == rhs.primitiveCount ) && ( primitiveOffset == rhs.primitiveOffset ) && ( firstVertex == rhs.firstVertex ) && + ( transformOffset == rhs.transformOffset ); +# endif + } + + bool operator!=( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t primitiveCount = {}; + uint32_t primitiveOffset = {}; + uint32_t firstVertex = {}; + uint32_t transformOffset = {}; + }; + + struct AccelerationStructureBuildSizesInfoKHR + { + using NativeType = VkAccelerationStructureBuildSizesInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildSizesInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , accelerationStructureSize( accelerationStructureSize_ ) + , updateScratchSize( updateScratchSize_ ) + , buildScratchSize( buildScratchSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
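   // Hedged sketch (illustrative only, not part of the generated header): this
+    // struct is normally filled by the implementation rather than by hand, e.g.
+    // via the enhanced-mode query below, where `device`, `buildInfo` and
+    // `primitiveCounts` are assumed caller-side objects:
+    //
+    //   VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizes =
+    //     device.getAccelerationStructureBuildSizesKHR(
+    //       VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eDevice, buildInfo, primitiveCounts );
+    //   // sizes.accelerationStructureSize : size of the buffer backing the acceleration structure
+    //   // sizes.buildScratchSize          : scratch memory needed for the initial build
+
+    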
AccelerationStructureBuildSizesInfoKHR( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureBuildSizesInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureBuildSizesInfoKHR & operator=( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureBuildSizesInfoKHR & operator=( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & + setAccelerationStructureSize( VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureSize = accelerationStructureSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & + setUpdateScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ ) VULKAN_HPP_NOEXCEPT + { + updateScratchSize = updateScratchSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & + setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT + { + buildScratchSize = buildScratchSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureBuildSizesInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureBuildSizesInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructureSize, updateScratchSize, buildScratchSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureBuildSizesInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureSize == rhs.accelerationStructureSize ) && + ( updateScratchSize == rhs.updateScratchSize ) && ( buildScratchSize == rhs.buildScratchSize ); +# endif + } + + bool operator!=( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildSizesInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureBuildSizesInfoKHR; + }; + + struct AccelerationStructureCaptureDescriptorDataInfoEXT + { + using NativeType = VkAccelerationStructureCaptureDescriptorDataInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT; + +#if 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , accelerationStructure( accelerationStructure_ ) + , accelerationStructureNV( accelerationStructureNV_ ) + { + } + + VULKAN_HPP_CONSTEXPR + AccelerationStructureCaptureDescriptorDataInfoEXT( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCaptureDescriptorDataInfoEXT( VkAccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureCaptureDescriptorDataInfoEXT & + operator=( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureCaptureDescriptorDataInfoEXT & operator=( VkAccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & + setAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureNV = accelerationStructureNV_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructure, accelerationStructureNV ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureCaptureDescriptorDataInfoEXT const & ) const = default; +#else + bool operator==( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) && + ( accelerationStructureNV == rhs.accelerationStructureNV ); +# endif + } + + bool operator!=( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR 
accelerationStructure = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureCaptureDescriptorDataInfoEXT; + }; + + struct AccelerationStructureCreateInfoKHR + { + using NativeType = VkAccelerationStructureCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , createFlags( createFlags_ ) + , buffer( buffer_ ) + , offset( offset_ ) + , size( size_ ) + , type( type_ ) + , deviceAddress( deviceAddress_ ) + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureCreateInfoKHR & operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureCreateInfoKHR & operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & + setCreateFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ ) VULKAN_HPP_NOEXCEPT + { + createFlags = createFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT 
+ { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureCreateInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && + ( size == rhs.size ) && ( type == rhs.type ) && ( deviceAddress == rhs.deviceAddress ); +# endif + } + + bool operator!=( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureCreateInfoKHR; + }; + + struct GeometryTrianglesNV + { + using NativeType = VkGeometryTrianglesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {}, + uint32_t vertexCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, + VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Buffer indexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {}, + uint32_t indexCount_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , vertexData( vertexData_ ) + , vertexOffset( vertexOffset_ ) + , vertexCount( vertexCount_ ) + , vertexStride( vertexStride_ ) + , vertexFormat( vertexFormat_ ) + , indexData( indexData_ ) + , indexOffset( indexOffset_ ) + , indexCount( indexCount_ ) + , indexType( indexType_ ) + , transformData( transformData_ ) + , transformOffset( transformOffset_ ) + { + } + + VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryTrianglesNV( *reinterpret_cast( &rhs ) ) + { + } + + GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT + { + vertexOffset = vertexOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexCount = vertexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexStride = vertexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT + { + indexData = indexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT + { + indexOffset = indexOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT + { + indexCount = indexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT + { + transformData = transformData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT + { + transformOffset = transformOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + vertexData, + vertexOffset, + vertexCount, + vertexStride, + vertexFormat, + indexData, + indexOffset, + indexCount, + indexType, + transformData, + transformOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryTrianglesNV const & ) const = default; +#else + bool operator==( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexData == rhs.vertexData ) && ( vertexOffset == rhs.vertexOffset ) && + ( vertexCount == rhs.vertexCount ) && ( vertexStride == rhs.vertexStride ) && ( vertexFormat == rhs.vertexFormat ) && + ( indexData == rhs.indexData ) && ( indexOffset == rhs.indexOffset ) && ( indexCount == rhs.indexCount ) && ( 
indexType == rhs.indexType ) && + ( transformData == rhs.transformData ) && ( transformOffset == rhs.transformOffset ); +# endif + } + + bool operator!=( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {}; + uint32_t vertexCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Buffer indexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {}; + uint32_t indexCount = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::Buffer transformData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {}; + }; + + template <> + struct CppType + { + using Type = GeometryTrianglesNV; + }; + + struct GeometryAABBNV + { + using NativeType = VkGeometryAABBNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, + uint32_t numAABBs_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , aabbData( aabbData_ ) + , numAABBs( numAABBs_ ) + , stride( stride_ ) + , offset( offset_ ) + { + } + + VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryAABBNV( *reinterpret_cast( &rhs ) ) {} + + GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setAabbData( VULKAN_HPP_NAMESPACE::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT + { + aabbData = aabbData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) VULKAN_HPP_NOEXCEPT + { + numAABBs = numAABBs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, aabbData, numAABBs, stride, offset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( GeometryAABBNV const & ) const = default; +#else + bool operator==( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aabbData == rhs.aabbData ) && ( numAABBs == rhs.numAABBs ) && ( stride == rhs.stride ) && + ( offset == rhs.offset ); +# endif + } + + bool operator!=( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer aabbData = {}; + uint32_t numAABBs = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + }; + + template <> + struct CppType + { + using Type = GeometryAABBNV; + }; + + struct GeometryDataNV + { + using NativeType = VkGeometryDataNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryDataNV( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {}, + VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT + : triangles( triangles_ ) + , aabbs( aabbs_ ) + { + } + + VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryDataNV( *reinterpret_cast( &rhs ) ) {} + + GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT + { + triangles = triangles_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & aabbs_ ) VULKAN_HPP_NOEXCEPT + { + aabbs = aabbs_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( triangles, aabbs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryDataNV const & ) const = default; +#else + bool operator==( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( triangles == rhs.triangles ) && ( aabbs == rhs.aabbs ); +# endif + } + + bool operator!=( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {}; + VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {}; + }; + + struct GeometryNV + { + using NativeType = VkGeometryNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeometryNV( VULKAN_HPP_NAMESPACE::GeometryTypeKHR 
geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, + VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , geometryType( geometryType_ ) + , geometry( geometry_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryNV( *reinterpret_cast( &rhs ) ) {} + + GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeometryNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT + { + geometryType = geometryType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometry_ ) VULKAN_HPP_NOEXCEPT + { + geometry = geometry_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, geometryType, geometry, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeometryNV const & ) const = default; +#else + bool operator==( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( geometryType == rhs.geometryType ) && ( geometry == rhs.geometry ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles; + VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {}; + VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {}; + }; + + template <> + struct CppType + { + using Type = GeometryNV; + }; + + struct AccelerationStructureInfoNV + { + using NativeType = VkAccelerationStructureInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, + uint32_t instanceCount_ = {}, + uint32_t geometryCount_ = {}, + const VULKAN_HPP_NAMESPACE::GeometryNV 
* pGeometries_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , type( type_ ) + , flags( flags_ ) + , instanceCount( instanceCount_ ) + , geometryCount( geometryCount_ ) + , pGeometries( pGeometries_ ) + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_, + uint32_t instanceCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometries_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , type( type_ ) + , flags( flags_ ) + , instanceCount( instanceCount_ ) + , geometryCount( static_cast( geometries_.size() ) ) + , pGeometries( geometries_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = geometryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ ) VULKAN_HPP_NOEXCEPT + { + pGeometries = pGeometries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureInfoNV & + setGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & geometries_ ) VULKAN_HPP_NOEXCEPT + { + geometryCount = static_cast( geometries_.size() ); + pGeometries = geometries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, flags, instanceCount, geometryCount, pGeometries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
AccelerationStructureInfoNV const & ) const = default; +#else + bool operator==( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( flags == rhs.flags ) && ( instanceCount == rhs.instanceCount ) && + ( geometryCount == rhs.geometryCount ) && ( pGeometries == rhs.pGeometries ); +# endif + } + + bool operator!=( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {}; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {}; + uint32_t instanceCount = {}; + uint32_t geometryCount = {}; + const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureInfoNV; + }; + + struct AccelerationStructureCreateInfoNV + { + using NativeType = VkAccelerationStructureCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , compactedSize( compactedSize_ ) + , info( info_ ) + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT + { + compactedSize = compactedSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT + { + info = info_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, compactedSize, info ); + } +#endif + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureCreateInfoNV const & ) const = default; +#else + bool operator==( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compactedSize == rhs.compactedSize ) && ( info == rhs.info ); +# endif + } + + bool operator!=( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureCreateInfoNV; + }; + + struct AccelerationStructureDeviceAddressInfoKHR + { + using NativeType = VkAccelerationStructureDeviceAddressInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , accelerationStructure( accelerationStructure_ ) + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureDeviceAddressInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureDeviceAddressInfoKHR & operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureDeviceAddressInfoKHR const & 
rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ); +# endif + } + + bool operator!=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureDeviceAddressInfoKHR; + }; + + struct AccelerationStructureGeometryMotionTrianglesDataNV + { + using NativeType = VkAccelerationStructureGeometryMotionTrianglesDataNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , vertexData( vertexData_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureGeometryMotionTrianglesDataNV( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryMotionTrianglesDataNV( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryMotionTrianglesDataNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureGeometryMotionTrianglesDataNV & + operator=( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureGeometryMotionTrianglesDataNV & operator=( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & + setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureGeometryMotionTrianglesDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryMotionTrianglesDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vertexData ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryMotionTrianglesDataNV; + }; + + struct 
TransformMatrixKHR
+  {
+    using NativeType = VkTransformMatrixKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( std::array<std::array<float, 4>, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT : matrix( matrix_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT : TransformMatrixKHR( *reinterpret_cast<TransformMatrixKHR const *>( &rhs ) ) {}
+
+    TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR & setMatrix( std::array<std::array<float, 4>, 3> matrix_ ) VULKAN_HPP_NOEXCEPT
+    {
+      matrix = matrix_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTransformMatrixKHR *>( this );
+    }
+
+    operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTransformMatrixKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( matrix );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( TransformMatrixKHR const & ) const = default;
+#else
+    bool operator==( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( matrix == rhs.matrix );
+# endif
+    }
+
+    bool operator!=( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> matrix = {};
+  };
+
+  using TransformMatrixNV = TransformMatrixKHR;
+
+  struct AccelerationStructureInstanceKHR
+  {
+    using NativeType = VkAccelerationStructureInstanceKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {},
+                                                              uint32_t instanceCustomIndex_ = {},
+                                                              uint32_t mask_ = {},
+                                                              uint32_t instanceShaderBindingTableRecordOffset_ = {},
+                                                              VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {},
+                                                              uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT
+      : transform( transform_ )
+      , instanceCustomIndex( instanceCustomIndex_ )
+      , mask( mask_ )
+      , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ )
+      , flags( flags_ )
+      , accelerationStructureReference( accelerationStructureReference_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureInstanceKHR( *reinterpret_cast<AccelerationStructureInstanceKHR const *>( &rhs ) )
+    {
+    }
+
+    AccelerationStructureInstanceKHR & operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined(
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceCustomIndex = instanceCustomIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT + { + mask = mask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & + setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + { + instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; + return *this; + } + + AccelerationStructureInstanceKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = *reinterpret_cast( &flags_ ); + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureReference = accelerationStructureReference_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( transform, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureInstanceKHR const & ) const = default; +#else + bool operator==( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( transform == rhs.transform ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) && ( mask == rhs.mask ) && + ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) && + ( accelerationStructureReference == rhs.accelerationStructureReference ); +# endif + } + + bool operator!=( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {}; + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; + uint32_t instanceShaderBindingTableRecordOffset : 24; + VkGeometryInstanceFlagsKHR flags : 8; + uint64_t accelerationStructureReference = {}; + }; + + using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR; + + struct AccelerationStructureMatrixMotionInstanceNV + { + using NativeType = VkAccelerationStructureMatrixMotionInstanceNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0_ = {}, + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1_ = {}, + uint32_t instanceCustomIndex_ = {}, + uint32_t mask_ = {}, + uint32_t instanceShaderBindingTableRecordOffset_ = 
{}, + VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, + uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT + : transformT0( transformT0_ ) + , transformT1( transformT1_ ) + , instanceCustomIndex( instanceCustomIndex_ ) + , mask( mask_ ) + , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ) + , flags( flags_ ) + , accelerationStructureReference( accelerationStructureReference_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureMatrixMotionInstanceNV( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMatrixMotionInstanceNV( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureMatrixMotionInstanceNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureMatrixMotionInstanceNV & operator=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureMatrixMotionInstanceNV & operator=( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & + setTransformT0( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT0_ ) VULKAN_HPP_NOEXCEPT + { + transformT0 = transformT0_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & + setTransformT1( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT1_ ) VULKAN_HPP_NOEXCEPT + { + transformT1 = transformT1_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceCustomIndex = instanceCustomIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT + { + mask = mask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & + setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + { + instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; + return *this; + } + + AccelerationStructureMatrixMotionInstanceNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = *reinterpret_cast( &flags_ ); + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & + setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureReference = accelerationStructureReference_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureMatrixMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMatrixMotionInstanceNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) 
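+    // Usage sketch (illustrative only): `t0`, `t1` and `blasAddress` are hypothetical values, not
+    // part of this header. The fluent setters fill a matrix-motion instance for a motion-blur TLAS:
+    //
+    //   vk::AccelerationStructureMatrixMotionInstanceNV instance{};
+    //   instance.setTransformT0( t0 )
+    //           .setTransformT1( t1 )
+    //           .setMask( 0xFF )
+    //           .setFlags( vk::GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable )
+    //           .setAccelerationStructureReference( blasAddress );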
+ auto operator<=>( AccelerationStructureMatrixMotionInstanceNV const & ) const = default; +#else + bool operator==( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) && + ( mask == rhs.mask ) && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) && + ( accelerationStructureReference == rhs.accelerationStructureReference ); +# endif + } + + bool operator!=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0 = {}; + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1 = {}; + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; + uint32_t instanceShaderBindingTableRecordOffset : 24; + VkGeometryInstanceFlagsKHR flags : 8; + uint64_t accelerationStructureReference = {}; + }; + + struct AccelerationStructureMemoryRequirementsInfoNV + { + using NativeType = VkAccelerationStructureMemoryRequirementsInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , type( type_ ) + , accelerationStructure( accelerationStructure_ ) + { + } + + VULKAN_HPP_CONSTEXPR + AccelerationStructureMemoryRequirementsInfoNV( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureMemoryRequirementsInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureMemoryRequirementsInfoNV & operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator 
VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, accelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const & ) const = default; +#else + bool operator==( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( accelerationStructure == rhs.accelerationStructure ); +# endif + } + + bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureMemoryRequirementsInfoNV; + }; + + struct AccelerationStructureMotionInfoNV + { + using NativeType = VkAccelerationStructureMotionInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMotionInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( uint32_t maxInstances_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxInstances( maxInstances_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureMotionInfoNV( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureMotionInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureMotionInfoNV & operator=( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureMotionInfoNV & operator=( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setMaxInstances( uint32_t maxInstances_ ) VULKAN_HPP_NOEXCEPT + { + maxInstances = maxInstances_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif 
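+    // Usage sketch (illustrative only): `createInfo` is a hypothetical
+    // vk::AccelerationStructureCreateInfoKHR. With VK_NV_ray_tracing_motion_blur, this struct is
+    // chained through pNext when creating a motion top-level acceleration structure:
+    //
+    //   vk::AccelerationStructureMotionInfoNV motionInfo{};
+    //   motionInfo.setMaxInstances( 1024 );
+    //   createInfo.setPNext( &motionInfo );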
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureMotionInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMotionInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxInstances, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureMotionInfoNV const & ) const = default; +#else + bool operator==( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInstances == rhs.maxInstances ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMotionInfoNV; + const void * pNext = {}; + uint32_t maxInstances = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureMotionInfoNV; + }; + + struct SRTDataNV + { + using NativeType = VkSRTDataNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SRTDataNV( float sx_ = {}, + float a_ = {}, + float b_ = {}, + float pvx_ = {}, + float sy_ = {}, + float c_ = {}, + float pvy_ = {}, + float sz_ = {}, + float pvz_ = {}, + float qx_ = {}, + float qy_ = {}, + float qz_ = {}, + float qw_ = {}, + float tx_ = {}, + float ty_ = {}, + float tz_ = {} ) VULKAN_HPP_NOEXCEPT + : sx( sx_ ) + , a( a_ ) + , b( b_ ) + , pvx( pvx_ ) + , sy( sy_ ) + , c( c_ ) + , pvy( pvy_ ) + , sz( sz_ ) + , pvz( pvz_ ) + , qx( qx_ ) + , qy( qy_ ) + , qz( qz_ ) + , qw( qw_ ) + , tx( tx_ ) + , ty( ty_ ) + , tz( tz_ ) + { + } + + VULKAN_HPP_CONSTEXPR SRTDataNV( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SRTDataNV( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : SRTDataNV( *reinterpret_cast( &rhs ) ) {} + + SRTDataNV & operator=( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SRTDataNV & operator=( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSx( float sx_ ) VULKAN_HPP_NOEXCEPT + { + sx = sx_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setA( float a_ ) VULKAN_HPP_NOEXCEPT + { + a = a_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setB( float b_ ) VULKAN_HPP_NOEXCEPT + { + b = b_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvx( float pvx_ ) VULKAN_HPP_NOEXCEPT + { + pvx = pvx_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSy( float sy_ ) VULKAN_HPP_NOEXCEPT + { + sy = sy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setC( float c_ ) VULKAN_HPP_NOEXCEPT + { + c = c_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvy( float pvy_ ) VULKAN_HPP_NOEXCEPT + { + pvy = pvy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSz( float sz_ ) VULKAN_HPP_NOEXCEPT + { + sz = sz_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvz( float pvz_ ) VULKAN_HPP_NOEXCEPT + { + pvz = pvz_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQx( float qx_ ) VULKAN_HPP_NOEXCEPT + { + qx = qx_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQy( float qy_ ) VULKAN_HPP_NOEXCEPT + { + qy = qy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQz( float qz_ ) VULKAN_HPP_NOEXCEPT + { + qz = qz_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQw( float qw_ ) VULKAN_HPP_NOEXCEPT + { + qw = qw_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTx( float tx_ ) VULKAN_HPP_NOEXCEPT + { + tx = tx_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTy( float ty_ ) VULKAN_HPP_NOEXCEPT + { + ty = ty_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTz( float tz_ ) VULKAN_HPP_NOEXCEPT + { + tz = tz_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSRTDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSRTDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sx, a, b, pvx, sy, c, pvy, sz, pvz, qx, qy, qz, qw, tx, ty, tz ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SRTDataNV const & ) const = default; +#else + bool operator==( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sx == rhs.sx ) && ( a == rhs.a ) && ( b == rhs.b ) && ( pvx == rhs.pvx ) && ( sy == rhs.sy ) && ( c == rhs.c ) && ( pvy == rhs.pvy ) && + ( sz == rhs.sz ) && ( pvz == rhs.pvz ) && ( qx == rhs.qx ) && ( qy == rhs.qy ) && ( qz == rhs.qz ) && ( qw == rhs.qw ) && ( tx == rhs.tx ) && + ( ty == rhs.ty ) && ( tz == rhs.tz ); +# endif + } + + bool operator!=( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float sx = {}; + float a = {}; + float b = {}; + float pvx = {}; + float sy = {}; + float c = {}; + float pvy = {}; + float sz = {}; + float pvz = {}; + float qx = {}; + float qy = {}; + float qz = {}; + float qw = {}; + float tx = {}; + float ty = {}; + float tz = {}; + }; + + struct AccelerationStructureSRTMotionInstanceNV + { + using NativeType = VkAccelerationStructureSRTMotionInstanceNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV( VULKAN_HPP_NAMESPACE::SRTDataNV transformT0_ = {}, + VULKAN_HPP_NAMESPACE::SRTDataNV transformT1_ = {}, + uint32_t instanceCustomIndex_ = {}, + uint32_t mask_ = {}, + uint32_t instanceShaderBindingTableRecordOffset_ = {}, + VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, + uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT + : transformT0( transformT0_ ) + , transformT1( transformT1_ ) + , instanceCustomIndex( instanceCustomIndex_ ) + , mask( mask_ ) + , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ) + , flags( flags_ ) + , accelerationStructureReference( accelerationStructureReference_ ) + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureSRTMotionInstanceNV( 
VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureSRTMotionInstanceNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureSRTMotionInstanceNV & operator=( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureSRTMotionInstanceNV & operator=( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & + setTransformT0( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT0_ ) VULKAN_HPP_NOEXCEPT + { + transformT0 = transformT0_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & + setTransformT1( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT1_ ) VULKAN_HPP_NOEXCEPT + { + transformT1 = transformT1_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceCustomIndex = instanceCustomIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT + { + mask = mask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & + setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + { + instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; + return *this; + } + + AccelerationStructureSRTMotionInstanceNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = *reinterpret_cast( &flags_ ); + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & + setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureReference = accelerationStructureReference_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureSRTMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureSRTMotionInstanceNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureSRTMotionInstanceNV const & ) const = default; +#else + bool operator==( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) && + ( mask == rhs.mask ) && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) && + ( accelerationStructureReference == rhs.accelerationStructureReference ); +# endif + } + + bool operator!=( AccelerationStructureSRTMotionInstanceNV 
const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::SRTDataNV transformT0 = {}; + VULKAN_HPP_NAMESPACE::SRTDataNV transformT1 = {}; + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; + uint32_t instanceShaderBindingTableRecordOffset : 24; + VkGeometryInstanceFlagsKHR flags : 8; + uint64_t accelerationStructureReference = {}; + }; + + union AccelerationStructureMotionInstanceDataNV + { + using NativeType = VkAccelerationStructureMotionInstanceDataNV; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance_ = {} ) + : staticInstance( staticInstance_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance_ ) + : matrixMotionInstance( matrixMotionInstance_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance_ ) + : srtMotionInstance( srtMotionInstance_ ) + { + } +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & + setStaticInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const & staticInstance_ ) VULKAN_HPP_NOEXCEPT + { + staticInstance = staticInstance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & + setMatrixMotionInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV const & matrixMotionInstance_ ) VULKAN_HPP_NOEXCEPT + { + matrixMotionInstance = matrixMotionInstance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & + setSrtMotionInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV const & srtMotionInstance_ ) VULKAN_HPP_NOEXCEPT + { + srtMotionInstance = srtMotionInstance_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkAccelerationStructureMotionInstanceDataNV const &() const + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureMotionInstanceDataNV &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance; + VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; + VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV srtMotionInstance; +#else + VkAccelerationStructureInstanceKHR staticInstance; + VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; + VkAccelerationStructureSRTMotionInstanceNV srtMotionInstance; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct AccelerationStructureMotionInstanceNV + { + using NativeType = VkAccelerationStructureMotionInstanceNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic, + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , flags( flags_ ) + , data( data_ ) + { + } + + 
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureMotionInstanceNV( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureMotionInstanceNV( *reinterpret_cast<AccelerationStructureMotionInstanceNV const *>( &rhs ) )
+    {
+    }
+
+    AccelerationStructureMotionInstanceNV & operator=( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AccelerationStructureMotionInstanceNV & operator=( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV &
+      setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV &
+      setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV &
+      setData( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkAccelerationStructureMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureMotionInstanceNV *>( this );
+    }
+
+    operator VkAccelerationStructureMotionInstanceNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureMotionInstanceNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV const &,
+               VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV const &,
+               VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( type, flags, data );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data = {};
+  };
+
+  struct MicromapUsageEXT
+  {
+    using NativeType = VkMicromapUsageEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MicromapUsageEXT( uint32_t count_ = {}, uint32_t subdivisionLevel_ = {}, uint32_t format_ = {} ) VULKAN_HPP_NOEXCEPT
+      : count( count_ )
+      , subdivisionLevel( subdivisionLevel_ )
+      , format( format_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR MicromapUsageEXT( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MicromapUsageEXT( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapUsageEXT( *reinterpret_cast<MicromapUsageEXT const *>( &rhs ) ) {}
+
+    MicromapUsageEXT & operator=( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MicromapUsageEXT & operator=( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapUsageEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setCount( uint32_t count_ ) VULKAN_HPP_NOEXCEPT
+    {
+      count = count_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setSubdivisionLevel( uint32_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subdivisionLevel = subdivisionLevel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setFormat( uint32_t format_ )
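+    // Usage sketch (illustrative only): `triangleCount` is a hypothetical value. For opacity
+    // micromaps, `format` carries a VkOpacityMicromapFormatEXT value:
+    //
+    //   vk::MicromapUsageEXT usage{};
+    //   usage.setCount( triangleCount ).setSubdivisionLevel( 3 ).setFormat( VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT );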
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct AccelerationStructureTrianglesDisplacementMicromapNV
+  {
+    using NativeType = VkAccelerationStructureTrianglesDisplacementMicromapNV;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV(
+      VULKAN_HPP_NAMESPACE::Format                            displacementBiasAndScaleFormat_        = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+      VULKAN_HPP_NAMESPACE::Format                            displacementVectorFormat_              = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR       displacementBiasAndScaleBuffer_        = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize                        displacementBiasAndScaleStride_        = {},
+      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR       displacementVectorBuffer_              = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize                        displacementVectorStride_              = {},
+      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR       displacedMicromapPrimitiveFlags_       = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize                        displacedMicromapPrimitiveFlagsStride_ = {},
+      VULKAN_HPP_NAMESPACE::IndexType                         indexType_                             = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
+      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR       indexBuffer_                           = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize                        indexStride_                           = {},
+      uint32_t                                                baseTriangle_                          = {},
+      uint32_t                                                usageCountsCount_                      = {},
+      const VULKAN_HPP_NAMESPACE::MicromapUsageEXT *          pUsageCounts_                          = {},
+      const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const *  ppUsageCounts_                         = {},
+      VULKAN_HPP_NAMESPACE::MicromapEXT                       micromap_                              = {},
+      void *                                                  pNext_                                 = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , displacementBiasAndScaleFormat( displacementBiasAndScaleFormat_ )
+      , displacementVectorFormat( displacementVectorFormat_ )
+      , displacementBiasAndScaleBuffer( displacementBiasAndScaleBuffer_ )
+      , displacementBiasAndScaleStride( displacementBiasAndScaleStride_ )
+      , displacementVectorBuffer( displacementVectorBuffer_ )
+      , displacementVectorStride( displacementVectorStride_ )
+      , displacedMicromapPrimitiveFlags( displacedMicromapPrimitiveFlags_ )
+      , displacedMicromapPrimitiveFlagsStride( displacedMicromapPrimitiveFlagsStride_ )
+      , indexType( indexType_ )
+      , indexBuffer( indexBuffer_ )
+      , indexStride( indexStride_ )
+
, baseTriangle( baseTriangle_ ) + , usageCountsCount( usageCountsCount_ ) + , pUsageCounts( pUsageCounts_ ) + , ppUsageCounts( ppUsageCounts_ ) + , micromap( micromap_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureTrianglesDisplacementMicromapNV( AccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureTrianglesDisplacementMicromapNV( VkAccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureTrianglesDisplacementMicromapNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureTrianglesDisplacementMicromapNV( + VULKAN_HPP_NAMESPACE::Format displacementBiasAndScaleFormat_, + VULKAN_HPP_NAMESPACE::Format displacementVectorFormat_, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize displacementBiasAndScaleStride_, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementVectorBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize displacementVectorStride_, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags_, + VULKAN_HPP_NAMESPACE::DeviceSize displacedMicromapPrimitiveFlagsStride_, + VULKAN_HPP_NAMESPACE::IndexType indexType_, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize indexStride_, + uint32_t baseTriangle_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ = {}, + VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ) + , displacementBiasAndScaleFormat( displacementBiasAndScaleFormat_ ) + , displacementVectorFormat( displacementVectorFormat_ ) + , displacementBiasAndScaleBuffer( displacementBiasAndScaleBuffer_ ) + , displacementBiasAndScaleStride( displacementBiasAndScaleStride_ ) + , displacementVectorBuffer( displacementVectorBuffer_ ) + , displacementVectorStride( displacementVectorStride_ ) + , displacedMicromapPrimitiveFlags( displacedMicromapPrimitiveFlags_ ) + , displacedMicromapPrimitiveFlagsStride( displacedMicromapPrimitiveFlagsStride_ ) + , indexType( indexType_ ) + , indexBuffer( indexBuffer_ ) + , indexStride( indexStride_ ) + , baseTriangle( baseTriangle_ ) + , usageCountsCount( static_cast( usageCounts_.size() ) ) + , pUsageCounts( usageCounts_.data() ) + , ppUsageCounts( pUsageCounts_.data() ) + , micromap( micromap_ ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( usageCounts_.empty() || pUsageCounts_.empty() || ( usageCounts_.size() == pUsageCounts_.size() ) ); +# else + if ( !usageCounts_.empty() && !pUsageCounts_.empty() && ( usageCounts_.size() != pUsageCounts_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::AccelerationStructureTrianglesDisplacementMicromapNV::AccelerationStructureTrianglesDisplacementMicromapNV: !usageCounts_.empty() && !pUsageCounts_.empty() && ( usageCounts_.size() != pUsageCounts_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + AccelerationStructureTrianglesDisplacementMicromapNV & + operator=( AccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureTrianglesDisplacementMicromapNV & operator=( VkAccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) 
VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacementBiasAndScaleFormat( VULKAN_HPP_NAMESPACE::Format displacementBiasAndScaleFormat_ ) VULKAN_HPP_NOEXCEPT + { + displacementBiasAndScaleFormat = displacementBiasAndScaleFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacementVectorFormat( VULKAN_HPP_NAMESPACE::Format displacementVectorFormat_ ) VULKAN_HPP_NOEXCEPT + { + displacementVectorFormat = displacementVectorFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacementBiasAndScaleBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & displacementBiasAndScaleBuffer_ ) VULKAN_HPP_NOEXCEPT + { + displacementBiasAndScaleBuffer = displacementBiasAndScaleBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacementBiasAndScaleStride( VULKAN_HPP_NAMESPACE::DeviceSize displacementBiasAndScaleStride_ ) VULKAN_HPP_NOEXCEPT + { + displacementBiasAndScaleStride = displacementBiasAndScaleStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacementVectorBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & displacementVectorBuffer_ ) VULKAN_HPP_NOEXCEPT + { + displacementVectorBuffer = displacementVectorBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacementVectorStride( VULKAN_HPP_NAMESPACE::DeviceSize displacementVectorStride_ ) VULKAN_HPP_NOEXCEPT + { + displacementVectorStride = displacementVectorStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacedMicromapPrimitiveFlags( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & displacedMicromapPrimitiveFlags_ ) VULKAN_HPP_NOEXCEPT + { + displacedMicromapPrimitiveFlags = displacedMicromapPrimitiveFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setDisplacedMicromapPrimitiveFlagsStride( VULKAN_HPP_NAMESPACE::DeviceSize displacedMicromapPrimitiveFlagsStride_ ) VULKAN_HPP_NOEXCEPT + { + displacedMicromapPrimitiveFlagsStride = displacedMicromapPrimitiveFlagsStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + indexBuffer = indexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT + { + indexStride = indexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setBaseTriangle( uint32_t baseTriangle_ ) VULKAN_HPP_NOEXCEPT + { + 
baseTriangle = baseTriangle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = usageCountsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + pUsageCounts = pUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureTrianglesDisplacementMicromapNV & + setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( usageCounts_.size() ); + pUsageCounts = usageCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + ppUsageCounts = ppUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureTrianglesDisplacementMicromapNV & setPUsageCounts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( pUsageCounts_.size() ); + ppUsageCounts = pUsageCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & + setMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ ) VULKAN_HPP_NOEXCEPT + { + micromap = micromap_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureTrianglesDisplacementMicromapNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureTrianglesDisplacementMicromapNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + displacementBiasAndScaleFormat, + displacementVectorFormat, + displacementBiasAndScaleBuffer, + displacementBiasAndScaleStride, + displacementVectorBuffer, + displacementVectorStride, + displacedMicromapPrimitiveFlags, + displacedMicromapPrimitiveFlagsStride, + indexType, + indexBuffer, + indexStride, + baseTriangle, + usageCountsCount, + pUsageCounts, + ppUsageCounts, + micromap ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format displacementBiasAndScaleFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format displacementVectorFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize displacementBiasAndScaleStride = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementVectorBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize displacementVectorStride = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize displacedMicromapPrimitiveFlagsStride = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = 
VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {}; + uint32_t baseTriangle = {}; + uint32_t usageCountsCount = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT micromap = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureTrianglesDisplacementMicromapNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct AccelerationStructureTrianglesOpacityMicromapEXT + { + using NativeType = VkAccelerationStructureTrianglesOpacityMicromapEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureTrianglesOpacityMicromapEXT( VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {}, + uint32_t baseTriangle_ = {}, + uint32_t usageCountsCount_ = {}, + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {}, + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {}, + VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , indexType( indexType_ ) + , indexBuffer( indexBuffer_ ) + , indexStride( indexStride_ ) + , baseTriangle( baseTriangle_ ) + , usageCountsCount( usageCountsCount_ ) + , pUsageCounts( pUsageCounts_ ) + , ppUsageCounts( ppUsageCounts_ ) + , micromap( micromap_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureTrianglesOpacityMicromapEXT( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureTrianglesOpacityMicromapEXT( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureTrianglesOpacityMicromapEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureTrianglesOpacityMicromapEXT( + VULKAN_HPP_NAMESPACE::IndexType indexType_, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize indexStride_, + uint32_t baseTriangle_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ = {}, + VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ) + , indexType( indexType_ ) + , indexBuffer( indexBuffer_ ) + , indexStride( indexStride_ ) + , baseTriangle( baseTriangle_ ) + , usageCountsCount( static_cast( !usageCounts_.empty() ? 
usageCounts_.size() : pUsageCounts_.size() ) ) + , pUsageCounts( usageCounts_.data() ) + , ppUsageCounts( pUsageCounts_.data() ) + , micromap( micromap_ ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 ); +# else + if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::AccelerationStructureTrianglesOpacityMicromapEXT::AccelerationStructureTrianglesOpacityMicromapEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + AccelerationStructureTrianglesOpacityMicromapEXT & operator=( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureTrianglesOpacityMicromapEXT & operator=( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & + setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + indexBuffer = indexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & + setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT + { + indexStride = indexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setBaseTriangle( uint32_t baseTriangle_ ) VULKAN_HPP_NOEXCEPT + { + baseTriangle = baseTriangle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = usageCountsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & + setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + pUsageCounts = pUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureTrianglesOpacityMicromapEXT & + setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( usageCounts_.size() ); + pUsageCounts = usageCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & + setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + ppUsageCounts = ppUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureTrianglesOpacityMicromapEXT & setPUsageCounts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( pUsageCounts_.size() ); + ppUsageCounts = pUsageCounts_.data(); + 
return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ ) VULKAN_HPP_NOEXCEPT + { + micromap = micromap_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureTrianglesOpacityMicromapEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureTrianglesOpacityMicromapEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, indexType, indexBuffer, indexStride, baseTriangle, usageCountsCount, pUsageCounts, ppUsageCounts, micromap ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {}; + uint32_t baseTriangle = {}; + uint32_t usageCountsCount = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT micromap = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureTrianglesOpacityMicromapEXT; + }; + + struct AccelerationStructureVersionInfoKHR + { + using NativeType = VkAccelerationStructureVersionInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureVersionInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pVersionData( pVersionData_ ) + { + } + + VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureVersionInfoKHR( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureVersionInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureVersionInfoKHR & operator=( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AccelerationStructureVersionInfoKHR & operator=( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT + { + pVersionData = pVersionData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAccelerationStructureVersionInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureVersionInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } 
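+
+    // Note: per the VK_KHR_acceleration_structure specification, pVersionData must point to
+    // 2 * VK_UUID_SIZE bytes (a driver UUID followed by a compatibility UUID), typically taken
+    // from a serialized acceleration structure header. A minimal sketch, assuming `header`
+    // holds such serialized bytes and `device` is a valid VULKAN_HPP_NAMESPACE::Device:
+    //
+    //   VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR versionInfo( header.data() );
+    //   auto compatibility = device.getAccelerationStructureCompatibilityKHR( versionInfo );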
+ +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pVersionData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureVersionInfoKHR const & ) const = default; +#else + bool operator==( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData ); +# endif + } + + bool operator!=( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionInfoKHR; + const void * pNext = {}; + const uint8_t * pVersionData = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureVersionInfoKHR; + }; + + struct AcquireNextImageInfoKHR + { + using NativeType = VkAcquireNextImageInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, + uint64_t timeout_ = {}, + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + uint32_t deviceMask_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , swapchain( swapchain_ ) + , timeout( timeout_ ) + , semaphore( semaphore_ ) + , fence( fence_ ) + , deviceMask( deviceMask_ ) + { + } + + VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AcquireNextImageInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT + { + timeout = timeout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAcquireNextImageInfoKHR const 
&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchain, timeout, semaphore, fence, deviceMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AcquireNextImageInfoKHR const & ) const = default; +#else + bool operator==( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( timeout == rhs.timeout ) && + ( semaphore == rhs.semaphore ) && ( fence == rhs.fence ) && ( deviceMask == rhs.deviceMask ); +# endif + } + + bool operator!=( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint64_t timeout = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + uint32_t deviceMask = {}; + }; + + template <> + struct CppType + { + using Type = AcquireNextImageInfoKHR; + }; + + struct AcquireProfilingLockInfoKHR + { + using NativeType = VkAcquireProfilingLockInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, + uint64_t timeout_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , timeout( timeout_ ) + { + } + + VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AcquireProfilingLockInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT + { + timeout = timeout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, timeout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AcquireProfilingLockInfoKHR const & ) const = default; +#else + bool operator==( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( timeout == rhs.timeout ); +# endif + } + + bool operator!=( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {}; + uint64_t timeout = {}; + }; + + template <> + struct CppType + { + using Type = AcquireProfilingLockInfoKHR; + }; + + struct AllocationCallbacks + { + using NativeType = VkAllocationCallbacks; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AllocationCallbacks( void * pUserData_ = {}, + PFN_vkAllocationFunction pfnAllocation_ = {}, + PFN_vkReallocationFunction pfnReallocation_ = {}, + PFN_vkFreeFunction pfnFree_ = {}, + PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, + PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT + : pUserData( pUserData_ ) + , pfnAllocation( pfnAllocation_ ) + , pfnReallocation( pfnReallocation_ ) + , pfnFree( pfnFree_ ) + , pfnInternalAllocation( pfnInternalAllocation_ ) + , pfnInternalFree( pfnInternalFree_ ) + { + } + + VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT : AllocationCallbacks( *reinterpret_cast( &rhs ) ) + { + } + + AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + { + pUserData = pUserData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT + { + pfnAllocation = pfnAllocation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT + { + pfnReallocation = pfnReallocation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnFree( PFN_vkFreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT + { + pfnFree = pfnFree_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT + { + pfnInternalAllocation = pfnInternalAllocation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT + { + pfnInternalFree = pfnInternalFree_; + return *this; + } +#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( pUserData, pfnAllocation, pfnReallocation, pfnFree, pfnInternalAllocation, pfnInternalFree ); + } +#endif + + bool operator==( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT + { +#if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +#else + return ( pUserData == rhs.pUserData ) && ( pfnAllocation == rhs.pfnAllocation ) && ( pfnReallocation == rhs.pfnReallocation ) && + ( pfnFree == rhs.pfnFree ) && ( pfnInternalAllocation == rhs.pfnInternalAllocation ) && ( pfnInternalFree == rhs.pfnInternalFree ); +#endif + } + + bool operator!=( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + void * pUserData = {}; + PFN_vkAllocationFunction pfnAllocation = {}; + PFN_vkReallocationFunction pfnReallocation = {}; + PFN_vkFreeFunction pfnFree = {}; + PFN_vkInternalAllocationNotification pfnInternalAllocation = {}; + PFN_vkInternalFreeNotification pfnInternalFree = {}; + }; + + struct AmigoProfilingSubmitInfoSEC + { + using NativeType = VkAmigoProfilingSubmitInfoSEC; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAmigoProfilingSubmitInfoSEC; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AmigoProfilingSubmitInfoSEC( uint64_t firstDrawTimestamp_ = {}, uint64_t swapBufferTimestamp_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , firstDrawTimestamp( firstDrawTimestamp_ ) + , swapBufferTimestamp( swapBufferTimestamp_ ) + { + } + + VULKAN_HPP_CONSTEXPR AmigoProfilingSubmitInfoSEC( AmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AmigoProfilingSubmitInfoSEC( VkAmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT + : AmigoProfilingSubmitInfoSEC( *reinterpret_cast( &rhs ) ) + { + } + + AmigoProfilingSubmitInfoSEC & operator=( AmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AmigoProfilingSubmitInfoSEC & operator=( VkAmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setFirstDrawTimestamp( uint64_t firstDrawTimestamp_ ) VULKAN_HPP_NOEXCEPT + { + firstDrawTimestamp = firstDrawTimestamp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setSwapBufferTimestamp( uint64_t swapBufferTimestamp_ ) VULKAN_HPP_NOEXCEPT + { + swapBufferTimestamp = swapBufferTimestamp_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAmigoProfilingSubmitInfoSEC const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAmigoProfilingSubmitInfoSEC &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 
14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, firstDrawTimestamp, swapBufferTimestamp ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AmigoProfilingSubmitInfoSEC const & ) const = default; +#else + bool operator==( AmigoProfilingSubmitInfoSEC const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( firstDrawTimestamp == rhs.firstDrawTimestamp ) && + ( swapBufferTimestamp == rhs.swapBufferTimestamp ); +# endif + } + + bool operator!=( AmigoProfilingSubmitInfoSEC const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAmigoProfilingSubmitInfoSEC; + const void * pNext = {}; + uint64_t firstDrawTimestamp = {}; + uint64_t swapBufferTimestamp = {}; + }; + + template <> + struct CppType + { + using Type = AmigoProfilingSubmitInfoSEC; + }; + + struct ComponentMapping + { + using NativeType = VkComponentMapping; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, + VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, + VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, + VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT + : r( r_ ) + , g( g_ ) + , b( b_ ) + , a( a_ ) + { + } + + VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT : ComponentMapping( *reinterpret_cast( &rhs ) ) {} + + ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT + { + r = r_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setG( VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT + { + g = g_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setB( VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT + { + b = b_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setA( VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT + { + a = a_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( r, g, b, a ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ComponentMapping const & ) const = default; +#else + bool operator==( ComponentMapping const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( r == rhs.r ) && ( g == rhs.g ) && ( b == rhs.b ) && ( a == rhs.a ); +# endif + } + + bool operator!=( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; + VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; + VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; + VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity; + }; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidHardwareBufferFormatProperties2ANDROID + { + using NativeType = VkAndroidHardwareBufferFormatProperties2ANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint64_t externalFormat_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , format( format_ ) + , externalFormat( externalFormat_ ) + , formatFeatures( formatFeatures_ ) + , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ) + , suggestedYcbcrModel( suggestedYcbcrModel_ ) + , suggestedYcbcrRange( suggestedYcbcrRange_ ) + , suggestedXChromaOffset( suggestedXChromaOffset_ ) + , suggestedYChromaOffset( suggestedYChromaOffset_ ) + { + } + + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferFormatProperties2ANDROID( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatProperties2ANDROID( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferFormatProperties2ANDROID( *reinterpret_cast( &rhs ) ) + { + } + + AndroidHardwareBufferFormatProperties2ANDROID & operator=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AndroidHardwareBufferFormatProperties2ANDROID & operator=( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkAndroidHardwareBufferFormatProperties2ANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferFormatProperties2ANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + 
auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + format, + externalFormat, + formatFeatures, + samplerYcbcrConversionComponents, + suggestedYcbcrModel, + suggestedYcbcrRange, + suggestedXChromaOffset, + suggestedYChromaOffset ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferFormatProperties2ANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) && + ( formatFeatures == rhs.formatFeatures ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && + ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && + ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); +# endif + } + + bool operator!=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint64_t externalFormat = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 formatFeatures = {}; + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; + + template <> + struct CppType + { + using Type = AndroidHardwareBufferFormatProperties2ANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidHardwareBufferFormatPropertiesANDROID + { + using NativeType = VkAndroidHardwareBufferFormatPropertiesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint64_t externalFormat_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = 
VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , format( format_ ) + , externalFormat( externalFormat_ ) + , formatFeatures( formatFeatures_ ) + , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ) + , suggestedYcbcrModel( suggestedYcbcrModel_ ) + , suggestedYcbcrRange( suggestedYcbcrRange_ ) + , suggestedXChromaOffset( suggestedXChromaOffset_ ) + , suggestedYChromaOffset( suggestedYChromaOffset_ ) + { + } + + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferFormatPropertiesANDROID( *reinterpret_cast( &rhs ) ) + { + } + + AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AndroidHardwareBufferFormatPropertiesANDROID & operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkAndroidHardwareBufferFormatPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + format, + externalFormat, + formatFeatures, + samplerYcbcrConversionComponents, + suggestedYcbcrModel, + suggestedYcbcrRange, + suggestedXChromaOffset, + suggestedYChromaOffset ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) && + ( formatFeatures == rhs.formatFeatures ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && + ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && + ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); +# endif + } + + bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint64_t externalFormat = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {}; + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = 
VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; + + template <> + struct CppType + { + using Type = AndroidHardwareBufferFormatPropertiesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidHardwareBufferFormatResolvePropertiesANDROID + { + using NativeType = VkAndroidHardwareBufferFormatResolvePropertiesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferFormatResolvePropertiesANDROID( VULKAN_HPP_NAMESPACE::Format colorAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , colorAttachmentFormat( colorAttachmentFormat_ ) + { + } + + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferFormatResolvePropertiesANDROID( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatResolvePropertiesANDROID( VkAndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferFormatResolvePropertiesANDROID( *reinterpret_cast( &rhs ) ) + { + } + + AndroidHardwareBufferFormatResolvePropertiesANDROID & + operator=( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AndroidHardwareBufferFormatResolvePropertiesANDROID & operator=( VkAndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, colorAttachmentFormat ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferFormatResolvePropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentFormat == rhs.colorAttachmentFormat ); +# endif + } + + bool operator!=( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format colorAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = AndroidHardwareBufferFormatResolvePropertiesANDROID; + }; +#endif 
/*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidHardwareBufferPropertiesANDROID + { + using NativeType = VkAndroidHardwareBufferPropertiesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferPropertiesANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, + uint32_t memoryTypeBits_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , allocationSize( allocationSize_ ) + , memoryTypeBits( memoryTypeBits_ ) + { + } + + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferPropertiesANDROID( *reinterpret_cast( &rhs ) ) + { + } + + AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AndroidHardwareBufferPropertiesANDROID & operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkAndroidHardwareBufferPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, allocationSize, memoryTypeBits ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferPropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = AndroidHardwareBufferPropertiesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidHardwareBufferUsageANDROID + { + using NativeType = VkAndroidHardwareBufferUsageANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferUsageANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , androidHardwareBufferUsage( androidHardwareBufferUsage_ ) + { 
+ } + + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferUsageANDROID( *reinterpret_cast( &rhs ) ) + { + } + + AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkAndroidHardwareBufferUsageANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, androidHardwareBufferUsage ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferUsageANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage ); +# endif + } + + bool operator!=( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID; + void * pNext = {}; + uint64_t androidHardwareBufferUsage = {}; + }; + + template <> + struct CppType + { + using Type = AndroidHardwareBufferUsageANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct AndroidSurfaceCreateInfoKHR + { + using NativeType = VkAndroidSurfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, + struct ANativeWindow * window_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , window( window_ ) + { + } + + VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + 
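+    // --- Illustrative aside (not part of the generated header) -----------------
+    // The setters in these structs follow vulkan.hpp's fluent pattern: each one
+    // returns *this, so a create-info struct can be filled in a single
+    // expression. A minimal sketch, assuming VK_USE_PLATFORM_ANDROID_KHR is
+    // defined and an ANativeWindow* named `nativeWindow` is available:
+    //
+    //   vk::AndroidSurfaceCreateInfoKHR surfaceInfo =
+    //       vk::AndroidSurfaceCreateInfoKHR{}.setWindow( nativeWindow );
+    //   vk::SurfaceKHR surface = instance.createAndroidSurfaceKHR( surfaceInfo );
+    // ----------------------------------------------------------------------------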
VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow * window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, window ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidSurfaceCreateInfoKHR const & ) const = default; +# else + bool operator==( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window ); +# endif + } + + bool operator!=( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {}; + struct ANativeWindow * window = {}; + }; + + template <> + struct CppType + { + using Type = AndroidSurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct ApplicationInfo + { + using NativeType = VkApplicationInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ApplicationInfo( const char * pApplicationName_ = {}, + uint32_t applicationVersion_ = {}, + const char * pEngineName_ = {}, + uint32_t engineVersion_ = {}, + uint32_t apiVersion_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pApplicationName( pApplicationName_ ) + , applicationVersion( applicationVersion_ ) + , pEngineName( pEngineName_ ) + , engineVersion( engineVersion_ ) + , apiVersion( apiVersion_ ) + { + } + + VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ApplicationInfo( *reinterpret_cast( &rhs ) ) {} + + ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPApplicationName( const char * pApplicationName_ ) VULKAN_HPP_NOEXCEPT + { + pApplicationName = pApplicationName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT + { + 
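+      // --- Illustrative aside (not part of the generated header) ---------------
+      // A hedged sketch of filling the ApplicationInfo struct being defined
+      // here; the application/engine names and versions are placeholders:
+      //
+      //   vk::ApplicationInfo appInfo = vk::ApplicationInfo{}
+      //                                     .setPApplicationName( "MyApp" )
+      //                                     .setApplicationVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
+      //                                     .setPEngineName( "MyEngine" )
+      //                                     .setApiVersion( VK_API_VERSION_1_2 );
+      //   vk::InstanceCreateInfo instanceInfo = vk::InstanceCreateInfo{}.setPApplicationInfo( &appInfo );
+      // --------------------------------------------------------------------------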
+      applicationVersion = applicationVersion_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPEngineName( const char * pEngineName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pEngineName = pEngineName_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      engineVersion = engineVersion_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      apiVersion = apiVersion_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkApplicationInfo *>( this );
+    }
+
+    operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkApplicationInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, uint32_t const &, const char * const &, uint32_t const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pApplicationName, applicationVersion, pEngineName, engineVersion, apiVersion );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    std::strong_ordering operator<=>( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+        return cmp;
+      if ( pApplicationName != rhs.pApplicationName )
+        if ( auto cmp = strcmp( pApplicationName, rhs.pApplicationName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = applicationVersion <=> rhs.applicationVersion; cmp != 0 )
+        return cmp;
+      if ( pEngineName != rhs.pEngineName )
+        if ( auto cmp = strcmp( pEngineName, rhs.pEngineName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = engineVersion <=> rhs.engineVersion; cmp != 0 )
+        return cmp;
+      if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 )
+        return cmp;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( ( pApplicationName == rhs.pApplicationName ) || ( strcmp( pApplicationName, rhs.pApplicationName ) == 0 ) ) &&
+             ( applicationVersion == rhs.applicationVersion ) && ( ( pEngineName == rhs.pEngineName ) || ( strcmp( pEngineName, rhs.pEngineName ) == 0 ) ) &&
+             ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion );
+    }
+
+    bool operator!=( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo;
+    const void * pNext = {};
+    const char * pApplicationName = {};
+    uint32_t applicationVersion = {};
+    const char * pEngineName = {};
+    uint32_t engineVersion = {};
+    uint32_t apiVersion = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eApplicationInfo>
+  {
+    using Type = ApplicationInfo;
+  };
+
+  struct AttachmentDescription
+  {
+    using NativeType = VkAttachmentDescription;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      AttachmentDescription( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {},
+                             VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                             VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+                             VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
+                             VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
+                             VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
+                             VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
+                             VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                             VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , format( format_ )
+      , samples( samples_ )
+      , loadOp( loadOp_ )
+      , storeOp( storeOp_ )
+      , stencilLoadOp( stencilLoadOp_ )
+      , stencilStoreOp( stencilStoreOp_ )
+      , initialLayout( initialLayout_ )
+      , finalLayout( finalLayout_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AttachmentDescription( *reinterpret_cast<AttachmentDescription const *>( &rhs ) )
+    {
+    }
+
+    AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14
AttachmentDescription & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT + { + loadOp = loadOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT + { + storeOp = storeOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilLoadOp = stencilLoadOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilStoreOp = stencilStoreOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT + { + initialLayout = initialLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT + { + finalLayout = finalLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescription const & ) const = default; +#else + bool operator==( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) && ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && + ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) && ( initialLayout == rhs.initialLayout ) && + ( finalLayout == rhs.finalLayout ); +# endif + } + + bool operator!=( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + + struct AttachmentDescription2 + { + using 
NativeType = VkAttachmentDescription2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentDescription2( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , format( format_ ) + , samples( samples_ ) + , loadOp( loadOp_ ) + , storeOp( storeOp_ ) + , stencilLoadOp( stencilLoadOp_ ) + , stencilStoreOp( stencilStoreOp_ ) + , initialLayout( initialLayout_ ) + , finalLayout( finalLayout_ ) + { + } + + VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentDescription2( *reinterpret_cast( &rhs ) ) + { + } + + AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT + { + loadOp = loadOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT + { + storeOp = storeOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilLoadOp = stencilLoadOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilStoreOp = stencilStoreOp_; + 
return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT + { + initialLayout = initialLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT + { + finalLayout = finalLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescription2 const & ) const = default; +#else + bool operator==( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) && + ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) && + ( initialLayout == rhs.initialLayout ) && ( finalLayout == rhs.finalLayout ); +# endif + } + + bool operator!=( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + + template <> + struct CppType + { + using Type = AttachmentDescription2; + }; + + using AttachmentDescription2KHR = AttachmentDescription2; + + struct AttachmentDescriptionStencilLayout + { + using NativeType = VkAttachmentDescriptionStencilLayout; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AttachmentDescriptionStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stencilInitialLayout( stencilInitialLayout_ ) + , stencilFinalLayout( stencilFinalLayout_ ) + { + } + + VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentDescriptionStencilLayout( *reinterpret_cast( &rhs ) ) + { + } + + AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AttachmentDescriptionStencilLayout & operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & + setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT + { + stencilInitialLayout = stencilInitialLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & + setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT + { + stencilFinalLayout = stencilFinalLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stencilInitialLayout, stencilFinalLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentDescriptionStencilLayout const & ) const = default; +#else + bool operator==( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilInitialLayout == rhs.stencilInitialLayout ) && + ( stencilFinalLayout == rhs.stencilFinalLayout ); +# endif + } + + bool operator!=( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + + template <> + struct CppType + { + using Type = AttachmentDescriptionStencilLayout; + }; + + using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout; + + struct AttachmentReference + { + using NativeType = VkAttachmentReference; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + : 
attachment( attachment_ )
+      , layout( layout_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT : AttachmentReference( *reinterpret_cast<AttachmentReference const *>( &rhs ) )
+    {
+    }
+
+    AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachment = attachment_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentReference *>( this );
+    }
+
+    operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentReference *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<uint32_t const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( attachment, layout );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AttachmentReference const & ) const = default;
+#else
+    bool operator==( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( attachment == rhs.attachment ) && ( layout == rhs.layout );
+# endif
+    }
+
+    bool operator!=( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t attachment = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+  };
+
+  struct AttachmentReference2
+  {
+    using NativeType = VkAttachmentReference2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AttachmentReference2( uint32_t attachment_ = {},
+                                               VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                               VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
+                                               const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , attachment( attachment_ )
+      , layout( layout_ )
+      , aspectMask( aspectMask_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AttachmentReference2( *reinterpret_cast<AttachmentReference2 const *>( &rhs ) )
+    {
+    }
+
+    AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
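+    // --- Illustrative aside (not part of the generated header) ---------------
+    // AttachmentReference ties a subpass to an index into the render pass's
+    // attachment array. A hedged sketch of describing one color attachment,
+    // using the AttachmentDescription constructor's parameter order (flags,
+    // format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp,
+    // initialLayout, finalLayout); the chosen format is a placeholder:
+    //
+    //   vk::AttachmentDescription color{ {}, vk::Format::eB8G8R8A8Unorm,
+    //                                    vk::SampleCountFlagBits::e1,
+    //                                    vk::AttachmentLoadOp::eClear, vk::AttachmentStoreOp::eStore,
+    //                                    vk::AttachmentLoadOp::eDontCare, vk::AttachmentStoreOp::eDontCare,
+    //                                    vk::ImageLayout::eUndefined, vk::ImageLayout::ePresentSrcKHR };
+    //   vk::AttachmentReference colorRef{ 0, vk::ImageLayout::eColorAttachmentOptimal };
+    // --------------------------------------------------------------------------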
VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT + { + attachment = attachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, attachment, layout, aspectMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentReference2 const & ) const = default; +#else + bool operator==( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachment == rhs.attachment ) && ( layout == rhs.layout ) && + ( aspectMask == rhs.aspectMask ); +# endif + } + + bool operator!=( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2; + const void * pNext = {}; + uint32_t attachment = {}; + VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + }; + + template <> + struct CppType + { + using Type = AttachmentReference2; + }; + + using AttachmentReference2KHR = AttachmentReference2; + + struct AttachmentReferenceStencilLayout + { + using NativeType = VkAttachmentReferenceStencilLayout; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stencilLayout( stencilLayout_ ) + { + } + + VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + : AttachmentReferenceStencilLayout( *reinterpret_cast( &rhs ) ) + { + } + + AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
AttachmentReferenceStencilLayout & setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilLayout = stencilLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentReferenceStencilLayout *>( this );
+    }
+
+    operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentReferenceStencilLayout *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stencilLayout );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AttachmentReferenceStencilLayout const & ) const = default;
+#else
+    bool operator==( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilLayout == rhs.stencilLayout );
+# endif
+    }
+
+    bool operator!=( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eAttachmentReferenceStencilLayout>
+  {
+    using Type = AttachmentReferenceStencilLayout;
+  };
+
+  using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout;
+
+  struct AttachmentSampleCountInfoAMD
+  {
+    using NativeType = VkAttachmentSampleCountInfoAMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentSampleCountInfoAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      AttachmentSampleCountInfoAMD( uint32_t colorAttachmentCount_ = {},
+                                    const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ = {},
+                                    VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+                                    const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , colorAttachmentCount( colorAttachmentCount_ )
+      , pColorAttachmentSamples( pColorAttachmentSamples_ )
+      , depthStencilAttachmentSamples( depthStencilAttachmentSamples_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AttachmentSampleCountInfoAMD( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AttachmentSampleCountInfoAMD( *reinterpret_cast<AttachmentSampleCountInfoAMD const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    AttachmentSampleCountInfoAMD(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleCountFlagBits> const & colorAttachmentSamples_,
+      VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+      const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , colorAttachmentCount( static_cast<uint32_t>( colorAttachmentSamples_.size() ) )
+      , pColorAttachmentSamples( colorAttachmentSamples_.data() )
+      , depthStencilAttachmentSamples( depthStencilAttachmentSamples_ )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    AttachmentSampleCountInfoAMD & operator=( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
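+    // --- Illustrative aside (not part of the generated header) ---------------
+    // AttachmentReferenceStencilLayout extends AttachmentReference2 through
+    // its pNext chain, giving the stencil aspect a layout of its own. Sketch,
+    // with placeholder attachment index and layouts:
+    //
+    //   vk::AttachmentReferenceStencilLayout stencilRef{ vk::ImageLayout::eStencilAttachmentOptimal };
+    //   vk::AttachmentReference2 depthRef{ 1, vk::ImageLayout::eDepthAttachmentOptimal,
+    //                                      vk::ImageAspectFlagBits::eDepth, &stencilRef };
+    // --------------------------------------------------------------------------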
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AttachmentSampleCountInfoAMD & operator=( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & + setPColorAttachmentSamples( const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachmentSamples = pColorAttachmentSamples_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AttachmentSampleCountInfoAMD & setColorAttachmentSamples( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachmentSamples_.size() ); + pColorAttachmentSamples = colorAttachmentSamples_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & + setDepthStencilAttachmentSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT + { + depthStencilAttachmentSamples = depthStencilAttachmentSamples_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAttachmentSampleCountInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentSampleCountInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentSamples, depthStencilAttachmentSamples ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentSampleCountInfoAMD const & ) const = default; +#else + bool operator==( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachmentSamples == rhs.pColorAttachmentSamples ) && ( depthStencilAttachmentSamples == rhs.depthStencilAttachmentSamples ); +# endif + } + + bool operator!=( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentSampleCountInfoAMD; + const void * pNext = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + }; + + template <> + struct CppType + { + using Type = AttachmentSampleCountInfoAMD; + }; + + using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD; + + struct Extent2D + { + using NativeType = VkExtent2D; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Extent2D( uint32_t 
width_ = {}, uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT
+      : width( width_ )
+      , height( height_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent2D( *reinterpret_cast<Extent2D const *>( &rhs ) ) {}
+
+    Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent2D const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 Extent2D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Extent2D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtent2D *>( this );
+    }
+
+    operator VkExtent2D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtent2D *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<uint32_t const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( width, height );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Extent2D const & ) const = default;
+#else
+    bool operator==( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( width == rhs.width ) && ( height == rhs.height );
+# endif
+    }
+
+    bool operator!=( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t width = {};
+    uint32_t height = {};
+  };
+
+  struct SampleLocationEXT
+  {
+    using NativeType = VkSampleLocationEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT : SampleLocationEXT( *reinterpret_cast<SampleLocationEXT const *>( &rhs ) ) {}
+
+    SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSampleLocationEXT *>( this );
+    }
+
+    operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSampleLocationEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<float const &, float const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( x, y );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SampleLocationEXT const & ) const = default;
+#else
+    bool operator==( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
{ +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( x == rhs.x ) && ( y == rhs.y ); +# endif + } + + bool operator!=( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float x = {}; + float y = {}; + }; + + struct SampleLocationsInfoEXT + { + using NativeType = VkSampleLocationsInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, + uint32_t sampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , sampleLocationsPerPixel( sampleLocationsPerPixel_ ) + , sampleLocationGridSize( sampleLocationGridSize_ ) + , sampleLocationsCount( sampleLocationsCount_ ) + , pSampleLocations( pSampleLocations_ ) + { + } + + VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SampleLocationsInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_, + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sampleLocations_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , sampleLocationsPerPixel( sampleLocationsPerPixel_ ) + , sampleLocationGridSize( sampleLocationGridSize_ ) + , sampleLocationsCount( static_cast( sampleLocations_.size() ) ) + , pSampleLocations( sampleLocations_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & + setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsPerPixel = sampleLocationsPerPixel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & + setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationGridSize = sampleLocationGridSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsCount = sampleLocationsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & + setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pSampleLocations = pSampleLocations_; + return *this; + } + 
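+    // --- Illustrative aside (not part of the generated header) ---------------
+    // When VULKAN_HPP_DISABLE_ENHANCED_MODE is left undefined, the ArrayProxy
+    // constructor above fills sampleLocationsCount / pSampleLocations from a
+    // container in one step. A hedged sketch using a std::array of custom
+    // sample positions (values are placeholders):
+    //
+    //   std::array<vk::SampleLocationEXT, 2> locations{ { { 0.25f, 0.25f }, { 0.75f, 0.75f } } };
+    //   vk::SampleLocationsInfoEXT info{ vk::SampleCountFlagBits::e2,
+    //                                    vk::Extent2D{ 1, 1 }, locations };
+    // --------------------------------------------------------------------------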
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SampleLocationsInfoEXT & setSampleLocations(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsCount = static_cast<uint32_t>( sampleLocations_.size() );
+      pSampleLocations     = sampleLocations_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSampleLocationsInfoEXT *>( this );
+    }
+
+    operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSampleLocationsInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::SampleLocationEXT * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, sampleLocationsPerPixel, sampleLocationGridSize, sampleLocationsCount, pSampleLocations );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SampleLocationsInfoEXT const & ) const = default;
+#else
+    bool operator==( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) &&
+             ( sampleLocationGridSize == rhs.sampleLocationGridSize ) && ( sampleLocationsCount == rhs.sampleLocationsCount ) &&
+             ( pSampleLocations == rhs.pSampleLocations );
+# endif
+    }
+
+    bool operator!=( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {};
+    uint32_t sampleLocationsCount = {};
+    const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSampleLocationsInfoEXT>
+  {
+    using Type = SampleLocationsInfoEXT;
+  };
+
+  struct AttachmentSampleLocationsEXT
+  {
+    using NativeType = VkAttachmentSampleLocationsEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = {},
+                                                       VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+      : attachmentIndex( attachmentIndex_ )
+      , sampleLocationsInfo( sampleLocationsInfo_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AttachmentSampleLocationsEXT( *reinterpret_cast<AttachmentSampleLocationsEXT const *>( &rhs ) )
+    {
+    }
+
+    AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentIndex = attachmentIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT &
+      setSampleLocationsInfo(
VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( attachmentIndex, sampleLocationsInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AttachmentSampleLocationsEXT const & ) const = default; +#else + bool operator==( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( attachmentIndex == rhs.attachmentIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); +# endif + } + + bool operator!=( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t attachmentIndex = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + }; + + struct BaseInStructure + { + using NativeType = VkBaseInStructure; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + BaseInStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo, + const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : sType( sType_ ) + , pNext( pNext_ ) + { + } + + BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseInStructure( *reinterpret_cast( &rhs ) ) {} + + BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BaseInStructure const & ) const = default; +#else + bool operator==( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); +# endif + } + + bool operator!=( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo; + const struct 
VULKAN_HPP_NAMESPACE::BaseInStructure * pNext = {};
+  };
+
+  struct BaseOutStructure
+  {
+    using NativeType = VkBaseOutStructure;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    BaseOutStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo,
+                      struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : sType( sType_ )
+      , pNext( pNext_ )
+    {
+    }
+
+    BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseOutStructure( *reinterpret_cast<BaseOutStructure const *>( &rhs ) ) {}
+
+    BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseOutStructure const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBaseOutStructure *>( this );
+    }
+
+    operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBaseOutStructure *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, struct VULKAN_HPP_NAMESPACE::BaseOutStructure * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( BaseOutStructure const & ) const = default;
+#else
+    bool operator==( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
+# endif
+    }
+
+    bool operator!=( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
+    struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext = {};
+  };
+
+  struct BindAccelerationStructureMemoryInfoNV
+  {
+    using NativeType = VkBindAccelerationStructureMemoryInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindAccelerationStructureMemoryInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {},
+                                                                VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+                                                                VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
+                                                                uint32_t deviceIndexCount_ = {},
+                                                                const uint32_t * pDeviceIndices_ = {},
+                                                                const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , accelerationStructure( accelerationStructure_ )
+      , memory( memory_ )
+      , memoryOffset( memoryOffset_ )
+      , deviceIndexCount( deviceIndexCount_ )
+      , pDeviceIndices( pDeviceIndices_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindAccelerationStructureMemoryInfoNV( *reinterpret_cast<BindAccelerationStructureMemoryInfoNV const *>( &rhs ) )
+    {
} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , accelerationStructure( accelerationStructure_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , deviceIndexCount( static_cast( deviceIndices_.size() ) ) + , pDeviceIndices( deviceIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindAccelerationStructureMemoryInfoNV & operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindAccelerationStructureMemoryInfoNV & operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindAccelerationStructureMemoryInfoNV & + setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindAccelerationStructureMemoryInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructure, memory, memoryOffset, deviceIndexCount, pDeviceIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindAccelerationStructureMemoryInfoNV const & ) const = default; +#else + bool operator==( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return 
this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices ); +# endif + } + + bool operator!=( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; + }; + + template <> + struct CppType + { + using Type = BindAccelerationStructureMemoryInfoNV; + }; + + struct BindBufferMemoryDeviceGroupInfo + { + using NativeType = VkBindBufferMemoryDeviceGroupInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, + const uint32_t * pDeviceIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , deviceIndexCount( deviceIndexCount_ ) + , pDeviceIndices( pDeviceIndices_ ) + { + } + + VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindBufferMemoryDeviceGroupInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindBufferMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), deviceIndexCount( static_cast( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindBufferMemoryDeviceGroupInfo & + setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +# endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindBufferMemoryDeviceGroupInfo const & ) const = default; +#else + bool operator==( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices ); +# endif + } + + bool operator!=( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo; + const void * pNext = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; + }; + + template <> + struct CppType + { + using Type = BindBufferMemoryDeviceGroupInfo; + }; + + using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; + + struct BindBufferMemoryInfo + { + using NativeType = VkBindBufferMemoryInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , buffer( buffer_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + { + } + + VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindBufferMemoryInfo( *reinterpret_cast( &rhs ) ) + { + } + + BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator 
VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindBufferMemoryInfo *>( this );
+    }
+
+    operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindBufferMemoryInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::Buffer const &,
+               VULKAN_HPP_NAMESPACE::DeviceMemory const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, buffer, memory, memoryOffset );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( BindBufferMemoryInfo const & ) const = default;
+#else
+    bool operator==( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset );
+# endif
+    }
+
+    bool operator!=( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eBindBufferMemoryInfo>
+  {
+    using Type = BindBufferMemoryInfo;
+  };
+
+  using BindBufferMemoryInfoKHR = BindBufferMemoryInfo;
+
+  struct BindDescriptorBufferEmbeddedSamplersInfoEXT
+  {
+    using NativeType = VkBindDescriptorBufferEmbeddedSamplersInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindDescriptorBufferEmbeddedSamplersInfoEXT( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+                                                                      uint32_t set_ = {},
+                                                                      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , stageFlags( stageFlags_ )
+      , layout( layout_ )
+      , set( set_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR BindDescriptorBufferEmbeddedSamplersInfoEXT( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindDescriptorBufferEmbeddedSamplersInfoEXT( VkBindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindDescriptorBufferEmbeddedSamplersInfoEXT( *reinterpret_cast<BindDescriptorBufferEmbeddedSamplersInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    BindDescriptorBufferEmbeddedSamplersInfoEXT & operator=( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BindDescriptorBufferEmbeddedSamplersInfoEXT & operator=( VkBindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT &
+      setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
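// [editor's sketch, not part of the generated header] Every set* member
// returns *this, so these structs are normally filled with a fluent chain
// rather than positional constructor arguments. For the struct being defined
// here (the flag value and pipelineLayout are hypothetical):
//
//   auto info = vk::BindDescriptorBufferEmbeddedSamplersInfoEXT{}
//                 .setStageFlags( vk::ShaderStageFlagBits::eFragment )
//                 .setLayout( pipelineLayout )
//                 .setSet( 0 );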
+ VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT + { + set = set_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stageFlags, layout, set ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindDescriptorBufferEmbeddedSamplersInfoEXT const & ) const = default; +#else + bool operator==( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( set == rhs.set ); +# endif + } + + bool operator!=( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + uint32_t set = {}; + }; + + template <> + struct CppType + { + using Type = BindDescriptorBufferEmbeddedSamplersInfoEXT; + }; + + struct BindDescriptorSetsInfoKHR + { + using NativeType = VkBindDescriptorSetsInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindDescriptorSetsInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfoKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t firstSet_ = {}, + uint32_t descriptorSetCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets_ = {}, + uint32_t dynamicOffsetCount_ = {}, + const uint32_t * pDynamicOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stageFlags( stageFlags_ ) + , layout( layout_ ) + , firstSet( firstSet_ ) + , descriptorSetCount( descriptorSetCount_ ) + , pDescriptorSets( pDescriptorSets_ ) + , dynamicOffsetCount( dynamicOffsetCount_ ) + , pDynamicOffsets( pDynamicOffsets_ ) + { + } + + VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfoKHR( BindDescriptorSetsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindDescriptorSetsInfoKHR( VkBindDescriptorSetsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : BindDescriptorSetsInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindDescriptorSetsInfoKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_, + uint32_t firstSet_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorSets_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicOffsets_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stageFlags( stageFlags_ ) + , layout( layout_ ) + , firstSet( firstSet_ ) + , descriptorSetCount( static_cast( 
descriptorSets_.size() ) ) + , pDescriptorSets( descriptorSets_.data() ) + , dynamicOffsetCount( static_cast( dynamicOffsets_.size() ) ) + , pDynamicOffsets( dynamicOffsets_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindDescriptorSetsInfoKHR & operator=( BindDescriptorSetsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindDescriptorSetsInfoKHR & operator=( VkBindDescriptorSetsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setFirstSet( uint32_t firstSet_ ) VULKAN_HPP_NOEXCEPT + { + firstSet = firstSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = descriptorSetCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setPDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorSets = pDescriptorSets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindDescriptorSetsInfoKHR & + setDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorSets_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = static_cast( descriptorSets_.size() ); + pDescriptorSets = descriptorSets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setDynamicOffsetCount( uint32_t dynamicOffsetCount_ ) VULKAN_HPP_NOEXCEPT + { + dynamicOffsetCount = dynamicOffsetCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setPDynamicOffsets( const uint32_t * pDynamicOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicOffsets = pDynamicOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindDescriptorSetsInfoKHR & setDynamicOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dynamicOffsetCount = static_cast( dynamicOffsets_.size() ); + pDynamicOffsets = dynamicOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindDescriptorSetsInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindDescriptorSetsInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stageFlags, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindDescriptorSetsInfoKHR const & ) const = default; +#else + 
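// [editor's note, not part of the generated header] When the compiler offers
// no operator<=>, equality below falls back to either the reflect() tuple
// (std::tie over all members, so std::tuple's operator== does the memberwise
// work) or an explicit member-by-member check. Both compare pointer members
// such as pDescriptorSets by address only, never by the arrays they point at.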
bool operator==( BindDescriptorSetsInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( firstSet == rhs.firstSet ) && + ( descriptorSetCount == rhs.descriptorSetCount ) && ( pDescriptorSets == rhs.pDescriptorSets ) && + ( dynamicOffsetCount == rhs.dynamicOffsetCount ) && ( pDynamicOffsets == rhs.pDynamicOffsets ); +# endif + } + + bool operator!=( BindDescriptorSetsInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindDescriptorSetsInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + uint32_t firstSet = {}; + uint32_t descriptorSetCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets = {}; + uint32_t dynamicOffsetCount = {}; + const uint32_t * pDynamicOffsets = {}; + }; + + template <> + struct CppType + { + using Type = BindDescriptorSetsInfoKHR; + }; + + struct Offset2D + { + using NativeType = VkOffset2D; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT + : x( x_ ) + , y( y_ ) + { + } + + VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset2D( *reinterpret_cast( &rhs ) ) {} + + Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Offset2D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOffset2D &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( x, y ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Offset2D const & ) const = default; +#else + bool operator==( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( x == rhs.x ) && ( y == rhs.y ); +# endif + } + + bool operator!=( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + int32_t x = {}; + int32_t y = {}; + }; + + struct Rect2D + { + using NativeType = VkRect2D; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : offset( offset_ ) + , extent( extent_ ) + { + } + + VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT : Rect2D( 
*reinterpret_cast<Rect2D const *>( &rhs ) ) {}
+
+    Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRect2D *>( this );
+    }
+
+    operator VkRect2D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRect2D *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( offset, extent );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Rect2D const & ) const = default;
+#else
+    bool operator==( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( offset == rhs.offset ) && ( extent == rhs.extent );
+# endif
+    }
+
+    bool operator!=( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Offset2D offset = {};
+    VULKAN_HPP_NAMESPACE::Extent2D extent = {};
+  };
+
+  struct BindImageMemoryDeviceGroupInfo
+  {
+    using NativeType = VkBindImageMemoryDeviceGroupInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {},
+                                                         const uint32_t * pDeviceIndices_ = {},
+                                                         uint32_t splitInstanceBindRegionCount_ = {},
+                                                         const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ = {},
+                                                         const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , deviceIndexCount( deviceIndexCount_ )
+      , pDeviceIndices( pDeviceIndices_ )
+      , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ )
+      , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BindImageMemoryDeviceGroupInfo( *reinterpret_cast<BindImageMemoryDeviceGroupInfo const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    BindImageMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_,
+                                    VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ = {},
+                                    const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) )
+      , pDeviceIndices( deviceIndices_.data() )
+      , splitInstanceBindRegionCount( static_cast<uint32_t>( splitInstanceBindRegions_.size() ) )
+      , pSplitInstanceBindRegions( splitInstanceBindRegions_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
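// [editor's sketch, not part of the generated header] Offset2D/Rect2D defined
// above are the plain geometry carriers used throughout the API, e.g. for a
// scissor rectangle (the dimensions are hypothetical):
//
//   vk::Rect2D scissor{ vk::Offset2D{ 0, 0 }, vk::Extent2D{ 800, 600 } };
//   commandBuffer.setScissor( 0, scissor );  // commandBuffer: a recording vk::CommandBuffer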
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindImageMemoryDeviceGroupInfo & + setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = static_cast( deviceIndices_.size() ); + pDeviceIndices = deviceIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT + { + splitInstanceBindRegionCount = splitInstanceBindRegionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & + setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT + { + pSplitInstanceBindRegions = pSplitInstanceBindRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT + { + splitInstanceBindRegionCount = static_cast( splitInstanceBindRegions_.size() ); + pSplitInstanceBindRegions = splitInstanceBindRegions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices, splitInstanceBindRegionCount, pSplitInstanceBindRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemoryDeviceGroupInfo const & ) const = default; +#else + bool operator==( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices ) && + ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions ); +# endif + } + + bool operator!=( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eBindImageMemoryDeviceGroupInfo; + const void * pNext = {}; + uint32_t deviceIndexCount = {}; + const uint32_t * pDeviceIndices = {}; + uint32_t splitInstanceBindRegionCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions = {}; + }; + + template <> + struct CppType + { + using Type = BindImageMemoryDeviceGroupInfo; + }; + + using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; + + struct BindImageMemoryInfo + { + using NativeType = VkBindImageMemoryInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , image( image_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + { + } + + VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindImageMemoryInfo( *reinterpret_cast( &rhs ) ) + { + } + + BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image, memory, memoryOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemoryInfo const & ) const = default; +#else + bool operator==( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ); +# endif + } + + bool operator!=( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo; + const void * pNext = {}; + 
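// [editor's note, not part of the generated header] pNext here may chain the
// related structs defined just below, e.g. BindImageMemorySwapchainInfoKHR or
// BindImagePlaneMemoryInfo.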
VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + }; + + template <> + struct CppType + { + using Type = BindImageMemoryInfo; + }; + + using BindImageMemoryInfoKHR = BindImageMemoryInfo; + + struct BindImageMemorySwapchainInfoKHR + { + using NativeType = VkBindImageMemorySwapchainInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, + uint32_t imageIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , swapchain( swapchain_ ) + , imageIndex( imageIndex_ ) + { + } + + VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : BindImageMemorySwapchainInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) VULKAN_HPP_NOEXCEPT + { + imageIndex = imageIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchain, imageIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImageMemorySwapchainInfoKHR const & ) const = default; +#else + bool operator==( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( imageIndex == rhs.imageIndex ); +# endif + } + + bool operator!=( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint32_t imageIndex = {}; + }; + + template <> + struct CppType + { + using Type = BindImageMemorySwapchainInfoKHR; + }; + + struct 
BindImagePlaneMemoryInfo + { + using NativeType = VkBindImagePlaneMemoryInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , planeAspect( planeAspect_ ) + { + } + + VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindImagePlaneMemoryInfo( *reinterpret_cast( &rhs ) ) + { + } + + BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + { + planeAspect = planeAspect_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, planeAspect ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindImagePlaneMemoryInfo const & ) const = default; +#else + bool operator==( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect ); +# endif + } + + bool operator!=( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + }; + + template <> + struct CppType + { + using Type = BindImagePlaneMemoryInfo; + }; + + using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; + + struct BindIndexBufferIndirectCommandNV + { + using NativeType = VkBindIndexBufferIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BindIndexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT + : bufferAddress( bufferAddress_ ) + , size( size_ ) + , indexType( indexType_ ) + { + } + + VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( 
BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindIndexBufferIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + BindIndexBufferIndirectCommandNV & operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferAddress, size, indexType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindIndexBufferIndirectCommandNV const & ) const = default; +#else + bool operator==( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType ); +# endif + } + + bool operator!=( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + }; + + struct BindMemoryStatusKHR + { + using NativeType = VkBindMemoryStatusKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindMemoryStatusKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindMemoryStatusKHR( VULKAN_HPP_NAMESPACE::Result * pResult_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pResult( pResult_ ) + { + } + + VULKAN_HPP_CONSTEXPR BindMemoryStatusKHR( BindMemoryStatusKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindMemoryStatusKHR( VkBindMemoryStatusKHR const & rhs ) VULKAN_HPP_NOEXCEPT : BindMemoryStatusKHR( *reinterpret_cast( &rhs ) ) + { + } + + BindMemoryStatusKHR & operator=( BindMemoryStatusKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindMemoryStatusKHR & operator=( VkBindMemoryStatusKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindMemoryStatusKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindMemoryStatusKHR & setPResult( VULKAN_HPP_NAMESPACE::Result * pResult_ ) VULKAN_HPP_NOEXCEPT + { + pResult = pResult_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindMemoryStatusKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindMemoryStatusKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pResult ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindMemoryStatusKHR const & ) const = default; +#else + bool operator==( BindMemoryStatusKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pResult == rhs.pResult ); +# endif + } + + bool operator!=( BindMemoryStatusKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindMemoryStatusKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Result * pResult = {}; + }; + + template <> + struct CppType + { + using Type = BindMemoryStatusKHR; + }; + + struct BindPipelineIndirectCommandNV + { + using NativeType = VkBindPipelineIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineAddress( pipelineAddress_ ) + { + } + + VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindPipelineIndirectCommandNV( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindPipelineIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + BindPipelineIndirectCommandNV & operator=( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindPipelineIndirectCommandNV & operator=( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindPipelineIndirectCommandNV & setPipelineAddress( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress_ ) VULKAN_HPP_NOEXCEPT + { + pipelineAddress = pipelineAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindPipelineIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindPipelineIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( pipelineAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindPipelineIndirectCommandNV const & ) const = default; +#else + bool operator==( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( pipelineAddress == rhs.pipelineAddress ); +# endif + } + + bool operator!=( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress = {}; + }; + + struct BindShaderGroupIndirectCommandNV + { + using NativeType = VkBindShaderGroupIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( uint32_t groupIndex_ = {} ) VULKAN_HPP_NOEXCEPT : groupIndex( groupIndex_ ) {} + + VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindShaderGroupIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + BindShaderGroupIndirectCommandNV & operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) VULKAN_HPP_NOEXCEPT + { + groupIndex = groupIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( groupIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindShaderGroupIndirectCommandNV const & ) const = default; +#else + bool operator==( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( groupIndex == rhs.groupIndex ); +# endif + } + + bool operator!=( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t groupIndex = {}; + }; + + struct SparseMemoryBind + { + using NativeType = VkSparseMemoryBind; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : resourceOffset( resourceOffset_ ) + , size( size_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT : SparseMemoryBind( *reinterpret_cast( &rhs ) ) {} + + SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif 
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseMemoryBind const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      resourceOffset = resourceOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSparseMemoryBind *>( this );
+    }
+
+    operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSparseMemoryBind *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &,
+               VULKAN_HPP_NAMESPACE::DeviceMemory const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &,
+               VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( resourceOffset, size, memory, memoryOffset, flags );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SparseMemoryBind const & ) const = default;
+#else
+    bool operator==( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( resourceOffset == rhs.resourceOffset ) && ( size == rhs.size ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) &&
+             ( flags == rhs.flags );
+# endif
+    }
+
+    bool operator!=( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+    VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
+  };
+
+  struct SparseBufferMemoryBindInfo
+  {
+    using NativeType = VkSparseBufferMemoryBindInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+                                                     uint32_t bindCount_ = {},
+                                                     const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+      , bindCount( bindCount_ )
+      , pBinds( pBinds_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SparseBufferMemoryBindInfo( *reinterpret_cast<SparseBufferMemoryBindInfo const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_,
+                                VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
+      : buffer( buffer_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
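// [editor's sketch, not part of the generated header] A minimal sparse buffer
// bind using the two structs above; sparseBuffer, deviceMemory and queue are
// assumed to exist already, and BindSparseInfo is defined elsewhere in this
// header:
//
//   vk::SparseMemoryBind bind( 0 /*resourceOffset*/, 65536 /*size*/, deviceMemory, 0 /*memoryOffset*/ );
//   vk::SparseBufferMemoryBindInfo bufferBind( sparseBuffer, bind );
//   auto info = vk::BindSparseInfo{}.setBufferBinds( bufferBind );
//   queue.bindSparse( info, vk::Fence{} );  // queue must support sparse binding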
SparseBufferMemoryBindInfo & operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = bindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBinds = pBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseBufferMemoryBindInfo & + setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( buffer, bindCount, pBinds ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseBufferMemoryBindInfo const & ) const = default; +#else + bool operator==( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( buffer == rhs.buffer ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); +# endif + } + + bool operator!=( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {}; + }; + + struct SparseImageOpaqueMemoryBindInfo + { + using NativeType = VkSparseImageOpaqueMemoryBindInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + { + } + + VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageOpaqueMemoryBindInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : image( image_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SparseImageOpaqueMemoryBindInfo & operator=( 
SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = bindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBinds = pBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageOpaqueMemoryBindInfo & + setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( image, bindCount, pBinds ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageOpaqueMemoryBindInfo const & ) const = default; +#else + bool operator==( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); +# endif + } + + bool operator!=( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Image image = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds = {}; + }; + + struct ImageSubresource + { + using NativeType = VkImageSubresource; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , mipLevel( mipLevel_ ) + , arrayLayer( arrayLayer_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSubresource( *reinterpret_cast( &rhs ) ) {} + + ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return 
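// A small sketch of ImageSubresource, whose remaining setters continue
// below: it selects a single aspect/mip/layer, e.g. for
// vkGetImageSubresourceLayout.
//
//   auto sub = vk::ImageSubresource{}
//                .setAspectMask( vk::ImageAspectFlagBits::eColor )
//                .setMipLevel( 0 )
//                .setArrayLayer( 0 );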
*this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT + { + mipLevel = mipLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + arrayLayer = arrayLayer_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( aspectMask, mipLevel, arrayLayer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresource const & ) const = default; +#else + bool operator==( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( arrayLayer == rhs.arrayLayer ); +# endif + } + + bool operator!=( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t mipLevel = {}; + uint32_t arrayLayer = {}; + }; + + struct Offset3D + { + using NativeType = VkOffset3D; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT + : x( x_ ) + , y( y_ ) + , z( z_ ) + { + } + + VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset3D( *reinterpret_cast( &rhs ) ) {} + + explicit Offset3D( Offset2D const & offset2D, int32_t z_ = {} ) : x( offset2D.x ), y( offset2D.y ), z( z_ ) {} + + Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Offset3D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Offset3D & setZ( int32_t z_ ) VULKAN_HPP_NOEXCEPT + { + z = z_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOffset3D &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( x, y, z ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Offset3D const & ) const = default; +#else + bool operator==( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ); +# endif + } + + bool operator!=( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs 
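// When VULKAN_HPP_HAS_SPACESHIP_OPERATOR is unavailable, operator== above
// compares either the reflect() results or each member explicitly; the two
// are equivalent. What the reflect() path expands to for two hypothetical
// ImageSubresource values `a` and `b`:
//
//   std::tie( a.aspectMask, a.mipLevel, a.arrayLayer ) ==
//     std::tie( b.aspectMask, b.mipLevel, b.arrayLayer )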
);
+    }
+#endif
+
+  public:
+    int32_t x = {};
+    int32_t y = {};
+    int32_t z = {};
+  };
+
+  struct Extent3D
+  {
+    using NativeType = VkExtent3D;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
+      : width( width_ )
+      , height( height_ )
+      , depth( depth_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent3D( *reinterpret_cast<Extent3D const *>( &rhs ) ) {}
+
+    explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} ) : width( extent2D.width ), height( extent2D.height ), depth( depth_ ) {}
+
+    Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Extent3D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Extent3D & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depth = depth_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtent3D *>( this );
+    }
+
+    operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtent3D *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( width, height, depth );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Extent3D const & ) const = default;
+#else
+    bool operator==( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth );
+# endif
+    }
+
+    bool operator!=( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t width  = {};
+    uint32_t height = {};
+    uint32_t depth  = {};
+  };
+
+  struct SparseImageMemoryBind
+  {
+    using NativeType = VkSparseImageMemoryBind;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource      subresource_  = {},
+                                                VULKAN_HPP_NAMESPACE::Offset3D              offset_       = {},
+                                                VULKAN_HPP_NAMESPACE::Extent3D              extent_       = {},
+                                                VULKAN_HPP_NAMESPACE::DeviceMemory          memory_       = {},
+                                                VULKAN_HPP_NAMESPACE::DeviceSize            memoryOffset_ = {},
+                                                VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_        = {} ) VULKAN_HPP_NOEXCEPT
+      : subresource( subresource_ )
+      , offset( offset_ )
+      , extent( extent_ )
+      , memory( memory_ )
+      , memoryOffset( memoryOffset_ )
+      , flags( flags_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SparseImageMemoryBind( *reinterpret_cast<SparseImageMemoryBind const *>( &rhs ) )
+    {
+    }
+
+    SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
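// A sketch of the explicit Extent3D( Extent2D const &, uint32_t )
// constructor above, which promotes a 2D extent (e.g. a swapchain size) to
// 3D; `swapchainExtent` is hypothetical:
//
//   vk::Extent2D swapchainExtent{ 1280, 720 };
//   vk::Extent3D imageExtent( swapchainExtent, 1 );   // depth = 1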
SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT + { + subresource = subresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D const & offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( subresource, offset, extent, memory, memoryOffset, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryBind const & ) const = default; +#else + bool operator==( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( subresource == rhs.subresource ) && ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D offset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; + }; + + struct SparseImageMemoryBindInfo + { + using NativeType = VkSparseImageMemoryBindInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + { + } + + VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryBindInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
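// A sketch using the SparseImageMemoryBind setters above: one bind maps a
// rectangular region of one subresource. Assuming a hypothetical
// vk::DeviceMemory `memory`, binding a 128x128 tile of mip 0:
//
//   auto regionBind = vk::SparseImageMemoryBind{}
//                       .setSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0 } )
//                       .setOffset( { 0, 0, 0 } )
//                       .setExtent( { 128, 128, 1 } )
//                       .setMemory( memory )
//                       .setMemoryOffset( 0 );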
SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) + : image( image_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SparseImageMemoryBindInfo & operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = bindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBinds = pBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SparseImageMemoryBindInfo & + setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & binds_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = static_cast( binds_.size() ); + pBinds = binds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( image, bindCount, pBinds ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryBindInfo const & ) const = default; +#else + bool operator==( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); +# endif + } + + bool operator!=( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Image image = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds = {}; + }; + + struct BindSparseInfo + { + using NativeType = VkBindSparseInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + uint32_t bufferBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ = {}, + uint32_t imageOpaqueBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ = {}, + uint32_t imageBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {}, + const void * 
pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , bufferBindCount( bufferBindCount_ ) + , pBufferBinds( pBufferBinds_ ) + , imageOpaqueBindCount( imageOpaqueBindCount_ ) + , pImageOpaqueBinds( pImageOpaqueBinds_ ) + , imageBindCount( imageBindCount_ ) + , pImageBinds( pImageBinds_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphores( pSignalSemaphores_ ) + { + } + + VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindSparseInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferBinds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageOpaqueBinds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageBinds_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , bufferBindCount( static_cast( bufferBinds_.size() ) ) + , pBufferBinds( bufferBinds_.data() ) + , imageOpaqueBindCount( static_cast( imageOpaqueBinds_.size() ) ) + , pImageOpaqueBinds( imageOpaqueBinds_.data() ) + , imageBindCount( static_cast( imageBinds_.size() ) ) + , pImageBinds( imageBinds_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphores_.size() ) ) + , pSignalSemaphores( signalSemaphores_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & + setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT + { + bufferBindCount = bufferBindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBufferBinds = pBufferBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setBufferBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & 
bufferBinds_ ) VULKAN_HPP_NOEXCEPT + { + bufferBindCount = static_cast( bufferBinds_.size() ); + pBufferBinds = bufferBinds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT + { + imageOpaqueBindCount = imageOpaqueBindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & + setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT + { + pImageOpaqueBinds = pImageOpaqueBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setImageOpaqueBinds( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT + { + imageOpaqueBindCount = static_cast( imageOpaqueBinds_.size() ); + pImageOpaqueBinds = imageOpaqueBinds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT + { + imageBindCount = imageBindCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ ) VULKAN_HPP_NOEXCEPT + { + pImageBinds = pImageBinds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & setImageBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageBinds_ ) + VULKAN_HPP_NOEXCEPT + { + imageBindCount = static_cast( imageBinds_.size() ); + pImageBinds = imageBinds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphores = pSignalSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindSparseInfo & + setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphores_.size() ); + pSignalSemaphores = signalSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + waitSemaphoreCount, + pWaitSemaphores, + bufferBindCount, + pBufferBinds, + imageOpaqueBindCount, + pImageOpaqueBinds, + imageBindCount, + pImageBinds, + signalSemaphoreCount, + pSignalSemaphores ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindSparseInfo const & ) const = default; +#else + bool operator==( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) 
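// A sketch of one sparse-binding submission, assuming hypothetical
// `sparseQueue`, `fence`, and the ArrayProxy-compatible containers named
// below; the enhanced-mode BindSparseInfo constructor above derives every
// count/pointer pair from its proxies:
//
//   vk::BindSparseInfo info( waitSemaphores,    // vk::Semaphore
//                            bufferBinds,       // vk::SparseBufferMemoryBindInfo
//                            {},                // no opaque image binds
//                            imageBinds,        // vk::SparseImageMemoryBindInfo
//                            signalSemaphores );
//   sparseQueue.bindSparse( info, fence );      // queue needs sparse-binding support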
&& + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( bufferBindCount == rhs.bufferBindCount ) && ( pBufferBinds == rhs.pBufferBinds ) && + ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) && + ( imageBindCount == rhs.imageBindCount ) && ( pImageBinds == rhs.pImageBinds ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && + ( pSignalSemaphores == rhs.pSignalSemaphores ); +# endif + } + + bool operator!=( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + uint32_t bufferBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds = {}; + uint32_t imageOpaqueBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds = {}; + uint32_t imageBindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds = {}; + uint32_t signalSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {}; + }; + + template <> + struct CppType + { + using Type = BindSparseInfo; + }; + + struct BindVertexBufferIndirectCommandNV + { + using NativeType = VkBindVertexBufferIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferAddress( bufferAddress_ ) + , size( size_ ) + , stride( stride_ ) + { + } + + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindVertexBufferIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + BindVertexBufferIndirectCommandNV & operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferAddress, size, stride ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
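// The CppType specialization above is vulkan.hpp's compile-time map from a
// StructureType enumerant back to the corresponding C++ struct; a sketch of
// what it guarantees:
//
//   static_assert( std::is_same<
//                    vk::CppType<vk::StructureType, vk::StructureType::eBindSparseInfo>::Type,
//                    vk::BindSparseInfo>::value,
//                  "eBindSparseInfo maps to BindSparseInfo" );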
operator<=>( BindVertexBufferIndirectCommandNV const & ) const = default; +#else + bool operator==( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride == rhs.stride ); +# endif + } + + bool operator!=( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + uint32_t stride = {}; + }; + + struct BindVideoSessionMemoryInfoKHR + { + using NativeType = VkBindVideoSessionMemoryInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindVideoSessionMemoryInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR( uint32_t memoryBindIndex_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memoryBindIndex( memoryBindIndex_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , memorySize( memorySize_ ) + { + } + + VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindVideoSessionMemoryInfoKHR( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : BindVideoSessionMemoryInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + BindVideoSessionMemoryInfoKHR & operator=( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindVideoSessionMemoryInfoKHR & operator=( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT + { + memoryBindIndex = memoryBindIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemorySize( VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ ) VULKAN_HPP_NOEXCEPT + { + memorySize = memorySize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindVideoSessionMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindVideoSessionMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryBindIndex, memory, memoryOffset, memorySize ); + } +#endif 
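// A hedged sketch of BindVideoSessionMemoryInfoKHR: each instance binds one
// memory range to one bind index reported by
// vkGetVideoSessionMemoryRequirementsKHR. Both `req` and `memory` are
// hypothetical:
//
//   auto videoBind = vk::BindVideoSessionMemoryInfoKHR{}
//                      .setMemoryBindIndex( req.memoryBindIndex )
//                      .setMemory( memory )
//                      .setMemoryOffset( 0 )
//                      .setMemorySize( req.memoryRequirements.size );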
+ +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindVideoSessionMemoryInfoKHR const & ) const = default; +#else + bool operator==( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && ( memory == rhs.memory ) && + ( memoryOffset == rhs.memoryOffset ) && ( memorySize == rhs.memorySize ); +# endif + } + + bool operator!=( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindVideoSessionMemoryInfoKHR; + const void * pNext = {}; + uint32_t memoryBindIndex = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memorySize = {}; + }; + + template <> + struct CppType + { + using Type = BindVideoSessionMemoryInfoKHR; + }; + + struct BlitImageCubicWeightsInfoQCOM + { + using NativeType = VkBlitImageCubicWeightsInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageCubicWeightsInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BlitImageCubicWeightsInfoQCOM( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , cubicWeights( cubicWeights_ ) + { + } + + VULKAN_HPP_CONSTEXPR BlitImageCubicWeightsInfoQCOM( BlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BlitImageCubicWeightsInfoQCOM( VkBlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : BlitImageCubicWeightsInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + BlitImageCubicWeightsInfoQCOM & operator=( BlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BlitImageCubicWeightsInfoQCOM & operator=( VkBlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BlitImageCubicWeightsInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageCubicWeightsInfoQCOM & setCubicWeights( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ ) VULKAN_HPP_NOEXCEPT + { + cubicWeights = cubicWeights_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBlitImageCubicWeightsInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBlitImageCubicWeightsInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cubicWeights ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BlitImageCubicWeightsInfoQCOM const & ) const = default; +#else + bool operator==( BlitImageCubicWeightsInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == 
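// BlitImageCubicWeightsInfoQCOM is a pNext extension struct; chained onto a
// blit it selects the cubic filter weights. A sketch, with `blitInfo`
// assumed to be a vk::BlitImageInfo2 defined elsewhere:
//
//   vk::BlitImageCubicWeightsInfoQCOM weights{};   // defaults to eCatmullRom
//   weights.setCubicWeights( vk::CubicFilterWeightsQCOM::eMitchellNetravali );
//   blitInfo.setPNext( &weights );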
rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicWeights == rhs.cubicWeights );
+# endif
+    }
+
+    bool operator!=( BlitImageCubicWeightsInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType          sType        = StructureType::eBlitImageCubicWeightsInfoQCOM;
+    const void *                                 pNext        = {};
+    VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eBlitImageCubicWeightsInfoQCOM>
+  {
+    using Type = BlitImageCubicWeightsInfoQCOM;
+  };
+
+  struct ImageSubresourceLayers
+  {
+    using NativeType = VkImageSubresourceLayers;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_     = {},
+                                                 uint32_t                               mipLevel_       = {},
+                                                 uint32_t                               baseArrayLayer_ = {},
+                                                 uint32_t                               layerCount_     = {} ) VULKAN_HPP_NOEXCEPT
+      : aspectMask( aspectMask_ )
+      , mipLevel( mipLevel_ )
+      , baseArrayLayer( baseArrayLayer_ )
+      , layerCount( layerCount_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageSubresourceLayers( *reinterpret_cast<ImageSubresourceLayers const *>( &rhs ) )
+    {
+    }
+
+    ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLevel = mipLevel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSubresourceLayers *>( this );
+    }
+
+    operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSubresourceLayers *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( aspectMask, mipLevel, baseArrayLayer, layerCount );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageSubresourceLayers const & ) const = default;
+#else
+    bool operator==( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
+# endif
+    }
+
+    bool operator!=( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t mipLevel = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; + }; + + struct ImageBlit2 + { + using NativeType = VkImageBlit2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ImageBlit2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + std::array const & srcOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + std::array const & dstOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , srcSubresource( srcSubresource_ ) + , srcOffsets( srcOffsets_ ) + , dstSubresource( dstSubresource_ ) + , dstOffsets( dstOffsets_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit2( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit2( *reinterpret_cast( &rhs ) ) {} + + ImageBlit2 & operator=( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageBlit2 & operator=( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT + { + srcOffsets = srcOffsets_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dstOffsets = dstOffsets_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageBlit2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageBlit2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcSubresource, srcOffsets, dstSubresource, dstOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageBlit2 const & ) const = default; +#else + bool operator==( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffsets == rhs.dstOffsets ); +# endif + } + + bool operator!=( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
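// In ImageBlit2 the two srcOffsets/dstOffsets entries are opposite corners
// of each region, which is how a blit expresses scaling. A sketch of a
// 1024x1024 -> 512x512 downscale of mip 0:
//
//   vk::ImageBlit2 region{};
//   region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
//         .setSrcOffsets( { vk::Offset3D{ 0, 0, 0 }, vk::Offset3D{ 1024, 1024, 1 } } )
//         .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
//         .setDstOffsets( { vk::Offset3D{ 0, 0, 0 }, vk::Offset3D{ 512, 512, 1 } } );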
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageBlit2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; + }; + + template <> + struct CppType + { + using Type = ImageBlit2; + }; + + using ImageBlit2KHR = ImageBlit2; + + struct BlitImageInfo2 + { + using NativeType = VkBlitImageInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ = {}, + VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + , filter( filter_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BlitImageInfo2( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : BlitImageInfo2( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BlitImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + , filter( filter_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BlitImageInfo2 & operator=( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BlitImageInfo2 & operator=( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImageLayout( 
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BlitImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setFilter( VULKAN_HPP_NAMESPACE::Filter filter_ ) VULKAN_HPP_NOEXCEPT + { + filter = filter_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBlitImageInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBlitImageInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BlitImageInfo2 const & ) const = default; +#else + bool operator==( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && + ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ) && + ( filter == rhs.filter ); +# endif + } + + bool operator!=( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions = {}; + VULKAN_HPP_NAMESPACE::Filter filter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + }; + + template <> + struct CppType + { + using Type = BlitImageInfo2; + }; + + using BlitImageInfo2KHR = BlitImageInfo2; + + struct BufferCaptureDescriptorDataInfoEXT + { + using NativeType = VkBufferCaptureDescriptorDataInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCaptureDescriptorDataInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , buffer( buffer_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT( BufferCaptureDescriptorDataInfoEXT const & 
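// A sketch of submitting the blit, assuming hypothetical `cmd`, `srcImage`,
// and `dstImage` already in the transfer layouts, plus `region` from the
// sketch above; the enhanced-mode constructor derives regionCount/pRegions:
//
//   vk::BlitImageInfo2 blitInfo( srcImage, vk::ImageLayout::eTransferSrcOptimal,
//                                dstImage, vk::ImageLayout::eTransferDstOptimal,
//                                region, vk::Filter::eLinear );
//   cmd.blitImage2( blitInfo );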
rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCaptureDescriptorDataInfoEXT( VkBufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + BufferCaptureDescriptorDataInfoEXT & operator=( BufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCaptureDescriptorDataInfoEXT & operator=( VkBufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCaptureDescriptorDataInfoEXT const & ) const = default; +#else + bool operator==( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCaptureDescriptorDataInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + + template <> + struct CppType + { + using Type = BufferCaptureDescriptorDataInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct BufferCollectionBufferCreateInfoFUCHSIA + { + using NativeType = VkBufferCollectionBufferCreateInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, + uint32_t index_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , collection( collection_ ) + , index( index_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionBufferCreateInfoFUCHSIA( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCollectionBufferCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + BufferCollectionBufferCreateInfoFUCHSIA & operator=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + 
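// BufferCaptureDescriptorDataInfoEXT (VK_EXT_descriptor_buffer) wraps the
// buffer whose opaque capture data is queried via
// vkGetBufferOpaqueCaptureDescriptorDataEXT. A sketch with a hypothetical
// `buffer`:
//
//   auto captureInfo = vk::BufferCaptureDescriptorDataInfoEXT{}.setBuffer( buffer );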
BufferCollectionBufferCreateInfoFUCHSIA & operator=( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & + setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT + { + collection = collection_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferCollectionBufferCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCollectionBufferCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, collection, index ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCollectionBufferCreateInfoFUCHSIA const & ) const = default; +# else + bool operator==( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index ); +# endif + } + + bool operator!=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {}; + uint32_t index = {}; + }; + + template <> + struct CppType + { + using Type = BufferCollectionBufferCreateInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct BufferCollectionConstraintsInfoFUCHSIA + { + using NativeType = VkBufferCollectionConstraintsInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( uint32_t minBufferCount_ = {}, + uint32_t maxBufferCount_ = {}, + uint32_t minBufferCountForCamping_ = {}, + uint32_t minBufferCountForDedicatedSlack_ = {}, + uint32_t minBufferCountForSharedSlack_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , minBufferCount( minBufferCount_ ) + , maxBufferCount( maxBufferCount_ ) + , minBufferCountForCamping( minBufferCountForCamping_ ) + , minBufferCountForDedicatedSlack( minBufferCountForDedicatedSlack_ ) + , minBufferCountForSharedSlack( minBufferCountForSharedSlack_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionConstraintsInfoFUCHSIA( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) 
VULKAN_HPP_NOEXCEPT + : BufferCollectionConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + BufferCollectionConstraintsInfoFUCHSIA & operator=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCollectionConstraintsInfoFUCHSIA & operator=( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCount( uint32_t minBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + minBufferCount = minBufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMaxBufferCount( uint32_t maxBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + maxBufferCount = maxBufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForCamping( uint32_t minBufferCountForCamping_ ) VULKAN_HPP_NOEXCEPT + { + minBufferCountForCamping = minBufferCountForCamping_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & + setMinBufferCountForDedicatedSlack( uint32_t minBufferCountForDedicatedSlack_ ) VULKAN_HPP_NOEXCEPT + { + minBufferCountForDedicatedSlack = minBufferCountForDedicatedSlack_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & + setMinBufferCountForSharedSlack( uint32_t minBufferCountForSharedSlack_ ) VULKAN_HPP_NOEXCEPT + { + minBufferCountForSharedSlack = minBufferCountForSharedSlack_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferCollectionConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCollectionConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minBufferCount, maxBufferCount, minBufferCountForCamping, minBufferCountForDedicatedSlack, minBufferCountForSharedSlack ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCollectionConstraintsInfoFUCHSIA const & ) const = default; +# else + bool operator==( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minBufferCount == rhs.minBufferCount ) && ( maxBufferCount == rhs.maxBufferCount ) && + ( minBufferCountForCamping == rhs.minBufferCountForCamping ) && ( minBufferCountForDedicatedSlack == rhs.minBufferCountForDedicatedSlack ) && + ( minBufferCountForSharedSlack == rhs.minBufferCountForSharedSlack ); +# endif + } + + bool operator!=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA; + const void * pNext = {}; + uint32_t minBufferCount = {}; + uint32_t maxBufferCount = {}; + uint32_t 
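// A sketch of the constraints struct above, which bounds how many buffers
// sysmem may allocate for the collection; the counts shown are illustrative:
//
//   auto constraints = vk::BufferCollectionConstraintsInfoFUCHSIA{}
//                        .setMinBufferCount( 2 )
//                        .setMaxBufferCount( 8 );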
minBufferCountForCamping = {}; + uint32_t minBufferCountForDedicatedSlack = {}; + uint32_t minBufferCountForSharedSlack = {}; + }; + + template <> + struct CppType + { + using Type = BufferCollectionConstraintsInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct BufferCollectionCreateInfoFUCHSIA + { + using NativeType = VkBufferCollectionCreateInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionCreateInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( zx_handle_t collectionToken_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , collectionToken( collectionToken_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionCreateInfoFUCHSIA( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCollectionCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + BufferCollectionCreateInfoFUCHSIA & operator=( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCollectionCreateInfoFUCHSIA & operator=( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setCollectionToken( zx_handle_t collectionToken_ ) VULKAN_HPP_NOEXCEPT + { + collectionToken = collectionToken_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferCollectionCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCollectionCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, collectionToken ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
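      // zx_handle_t is an external platform type that the generator treats as opaque, so this
      // hand-rolled three-way comparison (and operator== below) falls back to a bytewise memcmp
      // of collectionToken instead of a defaulted member-wise comparison.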
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ) == 0 ); + } + + bool operator!=( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionCreateInfoFUCHSIA; + const void * pNext = {}; + zx_handle_t collectionToken = {}; + }; + + template <> + struct CppType + { + using Type = BufferCollectionCreateInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct BufferCollectionImageCreateInfoFUCHSIA + { + using NativeType = VkBufferCollectionImageCreateInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, + uint32_t index_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , collection( collection_ ) + , index( index_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionImageCreateInfoFUCHSIA( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCollectionImageCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + BufferCollectionImageCreateInfoFUCHSIA & operator=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCollectionImageCreateInfoFUCHSIA & operator=( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & + setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT + { + collection = collection_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferCollectionImageCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCollectionImageCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, collection, index ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCollectionImageCreateInfoFUCHSIA const & ) const = default; +# else + bool operator==( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# 
if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index ); +# endif + } + + bool operator!=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {}; + uint32_t index = {}; + }; + + template <> + struct CppType + { + using Type = BufferCollectionImageCreateInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct SysmemColorSpaceFUCHSIA + { + using NativeType = VkSysmemColorSpaceFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSysmemColorSpaceFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( uint32_t colorSpace_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , colorSpace( colorSpace_ ) + { + } + + VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SysmemColorSpaceFUCHSIA( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : SysmemColorSpaceFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + SysmemColorSpaceFUCHSIA & operator=( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SysmemColorSpaceFUCHSIA & operator=( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setColorSpace( uint32_t colorSpace_ ) VULKAN_HPP_NOEXCEPT + { + colorSpace = colorSpace_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSysmemColorSpaceFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSysmemColorSpaceFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, colorSpace ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SysmemColorSpaceFUCHSIA const & ) const = default; +# else + bool operator==( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorSpace == rhs.colorSpace ); +# endif + } + + bool operator!=( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSysmemColorSpaceFUCHSIA; + const void * pNext = {}; + uint32_t colorSpace = {}; + }; + + template <> + struct CppType + { + using Type = SysmemColorSpaceFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( 
VK_USE_PLATFORM_FUCHSIA ) + struct BufferCollectionPropertiesFUCHSIA + { + using NativeType = VkBufferCollectionPropertiesFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionPropertiesFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( + uint32_t memoryTypeBits_ = {}, + uint32_t bufferCount_ = {}, + uint32_t createInfoIndex_ = {}, + uint64_t sysmemPixelFormat_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex_ = {}, + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memoryTypeBits( memoryTypeBits_ ) + , bufferCount( bufferCount_ ) + , createInfoIndex( createInfoIndex_ ) + , sysmemPixelFormat( sysmemPixelFormat_ ) + , formatFeatures( formatFeatures_ ) + , sysmemColorSpaceIndex( sysmemColorSpaceIndex_ ) + , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ) + , suggestedYcbcrModel( suggestedYcbcrModel_ ) + , suggestedYcbcrRange( suggestedYcbcrRange_ ) + , suggestedXChromaOffset( suggestedXChromaOffset_ ) + , suggestedYChromaOffset( suggestedYChromaOffset_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCollectionPropertiesFUCHSIA( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCollectionPropertiesFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + BufferCollectionPropertiesFUCHSIA & operator=( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCollectionPropertiesFUCHSIA & operator=( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setMemoryTypeBits( uint32_t memoryTypeBits_ ) VULKAN_HPP_NOEXCEPT + { + memoryTypeBits = memoryTypeBits_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setBufferCount( uint32_t bufferCount_ ) VULKAN_HPP_NOEXCEPT + { + bufferCount = bufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setCreateInfoIndex( uint32_t createInfoIndex_ ) VULKAN_HPP_NOEXCEPT + { + createInfoIndex = createInfoIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT + { + sysmemPixelFormat = sysmemPixelFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
BufferCollectionPropertiesFUCHSIA & + setFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ ) VULKAN_HPP_NOEXCEPT + { + formatFeatures = formatFeatures_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & + setSysmemColorSpaceIndex( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const & sysmemColorSpaceIndex_ ) VULKAN_HPP_NOEXCEPT + { + sysmemColorSpaceIndex = sysmemColorSpaceIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & + setSamplerYcbcrConversionComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & samplerYcbcrConversionComponents_ ) VULKAN_HPP_NOEXCEPT + { + samplerYcbcrConversionComponents = samplerYcbcrConversionComponents_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & + setSuggestedYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ ) VULKAN_HPP_NOEXCEPT + { + suggestedYcbcrModel = suggestedYcbcrModel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & + setSuggestedYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ ) VULKAN_HPP_NOEXCEPT + { + suggestedYcbcrRange = suggestedYcbcrRange_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & + setSuggestedXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ ) VULKAN_HPP_NOEXCEPT + { + suggestedXChromaOffset = suggestedXChromaOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & + setSuggestedYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ ) VULKAN_HPP_NOEXCEPT + { + suggestedYChromaOffset = suggestedYChromaOffset_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferCollectionPropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCollectionPropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + memoryTypeBits, + bufferCount, + createInfoIndex, + sysmemPixelFormat, + formatFeatures, + sysmemColorSpaceIndex, + samplerYcbcrConversionComponents, + suggestedYcbcrModel, + suggestedYcbcrRange, + suggestedXChromaOffset, + suggestedYChromaOffset ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCollectionPropertiesFUCHSIA const & ) const = default; +# else + bool operator==( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ) && ( bufferCount == rhs.bufferCount ) && + ( createInfoIndex == rhs.createInfoIndex ) && ( sysmemPixelFormat == rhs.sysmemPixelFormat ) && ( formatFeatures == rhs.formatFeatures ) && + ( sysmemColorSpaceIndex == rhs.sysmemColorSpaceIndex ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && + ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && + ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); +# endif + } + + bool operator!=( 
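      // Note: BufferCollectionPropertiesFUCHSIA is an output structure, filled in by
      // vkGetBufferCollectionPropertiesFUCHSIA once sysmem has allocated the collection
      // (hence its mutable `void * pNext`); the setters above exist mostly for symmetry.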
BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCollectionPropertiesFUCHSIA; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + uint32_t bufferCount = {}; + uint32_t createInfoIndex = {}; + uint64_t sysmemPixelFormat = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {}; + VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex = {}; + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; + + template <> + struct CppType + { + using Type = BufferCollectionPropertiesFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct BufferCreateInfo + { + using NativeType = VkBufferCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , size( size_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCreateInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_, + VULKAN_HPP_NAMESPACE::DeviceSize size_, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , size( size_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
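      // Usage sketch (illustrative, assuming the default vk namespace for VULKAN_HPP_NAMESPACE and
      // a valid vk::Device `device`): the chained setters build the create info in place, e.g. a
      // 64 KiB vertex buffer:
      //
      //   vk::Buffer buffer = device.createBuffer( vk::BufferCreateInfo{}
      //                                              .setSize( 65536 )
      //                                              .setUsage( vk::BufferUsageFlagBits::eVertexBuffer |
      //                                                         vk::BufferUsageFlagBits::eTransferDst )
      //                                              .setSharingMode( vk::SharingMode::eExclusive ) );
      //
      // With eExclusive sharing, queueFamilyIndexCount / pQueueFamilyIndices can stay zeroed.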
BufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT + { + sharingMode = sharingMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BufferCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, size, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCreateInfo const & ) const = default; +#else + bool operator==( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( size == rhs.size ) && ( usage == rhs.usage ) && + ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); +# endif + } + + bool operator!=( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + }; + + template <> + struct CppType + { + using Type = BufferCreateInfo; + }; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct BufferConstraintsInfoFUCHSIA + { + using NativeType = VkBufferConstraintsInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferConstraintsInfoFUCHSIA; + +# if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , createInfo( createInfo_ ) + , requiredFormatFeatures( requiredFormatFeatures_ ) + , bufferCollectionConstraints( bufferCollectionConstraints_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferConstraintsInfoFUCHSIA( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + BufferConstraintsInfoFUCHSIA & operator=( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferConstraintsInfoFUCHSIA & operator=( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo_ ) VULKAN_HPP_NOEXCEPT + { + createInfo = createInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & + setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT + { + requiredFormatFeatures = requiredFormatFeatures_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & + setBufferCollectionConstraints( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT + { + bufferCollectionConstraints = bufferCollectionConstraints_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, createInfo, requiredFormatFeatures, bufferCollectionConstraints ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferConstraintsInfoFUCHSIA const & ) const = default; +# else + bool operator==( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createInfo == rhs.createInfo ) && ( requiredFormatFeatures == rhs.requiredFormatFeatures ) && + ( bufferCollectionConstraints == rhs.bufferCollectionConstraints ); +# endif + } + + bool operator!=( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferConstraintsInfoFUCHSIA; + const void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {}; + }; + + template <> + struct CppType + { + using Type = BufferConstraintsInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct BufferCopy + { + using NativeType = VkBufferCopy; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcOffset( srcOffset_ ) + , dstOffset( dstOffset_ ) + , size( size_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy( *reinterpret_cast( &rhs ) ) {} + + BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCopy & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcOffset, dstOffset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCopy const & ) const = default; +#else + bool operator==( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + struct BufferCopy2 + { + using NativeType = VkBufferCopy2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCopy2( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , srcOffset( srcOffset_ ) + , dstOffset( dstOffset_ ) + , size( size_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferCopy2( BufferCopy2 const & 
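      // Usage sketch (illustrative; `cmd` is a recording vk::CommandBuffer, buffers assumed valid):
      // BufferCopy carries no sType/pNext and is passed straight to the classic copy command,
      //
      //   vk::BufferCopy region( 0 /*srcOffset*/, 0 /*dstOffset*/, 256 /*size*/ );
      //   cmd.copyBuffer( stagingBuffer, deviceBuffer, region );
      //
      // while BufferCopy2, defined here, is the extensible sType/pNext variant consumed by
      // vkCmdCopyBuffer2 via CopyBufferInfo2.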
rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCopy2( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy2( *reinterpret_cast( &rhs ) ) {} + + BufferCopy2 & operator=( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferCopy2 & operator=( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferCopy2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCopy2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcOffset, dstOffset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCopy2 const & ) const = default; +#else + bool operator==( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCopy2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = BufferCopy2; + }; + + using BufferCopy2KHR = BufferCopy2; + + struct BufferDeviceAddressCreateInfoEXT + { + using NativeType = VkBufferDeviceAddressCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , deviceAddress( deviceAddress_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferDeviceAddressCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif 
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferDeviceAddressCreateInfoEXT const & ) const = default; +#else + bool operator==( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ); +# endif + } + + bool operator!=( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + }; + + template <> + struct CppType + { + using Type = BufferDeviceAddressCreateInfoEXT; + }; + + struct BufferDeviceAddressInfo + { + using NativeType = VkBufferDeviceAddressInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , buffer( buffer_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferDeviceAddressInfo( *reinterpret_cast( &rhs ) ) + { + } + + BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferDeviceAddressInfo const &() const 
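      // Usage sketch (illustrative; `device` and `buffer` assumed): query the GPU virtual address
      // of a buffer that was created with vk::BufferUsageFlagBits::eShaderDeviceAddress:
      //
      //   vk::DeviceAddress addr = device.getBufferAddress( vk::BufferDeviceAddressInfo( buffer ) );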
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferDeviceAddressInfo const & ) const = default; +#else + bool operator==( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + + template <> + struct CppType + { + using Type = BufferDeviceAddressInfo; + }; + + using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo; + using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo; + + struct BufferImageCopy + { + using NativeType = VkBufferImageCopy; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, + uint32_t bufferRowLength_ = {}, + uint32_t bufferImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferOffset( bufferOffset_ ) + , bufferRowLength( bufferRowLength_ ) + , bufferImageHeight( bufferImageHeight_ ) + , imageSubresource( imageSubresource_ ) + , imageOffset( imageOffset_ ) + , imageExtent( imageExtent_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy( *reinterpret_cast( &rhs ) ) {} + + BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + bufferOffset = bufferOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT + { + bufferRowLength = bufferRowLength_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + bufferImageHeight = bufferImageHeight_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + 
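      // Usage sketch (illustrative; `cmd`, `staging`, `image`, `width`, `height` assumed): upload a
      // tightly packed color image; bufferRowLength / bufferImageHeight of 0 mean "tightly packed":
      //
      //   vk::BufferImageCopy region{};
      //   region.setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
      //         .setImageExtent( { width, height, 1 } );
      //   cmd.copyBufferToImage( staging, image, vk::ImageLayout::eTransferDstOptimal, region );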
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferImageCopy const & ) const = default; +#else + bool operator==( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) && + ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + + struct BufferImageCopy2 + { + using NativeType = VkBufferImageCopy2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferImageCopy2( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, + uint32_t bufferRowLength_ = {}, + uint32_t bufferImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , bufferOffset( bufferOffset_ ) + , bufferRowLength( bufferRowLength_ ) + , bufferImageHeight( bufferImageHeight_ ) + , imageSubresource( imageSubresource_ ) + , imageOffset( imageOffset_ ) + , imageExtent( imageExtent_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferImageCopy2( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferImageCopy2( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy2( *reinterpret_cast( &rhs ) ) {} + + BufferImageCopy2 & operator=( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferImageCopy2 & operator=( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + bufferOffset = bufferOffset_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT + { + bufferRowLength = bufferRowLength_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + bufferImageHeight = bufferImageHeight_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferImageCopy2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferImageCopy2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferImageCopy2 const & ) const = default; +#else + bool operator==( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) && + ( bufferImageHeight == rhs.bufferImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && + ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferImageCopy2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + + template <> + struct CppType + { + using Type = BufferImageCopy2; + }; + + using BufferImageCopy2KHR = BufferImageCopy2; + + struct BufferMemoryBarrier + { + using NativeType = VkBufferMemoryBarrier; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , 
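      // Usage sketch (illustrative; `cmd` and `buffer` assumed): make a transfer write visible to
      // fragment-shader reads on the same queue; both queue family indices stay
      // VK_QUEUE_FAMILY_IGNORED when no ownership transfer is intended:
      //
      //   vk::BufferMemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite, vk::AccessFlagBits::eShaderRead,
      //                                    VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED,
      //                                    buffer, 0, VK_WHOLE_SIZE );
      //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer, vk::PipelineStageFlagBits::eFragmentShader,
      //                        {}, nullptr, barrier, nullptr );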
srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , buffer( buffer_ ) + , offset( offset_ ) + , size( size_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : BufferMemoryBarrier( *reinterpret_cast( &rhs ) ) + { + } + + BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcAccessMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryBarrier const & ) const = default; +#else + bool operator==( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) && + ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && + ( offset == rhs.offset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
!operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = BufferMemoryBarrier; + }; + + struct BufferMemoryBarrier2 + { + using NativeType = VkBufferMemoryBarrier2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , srcStageMask( srcStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstStageMask( dstStageMask_ ) + , dstAccessMask( dstAccessMask_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , buffer( buffer_ ) + , offset( offset_ ) + , size( size_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryBarrier2( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferMemoryBarrier2( *reinterpret_cast( &rhs ) ) + { + } + + BufferMemoryBarrier2 & operator=( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferMemoryBarrier2 & operator=( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & 
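      // Usage sketch (illustrative; `cmd` and `buffer` assumed): BufferMemoryBarrier2 carries its
      // own stage masks and travels inside a DependencyInfo (core in Vulkan 1.3, or
      // pipelineBarrier2KHR with VK_KHR_synchronization2):
      //
      //   vk::BufferMemoryBarrier2 barrier2( vk::PipelineStageFlagBits2::eTransfer, vk::AccessFlagBits2::eTransferWrite,
      //                                      vk::PipelineStageFlagBits2::eFragmentShader, vk::AccessFlagBits2::eShaderRead,
      //                                      VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED,
      //                                      buffer, 0, VK_WHOLE_SIZE );
      //   cmd.pipelineBarrier2( vk::DependencyInfo{}.setBufferMemoryBarriers( barrier2 ) );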
setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferMemoryBarrier2 const & ) const = default; +#else + bool operator==( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {}; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = BufferMemoryBarrier2; + }; + + using BufferMemoryBarrier2KHR = BufferMemoryBarrier2; + + struct BufferMemoryRequirementsInfo2 + { + using NativeType = VkBufferMemoryRequirementsInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , buffer( buffer_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : 
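      // Usage sketch (illustrative; `device` and `buffer` assumed): the modern query path wraps the
      // buffer handle in this info struct:
      //
      //   vk::MemoryRequirements2 reqs =
      //     device.getBufferMemoryRequirements2( vk::BufferMemoryRequirementsInfo2( buffer ) );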
BufferMemoryRequirementsInfo2( *reinterpret_cast<BufferMemoryRequirementsInfo2 const *>( &rhs ) )
+    {
+    }
+
+    BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( this );
+    }
+
+    operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferMemoryRequirementsInfo2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, buffer );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( BufferMemoryRequirementsInfo2 const & ) const = default;
+#else
+    bool operator==( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
+# endif
+    }
+
+    bool operator!=( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::eBufferMemoryRequirementsInfo2;
+    const void *                        pNext  = {};
+    VULKAN_HPP_NAMESPACE::Buffer        buffer = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferMemoryRequirementsInfo2>
+  {
+    using Type = BufferMemoryRequirementsInfo2;
+  };
+
+  using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
+
+  struct BufferOpaqueCaptureAddressCreateInfo
+  {
+    using NativeType = VkBufferOpaqueCaptureAddressCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , opaqueCaptureAddress( opaqueCaptureAddress_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast<BufferOpaqueCaptureAddressCreateInfo const *>( &rhs ) )
+    {
+    }
+
+    BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferOpaqueCaptureAddressCreateInfo & operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      opaqueCaptureAddress = opaqueCaptureAddress_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo *>( this );
+    }
+
+    operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, opaqueCaptureAddress );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const & ) const = default;
+#else
+    bool operator==( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
+# endif
+    }
+
+    bool operator!=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
+    const void *                        pNext                = {};
+    uint64_t                            opaqueCaptureAddress = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferOpaqueCaptureAddressCreateInfo>
+  {
+    using Type = BufferOpaqueCaptureAddressCreateInfo;
+  };
+
+  using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;
+
+  struct BufferUsageFlags2CreateInfoKHR
+  {
+    using NativeType = VkBufferUsageFlags2CreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferUsageFlags2CreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfoKHR( VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR usage_ = {},
+                                                         const void *                               pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , usage( usage_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfoKHR( BufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferUsageFlags2CreateInfoKHR( VkBufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferUsageFlags2CreateInfoKHR( *reinterpret_cast<BufferUsageFlags2CreateInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    BufferUsageFlags2CreateInfoKHR & operator=( BufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferUsageFlags2CreateInfoKHR & operator=( VkBufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfoKHR & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkBufferUsageFlags2CreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferUsageFlags2CreateInfoKHR *>( this );
+    }
+
+    operator VkBufferUsageFlags2CreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferUsageFlags2CreateInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, usage );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( BufferUsageFlags2CreateInfoKHR const & ) const = default;
+#else
+    bool operator==( BufferUsageFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage );
+# endif
+    }
+
+    bool operator!=( BufferUsageFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType        sType = StructureType::eBufferUsageFlags2CreateInfoKHR;
+    const void *                               pNext = {};
+    VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR usage = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eBufferUsageFlags2CreateInfoKHR>
+  {
+    using Type = BufferUsageFlags2CreateInfoKHR;
+  };
+
+  struct BufferViewCreateInfo
+  {
+    using NativeType = VkBufferViewCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_  = {},
+                                               VULKAN_HPP_NAMESPACE::Buffer                buffer_ = {},
+                                               VULKAN_HPP_NAMESPACE::Format                format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                               VULKAN_HPP_NAMESPACE::DeviceSize            offset_ = {},
+                                               VULKAN_HPP_NAMESPACE::DeviceSize            range_  = {},
+                                               const void *                                pNext_  = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , buffer( buffer_ )
+      , format( format_ )
+      , offset( offset_ )
+      , range( range_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : BufferViewCreateInfo( *reinterpret_cast<BufferViewCreateInfo const *>( &rhs ) )
+    {
+    }
+
+    BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+    {
+      range = range_;
+      return *this;
+    }
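+
+    // Editorial sketch, not part of the generated header: the fluent setters above
+    // allow chained construction of a BufferViewCreateInfo. `device` and
+    // `texelBuffer` are hypothetical handles created elsewhere.
+    //
+    //   vk::BufferView view = device.createBufferView( vk::BufferViewCreateInfo{}
+    //                                                    .setBuffer( texelBuffer )
+    //                                                    .setFormat( vk::Format::eR32Sfloat )
+    //                                                    .setOffset( 0 )
+    //                                                    .setRange( VK_WHOLE_SIZE ) );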
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, buffer, format, offset, range ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferViewCreateInfo const & ) const = default; +#else + bool operator==( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( buffer == rhs.buffer ) && ( format == rhs.format ) && + ( offset == rhs.offset ) && ( range == rhs.range ); +# endif + } + + bool operator!=( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize range = {}; + }; + + template <> + struct CppType + { + using Type = BufferViewCreateInfo; + }; + + struct CalibratedTimestampInfoKHR + { + using NativeType = VkCalibratedTimestampInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoKHR( VULKAN_HPP_NAMESPACE::TimeDomainKHR timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainKHR::eDevice, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , timeDomain( timeDomain_ ) + { + } + + VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoKHR( CalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CalibratedTimestampInfoKHR( VkCalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CalibratedTimestampInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + CalibratedTimestampInfoKHR & operator=( CalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CalibratedTimestampInfoKHR & operator=( VkCalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoKHR & setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainKHR timeDomain_ ) VULKAN_HPP_NOEXCEPT + { + timeDomain = timeDomain_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCalibratedTimestampInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCalibratedTimestampInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto 
+# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, timeDomain ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CalibratedTimestampInfoKHR const & ) const = default; +#else + bool operator==( CalibratedTimestampInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timeDomain == rhs.timeDomain ); +# endif + } + + bool operator!=( CalibratedTimestampInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::TimeDomainKHR timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainKHR::eDevice; + }; + + template <> + struct CppType + { + using Type = CalibratedTimestampInfoKHR; + }; + + using CalibratedTimestampInfoEXT = CalibratedTimestampInfoKHR; + + struct CheckpointData2NV + { + using NativeType = VkCheckpointData2NV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2NV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CheckpointData2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage_ = {}, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stage( stage_ ) + , pCheckpointMarker( pCheckpointMarker_ ) + { + } + + VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT : CheckpointData2NV( *reinterpret_cast( &rhs ) ) {} + + CheckpointData2NV & operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CheckpointData2NV & operator=( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stage, pCheckpointMarker ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CheckpointData2NV const & ) const = default; +#else + bool operator==( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker ); +# endif + } + + bool operator!=( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2NV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage = {}; + void * pCheckpointMarker = {}; + }; + + template <> + struct CppType + { + using Type = CheckpointData2NV; + }; + + struct CheckpointDataNV + { + using NativeType = VkCheckpointDataNV; + + static const bool allowDuplicate = 
false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CheckpointDataNV( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, + void * pCheckpointMarker_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stage( stage_ ) + , pCheckpointMarker( pCheckpointMarker_ ) + { + } + + VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : CheckpointDataNV( *reinterpret_cast( &rhs ) ) {} + + CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stage, pCheckpointMarker ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CheckpointDataNV const & ) const = default; +#else + bool operator==( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker ); +# endif + } + + bool operator!=( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe; + void * pCheckpointMarker = {}; + }; + + template <> + struct CppType + { + using Type = CheckpointDataNV; + }; + + union ClearColorValue + { + using NativeType = VkClearColorValue; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & float32_ = {} ) : float32( float32_ ) {} + + VULKAN_HPP_CONSTEXPR ClearColorValue( float float32_0, float float32_1, float float32_2, float float32_3 ) + : float32{ { { float32_0, float32_1, float32_2, float32_3 } } } + { + } + + VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & int32_ ) : int32( int32_ ) {} + + VULKAN_HPP_CONSTEXPR ClearColorValue( int32_t int32_0, int32_t int32_1, int32_t int32_2, int32_t int32_3 ) + : int32{ { { int32_0, int32_1, int32_2, int32_3 } } } + { + } + + VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & uint32_ ) : uint32( uint32_ ) {} + + VULKAN_HPP_CONSTEXPR ClearColorValue( uint32_t uint32_0, uint32_t uint32_1, uint32_t uint32_2, uint32_t uint32_3 ) + : uint32{ { { uint32_0, uint32_1, uint32_2, uint32_3 } } } + { + } +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setFloat32( std::array float32_ ) VULKAN_HPP_NOEXCEPT + { + float32 = float32_; + return *this; + } + + 
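+
+    // Editorial sketch, not part of the generated header: ClearColorValue mirrors
+    // the VkClearColorValue union, so only one of the float32 / int32 / uint32
+    // views is meaningful at a time. Illustrative values:
+    //
+    //   vk::ClearColorValue opaqueBlack( 0.0f, 0.0f, 0.0f, 1.0f );
+    //   vk::ClearValue      clear( opaqueBlack );  // e.g. for RenderPassBeginInfo::pClearValues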
VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setInt32( std::array int32_ ) VULKAN_HPP_NOEXCEPT + { + int32 = int32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setUint32( std::array uint32_ ) VULKAN_HPP_NOEXCEPT + { + uint32 = uint32_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkClearColorValue const &() const + { + return *reinterpret_cast( this ); + } + + operator VkClearColorValue &() + { + return *reinterpret_cast( this ); + } + + VULKAN_HPP_NAMESPACE::ArrayWrapper1D float32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D int32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D uint32; + }; + + struct ClearDepthStencilValue + { + using NativeType = VkClearDepthStencilValue; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT + : depth( depth_ ) + , stencil( stencil_ ) + { + } + + VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT + : ClearDepthStencilValue( *reinterpret_cast( &rhs ) ) + { + } + + ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT + { + depth = depth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT + { + stencil = stencil_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( depth, stencil ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClearDepthStencilValue const & ) const = default; +#else + bool operator==( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( depth == rhs.depth ) && ( stencil == rhs.stencil ); +# endif + } + + bool operator!=( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float depth = {}; + uint32_t stencil = {}; + }; + + union ClearValue + { + using NativeType = VkClearValue; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} ) : color( color_ ) {} + + VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) : depthStencil( depthStencil_ ) {} +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT + { + color = color_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearValue & 
setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT + { + depthStencil = depthStencil_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkClearValue const &() const + { + return *reinterpret_cast( this ); + } + + operator VkClearValue &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::ClearColorValue color; + VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil; +#else + VkClearColorValue color; + VkClearDepthStencilValue depthStencil; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct ClearAttachment + { + using NativeType = VkClearAttachment; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t colorAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , colorAttachment( colorAttachment_ ) + , clearValue( clearValue_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT : ClearAttachment( *reinterpret_cast( &rhs ) ) {} + + ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachment = colorAttachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT + { + clearValue = clearValue_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( aspectMask, colorAttachment, clearValue ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t colorAttachment = {}; + VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; + }; + + struct ClearRect + { + using NativeType = VkClearRect; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : rect( rect_ ) + , baseArrayLayer( baseArrayLayer_ ) + , layerCount( layerCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT : ClearRect( *reinterpret_cast( &rhs ) ) {} + + ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ClearRect & operator=( 
VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT + { + rect = rect_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClearRect &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( rect, baseArrayLayer, layerCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClearRect const & ) const = default; +#else + bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( rect == rhs.rect ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); +# endif + } + + bool operator!=( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Rect2D rect = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; + }; + + struct CoarseSampleLocationNV + { + using NativeType = VkCoarseSampleLocationNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT + : pixelX( pixelX_ ) + , pixelY( pixelY_ ) + , sample( sample_ ) + { + } + + VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CoarseSampleLocationNV( *reinterpret_cast( &rhs ) ) + { + } + + CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT + { + pixelX = pixelX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT + { + pixelY = pixelY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT + { + sample = sample_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( pixelX, pixelY, sample ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CoarseSampleLocationNV const & ) const = default; +#else + bool operator==( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( pixelX == rhs.pixelX ) && ( pixelY == rhs.pixelY ) && ( sample == rhs.sample ); +# endif + } + + bool operator!=( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t pixelX = {}; + uint32_t pixelY = {}; + uint32_t sample = {}; + }; + + struct CoarseSampleOrderCustomNV + { + using NativeType = VkCoarseSampleOrderCustomNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, + uint32_t sampleCount_ = {}, + uint32_t sampleLocationCount_ = {}, + const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRate( shadingRate_ ) + , sampleCount( sampleCount_ ) + , sampleLocationCount( sampleLocationCount_ ) + , pSampleLocations( pSampleLocations_ ) + { + } + + VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CoarseSampleOrderCustomNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_, + uint32_t sampleCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sampleLocations_ ) + : shadingRate( shadingRate_ ) + , sampleCount( sampleCount_ ) + , sampleLocationCount( static_cast( sampleLocations_.size() ) ) + , pSampleLocations( sampleLocations_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CoarseSampleOrderCustomNV & operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT + { + shadingRate = shadingRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT + { + sampleCount = sampleCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationCount = sampleLocationCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & + setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pSampleLocations = pSampleLocations_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CoarseSampleOrderCustomNV & setSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationCount = static_cast( 
sampleLocations_.size() ); + pSampleLocations = sampleLocations_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( shadingRate, sampleCount, sampleLocationCount, pSampleLocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CoarseSampleOrderCustomNV const & ) const = default; +#else + bool operator==( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( shadingRate == rhs.shadingRate ) && ( sampleCount == rhs.sampleCount ) && ( sampleLocationCount == rhs.sampleLocationCount ) && + ( pSampleLocations == rhs.pSampleLocations ); +# endif + } + + bool operator!=( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations; + uint32_t sampleCount = {}; + uint32_t sampleLocationCount = {}; + const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations = {}; + }; + + struct ColorBlendAdvancedEXT + { + using NativeType = VkColorBlendAdvancedEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated, + VULKAN_HPP_NAMESPACE::Bool32 clampResults_ = {} ) VULKAN_HPP_NOEXCEPT + : advancedBlendOp( advancedBlendOp_ ) + , srcPremultiplied( srcPremultiplied_ ) + , dstPremultiplied( dstPremultiplied_ ) + , blendOverlap( blendOverlap_ ) + , clampResults( clampResults_ ) + { + } + + VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ColorBlendAdvancedEXT( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ColorBlendAdvancedEXT( *reinterpret_cast( &rhs ) ) + { + } + + ColorBlendAdvancedEXT & operator=( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ColorBlendAdvancedEXT & operator=( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setAdvancedBlendOp( VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp_ ) VULKAN_HPP_NOEXCEPT + { + advancedBlendOp = advancedBlendOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT + { + srcPremultiplied = srcPremultiplied_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT + { + dstPremultiplied = 
dstPremultiplied_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT + { + blendOverlap = blendOverlap_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setClampResults( VULKAN_HPP_NAMESPACE::Bool32 clampResults_ ) VULKAN_HPP_NOEXCEPT + { + clampResults = clampResults_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkColorBlendAdvancedEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkColorBlendAdvancedEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( advancedBlendOp, srcPremultiplied, dstPremultiplied, blendOverlap, clampResults ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ColorBlendAdvancedEXT const & ) const = default; +#else + bool operator==( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( advancedBlendOp == rhs.advancedBlendOp ) && ( srcPremultiplied == rhs.srcPremultiplied ) && ( dstPremultiplied == rhs.dstPremultiplied ) && + ( blendOverlap == rhs.blendOverlap ) && ( clampResults == rhs.clampResults ); +# endif + } + + bool operator!=( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; + VULKAN_HPP_NAMESPACE::Bool32 clampResults = {}; + }; + + struct ColorBlendEquationEXT + { + using NativeType = VkColorBlendEquationEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd ) VULKAN_HPP_NOEXCEPT + : srcColorBlendFactor( srcColorBlendFactor_ ) + , dstColorBlendFactor( dstColorBlendFactor_ ) + , colorBlendOp( colorBlendOp_ ) + , srcAlphaBlendFactor( srcAlphaBlendFactor_ ) + , dstAlphaBlendFactor( dstAlphaBlendFactor_ ) + , alphaBlendOp( alphaBlendOp_ ) + { + } + + VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ColorBlendEquationEXT( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ColorBlendEquationEXT( *reinterpret_cast( &rhs ) ) + { + } + + ColorBlendEquationEXT & operator=( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ColorBlendEquationEXT & operator=( VkColorBlendEquationEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + srcColorBlendFactor = srcColorBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + dstColorBlendFactor = dstColorBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT + { + colorBlendOp = colorBlendOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + srcAlphaBlendFactor = srcAlphaBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + dstAlphaBlendFactor = dstAlphaBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT + { + alphaBlendOp = alphaBlendOp_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkColorBlendEquationEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkColorBlendEquationEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ColorBlendEquationEXT const & ) const = default; +#else + bool operator==( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) && + ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) && ( alphaBlendOp == rhs.alphaBlendOp ); +# endif + } + + bool operator!=( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + }; + + struct CommandBufferAllocateInfo + { + using NativeType = VkCommandBufferAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo; + +#if 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, + VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, + uint32_t commandBufferCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , commandPool( commandPool_ ) + , level( level_ ) + , commandBufferCount( commandBufferCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferAllocateInfo( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT + { + commandPool = commandPool_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT + { + level = level_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = commandBufferCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, commandPool, level, commandBufferCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferAllocateInfo const & ) const = default; +#else + bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPool == rhs.commandPool ) && ( level == rhs.level ) && + ( commandBufferCount == rhs.commandBufferCount ); +# endif + } + + bool operator!=( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandPool commandPool = {}; + VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary; + uint32_t commandBufferCount = {}; + }; + + template <> + struct CppType + { + using Type = CommandBufferAllocateInfo; + }; + + struct CommandBufferInheritanceInfo + { + using NativeType = VkCommandBufferInheritanceInfo; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, + VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , renderPass( renderPass_ ) + , subpass( subpass_ ) + , framebuffer( framebuffer_ ) + , occlusionQueryEnable( occlusionQueryEnable_ ) + , queryFlags( queryFlags_ ) + , pipelineStatistics( pipelineStatistics_ ) + { + } + + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceInfo( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT + { + subpass = subpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT + { + framebuffer = framebuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT + { + occlusionQueryEnable = occlusionQueryEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT + { + queryFlags = queryFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & + setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatistics = pipelineStatistics_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, renderPass, subpass, framebuffer, occlusionQueryEnable, queryFlags, pipelineStatistics ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
CommandBufferInheritanceInfo const & ) const = default; +#else + bool operator==( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) && + ( framebuffer == rhs.framebuffer ) && ( occlusionQueryEnable == rhs.occlusionQueryEnable ) && ( queryFlags == rhs.queryFlags ) && + ( pipelineStatistics == rhs.pipelineStatistics ); +# endif + } + + bool operator!=( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t subpass = {}; + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {}; + VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {}; + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; + }; + + template <> + struct CppType + { + using Type = CommandBufferInheritanceInfo; + }; + + struct CommandBufferBeginInfo + { + using NativeType = VkCommandBufferBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , pInheritanceInfo( pInheritanceInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferBeginInfo( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & + setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT + { + pInheritanceInfo = pInheritanceInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pInheritanceInfo ); 
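+      // Editorial note: when VULKAN_HPP_USE_REFLECT is defined, operator== below
+      // compares this member-wise tie rather than testing each field explicitly.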
+ } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferBeginInfo const & ) const = default; +#else + bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pInheritanceInfo == rhs.pInheritanceInfo ); +# endif + } + + bool operator!=( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {}; + const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo = {}; + }; + + template <> + struct CppType + { + using Type = CommandBufferBeginInfo; + }; + + struct CommandBufferInheritanceConditionalRenderingInfoEXT + { + using NativeType = VkCommandBufferInheritanceConditionalRenderingInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , conditionalRenderingEnable( conditionalRenderingEnable_ ) + { + } + + VULKAN_HPP_CONSTEXPR + CommandBufferInheritanceConditionalRenderingInfoEXT( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceConditionalRenderingInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferInheritanceConditionalRenderingInfoEXT & + operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & + setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT + { + conditionalRenderingEnable = conditionalRenderingEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, conditionalRenderingEnable ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const & ) const = default; +#else + bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable ); +# endif + } + + bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {}; + }; + + template <> + struct CppType + { + using Type = CommandBufferInheritanceConditionalRenderingInfoEXT; + }; + + struct CommandBufferInheritanceRenderPassTransformInfoQCOM + { + using NativeType = VkCommandBufferInheritanceRenderPassTransformInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , transform( transform_ ) + , renderArea( renderArea_ ) + { + } + + VULKAN_HPP_CONSTEXPR + CommandBufferInheritanceRenderPassTransformInfoQCOM( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceRenderPassTransformInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferInheritanceRenderPassTransformInfoQCOM & + operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & + setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT + { + renderArea = renderArea_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, transform, renderArea ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const & ) const = default; +#else + bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ) && ( renderArea == rhs.renderArea ); +# endif + } + + bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; + }; + + template <> + struct CppType + { + using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM; + }; + + struct CommandBufferInheritanceRenderingInfo + { + using NativeType = VkCommandBufferInheritanceRenderingInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderingInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CommandBufferInheritanceRenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, + uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachmentFormats( pColorAttachmentFormats_ ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + , rasterizationSamples( rasterizationSamples_ ) + { + } + + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceRenderingInfo( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceRenderingInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CommandBufferInheritanceRenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_, + uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + 
const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) + , pColorAttachmentFormats( colorAttachmentFormats_.data() ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + , rasterizationSamples( rasterizationSamples_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CommandBufferInheritanceRenderingInfo & operator=( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferInheritanceRenderingInfo & operator=( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachmentFormats = pColorAttachmentFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CommandBufferInheritanceRenderingInfo & setColorAttachmentFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); + pColorAttachmentFormats = colorAttachmentFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + depthAttachmentFormat = depthAttachmentFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + stencilAttachmentFormat = stencilAttachmentFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationSamples = rasterizationSamples_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCommandBufferInheritanceRenderingInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceRenderingInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, flags, viewMask, 
colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat, rasterizationSamples ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceRenderingInfo const & ) const = default; +#else + bool operator==( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewMask == rhs.viewMask ) && + ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && + ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ) && + ( rasterizationSamples == rhs.rasterizationSamples ); +# endif + } + + bool operator!=( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderingInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderingFlags flags = {}; + uint32_t viewMask = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {}; + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + }; + + template <> + struct CppType + { + using Type = CommandBufferInheritanceRenderingInfo; + }; + + using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo; + + struct Viewport + { + using NativeType = VkViewport; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + Viewport( float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT + : x( x_ ) + , y( y_ ) + , width( width_ ) + , height( height_ ) + , minDepth( minDepth_ ) + , maxDepth( maxDepth_ ) + { + } + + VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT : Viewport( *reinterpret_cast( &rhs ) ) {} + + Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT + { + minDepth = minDepth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxDepth = maxDepth_; + return *this; + } +#endif 
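// The chained setters above allow fluent construction of a Viewport
// (a sketch; `extent` is a hypothetical vk::Extent2D of the swapchain image):
//
//   vk::Viewport vp = vk::Viewport{}
//                         .setX( 0.0f )
//                         .setY( 0.0f )
//                         .setWidth( static_cast<float>( extent.width ) )
//                         .setHeight( static_cast<float>( extent.height ) )
//                         .setMinDepth( 0.0f )
//                         .setMaxDepth( 1.0f );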
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkViewport const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkViewport &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( x, y, width, height, minDepth, maxDepth ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Viewport const & ) const = default; +#else + bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( x == rhs.x ) && ( y == rhs.y ) && ( width == rhs.width ) && ( height == rhs.height ) && ( minDepth == rhs.minDepth ) && + ( maxDepth == rhs.maxDepth ); +# endif + } + + bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float x = {}; + float y = {}; + float width = {}; + float height = {}; + float minDepth = {}; + float maxDepth = {}; + }; + + struct CommandBufferInheritanceViewportScissorInfoNV + { + using NativeType = VkCommandBufferInheritanceViewportScissorInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ = {}, + uint32_t viewportDepthCount_ = {}, + const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , viewportScissor2D( viewportScissor2D_ ) + , viewportDepthCount( viewportDepthCount_ ) + , pViewportDepths( pViewportDepths_ ) + { + } + + VULKAN_HPP_CONSTEXPR + CommandBufferInheritanceViewportScissorInfoNV( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceViewportScissorInfoNV( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceViewportScissorInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferInheritanceViewportScissorInfoNV & operator=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferInheritanceViewportScissorInfoNV & operator=( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & + setViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ ) VULKAN_HPP_NOEXCEPT + { + viewportScissor2D = viewportScissor2D_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportDepthCount( uint32_t viewportDepthCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportDepthCount = viewportDepthCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & + setPViewportDepths( const 
VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ ) VULKAN_HPP_NOEXCEPT + { + pViewportDepths = pViewportDepths_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, viewportScissor2D, viewportDepthCount, pViewportDepths ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceViewportScissorInfoNV const & ) const = default; +#else + bool operator==( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportScissor2D == rhs.viewportScissor2D ) && + ( viewportDepthCount == rhs.viewportDepthCount ) && ( pViewportDepths == rhs.pViewportDepths ); +# endif + } + + bool operator!=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D = {}; + uint32_t viewportDepthCount = {}; + const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths = {}; + }; + + template <> + struct CppType + { + using Type = CommandBufferInheritanceViewportScissorInfoNV; + }; + + struct CommandBufferSubmitInfo + { + using NativeType = VkCommandBufferSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {}, + uint32_t deviceMask_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , commandBuffer( commandBuffer_ ) + , deviceMask( deviceMask_ ) + { + } + + VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferSubmitInfo( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferSubmitInfo( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferSubmitInfo & operator=( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandBufferSubmitInfo & operator=( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT + { + commandBuffer = commandBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + 
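// CommandBufferSubmitInfo feeds synchronization2-style submission through
// vk::SubmitInfo2 (a sketch; `cmd` and `queue` are hypothetical handles):
//
//   vk::CommandBufferSubmitInfo cbInfo{ cmd, 0 };  // deviceMask 0 = all devices in the group
//   vk::SubmitInfo2 submit{};
//   submit.setCommandBufferInfos( cbInfo );
//   queue.submit2( submit );                       // wraps vkQueueSubmit2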
return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCommandBufferSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, commandBuffer, deviceMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferSubmitInfo const & ) const = default; +#else + bool operator==( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBuffer == rhs.commandBuffer ) && ( deviceMask == rhs.deviceMask ); +# endif + } + + bool operator!=( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferSubmitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {}; + uint32_t deviceMask = {}; + }; + + template <> + struct CppType + { + using Type = CommandBufferSubmitInfo; + }; + + using CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo; + + struct CommandPoolCreateInfo + { + using NativeType = VkCommandPoolCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + { + } + + VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandPoolCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() 
const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, queueFamilyIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandPoolCreateInfo const & ) const = default; +#else + bool operator==( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ); +# endif + } + + bool operator!=( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + }; + + template <> + struct CppType + { + using Type = CommandPoolCreateInfo; + }; + + struct SpecializationMapEntry + { + using NativeType = VkSpecializationMapEntry; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT + : constantID( constantID_ ) + , offset( offset_ ) + , size( size_ ) + { + } + + VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT + : SpecializationMapEntry( *reinterpret_cast( &rhs ) ) + { + } + + SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT + { + constantID = constantID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( constantID, offset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SpecializationMapEntry const & ) const = default; +#else + bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( constantID == rhs.constantID ) && ( offset == rhs.offset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t constantID = {}; + uint32_t offset = {}; + size_t size = {}; + }; + + struct SpecializationInfo + { 
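// SpecializationMapEntry (above) and SpecializationInfo (below) together
// describe shader specialization constants. A sketch, assuming the shader
// declares a uint32_t with constant_id = 0; all names are hypothetical:
//
//   uint32_t workgroupSize = 64;
//   vk::SpecializationMapEntry entry{ 0 /*constantID*/, 0 /*offset*/, sizeof( uint32_t ) };
//   vk::SpecializationInfo spec{ 1, &entry, sizeof( workgroupSize ), &workgroupSize };
//   // later: PipelineShaderStageCreateInfo::setPSpecializationInfo( &spec )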
+ using NativeType = VkSpecializationInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ = {}, + size_t dataSize_ = {}, + const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT + : mapEntryCount( mapEntryCount_ ) + , pMapEntries( pMapEntries_ ) + , dataSize( dataSize_ ) + , pData( pData_ ) + { + } + + VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SpecializationInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mapEntries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ = {} ) + : mapEntryCount( static_cast( mapEntries_.size() ) ) + , pMapEntries( mapEntries_.data() ) + , dataSize( data_.size() * sizeof( T ) ) + , pData( data_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + mapEntryCount = mapEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT + { + pMapEntries = pMapEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SpecializationInfo & + setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mapEntries_ ) VULKAN_HPP_NOEXCEPT + { + mapEntryCount = static_cast( mapEntries_.size() ); + pMapEntries = mapEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = dataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = data_.size() * sizeof( T ); + pData = data_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( mapEntryCount, pMapEntries, dataSize, pData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SpecializationInfo const & ) const = default; +#else + bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == 
rhs.reflect(); +# else + return ( mapEntryCount == rhs.mapEntryCount ) && ( pMapEntries == rhs.pMapEntries ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); +# endif + } + + bool operator!=( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t mapEntryCount = {}; + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries = {}; + size_t dataSize = {}; + const void * pData = {}; + }; + + struct PipelineShaderStageCreateInfo + { + using NativeType = VkPipelineShaderStageCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, + VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, + const char * pName_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , stage( stage_ ) + , module( module_ ) + , pName( pName_ ) + , pSpecializationInfo( pSpecializationInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT + { + stage = stage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT + { + module = module_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + { + pName = pName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & + setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT + { + pSpecializationInfo = pSpecializationInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# 
else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, stage, module, pName, pSpecializationInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = stage <=> rhs.stage; cmp != 0 ) + return cmp; + if ( auto cmp = module <=> rhs.module; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( module == rhs.module ) && + ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( pSpecializationInfo == rhs.pSpecializationInfo ); + } + + bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex; + VULKAN_HPP_NAMESPACE::ShaderModule module = {}; + const char * pName = {}; + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {}; + }; + + template <> + struct CppType + { + using Type = PipelineShaderStageCreateInfo; + }; + + struct ComputePipelineCreateInfo + { + using NativeType = VkComputePipelineCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , stage( stage_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } + + VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ComputePipelineCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
ComputePipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT + { + stage = stage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, stage, layout, basePipelineHandle, basePipelineIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ComputePipelineCreateInfo const & ) const = default; +#else + bool operator==( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( layout == rhs.layout ) && + ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); +# endif + } + + bool operator!=( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + + template <> + struct CppType + { + using Type = ComputePipelineCreateInfo; + }; + + struct ComputePipelineIndirectBufferInfoNV + { + using NativeType = VkComputePipelineIndirectBufferInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineIndirectBufferInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , deviceAddress( deviceAddress_ ) + , size( size_ ) + , pipelineDeviceAddressCaptureReplay( 
pipelineDeviceAddressCaptureReplay_ ) + { + } + + VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ComputePipelineIndirectBufferInfoNV( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ComputePipelineIndirectBufferInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ComputePipelineIndirectBufferInfoNV & operator=( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ComputePipelineIndirectBufferInfoNV & operator=( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & + setPipelineDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + pipelineDeviceAddressCaptureReplay = pipelineDeviceAddressCaptureReplay_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkComputePipelineIndirectBufferInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkComputePipelineIndirectBufferInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceAddress, size, pipelineDeviceAddressCaptureReplay ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ComputePipelineIndirectBufferInfoNV const & ) const = default; +#else + bool operator==( ComputePipelineIndirectBufferInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && ( size == rhs.size ) && + ( pipelineDeviceAddressCaptureReplay == rhs.pipelineDeviceAddressCaptureReplay ); +# endif + } + + bool operator!=( ComputePipelineIndirectBufferInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineIndirectBufferInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay = {}; + }; + + template <> + struct CppType + { + using Type = ComputePipelineIndirectBufferInfoNV; + }; + + struct ConditionalRenderingBeginInfoEXT + { + using NativeType = VkConditionalRenderingBeginInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eConditionalRenderingBeginInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , buffer( buffer_ ) + , offset( offset_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ConditionalRenderingBeginInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer, offset, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ConditionalRenderingBeginInfoEXT const & ) const = default; +#else + bool operator==( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {}; + }; + + template <> + struct CppType + { + using Type = ConditionalRenderingBeginInfoEXT; + }; + + struct ConformanceVersion + { + using NativeType = VkConformanceVersion; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
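// ConditionalRenderingBeginInfoEXT (above) brackets commands that the GPU
// discards when the 32-bit predicate at buffer + offset is zero. A sketch,
// assuming VK_EXT_conditional_rendering is enabled; `predicateBuffer` and
// `cmd` are hypothetical handles:
//
//   vk::ConditionalRenderingBeginInfoEXT cond{ predicateBuffer, 0 /*offset*/ };
//   cmd.beginConditionalRenderingEXT( cond );
//   // ... conditionally executed draws ...
//   cmd.endConditionalRenderingEXT();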
VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT + : major( major_ ) + , minor( minor_ ) + , subminor( subminor_ ) + , patch( patch_ ) + { + } + + VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT : ConformanceVersion( *reinterpret_cast( &rhs ) ) {} + + ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT + { + major = major_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT + { + minor = minor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT + { + subminor = subminor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT + { + patch = patch_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( major, minor, subminor, patch ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ConformanceVersion const & ) const = default; +#else + bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( major == rhs.major ) && ( minor == rhs.minor ) && ( subminor == rhs.subminor ) && ( patch == rhs.patch ); +# endif + } + + bool operator!=( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint8_t major = {}; + uint8_t minor = {}; + uint8_t subminor = {}; + uint8_t patch = {}; + }; + + using ConformanceVersionKHR = ConformanceVersion; + + struct CooperativeMatrixPropertiesKHR + { + using NativeType = VkCooperativeMatrixPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesKHR( uint32_t MSize_ = {}, + uint32_t NSize_ = {}, + uint32_t KSize_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation_ = {}, + VULKAN_HPP_NAMESPACE::ScopeKHR scope_ = 
VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , MSize( MSize_ ) + , NSize( NSize_ ) + , KSize( KSize_ ) + , AType( AType_ ) + , BType( BType_ ) + , CType( CType_ ) + , ResultType( ResultType_ ) + , saturatingAccumulation( saturatingAccumulation_ ) + , scope( scope_ ) + { + } + + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CooperativeMatrixPropertiesKHR( VkCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CooperativeMatrixPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + CooperativeMatrixPropertiesKHR & operator=( CooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CooperativeMatrixPropertiesKHR & operator=( VkCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesKHR & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT + { + MSize = MSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesKHR & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT + { + NSize = NSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesKHR & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT + { + KSize = KSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesKHR & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType_ ) VULKAN_HPP_NOEXCEPT + { + AType = AType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesKHR & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType_ ) VULKAN_HPP_NOEXCEPT + { + BType = BType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesKHR & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType_ ) VULKAN_HPP_NOEXCEPT + { + CType = CType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesKHR & setResultType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType_ ) VULKAN_HPP_NOEXCEPT + { + ResultType = ResultType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesKHR & + setSaturatingAccumulation( VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation_ ) VULKAN_HPP_NOEXCEPT + { + saturatingAccumulation = saturatingAccumulation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesKHR & setScope( VULKAN_HPP_NAMESPACE::ScopeKHR scope_ ) VULKAN_HPP_NOEXCEPT + { + scope = scope_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCooperativeMatrixPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCooperativeMatrixPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, ResultType, saturatingAccumulation, scope ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CooperativeMatrixPropertiesKHR const & ) const = default; +#else + bool operator==( 
CooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && ( KSize == rhs.KSize ) && + ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && ( ResultType == rhs.ResultType ) && + ( saturatingAccumulation == rhs.saturatingAccumulation ) && ( scope == rhs.scope ); +# endif + } + + bool operator!=( CooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesKHR; + void * pNext = {}; + uint32_t MSize = {}; + uint32_t NSize = {}; + uint32_t KSize = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation = {}; + VULKAN_HPP_NAMESPACE::ScopeKHR scope = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice; + }; + + template <> + struct CppType + { + using Type = CooperativeMatrixPropertiesKHR; + }; + + struct CooperativeMatrixPropertiesNV + { + using NativeType = VkCooperativeMatrixPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = {}, + uint32_t NSize_ = {}, + uint32_t KSize_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = {}, + VULKAN_HPP_NAMESPACE::ScopeNV scope_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , MSize( MSize_ ) + , NSize( NSize_ ) + , KSize( KSize_ ) + , AType( AType_ ) + , BType( BType_ ) + , CType( CType_ ) + , DType( DType_ ) + , scope( scope_ ) + { + } + + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CooperativeMatrixPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT + { + MSize = MSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT + { + NSize = NSize_; + return 
*this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT + { + KSize = KSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT + { + AType = AType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT + { + BType = BType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT + { + CType = CType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT + { + DType = DType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT + { + scope = scope_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, DType, scope ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default; +#else + bool operator==( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && ( KSize == rhs.KSize ) && + ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && ( DType == rhs.DType ) && ( scope == rhs.scope ); +# endif + } + + bool operator!=( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV; + void * pNext = {}; + uint32_t MSize = {}; + uint32_t NSize = {}; + uint32_t KSize = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = {}; + VULKAN_HPP_NAMESPACE::ScopeNV scope = {}; + }; + + template <> + struct CppType + { + using Type = CooperativeMatrixPropertiesNV; + }; + + struct CopyAccelerationStructureInfoKHR + { + using NativeType = VkCopyAccelerationStructureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : 
pNext( pNext_ ) + , src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyAccelerationStructureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyAccelerationStructureInfoKHR const & ) const = default; +#else + bool operator==( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == rhs.mode ); +# endif + } + + bool operator!=( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + + template <> + struct CppType + { + using Type = CopyAccelerationStructureInfoKHR; + }; + + struct CopyAccelerationStructureToMemoryInfoKHR + { + using NativeType = VkCopyAccelerationStructureToMemoryInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( + 
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyAccelerationStructureToMemoryInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyAccelerationStructureToMemoryInfoKHR & operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + + template <> + struct CppType + { + using Type = CopyAccelerationStructureToMemoryInfoKHR; + }; + + struct CopyBufferInfo2 + { + using NativeType = VkCopyBufferInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , srcBuffer( srcBuffer_ ) + , dstBuffer( 
dstBuffer_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyBufferInfo2( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferInfo2( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyBufferInfo2( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyBufferInfo2 & operator=( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyBufferInfo2 & operator=( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBuffer = dstBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyBufferInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcBuffer, dstBuffer, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyBufferInfo2 const & ) const = default; +#else + bool operator==( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstBuffer == rhs.dstBuffer ) && + ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + 
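+ // Usage sketch, not part of the generated header (assumes a recorded vk::CommandBuffer `cmd` and valid buffers `src` and `dst`): CopyBufferInfo2 feeds vkCmdCopyBuffer2 through the enhanced-mode ArrayProxy constructor declared above, e.g.
+ //   vk::BufferCopy2 region( /*srcOffset*/ 0, /*dstOffset*/ 0, /*size*/ 256 );
+ //   cmd.copyBuffer2( vk::CopyBufferInfo2( src, dst, region ) );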
VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyBufferInfo2; + }; + + using CopyBufferInfo2KHR = CopyBufferInfo2; + + struct CopyBufferToImageInfo2 + { + using NativeType = VkCopyBufferToImageInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , srcBuffer( srcBuffer_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyBufferToImageInfo2( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyBufferToImageInfo2( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferToImageInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , srcBuffer( srcBuffer_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyBufferToImageInfo2 & operator=( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyBufferToImageInfo2 & operator=( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferToImageInfo2 & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + 
{ + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyBufferToImageInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyBufferToImageInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyBufferToImageInfo2 const & ) const = default; +#else + bool operator==( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstImage == rhs.dstImage ) && + ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferToImageInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyBufferToImageInfo2; + }; + + using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2; + + struct CopyCommandTransformInfoQCOM + { + using NativeType = VkCopyCommandTransformInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CopyCommandTransformInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , transform( transform_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyCommandTransformInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return 
*this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, transform ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyCommandTransformInfoQCOM const & ) const = default; +#else + bool operator==( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ); +# endif + } + + bool operator!=( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + }; + + template <> + struct CppType + { + using Type = CopyCommandTransformInfoQCOM; + }; + + struct CopyDescriptorSet + { + using NativeType = VkCopyDescriptorSet; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, + uint32_t srcBinding_ = {}, + uint32_t srcArrayElement_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , srcSet( srcSet_ ) + , srcBinding( srcBinding_ ) + , srcArrayElement( srcArrayElement_ ) + , dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : CopyDescriptorSet( *reinterpret_cast( &rhs ) ) {} + + CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT + { + srcSet = srcSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT + { + srcBinding = srcBinding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + srcArrayElement = srcArrayElement_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT + { + dstSet = dstSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT + { + dstBinding = dstBinding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + dstArrayElement = dstArrayElement_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcSet, srcBinding, srcArrayElement, dstSet, dstBinding, dstArrayElement, descriptorCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyDescriptorSet const & ) const = default; +#else + bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSet == rhs.srcSet ) && ( srcBinding == rhs.srcBinding ) && + ( srcArrayElement == rhs.srcArrayElement ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) && + ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ); +# endif + } + + bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {}; + uint32_t srcBinding = {}; + uint32_t srcArrayElement = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + }; + + template <> + struct CppType + { + using Type = CopyDescriptorSet; + }; + + struct ImageCopy2 + { + using NativeType = VkImageCopy2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCopy2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageCopy2( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy2( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy2( *reinterpret_cast( &rhs ) ) {} + + ImageCopy2 & operator=( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif 
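+ // Sketch only (assumes a command buffer `cmd`, a vk::ImageSubresourceLayers `layers`, and images `srcImg`/`dstImg` already in the layouts named below): ImageCopy2 describes one region for vkCmdCopyImage2, and CopyImageInfo2 further down bundles the images, layouts and regions, e.g.
+ //   vk::ImageCopy2 region( layers, {}, layers, {}, vk::Extent3D( 64, 64, 1 ) );
+ //   cmd.copyImage2( vk::CopyImageInfo2( srcImg, vk::ImageLayout::eTransferSrcOptimal, dstImg, vk::ImageLayout::eTransferDstOptimal, region ) );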
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageCopy2 & operator=( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageCopy2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCopy2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCopy2 const & ) const = default; +#else + bool operator==( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); +# endif + } + + bool operator!=( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + + template <> + struct CppType + { + using Type = ImageCopy2; + }; + + using ImageCopy2KHR = ImageCopy2; + + struct CopyImageInfo2 + { + using NativeType = VkCopyImageInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = 
VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyImageInfo2( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageInfo2( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyImageInfo2( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyImageInfo2 & operator=( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyImageInfo2 & operator=( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyImageInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, 
srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyImageInfo2 const & ) const = default; +#else + bool operator==( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && + ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyImageInfo2; + }; + + using CopyImageInfo2KHR = CopyImageInfo2; + + struct CopyImageToBufferInfo2 + { + using NativeType = VkCopyImageToBufferInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstBuffer( dstBuffer_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageToBufferInfo2( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToBufferInfo2( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToBufferInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstBuffer( dstBuffer_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyImageToBufferInfo2 & operator=( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyImageToBufferInfo2 & operator=( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBuffer = dstBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToBufferInfo2 & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyImageToBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageToBufferInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyImageToBufferInfo2 const & ) const = default; +#else + bool operator==( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && + ( dstBuffer == rhs.dstBuffer ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyImageToBufferInfo2; + }; + + using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2; + + struct CopyImageToImageInfoEXT + { + using NativeType = VkCopyImageToImageInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToImageInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + 
VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyImageToImageInfoEXT( CopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageToImageInfoEXT( VkCopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToImageInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyImageToImageInfoEXT & operator=( CopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyImageToImageInfoEXT & operator=( VkCopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToImageInfoEXT & + setRegions( 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyImageToImageInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageToImageInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyImageToImageInfoEXT const & ) const = default; +#else + bool operator==( CopyImageToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && + ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyImageToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToImageInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyImageToImageInfoEXT; + }; + + struct ImageToMemoryCopyEXT + { + using NativeType = VkImageToMemoryCopyEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageToMemoryCopyEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageToMemoryCopyEXT( void * pHostPointer_ = {}, + uint32_t memoryRowLength_ = {}, + uint32_t memoryImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pHostPointer( pHostPointer_ ) + , memoryRowLength( memoryRowLength_ ) + , memoryImageHeight( memoryImageHeight_ ) + , imageSubresource( imageSubresource_ ) + , imageOffset( imageOffset_ ) + , imageExtent( imageExtent_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageToMemoryCopyEXT( ImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageToMemoryCopyEXT( VkImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageToMemoryCopyEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageToMemoryCopyEXT & operator=( ImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageToMemoryCopyEXT & operator=( 
VkImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT + { + pHostPointer = pHostPointer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT + { + memoryRowLength = memoryRowLength_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + memoryImageHeight = memoryImageHeight_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageToMemoryCopyEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageToMemoryCopyEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageToMemoryCopyEXT const & ) const = default; +#else + bool operator==( ImageToMemoryCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pHostPointer == rhs.pHostPointer ) && ( memoryRowLength == rhs.memoryRowLength ) && + ( memoryImageHeight == rhs.memoryImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && + ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( ImageToMemoryCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageToMemoryCopyEXT; + const void * pNext = {}; + void * pHostPointer = {}; + uint32_t memoryRowLength = {}; + uint32_t memoryImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + + template <> + struct CppType + { + using Type = ImageToMemoryCopyEXT; + }; + + struct CopyImageToMemoryInfoEXT + { + using NativeType = VkCopyImageToMemoryInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToMemoryInfoEXT; + +#if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfoEXT( CopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageToMemoryInfoEXT( VkCopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToMemoryInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToMemoryInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyImageToMemoryInfoEXT & operator=( CopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyImageToMemoryInfoEXT & operator=( VkCopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setPRegions( const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToMemoryInfoEXT & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyImageToMemoryInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageToMemoryInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, srcImage, srcImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyImageToMemoryInfoEXT const & ) const = default; +#else + bool operator==( CopyImageToMemoryInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyImageToMemoryInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToMemoryInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyImageToMemoryInfoEXT; + }; + + struct CopyMemoryIndirectCommandNV + { + using NativeType = VkCopyMemoryIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAddress( srcAddress_ ) + , dstAddress( dstAddress_ ) + , size( size_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandNV( CopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryIndirectCommandNV( VkCopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + CopyMemoryIndirectCommandNV & operator=( CopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyMemoryIndirectCommandNV & operator=( VkCopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT + { + srcAddress = srcAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setDstAddress( VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ ) VULKAN_HPP_NOEXCEPT + { + dstAddress = dstAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyMemoryIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( 
srcAddress, dstAddress, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyMemoryIndirectCommandNV const & ) const = default; +#else + bool operator==( CopyMemoryIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( size == rhs.size ); +# endif + } + + bool operator!=( CopyMemoryIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + struct CopyMemoryToAccelerationStructureInfoKHR + { + using NativeType = VkCopyMemoryToAccelerationStructureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryToAccelerationStructureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & + setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif 
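+      // reflect() ties every member into a tuple of const references: C++14 and newer deduce
+      // the return type via 'auto', older dialects spell the std::tuple out above.  When
+      // VULKAN_HPP_USE_REFLECT is defined, operator== reuses this tuple, so comparison is
+      // element-wise over the tied members (pointer members compare shallowly).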
+ reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + }; + + template <> + struct CppType + { + using Type = CopyMemoryToAccelerationStructureInfoKHR; + }; + + struct CopyMemoryToImageIndirectCommandNV + { + using NativeType = VkCopyMemoryToImageIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, + uint32_t bufferRowLength_ = {}, + uint32_t bufferImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAddress( srcAddress_ ) + , bufferRowLength( bufferRowLength_ ) + , bufferImageHeight( bufferImageHeight_ ) + , imageSubresource( imageSubresource_ ) + , imageOffset( imageOffset_ ) + , imageExtent( imageExtent_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandNV( CopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToImageIndirectCommandNV( VkCopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryToImageIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + CopyMemoryToImageIndirectCommandNV & operator=( CopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyMemoryToImageIndirectCommandNV & operator=( VkCopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT + { + srcAddress = srcAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT + { + bufferRowLength = bufferRowLength_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + bufferImageHeight = bufferImageHeight_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyMemoryToImageIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkCopyMemoryToImageIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcAddress, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyMemoryToImageIndirectCommandNV const & ) const = default; +#else + bool operator==( CopyMemoryToImageIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcAddress == rhs.srcAddress ) && ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) && + ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( CopyMemoryToImageIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + + struct MemoryToImageCopyEXT + { + using NativeType = VkMemoryToImageCopyEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryToImageCopyEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryToImageCopyEXT( const void * pHostPointer_ = {}, + uint32_t memoryRowLength_ = {}, + uint32_t memoryImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pHostPointer( pHostPointer_ ) + , memoryRowLength( memoryRowLength_ ) + , memoryImageHeight( memoryImageHeight_ ) + , imageSubresource( imageSubresource_ ) + , imageOffset( imageOffset_ ) + , imageExtent( imageExtent_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryToImageCopyEXT( MemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryToImageCopyEXT( VkMemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryToImageCopyEXT( *reinterpret_cast( &rhs ) ) + { + } + + MemoryToImageCopyEXT & operator=( MemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryToImageCopyEXT & operator=( VkMemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setPHostPointer( const void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT + { + pHostPointer = pHostPointer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT + { + memoryRowLength = memoryRowLength_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setMemoryImageHeight( uint32_t 
memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + memoryImageHeight = memoryImageHeight_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMemoryToImageCopyEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryToImageCopyEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryToImageCopyEXT const & ) const = default; +#else + bool operator==( MemoryToImageCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pHostPointer == rhs.pHostPointer ) && ( memoryRowLength == rhs.memoryRowLength ) && + ( memoryImageHeight == rhs.memoryImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && + ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( MemoryToImageCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryToImageCopyEXT; + const void * pNext = {}; + const void * pHostPointer = {}; + uint32_t memoryRowLength = {}; + uint32_t memoryImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + + template <> + struct CppType + { + using Type = MemoryToImageCopyEXT; + }; + + struct CopyMemoryToImageInfoEXT + { + using NativeType = VkCopyMemoryToImageInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToImageInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfoEXT( CopyMemoryToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
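+    // The constructor below reinterprets a raw VkCopyMemoryToImageInfoEXT, and the
+    // enhanced-mode overload after it derives regionCount/pRegions from an ArrayProxy in
+    // one step.  Illustrative sketch only; the handles and the copyMemoryToImageEXT call
+    // are assumptions about the surrounding application, not part of this header:
+    //   vk::MemoryToImageCopyEXT region{ pixels, 0, 0, subresource, offset, extent };
+    //   vk::CopyMemoryToImageInfoEXT info{ {}, dstImage, vk::ImageLayout::eGeneral, region };
+    //   device.copyMemoryToImageEXT( info );  // VK_EXT_host_image_copy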
CopyMemoryToImageInfoEXT( VkCopyMemoryToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryToImageInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyMemoryToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyMemoryToImageInfoEXT & operator=( CopyMemoryToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyMemoryToImageInfoEXT & operator=( VkCopyMemoryToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setPRegions( const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyMemoryToImageInfoEXT & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyMemoryToImageInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToImageInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, dstImage, dstImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyMemoryToImageInfoEXT const & ) const = default; +#else + bool operator==( CopyMemoryToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dstImage == rhs.dstImage ) && + ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == 
rhs.pRegions ); +# endif + } + + bool operator!=( CopyMemoryToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToImageInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyMemoryToImageInfoEXT; + }; + + struct CopyMemoryToMicromapInfoEXT + { + using NativeType = VkCopyMemoryToMicromapInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToMicromapInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, + VULKAN_HPP_NAMESPACE::MicromapEXT dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToMicromapInfoEXT( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryToMicromapInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + CopyMemoryToMicromapInfoEXT & operator=( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyMemoryToMicromapInfoEXT & operator=( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setDst( VULKAN_HPP_NAMESPACE::MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyMemoryToMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToMicromapInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT dst = {}; + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = 
VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyMemoryToMicromapInfoEXT>
+  {
+    using Type = CopyMemoryToMicromapInfoEXT;
+  };
+
+  struct CopyMicromapInfoEXT
+  {
+    using NativeType = VkCopyMicromapInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( VULKAN_HPP_NAMESPACE::MicromapEXT         src_  = {},
+                                              VULKAN_HPP_NAMESPACE::MicromapEXT         dst_  = {},
+                                              VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone,
+                                              const void *                              pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , src( src_ )
+      , dst( dst_ )
+      , mode( mode_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    CopyMicromapInfoEXT( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CopyMicromapInfoEXT( *reinterpret_cast<CopyMicromapInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    CopyMicromapInfoEXT & operator=( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    CopyMicromapInfoEXT & operator=( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT
+    {
+      src = src_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setDst( VULKAN_HPP_NAMESPACE::MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dst = dst_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkCopyMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyMicromapInfoEXT *>( this );
+    }
+
+    operator VkCopyMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyMicromapInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::MicromapEXT const &,
+               VULKAN_HPP_NAMESPACE::MicromapEXT const &,
+               VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, src, dst, mode );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( CopyMicromapInfoEXT const & ) const = default;
+#else
+    bool operator==( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == rhs.mode );
+#  endif
+    }
+
+    bool operator!=( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType       sType = StructureType::eCopyMicromapInfoEXT;
+    const void *                              pNext = {};
+    VULKAN_HPP_NAMESPACE::MicromapEXT         src   = {};
+    VULKAN_HPP_NAMESPACE::MicromapEXT         dst   = {};
+    VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode  = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eCopyMicromapInfoEXT>
+  {
+    using Type = CopyMicromapInfoEXT;
+  };
+
+  struct CopyMicromapToMemoryInfoEXT
+  {
+    using NativeType = 
VkCopyMicromapToMemoryInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapToMemoryInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( VULKAN_HPP_NAMESPACE::MicromapEXT src_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , src( src_ ) + , dst( dst_ ) + , mode( mode_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMicromapToMemoryInfoEXT( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMicromapToMemoryInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + CopyMicromapToMemoryInfoEXT & operator=( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyMicromapToMemoryInfoEXT & operator=( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT + { + src = src_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT + { + dst = dst_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyMicromapToMemoryInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMicromapToMemoryInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMicromapToMemoryInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT src = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone; + }; + + template <> + struct CppType + { + using Type = CopyMicromapToMemoryInfoEXT; + }; + + struct CuFunctionCreateInfoNVX + { + using NativeType = VkCuFunctionCreateInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuFunctionCreateInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CuFunctionCreateInfoNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , module( module_ ) + , pName( pName_ ) + { + } + + VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuFunctionCreateInfoNVX const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + + CuFunctionCreateInfoNVX( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuFunctionCreateInfoNVX( *reinterpret_cast( &rhs ) ) + { + } + + CuFunctionCreateInfoNVX & operator=( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CuFunctionCreateInfoNVX & operator=( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setModule( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ ) VULKAN_HPP_NOEXCEPT + { + module = module_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + { + pName = pName_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, module, pName ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = module <=> rhs.module; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ); + } + + bool operator!=( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuFunctionCreateInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CuModuleNVX module = {}; + const char * pName = {}; + }; + + template <> + struct CppType + { + using Type = CuFunctionCreateInfoNVX; + }; + + struct CuLaunchInfoNVX + { + using NativeType = VkCuLaunchInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuLaunchInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ = {}, + uint32_t gridDimX_ = {}, + uint32_t gridDimY_ = {}, + uint32_t gridDimZ_ = {}, + uint32_t blockDimX_ = {}, + uint32_t blockDimY_ = {}, + uint32_t blockDimZ_ = {}, + uint32_t sharedMemBytes_ = {}, + size_t paramCount_ = {}, + const void * const * pParams_ = {}, + size_t extraCount_ = {}, + const void * const * pExtras_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , function( function_ ) + , gridDimX( gridDimX_ ) + , gridDimY( gridDimY_ ) + , gridDimZ( gridDimZ_ ) + , blockDimX( blockDimX_ ) + , blockDimY( blockDimY_ ) + , blockDimZ( blockDimZ_ ) + , sharedMemBytes( sharedMemBytes_ ) + , paramCount( paramCount_ ) + , pParams( pParams_ ) + , extraCount( extraCount_ ) + , pExtras( pExtras_ ) + { + } + + VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT : CuLaunchInfoNVX( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_, + uint32_t gridDimX_, + uint32_t gridDimY_, + uint32_t gridDimZ_, + uint32_t blockDimX_, + uint32_t blockDimY_, + uint32_t blockDimZ_, + uint32_t sharedMemBytes_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , function( function_ ) + , gridDimX( gridDimX_ ) + , gridDimY( gridDimY_ ) + , gridDimZ( gridDimZ_ ) + , blockDimX( blockDimX_ ) + , blockDimY( blockDimY_ ) + , blockDimZ( blockDimZ_ ) + , sharedMemBytes( sharedMemBytes_ ) + , paramCount( params_.size() ) + , pParams( params_.data() ) + , extraCount( extras_.size() ) + , pExtras( extras_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CuLaunchInfoNVX & operator=( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CuLaunchInfoNVX & operator=( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setFunction( VULKAN_HPP_NAMESPACE::CuFunctionNVX 
function_ ) VULKAN_HPP_NOEXCEPT + { + function = function_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT + { + gridDimX = gridDimX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT + { + gridDimY = gridDimY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT + { + gridDimZ = gridDimZ_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT + { + blockDimX = blockDimX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT + { + blockDimY = blockDimY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT + { + blockDimZ = blockDimZ_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT + { + sharedMemBytes = sharedMemBytes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT + { + paramCount = paramCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT + { + pParams = pParams_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CuLaunchInfoNVX & setParams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_ ) VULKAN_HPP_NOEXCEPT + { + paramCount = params_.size(); + pParams = params_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT + { + extraCount = extraCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT + { + pExtras = pExtras_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CuLaunchInfoNVX & setExtras( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ ) VULKAN_HPP_NOEXCEPT + { + extraCount = extras_.size(); + pExtras = extras_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuLaunchInfoNVX const & ) const = default; +#else + bool operator==( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) && + ( gridDimZ == rhs.gridDimZ ) && ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) && + ( 
sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) && + ( pExtras == rhs.pExtras ); +# endif + } + + bool operator!=( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuLaunchInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CuFunctionNVX function = {}; + uint32_t gridDimX = {}; + uint32_t gridDimY = {}; + uint32_t gridDimZ = {}; + uint32_t blockDimX = {}; + uint32_t blockDimY = {}; + uint32_t blockDimZ = {}; + uint32_t sharedMemBytes = {}; + size_t paramCount = {}; + const void * const * pParams = {}; + size_t extraCount = {}; + const void * const * pExtras = {}; + }; + + template <> + struct CppType + { + using Type = CuLaunchInfoNVX; + }; + + struct CuModuleCreateInfoNVX + { + using NativeType = VkCuModuleCreateInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleCreateInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , dataSize( dataSize_ ) + , pData( pData_ ) + { + } + + VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuModuleCreateInfoNVX( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuModuleCreateInfoNVX( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + CuModuleCreateInfoNVX( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CuModuleCreateInfoNVX & operator=( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CuModuleCreateInfoNVX & operator=( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = dataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + CuModuleCreateInfoNVX & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = data_.size() * sizeof( T ); + pData = data_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dataSize, pData ); 
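+      // NB: reflect() feeds operator==, so dataSize and the pData pointer itself are
+      // compared; the bytes of the module blob are not inspected.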
+ } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuModuleCreateInfoNVX const & ) const = default; +#else + bool operator==( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); +# endif + } + + bool operator!=( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuModuleCreateInfoNVX; + const void * pNext = {}; + size_t dataSize = {}; + const void * pData = {}; + }; + + template <> + struct CppType + { + using Type = CuModuleCreateInfoNVX; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct CudaFunctionCreateInfoNV + { + using NativeType = VkCudaFunctionCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaFunctionCreateInfoNV; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CudaFunctionCreateInfoNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , module( module_ ) + , pName( pName_ ) + { + } + + VULKAN_HPP_CONSTEXPR CudaFunctionCreateInfoNV( CudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CudaFunctionCreateInfoNV( VkCudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CudaFunctionCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + CudaFunctionCreateInfoNV & operator=( CudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CudaFunctionCreateInfoNV & operator=( VkCudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setModule( VULKAN_HPP_NAMESPACE::CudaModuleNV module_ ) VULKAN_HPP_NOEXCEPT + { + module = module_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + { + pName = pName_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCudaFunctionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCudaFunctionCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, module, pName ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = module <=> rhs.module; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ); + } + + bool operator!=( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCudaFunctionCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CudaModuleNV module = {}; + const char * pName = {}; + }; + + template <> + struct CppType + { + using Type = CudaFunctionCreateInfoNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct CudaLaunchInfoNV + { + using NativeType = VkCudaLaunchInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaLaunchInfoNV; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CudaLaunchInfoNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function_ = {}, + uint32_t gridDimX_ = {}, + uint32_t gridDimY_ = {}, + uint32_t gridDimZ_ = {}, + uint32_t blockDimX_ = {}, + uint32_t blockDimY_ = {}, + uint32_t blockDimZ_ = {}, + uint32_t sharedMemBytes_ = {}, + size_t paramCount_ = {}, + const void * const * pParams_ = {}, + size_t extraCount_ = {}, + const void * const * pExtras_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , function( function_ ) + , gridDimX( gridDimX_ ) + , gridDimY( gridDimY_ ) + , gridDimZ( gridDimZ_ ) + , blockDimX( blockDimX_ ) + , blockDimY( blockDimY_ ) + , blockDimZ( blockDimZ_ ) + , sharedMemBytes( sharedMemBytes_ ) + , paramCount( paramCount_ ) + , pParams( pParams_ ) + , extraCount( extraCount_ ) + , pExtras( pExtras_ ) + { + } + + VULKAN_HPP_CONSTEXPR CudaLaunchInfoNV( CudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CudaLaunchInfoNV( VkCudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : CudaLaunchInfoNV( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CudaLaunchInfoNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function_, + uint32_t gridDimX_, + uint32_t gridDimY_, + uint32_t gridDimZ_, + uint32_t blockDimX_, + uint32_t blockDimY_, + uint32_t blockDimZ_, + uint32_t sharedMemBytes_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , function( function_ ) + , gridDimX( gridDimX_ ) + , gridDimY( gridDimY_ ) + , gridDimZ( gridDimZ_ ) + , blockDimX( blockDimX_ ) + , blockDimY( blockDimY_ ) + , blockDimZ( blockDimZ_ ) + , sharedMemBytes( sharedMemBytes_ ) + , paramCount( params_.size() ) + , pParams( params_.data() ) + , extraCount( extras_.size() ) + , pExtras( extras_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CudaLaunchInfoNV & operator=( CudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CudaLaunchInfoNV & operator=( VkCudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return 
*this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setFunction( VULKAN_HPP_NAMESPACE::CudaFunctionNV function_ ) VULKAN_HPP_NOEXCEPT + { + function = function_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT + { + gridDimX = gridDimX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT + { + gridDimY = gridDimY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT + { + gridDimZ = gridDimZ_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT + { + blockDimX = blockDimX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT + { + blockDimY = blockDimY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT + { + blockDimZ = blockDimZ_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT + { + sharedMemBytes = sharedMemBytes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT + { + paramCount = paramCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT + { + pParams = pParams_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CudaLaunchInfoNV & setParams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_ ) VULKAN_HPP_NOEXCEPT + { + paramCount = params_.size(); + pParams = params_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT + { + extraCount = extraCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT + { + pExtras = pExtras_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CudaLaunchInfoNV & setExtras( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ ) VULKAN_HPP_NOEXCEPT + { + extraCount = extras_.size(); + pExtras = extras_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCudaLaunchInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCudaLaunchInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CudaLaunchInfoNV const & ) const = default; +# else + bool operator==( CudaLaunchInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) && + ( gridDimZ == 
rhs.gridDimZ ) && ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) && + ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) && + ( pExtras == rhs.pExtras ); +# endif + } + + bool operator!=( CudaLaunchInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCudaLaunchInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CudaFunctionNV function = {}; + uint32_t gridDimX = {}; + uint32_t gridDimY = {}; + uint32_t gridDimZ = {}; + uint32_t blockDimX = {}; + uint32_t blockDimY = {}; + uint32_t blockDimZ = {}; + uint32_t sharedMemBytes = {}; + size_t paramCount = {}; + const void * const * pParams = {}; + size_t extraCount = {}; + const void * const * pExtras = {}; + }; + + template <> + struct CppType + { + using Type = CudaLaunchInfoNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct CudaModuleCreateInfoNV + { + using NativeType = VkCudaModuleCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaModuleCreateInfoNV; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CudaModuleCreateInfoNV( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , dataSize( dataSize_ ) + , pData( pData_ ) + { + } + + VULKAN_HPP_CONSTEXPR CudaModuleCreateInfoNV( CudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CudaModuleCreateInfoNV( VkCudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CudaModuleCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + CudaModuleCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CudaModuleCreateInfoNV & operator=( CudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CudaModuleCreateInfoNV & operator=( VkCudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = dataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + CudaModuleCreateInfoNV & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = data_.size() * sizeof( T ); + pData = data_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCudaModuleCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCudaModuleCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dataSize, pData ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CudaModuleCreateInfoNV const & ) const = default; +# else + bool operator==( CudaModuleCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); +# endif + } + + bool operator!=( CudaModuleCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCudaModuleCreateInfoNV; + const void * pNext = {}; + size_t dataSize = {}; + const void * pData = {}; + }; + + template <> + struct CppType + { + using Type = CudaModuleCreateInfoNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct D3D12FenceSubmitInfoKHR + { + using NativeType = VkD3D12FenceSubmitInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {}, + const uint64_t * pWaitSemaphoreValues_ = {}, + uint32_t signalSemaphoreValuesCount_ = {}, + const uint64_t * pSignalSemaphoreValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ) + , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) + , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ) + , pSignalSemaphoreValues( pSignalSemaphoreValues_ ) + { + } + + VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : D3D12FenceSubmitInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , waitSemaphoreValuesCount( static_cast( waitSemaphoreValues_.size() ) ) + , pWaitSemaphoreValues( waitSemaphoreValues_.data() ) + , signalSemaphoreValuesCount( static_cast( signalSemaphoreValues_.size() ) ) + , pSignalSemaphoreValues( signalSemaphoreValues_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT + { + 
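+      // This count must match the semaphore count of the accompanying submit info; the
+      // ArrayProxy setter further below keeps the count and pointer in sync automatically.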
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct D3D12FenceSubmitInfoKHR
+  {
+    using NativeType = VkD3D12FenceSubmitInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {},
+                                                  const uint64_t * pWaitSemaphoreValues_ = {},
+                                                  uint32_t signalSemaphoreValuesCount_ = {},
+                                                  const uint64_t * pSignalSemaphoreValues_ = {},
+                                                  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , waitSemaphoreValuesCount( waitSemaphoreValuesCount_ )
+      , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
+      , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ )
+      , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : D3D12FenceSubmitInfoKHR( *reinterpret_cast<D3D12FenceSubmitInfoKHR const *>( &rhs ) )
+    {
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_,
+                             VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {},
+                             const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) )
+      , pWaitSemaphoreValues( waitSemaphoreValues_.data() )
+      , signalSemaphoreValuesCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) )
+      , pSignalSemaphoreValues( signalSemaphoreValues_.data() )
+    {
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphoreValues = pWaitSemaphoreValues_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    D3D12FenceSubmitInfoKHR &
+      setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreValuesCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
+      pWaitSemaphoreValues = waitSemaphoreValues_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphoreValues = pSignalSemaphoreValues_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    D3D12FenceSubmitInfoKHR &
+      setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreValuesCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
+      pSignalSemaphoreValues = signalSemaphoreValues_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR *>( this );
+    }
+
+    operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const uint64_t * const &, uint32_t const &, const uint64_t * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, waitSemaphoreValuesCount, pWaitSemaphoreValues, signalSemaphoreValuesCount, pSignalSemaphoreValues );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( D3D12FenceSubmitInfoKHR const & ) const = default;
+# else
+    bool operator==( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) &&
+             ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) &&
+             ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
+#  endif
+    }
+
+    bool operator!=( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
+    const void * pNext = {};
+    uint32_t waitSemaphoreValuesCount = {};
+    const uint64_t * pWaitSemaphoreValues = {};
+    uint32_t signalSemaphoreValuesCount = {};
+    const uint64_t * pSignalSemaphoreValues = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eD3D12FenceSubmitInfoKHR>
+  {
+    using Type = D3D12FenceSubmitInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct DebugMarkerMarkerInfoEXT
+  {
+    using NativeType = VkDebugMarkerMarkerInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14
+      DebugMarkerMarkerInfoEXT( const char * pMarkerName_ = {}, std::array<float, 4> const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), pMarkerName( pMarkerName_ ), color( color_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugMarkerMarkerInfoEXT( *reinterpret_cast<DebugMarkerMarkerInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPMarkerName( const char * pMarkerName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMarkerName = pMarkerName_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setColor( std::array<float, 4> color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      color = color_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( this );
+    }
+
+    operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pMarkerName, color );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    std::partial_ordering operator<=>( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+        return cmp;
+      if ( pMarkerName != rhs.pMarkerName )
+        if ( auto cmp = strcmp( pMarkerName, rhs.pMarkerName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
+      if ( auto cmp = color <=> rhs.color; cmp != 0 )
+        return cmp;
+
+      return std::partial_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pMarkerName == rhs.pMarkerName ) || ( strcmp( pMarkerName, rhs.pMarkerName ) == 0 ) ) &&
+             ( color == rhs.color );
+    }
+
+    bool operator!=( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
+    const void * pNext = {};
+    const char * pMarkerName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugMarkerMarkerInfoEXT>
+  {
+    using Type = DebugMarkerMarkerInfoEXT;
+  };
+
+  struct DebugMarkerObjectNameInfoEXT
+  {
+    using NativeType = VkDebugMarkerObjectNameInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
+                                    uint64_t object_ = {},
+                                    const char * pObjectName_ = {},
+                                    const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), objectType( objectType_ ), object( object_ ), pObjectName( pObjectName_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugMarkerObjectNameInfoEXT( *reinterpret_cast<DebugMarkerObjectNameInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
+    {
+      object = object_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjectName = pObjectName_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( this );
+    }
+
+    operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT const &, uint64_t const &, const char * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, objectType, object, pObjectName );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    std::strong_ordering operator<=>( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+        return cmp;
+      if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = object <=> rhs.object; cmp != 0 )
+        return cmp;
+      if ( pObjectName != rhs.pObjectName )
+        if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) &&
+             ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) );
+    }
+
+    bool operator!=( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+    uint64_t object = {};
+    const char * pObjectName = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugMarkerObjectNameInfoEXT>
+  {
+    using Type = DebugMarkerObjectNameInfoEXT;
+  };
+
+  struct DebugMarkerObjectTagInfoEXT
+  {
+    using NativeType = VkDebugMarkerObjectTagInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
+                                   uint64_t object_ = {},
+                                   uint64_t tagName_ = {},
+                                   size_t tagSize_ = {},
+                                   const void * pTag_ = {},
+                                   const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugMarkerObjectTagInfoEXT( *reinterpret_cast<DebugMarkerObjectTagInfoEXT const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,
+                                 uint64_t object_,
+                                 uint64_t tagName_,
+                                 VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & tag_,
+                                 const void * pNext_ = nullptr )
+      : pNext( pNext_ ), objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof( T ) ), pTag( tag_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
+    {
+      object = object_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagName = tagName_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tagSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTag = pTag_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    DebugMarkerObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & tag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tag_.size() * sizeof( T );
+      pTag = tag_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( this );
+    }
+
+    operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT const &, uint64_t const &, uint64_t const &, size_t const &, const void * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, objectType, object, tagName, tagSize, pTag );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DebugMarkerObjectTagInfoEXT const & ) const = default;
+#else
+    bool operator==( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) && ( tagName == rhs.tagName ) &&
+             ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag );
+# endif
+    }
+
+    bool operator!=( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+    uint64_t object = {};
+    uint64_t tagName = {};
+    size_t tagSize = {};
+    const void * pTag = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugMarkerObjectTagInfoEXT>
+  {
+    using Type = DebugMarkerObjectTagInfoEXT;
+  };
+
+  struct DebugReportCallbackCreateInfoEXT
+  {
+    using NativeType = VkDebugReportCallbackCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {},
+                                                           PFN_vkDebugReportCallbackEXT pfnCallback_ = {},
+                                                           void * pUserData_ = {},
+                                                           const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), flags( flags_ ), pfnCallback( pfnCallback_ ), pUserData( pUserData_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugReportCallbackCreateInfoEXT( *reinterpret_cast<DebugReportCallbackCreateInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnCallback = pfnCallback_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( this );
+    }
+
+    operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT const &, PFN_vkDebugReportCallbackEXT const &, void * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, pfnCallback, pUserData );
+    }
+#endif
+
+    bool operator==( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnCallback == rhs.pfnCallback ) && ( pUserData == rhs.pUserData );
+#endif
+    }
+
+    bool operator!=( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {};
+    PFN_vkDebugReportCallbackEXT pfnCallback = {};
+    void * pUserData = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugReportCallbackCreateInfoEXT>
+  {
+    using Type = DebugReportCallbackCreateInfoEXT;
+  };
+
+  struct DebugUtilsLabelEXT
+  {
+    using NativeType = VkDebugUtilsLabelEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14
+      DebugUtilsLabelEXT( const char * pLabelName_ = {}, std::array<float, 4> const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), pLabelName( pLabelName_ ), color( color_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugUtilsLabelEXT( *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs ) ) {}
+
+    DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPLabelName( const char * pLabelName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLabelName = pLabelName_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setColor( std::array<float, 4> color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      color = color_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsLabelEXT *>( this );
+    }
+
+    operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsLabelEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const char * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pLabelName, color );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    std::partial_ordering operator<=>( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+        return cmp;
+      if ( pLabelName != rhs.pLabelName )
+        if ( auto cmp = strcmp( pLabelName, rhs.pLabelName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater;
+      if ( auto cmp = color <=> rhs.color; cmp != 0 )
+        return cmp;
+
+      return std::partial_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pLabelName == rhs.pLabelName ) || ( strcmp( pLabelName, rhs.pLabelName ) == 0 ) ) &&
+             ( color == rhs.color );
+    }
+
+    bool operator!=( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT;
+    const void * pNext = {};
+    const char * pLabelName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
+  {
+    using Type = DebugUtilsLabelEXT;
+  };
+
+  struct DebugUtilsObjectNameInfoEXT
+  {
+    using NativeType = VkDebugUtilsObjectNameInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
+                                                      uint64_t objectHandle_ = {},
+                                                      const char * pObjectName_ = {},
+                                                      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), objectType( objectType_ ), objectHandle( objectHandle_ ), pObjectName( pObjectName_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugUtilsObjectNameInfoEXT( *reinterpret_cast<DebugUtilsObjectNameInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectHandle = objectHandle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjectName = pObjectName_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( this );
+    }
+
+    operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, const char * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, objectType, objectHandle, pObjectName );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    std::strong_ordering operator<=>( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+        return cmp;
+      if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = objectHandle <=> rhs.objectHandle; cmp != 0 )
+        return cmp;
+      if ( pObjectName != rhs.pObjectName )
+        if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) &&
+             ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) );
+    }
+
+    bool operator!=( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
+    uint64_t objectHandle = {};
+    const char * pObjectName = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsObjectNameInfoEXT>
+  {
+    using Type = DebugUtilsObjectNameInfoEXT;
+  };
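+
+  // Illustrative usage sketch (not part of the generated header): DebugUtilsObjectNameInfoEXT is
+  // what vkSetDebugUtilsObjectNameEXT consumes to attach human-readable names to handles, which
+  // validation layers and tools like RenderDoc then display. Assuming valid `device` / `image`
+  // handles and an instance created with VK_EXT_debug_utils:
+  //
+  //   vk::DebugUtilsObjectNameInfoEXT nameInfo(
+  //     vk::ObjectType::eImage,
+  //     uint64_t( static_cast<VkImage>( image ) ),  // non-dispatchable handles are passed as uint64_t
+  //     "gbuffer.albedo" );
+  //   device.setDebugUtilsObjectNameEXT( nameInfo );  // needs a dispatcher with the EXT entry point loaded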
+
+  struct DebugUtilsMessengerCallbackDataEXT
+  {
+    using NativeType = VkDebugUtilsMessengerCallbackDataEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {},
+                                                                const char * pMessageIdName_ = {},
+                                                                int32_t messageIdNumber_ = {},
+                                                                const char * pMessage_ = {},
+                                                                uint32_t queueLabelCount_ = {},
+                                                                const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ = {},
+                                                                uint32_t cmdBufLabelCount_ = {},
+                                                                const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ = {},
+                                                                uint32_t objectCount_ = {},
+                                                                const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ = {},
+                                                                const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , pMessageIdName( pMessageIdName_ )
+      , messageIdNumber( messageIdNumber_ )
+      , pMessage( pMessage_ )
+      , queueLabelCount( queueLabelCount_ )
+      , pQueueLabels( pQueueLabels_ )
+      , cmdBufLabelCount( cmdBufLabelCount_ )
+      , pCmdBufLabels( pCmdBufLabels_ )
+      , objectCount( objectCount_ )
+      , pObjects( pObjects_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast<DebugUtilsMessengerCallbackDataEXT const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DebugUtilsMessengerCallbackDataEXT(
+      VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_,
+      const char * pMessageIdName_,
+      int32_t messageIdNumber_,
+      const char * pMessage_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ = {},
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ = {},
+      const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , pMessageIdName( pMessageIdName_ )
+      , messageIdNumber( messageIdNumber_ )
+      , pMessage( pMessage_ )
+      , queueLabelCount( static_cast<uint32_t>( queueLabels_.size() ) )
+      , pQueueLabels( queueLabels_.data() )
+      , cmdBufLabelCount( static_cast<uint32_t>( cmdBufLabels_.size() ) )
+      , pCmdBufLabels( cmdBufLabels_.data() )
+      , objectCount( static_cast<uint32_t>( objects_.size() ) )
+      , pObjects( objects_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
+      setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMessageIdName = pMessageIdName_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageIdNumber = messageIdNumber_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMessage = pMessage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueLabelCount = queueLabelCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
+      setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueLabels = pQueueLabels_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DebugUtilsMessengerCallbackDataEXT &
+      setQueueLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueLabelCount = static_cast<uint32_t>( queueLabels_.size() );
+      pQueueLabels = queueLabels_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cmdBufLabelCount = cmdBufLabelCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
+      setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCmdBufLabels = pCmdBufLabels_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DebugUtilsMessengerCallbackDataEXT &
+      setCmdBufLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cmdBufLabelCount = static_cast<uint32_t>( cmdBufLabels_.size() );
+      pCmdBufLabels = cmdBufLabels_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectCount = objectCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
+      setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjects = pObjects_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DebugUtilsMessengerCallbackDataEXT &
+      setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectCount = static_cast<uint32_t>( objects_.size() );
+      pObjects = objects_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( this );
+    }
+
+    operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT const &, const char * const &, int32_t const &, const char * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie(
+        sType, pNext, flags, pMessageIdName, messageIdNumber, pMessage, queueLabelCount, pQueueLabels, cmdBufLabelCount, pCmdBufLabels, objectCount, pObjects );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    std::strong_ordering operator<=>( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+        return cmp;
+      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
+        return cmp;
+      if ( pMessageIdName != rhs.pMessageIdName )
+        if ( auto cmp = strcmp( pMessageIdName, rhs.pMessageIdName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = messageIdNumber <=> rhs.messageIdNumber; cmp != 0 )
+        return cmp;
+      if ( pMessage != rhs.pMessage )
+        if ( auto cmp = strcmp( pMessage, rhs.pMessage ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = queueLabelCount <=> rhs.queueLabelCount; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pQueueLabels <=> rhs.pQueueLabels; cmp != 0 )
+        return cmp;
+      if ( auto cmp = cmdBufLabelCount <=> rhs.cmdBufLabelCount; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pCmdBufLabels <=> rhs.pCmdBufLabels; cmp != 0 )
+        return cmp;
+      if ( auto cmp = objectCount <=> rhs.objectCount; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pObjects <=> rhs.pObjects; cmp != 0 )
+        return cmp;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( ( pMessageIdName == rhs.pMessageIdName ) || ( strcmp( pMessageIdName, rhs.pMessageIdName ) == 0 ) ) &&
+             ( messageIdNumber == rhs.messageIdNumber ) && ( ( pMessage == rhs.pMessage ) || ( strcmp( pMessage, rhs.pMessage ) == 0 ) ) &&
+             ( queueLabelCount == rhs.queueLabelCount ) && ( pQueueLabels == rhs.pQueueLabels ) && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) &&
+             ( pCmdBufLabels == rhs.pCmdBufLabels ) && ( objectCount == rhs.objectCount ) && ( pObjects == rhs.pObjects );
+    }
+
+    bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {};
+    const char * pMessageIdName = {};
+    int32_t messageIdNumber = {};
+    const char * pMessage = {};
+    uint32_t queueLabelCount = {};
+    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels = {};
+    uint32_t cmdBufLabelCount = {};
+    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels = {};
+    uint32_t objectCount = {};
+    const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCallbackDataEXT>
+  {
+    using Type = DebugUtilsMessengerCallbackDataEXT;
+  };
+
+  struct DebugUtilsMessengerCreateInfoEXT
+  {
+    using NativeType = VkDebugUtilsMessengerCreateInfoEXT;
+
+    static const bool allowDuplicate = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {},
+                                                           VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {},
+                                                           VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {},
+                                                           PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {},
+                                                           void * pUserData_ = {},
+                                                           const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , messageSeverity( messageSeverity_ )
+      , messageType( messageType_ )
+      , pfnUserCallback( pfnUserCallback_ )
+      , pUserData( pUserData_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast<DebugUtilsMessengerCreateInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT &
+      setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageSeverity = messageSeverity_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT &
+      setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageType = messageType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnUserCallback = pfnUserCallback_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( this );
+    }
+
+    operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT const &, PFN_vkDebugUtilsMessengerCallbackEXT const &, void * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, messageSeverity, messageType, pfnUserCallback, pUserData );
+    }
+#endif
+
+    bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( messageSeverity == rhs.messageSeverity ) &&
+             ( messageType == rhs.messageType ) && ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData );
+#endif
+    }
+
+    bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {};
+    PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {};
+    void * pUserData = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCreateInfoEXT>
+  {
+    using Type = DebugUtilsMessengerCreateInfoEXT;
+  };
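+
+  // Illustrative usage sketch (not part of the generated header): a typical fill-in of the struct
+  // above, assuming a free function `debugCallback` with the PFN_vkDebugUtilsMessengerCallbackEXT
+  // signature and an instance created with VK_EXT_debug_utils enabled:
+  //
+  //   vk::DebugUtilsMessengerCreateInfoEXT messengerInfo(
+  //     {},  // flags are reserved
+  //     vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
+  //     vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
+  //     &debugCallback );
+  //   auto messenger = instance.createDebugUtilsMessengerEXT( messengerInfo );  // EXT entry point must be loaded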
+
+  struct DebugUtilsObjectTagInfoEXT
+  {
+    using NativeType = VkDebugUtilsObjectTagInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
+                                                     uint64_t objectHandle_ = {},
+                                                     uint64_t tagName_ = {},
+                                                     size_t tagSize_ = {},
+                                                     const void * pTag_ = {},
+                                                     const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DebugUtilsObjectTagInfoEXT( *reinterpret_cast<DebugUtilsObjectTagInfoEXT const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
+                                uint64_t objectHandle_,
+                                uint64_t tagName_,
+                                VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & tag_,
+                                const void * pNext_ = nullptr )
+      : pNext( pNext_ ), objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof( T ) ), pTag( tag_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectHandle = objectHandle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagName = tagName_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tagSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTag = pTag_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    DebugUtilsObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & tag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tag_.size() * sizeof( T );
+      pTag = tag_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( this );
+    }
+
+    operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ObjectType const &, uint64_t const &, uint64_t const &, size_t const &, const void * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, objectType, objectHandle, tagName, tagSize, pTag );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default;
+#else
+    bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) &&
+             ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag );
+# endif
+    }
+
+    bool operator!=( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
+    uint64_t objectHandle = {};
+    uint64_t tagName = {};
+    size_t tagSize = {};
+    const void * pTag = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDebugUtilsObjectTagInfoEXT>
+  {
+    using Type = DebugUtilsObjectTagInfoEXT;
+  };
+
+  struct DecompressMemoryRegionNV
+  {
+    using NativeType = VkDecompressMemoryRegionNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {},
+                                                   VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ = {},
+                                                   VULKAN_HPP_NAMESPACE::DeviceSize compressedSize_ = {},
+                                                   VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize_ = {},
+                                                   VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod_ = {} ) VULKAN_HPP_NOEXCEPT
+      : srcAddress( srcAddress_ )
+      , dstAddress( dstAddress_ )
+      , compressedSize( compressedSize_ )
+      , decompressedSize( decompressedSize_ )
+      , decompressionMethod( decompressionMethod_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DecompressMemoryRegionNV( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DecompressMemoryRegionNV( *reinterpret_cast<DecompressMemoryRegionNV const *>( &rhs ) )
+    {
+    }
+
+    DecompressMemoryRegionNV & operator=( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DecompressMemoryRegionNV & operator=( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAddress = srcAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDstAddress( VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAddress = dstAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setCompressedSize( VULKAN_HPP_NAMESPACE::DeviceSize compressedSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compressedSize = compressedSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDecompressedSize( VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      decompressedSize = decompressedSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV &
+      setDecompressionMethod( VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      decompressionMethod = decompressionMethod_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDecompressMemoryRegionNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDecompressMemoryRegionNV *>( this );
+    }
+
+    operator VkDecompressMemoryRegionNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDecompressMemoryRegionNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( srcAddress, dstAddress, compressedSize, decompressedSize, decompressionMethod );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DecompressMemoryRegionNV const & ) const = default;
+#else
+    bool operator==( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( compressedSize == rhs.compressedSize ) &&
+             ( decompressedSize == rhs.decompressedSize ) && ( decompressionMethod == rhs.decompressionMethod );
+# endif
+    }
+
+    bool operator!=( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize compressedSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize = {};
+    VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod = {};
+  };
+
+  struct DedicatedAllocationBufferCreateInfoNV
+  {
+    using NativeType = VkDedicatedAllocationBufferCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {},
+                                                                const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), dedicatedAllocation( dedicatedAllocation_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DedicatedAllocationBufferCreateInfoNV( *reinterpret_cast<DedicatedAllocationBufferCreateInfoNV const *>( &rhs ) )
+    {
+    }
+
+    DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV &
+      setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dedicatedAllocation = dedicatedAllocation_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV *>( this );
+    }
+
+    operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, dedicatedAllocation );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) const = default;
+#else
+    bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation );
+# endif
+    }
+
+    bool operator!=( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDedicatedAllocationBufferCreateInfoNV>
+  {
+    using Type = DedicatedAllocationBufferCreateInfoNV;
+  };
+
+  struct DedicatedAllocationImageCreateInfoNV
+  {
+    using NativeType = VkDedicatedAllocationImageCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {},
+                                                               const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), dedicatedAllocation( dedicatedAllocation_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DedicatedAllocationImageCreateInfoNV( *reinterpret_cast<DedicatedAllocationImageCreateInfoNV const *>( &rhs ) )
+    {
+    }
+
+    DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV &
+      setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dedicatedAllocation = dedicatedAllocation_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV *>( this );
+    }
+
+    operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, dedicatedAllocation );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DedicatedAllocationImageCreateInfoNV const & ) const = default;
+#else
+    bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation );
+# endif
+    }
+
+    bool operator!=( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDedicatedAllocationImageCreateInfoNV>
+  {
+    using Type = DedicatedAllocationImageCreateInfoNV;
+  };
+
+  struct DedicatedAllocationMemoryAllocateInfoNV
+  {
+    using NativeType = VkDedicatedAllocationMemoryAllocateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {},
+                                                                  VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+                                                                  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), image( image_ ), buffer( buffer_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DedicatedAllocationMemoryAllocateInfoNV( *reinterpret_cast<DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs ) )
+    {
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
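+
+  // Illustrative usage sketch (not part of the generated header): the three NV dedicated-allocation
+  // structs are consumed through the pNext chain. Pairing an image created with
+  // DedicatedAllocationImageCreateInfoNV and memory bound via DedicatedAllocationMemoryAllocateInfoNV,
+  // with the usual image parameters and memoryTypeIndex selection elided:
+  //
+  //   vk::DedicatedAllocationImageCreateInfoNV dedicatedImage( VK_TRUE );
+  //   vk::ImageCreateInfo imageInfo = makeImageInfo();   // hypothetical helper
+  //   imageInfo.setPNext( &dedicatedImage );
+  //   vk::Image image = device.createImage( imageInfo );
+  //
+  //   vk::DedicatedAllocationMemoryAllocateInfoNV dedicatedAlloc( image, {} );  // image or buffer, not both
+  //   vk::MemoryAllocateInfo allocInfo( device.getImageMemoryRequirements( image ).size, memoryTypeIndex );
+  //   allocInfo.setPNext( &dedicatedAlloc );
+  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );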
+  struct DedicatedAllocationMemoryAllocateInfoNV
+  {
+    using NativeType = VkDedicatedAllocationMemoryAllocateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {},
+                                                                  VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+                                                                  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , image( image_ )
+      , buffer( buffer_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DedicatedAllocationMemoryAllocateInfoNV( *reinterpret_cast<DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs ) )
+    {
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV *>( this );
+    }
+
+    operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::Buffer const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, image, buffer );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const & ) const = default;
+#else
+    bool operator==( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer );
+# endif
+    }
+
+    bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDedicatedAllocationMemoryAllocateInfoNV>
+  {
+    using Type = DedicatedAllocationMemoryAllocateInfoNV;
+  };
+
+  struct MemoryBarrier2
+  {
+    using NativeType = VkMemoryBarrier2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {},
+                                         VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {},
+                                         VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {},
+                                         VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {},
+                                         const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , srcStageMask( srcStageMask_ )
+      , srcAccessMask( srcAccessMask_ )
+      , dstStageMask( dstStageMask_ )
+      , dstAccessMask( dstAccessMask_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR MemoryBarrier2( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryBarrier2( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrier2( *reinterpret_cast<MemoryBarrier2 const *>( &rhs ) ) {}
+
+    MemoryBarrier2 & operator=( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryBarrier2 & operator=( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryBarrier2 *>( this );
+    }
+
+    operator VkMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryBarrier2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryBarrier2 const & ) const = default;
+#else
+    bool operator==( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
+             ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask );
+# endif
+    }
+
+    bool operator!=( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryBarrier2>
+  {
+    using Type = MemoryBarrier2;
+  };
+
+  using MemoryBarrier2KHR = MemoryBarrier2;
+
+  struct ImageSubresourceRange
+  {
+    using NativeType = VkImageSubresourceRange;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
+                                                uint32_t baseMipLevel_ = {},
+                                                uint32_t levelCount_ = {},
+                                                uint32_t baseArrayLayer_ = {},
+                                                uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
+      : aspectMask( aspectMask_ )
+      , baseMipLevel( baseMipLevel_ )
+      , levelCount( levelCount_ )
+      , baseArrayLayer( baseArrayLayer_ )
+      , layerCount( layerCount_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageSubresourceRange( *reinterpret_cast<ImageSubresourceRange const *>( &rhs ) )
+    {
+    }
+
+    ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseMipLevel = baseMipLevel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      levelCount = levelCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSubresourceRange *>( this );
+    }
+
+    operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSubresourceRange *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( aspectMask, baseMipLevel, levelCount, baseArrayLayer, layerCount );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageSubresourceRange const & ) const = default;
+#else
+    bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( aspectMask == rhs.aspectMask ) && ( baseMipLevel == rhs.baseMipLevel ) && ( levelCount == rhs.levelCount ) &&
+             ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
+# endif
+    }
+
+    bool operator!=( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    uint32_t baseMipLevel = {};
+    uint32_t levelCount = {};
+    uint32_t baseArrayLayer = {};
+    uint32_t layerCount = {};
+  };
+
+  struct ImageMemoryBarrier2
+  {
+    using NativeType = VkImageMemoryBarrier2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {},
+                                              VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {},
+                                              VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {},
+                                              VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {},
+                                              VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                              VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                              uint32_t srcQueueFamilyIndex_ = {},
+                                              uint32_t dstQueueFamilyIndex_ = {},
+                                              VULKAN_HPP_NAMESPACE::Image image_ = {},
+                                              VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {},
+                                              const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , srcStageMask( srcStageMask_ )
+      , srcAccessMask( srcAccessMask_ )
+      , dstStageMask( dstStageMask_ )
+      , dstAccessMask( dstAccessMask_ )
+      , oldLayout( oldLayout_ )
+      , newLayout( newLayout_ )
+      , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
+      , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
+      , image( image_ )
+      , subresourceRange( subresourceRange_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageMemoryBarrier2( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier2( *reinterpret_cast<ImageMemoryBarrier2 const *>( &rhs ) )
+    {
+    }
+
+    ImageMemoryBarrier2 & operator=( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageMemoryBarrier2 & operator=( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      oldLayout = oldLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      newLayout = newLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 &
+      setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subresourceRange = subresourceRange_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkImageMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageMemoryBarrier2 *>( this );
+    }
+
+    operator VkImageMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageMemoryBarrier2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, VULKAN_HPP_NAMESPACE::AccessFlags2 const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::ImageLayout const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       srcStageMask,
+                       srcAccessMask,
+                       dstStageMask,
+                       dstAccessMask,
+                       oldLayout,
+                       newLayout,
+                       srcQueueFamilyIndex,
+                       dstQueueFamilyIndex,
+                       image,
+                       subresourceRange );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageMemoryBarrier2 const & ) const = default;
+#else
+    bool operator==( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
+             ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) &&
+             ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) &&
+             ( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange );
+# endif
+    }
+
+    bool operator!=( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t srcQueueFamilyIndex = {};
+    uint32_t dstQueueFamilyIndex = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageMemoryBarrier2>
+  {
+    using Type = ImageMemoryBarrier2;
+  };
+
+  using ImageMemoryBarrier2KHR = ImageMemoryBarrier2;
+
+  struct DependencyInfo
+  {
+    using NativeType = VkDependencyInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {},
+                                         uint32_t memoryBarrierCount_ = {},
+                                         const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ = {},
+                                         uint32_t bufferMemoryBarrierCount_ = {},
+                                         const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ = {},
+                                         uint32_t imageMemoryBarrierCount_ = {},
+                                         const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ = {},
+                                         const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , dependencyFlags( dependencyFlags_ )
+      , memoryBarrierCount( memoryBarrierCount_ )
+      , pMemoryBarriers( pMemoryBarriers_ )
+      , bufferMemoryBarrierCount( bufferMemoryBarrierCount_ )
+      , pBufferMemoryBarriers( pBufferMemoryBarriers_ )
+      , imageMemoryBarrierCount( imageMemoryBarrierCount_ )
+      , pImageMemoryBarriers( pImageMemoryBarriers_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DependencyInfo( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DependencyInfo( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DependencyInfo( *reinterpret_cast<DependencyInfo const *>( &rhs ) ) {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_,
+                    VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2> const & memoryBarriers_,
+                    VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2> const & bufferMemoryBarriers_ = {},
+                    VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2> const & imageMemoryBarriers_ = {},
+                    const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , dependencyFlags( dependencyFlags_ )
+      , memoryBarrierCount( static_cast<uint32_t>( memoryBarriers_.size() ) )
+      , pMemoryBarriers( memoryBarriers_.data() )
+      , bufferMemoryBarrierCount( static_cast<uint32_t>( bufferMemoryBarriers_.size() ) )
+      , pBufferMemoryBarriers( bufferMemoryBarriers_.data() )
+      , imageMemoryBarrierCount( static_cast<uint32_t>( imageMemoryBarriers_.size() ) )
+      , pImageMemoryBarriers( imageMemoryBarriers_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    DependencyInfo & operator=( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DependencyInfo & operator=( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DependencyInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyFlags = dependencyFlags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryBarrierCount = memoryBarrierCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMemoryBarriers = pMemoryBarriers_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DependencyInfo &
+      setMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2> const & memoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryBarrierCount = static_cast<uint32_t>( memoryBarriers_.size() );
+      pMemoryBarriers = memoryBarriers_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferMemoryBarrierCount = bufferMemoryBarrierCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo &
+      setPBufferMemoryBarriers( const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBufferMemoryBarriers = pBufferMemoryBarriers_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DependencyInfo & setBufferMemoryBarriers(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2> const & bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferMemoryBarrierCount = static_cast<uint32_t>( bufferMemoryBarriers_.size() );
+      pBufferMemoryBarriers = bufferMemoryBarriers_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageMemoryBarrierCount = imageMemoryBarrierCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DependencyInfo &
+      setPImageMemoryBarriers( const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageMemoryBarriers = pImageMemoryBarriers_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DependencyInfo & setImageMemoryBarriers(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2> const & imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageMemoryBarrierCount = static_cast<uint32_t>( imageMemoryBarriers_.size() );
+      pImageMemoryBarriers = imageMemoryBarriers_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDependencyInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDependencyInfo *>( this );
+    }
+
+    operator VkDependencyInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDependencyInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DependencyFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       dependencyFlags,
+                       memoryBarrierCount,
+                       pMemoryBarriers,
+                       bufferMemoryBarrierCount,
+                       pBufferMemoryBarriers,
+                       imageMemoryBarrierCount,
+                       pImageMemoryBarriers );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DependencyInfo const & ) const = default;
+#else
+    bool operator==( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dependencyFlags == rhs.dependencyFlags ) &&
+             ( memoryBarrierCount == rhs.memoryBarrierCount ) && ( pMemoryBarriers == rhs.pMemoryBarriers ) &&
+             ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount ) && ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers ) &&
+             ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount ) && ( pImageMemoryBarriers == rhs.pImageMemoryBarriers );
+# endif
+    }
+
+    bool operator!=( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDependencyInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
+    uint32_t memoryBarrierCount = {};
+    const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers = {};
+    uint32_t bufferMemoryBarrierCount = {};
+    const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers = {};
+    uint32_t imageMemoryBarrierCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDependencyInfo>
+  {
+    using Type = DependencyInfo;
+  };
+
+  using DependencyInfoKHR = DependencyInfo;
+
+  struct DepthBiasInfoEXT
+  {
+    using NativeType = VkDepthBiasInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDepthBiasInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DepthBiasInfoEXT( float depthBiasConstantFactor_ = {},
+                                           float depthBiasClamp_ = {},
+                                           float depthBiasSlopeFactor_ = {},
+                                           const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , depthBiasConstantFactor( depthBiasConstantFactor_ )
+      , depthBiasClamp( depthBiasClamp_ )
+      , depthBiasSlopeFactor( depthBiasSlopeFactor_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DepthBiasInfoEXT( DepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DepthBiasInfoEXT( VkDepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DepthBiasInfoEXT( *reinterpret_cast<DepthBiasInfoEXT const *>( &rhs ) ) {}
+
+    DepthBiasInfoEXT & operator=( DepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DepthBiasInfoEXT & operator=( VkDepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasConstantFactor = depthBiasConstantFactor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasClamp = depthBiasClamp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasSlopeFactor = depthBiasSlopeFactor_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDepthBiasInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDepthBiasInfoEXT *>( this );
+    }
+
+    operator VkDepthBiasInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDepthBiasInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, float const &, float const &, float const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DepthBiasInfoEXT const & ) const = default;
+#else
+    bool operator==( DepthBiasInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) &&
+             ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor );
+# endif
+    }
+
+    bool operator!=( DepthBiasInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDepthBiasInfoEXT;
+    const void * pNext = {};
+    float depthBiasConstantFactor = {};
+    float depthBiasClamp = {};
+    float depthBiasSlopeFactor = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDepthBiasInfoEXT>
+  {
+    using Type = DepthBiasInfoEXT;
+  };
+
+  struct DepthBiasRepresentationInfoEXT
+  {
+    using NativeType = VkDepthBiasRepresentationInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDepthBiasRepresentationInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DepthBiasRepresentationInfoEXT( VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT depthBiasRepresentation_ =
+                                                           VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT::eLeastRepresentableValueFormat,
+                                                         VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ = {},
+                                                         const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , depthBiasRepresentation( depthBiasRepresentation_ )
+      , depthBiasExact( depthBiasExact_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DepthBiasRepresentationInfoEXT( DepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DepthBiasRepresentationInfoEXT( VkDepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DepthBiasRepresentationInfoEXT( *reinterpret_cast<DepthBiasRepresentationInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    DepthBiasRepresentationInfoEXT & operator=( DepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DepthBiasRepresentationInfoEXT & operator=( VkDepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT &
+      setDepthBiasRepresentation( VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT depthBiasRepresentation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasRepresentation = depthBiasRepresentation_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT & setDepthBiasExact( VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasExact = depthBiasExact_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDepthBiasRepresentationInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDepthBiasRepresentationInfoEXT *>( this );
+    }
+
+    operator VkDepthBiasRepresentationInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDepthBiasRepresentationInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, depthBiasRepresentation, depthBiasExact );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DepthBiasRepresentationInfoEXT const & ) const = default;
+#else
+    bool operator==( DepthBiasRepresentationInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthBiasRepresentation == rhs.depthBiasRepresentation ) &&
+             ( depthBiasExact == rhs.depthBiasExact );
+# endif
+    }
+
+    bool operator!=( DepthBiasRepresentationInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDepthBiasRepresentationInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT depthBiasRepresentation = VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT::eLeastRepresentableValueFormat;
+    VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDepthBiasRepresentationInfoEXT>
+  {
+    using Type = DepthBiasRepresentationInfoEXT;
+  };
+
+  struct DescriptorAddressInfoEXT
+  {
+    using NativeType = VkDescriptorAddressInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorAddressInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorAddressInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress address_ = {},
+                                                   VULKAN_HPP_NAMESPACE::DeviceSize range_ = {},
+                                                   VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                   void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , address( address_ )
+      , range( range_ )
+      , format( format_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DescriptorAddressInfoEXT( DescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorAddressInfoEXT( VkDescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorAddressInfoEXT( *reinterpret_cast<DescriptorAddressInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    DescriptorAddressInfoEXT & operator=( DescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorAddressInfoEXT & operator=( VkDescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setAddress( VULKAN_HPP_NAMESPACE::DeviceAddress address_ ) VULKAN_HPP_NOEXCEPT
+    {
+      address = address_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+    {
+      range = range_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDescriptorAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorAddressInfoEXT *>( this );
+    }
+
+    operator VkDescriptorAddressInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorAddressInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Format const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, address, range, format );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorAddressInfoEXT const & ) const = default;
+#else
+    bool operator==( DescriptorAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( address == rhs.address ) && ( range == rhs.range ) && ( format == rhs.format );
+# endif
+    }
+
+    bool operator!=( DescriptorAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorAddressInfoEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress address = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize range = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorAddressInfoEXT>
+  {
+    using Type = DescriptorAddressInfoEXT;
+  };
+
+  struct DescriptorBufferBindingInfoEXT
+  {
+    using NativeType = VkDescriptorBufferBindingInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorBufferBindingInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorBufferBindingInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress address_ = {},
+                                                         VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
+                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , address( address_ )
+      , usage( usage_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DescriptorBufferBindingInfoEXT( DescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorBufferBindingInfoEXT( VkDescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorBufferBindingInfoEXT( *reinterpret_cast<DescriptorBufferBindingInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    DescriptorBufferBindingInfoEXT & operator=( DescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorBufferBindingInfoEXT & operator=( VkDescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setAddress( VULKAN_HPP_NAMESPACE::DeviceAddress address_ ) VULKAN_HPP_NOEXCEPT
+    {
+      address = address_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDescriptorBufferBindingInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( this );
+    }
+
+    operator VkDescriptorBufferBindingInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorBufferBindingInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::BufferUsageFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, address, usage );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorBufferBindingInfoEXT const & ) const = default;
+#else
+    bool operator==( DescriptorBufferBindingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( address == rhs.address ) && ( usage == rhs.usage );
+# endif
+    }
+
+    bool operator!=( DescriptorBufferBindingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorBufferBindingInfoEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress address = {};
+    VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorBufferBindingInfoEXT>
+  {
+    using Type = DescriptorBufferBindingInfoEXT;
+  };
+
+  struct DescriptorBufferBindingPushDescriptorBufferHandleEXT
+  {
+    using NativeType = VkDescriptorBufferBindingPushDescriptorBufferHandleEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorBufferBindingPushDescriptorBufferHandleEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+                                                                               void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , buffer( buffer_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+    DescriptorBufferBindingPushDescriptorBufferHandleEXT( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorBufferBindingPushDescriptorBufferHandleEXT( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorBufferBindingPushDescriptorBufferHandleEXT( *reinterpret_cast<DescriptorBufferBindingPushDescriptorBufferHandleEXT const *>( &rhs ) )
+    {
+    }
+
+    DescriptorBufferBindingPushDescriptorBufferHandleEXT &
+      operator=( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorBufferBindingPushDescriptorBufferHandleEXT & operator=( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorBufferBindingPushDescriptorBufferHandleEXT *>( this );
+    }
+
+    operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorBufferBindingPushDescriptorBufferHandleEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Buffer const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, buffer );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & ) const = default;
+#else
+    bool operator==( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
+# endif
+    }
+
+    bool operator!=( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT>
+  {
+    using Type = DescriptorBufferBindingPushDescriptorBufferHandleEXT;
+  };
+
+  struct DescriptorBufferInfo
+  {
+    using NativeType = VkDescriptorBufferInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+                                               VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+                                               VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+      , offset( offset_ )
+      , range( range_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorBufferInfo( *reinterpret_cast<DescriptorBufferInfo const *>( &rhs ) )
+    {
+    }
+
+    DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+    {
+      range = range_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorBufferInfo *>( this );
+    }
+
+    operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorBufferInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( buffer, offset, range );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorBufferInfo const & ) const = default;
+#else
+    bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( range == rhs.range );
+# endif
+    }
+
+    bool operator!=( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize range = {};
+  };
+
+  struct DescriptorImageInfo
+  {
+    using NativeType = VkDescriptorImageInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+    DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {},
+                         VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
+                         VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : sampler( sampler_ )
+      , imageView( imageView_ )
+      , imageLayout( imageLayout_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) )
+    {
+    }
+
+    DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampler = sampler_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageLayout = imageLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorImageInfo *>( this );
+    }
+
+    operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorImageInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::Sampler const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sampler, imageView, imageLayout );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorImageInfo const & ) const = default;
+#else
+    bool operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sampler == rhs.sampler ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout );
+# endif
+    }
+
+    bool operator!=( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+  };
+
+  union DescriptorDataEXT
+  {
+    using NativeType = VkDescriptorDataEXT;
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const VULKAN_HPP_NAMESPACE::Sampler * pSampler_ = {} ) : pSampler( pSampler_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pDescriptorImageInfo_ )
+      : pCombinedImageSampler( pDescriptorImageInfo_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pDescriptorAddressInfoEXT_ )
+      : pUniformTexelBuffer( pDescriptorAddressInfoEXT_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ ) : accelerationStructure( accelerationStructure_ ) {}
+#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPSampler( const VULKAN_HPP_NAMESPACE::Sampler * pSampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSampler = pSampler_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT &
+      setPCombinedImageSampler( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pCombinedImageSampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCombinedImageSampler = pCombinedImageSampler_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT &
+      setPInputAttachmentImage( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pInputAttachmentImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInputAttachmentImage = pInputAttachmentImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPSampledImage( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pSampledImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSampledImage = pSampledImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageImage( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pStorageImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStorageImage = pStorageImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT &
+      setPUniformTexelBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformTexelBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUniformTexelBuffer = pUniformTexelBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT &
+      setPStorageTexelBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageTexelBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStorageTexelBuffer = pStorageTexelBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPUniformBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUniformBuffer = pUniformBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStorageBuffer = pStorageBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setAccelerationStructure( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructure = accelerationStructure_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+    operator VkDescriptorDataEXT const &() const
+    {
+      return *reinterpret_cast<const VkDescriptorDataEXT *>( this );
+    }
+
+    operator VkDescriptorDataEXT &()
+    {
+      return *reinterpret_cast<VkDescriptorDataEXT *>( this );
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    const VULKAN_HPP_NAMESPACE::Sampler * pSampler;
+    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pCombinedImageSampler;
+    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pInputAttachmentImage;
+    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pSampledImage;
+    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pStorageImage;
+    const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformTexelBuffer;
+    const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageTexelBuffer;
+    const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformBuffer;
+    const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageBuffer;
+    VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure;
+#else
+    const VkSampler * pSampler;
+    const VkDescriptorImageInfo * pCombinedImageSampler;
+    const VkDescriptorImageInfo * pInputAttachmentImage;
+    const VkDescriptorImageInfo * pSampledImage;
+    const VkDescriptorImageInfo * pStorageImage;
+    const VkDescriptorAddressInfoEXT * pUniformTexelBuffer;
+    const VkDescriptorAddressInfoEXT * pStorageTexelBuffer;
+    const VkDescriptorAddressInfoEXT * pUniformBuffer;
+    const VkDescriptorAddressInfoEXT * pStorageBuffer;
+    VkDeviceAddress accelerationStructure;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+
+  struct DescriptorGetInfoEXT
+  {
+    using NativeType = VkDescriptorGetInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorGetInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+                                                  VULKAN_HPP_NAMESPACE::DescriptorDataEXT data_ = {},
+                                                  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , type( type_ )
+      , data( data_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT( DescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorGetInfoEXT( VkDescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorGetInfoEXT( *reinterpret_cast<DescriptorGetInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    DescriptorGetInfoEXT & operator=( DescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorGetInfoEXT & operator=( VkDescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setData( VULKAN_HPP_NAMESPACE::DescriptorDataEXT const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDescriptorGetInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorGetInfoEXT *>( this );
+    }
+
+    operator VkDescriptorGetInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorGetInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorType const &, VULKAN_HPP_NAMESPACE::DescriptorDataEXT const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, type, data );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorGetInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    VULKAN_HPP_NAMESPACE::DescriptorDataEXT data = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorGetInfoEXT>
+  {
+    using Type = DescriptorGetInfoEXT;
+  };
+
+  struct DescriptorPoolSize
+  {
+    using NativeType = VkDescriptorPoolSize;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+                                             uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , descriptorCount( descriptorCount_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorPoolSize( *reinterpret_cast<DescriptorPoolSize const *>( &rhs ) ) {}
+
+    DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorPoolSize *>( this );
+    }
+
+    operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorPoolSize *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::DescriptorType const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( type, descriptorCount );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorPoolSize const & ) const = default;
+#else
+    bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( type == rhs.type ) && ( descriptorCount == rhs.descriptorCount );
+# endif
+    }
+
+    bool operator!=( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    uint32_t descriptorCount = {};
+  };
+
+  struct DescriptorPoolCreateInfo
+  {
+    using NativeType = VkDescriptorPoolCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {},
+                                                   uint32_t maxSets_ = {},
+                                                   uint32_t poolSizeCount_ = {},
+                                                   const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {},
+                                                   const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , maxSets( maxSets_ )
+      , poolSizeCount( poolSizeCount_ )
+      , pPoolSizes( pPoolSizes_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DescriptorPoolCreateInfo( *reinterpret_cast<DescriptorPoolCreateInfo const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_,
+                              uint32_t maxSets_,
+                              VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_,
+                              const void * pNext_ = nullptr )
+      : pNext( pNext_ ), flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast<uint32_t>( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxSets = maxSets_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      poolSizeCount = poolSizeCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPoolSizes = pPoolSizes_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DescriptorPoolCreateInfo &
+      setPoolSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      poolSizeCount = static_cast<uint32_t>( poolSizes_.size() );
+      pPoolSizes = poolSizes_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorPoolCreateInfo *>( this );
+    }
+
+    operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorPoolCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, maxSets, poolSizeCount, pPoolSizes );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DescriptorPoolCreateInfo const & ) const = default;
+#else
+    bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( maxSets == rhs.maxSets ) &&
+             ( poolSizeCount == rhs.poolSizeCount ) && ( pPoolSizes == rhs.pPoolSizes );
+# endif
+    }
+
+    bool operator!=( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {};
+    uint32_t maxSets = {};
+    uint32_t poolSizeCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDescriptorPoolCreateInfo>
+  {
+    using Type = DescriptorPoolCreateInfo;
+  };
+
StructureType::eDescriptorPoolInlineUniformBlockCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( uint32_t maxInlineUniformBlockBindings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ ) + { + } + + VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorPoolInlineUniformBlockCreateInfo( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorPoolInlineUniformBlockCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorPoolInlineUniformBlockCreateInfo & operator=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorPoolInlineUniformBlockCreateInfo & operator=( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & + setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT + { + maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDescriptorPoolInlineUniformBlockCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorPoolInlineUniformBlockCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxInlineUniformBlockBindings ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfo const & ) const = default; +#else + bool operator==( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings ); +# endif + } + + bool operator!=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo; + const void * pNext = {}; + uint32_t maxInlineUniformBlockBindings = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorPoolInlineUniformBlockCreateInfo; + }; + + using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo; + + struct DescriptorSetAllocateInfo + { + using NativeType = VkDescriptorSetAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( 
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, + uint32_t descriptorSetCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , descriptorPool( descriptorPool_ ) + , descriptorSetCount( descriptorSetCount_ ) + , pSetLayouts( pSetLayouts_ ) + { + } + + VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetAllocateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT + { + descriptorPool = descriptorPool_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = descriptorSetCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pSetLayouts = pSetLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetAllocateInfo & + setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorPool, descriptorSetCount, pSetLayouts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetAllocateInfo const & ) const = default; +#else + bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPool == rhs.descriptorPool ) && ( descriptorSetCount == 
rhs.descriptorSetCount ) && + ( pSetLayouts == rhs.pSetLayouts ); +# endif + } + + bool operator!=( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {}; + uint32_t descriptorSetCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorSetAllocateInfo; + }; + + struct DescriptorSetBindingReferenceVALVE + { + using NativeType = VkDescriptorSetBindingReferenceVALVE; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetBindingReferenceVALVE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + uint32_t binding_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , descriptorSetLayout( descriptorSetLayout_ ) + , binding( binding_ ) + { + } + + VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetBindingReferenceVALVE( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetBindingReferenceVALVE( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorSetBindingReferenceVALVE & operator=( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorSetBindingReferenceVALVE & operator=( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & + setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetLayout = descriptorSetLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDescriptorSetBindingReferenceVALVE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetBindingReferenceVALVE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorSetLayout, binding ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetBindingReferenceVALVE const & ) const = default; +#else + bool operator==( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( binding == rhs.binding ); +# endif + } + + bool 
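+ // --- [editorial usage sketch, not part of the generated header] ---
+ // Allocating sets with DescriptorSetAllocateInfo; `device`, `pool` and
+ // `layout` are assumed pre-created handles. The enhanced-mode constructor
+ // wraps a single layout in an ArrayProxyNoTemporaries:
+ //   vk::DescriptorSetAllocateInfo allocInfo{ pool, layout };
+ //   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );
+ // -------------------------------------------------------------------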
operator!=( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetBindingReferenceVALVE; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {}; + uint32_t binding = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorSetBindingReferenceVALVE; + }; + + struct DescriptorSetLayoutBinding + { + using NativeType = VkDescriptorSetLayoutBinding; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT + : binding( binding_ ) + , descriptorType( descriptorType_ ) + , descriptorCount( descriptorCount_ ) + , stageFlags( stageFlags_ ) + , pImmutableSamplers( pImmutableSamplers_ ) + { + } + + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutBinding( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBinding( uint32_t binding_, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & immutableSamplers_ ) + : binding( binding_ ) + , descriptorType( descriptorType_ ) + , descriptorCount( static_cast( immutableSamplers_.size() ) ) + , stageFlags( stageFlags_ ) + , pImmutableSamplers( immutableSamplers_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT + { + pImmutableSamplers = pImmutableSamplers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBinding & + setImmutableSamplers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT + { 
+ descriptorCount = static_cast( immutableSamplers_.size() ); + pImmutableSamplers = immutableSamplers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( binding, descriptorType, descriptorCount, stageFlags, pImmutableSamplers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutBinding const & ) const = default; +#else + bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( binding == rhs.binding ) && ( descriptorType == rhs.descriptorType ) && ( descriptorCount == rhs.descriptorCount ) && + ( stageFlags == rhs.stageFlags ) && ( pImmutableSamplers == rhs.pImmutableSamplers ); +# endif + } + + bool operator!=( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers = {}; + }; + + struct DescriptorSetLayoutBindingFlagsCreateInfo + { + using NativeType = VkDescriptorSetLayoutBindingFlagsCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( uint32_t bindingCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , bindingCount( bindingCount_ ) + , pBindingFlags( pBindingFlags_ ) + { + } + + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutBindingFlagsCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBindingFlagsCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindingFlags_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), bindingCount( static_cast( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = bindingCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & + setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT + { + pBindingFlags = pBindingFlags_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = static_cast( bindingFlags_.size() ); + pBindingFlags = bindingFlags_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, bindingCount, pBindingFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const & ) const = default; +#else + bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindingCount == rhs.bindingCount ) && ( pBindingFlags == rhs.pBindingFlags ); +# endif + } + + bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; + const void * pNext = {}; + uint32_t bindingCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorSetLayoutBindingFlagsCreateInfo; + }; + + using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo; + + struct DescriptorSetLayoutCreateInfo + { + using NativeType = VkDescriptorSetLayoutCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, + uint32_t bindingCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , bindingCount( bindingCount_ ) + , pBindings( pBindings_ ) + { + } + + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : 
DescriptorSetLayoutCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindings_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), bindingCount( static_cast( bindings_.size() ) ), pBindings( bindings_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = bindingCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & + setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT + { + pBindings = pBindings_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutCreateInfo & + setBindings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindings_ ) VULKAN_HPP_NOEXCEPT + { + bindingCount = static_cast( bindings_.size() ); + pBindings = bindings_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, bindingCount, pBindings ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutCreateInfo const & ) const = default; +#else + bool operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( bindingCount == rhs.bindingCount ) && + ( pBindings == rhs.pBindings ); +# endif + } + + bool operator!=( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {}; + uint32_t bindingCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorSetLayoutCreateInfo; + }; + + struct DescriptorSetLayoutHostMappingInfoVALVE + { + 
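+ // --- [editorial usage sketch, not part of the generated header] ---
+ // Declaring a single uniform-buffer binding and building a set layout from
+ // it; `device` is an assumed vk::Device:
+ //   vk::DescriptorSetLayoutBinding binding{ 0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex };
+ //   vk::DescriptorSetLayoutCreateInfo layoutInfo{ {}, binding };
+ //   vk::DescriptorSetLayout layout = device.createDescriptorSetLayout( layoutInfo );
+ // -------------------------------------------------------------------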
using NativeType = VkDescriptorSetLayoutHostMappingInfoVALVE; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorSetLayoutHostMappingInfoVALVE( size_t descriptorOffset_ = {}, uint32_t descriptorSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , descriptorOffset( descriptorOffset_ ) + , descriptorSize( descriptorSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutHostMappingInfoVALVE( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutHostMappingInfoVALVE( *reinterpret_cast<DescriptorSetLayoutHostMappingInfoVALVE const *>( &rhs ) ) + { + } + + DescriptorSetLayoutHostMappingInfoVALVE & operator=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorSetLayoutHostMappingInfoVALVE & operator=( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorOffset( size_t descriptorOffset_ ) VULKAN_HPP_NOEXCEPT + { + descriptorOffset = descriptorOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorSize( uint32_t descriptorSize_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSize = descriptorSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDescriptorSetLayoutHostMappingInfoVALVE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDescriptorSetLayoutHostMappingInfoVALVE *>( this ); + } + + operator VkDescriptorSetLayoutHostMappingInfoVALVE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, size_t const &, uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorOffset, descriptorSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutHostMappingInfoVALVE const & ) const = default; +#else + bool operator==( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorOffset == rhs.descriptorOffset ) && ( descriptorSize == rhs.descriptorSize ); +# endif + } + + bool operator!=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE; + void * pNext = {}; + size_t descriptorOffset = {}; + uint32_t descriptorSize = {}; + }; + + template <> + struct CppType<StructureType, StructureType::eDescriptorSetLayoutHostMappingInfoVALVE> + { + using Type = DescriptorSetLayoutHostMappingInfoVALVE; + }; + + struct DescriptorSetLayoutSupport + { + using NativeType = VkDescriptorSetLayoutSupport; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , supported( supported_ ) + { + } + + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutSupport( *reinterpret_cast<DescriptorSetLayoutSupport const *>( &rhs ) ) + { + } + + DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>( &rhs ); + return *this; + } + + operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDescriptorSetLayoutSupport *>( this ); + } + + operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDescriptorSetLayoutSupport *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supported ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetLayoutSupport const & ) const = default; +#else + bool operator==( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supported == rhs.supported ); +# endif + } + + bool operator!=( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supported = {}; + }; + + template <> + struct CppType<StructureType, StructureType::eDescriptorSetLayoutSupport> + { + using Type = DescriptorSetLayoutSupport; + }; + + using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; + + struct DescriptorSetVariableDescriptorCountAllocateInfo + { + using NativeType = VkDescriptorSetVariableDescriptorCountAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {}, + const uint32_t * pDescriptorCounts_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , descriptorSetCount( descriptorSetCount_ ) + , pDescriptorCounts( pDescriptorCounts_ ) + { + } + + VULKAN_HPP_CONSTEXPR + DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetVariableDescriptorCountAllocateInfo( *reinterpret_cast<DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetVariableDescriptorCountAllocateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const 
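+ // --- [editorial usage sketch, not part of the generated header] ---
+ // DescriptorSetLayoutSupport is an output struct: hand the create info to
+ // the device and read `supported` back (assuming the `layoutInfo` from the
+ // earlier sketch and a vk::Device `device`):
+ //   vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupport( layoutInfo );
+ //   bool creatable = static_cast<bool>( support.supported );
+ // -------------------------------------------------------------------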
& descriptorCounts_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), descriptorSetCount( static_cast( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = descriptorSetCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorCounts = pDescriptorCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetVariableDescriptorCountAllocateInfo & + setDescriptorCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = static_cast( descriptorCounts_.size() ); + pDescriptorCounts = descriptorCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorSetCount, pDescriptorCounts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const & ) const = default; +#else + bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetCount == rhs.descriptorSetCount ) && + ( pDescriptorCounts == rhs.pDescriptorCounts ); +# endif + } + + bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; + const void * pNext = {}; + uint32_t descriptorSetCount = {}; + const uint32_t * pDescriptorCounts = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorSetVariableDescriptorCountAllocateInfo; + }; + + using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo; + + struct DescriptorSetVariableDescriptorCountLayoutSupport + { + using NativeType = 
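+ // --- [editorial usage sketch, not part of the generated header] ---
+ // Requesting a variable-sized final binding by chaining this struct into a
+ // DescriptorSetAllocateInfo via pNext (assumes the layout's last binding
+ // was created with vk::DescriptorBindingFlagBits::eVariableDescriptorCount):
+ //   uint32_t counts[] = { 16 };
+ //   vk::DescriptorSetVariableDescriptorCountAllocateInfo variableInfo{ counts };
+ //   vk::DescriptorSetAllocateInfo allocInfo{ pool, layout, &variableInfo };
+ // -------------------------------------------------------------------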
VkDescriptorSetVariableDescriptorCountLayoutSupport; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxVariableDescriptorCount( maxVariableDescriptorCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR + DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetVariableDescriptorCountLayoutSupport( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorSetVariableDescriptorCountLayoutSupport & + operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxVariableDescriptorCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const & ) const = default; +#else + bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount ); +# endif + } + + bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; + void * pNext = {}; + uint32_t maxVariableDescriptorCount = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorSetVariableDescriptorCountLayoutSupport; + }; + + using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport; + + struct DescriptorUpdateTemplateEntry + { + using NativeType = VkDescriptorUpdateTemplateEntry; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + size_t offset_ = {}, + size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + , 
descriptorType( descriptorType_ ) + , offset( offset_ ) + , stride( stride_ ) + { + } + + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorUpdateTemplateEntry( *reinterpret_cast( &rhs ) ) + { + } + + DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT + { + dstBinding = dstBinding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + dstArrayElement = dstArrayElement_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( dstBinding, dstArrayElement, descriptorCount, descriptorType, offset, stride ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorUpdateTemplateEntry const & ) const = default; +#else + bool operator==( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ) && + ( descriptorType == rhs.descriptorType ) && ( offset == rhs.offset ) && ( stride == rhs.stride ); +# endif + } + + bool operator!=( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + size_t offset = {}; + size_t stride = {}; + }; + + using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; + + struct DescriptorUpdateTemplateCreateInfo + { + using NativeType = VkDescriptorUpdateTemplateCreateInfo; + + static 
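+ // --- [editorial usage sketch, not part of the generated header] ---
+ // A template entry maps one binding to a span inside a host-side blob;
+ // HostData below is a hypothetical layout used only for illustration:
+ //   struct HostData { VkDescriptorBufferInfo ubo; };
+ //   vk::DescriptorUpdateTemplateEntry entry{ 0, 0, 1, vk::DescriptorType::eUniformBuffer,
+ //                                            offsetof( HostData, ubo ), sizeof( VkDescriptorBufferInfo ) };
+ // -------------------------------------------------------------------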
const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, + uint32_t descriptorUpdateEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t set_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ) + , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ) + , templateType( templateType_ ) + , descriptorSetLayout( descriptorSetLayout_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipelineLayout( pipelineLayout_ ) + , set( set_ ) + { + } + + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorUpdateTemplateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorUpdateTemplateCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorUpdateEntries_, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t set_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , descriptorUpdateEntryCount( static_cast( descriptorUpdateEntries_.size() ) ) + , pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ) + , templateType( templateType_ ) + , descriptorSetLayout( descriptorSetLayout_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipelineLayout( pipelineLayout_ ) + , set( set_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & 
setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorUpdateEntryCount = descriptorUpdateEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorUpdateEntries = pDescriptorUpdateEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorUpdateEntries_ ) + VULKAN_HPP_NOEXCEPT + { + descriptorUpdateEntryCount = static_cast( descriptorUpdateEntries_.size() ); + pDescriptorUpdateEntries = descriptorUpdateEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT + { + templateType = templateType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetLayout = descriptorSetLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + pipelineLayout = pipelineLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT + { + set = set_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, flags, descriptorUpdateEntryCount, pDescriptorUpdateEntries, templateType, descriptorSetLayout, pipelineBindPoint, pipelineLayout, set ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DescriptorUpdateTemplateCreateInfo const & ) const = default; +#else + bool operator==( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) && + ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) && ( templateType == rhs.templateType ) && + ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipelineLayout == rhs.pipelineLayout ) && + ( set == rhs.set ); +# endif + } + + bool operator!=( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {}; + uint32_t descriptorUpdateEntryCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet; + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; + uint32_t set = {}; + }; + + template <> + struct CppType + { + using Type = DescriptorUpdateTemplateCreateInfo; + }; + + using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; + + struct DeviceAddressBindingCallbackDataEXT + { + using NativeType = VkDeviceAddressBindingCallbackDataEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceAddressBindingCallbackDataEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType_ = VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT::eBind, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , baseAddress( baseAddress_ ) + , size( size_ ) + , bindingType( bindingType_ ) + { + } + + VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceAddressBindingCallbackDataEXT( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceAddressBindingCallbackDataEXT( *reinterpret_cast( &rhs ) ) + { + } + + DeviceAddressBindingCallbackDataEXT & operator=( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceAddressBindingCallbackDataEXT & operator=( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setBaseAddress( VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress_ ) VULKAN_HPP_NOEXCEPT + { + baseAddress = baseAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & + setBindingType( VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType_ ) VULKAN_HPP_NOEXCEPT + { + bindingType = bindingType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator 
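+ // --- [editorial usage sketch, not part of the generated header] ---
+ // Building a template from the `entry` above and using it to write a set
+ // in one call; `device`, `layout`, `set` and `hostData` are assumed:
+ //   vk::DescriptorUpdateTemplateCreateInfo tmplInfo{ {}, entry, vk::DescriptorUpdateTemplateType::eDescriptorSet, layout };
+ //   vk::DescriptorUpdateTemplate tmpl = device.createDescriptorUpdateTemplate( tmplInfo );
+ //   device.updateDescriptorSetWithTemplate( set, tmpl, &hostData );
+ // -------------------------------------------------------------------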
VkDeviceAddressBindingCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceAddressBindingCallbackDataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, baseAddress, size, bindingType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceAddressBindingCallbackDataEXT const & ) const = default; +#else + bool operator==( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( baseAddress == rhs.baseAddress ) && ( size == rhs.size ) && + ( bindingType == rhs.bindingType ); +# endif + } + + bool operator!=( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceAddressBindingCallbackDataEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType = VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT::eBind; + }; + + template <> + struct CppType + { + using Type = DeviceAddressBindingCallbackDataEXT; + }; + + struct DeviceBufferMemoryRequirements + { + using NativeType = VkDeviceBufferMemoryRequirements; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceBufferMemoryRequirements; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pCreateInfo( pCreateInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceBufferMemoryRequirements( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceBufferMemoryRequirements( *reinterpret_cast( &rhs ) ) + { + } + + DeviceBufferMemoryRequirements & operator=( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceBufferMemoryRequirements & operator=( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT + { + pCreateInfo = pCreateInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDeviceBufferMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceBufferMemoryRequirements &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pCreateInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceBufferMemoryRequirements const & ) const = default; +#else + bool operator==( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ); +# endif + } + + bool operator!=( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceBufferMemoryRequirements; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo = {}; + }; + + template <> + struct CppType + { + using Type = DeviceBufferMemoryRequirements; + }; + + using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements; + + struct DeviceQueueCreateInfo + { + using NativeType = VkDeviceQueueCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, + uint32_t queueCount_ = {}, + const float * pQueuePriorities_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueCount( queueCount_ ) + , pQueuePriorities( pQueuePriorities_ ) + { + } + + VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, + uint32_t queueFamilyIndex_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queuePriorities_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueCount( static_cast( queuePriorities_.size() ) ) + , pQueuePriorities( queuePriorities_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + 
    VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueCount = queueCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueuePriorities = pQueuePriorities_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceQueueCreateInfo &
      setQueuePriorities( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT
    {
      queueCount       = static_cast<uint32_t>( queuePriorities_.size() );
      pQueuePriorities = queuePriorities_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceQueueCreateInfo *>( this );
    }

    operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceQueueCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
               const void * const &,
               VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags const &,
               uint32_t const &,
               uint32_t const &,
               const float * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, queueFamilyIndex, queueCount, pQueuePriorities );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceQueueCreateInfo const & ) const = default;
#else
    bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) &&
             ( queueCount == rhs.queueCount ) && ( pQueuePriorities == rhs.pQueuePriorities );
# endif
    }

    bool operator!=( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType            = StructureType::eDeviceQueueCreateInfo;
    const void *                                 pNext            = {};
    VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags            = {};
    uint32_t                                     queueFamilyIndex = {};
    uint32_t                                     queueCount       = {};
    const float *                                pQueuePriorities = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eDeviceQueueCreateInfo>
  {
    using Type = DeviceQueueCreateInfo;
  };

  struct PhysicalDeviceFeatures
  {
    using NativeType = VkPhysicalDeviceFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32
textureCompressionETC2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT + : robustBufferAccess( robustBufferAccess_ ) + , fullDrawIndexUint32( fullDrawIndexUint32_ ) + , imageCubeArray( imageCubeArray_ ) + , independentBlend( independentBlend_ ) + , geometryShader( geometryShader_ ) + , tessellationShader( tessellationShader_ ) + , sampleRateShading( sampleRateShading_ ) + , dualSrcBlend( dualSrcBlend_ ) + , logicOp( logicOp_ ) + , multiDrawIndirect( multiDrawIndirect_ ) + , drawIndirectFirstInstance( drawIndirectFirstInstance_ ) + , depthClamp( depthClamp_ ) + , depthBiasClamp( depthBiasClamp_ ) + , fillModeNonSolid( fillModeNonSolid_ ) + , depthBounds( depthBounds_ ) + , wideLines( wideLines_ ) + , largePoints( largePoints_ ) + , alphaToOne( alphaToOne_ ) + , multiViewport( multiViewport_ ) + , samplerAnisotropy( samplerAnisotropy_ ) + , textureCompressionETC2( textureCompressionETC2_ ) + , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ) + , textureCompressionBC( textureCompressionBC_ ) + , occlusionQueryPrecise( occlusionQueryPrecise_ ) + , pipelineStatisticsQuery( pipelineStatisticsQuery_ ) + , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ) + , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ) + , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ) + , shaderImageGatherExtended( shaderImageGatherExtended_ ) + , 
shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ) + , shaderStorageImageMultisample( shaderStorageImageMultisample_ ) + , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ) + , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ) + , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ) + , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ) + , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ) + , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ) + , shaderClipDistance( shaderClipDistance_ ) + , shaderCullDistance( shaderCullDistance_ ) + , shaderFloat64( shaderFloat64_ ) + , shaderInt64( shaderInt64_ ) + , shaderInt16( shaderInt16_ ) + , shaderResourceResidency( shaderResourceResidency_ ) + , shaderResourceMinLod( shaderResourceMinLod_ ) + , sparseBinding( sparseBinding_ ) + , sparseResidencyBuffer( sparseResidencyBuffer_ ) + , sparseResidencyImage2D( sparseResidencyImage2D_ ) + , sparseResidencyImage3D( sparseResidencyImage3D_ ) + , sparseResidency2Samples( sparseResidency2Samples_ ) + , sparseResidency4Samples( sparseResidency4Samples_ ) + , sparseResidency8Samples( sparseResidency8Samples_ ) + , sparseResidency16Samples( sparseResidency16Samples_ ) + , sparseResidencyAliased( sparseResidencyAliased_ ) + , variableMultisampleRate( variableMultisampleRate_ ) + , inheritedQueries( inheritedQueries_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + { + robustBufferAccess = robustBufferAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT + { + fullDrawIndexUint32 = fullDrawIndexUint32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT + { + imageCubeArray = imageCubeArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT + { + independentBlend = independentBlend_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT + { + geometryShader = geometryShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT + { + tessellationShader = tessellationShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) 
VULKAN_HPP_NOEXCEPT + { + sampleRateShading = sampleRateShading_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT + { + dualSrcBlend = dualSrcBlend_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT + { + logicOp = logicOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT + { + multiDrawIndirect = multiDrawIndirect_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT + { + drawIndirectFirstInstance = drawIndirectFirstInstance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT + { + depthClamp = depthClamp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasClamp = depthBiasClamp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT + { + fillModeNonSolid = fillModeNonSolid_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT + { + depthBounds = depthBounds_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT + { + wideLines = wideLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT + { + largePoints = largePoints_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT + { + alphaToOne = alphaToOne_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT + { + multiViewport = multiViewport_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT + { + samplerAnisotropy = samplerAnisotropy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionETC2 = textureCompressionETC2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionASTC_LDR = textureCompressionASTC_LDR_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionBC = textureCompressionBC_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT + { + occlusionQueryPrecise = occlusionQueryPrecise_; + return 
*this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatisticsQuery = pipelineStatisticsQuery_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT + { + vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT + { + fragmentStoresAndAtomics = fragmentStoresAndAtomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT + { + shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageGatherExtended = shaderImageGatherExtended_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageMultisample = shaderStorageImageMultisample_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageArrayDynamicIndexing = 
shaderStorageImageArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT + { + shaderClipDistance = shaderClipDistance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT + { + shaderCullDistance = shaderCullDistance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat64 = shaderFloat64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt64 = shaderInt64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt16 = shaderInt16_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT + { + shaderResourceResidency = shaderResourceResidency_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT + { + shaderResourceMinLod = shaderResourceMinLod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT + { + sparseBinding = sparseBinding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyBuffer = sparseResidencyBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyImage2D = sparseResidencyImage2D_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyImage3D = sparseResidencyImage3D_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency2Samples = sparseResidency2Samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency4Samples = sparseResidency4Samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency8Samples = sparseResidency8Samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency16Samples = sparseResidency16Samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyAliased = 
sparseResidencyAliased_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT + { + variableMultisampleRate = variableMultisampleRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT + { + inheritedQueries = inheritedQueries_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( robustBufferAccess, + fullDrawIndexUint32, + imageCubeArray, + independentBlend, + geometryShader, + tessellationShader, + sampleRateShading, + dualSrcBlend, + logicOp, + multiDrawIndirect, + drawIndirectFirstInstance, + depthClamp, + depthBiasClamp, + fillModeNonSolid, + depthBounds, + wideLines, + largePoints, + alphaToOne, + multiViewport, + samplerAnisotropy, + textureCompressionETC2, + textureCompressionASTC_LDR, + textureCompressionBC, + occlusionQueryPrecise, + pipelineStatisticsQuery, + vertexPipelineStoresAndAtomics, + fragmentStoresAndAtomics, + shaderTessellationAndGeometryPointSize, + shaderImageGatherExtended, + shaderStorageImageExtendedFormats, + shaderStorageImageMultisample, + shaderStorageImageReadWithoutFormat, + shaderStorageImageWriteWithoutFormat, + shaderUniformBufferArrayDynamicIndexing, + shaderSampledImageArrayDynamicIndexing, + shaderStorageBufferArrayDynamicIndexing, + shaderStorageImageArrayDynamicIndexing, + shaderClipDistance, + shaderCullDistance, + shaderFloat64, + shaderInt64, + shaderInt16, + shaderResourceResidency, + shaderResourceMinLod, + sparseBinding, + sparseResidencyBuffer, + sparseResidencyImage2D, + sparseResidencyImage3D, + sparseResidency2Samples, + sparseResidency4Samples, + sparseResidency8Samples, + sparseResidency16Samples, + sparseResidencyAliased, + variableMultisampleRate, + inheritedQueries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( robustBufferAccess == rhs.robustBufferAccess ) && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) && + ( imageCubeArray == rhs.imageCubeArray ) && ( independentBlend == rhs.independentBlend ) && ( geometryShader == rhs.geometryShader ) && + ( tessellationShader == rhs.tessellationShader ) && ( sampleRateShading == rhs.sampleRateShading ) && ( dualSrcBlend == rhs.dualSrcBlend ) && + ( logicOp == rhs.logicOp ) && ( multiDrawIndirect == rhs.multiDrawIndirect ) && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) && + ( depthClamp == rhs.depthClamp ) && ( depthBiasClamp == rhs.depthBiasClamp ) && ( fillModeNonSolid == rhs.fillModeNonSolid ) && + ( depthBounds == rhs.depthBounds ) && ( wideLines == rhs.wideLines ) && ( largePoints == rhs.largePoints ) && ( alphaToOne == rhs.alphaToOne ) && + ( multiViewport == rhs.multiViewport ) && ( samplerAnisotropy == rhs.samplerAnisotropy ) && + ( 
textureCompressionETC2 == rhs.textureCompressionETC2 ) && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) && + ( textureCompressionBC == rhs.textureCompressionBC ) && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) && + ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) && + ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) && + ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) && + ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) && + ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) && + ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) && + ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) && + ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) && + ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) && + ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) && + ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) && ( shaderClipDistance == rhs.shaderClipDistance ) && + ( shaderCullDistance == rhs.shaderCullDistance ) && ( shaderFloat64 == rhs.shaderFloat64 ) && ( shaderInt64 == rhs.shaderInt64 ) && + ( shaderInt16 == rhs.shaderInt16 ) && ( shaderResourceResidency == rhs.shaderResourceResidency ) && + ( shaderResourceMinLod == rhs.shaderResourceMinLod ) && ( sparseBinding == rhs.sparseBinding ) && + ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) && + ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) && ( sparseResidency2Samples == rhs.sparseResidency2Samples ) && + ( sparseResidency4Samples == rhs.sparseResidency4Samples ) && ( sparseResidency8Samples == rhs.sparseResidency8Samples ) && + ( sparseResidency16Samples == rhs.sparseResidency16Samples ) && ( sparseResidencyAliased == rhs.sparseResidencyAliased ) && + ( variableMultisampleRate == rhs.variableMultisampleRate ) && ( inheritedQueries == rhs.inheritedQueries ); +# endif + } + + bool operator!=( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {}; + VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 logicOp = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {}; + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {}; + VULKAN_HPP_NAMESPACE::Bool32 wideLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 largePoints = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {}; + VULKAN_HPP_NAMESPACE::Bool32 
textureCompressionETC2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {}; + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {}; + VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {}; + }; + + struct DeviceCreateInfo + { + using NativeType = VkDeviceCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, + uint32_t queueCreateInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ = {}, + uint32_t enabledLayerCount_ = {}, + const char * const * ppEnabledLayerNames_ = {}, + uint32_t enabledExtensionCount_ = {}, + const char * const * ppEnabledExtensionNames_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , queueCreateInfoCount( queueCreateInfoCount_ ) + , pQueueCreateInfos( pQueueCreateInfos_ ) + , enabledLayerCount( enabledLayerCount_ ) + , ppEnabledLayerNames( ppEnabledLayerNames_ ) + , enabledExtensionCount( enabledExtensionCount_ ) + , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) + , pEnabledFeatures( pEnabledFeatures_ ) + { + } + + VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceCreateInfo( 
*reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueCreateInfos_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , queueCreateInfoCount( static_cast( queueCreateInfos_.size() ) ) + , pQueueCreateInfos( queueCreateInfos_.data() ) + , enabledLayerCount( static_cast( pEnabledLayerNames_.size() ) ) + , ppEnabledLayerNames( pEnabledLayerNames_.data() ) + , enabledExtensionCount( static_cast( pEnabledExtensionNames_.size() ) ) + , ppEnabledExtensionNames( pEnabledExtensionNames_.data() ) + , pEnabledFeatures( pEnabledFeatures_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + queueCreateInfoCount = queueCreateInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & + setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT + { + pQueueCreateInfos = pQueueCreateInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & setQueueCreateInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT + { + queueCreateInfoCount = static_cast( queueCreateInfos_.size() ); + pQueueCreateInfos = queueCreateInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = enabledLayerCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledLayerNames = ppEnabledLayerNames_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & + setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = static_cast( pEnabledLayerNames_.size() ); + ppEnabledLayerNames = pEnabledLayerNames_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = enabledExtensionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledExtensionNames( const char * const * 
ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledExtensionNames = ppEnabledExtensionNames_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & + setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = static_cast( pEnabledExtensionNames_.size() ); + ppEnabledExtensionNames = pEnabledExtensionNames_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT + { + pEnabledFeatures = pEnabledFeatures_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + queueCreateInfoCount, + pQueueCreateInfos, + enabledLayerCount, + ppEnabledLayerNames, + enabledExtensionCount, + ppEnabledExtensionNames, + pEnabledFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = queueCreateInfoCount <=> rhs.queueCreateInfoCount; cmp != 0 ) + return cmp; + if ( auto cmp = pQueueCreateInfos <=> rhs.pQueueCreateInfos; cmp != 0 ) + return cmp; + if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < enabledLayerCount; ++i ) + { + if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] ) + if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 ) + return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater; + } + if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < enabledExtensionCount; ++i ) + { + if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] ) + if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 ) + return cmp < 0 ? 
std::strong_ordering::less : std::strong_ordering::greater; + } + if ( auto cmp = pEnabledFeatures <=> rhs.pEnabledFeatures; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueCreateInfoCount == rhs.queueCreateInfoCount ) && + ( pQueueCreateInfos == rhs.pQueueCreateInfos ) && ( enabledLayerCount == rhs.enabledLayerCount ) && + std::equal( ppEnabledLayerNames, + ppEnabledLayerNames + enabledLayerCount, + rhs.ppEnabledLayerNames, + []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) && + ( enabledExtensionCount == rhs.enabledExtensionCount ) && + std::equal( ppEnabledExtensionNames, + ppEnabledExtensionNames + enabledExtensionCount, + rhs.ppEnabledExtensionNames, + []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) && + ( pEnabledFeatures == rhs.pEnabledFeatures ); + } + + bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {}; + uint32_t queueCreateInfoCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos = {}; + uint32_t enabledLayerCount = {}; + const char * const * ppEnabledLayerNames = {}; + uint32_t enabledExtensionCount = {}; + const char * const * ppEnabledExtensionNames = {}; + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures = {}; + }; + + template <> + struct CppType + { + using Type = DeviceCreateInfo; + }; + + struct DeviceDeviceMemoryReportCreateInfoEXT + { + using NativeType = VkDeviceDeviceMemoryReportCreateInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , pfnUserCallback( pfnUserCallback_ ) + , pUserData( pUserData_ ) + { + } + + VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceDeviceMemoryReportCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + DeviceDeviceMemoryReportCreateInfoEXT & operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceDeviceMemoryReportCreateInfoEXT & operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & 
setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & + setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + { + pfnUserCallback = pfnUserCallback_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + { + pUserData = pUserData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pfnUserCallback, pUserData ); + } +#endif + + bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +#if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +#else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnUserCallback == rhs.pfnUserCallback ) && + ( pUserData == rhs.pUserData ); +#endif + } + + bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback = {}; + void * pUserData = {}; + }; + + template <> + struct CppType + { + using Type = DeviceDeviceMemoryReportCreateInfoEXT; + }; + + struct DeviceDiagnosticsConfigCreateInfoNV + { + using NativeType = VkDeviceDiagnosticsConfigCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV 
flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV *>( this );
    }

    operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const & ) const = default;
#else
    bool operator==( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
# endif
    }

    bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                  sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
    const void *                                         pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV>
  {
    using Type = DeviceDiagnosticsConfigCreateInfoNV;
  };

  struct DeviceEventInfoEXT
  {
    using NativeType = VkDeviceEventInfoEXT;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug,
                                             const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , deviceEvent( deviceEvent_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceEventInfoEXT( *reinterpret_cast<DeviceEventInfoEXT const *>( &rhs ) )
    {
    }

    DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceEvent = deviceEvent_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceEventInfoEXT *>( this );
    }

    operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceEventInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, deviceEvent );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceEventInfoEXT const & ) const = default;
#else
    bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceEvent == rhs.deviceEvent );
# endif
    }

    bool operator!=( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType       = StructureType::eDeviceEventInfoEXT;
    const void *                             pNext       = {};
    VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
  };

  template <>
  struct CppType<StructureType, StructureType::eDeviceEventInfoEXT>
  {
    using Type = DeviceEventInfoEXT;
  };

  struct DeviceFaultAddressInfoEXT
  {
    using NativeType = VkDeviceFaultAddressInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DeviceFaultAddressInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType_ = VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT::eNone,
                                 VULKAN_HPP_NAMESPACE::DeviceAddress             reportedAddress_  = {},
                                 VULKAN_HPP_NAMESPACE::DeviceSize                addressPrecision_ = {} ) VULKAN_HPP_NOEXCEPT
      : addressType( addressType_ )
      , reportedAddress( reportedAddress_ )
      , addressPrecision( addressPrecision_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceFaultAddressInfoEXT( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceFaultAddressInfoEXT( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceFaultAddressInfoEXT( *reinterpret_cast<DeviceFaultAddressInfoEXT const *>( &rhs ) )
    {
    }

    DeviceFaultAddressInfoEXT & operator=( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DeviceFaultAddressInfoEXT & operator=( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressType( VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType_ ) VULKAN_HPP_NOEXCEPT
    {
      addressType = addressType_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setReportedAddress( VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      reportedAddress = reportedAddress_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressPrecision( VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision_ ) VULKAN_HPP_NOEXCEPT
    {
      addressPrecision = addressPrecision_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkDeviceFaultAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceFaultAddressInfoEXT *>( this );
    }

    operator VkDeviceFaultAddressInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceFaultAddressInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT const &, VULKAN_HPP_NAMESPACE::DeviceAddress const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( addressType, reportedAddress, addressPrecision );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceFaultAddressInfoEXT const & ) const = default;
#else
    bool operator==( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( addressType == rhs.addressType ) && ( reportedAddress == rhs.reportedAddress ) && ( addressPrecision == rhs.addressPrecision );
# endif
    }

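    // Note on semantics (paraphrasing the VK_EXT_device_fault specification, so treat as
    // informational): reportedAddress is only known to the precision given by
    // addressPrecision, a power of two; the faulting address lies somewhere within
    //   [ reportedAddress & ~( addressPrecision - 1 ),
    //     reportedAddress |  ( addressPrecision - 1 ) ].
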
    bool operator!=( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType      = VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT::eNone;
    VULKAN_HPP_NAMESPACE::DeviceAddress             reportedAddress  = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                addressPrecision = {};
  };

  struct DeviceFaultCountsEXT
  {
    using NativeType = VkDeviceFaultCountsEXT;

    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultCountsEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( uint32_t                         addressInfoCount_ = {},
                                               uint32_t                         vendorInfoCount_  = {},
                                               VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize_ = {},
                                               void *                           pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ )
      , addressInfoCount( addressInfoCount_ )
      , vendorInfoCount( vendorInfoCount_ )
      , vendorBinarySize( vendorBinarySize_ )
    {
    }

    VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceFaultCountsEXT( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceFaultCountsEXT( *reinterpret_cast<DeviceFaultCountsEXT const *>( &rhs ) )
    {
    }

    DeviceFaultCountsEXT & operator=( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    DeviceFaultCountsEXT & operator=( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setAddressInfoCount( uint32_t addressInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      addressInfoCount = addressInfoCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorInfoCount( uint32_t vendorInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vendorInfoCount = vendorInfoCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorBinarySize( VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize_ ) VULKAN_HPP_NOEXCEPT
    {
      vendorBinarySize = vendorBinarySize_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkDeviceFaultCountsEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceFaultCountsEXT *>( this );
    }

    operator VkDeviceFaultCountsEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceFaultCountsEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, addressInfoCount, vendorInfoCount, vendorBinarySize );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceFaultCountsEXT const & ) const = default;
#else
    bool operator==( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( addressInfoCount == rhs.addressInfoCount ) && ( vendorInfoCount == rhs.vendorInfoCount ) &&
             ( vendorBinarySize == rhs.vendorBinarySize );
# endif
    }

    bool operator!=( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
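    // Illustrative sketch, not generated code, of the two-call idiom this struct supports
    // after a VK_ERROR_DEVICE_LOST; `device` is an assumed, already-created VkDevice with
    // VK_EXT_device_fault enabled:
    //
    //   VkDeviceFaultCountsEXT counts{ VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT };
    //   vkGetDeviceFaultInfoEXT( device, &counts, nullptr );    // first call: query counts
    //   std::vector<VkDeviceFaultAddressInfoEXT> addresses( counts.addressInfoCount );
    //   VkDeviceFaultInfoEXT info{ VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT };
    //   info.pAddressInfos = addresses.data();
    //   vkGetDeviceFaultInfoEXT( device, &counts, &info );      // second call: fill arrays
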
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceFaultCountsEXT; + void * pNext = {}; + uint32_t addressInfoCount = {}; + uint32_t vendorInfoCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize = {}; + }; + + template <> + struct CppType + { + using Type = DeviceFaultCountsEXT; + }; + + struct DeviceFaultVendorInfoEXT + { + using NativeType = VkDeviceFaultVendorInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( std::array const & description_ = {}, + uint64_t vendorFaultCode_ = {}, + uint64_t vendorFaultData_ = {} ) VULKAN_HPP_NOEXCEPT + : description( description_ ) + , vendorFaultCode( vendorFaultCode_ ) + , vendorFaultData( vendorFaultData_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceFaultVendorInfoEXT( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceFaultVendorInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + DeviceFaultVendorInfoEXT & operator=( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceFaultVendorInfoEXT & operator=( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setDescription( std::array description_ ) VULKAN_HPP_NOEXCEPT + { + description = description_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultCode( uint64_t vendorFaultCode_ ) VULKAN_HPP_NOEXCEPT + { + vendorFaultCode = vendorFaultCode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultData( uint64_t vendorFaultData_ ) VULKAN_HPP_NOEXCEPT + { + vendorFaultData = vendorFaultData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDeviceFaultVendorInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceFaultVendorInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, uint64_t const &, uint64_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( description, vendorFaultCode, vendorFaultData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceFaultVendorInfoEXT const & ) const = default; +#else + bool operator==( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( description == rhs.description ) && ( vendorFaultCode == rhs.vendorFaultCode ) && ( vendorFaultData == rhs.vendorFaultData ); +# endif + } + + bool operator!=( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + uint64_t vendorFaultCode = {}; + uint64_t vendorFaultData = {}; + }; + + struct DeviceFaultInfoEXT + { + using NativeType = VkDeviceFaultInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( 
std::array const & description_ = {}, + VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos_ = {}, + VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos_ = {}, + void * pVendorBinaryData_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , description( description_ ) + , pAddressInfos( pAddressInfos_ ) + , pVendorInfos( pVendorInfos_ ) + , pVendorBinaryData( pVendorBinaryData_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceFaultInfoEXT( VkDeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceFaultInfoEXT( *reinterpret_cast( &rhs ) ) {} + + DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceFaultInfoEXT & operator=( VkDeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setDescription( std::array description_ ) VULKAN_HPP_NOEXCEPT + { + description = description_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPAddressInfos( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos_ ) VULKAN_HPP_NOEXCEPT + { + pAddressInfos = pAddressInfos_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPVendorInfos( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos_ ) VULKAN_HPP_NOEXCEPT + { + pVendorInfos = pVendorInfos_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT & setPVendorBinaryData( void * pVendorBinaryData_ ) VULKAN_HPP_NOEXCEPT + { + pVendorBinaryData = pVendorBinaryData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDeviceFaultInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceFaultInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * const &, + VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * const &, + void * const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, description, pAddressInfos, pVendorInfos, pVendorBinaryData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceFaultInfoEXT const & ) const = default; +#else + bool operator==( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( description == rhs.description ) && ( pAddressInfos == rhs.pAddressInfos ) && + ( pVendorInfos == rhs.pVendorInfos ) && ( pVendorBinaryData == rhs.pVendorBinaryData ); +# endif + } + + bool operator!=( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceFaultInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos = {}; + 
VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos = {}; + void * pVendorBinaryData = {}; + }; + + template <> + struct CppType + { + using Type = DeviceFaultInfoEXT; + }; + + struct DeviceFaultVendorBinaryHeaderVersionOneEXT + { + using NativeType = VkDeviceFaultVendorBinaryHeaderVersionOneEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT( + uint32_t headerSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ = VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT::eOne, + uint32_t vendorID_ = {}, + uint32_t deviceID_ = {}, + uint32_t driverVersion_ = {}, + std::array const & pipelineCacheUUID_ = {}, + uint32_t applicationNameOffset_ = {}, + uint32_t applicationVersion_ = {}, + uint32_t engineNameOffset_ = {}, + uint32_t engineVersion_ = {}, + uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : headerSize( headerSize_ ) + , headerVersion( headerVersion_ ) + , vendorID( vendorID_ ) + , deviceID( deviceID_ ) + , driverVersion( driverVersion_ ) + , pipelineCacheUUID( pipelineCacheUUID_ ) + , applicationNameOffset( applicationNameOffset_ ) + , applicationVersion( applicationVersion_ ) + , engineNameOffset( engineNameOffset_ ) + , engineVersion( engineVersion_ ) + , apiVersion( apiVersion_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceFaultVendorBinaryHeaderVersionOneEXT( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceFaultVendorBinaryHeaderVersionOneEXT( *reinterpret_cast( &rhs ) ) + { + } + + DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT + { + headerSize = headerSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & + setHeaderVersion( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ ) VULKAN_HPP_NOEXCEPT + { + headerVersion = headerVersion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT + { + vendorID = vendorID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT + { + deviceID = deviceID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDriverVersion( uint32_t driverVersion_ ) VULKAN_HPP_NOEXCEPT + { + driverVersion = driverVersion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & + setPipelineCacheUUID( std::array pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCacheUUID = pipelineCacheUUID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationNameOffset( uint32_t applicationNameOffset_ ) VULKAN_HPP_NOEXCEPT + { + applicationNameOffset = 
applicationNameOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT + { + applicationVersion = applicationVersion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setEngineNameOffset( uint32_t engineNameOffset_ ) VULKAN_HPP_NOEXCEPT + { + engineNameOffset = engineNameOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT + { + engineVersion = engineVersion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT + { + apiVersion = apiVersion_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( headerSize, + headerVersion, + vendorID, + deviceID, + driverVersion, + pipelineCacheUUID, + applicationNameOffset, + applicationVersion, + engineNameOffset, + engineVersion, + apiVersion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceFaultVendorBinaryHeaderVersionOneEXT const & ) const = default; +#else + bool operator==( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && + ( driverVersion == rhs.driverVersion ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && + ( applicationNameOffset == rhs.applicationNameOffset ) && ( applicationVersion == rhs.applicationVersion ) && + ( engineNameOffset == rhs.engineNameOffset ) && ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion ); +# endif + } + + bool operator!=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t headerSize = {}; + VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion = VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT::eOne; + uint32_t vendorID = {}; + uint32_t deviceID = {}; + uint32_t driverVersion = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; + uint32_t applicationNameOffset = {}; + uint32_t applicationVersion = {}; + uint32_t engineNameOffset = {}; + uint32_t engineVersion = {}; + uint32_t apiVersion = {}; + }; + + struct DeviceGroupBindSparseInfo + { + using NativeType = VkDeviceGroupBindSparseInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = 
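+  // Editor's note, not part of the generated header: a minimal usage sketch of the
+  // VK_EXT_device_fault structures above. It assumes a hypothetical VkDevice `device`
+  // that reported VK_ERROR_DEVICE_LOST with the deviceFault feature enabled, and uses
+  // the usual two-call idiom: query the counts, size the output arrays, then fetch the
+  // fault data. The reinterpret_casts mirror what the conversion operators above do.
+  //
+  //   vk::DeviceFaultCountsEXT counts{};
+  //   vkGetDeviceFaultInfoEXT( device, reinterpret_cast<VkDeviceFaultCountsEXT *>( &counts ), nullptr );
+  //   std::vector<vk::DeviceFaultAddressInfoEXT> addressInfos( counts.addressInfoCount );
+  //   std::vector<vk::DeviceFaultVendorInfoEXT> vendorInfos( counts.vendorInfoCount );
+  //   vk::DeviceFaultInfoEXT info{};
+  //   info.setPAddressInfos( addressInfos.data() ).setPVendorInfos( vendorInfos.data() );
+  //   vkGetDeviceFaultInfoEXT( device,
+  //                            reinterpret_cast<VkDeviceFaultCountsEXT *>( &counts ),
+  //                            reinterpret_cast<VkDeviceFaultInfoEXT *>( &info ) );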
+  struct DeviceGroupBindSparseInfo
+  {
+    using NativeType = VkDeviceGroupBindSparseInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , resourceDeviceIndex( resourceDeviceIndex_ )
+      , memoryDeviceIndex( memoryDeviceIndex_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupBindSparseInfo( *reinterpret_cast<DeviceGroupBindSparseInfo const *>( &rhs ) )
+    {
+    }
+
+    DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      resourceDeviceIndex = resourceDeviceIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryDeviceIndex = memoryDeviceIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupBindSparseInfo *>( this );
+    }
+
+    operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupBindSparseInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, resourceDeviceIndex, memoryDeviceIndex );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DeviceGroupBindSparseInfo const & ) const = default;
+#else
+    bool operator==( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) &&
+             ( memoryDeviceIndex == rhs.memoryDeviceIndex );
+# endif
+    }
+
+    bool operator!=( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
+    const void * pNext = {};
+    uint32_t resourceDeviceIndex = {};
+    uint32_t memoryDeviceIndex = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupBindSparseInfo>
+  {
+    using Type = DeviceGroupBindSparseInfo;
+  };
+
+  using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
+
+  struct DeviceGroupCommandBufferBeginInfo
+  {
+    using NativeType = VkDeviceGroupCommandBufferBeginInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , deviceMask( deviceMask_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupCommandBufferBeginInfo( *reinterpret_cast<DeviceGroupCommandBufferBeginInfo const *>( &rhs ) )
+    {
+    }
+
+    DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo *>( this );
+    }
+
+    operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceMask );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DeviceGroupCommandBufferBeginInfo const & ) const = default;
+#else
+    bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask );
+# endif
+    }
+
+    bool operator!=( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
+    const void * pNext = {};
+    uint32_t deviceMask = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupCommandBufferBeginInfo>
+  {
+    using Type = DeviceGroupCommandBufferBeginInfo;
+  };
+
+  using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
+
+  struct DeviceGroupDeviceCreateInfo
+  {
+    using NativeType = VkDeviceGroupDeviceCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = {},
+                                                      const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ = {},
+                                                      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , physicalDeviceCount( physicalDeviceCount_ )
+      , pPhysicalDevices( pPhysicalDevices_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupDeviceCreateInfo( *reinterpret_cast<DeviceGroupDeviceCreateInfo const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DeviceGroupDeviceCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_,
+                                 const void * pNext_ = nullptr )
+      : pNext( pNext_ ), physicalDeviceCount( static_cast<uint32_t>( physicalDevices_.size() ) ), pPhysicalDevices( physicalDevices_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      physicalDeviceCount = physicalDeviceCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo &
+      setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPhysicalDevices = pPhysicalDevices_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DeviceGroupDeviceCreateInfo & setPhysicalDevices(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      physicalDeviceCount = static_cast<uint32_t>( physicalDevices_.size() );
+      pPhysicalDevices = physicalDevices_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo *>( this );
+    }
+
+    operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PhysicalDevice * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, physicalDeviceCount, pPhysicalDevices );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DeviceGroupDeviceCreateInfo const & ) const = default;
+#else
+    bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) &&
+             ( pPhysicalDevices == rhs.pPhysicalDevices );
+# endif
+    }
+
+    bool operator!=( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo;
+    const void * pNext = {};
+    uint32_t physicalDeviceCount = {};
+    const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupDeviceCreateInfo>
+  {
+    using Type = DeviceGroupDeviceCreateInfo;
+  };
+
+  using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
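+  // Editor's note, not part of the generated header: sketch of chaining
+  // DeviceGroupDeviceCreateInfo into device creation, assuming `physicalDevices`
+  // holds the members of one device group (e.g. obtained from
+  // Instance::enumeratePhysicalDeviceGroups()). Names are illustrative.
+  //
+  //   std::vector<vk::PhysicalDevice> physicalDevices = /* members of one group */;
+  //   vk::DeviceGroupDeviceCreateInfo groupInfo( physicalDevices );  // enhanced-mode ctor
+  //   vk::DeviceCreateInfo createInfo = /* queues, extensions, ... */;
+  //   createInfo.setPNext( &groupInfo );
+  //   vk::Device device = physicalDevices[0].createDevice( createInfo );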
+  struct DeviceGroupPresentCapabilitiesKHR
+  {
+    using NativeType = VkDeviceGroupPresentCapabilitiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( std::array<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> const & presentMask_ = {},
+                                                               VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {},
+                                                               void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , presentMask( presentMask_ )
+      , modes( modes_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast<DeviceGroupPresentCapabilitiesKHR const *>( &rhs ) )
+    {
+    }
+
+    DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR *>( this );
+    }
+
+    operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, presentMask, modes );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DeviceGroupPresentCapabilitiesKHR const & ) const = default;
+#else
+    bool operator==( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMask == rhs.presentMask ) && ( modes == rhs.modes );
+# endif
+    }
+
+    bool operator!=( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> presentMask = {};
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupPresentCapabilitiesKHR>
+  {
+    using Type = DeviceGroupPresentCapabilitiesKHR;
+  };
+
+  struct DeviceGroupPresentInfoKHR
+  {
+    using NativeType = VkDeviceGroupPresentInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR(
+      uint32_t swapchainCount_ = {},
+      const uint32_t * pDeviceMasks_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal,
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , swapchainCount( swapchainCount_ )
+      , pDeviceMasks( pDeviceMasks_ )
+      , mode( mode_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupPresentInfoKHR( *reinterpret_cast<DeviceGroupPresentInfoKHR const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_,
+                               VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal,
+                               const void * pNext_ = nullptr )
+      : pNext( pNext_ ), swapchainCount( static_cast<uint32_t>( deviceMasks_.size() ) ), pDeviceMasks( deviceMasks_.data() ), mode( mode_ )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceMasks = pDeviceMasks_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DeviceGroupPresentInfoKHR & setDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = static_cast<uint32_t>( deviceMasks_.size() );
+      pDeviceMasks = deviceMasks_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR *>( this );
+    }
+
+    operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupPresentInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               uint32_t const &,
+               const uint32_t * const &,
+               VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, swapchainCount, pDeviceMasks, mode );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DeviceGroupPresentInfoKHR const & ) const = default;
+#else
+    bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pDeviceMasks == rhs.pDeviceMasks ) &&
+             ( mode == rhs.mode );
+# endif
+    }
+
+    bool operator!=( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
+    const void * pNext = {};
+    uint32_t swapchainCount = {};
+    const uint32_t * pDeviceMasks = {};
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupPresentInfoKHR>
+  {
+    using Type = DeviceGroupPresentInfoKHR;
+  };
+
+  struct DeviceGroupRenderPassBeginInfo
+  {
+    using NativeType = VkDeviceGroupRenderPassBeginInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {},
+                                                         uint32_t deviceRenderAreaCount_ = {},
+                                                         const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {},
+                                                         const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , deviceMask( deviceMask_ )
+      , deviceRenderAreaCount( deviceRenderAreaCount_ )
+      , pDeviceRenderAreas( pDeviceRenderAreas_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupRenderPassBeginInfo( *reinterpret_cast<DeviceGroupRenderPassBeginInfo const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_,
+                                    VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_,
+                                    const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , deviceMask( deviceMask_ )
+      , deviceRenderAreaCount( static_cast<uint32_t>( deviceRenderAreas_.size() ) )
+      , pDeviceRenderAreas( deviceRenderAreas_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceRenderAreaCount = deviceRenderAreaCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo &
+      setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceRenderAreas = pDeviceRenderAreas_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DeviceGroupRenderPassBeginInfo &
+      setDeviceRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceRenderAreaCount = static_cast<uint32_t>( deviceRenderAreas_.size() );
+      pDeviceRenderAreas = deviceRenderAreas_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo *>( this );
+    }
+
+    operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::
+      tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Rect2D * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceMask, deviceRenderAreaCount, pDeviceRenderAreas );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DeviceGroupRenderPassBeginInfo const & ) const = default;
+#else
+    bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ) && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) &&
+             ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
+# endif
+    }
+
+    bool operator!=( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo;
+    const void * pNext = {};
+    uint32_t deviceMask = {};
+    uint32_t deviceRenderAreaCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupRenderPassBeginInfo>
+  {
+    using Type = DeviceGroupRenderPassBeginInfo;
+  };
+
+  using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
+
+  struct DeviceGroupSubmitInfo
+  {
+    using NativeType = VkDeviceGroupSubmitInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {},
+                                                const uint32_t * pWaitSemaphoreDeviceIndices_ = {},
+                                                uint32_t commandBufferCount_ = {},
+                                                const uint32_t * pCommandBufferDeviceMasks_ = {},
+                                                uint32_t signalSemaphoreCount_ = {},
+                                                const uint32_t * pSignalSemaphoreDeviceIndices_ = {},
+                                                const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , waitSemaphoreCount( waitSemaphoreCount_ )
+      , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ )
+      , commandBufferCount( commandBufferCount_ )
+      , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ )
+      , signalSemaphoreCount( signalSemaphoreCount_ )
+      , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupSubmitInfo( *reinterpret_cast<DeviceGroupSubmitInfo const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DeviceGroupSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_,
+                           VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ = {},
+                           VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ = {},
+                           const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , waitSemaphoreCount( static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() ) )
+      , pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() )
+      , commandBufferCount( static_cast<uint32_t>( commandBufferDeviceMasks_.size() ) )
+      , pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() )
+      , signalSemaphoreCount( static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() ) )
+      , pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DeviceGroupSubmitInfo &
+      setWaitSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() );
+      pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DeviceGroupSubmitInfo &
+      setCommandBufferDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = static_cast<uint32_t>( commandBufferDeviceMasks_.size() );
+      pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() );
+      pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupSubmitInfo *>( this );
+    }
+
+    operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupSubmitInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               uint32_t const &,
+               const uint32_t * const &,
+               uint32_t const &,
+               const uint32_t * const &,
+               uint32_t const &,
+               const uint32_t * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       waitSemaphoreCount,
+                       pWaitSemaphoreDeviceIndices,
+                       commandBufferCount,
+                       pCommandBufferDeviceMasks,
+                       signalSemaphoreCount,
+                       pSignalSemaphoreDeviceIndices );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DeviceGroupSubmitInfo const & ) const = default;
+#else
+    bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
+             ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) && ( commandBufferCount == rhs.commandBufferCount ) &&
+             ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) &&
+             ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
+# endif
+    }
+
+    bool operator!=( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo;
+    const void * pNext = {};
+    uint32_t waitSemaphoreCount = {};
+    const uint32_t * pWaitSemaphoreDeviceIndices = {};
+    uint32_t commandBufferCount = {};
+    const uint32_t * pCommandBufferDeviceMasks = {};
+    uint32_t signalSemaphoreCount = {};
+    const uint32_t * pSignalSemaphoreDeviceIndices = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupSubmitInfo>
+  {
+    using Type = DeviceGroupSubmitInfo;
+  };
+
+  using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
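+  // Editor's note, not part of the generated header: sketch of a device-group submit,
+  // assuming a hypothetical vk::CommandBuffer `cmd` that was begun with a
+  // DeviceGroupCommandBufferBeginInfo chained into its begin info, and a vk::Queue
+  // `queue`. One mask entry is supplied per command buffer in the submit.
+  //
+  //   uint32_t deviceMask = 0b11;  // execute on physical devices 0 and 1
+  //   vk::DeviceGroupSubmitInfo groupSubmit{};
+  //   groupSubmit.setCommandBufferDeviceMasks( deviceMask );  // ArrayProxy of a single mask
+  //   vk::SubmitInfo submit{};
+  //   submit.setCommandBuffers( cmd ).setPNext( &groupSubmit );
+  //   queue.submit( submit );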
+  struct DeviceGroupSwapchainCreateInfoKHR
+  {
+    using NativeType = VkDeviceGroupSwapchainCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {},
+                                                            const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , modes( modes_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast<DeviceGroupSwapchainCreateInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      modes = modes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR *>( this );
+    }
+
+    operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, modes );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( modes == rhs.modes );
+# endif
+    }
+
+    bool operator!=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDeviceGroupSwapchainCreateInfoKHR>
+  {
+    using Type = DeviceGroupSwapchainCreateInfoKHR;
+  };
+
+  struct ImageCreateInfo
+  {
+    using NativeType = VkImageCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
+                                          VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+                                          VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                          VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
+                                          uint32_t mipLevels_ = {},
+                                          uint32_t arrayLayers_ = {},
+                                          VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+                                          VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
+                                          VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
+                                          VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+                                          uint32_t queueFamilyIndexCount_ = {},
+                                          const uint32_t * pQueueFamilyIndices_ = {},
+                                          VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                          const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , imageType( imageType_ )
+      , format( format_ )
+      , extent( extent_ )
+      , mipLevels( mipLevels_ )
+      , arrayLayers( arrayLayers_ )
+      , samples( samples_ )
+      , tiling( tiling_ )
+      , usage( usage_ )
+      , sharingMode( sharingMode_ )
+      , queueFamilyIndexCount( queueFamilyIndexCount_ )
+      , pQueueFamilyIndices( pQueueFamilyIndices_ )
+      , initialLayout( initialLayout_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCreateInfo( *reinterpret_cast<ImageCreateInfo const *>( &rhs ) ) {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_,
+                     VULKAN_HPP_NAMESPACE::ImageType imageType_,
+                     VULKAN_HPP_NAMESPACE::Format format_,
+                     VULKAN_HPP_NAMESPACE::Extent3D extent_,
+                     uint32_t mipLevels_,
+                     uint32_t arrayLayers_,
+                     VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_,
+                     VULKAN_HPP_NAMESPACE::ImageTiling tiling_,
+                     VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_,
+                     VULKAN_HPP_NAMESPACE::SharingMode sharingMode_,
+                     VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
+                     VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                     const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , imageType( imageType_ )
+      , format( format_ )
+      , extent( extent_ )
+      , mipLevels( mipLevels_ )
+      , arrayLayers( arrayLayers_ )
+      , samples( samples_ )
+      , tiling( tiling_ )
+      , usage( usage_ )
+      , sharingMode( sharingMode_ )
+      , queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
+      , pQueueFamilyIndices( queueFamilyIndices_.data() )
+      , initialLayout( initialLayout_ )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageType = imageType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLevels = mipLevels_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      arrayLayers = arrayLayers_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tiling = tiling_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sharingMode = sharingMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ImageCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
+      pQueueFamilyIndices = queueFamilyIndices_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialLayout = initialLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageCreateInfo *>( this );
+    }
+
+    operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::ImageCreateFlags const &,
+               VULKAN_HPP_NAMESPACE::ImageType const &,
+               VULKAN_HPP_NAMESPACE::Format const &,
+               VULKAN_HPP_NAMESPACE::Extent3D const &,
+               uint32_t const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &,
+               VULKAN_HPP_NAMESPACE::ImageTiling const &,
+               VULKAN_HPP_NAMESPACE::ImageUsageFlags const &,
+               VULKAN_HPP_NAMESPACE::SharingMode const &,
+               uint32_t const &,
+               const uint32_t * const &,
+               VULKAN_HPP_NAMESPACE::ImageLayout const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       flags,
+                       imageType,
+                       format,
+                       extent,
+                       mipLevels,
+                       arrayLayers,
+                       samples,
+                       tiling,
+                       usage,
+                       sharingMode,
+                       queueFamilyIndexCount,
+                       pQueueFamilyIndices,
+                       initialLayout );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageCreateInfo const & ) const = default;
+#else
+    bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( imageType == rhs.imageType ) && ( format == rhs.format ) &&
+             ( extent == rhs.extent ) && ( mipLevels == rhs.mipLevels ) && ( arrayLayers == rhs.arrayLayers ) && ( samples == rhs.samples ) &&
+             ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) &&
+             ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) &&
+             ( initialLayout == rhs.initialLayout );
+# endif
+    }
+
+    bool operator!=( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+    uint32_t mipLevels = {};
+    uint32_t arrayLayers = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+    uint32_t queueFamilyIndexCount = {};
+    const uint32_t * pQueueFamilyIndices = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageCreateInfo>
+  {
+    using Type = ImageCreateInfo;
+  };
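+  // Editor's note, not part of the generated header: sketch of filling ImageCreateInfo
+  // with the fluent setters above and creating the image. `device`, `width` and
+  // `height` are assumed to exist; values are illustrative.
+  //
+  //   vk::ImageCreateInfo imageInfo{};
+  //   imageInfo.setImageType( vk::ImageType::e2D )
+  //            .setFormat( vk::Format::eR8G8B8A8Unorm )
+  //            .setExtent( { width, height, 1 } )
+  //            .setMipLevels( 1 )
+  //            .setArrayLayers( 1 )
+  //            .setSamples( vk::SampleCountFlagBits::e1 )
+  //            .setTiling( vk::ImageTiling::eOptimal )
+  //            .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst )
+  //            .setInitialLayout( vk::ImageLayout::eUndefined );
+  //   vk::Image image = device.createImage( imageInfo );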
DeviceImageMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT + { + pCreateInfo = pCreateInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + { + planeAspect = planeAspect_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDeviceImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pCreateInfo, planeAspect ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceImageMemoryRequirements const & ) const = default; +#else + bool operator==( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && ( planeAspect == rhs.planeAspect ); +# endif + } + + bool operator!=( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageMemoryRequirements; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + }; + + template <> + struct CppType + { + using Type = DeviceImageMemoryRequirements; + }; + + using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements; + + struct ImageSubresource2KHR + { + using NativeType = VkImageSubresource2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSubresource2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresource2KHR( VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , imageSubresource( imageSubresource_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageSubresource2KHR( ImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresource2KHR( VkImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSubresource2KHR( *reinterpret_cast( &rhs ) ) + { + } + + ImageSubresource2KHR & operator=( ImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageSubresource2KHR & operator=( VkImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSubresource2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresource2KHR & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { 
+ imageSubresource = imageSubresource_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageSubresource2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresource2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageSubresource ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresource2KHR const & ) const = default; +#else + bool operator==( ImageSubresource2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageSubresource == rhs.imageSubresource ); +# endif + } + + bool operator!=( ImageSubresource2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSubresource2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource = {}; + }; + + template <> + struct CppType + { + using Type = ImageSubresource2KHR; + }; + + using ImageSubresource2EXT = ImageSubresource2KHR; + + struct DeviceImageSubresourceInfoKHR + { + using NativeType = VkDeviceImageSubresourceInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageSubresourceInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfoKHR( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, + const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pCreateInfo( pCreateInfo_ ) + , pSubresource( pSubresource_ ) + { + } + + VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfoKHR( DeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceImageSubresourceInfoKHR( VkDeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceImageSubresourceInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + DeviceImageSubresourceInfoKHR & operator=( DeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceImageSubresourceInfoKHR & operator=( VkDeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfoKHR & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT + { + pCreateInfo = pCreateInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfoKHR & + setPSubresource( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource_ ) VULKAN_HPP_NOEXCEPT + { + pSubresource = pSubresource_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDeviceImageSubresourceInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceImageSubresourceInfoKHR 
&() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pCreateInfo, pSubresource ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceImageSubresourceInfoKHR const & ) const = default; +#else + bool operator==( DeviceImageSubresourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && ( pSubresource == rhs.pSubresource ); +# endif + } + + bool operator!=( DeviceImageSubresourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageSubresourceInfoKHR; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {}; + const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource = {}; + }; + + template <> + struct CppType + { + using Type = DeviceImageSubresourceInfoKHR; + }; + + struct DeviceMemoryOpaqueCaptureAddressInfo + { + using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memory( memory_ ) + { + } + + VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast( &rhs ) ) + { + } + + DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceMemoryOpaqueCaptureAddressInfo & operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const & ) const = 
default; +#else + bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); +# endif + } + + bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + }; + + template <> + struct CppType + { + using Type = DeviceMemoryOpaqueCaptureAddressInfo; + }; + + using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo; + + struct DeviceMemoryOverallocationCreateInfoAMD + { + using NativeType = VkDeviceMemoryOverallocationCreateInfoAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , overallocationBehavior( overallocationBehavior_ ) + { + } + + VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryOverallocationCreateInfoAMD( *reinterpret_cast( &rhs ) ) + { + } + + DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceMemoryOverallocationCreateInfoAMD & operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & + setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT + { + overallocationBehavior = overallocationBehavior_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, overallocationBehavior ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const & ) const = default; +#else + bool operator==( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) 
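+      // Illustrative usage sketch for DeviceMemoryOpaqueCaptureAddressInfo (defined
+      // above); `device` and `memory` are assumed names:
+      //
+      //   uint64_t addr = device.getMemoryOpaqueCaptureAddress(
+      //     vk::DeviceMemoryOpaqueCaptureAddressInfo{ memory } );
+      //
+      // The opaque address can be recorded and fed back at replay time by
+      // capture/replay tooling (vkGetDeviceMemoryOpaqueCaptureAddress, core in
+      // Vulkan 1.2); the memory must have been allocated with the device-address
+      // capture-replay flag.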
+ return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( overallocationBehavior == rhs.overallocationBehavior ); +# endif + } + + bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault; + }; + + template <> + struct CppType + { + using Type = DeviceMemoryOverallocationCreateInfoAMD; + }; + + struct DeviceMemoryReportCallbackDataEXT + { + using NativeType = VkDeviceMemoryReportCallbackDataEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryReportCallbackDataEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate, + uint64_t memoryObjectId_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + uint32_t heapIndex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , type( type_ ) + , memoryObjectId( memoryObjectId_ ) + , size( size_ ) + , objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , heapIndex( heapIndex_ ) + { + } + + VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryReportCallbackDataEXT( *reinterpret_cast( &rhs ) ) + { + } + + DeviceMemoryReportCallbackDataEXT & operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, type, memoryObjectId, size, objectType, objectHandle, heapIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryReportCallbackDataEXT const & ) const = default; +#else + bool operator==( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( type == rhs.type ) && ( memoryObjectId == rhs.memoryObjectId ) && + ( size == rhs.size ) && ( objectType == rhs.objectType ) && ( objectHandle == 
rhs.objectHandle ) && ( heapIndex == rhs.heapIndex ); +# endif + } + + bool operator!=( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate; + uint64_t memoryObjectId = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + uint32_t heapIndex = {}; + }; + + template <> + struct CppType + { + using Type = DeviceMemoryReportCallbackDataEXT; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + union DeviceOrHostAddressConstAMDX + { + using NativeType = VkDeviceOrHostAddressConstAMDX; +# if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {} + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {} +# endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +# if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT + { + hostAddress = hostAddress_; + return *this; + } +# endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkDeviceOrHostAddressConstAMDX const &() const + { + return *reinterpret_cast( this ); + } + + operator VkDeviceOrHostAddressConstAMDX &() + { + return *reinterpret_cast( this ); + } + +# ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; + const void * hostAddress; +# else + VkDeviceAddress deviceAddress; + const void * hostAddress; +# endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct DevicePrivateDataCreateInfo + { + using NativeType = VkDevicePrivateDataCreateInfo; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( uint32_t privateDataSlotRequestCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , privateDataSlotRequestCount( privateDataSlotRequestCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DevicePrivateDataCreateInfo( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DevicePrivateDataCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + DevicePrivateDataCreateInfo & operator=( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DevicePrivateDataCreateInfo & operator=( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
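+    // Setter-chaining sketch: each set*() below returns *this, so the struct can be
+    // built fluently and chained into vk::DeviceCreateInfo::pNext. `deviceCreateInfo`
+    // is an assumed name:
+    //
+    //   auto privateData =
+    //     vk::DevicePrivateDataCreateInfo{}.setPrivateDataSlotRequestCount( 1 );
+    //   deviceCreateInfo.setPNext( &privateData );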
VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT + { + privateDataSlotRequestCount = privateDataSlotRequestCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDevicePrivateDataCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDevicePrivateDataCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, privateDataSlotRequestCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DevicePrivateDataCreateInfo const & ) const = default; +#else + bool operator==( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount ); +# endif + } + + bool operator!=( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfo; + const void * pNext = {}; + uint32_t privateDataSlotRequestCount = {}; + }; + + template <> + struct CppType + { + using Type = DevicePrivateDataCreateInfo; + }; + + using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo; + + struct DeviceQueueGlobalPriorityCreateInfoKHR + { + using NativeType = VkDeviceQueueGlobalPriorityCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceQueueGlobalPriorityCreateInfoKHR( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , globalPriority( globalPriority_ ) + { + } + + VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoKHR( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueGlobalPriorityCreateInfoKHR( VkDeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueGlobalPriorityCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + DeviceQueueGlobalPriorityCreateInfoKHR & operator=( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceQueueGlobalPriorityCreateInfoKHR & operator=( VkDeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoKHR & + setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ ) 
VULKAN_HPP_NOEXCEPT + { + globalPriority = globalPriority_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDeviceQueueGlobalPriorityCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueGlobalPriorityCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, globalPriority ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueGlobalPriorityCreateInfoKHR const & ) const = default; +#else + bool operator==( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriority == rhs.globalPriority ); +# endif + } + + bool operator!=( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow; + }; + + template <> + struct CppType + { + using Type = DeviceQueueGlobalPriorityCreateInfoKHR; + }; + + using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfoKHR; + + struct DeviceQueueInfo2 + { + using NativeType = VkDeviceQueueInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, + uint32_t queueIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueIndex( queueIndex_ ) + { + } + + VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceQueueInfo2( *reinterpret_cast( &rhs ) ) {} + + DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueIndex = queueIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDeviceQueueInfo2 const 
&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, queueFamilyIndex, queueIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueInfo2 const & ) const = default; +#else + bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && + ( queueIndex == rhs.queueIndex ); +# endif + } + + bool operator!=( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + uint32_t queueIndex = {}; + }; + + template <> + struct CppType + { + using Type = DeviceQueueInfo2; + }; + + struct DeviceQueueShaderCoreControlCreateInfoARM + { + using NativeType = VkDeviceQueueShaderCoreControlCreateInfoARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueShaderCoreControlCreateInfoARM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueShaderCoreControlCreateInfoARM( uint32_t shaderCoreCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderCoreCount( shaderCoreCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR DeviceQueueShaderCoreControlCreateInfoARM( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceQueueShaderCoreControlCreateInfoARM( VkDeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueShaderCoreControlCreateInfoARM( *reinterpret_cast( &rhs ) ) + { + } + + DeviceQueueShaderCoreControlCreateInfoARM & operator=( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceQueueShaderCoreControlCreateInfoARM & operator=( VkDeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM & setShaderCoreCount( uint32_t shaderCoreCount_ ) VULKAN_HPP_NOEXCEPT + { + shaderCoreCount = shaderCoreCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDeviceQueueShaderCoreControlCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceQueueShaderCoreControlCreateInfoARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, 
shaderCoreCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceQueueShaderCoreControlCreateInfoARM const & ) const = default; +#else + bool operator==( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreCount == rhs.shaderCoreCount ); +# endif + } + + bool operator!=( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueShaderCoreControlCreateInfoARM; + void * pNext = {}; + uint32_t shaderCoreCount = {}; + }; + + template <> + struct CppType + { + using Type = DeviceQueueShaderCoreControlCreateInfoARM; + }; + + struct DirectDriverLoadingInfoLUNARG + { + using NativeType = VkDirectDriverLoadingInfoLUNARG; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectDriverLoadingInfoLUNARG; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG( VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags_ = {}, + PFN_vkGetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , pfnGetInstanceProcAddr( pfnGetInstanceProcAddr_ ) + { + } + + VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG( DirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DirectDriverLoadingInfoLUNARG( VkDirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT + : DirectDriverLoadingInfoLUNARG( *reinterpret_cast( &rhs ) ) + { + } + + DirectDriverLoadingInfoLUNARG & operator=( DirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DirectDriverLoadingInfoLUNARG & operator=( VkDirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setFlags( VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & + setPfnGetInstanceProcAddr( PFN_vkGetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ ) VULKAN_HPP_NOEXCEPT + { + pfnGetInstanceProcAddr = pfnGetInstanceProcAddr_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDirectDriverLoadingInfoLUNARG const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDirectDriverLoadingInfoLUNARG &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pfnGetInstanceProcAddr ); + } +#endif + + bool operator==( DirectDriverLoadingInfoLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT + { +#if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +#else + return ( sType 
== rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnGetInstanceProcAddr == rhs.pfnGetInstanceProcAddr ); +#endif + } + + bool operator!=( DirectDriverLoadingInfoLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectDriverLoadingInfoLUNARG; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags = {}; + PFN_vkGetInstanceProcAddrLUNARG pfnGetInstanceProcAddr = {}; + }; + + template <> + struct CppType + { + using Type = DirectDriverLoadingInfoLUNARG; + }; + + struct DirectDriverLoadingListLUNARG + { + using NativeType = VkDirectDriverLoadingListLUNARG; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectDriverLoadingListLUNARG; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DirectDriverLoadingListLUNARG( + VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode_ = VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG::eExclusive, + uint32_t driverCount_ = {}, + const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , mode( mode_ ) + , driverCount( driverCount_ ) + , pDrivers( pDrivers_ ) + { + } + + VULKAN_HPP_CONSTEXPR DirectDriverLoadingListLUNARG( DirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DirectDriverLoadingListLUNARG( VkDirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT + : DirectDriverLoadingListLUNARG( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DirectDriverLoadingListLUNARG( VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drivers_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), mode( mode_ ), driverCount( static_cast( drivers_.size() ) ), pDrivers( drivers_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DirectDriverLoadingListLUNARG & operator=( DirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DirectDriverLoadingListLUNARG & operator=( VkDirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setMode( VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setDriverCount( uint32_t driverCount_ ) VULKAN_HPP_NOEXCEPT + { + driverCount = driverCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & + setPDrivers( const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers_ ) VULKAN_HPP_NOEXCEPT + { + pDrivers = pDrivers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DirectDriverLoadingListLUNARG & setDrivers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drivers_ ) VULKAN_HPP_NOEXCEPT + { + driverCount = static_cast( drivers_.size() ); + pDrivers = drivers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif 
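+// Illustrative usage sketch for DirectDriverLoadingListLUNARG: in enhanced mode the
+// ArrayProxy constructor above derives driverCount/pDrivers from a container.
+// `drivers` (e.g. a std::array<vk::DirectDriverLoadingInfoLUNARG, 1>) and
+// `instanceCreateInfo` are assumed names:
+//
+//   vk::DirectDriverLoadingListLUNARG list{
+//     vk::DirectDriverLoadingModeLUNARG::eExclusive, drivers };
+//   instanceCreateInfo.setPNext( &list );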
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDirectDriverLoadingListLUNARG const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDirectDriverLoadingListLUNARG &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mode, driverCount, pDrivers ); + } +#endif + + bool operator==( DirectDriverLoadingListLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT + { +#if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +#else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( driverCount == rhs.driverCount ) && ( pDrivers == rhs.pDrivers ); +#endif + } + + bool operator!=( DirectDriverLoadingListLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectDriverLoadingListLUNARG; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode = VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG::eExclusive; + uint32_t driverCount = {}; + const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers = {}; + }; + + template <> + struct CppType + { + using Type = DirectDriverLoadingListLUNARG; + }; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + struct DirectFBSurfaceCreateInfoEXT + { + using NativeType = VkDirectFBSurfaceCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {}, + IDirectFB * dfb_ = {}, + IDirectFBSurface * surface_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , dfb( dfb_ ) + , surface( surface_ ) + { + } + + VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DirectFBSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + DirectFBSurfaceCreateInfoEXT & operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB * dfb_ ) VULKAN_HPP_NOEXCEPT + { + dfb = dfb_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface * surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDirectFBSurfaceCreateInfoEXT const &() 
const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, dfb, surface ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DirectFBSurfaceCreateInfoEXT const & ) const = default; +# else + bool operator==( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dfb == rhs.dfb ) && ( surface == rhs.surface ); +# endif + } + + bool operator!=( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags = {}; + IDirectFB * dfb = {}; + IDirectFBSurface * surface = {}; + }; + + template <> + struct CppType + { + using Type = DirectFBSurfaceCreateInfoEXT; + }; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct DispatchGraphCountInfoAMDX + { + using NativeType = VkDispatchGraphCountInfoAMDX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX( uint32_t count_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX infos_ = {}, + uint64_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : count( count_ ) + , infos( infos_ ) + , stride( stride_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX( DispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DispatchGraphCountInfoAMDX( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : DispatchGraphCountInfoAMDX( *reinterpret_cast( &rhs ) ) + { + } + + DispatchGraphCountInfoAMDX & operator=( DispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DispatchGraphCountInfoAMDX & operator=( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setCount( uint32_t count_ ) VULKAN_HPP_NOEXCEPT + { + count = count_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setInfos( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const & infos_ ) VULKAN_HPP_NOEXCEPT + { + infos = infos_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setStride( uint64_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDispatchGraphCountInfoAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDispatchGraphCountInfoAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( count, infos, stride ); + } +# endif + + public: + uint32_t count = {}; + 
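+    // Illustrative sketch (VK_AMDX_shader_enqueue is a beta extension and may change):
+    // `infos` below is a DeviceOrHostAddressConstAMDX union, so it can carry either a
+    // device address or a host pointer to DispatchGraphInfoAMDX records. `graphInfo`
+    // is an assumed name:
+    //
+    //   vk::DispatchGraphCountInfoAMDX countInfo{};
+    //   countInfo.setCount( 1 )
+    //            .setInfos( vk::DeviceOrHostAddressConstAMDX( &graphInfo ) )
+    //            .setStride( sizeof( vk::DispatchGraphInfoAMDX ) );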
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX infos = {}; + uint64_t stride = {}; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct DispatchGraphInfoAMDX + { + using NativeType = VkDispatchGraphInfoAMDX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX( uint32_t nodeIndex_ = {}, + uint32_t payloadCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX payloads_ = {}, + uint64_t payloadStride_ = {} ) VULKAN_HPP_NOEXCEPT + : nodeIndex( nodeIndex_ ) + , payloadCount( payloadCount_ ) + , payloads( payloads_ ) + , payloadStride( payloadStride_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DispatchGraphInfoAMDX( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : DispatchGraphInfoAMDX( *reinterpret_cast( &rhs ) ) + { + } + + DispatchGraphInfoAMDX & operator=( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DispatchGraphInfoAMDX & operator=( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setNodeIndex( uint32_t nodeIndex_ ) VULKAN_HPP_NOEXCEPT + { + nodeIndex = nodeIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadCount( uint32_t payloadCount_ ) VULKAN_HPP_NOEXCEPT + { + payloadCount = payloadCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloads( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const & payloads_ ) VULKAN_HPP_NOEXCEPT + { + payloads = payloads_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadStride( uint64_t payloadStride_ ) VULKAN_HPP_NOEXCEPT + { + payloadStride = payloadStride_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDispatchGraphInfoAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDispatchGraphInfoAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( nodeIndex, payloadCount, payloads, payloadStride ); + } +# endif + + public: + uint32_t nodeIndex = {}; + uint32_t payloadCount = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX payloads = {}; + uint64_t payloadStride = {}; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct DispatchIndirectCommand + { + using NativeType = VkDispatchIndirectCommand; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT + : x( x_ ) + , y( y_ ) + , z( z_ ) + { + } + + VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + : DispatchIndirectCommand( *reinterpret_cast( &rhs ) ) + { + } + + DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { 
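+      // Illustrative usage sketch: DispatchIndirectCommand is layout-compatible with
+      // VkDispatchIndirectCommand, so it can be written into an indirect buffer and
+      // consumed by vkCmdDispatchIndirect. `mapped`, `indirectBuffer` and
+      // `commandBuffer` are assumed names:
+      //
+      //   vk::DispatchIndirectCommand cmd{ 64, 1, 1 };  // 64 x 1 x 1 workgroups
+      //   std::memcpy( mapped, &cmd, sizeof( cmd ) );
+      //   commandBuffer.dispatchIndirect( indirectBuffer, 0 );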
+      *this = *reinterpret_cast<DispatchIndirectCommand const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDispatchIndirectCommand *>( this );
+    }
+
+    operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDispatchIndirectCommand *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( x, y, z );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DispatchIndirectCommand const & ) const = default;
+#else
+    bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
+#  endif
+    }
+
+    bool operator!=( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t x = {};
+    uint32_t y = {};
+    uint32_t z = {};
+  };
+
+  struct DisplayEventInfoEXT
+  {
+    using NativeType = VkDisplayEventInfoEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut,
+                           const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , displayEvent( displayEvent_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayEventInfoEXT( *reinterpret_cast<DisplayEventInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayEvent = displayEvent_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayEventInfoEXT *>( this );
+    }
+
+    operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayEventInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, displayEvent );
+    }
+#endif
+
+#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayEventInfoEXT const & ) const = default; +#else + bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayEvent == rhs.displayEvent ); +# endif + } + + bool operator!=( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut; + }; + + template <> + struct CppType + { + using Type = DisplayEventInfoEXT; + }; + + struct DisplayModeParametersKHR + { + using NativeType = VkDisplayModeParametersKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT + : visibleRegion( visibleRegion_ ) + , refreshRate( refreshRate_ ) + { + } + + VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModeParametersKHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT + { + visibleRegion = visibleRegion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT + { + refreshRate = refreshRate_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( visibleRegion, refreshRate ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModeParametersKHR const & ) const = default; +#else + bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( visibleRegion == rhs.visibleRegion ) && ( refreshRate == rhs.refreshRate ); +# endif + } + + bool operator!=( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {}; + uint32_t refreshRate = {}; + }; + + struct DisplayModeCreateInfoKHR + { + using NativeType = VkDisplayModeCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::eDisplayModeCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , parameters( parameters_ ) + { + } + + VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModeCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT + { + parameters = parameters_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, parameters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModeCreateInfoKHR const & ) const = default; +#else + bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( parameters == rhs.parameters ); +# endif + } + + bool operator!=( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; + }; + + template <> + struct CppType + { + using Type = DisplayModeCreateInfoKHR; + }; + + struct DisplayModePropertiesKHR + { + using NativeType = VkDisplayModePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT + : displayMode( displayMode_ ) + , parameters( parameters_ ) + { + } + + VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
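+    // Illustrative usage sketch for DisplayModeParametersKHR / DisplayModeCreateInfoKHR
+    // (defined above); refreshRate is in millihertz, so 60 Hz is 60000. `gpu` and
+    // `display` are assumed names:
+    //
+    //   vk::DisplayModeCreateInfoKHR createInfo{
+    //     {}, vk::DisplayModeParametersKHR{ { 1920, 1080 }, 60000 } };
+    //   vk::DisplayModeKHR mode = gpu.createDisplayModeKHR( display, createInfo );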
+    DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayModePropertiesKHR( *reinterpret_cast<DisplayModePropertiesKHR const *>( &rhs ) )
+    {
+    }
+
+    DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModePropertiesKHR *>( this );
+    }
+
+    operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModePropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::DisplayModeKHR const &, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( displayMode, parameters );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayModePropertiesKHR const & ) const = default;
+#else
+    bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( displayMode == rhs.displayMode ) && ( parameters == rhs.parameters );
+#  endif
+    }
+
+    bool operator!=( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR           displayMode = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters  = {};
+  };
+
+  struct DisplayModeProperties2KHR
+  {
+    using NativeType = VkDisplayModeProperties2KHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {},
+                                                    void *                                         pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , displayModeProperties( displayModeProperties_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayModeProperties2KHR( *reinterpret_cast<DisplayModeProperties2KHR const *>( &rhs ) )
+    {
+    }
+
+    DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeProperties2KHR *>( this );
+    }
+
+    operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeProperties2KHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, displayModeProperties );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayModeProperties2KHR const & ) const = default;
+#else
+    bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayModeProperties == rhs.displayModeProperties );
+#  endif
+    }
+
+    bool operator!=( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType            sType                 = StructureType::eDisplayModeProperties2KHR;
+    void *                                         pNext                 = {};
+    VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayModeProperties2KHR>
+  {
+    using Type = DisplayModeProperties2KHR;
+  };
+
+  struct DisplayNativeHdrSurfaceCapabilitiesAMD
+  {
+    using NativeType = VkDisplayNativeHdrSurfaceCapabilitiesAMD;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {},
+                                                                 void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , localDimmingSupport( localDimmingSupport_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayNativeHdrSurfaceCapabilitiesAMD( *reinterpret_cast<DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs ) )
+    {
+    }
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs );
+      return *this;
+    }
+
+    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD *>( this );
+    }
+
+    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, localDimmingSupport );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const & ) const = default;
+#else
+    bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingSupport == rhs.localDimmingSupport );
+#  endif
+    }
+
+    bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
+    void *                              pNext               = {};
+    VULKAN_HPP_NAMESPACE::Bool32        localDimmingSupport = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD>
+  {
+    using Type = DisplayNativeHdrSurfaceCapabilitiesAMD;
+  };
+
+  struct DisplayPlaneCapabilitiesKHR
+  {
+    using NativeType = VkDisplayPlaneCapabilitiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {},
+                                                      VULKAN_HPP_NAMESPACE::Offset2D                  minSrcPosition_ = {},
+                                                      VULKAN_HPP_NAMESPACE::Offset2D                  maxSrcPosition_ = {},
+                                                      VULKAN_HPP_NAMESPACE::Extent2D                  minSrcExtent_   = {},
+                                                      VULKAN_HPP_NAMESPACE::Extent2D                  maxSrcExtent_   = {},
+                                                      VULKAN_HPP_NAMESPACE::Offset2D                  minDstPosition_ = {},
+                                                      VULKAN_HPP_NAMESPACE::Offset2D                  maxDstPosition_ = {},
+                                                      VULKAN_HPP_NAMESPACE::Extent2D                  minDstExtent_   = {},
+                                                      VULKAN_HPP_NAMESPACE::Extent2D                  maxDstExtent_   = {} ) VULKAN_HPP_NOEXCEPT
+      : supportedAlpha( supportedAlpha_ )
+      , minSrcPosition( minSrcPosition_ )
+      , maxSrcPosition( maxSrcPosition_ )
+      , minSrcExtent( minSrcExtent_ )
+      , maxSrcExtent( maxSrcExtent_ )
+      , minDstPosition( minDstPosition_ )
+      , maxDstPosition( maxDstPosition_ )
+      , minDstExtent( minDstExtent_ )
+      , maxDstExtent( maxDstExtent_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayPlaneCapabilitiesKHR( *reinterpret_cast<DisplayPlaneCapabilitiesKHR const *>( &rhs ) )
+    {
+    }
+
+    DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR *>( this );
+    }
+
+    operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR const &,
+               VULKAN_HPP_NAMESPACE::Offset2D const &,
+               VULKAN_HPP_NAMESPACE::Offset2D const &,
+               VULKAN_HPP_NAMESPACE::Extent2D const &,
+               VULKAN_HPP_NAMESPACE::Extent2D const &,
+               VULKAN_HPP_NAMESPACE::Offset2D const &,
+               VULKAN_HPP_NAMESPACE::Offset2D const &,
+               VULKAN_HPP_NAMESPACE::Extent2D const &,
+               VULKAN_HPP_NAMESPACE::Extent2D const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( supportedAlpha, minSrcPosition, maxSrcPosition, minSrcExtent, maxSrcExtent, minDstPosition, maxDstPosition, minDstExtent, maxDstExtent );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( DisplayPlaneCapabilitiesKHR const & ) const = default;
+#else
+    bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( supportedAlpha == rhs.supportedAlpha ) && ( minSrcPosition == rhs.minSrcPosition ) && ( maxSrcPosition == rhs.maxSrcPosition ) &&
+             ( minSrcExtent == rhs.minSrcExtent ) && ( maxSrcExtent == rhs.maxSrcExtent ) && ( minDstPosition == rhs.minDstPosition ) &&
+             ( maxDstPosition == rhs.maxDstPosition ) && ( minDstExtent == rhs.minDstExtent ) && ( maxDstExtent == rhs.maxDstExtent );
+#  endif
+    }
+
+    bool operator!=( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {};
+    VULKAN_HPP_NAMESPACE::Offset2D                  minSrcPosition = {};
+    VULKAN_HPP_NAMESPACE::Offset2D                  maxSrcPosition = {};
+    VULKAN_HPP_NAMESPACE::Extent2D                  minSrcExtent   = {};
+    VULKAN_HPP_NAMESPACE::Extent2D                  maxSrcExtent   = {};
+    VULKAN_HPP_NAMESPACE::Offset2D                  minDstPosition = {};
+    VULKAN_HPP_NAMESPACE::Offset2D                  maxDstPosition = {};
+    VULKAN_HPP_NAMESPACE::Extent2D                  minDstExtent   = {};
+    VULKAN_HPP_NAMESPACE::Extent2D                  maxDstExtent   = {};
+  };
+
+  struct DisplayPlaneCapabilities2KHR
+  {
+    using NativeType = VkDisplayPlaneCapabilities2KHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR;
+
+#if
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , capabilities( capabilities_ ) + { + } + + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneCapabilities2KHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, capabilities ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneCapabilities2KHR const & ) const = default; +#else + bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilities == rhs.capabilities ); +# endif + } + + bool operator!=( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {}; + }; + + template <> + struct CppType + { + using Type = DisplayPlaneCapabilities2KHR; + }; + + struct DisplayPlaneInfo2KHR + { + using NativeType = VkDisplayPlaneInfo2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , mode( mode_ ) + , planeIndex( planeIndex_ ) + { + } + + VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneInfo2KHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
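+    // Editor's note (illustrative sketch, not part of the generated header): DisplayPlaneInfo2KHR is the
+    // input to vkGetDisplayPlaneCapabilities2KHR. Assuming `mode` and `planeIndex` were obtained from the
+    // display-properties queries earlier in this header, a Vulkan-Hpp query would look roughly like:
+    //   vk::DisplayPlaneInfo2KHR info{ mode, planeIndex };
+    //   vk::DisplayPlaneCapabilities2KHR caps = physicalDevice.getDisplayPlaneCapabilities2KHR( info );
+    // The fluent setters below build the same struct incrementally.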
DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeIndex = planeIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mode, planeIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneInfo2KHR const & ) const = default; +#else + bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( planeIndex == rhs.planeIndex ); +# endif + } + + bool operator!=( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {}; + uint32_t planeIndex = {}; + }; + + template <> + struct CppType + { + using Type = DisplayPlaneInfo2KHR; + }; + + struct DisplayPlanePropertiesKHR + { + using NativeType = VkDisplayPlanePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, + uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : currentDisplay( currentDisplay_ ) + , currentStackIndex( currentStackIndex_ ) + { + } + + VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlanePropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( currentDisplay, currentStackIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlanePropertiesKHR const & ) const = default; +#else + bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( currentDisplay == rhs.currentDisplay ) && ( currentStackIndex == rhs.currentStackIndex ); +# endif + } + + bool operator!=( 
DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {}; + uint32_t currentStackIndex = {}; + }; + + struct DisplayPlaneProperties2KHR + { + using NativeType = VkDisplayPlaneProperties2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , displayPlaneProperties( displayPlaneProperties_ ) + { + } + + VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneProperties2KHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, displayPlaneProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneProperties2KHR const & ) const = default; +#else + bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPlaneProperties == rhs.displayPlaneProperties ); +# endif + } + + bool operator!=( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {}; + }; + + template <> + struct CppType + { + using Type = DisplayPlaneProperties2KHR; + }; + + struct DisplayPowerInfoEXT + { + using NativeType = VkDisplayPowerInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , powerState( powerState_ ) + { + } + + VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayPowerInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + DisplayPowerInfoEXT & 
operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT + { + powerState = powerState_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, powerState ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPowerInfoEXT const & ) const = default; +#else + bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( powerState == rhs.powerState ); +# endif + } + + bool operator!=( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff; + }; + + template <> + struct CppType + { + using Type = DisplayPowerInfoEXT; + }; + + struct DisplayPresentInfoKHR + { + using NativeType = VkDisplayPresentInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, + VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , srcRect( srcRect_ ) + , dstRect( dstRect_ ) + , persistent( persistent_ ) + { + } + + VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPresentInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT + { + srcRect = srcRect_; + return *this; 
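+      // Editor's note (illustrative, not part of the generated header): DisplayPresentInfoKHR extends
+      // VkPresentInfoKHR through its pNext chain (VK_KHR_display_swapchain). A hedged sketch of fluent use,
+      // assuming `rect` and `presentInfo` (a vk::PresentInfoKHR) exist:
+      //   auto dpi = vk::DisplayPresentInfoKHR{}.setSrcRect( rect ).setDstRect( rect ).setPersistent( VK_FALSE );
+      //   presentInfo.setPNext( &dpi );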
+ } + + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT + { + dstRect = dstRect_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT + { + persistent = persistent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcRect, dstRect, persistent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPresentInfoKHR const & ) const = default; +#else + bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcRect == rhs.srcRect ) && ( dstRect == rhs.dstRect ) && ( persistent == rhs.persistent ); +# endif + } + + bool operator!=( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Rect2D srcRect = {}; + VULKAN_HPP_NAMESPACE::Rect2D dstRect = {}; + VULKAN_HPP_NAMESPACE::Bool32 persistent = {}; + }; + + template <> + struct CppType + { + using Type = DisplayPresentInfoKHR; + }; + + struct DisplayPropertiesKHR + { + using NativeType = VkDisplayPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, + const char * displayName_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT + : display( display_ ) + , displayName( displayName_ ) + , physicalDimensions( physicalDimensions_ ) + , physicalResolution( physicalResolution_ ) + , supportedTransforms( supportedTransforms_ ) + , planeReorderPossible( planeReorderPossible_ ) + , persistentContent( persistentContent_ ) + { + } + + VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION 
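+    // Editor's note: unlike most structs in this header, DisplayPropertiesKHR cannot default its
+    // comparisons, because displayName is a C string; the operators below compare the pointers first and
+    // fall back to strcmp for a deep comparison. Callers should ensure displayName is non-null on both
+    // sides before relying on these operators (strcmp on a null pointer is undefined behaviour).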
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::DisplayKHR const &,
+               const char * const &,
+               VULKAN_HPP_NAMESPACE::Extent2D const &,
+               VULKAN_HPP_NAMESPACE::Extent2D const &,
+               VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( display, displayName, physicalDimensions, physicalResolution, supportedTransforms, planeReorderPossible, persistentContent );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    std::strong_ordering operator<=>( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = display <=> rhs.display; cmp != 0 )
+        return cmp;
+      if ( displayName != rhs.displayName )
+        if ( auto cmp = strcmp( displayName, rhs.displayName ); cmp != 0 )
+          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = physicalDimensions <=> rhs.physicalDimensions; cmp != 0 )
+        return cmp;
+      if ( auto cmp = physicalResolution <=> rhs.physicalResolution; cmp != 0 )
+        return cmp;
+      if ( auto cmp = supportedTransforms <=> rhs.supportedTransforms; cmp != 0 )
+        return cmp;
+      if ( auto cmp = planeReorderPossible <=> rhs.planeReorderPossible; cmp != 0 )
+        return cmp;
+      if ( auto cmp = persistentContent <=> rhs.persistentContent; cmp != 0 )
+        return cmp;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( display == rhs.display ) && ( ( displayName == rhs.displayName ) || ( strcmp( displayName, rhs.displayName ) == 0 ) ) &&
+             ( physicalDimensions == rhs.physicalDimensions ) && ( physicalResolution == rhs.physicalResolution ) &&
+             ( supportedTransforms == rhs.supportedTransforms ) && ( planeReorderPossible == rhs.planeReorderPossible ) &&
+             ( persistentContent == rhs.persistentContent );
+    }
+
+    bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayKHR               display              = {};
+    const char *                                   displayName          = {};
+    VULKAN_HPP_NAMESPACE::Extent2D                 physicalDimensions   = {};
+    VULKAN_HPP_NAMESPACE::Extent2D                 physicalResolution   = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms  = {};
+    VULKAN_HPP_NAMESPACE::Bool32                   planeReorderPossible = {};
+    VULKAN_HPP_NAMESPACE::Bool32                   persistentContent    = {};
+  };
+
+  struct DisplayProperties2KHR
+  {
+    using NativeType = VkDisplayProperties2KHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {},
+                                                void *                                     pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , displayProperties( displayProperties_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : DisplayProperties2KHR( *reinterpret_cast<DisplayProperties2KHR const *>( &rhs ) )
+    {
+    }
+
+    DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayProperties2KHR *>( this );
+    }
+
+    operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayProperties2KHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <=
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, displayProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayProperties2KHR const & ) const = default; +#else + bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayProperties == rhs.displayProperties ); +# endif + } + + bool operator!=( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {}; + }; + + template <> + struct CppType + { + using Type = DisplayProperties2KHR; + }; + + struct DisplaySurfaceCreateInfoKHR + { + using NativeType = VkDisplaySurfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, + uint32_t planeIndex_ = {}, + uint32_t planeStackIndex_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + float globalAlpha_ = {}, + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , displayMode( displayMode_ ) + , planeIndex( planeIndex_ ) + , planeStackIndex( planeStackIndex_ ) + , transform( transform_ ) + , globalAlpha( globalAlpha_ ) + , alphaMode( alphaMode_ ) + , imageExtent( imageExtent_ ) + { + } + + VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplaySurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT + { + displayMode = displayMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeIndex = 
planeIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeStackIndex = planeStackIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT + { + globalAlpha = globalAlpha_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT + { + alphaMode = alphaMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, displayMode, planeIndex, planeStackIndex, transform, globalAlpha, alphaMode, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplaySurfaceCreateInfoKHR const & ) const = default; +#else + bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( displayMode == rhs.displayMode ) && + ( planeIndex == rhs.planeIndex ) && ( planeStackIndex == rhs.planeStackIndex ) && ( transform == rhs.transform ) && + ( globalAlpha == rhs.globalAlpha ) && ( alphaMode == rhs.alphaMode ) && ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + uint32_t planeIndex = {}; + uint32_t planeStackIndex = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + float globalAlpha = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque; + VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; + }; + + template <> + struct CppType + { + using Type = DisplaySurfaceCreateInfoKHR; + }; + + struct DrawIndexedIndirectCommand + { + using NativeType = VkDrawIndexedIndirectCommand; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {}, + uint32_t instanceCount_ = {}, + uint32_t firstIndex_ = {}, + int32_t vertexOffset_ = {}, + uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT + : indexCount( indexCount_ ) + , instanceCount( 
instanceCount_ ) + , firstIndex( firstIndex_ ) + , vertexOffset( vertexOffset_ ) + , firstInstance( firstInstance_ ) + { + } + + VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawIndexedIndirectCommand( *reinterpret_cast( &rhs ) ) + { + } + + DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT + { + indexCount = indexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT + { + firstIndex = firstIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT + { + vertexOffset = vertexOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + { + firstInstance = firstInstance_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawIndexedIndirectCommand const & ) const = default; +#else + bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( indexCount == rhs.indexCount ) && ( instanceCount == rhs.instanceCount ) && ( firstIndex == rhs.firstIndex ) && + ( vertexOffset == rhs.vertexOffset ) && ( firstInstance == rhs.firstInstance ); +# endif + } + + bool operator!=( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t indexCount = {}; + uint32_t instanceCount = {}; + uint32_t firstIndex = {}; + int32_t vertexOffset = {}; + uint32_t firstInstance = {}; + }; + + struct DrawIndirectCommand + { + using NativeType = VkDrawIndirectCommand; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {}, + uint32_t instanceCount_ = {}, + uint32_t firstVertex_ = {}, + uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexCount( vertexCount_ ) + , instanceCount( instanceCount_ ) + , firstVertex( firstVertex_ ) + , firstInstance( firstInstance_ ) + { + } + + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs 
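+    // Editor's note (illustrative sketch, not part of the generated header): DrawIndexedIndirectCommand
+    // above is layout-compatible with VkDrawIndexedIndirectCommand (see its NativeType and the conversion
+    // operators), so indirect-draw records can be written straight into a mapped buffer, e.g. assuming
+    // `mappedPtr` points at host-visible buffer memory:
+    //   auto * cmds = static_cast<vk::DrawIndexedIndirectCommand *>( mappedPtr );
+    //   cmds[0]     = vk::DrawIndexedIndirectCommand{}.setIndexCount( 36 ).setInstanceCount( 1 );
+    // The same pattern applies to DrawIndirectCommand below with vkCmdDrawIndirect.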
) VULKAN_HPP_NOEXCEPT = default; + + DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT : DrawIndirectCommand( *reinterpret_cast( &rhs ) ) + { + } + + DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexCount = vertexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + { + firstVertex = firstVertex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + { + firstInstance = firstInstance_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( vertexCount, instanceCount, firstVertex, firstInstance ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawIndirectCommand const & ) const = default; +#else + bool operator==( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( vertexCount == rhs.vertexCount ) && ( instanceCount == rhs.instanceCount ) && ( firstVertex == rhs.firstVertex ) && + ( firstInstance == rhs.firstInstance ); +# endif + } + + bool operator!=( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t vertexCount = {}; + uint32_t instanceCount = {}; + uint32_t firstVertex = {}; + uint32_t firstInstance = {}; + }; + + struct DrawMeshTasksIndirectCommandEXT + { + using NativeType = VkDrawMeshTasksIndirectCommandEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DrawMeshTasksIndirectCommandEXT( uint32_t groupCountX_ = {}, uint32_t groupCountY_ = {}, uint32_t groupCountZ_ = {} ) VULKAN_HPP_NOEXCEPT + : groupCountX( groupCountX_ ) + , groupCountY( groupCountY_ ) + , groupCountZ( groupCountZ_ ) + { + } + + VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandEXT( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawMeshTasksIndirectCommandEXT( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawMeshTasksIndirectCommandEXT( *reinterpret_cast( &rhs ) ) + { + } + + DrawMeshTasksIndirectCommandEXT & operator=( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DrawMeshTasksIndirectCommandEXT & operator=( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountX( uint32_t groupCountX_ ) VULKAN_HPP_NOEXCEPT + { + groupCountX = groupCountX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountY( uint32_t groupCountY_ ) VULKAN_HPP_NOEXCEPT + { + groupCountY = groupCountY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountZ( uint32_t groupCountZ_ ) VULKAN_HPP_NOEXCEPT + { + groupCountZ = groupCountZ_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDrawMeshTasksIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawMeshTasksIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( groupCountX, groupCountY, groupCountZ ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawMeshTasksIndirectCommandEXT const & ) const = default; +#else + bool operator==( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( groupCountX == rhs.groupCountX ) && ( groupCountY == rhs.groupCountY ) && ( groupCountZ == rhs.groupCountZ ); +# endif + } + + bool operator!=( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t groupCountX = {}; + uint32_t groupCountY = {}; + uint32_t groupCountZ = {}; + }; + + struct DrawMeshTasksIndirectCommandNV + { + using NativeType = VkDrawMeshTasksIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {}, uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT + : taskCount( taskCount_ ) + , firstTask( firstTask_ ) + { + } + + VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawMeshTasksIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + DrawMeshTasksIndirectCommandNV & operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT + { + taskCount = taskCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT + { + firstTask = firstTask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() 
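+    // Editor's note: the NV mesh-shading indirect record below differs from the EXT one above — NV
+    // consumes a linear ( taskCount, firstTask ) pair, while EXT takes a 3D ( groupCountX/Y/Z ) dispatch,
+    // matching vkCmdDrawMeshTasksIndirectNV and vkCmdDrawMeshTasksIndirectEXT respectively.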
const VULKAN_HPP_NOEXCEPT + { + return std::tie( taskCount, firstTask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawMeshTasksIndirectCommandNV const & ) const = default; +#else + bool operator==( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( taskCount == rhs.taskCount ) && ( firstTask == rhs.firstTask ); +# endif + } + + bool operator!=( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t taskCount = {}; + uint32_t firstTask = {}; + }; + + struct DrmFormatModifierProperties2EXT + { + using NativeType = VkDrmFormatModifierProperties2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( uint64_t drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ) + , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) + , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ ) + { + } + + VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierProperties2EXT( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierProperties2EXT( *reinterpret_cast( &rhs ) ) + { + } + + DrmFormatModifierProperties2EXT & operator=( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DrmFormatModifierProperties2EXT & operator=( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDrmFormatModifierProperties2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrmFormatModifierProperties2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierProperties2EXT const & ) const = default; +#else + bool operator==( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && + ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); +# endif + } + + bool operator!=( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures = {}; + }; + + struct DrmFormatModifierPropertiesEXT + { + using NativeType = VkDrmFormatModifierPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( uint64_t 
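+    // Editor's note: DrmFormatModifierPropertiesEXT is the original FormatFeatureFlags variant of the
+    // FormatFeatureFlags2-based DrmFormatModifierProperties2EXT above; both describe Linux DRM format
+    // modifiers reported through VK_EXT_image_drm_format_modifier.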
drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ) + , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) + , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ ) + { + } + + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierPropertiesEXT const & ) const = default; +#else + bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && + ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); +# endif + } + + bool operator!=( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {}; + }; + + struct DrmFormatModifierPropertiesList2EXT + { + using NativeType = VkDrmFormatModifierPropertiesList2EXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesList2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( uint32_t drmFormatModifierCount_ = {}, + VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , drmFormatModifierCount( drmFormatModifierCount_ ) + , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ ) + { + } + + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierPropertiesList2EXT( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierPropertiesList2EXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DrmFormatModifierPropertiesList2EXT( + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifierProperties_, + void * pNext_ = nullptr ) + : pNext( pNext_ ) + , drmFormatModifierCount( static_cast( drmFormatModifierProperties_.size() ) ) + , pDrmFormatModifierProperties( drmFormatModifierProperties_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DrmFormatModifierPropertiesList2EXT & operator=( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DrmFormatModifierPropertiesList2EXT & operator=( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDrmFormatModifierPropertiesList2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrmFormatModifierPropertiesList2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierPropertiesList2EXT const & ) const = default; +#else + bool operator==( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && + ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); +# endif + } + + bool operator!=( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesList2EXT; + void * pNext = {}; + uint32_t drmFormatModifierCount = {}; + VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties = {}; + }; + + template <> + struct CppType + { + using Type = DrmFormatModifierPropertiesList2EXT; + }; + + struct DrmFormatModifierPropertiesListEXT + { + using NativeType = VkDrmFormatModifierPropertiesListEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = {}, + VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , drmFormatModifierCount( drmFormatModifierCount_ ) + , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ ) + { + } + + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierPropertiesListEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DrmFormatModifierPropertiesListEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifierProperties_, + void * pNext_ = nullptr ) + : pNext( pNext_ ) + , drmFormatModifierCount( 
static_cast( drmFormatModifierProperties_.size() ) ) + , pDrmFormatModifierProperties( drmFormatModifierProperties_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DrmFormatModifierPropertiesListEXT & operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierPropertiesListEXT const & ) const = default; +#else + bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && + ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); +# endif + } + + bool operator!=( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT; + void * pNext = {}; + uint32_t drmFormatModifierCount = {}; + VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {}; + }; + + template <> + struct CppType + { + using Type = DrmFormatModifierPropertiesListEXT; + }; + + struct EventCreateInfo + { + using NativeType = VkEventCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : EventCreateInfo( *reinterpret_cast( &rhs ) ) {} + + EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( 
this ); + } + + operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( EventCreateInfo const & ) const = default; +#else + bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {}; + }; + + template <> + struct CppType + { + using Type = EventCreateInfo; + }; + + struct PipelineLibraryCreateInfoKHR + { + using NativeType = VkPipelineLibraryCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , libraryCount( libraryCount_ ) + , pLibraries( pLibraries_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineLibraryCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLibraryCreateInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), libraryCount( static_cast( libraries_.size() ) ), pLibraries( libraries_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT + { + libraryCount = libraryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT + { + pLibraries = pLibraries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLibraryCreateInfoKHR & + setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_ ) VULKAN_HPP_NOEXCEPT + { + libraryCount = static_cast( libraries_.size() ); + pLibraries = libraries_.data(); + return *this; + } +# endif 
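+    // Editor's note (illustrative sketch, not part of the generated header): PipelineLibraryCreateInfoKHR
+    // lists previously created library pipelines (VK_KHR_pipeline_library) to link into a new pipeline.
+    // Assuming `vertexLib` and `fragmentLib` are existing vk::Pipeline library handles, the enhanced-mode
+    // array setter above allows:
+    //   std::array<vk::Pipeline, 2> libs{ vertexLib, fragmentLib };
+    //   auto libInfo = vk::PipelineLibraryCreateInfoKHR{}.setLibraries( libs );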
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineLibraryCreateInfoKHR *>( this );
+    }
+
+    operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineLibraryCreateInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Pipeline * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, libraryCount, pLibraries );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) && ( pLibraries == rhs.pLibraries );
+#  endif
+    }
+
+    bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType    sType        = StructureType::ePipelineLibraryCreateInfoKHR;
+    const void *                           pNext        = {};
+    uint32_t                               libraryCount = {};
+    const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries   = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineLibraryCreateInfoKHR>
+  {
+    using Type = PipelineLibraryCreateInfoKHR;
+  };
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct ExecutionGraphPipelineCreateInfoAMDX
+  {
+    using NativeType = VkExecutionGraphPipelineCreateInfoAMDX;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineCreateInfoAMDX;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX( VULKAN_HPP_NAMESPACE::PipelineCreateFlags                   flags_              = {},
+                                                               uint32_t                                                    stageCount_         = {},
+                                                               const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_            = {},
+                                                               const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR *  pLibraryInfo_       = {},
+                                                               VULKAN_HPP_NAMESPACE::PipelineLayout                        layout_             = {},
+                                                               VULKAN_HPP_NAMESPACE::Pipeline                              basePipelineHandle_ = {},
+                                                               int32_t                                                     basePipelineIndex_  = {},
+                                                               const void *                                                pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , stageCount( stageCount_ )
+      , pStages( pStages_ )
+      , pLibraryInfo( pLibraryInfo_ )
+      , layout( layout_ )
+      , basePipelineHandle( basePipelineHandle_ )
+      , basePipelineIndex( basePipelineIndex_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExecutionGraphPipelineCreateInfoAMDX( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExecutionGraphPipelineCreateInfoAMDX( *reinterpret_cast<ExecutionGraphPipelineCreateInfoAMDX const *>( &rhs ) )
+    {
+    }
+
+#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ExecutionGraphPipelineCreateInfoAMDX(
+      VULKAN_HPP_NAMESPACE::PipelineCreateFlags                                                                        flags_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_,
+      const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR *                                                       pLibraryInfo_       = {},
+      VULKAN_HPP_NAMESPACE::PipelineLayout                                                                             layout_             = {},
+      VULKAN_HPP_NAMESPACE::Pipeline                                                                                   basePipelineHandle_ = {},
+      int32_t                                                                                                          basePipelineIndex_  = {},
+      const void *                                                                                                     pNext_              = nullptr )
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , stageCount( static_cast<uint32_t>( stages_.size() ) )
+      , pStages( stages_.data() )
+      , pLibraryInfo( pLibraryInfo_ )
+      , layout( layout_ )
+      , basePipelineHandle( basePipelineHandle_ )
+      , basePipelineIndex( basePipelineIndex_ )
+    {
+    }
+#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    ExecutionGraphPipelineCreateInfoAMDX & operator=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExecutionGraphPipelineCreateInfoAMDX & operator=( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const *>( &rhs );
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = stageCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX &
+      setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStages = pStages_;
+      return *this;
+    }
+
+#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ExecutionGraphPipelineCreateInfoAMDX & setStages(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = static_cast<uint32_t>( stages_.size() );
+      pStages    = stages_.data();
+      return *this;
+    }
+#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX &
+      setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLibraryInfo = pLibraryInfo_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX &
+      setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+#  endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkExecutionGraphPipelineCreateInfoAMDX const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( this );
+    }
+
+    operator VkExecutionGraphPipelineCreateInfoAMDX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExecutionGraphPipelineCreateInfoAMDX *>( this );
+    }
+
+#  if defined( VULKAN_HPP_USE_REFLECT )
+#    if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#    else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &,
+               const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * const &,
+               VULKAN_HPP_NAMESPACE::PipelineLayout const &,
+               VULKAN_HPP_NAMESPACE::Pipeline const &,
+               int32_t const &>
+#    endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, stageCount, pStages, pLibraryInfo, layout, basePipelineHandle, basePipelineIndex );
+    }
+#  endif
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExecutionGraphPipelineCreateInfoAMDX const & ) const = default;
+#  else
+    bool operator==( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#    if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#    else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount ==
rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( pLibraryInfo == rhs.pLibraryInfo ) && ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && + ( basePipelineIndex == rhs.basePipelineIndex ); +# endif + } + + bool operator!=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineCreateInfoAMDX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + + template <> + struct CppType + { + using Type = ExecutionGraphPipelineCreateInfoAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct ExecutionGraphPipelineScratchSizeAMDX + { + using NativeType = VkExecutionGraphPipelineScratchSizeAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , size( size_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExecutionGraphPipelineScratchSizeAMDX( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : ExecutionGraphPipelineScratchSizeAMDX( *reinterpret_cast( &rhs ) ) + { + } + + ExecutionGraphPipelineScratchSizeAMDX & operator=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExecutionGraphPipelineScratchSizeAMDX & operator=( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExecutionGraphPipelineScratchSizeAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExecutionGraphPipelineScratchSizeAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, size ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExecutionGraphPipelineScratchSizeAMDX const & ) const = default; +# else + bool operator==( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); 
+# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( size == rhs.size ); +# endif + } + + bool operator!=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = ExecutionGraphPipelineScratchSizeAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct ExportFenceCreateInfo + { + using NativeType = VkExportFenceCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , handleTypes( handleTypes_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportFenceCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportFenceCreateInfo const & ) const = default; +#else + bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {}; + }; + + template <> + struct CppType + { + using Type = ExportFenceCreateInfo; + }; + + using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ExportFenceWin32HandleInfoKHR + { + using NativeType = 
VkExportFenceWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportFenceWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pAttributes, dwAccess, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportFenceWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + }; + + template <> + struct CppType + { + using Type = ExportFenceWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ExportMemoryAllocateInfo + { + using NativeType = VkExportMemoryAllocateInfo; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , handleTypes( handleTypes_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryAllocateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryAllocateInfo const & ) const = default; +#else + bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + }; + + template <> + struct CppType + { + using Type = ExportMemoryAllocateInfo; + }; + + using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; + + struct ExportMemoryAllocateInfoNV + { + using NativeType = VkExportMemoryAllocateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , handleTypes( handleTypes_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : 
ExportMemoryAllocateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryAllocateInfoNV const & ) const = default; +#else + bool operator==( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif + } + + bool operator!=( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; + }; + + template <> + struct CppType + { + using Type = ExportMemoryAllocateInfoNV; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ExportMemoryWin32HandleInfoKHR + { + using NativeType = VkExportMemoryWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
ExportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pAttributes, dwAccess, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + }; + + template <> + struct CppType + { + using Type = ExportMemoryWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ExportMemoryWin32HandleInfoNV + { + using NativeType = VkExportMemoryWin32HandleInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryWin32HandleInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + 
{ + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pAttributes, dwAccess ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryWin32HandleInfoNV const & ) const = default; +# else + bool operator==( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ); +# endif + } + + bool operator!=( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + }; + + template <> + struct CppType + { + using Type = ExportMemoryWin32HandleInfoNV; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalBufferInfoEXT + { + using NativeType = VkExportMetalBufferInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalBufferInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + MTLBuffer_id mtlBuffer_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memory( memory_ ) + , mtlBuffer( mtlBuffer_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalBufferInfoEXT( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalBufferInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalBufferInfoEXT & operator=( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportMetalBufferInfoEXT & operator=( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT + { + mtlBuffer = mtlBuffer_; 
+ return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, mtlBuffer ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalBufferInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( mtlBuffer == rhs.mtlBuffer ); +# endif + } + + bool operator!=( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalBufferInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + MTLBuffer_id mtlBuffer = {}; + }; + + template <> + struct CppType + { + using Type = ExportMetalBufferInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalCommandQueueInfoEXT + { + using NativeType = VkExportMetalCommandQueueInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalCommandQueueInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( VULKAN_HPP_NAMESPACE::Queue queue_ = {}, + MTLCommandQueue_id mtlCommandQueue_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , queue( queue_ ) + , mtlCommandQueue( mtlCommandQueue_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalCommandQueueInfoEXT( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalCommandQueueInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalCommandQueueInfoEXT & operator=( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportMetalCommandQueueInfoEXT & operator=( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setQueue( VULKAN_HPP_NAMESPACE::Queue queue_ ) VULKAN_HPP_NOEXCEPT + { + queue = queue_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setMtlCommandQueue( MTLCommandQueue_id mtlCommandQueue_ ) VULKAN_HPP_NOEXCEPT + { + mtlCommandQueue = mtlCommandQueue_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExportMetalCommandQueueInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalCommandQueueInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, queue, mtlCommandQueue ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalCommandQueueInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queue == rhs.queue ) && ( mtlCommandQueue == rhs.mtlCommandQueue ); +# endif + } + + bool operator!=( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalCommandQueueInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Queue queue = {}; + MTLCommandQueue_id mtlCommandQueue = {}; + }; + + template <> + struct CppType + { + using Type = ExportMetalCommandQueueInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalDeviceInfoEXT + { + using NativeType = VkExportMetalDeviceInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalDeviceInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( MTLDevice_id mtlDevice_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , mtlDevice( mtlDevice_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalDeviceInfoEXT( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalDeviceInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalDeviceInfoEXT & operator=( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportMetalDeviceInfoEXT & operator=( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setMtlDevice( MTLDevice_id mtlDevice_ ) VULKAN_HPP_NOEXCEPT + { + mtlDevice = mtlDevice_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExportMetalDeviceInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalDeviceInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mtlDevice ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalDeviceInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( mtlDevice == rhs.mtlDevice ); +# endif + } + + bool operator!=( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalDeviceInfoEXT; + const void * pNext = {}; + MTLDevice_id mtlDevice = {}; + }; + + template <> + struct CppType + { + using Type = ExportMetalDeviceInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalIOSurfaceInfoEXT + { + using NativeType = VkExportMetalIOSurfaceInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalIoSurfaceInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ExportMetalIOSurfaceInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , image( image_ ) + , ioSurface( ioSurface_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExportMetalIOSurfaceInfoEXT( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalIOSurfaceInfoEXT( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalIOSurfaceInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalIOSurfaceInfoEXT & operator=( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportMetalIOSurfaceInfoEXT & operator=( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT + { + ioSurface = ioSurface_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image, ioSurface ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalIOSurfaceInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( ioSurface == rhs.ioSurface ); +# endif + } + + bool operator!=( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalIoSurfaceInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + IOSurfaceRef ioSurface = {}; + }; + + template <> + 
struct CppType + { + using Type = ExportMetalIOSurfaceInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalObjectCreateInfoEXT + { + using NativeType = VkExportMetalObjectCreateInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType_ = VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , exportObjectType( exportObjectType_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalObjectCreateInfoEXT( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalObjectCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalObjectCreateInfoEXT & operator=( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportMetalObjectCreateInfoEXT & operator=( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & + setExportObjectType( VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType_ ) VULKAN_HPP_NOEXCEPT + { + exportObjectType = exportObjectType_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExportMetalObjectCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalObjectCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, exportObjectType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalObjectCreateInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportObjectType == rhs.exportObjectType ); +# endif + } + + bool operator!=( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalObjectCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType = VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice; + }; + + template <> + struct CppType + { + using Type = ExportMetalObjectCreateInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalObjectsInfoEXT + { + using NativeType = VkExportMetalObjectsInfoEXT; + + static const bool 
allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectsInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) {} + + VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalObjectsInfoEXT( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalObjectsInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalObjectsInfoEXT & operator=( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportMetalObjectsInfoEXT & operator=( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExportMetalObjectsInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalObjectsInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalObjectsInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); +# endif + } + + bool operator!=( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalObjectsInfoEXT; + const void * pNext = {}; + }; + + template <> + struct CppType + { + using Type = ExportMetalObjectsInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalSharedEventInfoEXT + { + using NativeType = VkExportMetalSharedEventInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalSharedEventInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::Event event_ = {}, + MTLSharedEvent_id mtlSharedEvent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , semaphore( semaphore_ ) + , event( event_ ) + , mtlSharedEvent( mtlSharedEvent_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalSharedEventInfoEXT( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalSharedEventInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalSharedEventInfoEXT & operator=( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ 
+ + ExportMetalSharedEventInfoEXT & operator=( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setEvent( VULKAN_HPP_NAMESPACE::Event event_ ) VULKAN_HPP_NOEXCEPT + { + event = event_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) VULKAN_HPP_NOEXCEPT + { + mtlSharedEvent = mtlSharedEvent_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, event, mtlSharedEvent ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalSharedEventInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( event == rhs.event ) && + ( mtlSharedEvent == rhs.mtlSharedEvent ); +# endif + } + + bool operator!=( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalSharedEventInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::Event event = {}; + MTLSharedEvent_id mtlSharedEvent = {}; + }; + + template <> + struct CppType + { + using Type = ExportMetalSharedEventInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalTextureInfoEXT + { + using NativeType = VkExportMetalTextureInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalTextureInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::BufferView bufferView_ = {}, + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, + MTLTexture_id mtlTexture_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , image( image_ ) + , imageView( imageView_ ) + , bufferView( bufferView_ ) + , plane( plane_ ) + , mtlTexture( mtlTexture_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExportMetalTextureInfoEXT( VkExportMetalTextureInfoEXT const 
& rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalTextureInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExportMetalTextureInfoEXT & operator=( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExportMetalTextureInfoEXT & operator=( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView_ ) VULKAN_HPP_NOEXCEPT + { + bufferView = bufferView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPlane( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT + { + plane = plane_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT + { + mtlTexture = mtlTexture_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image, imageView, bufferView, plane, mtlTexture ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalTextureInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( imageView == rhs.imageView ) && ( bufferView == rhs.bufferView ) && + ( plane == rhs.plane ) && ( mtlTexture == rhs.mtlTexture ); +# endif + } + + bool operator!=( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalTextureInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::BufferView bufferView = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + MTLTexture_id mtlTexture = {}; + }; + + template <> + struct CppType + { + using Type = ExportMetalTextureInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + struct ExportSemaphoreCreateInfo + { + using NativeType = VkExportSemaphoreCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
+    VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {},
+                                                    const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , handleTypes( handleTypes_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportSemaphoreCreateInfo( *reinterpret_cast<ExportSemaphoreCreateInfo const *>( &rhs ) )
+    {
+    }
+
+    ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo &
+      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportSemaphoreCreateInfo *>( this );
+    }
+
+    operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportSemaphoreCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleTypes );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExportSemaphoreCreateInfo const & ) const = default;
+#else
+    bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
+# endif
+    }
+
+    bool operator!=( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportSemaphoreCreateInfo>
+  {
+    using Type = ExportSemaphoreCreateInfo;
+  };
+
+  using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct ExportSemaphoreWin32HandleInfoKHR
+  {
+    using NativeType = VkExportSemaphoreWin32HandleInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {},
+                                                            DWORD dwAccess_ = {},
+                                                            LPCWSTR name_ = {},
+                                                            const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , pAttributes( pAttributes_ )
+      , dwAccess( dwAccess_ )
+      , name( name_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ExportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttributes = pAttributes_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dwAccess = dwAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      name = name_;
+      return *this;
+    }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR *>( this );
+    }
+
+    operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const SECURITY_ATTRIBUTES * const &, DWORD const &, LPCWSTR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pAttributes, dwAccess, name );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const & ) const = default;
+# else
+    bool operator==( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
+# endif
+    }
+
+    bool operator!=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
+    const void * pNext = {};
+    const SECURITY_ATTRIBUTES * pAttributes = {};
+    DWORD dwAccess = {};
+    LPCWSTR name = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExportSemaphoreWin32HandleInfoKHR>
+  {
+    using Type = ExportSemaphoreWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct ExtensionProperties
+  {
+    using NativeType = VkExtensionProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & extensionName_ = {},
+                                                 uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT
+      : extensionName( extensionName_ )
+      , specVersion( specVersion_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT : ExtensionProperties( *reinterpret_cast<ExtensionProperties const *>( &rhs ) )
+    {
+    }
+
+    ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExtensionProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExtensionProperties *>( this );
+    }
+
+    operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExtensionProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( extensionName, specVersion );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExtensionProperties const & ) const = default;
+#else
+    bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( extensionName == rhs.extensionName ) && ( specVersion == rhs.specVersion );
+# endif
+    }
+
+    bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> extensionName = {};
+    uint32_t specVersion = {};
+  };
+
+  struct ExternalMemoryProperties
+  {
+    using NativeType = VkExternalMemoryProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {},
+                                                   VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {},
+                                                   VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : externalMemoryFeatures( externalMemoryFeatures_ )
+      , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+      , compatibleHandleTypes( compatibleHandleTypes_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalMemoryProperties( *reinterpret_cast<ExternalMemoryProperties const *>( &rhs ) )
+    {
+    }
+
+    ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryProperties *>( this );
+    }
+
+    operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &,
+               VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalMemoryProperties const & ) const = default;
+#else
+    bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
+             ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+# endif
+    }
+
+    bool operator!=( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {};
+  };
+
+  using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
+
+  struct ExternalBufferProperties
+  {
+    using NativeType = VkExternalBufferProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalBufferProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {},
+                                                   void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , externalMemoryProperties( externalMemoryProperties_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalBufferProperties( *reinterpret_cast<ExternalBufferProperties const *>( &rhs ) )
+    {
+    }
+
+    ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalBufferProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalBufferProperties *>( this );
+    }
+
+    operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalBufferProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, externalMemoryProperties );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalBufferProperties const & ) const = default;
+#else
+    bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties );
+# endif
+    }
+
+    bool operator!=( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalBufferProperties>
+  {
+    using Type = ExternalBufferProperties;
+  };
+
+  using ExternalBufferPropertiesKHR = ExternalBufferProperties;
+
+  struct ExternalFenceProperties
+  {
+    using NativeType = VkExternalFenceProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalFenceProperties( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {},
+                                                  VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {},
+                                                  VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {},
+                                                  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+      , compatibleHandleTypes( compatibleHandleTypes_ )
+      , externalFenceFeatures( externalFenceFeatures_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalFenceProperties( *reinterpret_cast<ExternalFenceProperties const *>( &rhs ) )
+    {
+    }
+
+    ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFenceProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalFenceProperties *>( this );
+    }
+
+    operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalFenceProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &,
+               VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &, VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalFenceFeatures );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalFenceProperties const & ) const = default;
+#else
+    bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
+             ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalFenceFeatures == rhs.externalFenceFeatures );
+# endif
+    }
+
+    bool operator!=( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalFenceProperties>
+  {
+    using Type = ExternalFenceProperties;
+  };
+
+  using ExternalFencePropertiesKHR = ExternalFenceProperties;
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  struct ExternalFormatANDROID
+  {
+    using NativeType = VkExternalFormatANDROID;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , externalFormat( externalFormat_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalFormatANDROID( *reinterpret_cast<ExternalFormatANDROID const *>( &rhs ) )
+    {
+    }
+
+    ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      externalFormat = externalFormat_;
+      return *this;
+    }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalFormatANDROID *>( this );
+    }
+
+    operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalFormatANDROID *>( this );
+    }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, externalFormat );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalFormatANDROID const & ) const = default;
+# else
+    bool operator==( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat );
+# endif
+    }
+
+    bool operator!=( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID;
+    void * pNext = {};
+    uint64_t externalFormat = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalFormatANDROID>
+  {
+    using Type = ExternalFormatANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  struct ExternalFormatQNX
+  {
+    using NativeType = VkExternalFormatQNX;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatQNX;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalFormatQNX( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , externalFormat( externalFormat_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ExternalFormatQNX( ExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalFormatQNX( VkExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalFormatQNX( *reinterpret_cast<ExternalFormatQNX const *>( &rhs ) ) {}
+
+    ExternalFormatQNX & operator=( ExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalFormatQNX & operator=( VkExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatQNX const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      externalFormat = externalFormat_;
+      return *this;
+    }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkExternalFormatQNX const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalFormatQNX *>( this );
+    }
+
+    operator VkExternalFormatQNX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalFormatQNX *>( this );
+    }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, externalFormat );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalFormatQNX const & ) const = default;
+# else
+    bool operator==( ExternalFormatQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat );
+# endif
+    }
+
+    bool operator!=( ExternalFormatQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatQNX;
+    void * pNext = {};
+    uint64_t externalFormat = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalFormatQNX>
+  {
+    using Type = ExternalFormatQNX;
+  };
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  struct ExternalImageFormatProperties
+  {
+    using NativeType = VkExternalImageFormatProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {},
+                                                        void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , externalMemoryProperties( externalMemoryProperties_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalImageFormatProperties( *reinterpret_cast<ExternalImageFormatProperties const *>( &rhs ) )
+    {
+    }
+
+    ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalImageFormatProperties *>( this );
+    }
+
+    operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalImageFormatProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, externalMemoryProperties );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalImageFormatProperties const & ) const = default;
+#else
+    bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties );
+# endif
+    }
+
+    bool operator!=( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalImageFormatProperties>
+  {
+    using Type = ExternalImageFormatProperties;
+  };
+
+  using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
+
+  struct ImageFormatProperties
+  {
+    using NativeType = VkImageFormatProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {},
+                                                uint32_t maxMipLevels_ = {},
+                                                uint32_t maxArrayLayers_ = {},
+                                                VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {},
+                                                VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxExtent( maxExtent_ )
+      , maxMipLevels( maxMipLevels_ )
+      , maxArrayLayers( maxArrayLayers_ )
+      , sampleCounts( sampleCounts_ )
+      , maxResourceSize( maxResourceSize_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageFormatProperties( *reinterpret_cast<ImageFormatProperties const *>( &rhs ) )
+    {
+    }
+
+    ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageFormatProperties *>( this );
+    }
+
+    operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageFormatProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::Extent3D const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::SampleCountFlags const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( maxExtent, maxMipLevels, maxArrayLayers, sampleCounts, maxResourceSize );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageFormatProperties const & ) const = default;
+#else
+    bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( maxExtent == rhs.maxExtent ) && ( maxMipLevels == rhs.maxMipLevels ) && ( maxArrayLayers == rhs.maxArrayLayers ) &&
+             ( sampleCounts == rhs.sampleCounts ) && ( maxResourceSize == rhs.maxResourceSize );
+# endif
+    }
+
+    bool operator!=( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {};
+    uint32_t maxMipLevels = {};
+    uint32_t maxArrayLayers = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {};
+  };
+
+  struct ExternalImageFormatPropertiesNV
+  {
+    using NativeType = VkExternalImageFormatPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      ExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {},
+                                       VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {},
+                                       VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {},
+                                       VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : imageFormatProperties( imageFormatProperties_ )
+      , externalMemoryFeatures( externalMemoryFeatures_ )
+      , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+      , compatibleHandleTypes( compatibleHandleTypes_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalImageFormatPropertiesNV( *reinterpret_cast<ExternalImageFormatPropertiesNV const *>( &rhs ) )
+    {
+    }
+
+    ExternalImageFormatPropertiesNV & operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalImageFormatPropertiesNV *>( this );
+    }
+
+    operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalImageFormatPropertiesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::ImageFormatProperties const &, VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV const &,
+               VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( imageFormatProperties, externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalImageFormatPropertiesNV const & ) const = default;
+#else
+    bool operator==( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( imageFormatProperties == rhs.imageFormatProperties ) && ( externalMemoryFeatures == rhs.externalMemoryFeatures ) &&
+             ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+# endif
+    }
+
+    bool operator!=( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {};
+  };
+
+  struct ExternalMemoryAcquireUnmodifiedEXT
+  {
+    using NativeType = VkExternalMemoryAcquireUnmodifiedEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryAcquireUnmodifiedEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalMemoryAcquireUnmodifiedEXT( VULKAN_HPP_NAMESPACE::Bool32 acquireUnmodifiedMemory_ = {},
+                                                             const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , acquireUnmodifiedMemory( acquireUnmodifiedMemory_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryAcquireUnmodifiedEXT( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryAcquireUnmodifiedEXT( VkExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalMemoryAcquireUnmodifiedEXT( *reinterpret_cast<ExternalMemoryAcquireUnmodifiedEXT const *>( &rhs ) )
+    {
+    }
+
+    ExternalMemoryAcquireUnmodifiedEXT & operator=( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalMemoryAcquireUnmodifiedEXT & operator=( VkExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryAcquireUnmodifiedEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryAcquireUnmodifiedEXT &
+      setAcquireUnmodifiedMemory( VULKAN_HPP_NAMESPACE::Bool32 acquireUnmodifiedMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireUnmodifiedMemory = acquireUnmodifiedMemory_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkExternalMemoryAcquireUnmodifiedEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryAcquireUnmodifiedEXT *>( this );
+    }
+
+    operator VkExternalMemoryAcquireUnmodifiedEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryAcquireUnmodifiedEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, acquireUnmodifiedMemory );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalMemoryAcquireUnmodifiedEXT const & ) const = default;
+#else
+    bool operator==( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireUnmodifiedMemory == rhs.acquireUnmodifiedMemory );
+# endif
+    }
+
+    bool operator!=( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryAcquireUnmodifiedEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 acquireUnmodifiedMemory = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalMemoryAcquireUnmodifiedEXT>
+  {
+    using Type = ExternalMemoryAcquireUnmodifiedEXT;
+  };
+
+  struct ExternalMemoryBufferCreateInfo
+  {
+    using NativeType = VkExternalMemoryBufferCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {},
+                                                         const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , handleTypes( handleTypes_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalMemoryBufferCreateInfo( *reinterpret_cast<ExternalMemoryBufferCreateInfo const *>( &rhs ) )
+    {
+    }
+
+    ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo &
+      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo *>( this );
+    }
+
+    operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryBufferCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleTypes );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalMemoryBufferCreateInfo const & ) const = default;
+#else
+    bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
+# endif
+    }
+
+    bool operator!=( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalMemoryBufferCreateInfo>
+  {
+    using Type = ExternalMemoryBufferCreateInfo;
+  };
+
+  using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
+
+  struct ExternalMemoryImageCreateInfo
+  {
+    using NativeType = VkExternalMemoryImageCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {},
+                                                        const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , handleTypes( handleTypes_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalMemoryImageCreateInfo( *reinterpret_cast<ExternalMemoryImageCreateInfo const *>( &rhs ) )
+    {
+    }
+
+    ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo &
+      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfo *>( this );
+    }
+
+    operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryImageCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleTypes );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalMemoryImageCreateInfo const & ) const = default;
+#else
+    bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
+# endif
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfo>
+  {
+    using Type = ExternalMemoryImageCreateInfo;
+  };
+
+  using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
+
+  struct ExternalMemoryImageCreateInfoNV
+  {
+    using NativeType = VkExternalMemoryImageCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {},
+                                                          const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , handleTypes( handleTypes_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ExternalMemoryImageCreateInfoNV( *reinterpret_cast<ExternalMemoryImageCreateInfoNV const *>( &rhs ) )
+    {
+    }
+
+    ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV &
+      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV *>( this );
+    }
+
+    operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryImageCreateInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleTypes );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ExternalMemoryImageCreateInfoNV const & ) const = default;
+#else
+    bool operator==( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
+# endif
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV;
+    const void * pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; + }; + + template <> + struct CppType + { + using Type = ExternalMemoryImageCreateInfoNV; + }; + + struct ExternalSemaphoreProperties + { + using NativeType = VkExternalSemaphoreProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) + , compatibleHandleTypes( compatibleHandleTypes_ ) + , externalSemaphoreFeatures( externalSemaphoreFeatures_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalSemaphoreProperties( *reinterpret_cast( &rhs ) ) + { + } + + ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalSemaphoreFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalSemaphoreProperties const & ) const = default; +#else + bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); +# endif + } + + bool operator!=( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {}; + }; + + template <> + struct CppType + { + using Type = ExternalSemaphoreProperties; + }; + + using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; + + struct FenceCreateInfo + { + using NativeType = VkFenceCreateInfo; + + static const 
bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : FenceCreateInfo( *reinterpret_cast( &rhs ) ) {} + + FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceCreateInfo const & ) const = default; +#else + bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {}; + }; + + template <> + struct CppType + { + using Type = FenceCreateInfo; + }; + + struct FenceGetFdInfoKHR + { + using NativeType = VkFenceGetFdInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , fence( fence_ ) + , handleType( handleType_ ) + { + } + + VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : FenceGetFdInfoKHR( *reinterpret_cast( &rhs ) ) {} + + FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fence, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceGetFdInfoKHR const & ) const = default; +#else + bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = FenceGetFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct FenceGetWin32HandleInfoKHR + { + using NativeType = VkFenceGetWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , fence( fence_ ) + , handleType( handleType_ ) + { + } + + VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : FenceGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return 
*this; + } + + VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fence, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = FenceGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct FilterCubicImageViewImageFormatPropertiesEXT + { + using NativeType = VkFilterCubicImageViewImageFormatPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , filterCubic( filterCubic_ ) + , filterCubicMinmax( filterCubicMinmax_ ) + { + } + + VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : FilterCubicImageViewImageFormatPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FilterCubicImageViewImageFormatPropertiesEXT & operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, filterCubic, filterCubicMinmax ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const & ) const = default; +#else + bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCubic == rhs.filterCubic ) && ( filterCubicMinmax == rhs.filterCubicMinmax ); +# endif + } + + bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {}; + }; + + template <> + struct CppType + { + using Type = FilterCubicImageViewImageFormatPropertiesEXT; + }; + + struct FormatProperties + { + using NativeType = VkFormatProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : linearTilingFeatures( linearTilingFeatures_ ) + , optimalTilingFeatures( optimalTilingFeatures_ ) + , bufferFeatures( bufferFeatures_ ) + { + } + + VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties( *reinterpret_cast( &rhs ) ) {} + + FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( linearTilingFeatures, optimalTilingFeatures, bufferFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FormatProperties const & ) const = default; +#else + bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( linearTilingFeatures == rhs.linearTilingFeatures ) && ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && + ( bufferFeatures == rhs.bufferFeatures ); +# endif + } + + bool operator!=( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::FormatFeatureFlags 
linearTilingFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {}; + }; + + struct FormatProperties2 + { + using NativeType = VkFormatProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , formatProperties( formatProperties_ ) + { + } + + VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties2( *reinterpret_cast( &rhs ) ) {} + + FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, formatProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FormatProperties2 const & ) const = default; +#else + bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatProperties == rhs.formatProperties ); +# endif + } + + bool operator!=( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {}; + }; + + template <> + struct CppType + { + using Type = FormatProperties2; + }; + + using FormatProperties2KHR = FormatProperties2; + + struct FormatProperties3 + { + using NativeType = VkFormatProperties3; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties3; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FormatProperties3( VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , linearTilingFeatures( linearTilingFeatures_ ) + , optimalTilingFeatures( optimalTilingFeatures_ ) + , bufferFeatures( bufferFeatures_ ) + { + } + + VULKAN_HPP_CONSTEXPR FormatProperties3( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FormatProperties3( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties3( *reinterpret_cast( &rhs ) ) {} + + FormatProperties3 & operator=( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FormatProperties3 & operator=( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkFormatProperties3 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFormatProperties3 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, linearTilingFeatures, optimalTilingFeatures, bufferFeatures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FormatProperties3 const & ) const = default; +#else + bool operator==( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearTilingFeatures == rhs.linearTilingFeatures ) && + ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && ( bufferFeatures == rhs.bufferFeatures ); +# endif + } + + bool operator!=( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties3; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures = {}; + }; + + template <> + struct CppType + { + using Type = FormatProperties3; + }; + + using FormatProperties3KHR = FormatProperties3; + + struct FragmentShadingRateAttachmentInfoKHR + { + using NativeType = VkFragmentShadingRateAttachmentInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFragmentShadingRateAttachmentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pFragmentShadingRateAttachment( pFragmentShadingRateAttachment_ ) + , shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + FragmentShadingRateAttachmentInfoKHR & operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FragmentShadingRateAttachmentInfoKHR & operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & + 
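+    // Illustrative sketch (not part of the generated header): this struct extends the pNext
+    // chain of SubpassDescription2 (VK_KHR_fragment_shading_rate). Assuming a valid
+    // vk::AttachmentReference2 `srAttachment` and a vk::SubpassDescription2 `subpass`:
+    //   auto srInfo = vk::FragmentShadingRateAttachmentInfoKHR{}
+    //                   .setPFragmentShadingRateAttachment( &srAttachment )
+    //                   .setShadingRateAttachmentTexelSize( vk::Extent2D{ 16, 16 } );
+    //   subpass.setPNext( &srInfo );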
setPFragmentShadingRateAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & + setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pFragmentShadingRateAttachment, shadingRateAttachmentTexelSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FragmentShadingRateAttachmentInfoKHR const & ) const = default; +#else + bool operator==( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment ) && + ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize ); +# endif + } + + bool operator!=( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFragmentShadingRateAttachmentInfoKHR; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment = {}; + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {}; + }; + + template <> + struct CppType + { + using Type = FragmentShadingRateAttachmentInfoKHR; + }; + + struct FrameBoundaryEXT + { + using NativeType = VkFrameBoundaryEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFrameBoundaryEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FrameBoundaryEXT( VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags_ = {}, + uint64_t frameID_ = {}, + uint32_t imageCount_ = {}, + const VULKAN_HPP_NAMESPACE::Image * pImages_ = {}, + uint32_t bufferCount_ = {}, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers_ = {}, + uint64_t tagName_ = {}, + size_t tagSize_ = {}, + const void * pTag_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , frameID( frameID_ ) + , imageCount( imageCount_ ) + , pImages( pImages_ ) + , bufferCount( bufferCount_ ) + , pBuffers( pBuffers_ ) + , tagName( tagName_ ) + , tagSize( tagSize_ ) + , pTag( pTag_ ) + { + } + + VULKAN_HPP_CONSTEXPR FrameBoundaryEXT( FrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FrameBoundaryEXT( VkFrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT : FrameBoundaryEXT( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + FrameBoundaryEXT( VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags_, + uint64_t frameID_, + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & images_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & buffers_ = {}, + uint64_t tagName_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , frameID( frameID_ ) + , imageCount( static_cast( images_.size() ) ) + , pImages( images_.data() ) + , bufferCount( static_cast( buffers_.size() ) ) + , pBuffers( buffers_.data() ) + , tagName( tagName_ ) + , tagSize( tag_.size() * sizeof( T ) ) + , pTag( tag_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + FrameBoundaryEXT & operator=( FrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FrameBoundaryEXT & operator=( VkFrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setFlags( VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setFrameID( uint64_t frameID_ ) VULKAN_HPP_NOEXCEPT + { + frameID = frameID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setImageCount( uint32_t imageCount_ ) VULKAN_HPP_NOEXCEPT + { + imageCount = imageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPImages( const VULKAN_HPP_NAMESPACE::Image * pImages_ ) VULKAN_HPP_NOEXCEPT + { + pImages = pImages_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FrameBoundaryEXT & setImages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & images_ ) VULKAN_HPP_NOEXCEPT + { + imageCount = static_cast( images_.size() ); + pImages = images_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setBufferCount( uint32_t bufferCount_ ) VULKAN_HPP_NOEXCEPT + { + bufferCount = bufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPBuffers( const VULKAN_HPP_NAMESPACE::Buffer * pBuffers_ ) VULKAN_HPP_NOEXCEPT + { + pBuffers = pBuffers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FrameBoundaryEXT & setBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & buffers_ ) VULKAN_HPP_NOEXCEPT + { + bufferCount = static_cast( buffers_.size() ); + pBuffers = buffers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT + { + tagName = tagName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tagSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT + { + pTag = pTag_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + FrameBoundaryEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tag_.size() * sizeof( T ); + pTag = tag_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkFrameBoundaryEXT const &() const 
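+    // Illustrative sketch (not part of the generated header): the enhanced-mode array setters
+    // fill count and pointer together. Assuming a std::vector<vk::Image> `presentImages` that
+    // outlives the struct, and a frame counter `frameIndex`:
+    //   auto boundary = vk::FrameBoundaryEXT{}
+    //                     .setFlags( vk::FrameBoundaryFlagBitsEXT::eFrameEnd )
+    //                     .setFrameID( frameIndex )
+    //                     .setImages( presentImages );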
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFrameBoundaryEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, frameID, imageCount, pImages, bufferCount, pBuffers, tagName, tagSize, pTag ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FrameBoundaryEXT const & ) const = default; +#else + bool operator==( FrameBoundaryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( frameID == rhs.frameID ) && ( imageCount == rhs.imageCount ) && + ( pImages == rhs.pImages ) && ( bufferCount == rhs.bufferCount ) && ( pBuffers == rhs.pBuffers ) && ( tagName == rhs.tagName ) && + ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag ); +# endif + } + + bool operator!=( FrameBoundaryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFrameBoundaryEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags = {}; + uint64_t frameID = {}; + uint32_t imageCount = {}; + const VULKAN_HPP_NAMESPACE::Image * pImages = {}; + uint32_t bufferCount = {}; + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers = {}; + uint64_t tagName = {}; + size_t tagSize = {}; + const void * pTag = {}; + }; + + template <> + struct CppType + { + using Type = FrameBoundaryEXT; + }; + + struct FramebufferAttachmentImageInfo + { + using NativeType = VkFramebufferAttachmentImageInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layerCount_ = {}, + uint32_t viewFormatCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , usage( usage_ ) + , width( width_ ) + , height( height_ ) + , layerCount( layerCount_ ) + , viewFormatCount( viewFormatCount_ ) + , pViewFormats( pViewFormats_ ) + { + } + + VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferAttachmentImageInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, + uint32_t width_, + uint32_t height_, + uint32_t layerCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , usage( usage_ ) + , width( width_ ) + , height( height_ ) + , layerCount( layerCount_ ) + , viewFormatCount( static_cast( viewFormats_.size() ) ) + , pViewFormats( viewFormats_.data() ) + { + } +# 
endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT + { + viewFormatCount = viewFormatCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT + { + pViewFormats = pViewFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentImageInfo & + setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) VULKAN_HPP_NOEXCEPT + { + viewFormatCount = static_cast( viewFormats_.size() ); + pViewFormats = viewFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, usage, width, height, layerCount, viewFormatCount, pViewFormats ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferAttachmentImageInfo const & ) const = default; +#else + bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( width == rhs.width ) && + ( height == rhs.height ) && ( layerCount == rhs.layerCount ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats ); +# endif + } + + bool operator!=( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType 
sType = StructureType::eFramebufferAttachmentImageInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t layerCount = {}; + uint32_t viewFormatCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {}; + }; + + template <> + struct CppType + { + using Type = FramebufferAttachmentImageInfo; + }; + + using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo; + + struct FramebufferAttachmentsCreateInfo + { + using NativeType = VkFramebufferAttachmentsCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( uint32_t attachmentImageInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , attachmentImageInfoCount( attachmentImageInfoCount_ ) + , pAttachmentImageInfos( pAttachmentImageInfos_ ) + { + } + + VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferAttachmentsCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentsCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentImageInfos_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , attachmentImageInfoCount( static_cast( attachmentImageInfos_.size() ) ) + , pAttachmentImageInfos( attachmentImageInfos_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentImageInfoCount = attachmentImageInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & + setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT + { + pAttachmentImageInfos = pAttachmentImageInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentsCreateInfo & setAttachmentImageInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentImageInfos_ ) + VULKAN_HPP_NOEXCEPT + { + attachmentImageInfoCount = static_cast( attachmentImageInfos_.size() ); + pAttachmentImageInfos = attachmentImageInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkFramebufferAttachmentsCreateInfo const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, attachmentImageInfoCount, pAttachmentImageInfos ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferAttachmentsCreateInfo const & ) const = default; +#else + bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) && + ( pAttachmentImageInfos == rhs.pAttachmentImageInfos ); +# endif + } + + bool operator!=( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo; + const void * pNext = {}; + uint32_t attachmentImageInfoCount = {}; + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos = {}; + }; + + template <> + struct CppType + { + using Type = FramebufferAttachmentsCreateInfo; + }; + + using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo; + + struct FramebufferCreateInfo + { + using NativeType = VkFramebufferCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layers_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , renderPass( renderPass_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , width( width_ ) + , height( height_ ) + , layers( layers_ ) + { + } + + VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layers_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , renderPass( renderPass_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , width( width_ ) + , height( height_ ) + , layers( layers_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & 
rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferCreateInfo & + setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT + { + layers = layers_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, renderPass, attachmentCount, pAttachments, width, height, layers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferCreateInfo const & ) const = default; +#else + bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( renderPass == rhs.renderPass ) && + ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && ( width == rhs.width ) && ( height == rhs.height ) && + ( layers == rhs.layers ); +# endif + } + + bool operator!=( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t layers = {}; + }; + + template <> + struct CppType + { + using Type = 
FramebufferCreateInfo; + }; + + struct FramebufferMixedSamplesCombinationNV + { + using NativeType = VkFramebufferMixedSamplesCombinationNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferMixedSamplesCombinationNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , coverageReductionMode( coverageReductionMode_ ) + , rasterizationSamples( rasterizationSamples_ ) + , depthStencilSamples( depthStencilSamples_ ) + , colorSamples( colorSamples_ ) + { + } + + VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferMixedSamplesCombinationNV( *reinterpret_cast( &rhs ) ) + { + } + + FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + FramebufferMixedSamplesCombinationNV & operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, coverageReductionMode, rasterizationSamples, depthStencilSamples, colorSamples ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferMixedSamplesCombinationNV const & ) const = default; +#else + bool operator==( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ) && + ( rasterizationSamples == rhs.rasterizationSamples ) && ( depthStencilSamples == rhs.depthStencilSamples ) && ( colorSamples == rhs.colorSamples ); +# endif + } + + bool operator!=( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = 
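+  // Illustrative sketch (not part of the generated header): for an imageless framebuffer,
+  // FramebufferAttachmentsCreateInfo is chained into FramebufferCreateInfo together with the
+  // eImageless flag. Assuming `device`, `renderPass`, `attachmentInfos`, `width` and `height`:
+  //   auto attachments = vk::FramebufferAttachmentsCreateInfo{}.setAttachmentImageInfos( attachmentInfos );
+  //   auto fbInfo      = vk::FramebufferCreateInfo{ vk::FramebufferCreateFlagBits::eImageless, renderPass, 0, nullptr, width, height, 1 }
+  //                        .setPNext( &attachments );
+  //   vk::Framebuffer fb = device.createFramebuffer( fbInfo );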
{}; + }; + + template <> + struct CppType + { + using Type = FramebufferMixedSamplesCombinationNV; + }; + + struct IndirectCommandsStreamNV + { + using NativeType = VkIndirectCommandsStreamNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + , offset( offset_ ) + { + } + + VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsStreamNV( *reinterpret_cast( &rhs ) ) + { + } + + IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( buffer, offset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsStreamNV const & ) const = default; +#else + bool operator==( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( buffer == rhs.buffer ) && ( offset == rhs.offset ); +# endif + } + + bool operator!=( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + }; + + struct GeneratedCommandsInfoNV + { + using NativeType = VkGeneratedCommandsInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t streamCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ = {}, + uint32_t sequencesCount_ = {}, + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize 
sequencesCountOffset_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , streamCount( streamCount_ ) + , pStreams( pStreams_ ) + , sequencesCount( sequencesCount_ ) + , preprocessBuffer( preprocessBuffer_ ) + , preprocessOffset( preprocessOffset_ ) + , preprocessSize( preprocessSize_ ) + , sequencesCountBuffer( sequencesCountBuffer_ ) + , sequencesCountOffset( sequencesCountOffset_ ) + , sequencesIndexBuffer( sequencesIndexBuffer_ ) + , sequencesIndexOffset( sequencesIndexOffset_ ) + { + } + + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streams_, + uint32_t sequencesCount_ = {}, + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , streamCount( static_cast( streams_.size() ) ) + , pStreams( streams_.data() ) + , sequencesCount( sequencesCount_ ) + , preprocessBuffer( preprocessBuffer_ ) + , preprocessOffset( preprocessOffset_ ) + , preprocessSize( preprocessSize_ ) + , sequencesCountBuffer( sequencesCountBuffer_ ) + , sequencesCountOffset( sequencesCountOffset_ ) + , sequencesIndexBuffer( sequencesIndexBuffer_ ) + , sequencesIndexOffset( sequencesIndexOffset_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & + setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV 
indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = streamCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ ) VULKAN_HPP_NOEXCEPT + { + pStreams = pStreams_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsInfoNV & + setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streams_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = static_cast( streams_.size() ); + pStreams = streams_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCount = sequencesCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT + { + preprocessBuffer = preprocessBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT + { + preprocessOffset = preprocessOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT + { + preprocessSize = preprocessSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCountBuffer = sequencesCountBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT + { + sequencesCountOffset = sequencesCountOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sequencesIndexBuffer = sequencesIndexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT + { + sequencesIndexOffset = sequencesIndexOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + pipelineBindPoint, + pipeline, + indirectCommandsLayout, + streamCount, + pStreams, + sequencesCount, + preprocessBuffer, + preprocessOffset, + preprocessSize, + sequencesCountBuffer, + sequencesCountOffset, + sequencesIndexBuffer, + sequencesIndexOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsInfoNV const & ) const = default; +#else + bool operator==( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == 
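+      // Illustrative sketch (not part of the generated header): setStreams() keeps streamCount
+      // and pStreams consistent. Assuming `pipeline`, `layout` and a
+      // std::vector<vk::IndirectCommandsStreamNV> `streams` that outlives the struct:
+      //   auto info = vk::GeneratedCommandsInfoNV{}
+      //                 .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
+      //                 .setPipeline( pipeline )
+      //                 .setIndirectCommandsLayout( layout )
+      //                 .setStreams( streams );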
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline ) && + ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( streamCount == rhs.streamCount ) && ( pStreams == rhs.pStreams ) && + ( sequencesCount == rhs.sequencesCount ) && ( preprocessBuffer == rhs.preprocessBuffer ) && ( preprocessOffset == rhs.preprocessOffset ) && + ( preprocessSize == rhs.preprocessSize ) && ( sequencesCountBuffer == rhs.sequencesCountBuffer ) && + ( sequencesCountOffset == rhs.sequencesCountOffset ) && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) && + ( sequencesIndexOffset == rhs.sequencesIndexOffset ); +# endif + } + + bool operator!=( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t streamCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams = {}; + uint32_t sequencesCount = {}; + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {}; + }; + + template <> + struct CppType + { + using Type = GeneratedCommandsInfoNV; + }; + + struct GeneratedCommandsMemoryRequirementsInfoNV + { + using NativeType = VkGeneratedCommandsMemoryRequirementsInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t maxSequencesCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , maxSequencesCount( maxSequencesCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsMemoryRequirementsInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeneratedCommandsMemoryRequirementsInfoNV & operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if 
!defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & + setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSequencesCount = maxSequencesCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineBindPoint, pipeline, indirectCommandsLayout, maxSequencesCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const & ) const = default; +#else + bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline ) && + ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( maxSequencesCount == rhs.maxSequencesCount ); +# endif + } + + bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t maxSequencesCount = {}; + }; + + template <> + struct CppType + { + using Type = GeneratedCommandsMemoryRequirementsInfoNV; + }; + + struct LatencyTimingsFrameReportNV + { + using NativeType = VkLatencyTimingsFrameReportNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencyTimingsFrameReportNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LatencyTimingsFrameReportNV( uint64_t presentID_ = {}, + uint64_t inputSampleTimeUs_ = {}, + uint64_t simStartTimeUs_ = {}, + uint64_t simEndTimeUs_ = {}, + uint64_t renderSubmitStartTimeUs_ = {}, + uint64_t renderSubmitEndTimeUs_ = {}, + 
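+    // Illustrative sketch (not part of the generated header): GeneratedCommandsMemoryRequirementsInfoNV
+    // sizes the preprocess buffer for VK_NV_device_generated_commands. Assuming `device`,
+    // `pipeline`, `layout` and a sequence count `maxSequences`:
+    //   auto reqInfo = vk::GeneratedCommandsMemoryRequirementsInfoNV{ vk::PipelineBindPoint::eGraphics, pipeline, layout, maxSequences };
+    //   vk::MemoryRequirements2 reqs = device.getGeneratedCommandsMemoryRequirementsNV( reqInfo );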
uint64_t presentStartTimeUs_ = {}, + uint64_t presentEndTimeUs_ = {}, + uint64_t driverStartTimeUs_ = {}, + uint64_t driverEndTimeUs_ = {}, + uint64_t osRenderQueueStartTimeUs_ = {}, + uint64_t osRenderQueueEndTimeUs_ = {}, + uint64_t gpuRenderStartTimeUs_ = {}, + uint64_t gpuRenderEndTimeUs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , presentID( presentID_ ) + , inputSampleTimeUs( inputSampleTimeUs_ ) + , simStartTimeUs( simStartTimeUs_ ) + , simEndTimeUs( simEndTimeUs_ ) + , renderSubmitStartTimeUs( renderSubmitStartTimeUs_ ) + , renderSubmitEndTimeUs( renderSubmitEndTimeUs_ ) + , presentStartTimeUs( presentStartTimeUs_ ) + , presentEndTimeUs( presentEndTimeUs_ ) + , driverStartTimeUs( driverStartTimeUs_ ) + , driverEndTimeUs( driverEndTimeUs_ ) + , osRenderQueueStartTimeUs( osRenderQueueStartTimeUs_ ) + , osRenderQueueEndTimeUs( osRenderQueueEndTimeUs_ ) + , gpuRenderStartTimeUs( gpuRenderStartTimeUs_ ) + , gpuRenderEndTimeUs( gpuRenderEndTimeUs_ ) + { + } + + VULKAN_HPP_CONSTEXPR LatencyTimingsFrameReportNV( LatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LatencyTimingsFrameReportNV( VkLatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT + : LatencyTimingsFrameReportNV( *reinterpret_cast( &rhs ) ) + { + } + + LatencyTimingsFrameReportNV & operator=( LatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + LatencyTimingsFrameReportNV & operator=( VkLatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setPresentID( uint64_t presentID_ ) VULKAN_HPP_NOEXCEPT + { + presentID = presentID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setInputSampleTimeUs( uint64_t inputSampleTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + inputSampleTimeUs = inputSampleTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setSimStartTimeUs( uint64_t simStartTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + simStartTimeUs = simStartTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setSimEndTimeUs( uint64_t simEndTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + simEndTimeUs = simEndTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setRenderSubmitStartTimeUs( uint64_t renderSubmitStartTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + renderSubmitStartTimeUs = renderSubmitStartTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setRenderSubmitEndTimeUs( uint64_t renderSubmitEndTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + renderSubmitEndTimeUs = renderSubmitEndTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setPresentStartTimeUs( uint64_t presentStartTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + presentStartTimeUs = presentStartTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setPresentEndTimeUs( uint64_t presentEndTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + presentEndTimeUs = presentEndTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setDriverStartTimeUs( uint64_t driverStartTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + driverStartTimeUs = 
driverStartTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setDriverEndTimeUs( uint64_t driverEndTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + driverEndTimeUs = driverEndTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setOsRenderQueueStartTimeUs( uint64_t osRenderQueueStartTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + osRenderQueueStartTimeUs = osRenderQueueStartTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setOsRenderQueueEndTimeUs( uint64_t osRenderQueueEndTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + osRenderQueueEndTimeUs = osRenderQueueEndTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setGpuRenderStartTimeUs( uint64_t gpuRenderStartTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + gpuRenderStartTimeUs = gpuRenderStartTimeUs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 LatencyTimingsFrameReportNV & setGpuRenderEndTimeUs( uint64_t gpuRenderEndTimeUs_ ) VULKAN_HPP_NOEXCEPT + { + gpuRenderEndTimeUs = gpuRenderEndTimeUs_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkLatencyTimingsFrameReportNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencyTimingsFrameReportNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + presentID, + inputSampleTimeUs, + simStartTimeUs, + simEndTimeUs, + renderSubmitStartTimeUs, + renderSubmitEndTimeUs, + presentStartTimeUs, + presentEndTimeUs, + driverStartTimeUs, + driverEndTimeUs, + osRenderQueueStartTimeUs, + osRenderQueueEndTimeUs, + gpuRenderStartTimeUs, + gpuRenderEndTimeUs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( LatencyTimingsFrameReportNV const & ) const = default; +#else + bool operator==( LatencyTimingsFrameReportNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID ) && ( inputSampleTimeUs == rhs.inputSampleTimeUs ) && + ( simStartTimeUs == rhs.simStartTimeUs ) && ( simEndTimeUs == rhs.simEndTimeUs ) && ( renderSubmitStartTimeUs == rhs.renderSubmitStartTimeUs ) && + ( renderSubmitEndTimeUs == rhs.renderSubmitEndTimeUs ) && ( presentStartTimeUs == rhs.presentStartTimeUs ) && + ( presentEndTimeUs == rhs.presentEndTimeUs ) && ( driverStartTimeUs == rhs.driverStartTimeUs ) && ( driverEndTimeUs == rhs.driverEndTimeUs ) && + ( osRenderQueueStartTimeUs == rhs.osRenderQueueStartTimeUs ) && ( osRenderQueueEndTimeUs == rhs.osRenderQueueEndTimeUs ) && + ( gpuRenderStartTimeUs == rhs.gpuRenderStartTimeUs ) && ( gpuRenderEndTimeUs == rhs.gpuRenderEndTimeUs ); +# endif + } + + bool operator!=( LatencyTimingsFrameReportNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencyTimingsFrameReportNV; + const void * pNext = {}; + uint64_t presentID = {}; + uint64_t inputSampleTimeUs = {}; + uint64_t simStartTimeUs = {}; + uint64_t simEndTimeUs = {}; + uint64_t renderSubmitStartTimeUs = {}; + uint64_t renderSubmitEndTimeUs = {}; + uint64_t presentStartTimeUs = {}; + uint64_t presentEndTimeUs = {}; + uint64_t driverStartTimeUs = {}; + uint64_t 
driverEndTimeUs = {}; + uint64_t osRenderQueueStartTimeUs = {}; + uint64_t osRenderQueueEndTimeUs = {}; + uint64_t gpuRenderStartTimeUs = {}; + uint64_t gpuRenderEndTimeUs = {}; + }; + + template <> + struct CppType + { + using Type = LatencyTimingsFrameReportNV; + }; + + struct GetLatencyMarkerInfoNV + { + using NativeType = VkGetLatencyMarkerInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGetLatencyMarkerInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GetLatencyMarkerInfoNV( uint32_t timingCount_ = {}, + VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , timingCount( timingCount_ ) + , pTimings( pTimings_ ) + { + } + + VULKAN_HPP_CONSTEXPR GetLatencyMarkerInfoNV( GetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GetLatencyMarkerInfoNV( VkGetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GetLatencyMarkerInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GetLatencyMarkerInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & timings_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), timingCount( static_cast( timings_.size() ) ), pTimings( timings_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GetLatencyMarkerInfoNV & operator=( GetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GetLatencyMarkerInfoNV & operator=( VkGetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setTimingCount( uint32_t timingCount_ ) VULKAN_HPP_NOEXCEPT + { + timingCount = timingCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setPTimings( VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings_ ) VULKAN_HPP_NOEXCEPT + { + pTimings = pTimings_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GetLatencyMarkerInfoNV & + setTimings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & timings_ ) VULKAN_HPP_NOEXCEPT + { + timingCount = static_cast( timings_.size() ); + pTimings = timings_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkGetLatencyMarkerInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGetLatencyMarkerInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, timingCount, pTimings ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GetLatencyMarkerInfoNV const & ) const = default; +#else + bool operator==( GetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timingCount == rhs.timingCount ) && ( pTimings == 
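+      // Illustrative sketch (not part of the generated header): GetLatencyMarkerInfoNV follows
+      // the usual two-call idiom of VK_NV_low_latency2. Assuming `device` and `swapchain`:
+      //   vk::GetLatencyMarkerInfoNV info{};
+      //   device.getLatencyTimingsNV( swapchain, &info );   // first call fills timingCount
+      //   std::vector<vk::LatencyTimingsFrameReportNV> timings( info.timingCount );
+      //   info.setTimings( timings );
+      //   device.getLatencyTimingsNV( swapchain, &info );   // second call fills the reports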
rhs.pTimings ); +# endif + } + + bool operator!=( GetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGetLatencyMarkerInfoNV; + const void * pNext = {}; + uint32_t timingCount = {}; + VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings = {}; + }; + + template <> + struct CppType + { + using Type = GetLatencyMarkerInfoNV; + }; + + struct VertexInputBindingDescription + { + using NativeType = VkVertexInputBindingDescription; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VertexInputBindingDescription( uint32_t binding_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT + : binding( binding_ ) + , stride( stride_ ) + , inputRate( inputRate_ ) + { + } + + VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDescription( *reinterpret_cast( &rhs ) ) + { + } + + VertexInputBindingDescription & operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT + { + inputRate = inputRate_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( binding, stride, inputRate ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputBindingDescription const & ) const = default; +#else + bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ); +# endif + } + + bool operator!=( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t binding = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; + }; + + struct VertexInputAttributeDescription + { + using NativeType = VkVertexInputAttributeDescription; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
VertexInputAttributeDescription( uint32_t location_ = {}, + uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT + : location( location_ ) + , binding( binding_ ) + , format( format_ ) + , offset( offset_ ) + { + } + + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputAttributeDescription( *reinterpret_cast( &rhs ) ) + { + } + + VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT + { + location = location_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( location, binding, format, offset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputAttributeDescription const & ) const = default; +#else + bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) && ( offset == rhs.offset ); +# endif + } + + bool operator!=( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t location = {}; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t offset = {}; + }; + + struct PipelineVertexInputStateCreateInfo + { + using NativeType = VkPipelineVertexInputStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, + uint32_t vertexBindingDescriptionCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * 
pVertexBindingDescriptions_ = {}, + uint32_t vertexAttributeDescriptionCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ) + , pVertexBindingDescriptions( pVertexBindingDescriptions_ ) + , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ ) + , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineVertexInputStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDescriptions_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexAttributeDescriptions_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , vertexBindingDescriptionCount( static_cast( vertexBindingDescriptions_.size() ) ) + , pVertexBindingDescriptions( vertexBindingDescriptions_.data() ) + , vertexAttributeDescriptionCount( static_cast( vertexAttributeDescriptions_.size() ) ) + , pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineVertexInputStateCreateInfo & operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingDescriptionCount = vertexBindingDescriptionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + pVertexBindingDescriptions = pVertexBindingDescriptions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDescriptions_ ) + VULKAN_HPP_NOEXCEPT + { + vertexBindingDescriptionCount = static_cast( vertexBindingDescriptions_.size() ); + pVertexBindingDescriptions = vertexBindingDescriptions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + setVertexAttributeDescriptionCount( uint32_t 
vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + pVertexAttributeDescriptions = pVertexAttributeDescriptions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexAttributeDescriptions_ ) + VULKAN_HPP_NOEXCEPT + { + vertexAttributeDescriptionCount = static_cast( vertexAttributeDescriptions_.size() ); + pVertexAttributeDescriptions = vertexAttributeDescriptions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, flags, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineVertexInputStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) && + ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions ); +# endif + } + + bool operator!=( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {}; + uint32_t vertexBindingDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions = {}; + uint32_t vertexAttributeDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions = {}; + }; + + template <> + struct CppType + { + using Type = PipelineVertexInputStateCreateInfo; + }; + + struct PipelineInputAssemblyStateCreateInfo + { + using NativeType = VkPipelineInputAssemblyStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineInputAssemblyStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = 
VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , topology( topology_ ) + , primitiveRestartEnable( primitiveRestartEnable_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineInputAssemblyStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineInputAssemblyStateCreateInfo & operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT + { + topology = topology_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & + setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT + { + primitiveRestartEnable = primitiveRestartEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, topology, primitiveRestartEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineInputAssemblyStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( topology == rhs.topology ) && + ( primitiveRestartEnable == rhs.primitiveRestartEnable ); +# endif + } + + bool operator!=( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList; + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {}; + }; + + template <> + struct CppType + { + using Type = PipelineInputAssemblyStateCreateInfo; + }; + 
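+  // --------------------------------------------------------------------------
+  // Illustrative sketch (editor's addition, not part of the upstream Vulkan-Hpp
+  // header): the pipeline-state wrappers above mirror their Vk* C structs
+  // one-to-one and expose chainable setters, so a minimal vertex-input and
+  // input-assembly pair for a triangle-list pipeline could look like the
+  // following. `Vertex` is an assumed user-defined type and all variable names
+  // are placeholders chosen for this example only.
+  //
+  //   vk::VertexInputBindingDescription binding = vk::VertexInputBindingDescription{}
+  //                                                 .setBinding( 0 )
+  //                                                 .setStride( sizeof( Vertex ) )
+  //                                                 .setInputRate( vk::VertexInputRate::eVertex );
+  //
+  //   vk::VertexInputAttributeDescription attribute = vk::VertexInputAttributeDescription{}
+  //                                                     .setLocation( 0 )
+  //                                                     .setBinding( 0 )
+  //                                                     .setFormat( vk::Format::eR32G32B32Sfloat )
+  //                                                     .setOffset( 0 );
+  //
+  //   // The ArrayProxy setters (enhanced mode only) fill count and pointer in one call.
+  //   vk::PipelineVertexInputStateCreateInfo vertexInput{};
+  //   vertexInput.setVertexBindingDescriptions( binding ).setVertexAttributeDescriptions( attribute );
+  //
+  //   vk::PipelineInputAssemblyStateCreateInfo inputAssembly{};
+  //   inputAssembly.setTopology( vk::PrimitiveTopology::eTriangleList ).setPrimitiveRestartEnable( VK_FALSE );
+  // --------------------------------------------------------------------------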
+ struct PipelineTessellationStateCreateInfo + { + using NativeType = VkPipelineTessellationStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, + uint32_t patchControlPoints_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , patchControlPoints( patchControlPoints_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineTessellationStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineTessellationStateCreateInfo & operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT + { + patchControlPoints = patchControlPoints_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, patchControlPoints ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineTessellationStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( patchControlPoints == rhs.patchControlPoints ); +# endif + } + + bool operator!=( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {}; + uint32_t patchControlPoints = {}; + }; + + template <> + struct CppType + { + using Type = PipelineTessellationStateCreateInfo; + }; + + struct PipelineViewportStateCreateInfo + { + using NativeType = 
VkPipelineViewportStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ = {}, + uint32_t scissorCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , viewportCount( viewportCount_ ) + , pViewports( pViewports_ ) + , scissorCount( scissorCount_ ) + , pScissors( pScissors_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & scissors_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , viewportCount( static_cast( viewports_.size() ) ) + , pViewports( viewports_.data() ) + , scissorCount( static_cast( scissors_.size() ) ) + , pScissors( scissors_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = viewportCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT + { + pViewports = pViewports_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo & + setViewports( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( viewports_.size() ); + pViewports = viewports_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT + { + scissorCount = scissorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT + { + pScissors = pScissors_; + return *this; + } + 
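+    // Usage note (editor's addition, not part of the upstream header): when a
+    // pipeline declares VK_DYNAMIC_STATE_VIEWPORT / VK_DYNAMIC_STATE_SCISSOR,
+    // only the counts need to be valid here and the pointer members may stay
+    // null; the actual rectangles are supplied at record time, e.g.
+    //
+    //   vk::PipelineViewportStateCreateInfo viewportState{};
+    //   viewportState.setViewportCount( 1 ).setScissorCount( 1 );
+    //   // later, during command recording (enhanced mode):
+    //   //   commandBuffer.setViewport( 0, viewport );
+    //   //   commandBuffer.setScissor( 0, scissor );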
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo & + setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & scissors_ ) VULKAN_HPP_NOEXCEPT + { + scissorCount = static_cast( scissors_.size() ); + pScissors = scissors_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, viewportCount, pViewports, scissorCount, pScissors ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewportCount == rhs.viewportCount ) && + ( pViewports == rhs.pViewports ) && ( scissorCount == rhs.scissorCount ) && ( pScissors == rhs.pScissors ); +# endif + } + + bool operator!=( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::Viewport * pViewports = {}; + uint32_t scissorCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportStateCreateInfo; + }; + + struct PipelineRasterizationStateCreateInfo + { + using NativeType = VkPipelineRasterizationStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, + VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, + VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, + VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, + float depthBiasConstantFactor_ = {}, + float depthBiasClamp_ = {}, + float depthBiasSlopeFactor_ = {}, + float lineWidth_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , depthClampEnable( depthClampEnable_ ) + , rasterizerDiscardEnable( rasterizerDiscardEnable_ ) + , polygonMode( polygonMode_ ) + , cullMode( cullMode_ ) + , frontFace( frontFace_ ) + , depthBiasEnable( depthBiasEnable_ ) + , depthBiasConstantFactor( depthBiasConstantFactor_ ) + , depthBiasClamp( depthBiasClamp_ ) + , depthBiasSlopeFactor( depthBiasSlopeFactor_ ) + , 
lineWidth( lineWidth_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRasterizationStateCreateInfo & operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthClampEnable = depthClampEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT + { + rasterizerDiscardEnable = rasterizerDiscardEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT + { + polygonMode = polygonMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT + { + cullMode = cullMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT + { + frontFace = frontFace_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasEnable = depthBiasEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasConstantFactor = depthBiasConstantFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasClamp = depthBiasClamp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasSlopeFactor = depthBiasSlopeFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT + { + lineWidth = lineWidth_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + depthClampEnable, + rasterizerDiscardEnable, + polygonMode, + cullMode, + frontFace, + depthBiasEnable, + depthBiasConstantFactor, + depthBiasClamp, + depthBiasSlopeFactor, + lineWidth ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthClampEnable == rhs.depthClampEnable ) && + ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) && ( polygonMode == rhs.polygonMode ) && ( cullMode == rhs.cullMode ) && + ( frontFace == rhs.frontFace ) && ( depthBiasEnable == rhs.depthBiasEnable ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) && + ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) && ( lineWidth == rhs.lineWidth ); +# endif + } + + bool operator!=( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {}; + VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill; + VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {}; + VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {}; + float depthBiasConstantFactor = {}; + float depthBiasClamp = {}; + float depthBiasSlopeFactor = {}; + float lineWidth = {}; + }; + + template <> + struct CppType + { + using Type = PipelineRasterizationStateCreateInfo; + }; + + struct PipelineMultisampleStateCreateInfo + { + using NativeType = VkPipelineMultisampleStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineMultisampleStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, + float minSampleShading_ = {}, + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , rasterizationSamples( rasterizationSamples_ ) + , sampleShadingEnable( sampleShadingEnable_ ) + , minSampleShading( minSampleShading_ ) + , pSampleMask( pSampleMask_ ) + , alphaToCoverageEnable( alphaToCoverageEnable_ ) + , alphaToOneEnable( alphaToOneEnable_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( 
PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineMultisampleStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineMultisampleStateCreateInfo & operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationSamples = rasterizationSamples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT + { + sampleShadingEnable = sampleShadingEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT + { + minSampleShading = minSampleShading_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT + { + pSampleMask = pSampleMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT + { + alphaToCoverageEnable = alphaToCoverageEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT + { + alphaToOneEnable = alphaToOneEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, rasterizationSamples, sampleShadingEnable, minSampleShading, pSampleMask, alphaToCoverageEnable, alphaToOneEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineMultisampleStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rasterizationSamples == rhs.rasterizationSamples ) && + ( sampleShadingEnable == rhs.sampleShadingEnable ) && ( minSampleShading == 
rhs.minSampleShading ) && ( pSampleMask == rhs.pSampleMask ) && + ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) && ( alphaToOneEnable == rhs.alphaToOneEnable ); +# endif + } + + bool operator!=( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {}; + float minSampleShading = {}; + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {}; + }; + + template <> + struct CppType + { + using Type = PipelineMultisampleStateCreateInfo; + }; + + struct StencilOpState + { + using NativeType = VkStencilOpState; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + uint32_t compareMask_ = {}, + uint32_t writeMask_ = {}, + uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT + : failOp( failOp_ ) + , passOp( passOp_ ) + , depthFailOp( depthFailOp_ ) + , compareOp( compareOp_ ) + , compareMask( compareMask_ ) + , writeMask( writeMask_ ) + , reference( reference_ ) + { + } + + VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT : StencilOpState( *reinterpret_cast( &rhs ) ) {} + + StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT + { + failOp = failOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT + { + passOp = passOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT + { + depthFailOp = depthFailOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT + { + compareOp = compareOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT + { + compareMask = compareMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT + { + writeMask = writeMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT + { + reference = reference_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + 
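+    // Illustrative sketch (editor's addition, not part of the upstream
+    // header): a common "mark rendered pixels" stencil configuration built
+    // with the setters above; the variable name is a placeholder.
+    //
+    //   vk::StencilOpState markPixels = vk::StencilOpState{}
+    //                                     .setCompareOp( vk::CompareOp::eAlways )
+    //                                     .setFailOp( vk::StencilOp::eKeep )
+    //                                     .setDepthFailOp( vk::StencilOp::eKeep )
+    //                                     .setPassOp( vk::StencilOp::eReplace )
+    //                                     .setCompareMask( 0xFF )
+    //                                     .setWriteMask( 0xFF )
+    //                                     .setReference( 1 );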
operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( failOp, passOp, depthFailOp, compareOp, compareMask, writeMask, reference ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( StencilOpState const & ) const = default; +#else + bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( failOp == rhs.failOp ) && ( passOp == rhs.passOp ) && ( depthFailOp == rhs.depthFailOp ) && ( compareOp == rhs.compareOp ) && + ( compareMask == rhs.compareMask ) && ( writeMask == rhs.writeMask ) && ( reference == rhs.reference ); +# endif + } + + bool operator!=( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + uint32_t compareMask = {}; + uint32_t writeMask = {}; + uint32_t reference = {}; + }; + + struct PipelineDepthStencilStateCreateInfo + { + using NativeType = VkPipelineDepthStencilStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, + VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, + VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, + float minDepthBounds_ = {}, + float maxDepthBounds_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , depthTestEnable( depthTestEnable_ ) + , depthWriteEnable( depthWriteEnable_ ) + , depthCompareOp( depthCompareOp_ ) + , depthBoundsTestEnable( depthBoundsTestEnable_ ) + , stencilTestEnable( stencilTestEnable_ ) + , front( front_ ) + , back( back_ ) + , minDepthBounds( minDepthBounds_ ) + , maxDepthBounds( maxDepthBounds_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineDepthStencilStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineDepthStencilStateCreateInfo & operator=( 
VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthTestEnable = depthTestEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthWriteEnable = depthWriteEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT + { + depthCompareOp = depthCompareOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & + setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthBoundsTestEnable = depthBoundsTestEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + stencilTestEnable = stencilTestEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT + { + front = front_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT + { + back = back_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT + { + minDepthBounds = minDepthBounds_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT + { + maxDepthBounds = maxDepthBounds_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + depthTestEnable, + depthWriteEnable, + depthCompareOp, + depthBoundsTestEnable, + stencilTestEnable, + front, + back, + minDepthBounds, + maxDepthBounds ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineDepthStencilStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthTestEnable == rhs.depthTestEnable ) && + ( 
depthWriteEnable == rhs.depthWriteEnable ) && ( depthCompareOp == rhs.depthCompareOp ) && + ( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) && ( stencilTestEnable == rhs.stencilTestEnable ) && ( front == rhs.front ) && + ( back == rhs.back ) && ( minDepthBounds == rhs.minDepthBounds ) && ( maxDepthBounds == rhs.maxDepthBounds ); +# endif + } + + bool operator!=( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {}; + VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {}; + VULKAN_HPP_NAMESPACE::StencilOpState front = {}; + VULKAN_HPP_NAMESPACE::StencilOpState back = {}; + float minDepthBounds = {}; + float maxDepthBounds = {}; + }; + + template <> + struct CppType + { + using Type = PipelineDepthStencilStateCreateInfo; + }; + + struct PipelineColorBlendAttachmentState + { + using NativeType = VkPipelineColorBlendAttachmentState; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, + VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT + : blendEnable( blendEnable_ ) + , srcColorBlendFactor( srcColorBlendFactor_ ) + , dstColorBlendFactor( dstColorBlendFactor_ ) + , colorBlendOp( colorBlendOp_ ) + , srcAlphaBlendFactor( srcAlphaBlendFactor_ ) + , dstAlphaBlendFactor( dstAlphaBlendFactor_ ) + , alphaBlendOp( alphaBlendOp_ ) + , colorWriteMask( colorWriteMask_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorBlendAttachmentState( *reinterpret_cast( &rhs ) ) + { + } + + PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT + { + blendEnable = blendEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setSrcColorBlendFactor( 
VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + srcColorBlendFactor = srcColorBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + dstColorBlendFactor = dstColorBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT + { + colorBlendOp = colorBlendOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + srcAlphaBlendFactor = srcAlphaBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT + { + dstAlphaBlendFactor = dstAlphaBlendFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT + { + alphaBlendOp = alphaBlendOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT + { + colorWriteMask = colorWriteMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + blendEnable, srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp, colorWriteMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorBlendAttachmentState const & ) const = default; +#else + bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( blendEnable == rhs.blendEnable ) && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && + ( colorBlendOp == rhs.colorBlendOp ) && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) && + ( alphaBlendOp == rhs.alphaBlendOp ) && ( colorWriteMask == rhs.colorWriteMask ); +# endif + } + + bool operator!=( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {}; + VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp 
alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {}; + }; + + struct PipelineColorBlendStateCreateInfo + { + using NativeType = VkPipelineColorBlendStateCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, + VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ = {}, + std::array const & blendConstants_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , logicOpEnable( logicOpEnable_ ) + , logicOp( logicOp_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , blendConstants( blendConstants_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorBlendStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorBlendStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_, + VULKAN_HPP_NAMESPACE::LogicOp logicOp_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + std::array const & blendConstants_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , logicOpEnable( logicOpEnable_ ) + , logicOp( logicOp_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , blendConstants( blendConstants_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT + { + logicOpEnable = logicOpEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT + { + logicOp = logicOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
+  struct PipelineDynamicStateCreateInfo
+  {
+    using NativeType = VkPipelineDynamicStateCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {},
+                                                         uint32_t dynamicStateCount_ = {},
+                                                         const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ = {},
+                                                         const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , dynamicStateCount( dynamicStateCount_ )
+      , pDynamicStates( pDynamicStates_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineDynamicStateCreateInfo( *reinterpret_cast<PipelineDynamicStateCreateInfo const *>( &rhs ) )
+    {
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_,
+                                    VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_,
+                                    const void * pNext_ = nullptr )
+      : pNext( pNext_ ), flags( flags_ ), dynamicStateCount( static_cast<uint32_t>( dynamicStates_.size() ) ), pDynamicStates( dynamicStates_.data() )
+    {
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dynamicStateCount = dynamicStateCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDynamicStates = pDynamicStates_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineDynamicStateCreateInfo &
+      setDynamicStates( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dynamicStateCount = static_cast<uint32_t>( dynamicStates_.size() );
+      pDynamicStates    = dynamicStates_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo *>( this );
+    }
+
+    operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineDynamicStateCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::DynamicState * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, dynamicStateCount, pDynamicStates );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineDynamicStateCreateInfo const & ) const = default;
+#else
+    bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dynamicStateCount == rhs.dynamicStateCount ) &&
+             ( pDynamicStates == rhs.pDynamicStates );
+#  endif
+    }
+
+    bool operator!=( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {};
+    uint32_t dynamicStateCount = {};
+    const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineDynamicStateCreateInfo>
+  {
+    using Type = PipelineDynamicStateCreateInfo;
+  };
+
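+  // --- Editorial sketch (not part of the generated Vulkan-Hpp header) -------
+  // Declaring viewport and scissor as dynamic, so they are supplied at record
+  // time (vkCmdSetViewport / vkCmdSetScissor) instead of being baked into the
+  // pipeline. The array has static storage so pDynamicStates stays valid;
+  // names are hypothetical.
+  namespace example_detail
+  {
+    static constexpr DynamicState exampleDynamicStates[] = { DynamicState::eViewport, DynamicState::eScissor };
+  }  // namespace example_detail
+
+  inline PipelineDynamicStateCreateInfo exampleDynamicState() VULKAN_HPP_NOEXCEPT
+  {
+    PipelineDynamicStateCreateInfo info = {};
+    info.dynamicStateCount = 2;
+    info.pDynamicStates    = example_detail::exampleDynamicStates;
+    return info;
+  }
+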
+  struct GraphicsPipelineCreateInfo
+  {
+    using NativeType = VkGraphicsPipelineCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
+                                                        uint32_t stageCount_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {},
+                                                        VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+                                                        VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
+                                                        uint32_t subpass_ = {},
+                                                        VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+                                                        int32_t basePipelineIndex_ = {},
+                                                        const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , stageCount( stageCount_ )
+      , pStages( pStages_ )
+      , pVertexInputState( pVertexInputState_ )
+      , pInputAssemblyState( pInputAssemblyState_ )
+      , pTessellationState( pTessellationState_ )
+      , pViewportState( pViewportState_ )
+      , pRasterizationState( pRasterizationState_ )
+      , pMultisampleState( pMultisampleState_ )
+      , pDepthStencilState( pDepthStencilState_ )
+      , pColorBlendState( pColorBlendState_ )
+      , pDynamicState( pDynamicState_ )
+      , layout( layout_ )
+      , renderPass( renderPass_ )
+      , subpass( subpass_ )
+      , basePipelineHandle( basePipelineHandle_ )
+      , basePipelineIndex( basePipelineIndex_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GraphicsPipelineCreateInfo( *reinterpret_cast<GraphicsPipelineCreateInfo const *>( &rhs ) )
+    {
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_,
+                                VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_,
+                                const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {},
+                                const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {},
+                                const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {},
+                                const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {},
+                                const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {},
+                                const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {},
+                                const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {},
+                                const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {},
+                                const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {},
+                                VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+                                VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
+                                uint32_t subpass_ = {},
+                                VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+                                int32_t basePipelineIndex_ = {},
+                                const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , stageCount( static_cast<uint32_t>( stages_.size() ) )
+      , pStages( stages_.data() )
+      , pVertexInputState( pVertexInputState_ )
+      , pInputAssemblyState( pInputAssemblyState_ )
+      , pTessellationState( pTessellationState_ )
+      , pViewportState( pViewportState_ )
+      , pRasterizationState( pRasterizationState_ )
+      , pMultisampleState( pMultisampleState_ )
+      , pDepthStencilState( pDepthStencilState_ )
+      , pColorBlendState( pColorBlendState_ )
+      , pDynamicState( pDynamicState_ )
+      , layout( layout_ )
+      , renderPass( renderPass_ )
+      , subpass( subpass_ )
+      , basePipelineHandle( basePipelineHandle_ )
+      , basePipelineIndex( basePipelineIndex_ )
+    {
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = stageCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStages = pStages_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    GraphicsPipelineCreateInfo &
+      setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = static_cast<uint32_t>( stages_.size() );
+      pStages    = stages_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+      setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexInputState = pVertexInputState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+      setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInputAssemblyState = pInputAssemblyState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+      setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTessellationState = pTessellationState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+      setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewportState = pViewportState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+      setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRasterizationState = pRasterizationState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+      setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMultisampleState = pMultisampleState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+      setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthStencilState = pDepthStencilState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+      setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorBlendState = pColorBlendState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
+      setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDynamicState = pDynamicState_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( this );
+    }
+
+    operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGraphicsPipelineCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &,
+               const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * const &,
+               const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * const &,
+               const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * const &,
+               const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * const &,
+               const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * const &,
+               const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * const &,
+               const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * const &,
+               const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * const &,
+               const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * const &,
+               VULKAN_HPP_NAMESPACE::PipelineLayout const &,
+               VULKAN_HPP_NAMESPACE::RenderPass const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::Pipeline const &,
+               int32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       flags,
+                       stageCount,
+                       pStages,
+                       pVertexInputState,
+                       pInputAssemblyState,
+                       pTessellationState,
+                       pViewportState,
+                       pRasterizationState,
+                       pMultisampleState,
+                       pDepthStencilState,
+                       pColorBlendState,
+                       pDynamicState,
+                       layout,
+                       renderPass,
+                       subpass,
+                       basePipelineHandle,
+                       basePipelineIndex );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( GraphicsPipelineCreateInfo const & ) const = default;
+#else
+    bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) &&
+             ( pVertexInputState == rhs.pVertexInputState ) && ( pInputAssemblyState == rhs.pInputAssemblyState ) &&
+             ( pTessellationState == rhs.pTessellationState ) && ( pViewportState == rhs.pViewportState ) &&
+             ( pRasterizationState == rhs.pRasterizationState ) && ( pMultisampleState == rhs.pMultisampleState ) &&
+             ( pDepthStencilState == rhs.pDepthStencilState ) && ( pColorBlendState == rhs.pColorBlendState ) && ( pDynamicState == rhs.pDynamicState ) &&
+             ( layout == rhs.layout ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) && ( basePipelineHandle == rhs.basePipelineHandle ) &&
+             ( basePipelineIndex == rhs.basePipelineIndex );
+#  endif
+    }
+
+    bool operator!=( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+    uint32_t stageCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {};
+    const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    uint32_t subpass = {};
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+    int32_t basePipelineIndex = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eGraphicsPipelineCreateInfo>
+  {
+    using Type = GraphicsPipelineCreateInfo;
+  };
+
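+  // --- Editorial sketch (not part of the generated Vulkan-Hpp header) -------
+  // Assembling a GraphicsPipelineCreateInfo from previously prepared state
+  // blocks (e.g. exampleColorBlendState / exampleDynamicState above). All p*
+  // pointers are borrowed and must outlive pipeline creation; the remaining
+  // fixed-function states are left null only for brevity and must also be
+  // provided for a valid pipeline. Parameter and helper names hypothetical.
+  inline GraphicsPipelineCreateInfo exampleGraphicsPipelineInfo( uint32_t stageCount,
+                                                                 const PipelineShaderStageCreateInfo * pStages,
+                                                                 const PipelineColorBlendStateCreateInfo * pColorBlend,
+                                                                 const PipelineDynamicStateCreateInfo * pDynamic,
+                                                                 PipelineLayout layout,
+                                                                 RenderPass renderPass ) VULKAN_HPP_NOEXCEPT
+  {
+    GraphicsPipelineCreateInfo info = {};
+    info.stageCount        = stageCount;
+    info.pStages           = pStages;
+    info.pColorBlendState  = pColorBlend;
+    info.pDynamicState     = pDynamic;
+    info.layout            = layout;      // must come from the same live Device
+    info.renderPass        = renderPass;  // pipeline must be compatible with this render pass
+    info.subpass           = 0;
+    info.basePipelineIndex = -1;          // not deriving from another pipeline
+    return info;
+  }
+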
+  struct GraphicsPipelineLibraryCreateInfoEXT
+  {
+    using NativeType = VkGraphicsPipelineLibraryCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ = {},
+                                                               const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    GraphicsPipelineLibraryCreateInfoEXT( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GraphicsPipelineLibraryCreateInfoEXT( *reinterpret_cast<GraphicsPipelineLibraryCreateInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    GraphicsPipelineLibraryCreateInfoEXT & operator=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    GraphicsPipelineLibraryCreateInfoEXT & operator=( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined(
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkGraphicsPipelineLibraryCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsPipelineLibraryCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsPipelineLibraryCreateInfoEXT const & ) const = default; +#else + bool operator==( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags = {}; + }; + + template <> + struct CppType + { + using Type = GraphicsPipelineLibraryCreateInfoEXT; + }; + + struct GraphicsShaderGroupCreateInfoNV + { + using NativeType = VkGraphicsShaderGroupCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , pVertexInputState( pVertexInputState_ ) + , pTessellationState( pTessellationState_ ) + { + } + + VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GraphicsShaderGroupCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , pVertexInputState( pVertexInputState_ ) + , pTessellationState( pTessellationState_ ) + { + } +# 
endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsShaderGroupCreateInfoNV & + setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & + setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT + { + pVertexInputState = pVertexInputState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & + setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT + { + pTessellationState = pTessellationState_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stageCount, pStages, pVertexInputState, pTessellationState ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsShaderGroupCreateInfoNV const & ) const = default; +#else + bool operator==( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( pVertexInputState == rhs.pVertexInputState ) && ( pTessellationState == rhs.pTessellationState ); +# endif + } + + bool operator!=( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV; + const void * pNext = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {}; + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {}; + }; + + template <> + struct CppType + { + using 
Type = GraphicsShaderGroupCreateInfoNV; + }; + + struct GraphicsPipelineShaderGroupsCreateInfoNV + { + using NativeType = VkGraphicsPipelineShaderGroupsCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ = {}, + uint32_t pipelineCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , groupCount( groupCount_ ) + , pGroups( pGroups_ ) + , pipelineCount( pipelineCount_ ) + , pPipelines( pPipelines_ ) + { + } + + VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GraphicsPipelineShaderGroupsCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineShaderGroupsCreateInfoNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelines_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , pipelineCount( static_cast( pipelines_.size() ) ) + , pPipelines( pipelines_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GraphicsPipelineShaderGroupsCreateInfoNV & operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & + setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineShaderGroupsCreateInfoNV & setGroups( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCount = pipelineCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ ) VULKAN_HPP_NOEXCEPT + { + pPipelines = pPipelines_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
GraphicsPipelineShaderGroupsCreateInfoNV & + setPipelines( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelines_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCount = static_cast( pipelines_.size() ); + pPipelines = pipelines_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, groupCount, pGroups, pipelineCount, pPipelines ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const & ) const = default; +#else + bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && + ( pipelineCount == rhs.pipelineCount ) && ( pPipelines == rhs.pPipelines ); +# endif + } + + bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; + const void * pNext = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups = {}; + uint32_t pipelineCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines = {}; + }; + + template <> + struct CppType + { + using Type = GraphicsPipelineShaderGroupsCreateInfoNV; + }; + + struct XYColorEXT + { + using NativeType = VkXYColorEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT + : x( x_ ) + , y( y_ ) + { + } + + VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT : XYColorEXT( *reinterpret_cast( &rhs ) ) {} + + XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( x, y ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( XYColorEXT const & ) const = default; +#else + bool operator==( XYColorEXT 
const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( x == rhs.x ) && ( y == rhs.y ); +# endif + } + + bool operator!=( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float x = {}; + float y = {}; + }; + + struct HdrMetadataEXT + { + using NativeType = VkHdrMetadataEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, + VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, + float maxLuminance_ = {}, + float minLuminance_ = {}, + float maxContentLightLevel_ = {}, + float maxFrameAverageLightLevel_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , displayPrimaryRed( displayPrimaryRed_ ) + , displayPrimaryGreen( displayPrimaryGreen_ ) + , displayPrimaryBlue( displayPrimaryBlue_ ) + , whitePoint( whitePoint_ ) + , maxLuminance( maxLuminance_ ) + , minLuminance( minLuminance_ ) + , maxContentLightLevel( maxContentLightLevel_ ) + , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ ) + { + } + + VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT : HdrMetadataEXT( *reinterpret_cast( &rhs ) ) {} + + HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT + { + displayPrimaryRed = displayPrimaryRed_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT + { + displayPrimaryGreen = displayPrimaryGreen_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT + { + displayPrimaryBlue = displayPrimaryBlue_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT + { + whitePoint = whitePoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT + { + maxLuminance = maxLuminance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT + { + minLuminance = minLuminance_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT + { + maxContentLightLevel = maxContentLightLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & 
setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT + { + maxFrameAverageLightLevel = maxFrameAverageLightLevel_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + displayPrimaryRed, + displayPrimaryGreen, + displayPrimaryBlue, + whitePoint, + maxLuminance, + minLuminance, + maxContentLightLevel, + maxFrameAverageLightLevel ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HdrMetadataEXT const & ) const = default; +#else + bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPrimaryRed == rhs.displayPrimaryRed ) && + ( displayPrimaryGreen == rhs.displayPrimaryGreen ) && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) && ( whitePoint == rhs.whitePoint ) && + ( maxLuminance == rhs.maxLuminance ) && ( minLuminance == rhs.minLuminance ) && ( maxContentLightLevel == rhs.maxContentLightLevel ) && + ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel ); +# endif + } + + bool operator!=( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {}; + float maxLuminance = {}; + float minLuminance = {}; + float maxContentLightLevel = {}; + float maxFrameAverageLightLevel = {}; + }; + + template <> + struct CppType + { + using Type = HdrMetadataEXT; + }; + + struct HeadlessSurfaceCreateInfoEXT + { + using NativeType = VkHeadlessSurfaceCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : HeadlessSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + 
pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HeadlessSurfaceCreateInfoEXT const & ) const = default; +#else + bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {}; + }; + + template <> + struct CppType + { + using Type = HeadlessSurfaceCreateInfoEXT; + }; + + struct HostImageCopyDevicePerformanceQueryEXT + { + using NativeType = VkHostImageCopyDevicePerformanceQueryEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageCopyDevicePerformanceQueryEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQueryEXT( VULKAN_HPP_NAMESPACE::Bool32 optimalDeviceAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , optimalDeviceAccess( optimalDeviceAccess_ ) + , identicalMemoryLayout( identicalMemoryLayout_ ) + { + } + + VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQueryEXT( HostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HostImageCopyDevicePerformanceQueryEXT( VkHostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : HostImageCopyDevicePerformanceQueryEXT( *reinterpret_cast( &rhs ) ) + { + } + + HostImageCopyDevicePerformanceQueryEXT & operator=( HostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + HostImageCopyDevicePerformanceQueryEXT & operator=( VkHostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkHostImageCopyDevicePerformanceQueryEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHostImageCopyDevicePerformanceQueryEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, optimalDeviceAccess, identicalMemoryLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
HostImageCopyDevicePerformanceQueryEXT const & ) const = default; +#else + bool operator==( HostImageCopyDevicePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( optimalDeviceAccess == rhs.optimalDeviceAccess ) && + ( identicalMemoryLayout == rhs.identicalMemoryLayout ); +# endif + } + + bool operator!=( HostImageCopyDevicePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageCopyDevicePerformanceQueryEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 optimalDeviceAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout = {}; + }; + + template <> + struct CppType + { + using Type = HostImageCopyDevicePerformanceQueryEXT; + }; + + struct HostImageLayoutTransitionInfoEXT + { + using NativeType = VkHostImageLayoutTransitionInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageLayoutTransitionInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , image( image_ ) + , oldLayout( oldLayout_ ) + , newLayout( newLayout_ ) + , subresourceRange( subresourceRange_ ) + { + } + + VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfoEXT( HostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HostImageLayoutTransitionInfoEXT( VkHostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : HostImageLayoutTransitionInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + HostImageLayoutTransitionInfoEXT & operator=( HostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + HostImageLayoutTransitionInfoEXT & operator=( VkHostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT + { + oldLayout = oldLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT + { + newLayout = newLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + 
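+    // --- Editorial note (not part of the generated Vulkan-Hpp header) -------
+    // With VK_EXT_host_image_copy enabled, this struct describes an image
+    // layout transition performed on the host rather than in a command
+    // buffer, roughly (sketch; the handle names are hypothetical):
+    //   HostImageLayoutTransitionInfoEXT info( image,
+    //                                          ImageLayout::eUndefined,
+    //                                          ImageLayout::eTransferDstOptimal,
+    //                                          { ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
+    //   device.transitionImageLayoutEXT( info );
+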
operator VkHostImageLayoutTransitionInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHostImageLayoutTransitionInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image, oldLayout, newLayout, subresourceRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HostImageLayoutTransitionInfoEXT const & ) const = default; +#else + bool operator==( HostImageLayoutTransitionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) && + ( subresourceRange == rhs.subresourceRange ); +# endif + } + + bool operator!=( HostImageLayoutTransitionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageLayoutTransitionInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + + template <> + struct CppType + { + using Type = HostImageLayoutTransitionInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + struct IOSSurfaceCreateInfoMVK + { + using NativeType = VkIOSSurfaceCreateInfoMVK; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, + const void * pView_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , pView( pView_ ) + { + } + + VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + : IOSSurfaceCreateInfoMVK( *reinterpret_cast( &rhs ) ) + { + } + + IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT + { + pView = pView_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pView ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IOSSurfaceCreateInfoMVK const & ) const = default; +# else + bool operator==( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView ); +# endif + } + + bool operator!=( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {}; + const void * pView = {}; + }; + + template <> + struct CppType + { + using Type = IOSSurfaceCreateInfoMVK; + }; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + + struct ImageBlit + { + using NativeType = VkImageBlit; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + std::array const & srcOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + std::array const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffsets( srcOffsets_ ) + , dstSubresource( dstSubresource_ ) + , dstOffsets( dstOffsets_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit( *reinterpret_cast( &rhs ) ) {} + + ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT + { + srcOffsets = srcOffsets_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dstOffsets = dstOffsets_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageBlit &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return 
std::tie( srcSubresource, srcOffsets, dstSubresource, dstOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageBlit const & ) const = default; +#else + bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffsets == rhs.dstOffsets ); +# endif + } + + bool operator!=( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; + }; + + struct ImageCaptureDescriptorDataInfoEXT + { + using NativeType = VkImageCaptureDescriptorDataInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCaptureDescriptorDataInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , image( image_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageCaptureDescriptorDataInfoEXT( ImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCaptureDescriptorDataInfoEXT( VkImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageCaptureDescriptorDataInfoEXT & operator=( ImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageCaptureDescriptorDataInfoEXT & operator=( VkImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCaptureDescriptorDataInfoEXT const & ) const = default; +#else + bool operator==( ImageCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); +# endif + } + + bool operator!=( ImageCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs 
); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCaptureDescriptorDataInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + }; + + template <> + struct CppType + { + using Type = ImageCaptureDescriptorDataInfoEXT; + }; + + struct ImageCompressionControlEXT + { + using NativeType = VkImageCompressionControlEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionControlEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_ = {}, + uint32_t compressionControlPlaneCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , compressionControlPlaneCount( compressionControlPlaneCount_ ) + , pFixedRateFlags( pFixedRateFlags_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCompressionControlEXT( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageCompressionControlEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageCompressionControlEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fixedRateFlags_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , compressionControlPlaneCount( static_cast( fixedRateFlags_.size() ) ) + , pFixedRateFlags( fixedRateFlags_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ImageCompressionControlEXT & operator=( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageCompressionControlEXT & operator=( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setFlags( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setCompressionControlPlaneCount( uint32_t compressionControlPlaneCount_ ) VULKAN_HPP_NOEXCEPT + { + compressionControlPlaneCount = compressionControlPlaneCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & + setPFixedRateFlags( VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ ) VULKAN_HPP_NOEXCEPT + { + pFixedRateFlags = pFixedRateFlags_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageCompressionControlEXT & setFixedRateFlags( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fixedRateFlags_ ) VULKAN_HPP_NOEXCEPT + { + compressionControlPlaneCount = static_cast( fixedRateFlags_.size() ); + pFixedRateFlags = fixedRateFlags_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageCompressionControlEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkImageCompressionControlEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, compressionControlPlaneCount, pFixedRateFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCompressionControlEXT const & ) const = default; +#else + bool operator==( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( compressionControlPlaneCount == rhs.compressionControlPlaneCount ) && ( pFixedRateFlags == rhs.pFixedRateFlags ); +# endif + } + + bool operator!=( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCompressionControlEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags = {}; + uint32_t compressionControlPlaneCount = {}; + VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags = {}; + }; + + template <> + struct CppType + { + using Type = ImageCompressionControlEXT; + }; + + struct ImageCompressionPropertiesEXT + { + using NativeType = VkImageCompressionPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT imageCompressionFlags_ = {}, + VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , imageCompressionFlags( imageCompressionFlags_ ) + , imageCompressionFixedRateFlags( imageCompressionFixedRateFlags_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCompressionPropertiesEXT( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageCompressionPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageCompressionPropertiesEXT & operator=( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageCompressionPropertiesEXT & operator=( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkImageCompressionPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCompressionPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageCompressionFlags, imageCompressionFixedRateFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCompressionPropertiesEXT const & ) const = default; +#else + bool operator==( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( 
VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionFlags == rhs.imageCompressionFlags ) && + ( imageCompressionFixedRateFlags == rhs.imageCompressionFixedRateFlags ); +# endif + } + + bool operator!=( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCompressionPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT imageCompressionFlags = {}; + VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags = {}; + }; + + template <> + struct CppType + { + using Type = ImageCompressionPropertiesEXT; + }; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImageFormatConstraintsInfoFUCHSIA + { + using NativeType = VkImageFormatConstraintsInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatConstraintsInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ = {}, + uint64_t sysmemPixelFormat_ = {}, + uint32_t colorSpaceCount_ = {}, + const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , imageCreateInfo( imageCreateInfo_ ) + , requiredFormatFeatures( requiredFormatFeatures_ ) + , flags( flags_ ) + , sysmemPixelFormat( sysmemPixelFormat_ ) + , colorSpaceCount( colorSpaceCount_ ) + , pColorSpaces( pColorSpaces_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatConstraintsInfoFUCHSIA( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_, + VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_, + uint64_t sysmemPixelFormat_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorSpaces_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , imageCreateInfo( imageCreateInfo_ ) + , requiredFormatFeatures( requiredFormatFeatures_ ) + , flags( flags_ ) + , sysmemPixelFormat( sysmemPixelFormat_ ) + , colorSpaceCount( static_cast( colorSpaces_.size() ) ) + , pColorSpaces( colorSpaces_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ImageFormatConstraintsInfoFUCHSIA & operator=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageFormatConstraintsInfoFUCHSIA & operator=( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + 
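+    // NOTE (illustrative aside, not part of the generated header): each
+    // setter returns *this, so create-info structs are normally filled by
+    // chaining. A minimal sketch, where `createInfo` and `colorSpaces` are
+    // placeholder names for a valid vk::ImageCreateInfo and an array proxy
+    // of vk::SysmemColorSpaceFUCHSIA:
+    //
+    //   auto constraints = vk::ImageFormatConstraintsInfoFUCHSIA{}
+    //                          .setImageCreateInfo( createInfo )
+    //                          .setColorSpaces( colorSpaces );  // sets colorSpaceCount and pColorSpaces together
+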
VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & + setImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & imageCreateInfo_ ) VULKAN_HPP_NOEXCEPT + { + imageCreateInfo = imageCreateInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & + setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT + { + requiredFormatFeatures = requiredFormatFeatures_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT + { + sysmemPixelFormat = sysmemPixelFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setColorSpaceCount( uint32_t colorSpaceCount_ ) VULKAN_HPP_NOEXCEPT + { + colorSpaceCount = colorSpaceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & + setPColorSpaces( const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ ) VULKAN_HPP_NOEXCEPT + { + pColorSpaces = pColorSpaces_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatConstraintsInfoFUCHSIA & setColorSpaces( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorSpaces_ ) VULKAN_HPP_NOEXCEPT + { + colorSpaceCount = static_cast( colorSpaces_.size() ); + pColorSpaces = colorSpaces_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageFormatConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageCreateInfo, requiredFormatFeatures, flags, sysmemPixelFormat, colorSpaceCount, pColorSpaces ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatConstraintsInfoFUCHSIA const & ) const = default; +# else + bool operator==( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCreateInfo == rhs.imageCreateInfo ) && + ( requiredFormatFeatures == rhs.requiredFormatFeatures ) && ( flags == rhs.flags ) && ( sysmemPixelFormat == rhs.sysmemPixelFormat ) && + ( colorSpaceCount == rhs.colorSpaceCount ) && ( pColorSpaces == rhs.pColorSpaces ); +# endif + } + + bool operator!=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatConstraintsInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {}; + VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags = {}; + uint64_t sysmemPixelFormat = {}; + uint32_t colorSpaceCount = {}; + const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * 
pColorSpaces = {}; + }; + + template <> + struct CppType + { + using Type = ImageFormatConstraintsInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImageConstraintsInfoFUCHSIA + { + using NativeType = VkImageConstraintsInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageConstraintsInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( uint32_t formatConstraintsCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ = {}, + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, + VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , formatConstraintsCount( formatConstraintsCount_ ) + , pFormatConstraints( pFormatConstraints_ ) + , bufferCollectionConstraints( bufferCollectionConstraints_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageConstraintsInfoFUCHSIA( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageConstraintsInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & formatConstraints_, + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, + VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , formatConstraintsCount( static_cast( formatConstraints_.size() ) ) + , pFormatConstraints( formatConstraints_.data() ) + , bufferCollectionConstraints( bufferCollectionConstraints_ ) + , flags( flags_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ImageConstraintsInfoFUCHSIA & operator=( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageConstraintsInfoFUCHSIA & operator=( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFormatConstraintsCount( uint32_t formatConstraintsCount_ ) VULKAN_HPP_NOEXCEPT + { + formatConstraintsCount = formatConstraintsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & + setPFormatConstraints( const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ ) VULKAN_HPP_NOEXCEPT + { + pFormatConstraints = pFormatConstraints_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageConstraintsInfoFUCHSIA & setFormatConstraints( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & formatConstraints_ ) + VULKAN_HPP_NOEXCEPT + { + formatConstraintsCount = static_cast( formatConstraints_.size() ); + pFormatConstraints = formatConstraints_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & + setBufferCollectionConstraints( 
VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT + { + bufferCollectionConstraints = bufferCollectionConstraints_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, formatConstraintsCount, pFormatConstraints, bufferCollectionConstraints, flags ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageConstraintsInfoFUCHSIA const & ) const = default; +# else + bool operator==( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatConstraintsCount == rhs.formatConstraintsCount ) && + ( pFormatConstraints == rhs.pFormatConstraints ) && ( bufferCollectionConstraints == rhs.bufferCollectionConstraints ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageConstraintsInfoFUCHSIA; + const void * pNext = {}; + uint32_t formatConstraintsCount = {}; + const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {}; + VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags = {}; + }; + + template <> + struct CppType + { + using Type = ImageConstraintsInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct ImageCopy + { + using NativeType = VkImageCopy; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy( *reinterpret_cast( &rhs ) ) {} + + ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; 
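+      // Illustrative aside (not generated code): an ImageCopy region built
+      // with these setters is typically handed to CommandBuffer::copyImage.
+      // Sketch, where `cmd`, `src`, `dst`, `layers` and `extent` are
+      // placeholders for valid objects:
+      //   vk::ImageCopy region{ layers, {}, layers, {}, extent };
+      //   cmd.copyImage( src, vk::ImageLayout::eTransferSrcOptimal,
+      //                  dst, vk::ImageLayout::eTransferDstOptimal, region );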
+ return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageCopy const & ) const = default; +#else + bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); +# endif + } + + bool operator!=( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + + struct SubresourceLayout + { + using NativeType = VkSubresourceLayout; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT + : offset( offset_ ) + , size( size_ ) + , rowPitch( rowPitch_ ) + , arrayPitch( arrayPitch_ ) + , depthPitch( depthPitch_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT : SubresourceLayout( *reinterpret_cast( &rhs ) ) {} + + SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) 
VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setRowPitch( VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ ) VULKAN_HPP_NOEXCEPT + { + rowPitch = rowPitch_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setArrayPitch( VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ ) VULKAN_HPP_NOEXCEPT + { + arrayPitch = arrayPitch_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setDepthPitch( VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ ) VULKAN_HPP_NOEXCEPT + { + depthPitch = depthPitch_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( offset, size, rowPitch, arrayPitch, depthPitch ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubresourceLayout const & ) const = default; +#else + bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( offset == rhs.offset ) && ( size == rhs.size ) && ( rowPitch == rhs.rowPitch ) && ( arrayPitch == rhs.arrayPitch ) && + ( depthPitch == rhs.depthPitch ); +# endif + } + + bool operator!=( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {}; + VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {}; + VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {}; + }; + + struct ImageDrmFormatModifierExplicitCreateInfoEXT + { + using NativeType = VkImageDrmFormatModifierExplicitCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , drmFormatModifier( drmFormatModifier_ ) + , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) + , pPlaneLayouts( pPlaneLayouts_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageDrmFormatModifierExplicitCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierExplicitCreateInfoEXT( + uint64_t drmFormatModifier_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & planeLayouts_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , drmFormatModifier( drmFormatModifier_ ) + , drmFormatModifierPlaneCount( static_cast( planeLayouts_.size() ) ) + , pPlaneLayouts( planeLayouts_.data() ) + { + } +# 
endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifier = drmFormatModifier_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & + setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & + setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pPlaneLayouts = pPlaneLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierExplicitCreateInfoEXT & + setPlaneLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierPlaneCount = static_cast( planeLayouts_.size() ); + pPlaneLayouts = planeLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifier, drmFormatModifierPlaneCount, pPlaneLayouts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const & ) const = default; +#else + bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && + ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && ( pPlaneLayouts == rhs.pPlaneLayouts ); +# endif + } + + bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; + const void * pNext = {}; + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts = {}; + }; + + template <> + struct CppType + { + using Type = ImageDrmFormatModifierExplicitCreateInfoEXT; + }; + + struct ImageDrmFormatModifierListCreateInfoEXT + { + using NativeType = 
VkImageDrmFormatModifierListCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {}, + const uint64_t * pDrmFormatModifiers_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , drmFormatModifierCount( drmFormatModifierCount_ ) + , pDrmFormatModifiers( pDrmFormatModifiers_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageDrmFormatModifierListCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierListCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifiers_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), drmFormatModifierCount( static_cast( drmFormatModifiers_.size() ) ), pDrmFormatModifiers( drmFormatModifiers_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageDrmFormatModifierListCreateInfoEXT & operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierCount = drmFormatModifierCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT + { + pDrmFormatModifiers = pDrmFormatModifiers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierListCreateInfoEXT & + setDrmFormatModifiers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierCount = static_cast( drmFormatModifiers_.size() ); + pDrmFormatModifiers = drmFormatModifiers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifiers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const & ) const = default; +#else + bool operator==( 
ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
+             ( pDrmFormatModifiers == rhs.pDrmFormatModifiers );
+#  endif
+    }
+
+    bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                  = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
+    const void *                        pNext                  = {};
+    uint32_t                            drmFormatModifierCount = {};
+    const uint64_t *                    pDrmFormatModifiers    = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageDrmFormatModifierListCreateInfoEXT>
+  {
+    using Type = ImageDrmFormatModifierListCreateInfoEXT;
+  };
+
+  struct ImageDrmFormatModifierPropertiesEXT
+  {
+    using NativeType = VkImageDrmFormatModifierPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , drmFormatModifier( drmFormatModifier_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast<ImageDrmFormatModifierPropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageDrmFormatModifierPropertiesEXT & operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT *>( this );
+    }
+
+    operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, drmFormatModifier );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImageDrmFormatModifierPropertiesEXT const & ) const = default;
+#else
+    bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier );
+#  endif
+    }
+
+    bool operator!=( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::eImageDrmFormatModifierPropertiesEXT;
+    void *                              pNext             = {};
+    uint64_t                            drmFormatModifier = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageDrmFormatModifierPropertiesEXT>
+  {
+    using Type = ImageDrmFormatModifierPropertiesEXT;
+  };
+
+  struct ImageFormatListCreateInfo
+  {
+    using NativeType = VkImageFormatListCreateInfo;
+
+    static const bool allowDuplicate = false;
+
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , viewFormatCount( viewFormatCount_ ) + , pViewFormats( pViewFormats_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatListCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatListCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), viewFormatCount( static_cast( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT + { + viewFormatCount = viewFormatCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT + { + pViewFormats = pViewFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatListCreateInfo & + setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) VULKAN_HPP_NOEXCEPT + { + viewFormatCount = static_cast( viewFormats_.size() ); + pViewFormats = viewFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, viewFormatCount, pViewFormats ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatListCreateInfo const & ) const = default; +#else + bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats ); +# endif + } + + bool operator!=( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eImageFormatListCreateInfo; + const void * pNext = {}; + uint32_t viewFormatCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {}; + }; + + template <> + struct CppType + { + using Type = ImageFormatListCreateInfo; + }; + + using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo; + + struct ImageFormatProperties2 + { + using NativeType = VkImageFormatProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatProperties2( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , imageFormatProperties( imageFormatProperties_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatProperties2( *reinterpret_cast( &rhs ) ) + { + } + + ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageFormatProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatProperties2 const & ) const = default; +#else + bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFormatProperties == rhs.imageFormatProperties ); +# endif + } + + bool operator!=( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; + }; + + template <> + struct CppType + { + using Type = ImageFormatProperties2; + }; + + using ImageFormatProperties2KHR = ImageFormatProperties2; + + struct ImageMemoryBarrier + { + using NativeType = VkImageMemoryBarrier; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + 
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , oldLayout( oldLayout_ ) + , newLayout( newLayout_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , image( image_ ) + , subresourceRange( subresourceRange_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier( *reinterpret_cast( &rhs ) ) {} + + ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT + { + oldLayout = oldLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT + { + newLayout = newLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcAccessMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageMemoryBarrier const & ) const = default; +#else + bool operator==( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == 
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) && + ( oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange ); +# endif + } + + bool operator!=( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + + template <> + struct CppType + { + using Type = ImageMemoryBarrier; + }; + + struct ImageMemoryRequirementsInfo2 + { + using NativeType = VkImageMemoryRequirementsInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , image( image_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) + { + } + + ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageMemoryRequirementsInfo2 const & ) const = default; +#else + bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# 
else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); +# endif + } + + bool operator!=( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + }; + + template <> + struct CppType + { + using Type = ImageMemoryRequirementsInfo2; + }; + + using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImagePipeSurfaceCreateInfoFUCHSIA + { + using NativeType = VkImagePipeSurfaceCreateInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, + zx_handle_t imagePipeHandle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , imagePipeHandle( imagePipeHandle_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImagePipeSurfaceCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT + { + imagePipeHandle = imagePipeHandle_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, imagePipeHandle ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 ); + } + + bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {}; + zx_handle_t imagePipeHandle = {}; + }; + + template <> + struct CppType + { + using Type = ImagePipeSurfaceCreateInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct ImagePlaneMemoryRequirementsInfo + { + using NativeType = VkImagePlaneMemoryRequirementsInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , planeAspect( planeAspect_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImagePlaneMemoryRequirementsInfo( *reinterpret_cast( &rhs ) ) + { + } + + ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + { + planeAspect = planeAspect_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, planeAspect ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImagePlaneMemoryRequirementsInfo const & ) const = default; +#else + bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect ); +# endif + } + + bool operator!=( 
ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + }; + + template <> + struct CppType + { + using Type = ImagePlaneMemoryRequirementsInfo; + }; + + using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; + + struct ImageResolve + { + using NativeType = VkImageResolve; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve( *reinterpret_cast( &rhs ) ) {} + + ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageResolve &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageResolve const & ) const = default; +#else + bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && 
+ ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); +# endif + } + + bool operator!=( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + + struct ImageResolve2 + { + using NativeType = VkImageResolve2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageResolve2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , srcSubresource( srcSubresource_ ) + , srcOffset( srcOffset_ ) + , dstSubresource( dstSubresource_ ) + , dstOffset( dstOffset_ ) + , extent( extent_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageResolve2( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageResolve2( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve2( *reinterpret_cast( &rhs ) ) {} + + ImageResolve2 & operator=( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageResolve2 & operator=( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcOffset = srcOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstOffset = dstOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageResolve2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageResolve2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageResolve2 const & ) 
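+    // NOTE (editorial aside, not part of the generated header): a minimal usage sketch.
+    // ImageResolve2 regions are consumed through vk::ResolveImageInfo2 and
+    // vk::CommandBuffer::resolveImage2 when resolving a multisampled image into a
+    // single-sample one. Assumed names: valid images `src`/`dst` already in the
+    // required layouts, a recording vk::CommandBuffer `cmd`, and `width`/`height`.
+    //
+    //   vk::ImageResolve2 region{};
+    //   region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
+    //         .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
+    //         .setExtent( { width, height, 1 } );
+    //   vk::ResolveImageInfo2 info{};
+    //   info.setSrcImage( src ).setSrcImageLayout( vk::ImageLayout::eTransferSrcOptimal )
+    //       .setDstImage( dst ).setDstImageLayout( vk::ImageLayout::eTransferDstOptimal )
+    //       .setRegionCount( 1 ).setPRegions( &region );
+    //   cmd.resolveImage2( info );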
const = default; +#else + bool operator==( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); +# endif + } + + bool operator!=( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + }; + + template <> + struct CppType + { + using Type = ImageResolve2; + }; + + using ImageResolve2KHR = ImageResolve2; + + struct ImageSparseMemoryRequirementsInfo2 + { + using NativeType = VkImageSparseMemoryRequirementsInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSparseMemoryRequirementsInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , image( image_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSparseMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) + { + } + + ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageSparseMemoryRequirementsInfo2 & operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSparseMemoryRequirementsInfo2 const & ) const = default; +#else + bool operator==( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( 
pNext == rhs.pNext ) && ( image == rhs.image ); +# endif + } + + bool operator!=( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + }; + + template <> + struct CppType + { + using Type = ImageSparseMemoryRequirementsInfo2; + }; + + using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; + + struct ImageStencilUsageCreateInfo + { + using NativeType = VkImageStencilUsageCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stencilUsage( stencilUsage_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageStencilUsageCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT + { + stencilUsage = stencilUsage_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stencilUsage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageStencilUsageCreateInfo const & ) const = default; +#else + bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilUsage == rhs.stencilUsage ); +# endif + } + + bool operator!=( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {}; + }; + + template <> + struct CppType + { + using Type = ImageStencilUsageCreateInfo; + }; + + using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo; + + struct 
ImageSwapchainCreateInfoKHR + { + using NativeType = VkImageSwapchainCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , swapchain( swapchain_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchain ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSwapchainCreateInfoKHR const & ) const = default; +#else + bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ); +# endif + } + + bool operator!=( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + }; + + template <> + struct CppType + { + using Type = ImageSwapchainCreateInfoKHR; + }; + + struct ImageViewASTCDecodeModeEXT + { + using NativeType = VkImageViewASTCDecodeModeEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , decodeMode( decodeMode_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewASTCDecodeModeEXT( 
VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewASTCDecodeModeEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT + { + decodeMode = decodeMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, decodeMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewASTCDecodeModeEXT const & ) const = default; +#else + bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeMode == rhs.decodeMode ); +# endif + } + + bool operator!=( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = ImageViewASTCDecodeModeEXT; + }; + + struct ImageViewAddressPropertiesNVX + { + using NativeType = VkImageViewAddressPropertiesNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , deviceAddress( deviceAddress_ ) + , size( size_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewAddressPropertiesNVX( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( 
this ); + } + + operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceAddress, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewAddressPropertiesNVX const & ) const = default; +#else + bool operator==( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && ( size == rhs.size ); +# endif + } + + bool operator!=( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = ImageViewAddressPropertiesNVX; + }; + + struct ImageViewCaptureDescriptorDataInfoEXT + { + using NativeType = VkImageViewCaptureDescriptorDataInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCaptureDescriptorDataInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , imageView( imageView_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageViewCaptureDescriptorDataInfoEXT( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewCaptureDescriptorDataInfoEXT( VkImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewCaptureDescriptorDataInfoEXT & operator=( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageViewCaptureDescriptorDataInfoEXT & operator=( VkImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageViewCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageView ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
ImageViewCaptureDescriptorDataInfoEXT const & ) const = default; +#else + bool operator==( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ); +# endif + } + + bool operator!=( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCaptureDescriptorDataInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + }; + + template <> + struct CppType + { + using Type = ImageViewCaptureDescriptorDataInfoEXT; + }; + + struct ImageViewCreateInfo + { + using NativeType = VkImageViewCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , image( image_ ) + , viewType( viewType_ ) + , format( format_ ) + , components( components_ ) + , subresourceRange( subresourceRange_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT + { + viewType = viewType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT + { + components = components_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) 
VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, image, viewType, format, components, subresourceRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewCreateInfo const & ) const = default; +#else + bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) && ( viewType == rhs.viewType ) && + ( format == rhs.format ) && ( components == rhs.components ) && ( subresourceRange == rhs.subresourceRange ); +# endif + } + + bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + + template <> + struct CppType + { + using Type = ImageViewCreateInfo; + }; + + struct ImageViewHandleInfoNVX + { + using NativeType = VkImageViewHandleInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , imageView( imageView_ ) + , descriptorType( descriptorType_ ) + , sampler( sampler_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewHandleInfoNVX( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = 
imageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT + { + sampler = sampler_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageView, descriptorType, sampler ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewHandleInfoNVX const & ) const = default; +#else + bool operator==( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( descriptorType == rhs.descriptorType ) && + ( sampler == rhs.sampler ); +# endif + } + + bool operator!=( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + VULKAN_HPP_NAMESPACE::Sampler sampler = {}; + }; + + template <> + struct CppType + { + using Type = ImageViewHandleInfoNVX; + }; + + struct ImageViewMinLodCreateInfoEXT + { + using NativeType = VkImageViewMinLodCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewMinLodCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( float minLod_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , minLod( minLod_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewMinLodCreateInfoEXT( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewMinLodCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewMinLodCreateInfoEXT & operator=( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageViewMinLodCreateInfoEXT & operator=( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT + { + minLod = minLod_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageViewMinLodCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkImageViewMinLodCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minLod ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewMinLodCreateInfoEXT const & ) const = default; +#else + bool operator==( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minLod == rhs.minLod ); +# endif + } + + bool operator!=( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewMinLodCreateInfoEXT; + const void * pNext = {}; + float minLod = {}; + }; + + template <> + struct CppType + { + using Type = ImageViewMinLodCreateInfoEXT; + }; + + struct ImageViewSampleWeightCreateInfoQCOM + { + using NativeType = VkImageViewSampleWeightCreateInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewSampleWeightCreateInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( VULKAN_HPP_NAMESPACE::Offset2D filterCenter_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D filterSize_ = {}, + uint32_t numPhases_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , filterCenter( filterCenter_ ) + , filterSize( filterSize_ ) + , numPhases( numPhases_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewSampleWeightCreateInfoQCOM( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewSampleWeightCreateInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewSampleWeightCreateInfoQCOM & operator=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageViewSampleWeightCreateInfoQCOM & operator=( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterCenter( VULKAN_HPP_NAMESPACE::Offset2D const & filterCenter_ ) VULKAN_HPP_NOEXCEPT + { + filterCenter = filterCenter_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterSize( VULKAN_HPP_NAMESPACE::Extent2D const & filterSize_ ) VULKAN_HPP_NOEXCEPT + { + filterSize = filterSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setNumPhases( uint32_t numPhases_ ) VULKAN_HPP_NOEXCEPT + { + numPhases = numPhases_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageViewSampleWeightCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkImageViewSampleWeightCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, filterCenter, filterSize, numPhases ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewSampleWeightCreateInfoQCOM const & ) const = default; +#else + bool operator==( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCenter == rhs.filterCenter ) && ( filterSize == rhs.filterSize ) && + ( numPhases == rhs.numPhases ); +# endif + } + + bool operator!=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewSampleWeightCreateInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Offset2D filterCenter = {}; + VULKAN_HPP_NAMESPACE::Extent2D filterSize = {}; + uint32_t numPhases = {}; + }; + + template <> + struct CppType + { + using Type = ImageViewSampleWeightCreateInfoQCOM; + }; + + struct ImageViewSlicedCreateInfoEXT + { + using NativeType = VkImageViewSlicedCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewSlicedCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageViewSlicedCreateInfoEXT( uint32_t sliceOffset_ = {}, uint32_t sliceCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , sliceOffset( sliceOffset_ ) + , sliceCount( sliceCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageViewSlicedCreateInfoEXT( ImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewSlicedCreateInfoEXT( VkImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewSlicedCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewSlicedCreateInfoEXT & operator=( ImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageViewSlicedCreateInfoEXT & operator=( VkImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setSliceOffset( uint32_t sliceOffset_ ) VULKAN_HPP_NOEXCEPT + { + sliceOffset = sliceOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setSliceCount( uint32_t sliceCount_ ) VULKAN_HPP_NOEXCEPT + { + sliceCount = sliceCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageViewSlicedCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewSlicedCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return 
std::tie( sType, pNext, sliceOffset, sliceCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewSlicedCreateInfoEXT const & ) const = default; +#else + bool operator==( ImageViewSlicedCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sliceOffset == rhs.sliceOffset ) && ( sliceCount == rhs.sliceCount ); +# endif + } + + bool operator!=( ImageViewSlicedCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewSlicedCreateInfoEXT; + const void * pNext = {}; + uint32_t sliceOffset = {}; + uint32_t sliceCount = {}; + }; + + template <> + struct CppType + { + using Type = ImageViewSlicedCreateInfoEXT; + }; + + struct ImageViewUsageCreateInfo + { + using NativeType = VkImageViewUsageCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , usage( usage_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewUsageCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, usage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewUsageCreateInfo const & ) const = default; +#else + bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage ); +# endif + } + + bool operator!=( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo; + const void * pNext = {}; 
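+    // NOTE (editorial aside, not part of the generated header): a minimal usage sketch.
+    // ImageViewUsageCreateInfo is chained through ImageViewCreateInfo::pNext to
+    // restrict a view to a subset of the image's usage flags. Assumed names: a valid
+    // vk::Image `image` and vk::Device `device`.
+    //
+    //   vk::ImageViewUsageCreateInfo usageInfo{};
+    //   usageInfo.setUsage( vk::ImageUsageFlagBits::eSampled );
+    //   vk::ImageViewCreateInfo viewInfo{};
+    //   viewInfo.setImage( image )
+    //           .setViewType( vk::ImageViewType::e2D )
+    //           .setFormat( vk::Format::eR8G8B8A8Unorm )
+    //           .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } )
+    //           .setPNext( &usageInfo );
+    //   vk::ImageView view = device.createImageView( viewInfo );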
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImageViewUsageCreateInfo>
+  {
+    using Type = ImageViewUsageCreateInfo;
+  };
+
+  using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  struct ImportAndroidHardwareBufferInfoANDROID
+  {
+    using NativeType = VkImportAndroidHardwareBufferInfoANDROID;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , buffer( buffer_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportAndroidHardwareBufferInfoANDROID( *reinterpret_cast<ImportAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
+    {
+    }
+
+    ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImportAndroidHardwareBufferInfoANDROID & operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const *>( &rhs );
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+#  endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID *>( this );
+    }
+
+    operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID *>( this );
+    }
+
+#  if defined( VULKAN_HPP_USE_REFLECT )
+#    if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#    else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, struct AHardwareBuffer * const &>
+#    endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, buffer );
+    }
+#  endif
+
+#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const & ) const = default;
+#  else
+    bool operator==( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#    if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#    else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
+#    endif
+    }
+
+    bool operator!=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#  endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
+    const void * pNext = {};
+    struct AHardwareBuffer * buffer = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportAndroidHardwareBufferInfoANDROID>
+  {
+    using Type = ImportAndroidHardwareBufferInfoANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  struct ImportFenceFdInfoKHR
+  {
+    using NativeType = VkImportFenceFdInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eImportFenceFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + int fd_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , fence( fence_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , fd( fd_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportFenceFdInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fence, flags, handleType, fd ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportFenceFdInfoKHR const & ) const = default; +#else + bool operator==( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( fd == rhs.fd ); +# endif + } + + bool operator!=( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + }; + + template <> + struct CppType + { + using Type = ImportFenceFdInfoKHR; + }; + +#if defined( 
VK_USE_PLATFORM_WIN32_KHR ) + struct ImportFenceWin32HandleInfoKHR + { + using NativeType = VkImportFenceWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , fence( fence_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportFenceWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fence, flags, handleType, handle, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportFenceWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && ( handleType == 
rhs.handleType ) && + ( handle == rhs.handle ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + }; + + template <> + struct CppType + { + using Type = ImportFenceWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImportMemoryBufferCollectionFUCHSIA + { + using NativeType = VkImportMemoryBufferCollectionFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryBufferCollectionFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, + uint32_t index_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , collection( collection_ ) + , index( index_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryBufferCollectionFUCHSIA( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryBufferCollectionFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + ImportMemoryBufferCollectionFUCHSIA & operator=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportMemoryBufferCollectionFUCHSIA & operator=( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT + { + collection = collection_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImportMemoryBufferCollectionFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryBufferCollectionFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, collection, index ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryBufferCollectionFUCHSIA const & ) const = default; +# else + bool operator==( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType 
== rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index );
+# endif
+    }
+
+    bool operator!=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryBufferCollectionFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {};
+    uint32_t index = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportMemoryBufferCollectionFUCHSIA>
+  {
+    using Type = ImportMemoryBufferCollectionFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  struct ImportMemoryFdInfoKHR
+  {
+    using NativeType = VkImportMemoryFdInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR(
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+      int fd_ = {},
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , handleType( handleType_ )
+      , fd( fd_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportMemoryFdInfoKHR( *reinterpret_cast<ImportMemoryFdInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fd = fd_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMemoryFdInfoKHR *>( this );
+    }
+
+    operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMemoryFdInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &,
+               int const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleType, fd );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImportMemoryFdInfoKHR const & ) const = default;
+#else
+    bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd );
+# endif
+    }
+
+    bool operator!=( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR;
+    const void * pNext = {};
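+    // handleType selects which external-memory mechanism `fd` belongs to (e.g. eOpaqueFd or
+    // eDmaBufEXT). Per VK_KHR_external_memory_fd, a successfully imported file descriptor is
+    // owned by the Vulkan implementation and must not be closed by the application afterwards.
+    // Illustrative usage sketch (not part of the generated header; assumes a vk::Device
+    // `device`, a pre-filled vk::MemoryAllocateInfo `allocInfo`, and an exported fd `fd`):
+    //   vk::ImportMemoryFdInfoKHR importInfo{ vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, fd };
+    //   allocInfo.pNext = &importInfo;
+    //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );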
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + }; + + template <> + struct CppType + { + using Type = ImportMemoryFdInfoKHR; + }; + + struct ImportMemoryHostPointerInfoEXT + { + using NativeType = VkImportMemoryHostPointerInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + void * pHostPointer_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , handleType( handleType_ ) + , pHostPointer( pHostPointer_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryHostPointerInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT + { + pHostPointer = pHostPointer_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType, pHostPointer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryHostPointerInfoEXT const & ) const = default; +#else + bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( pHostPointer == rhs.pHostPointer ); +# endif + } + + bool operator!=( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = 
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + void * pHostPointer = {}; + }; + + template <> + struct CppType + { + using Type = ImportMemoryHostPointerInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportMemoryWin32HandleInfoKHR + { + using NativeType = VkImportMemoryWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType, handle, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; 
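+    // The payload may be identified either by `handle` (an NT or KMT handle) or by `name`
+    // (a null-terminated UTF-16 string referring to the underlying resource); per the
+    // VK_KHR_external_memory_win32 spec, if `handle` is not NULL then `name` must be NULL.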
+ const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + }; + + template <> + struct CppType + { + using Type = ImportMemoryWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportMemoryWin32HandleInfoNV + { + using NativeType = VkImportMemoryWin32HandleInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, + HANDLE handle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , handleType( handleType_ ) + , handle( handle_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryWin32HandleInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType, handle ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryWin32HandleInfoNV const & ) const = default; +# else + bool operator==( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ); +# endif + } + + bool operator!=( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {}; + HANDLE handle = {}; + }; + + template <> + struct 
CppType + { + using Type = ImportMemoryWin32HandleInfoNV; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImportMemoryZirconHandleInfoFUCHSIA + { + using NativeType = VkImportMemoryZirconHandleInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + zx_handle_t handle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , handleType( handleType_ ) + , handle( handle_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMemoryZirconHandleInfoFUCHSIA( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + ImportMemoryZirconHandleInfoFUCHSIA & operator=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportMemoryZirconHandleInfoFUCHSIA & operator=( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandle( zx_handle_t handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType, handle ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+# endif
+
+    bool operator==( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) &&
+             ( memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ) == 0 );
+    }
+
+    bool operator!=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+    zx_handle_t handle = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eImportMemoryZirconHandleInfoFUCHSIA>
+  {
+    using Type = ImportMemoryZirconHandleInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  struct ImportMetalBufferInfoEXT
+  {
+    using NativeType = VkImportMetalBufferInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalBufferInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( MTLBuffer_id mtlBuffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , mtlBuffer( mtlBuffer_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImportMetalBufferInfoEXT( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImportMetalBufferInfoEXT( *reinterpret_cast<ImportMetalBufferInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    ImportMetalBufferInfoEXT & operator=( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImportMetalBufferInfoEXT & operator=( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mtlBuffer = mtlBuffer_;
+      return *this;
+    }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkImportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImportMetalBufferInfoEXT *>( this );
+    }
+
+    operator VkImportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImportMetalBufferInfoEXT *>( this );
+    }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, MTLBuffer_id const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, mtlBuffer );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ImportMetalBufferInfoEXT const & ) const = default;
+# else
+    bool operator==( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlBuffer == rhs.mtlBuffer );
+#  endif
+    }
+
+    bool operator!=( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType =
StructureType::eImportMetalBufferInfoEXT; + const void * pNext = {}; + MTLBuffer_id mtlBuffer = {}; + }; + + template <> + struct CppType + { + using Type = ImportMetalBufferInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ImportMetalIOSurfaceInfoEXT + { + using NativeType = VkImportMetalIOSurfaceInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalIoSurfaceInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , ioSurface( ioSurface_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMetalIOSurfaceInfoEXT( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMetalIOSurfaceInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImportMetalIOSurfaceInfoEXT & operator=( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportMetalIOSurfaceInfoEXT & operator=( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT + { + ioSurface = ioSurface_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, ioSurface ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMetalIOSurfaceInfoEXT const & ) const = default; +# else + bool operator==( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ioSurface == rhs.ioSurface ); +# endif + } + + bool operator!=( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalIoSurfaceInfoEXT; + const void * pNext = {}; + IOSurfaceRef ioSurface = {}; + }; + + template <> + struct CppType + { + using Type = ImportMetalIOSurfaceInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ImportMetalSharedEventInfoEXT + { + using NativeType = VkImportMetalSharedEventInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalSharedEventInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( MTLSharedEvent_id mtlSharedEvent_ = {}, 
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , mtlSharedEvent( mtlSharedEvent_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMetalSharedEventInfoEXT( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMetalSharedEventInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImportMetalSharedEventInfoEXT & operator=( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportMetalSharedEventInfoEXT & operator=( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) VULKAN_HPP_NOEXCEPT + { + mtlSharedEvent = mtlSharedEvent_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mtlSharedEvent ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMetalSharedEventInfoEXT const & ) const = default; +# else + bool operator==( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlSharedEvent == rhs.mtlSharedEvent ); +# endif + } + + bool operator!=( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalSharedEventInfoEXT; + const void * pNext = {}; + MTLSharedEvent_id mtlSharedEvent = {}; + }; + + template <> + struct CppType + { + using Type = ImportMetalSharedEventInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ImportMetalTextureInfoEXT + { + using NativeType = VkImportMetalTextureInfoEXT; + + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalTextureInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, + MTLTexture_id mtlTexture_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , plane( plane_ ) + , mtlTexture( mtlTexture_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportMetalTextureInfoEXT( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMetalTextureInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImportMetalTextureInfoEXT & operator=( ImportMetalTextureInfoEXT 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportMetalTextureInfoEXT & operator=( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPlane( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT + { + plane = plane_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT + { + mtlTexture = mtlTexture_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, plane, mtlTexture ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMetalTextureInfoEXT const & ) const = default; +# else + bool operator==( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( plane == rhs.plane ) && ( mtlTexture == rhs.mtlTexture ); +# endif + } + + bool operator!=( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalTextureInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + MTLTexture_id mtlTexture = {}; + }; + + template <> + struct CppType + { + using Type = ImportMetalTextureInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + struct ImportScreenBufferInfoQNX + { + using NativeType = VkImportScreenBufferInfoQNX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportScreenBufferInfoQNX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportScreenBufferInfoQNX( struct _screen_buffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , buffer( buffer_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImportScreenBufferInfoQNX( ImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportScreenBufferInfoQNX( VkImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportScreenBufferInfoQNX( *reinterpret_cast( &rhs ) ) + { + } + + ImportScreenBufferInfoQNX & operator=( ImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportScreenBufferInfoQNX & operator=( VkImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX & setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX & setBuffer( struct _screen_buffer * buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImportScreenBufferInfoQNX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportScreenBufferInfoQNX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportScreenBufferInfoQNX const & ) const = default; +# else + bool operator==( ImportScreenBufferInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( ImportScreenBufferInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportScreenBufferInfoQNX; + const void * pNext = {}; + struct _screen_buffer * buffer = {}; + }; + + template <> + struct CppType + { + using Type = ImportScreenBufferInfoQNX; + }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + struct ImportSemaphoreFdInfoKHR + { + using NativeType = VkImportSemaphoreFdInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + int fd_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , fd( fd_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreFdInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR 
& + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, flags, handleType, fd ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportSemaphoreFdInfoKHR const & ) const = default; +#else + bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( fd == rhs.fd ); +# endif + } + + bool operator!=( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + }; + + template <> + struct CppType + { + using Type = ImportSemaphoreFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportSemaphoreWin32HandleInfoKHR + { + using NativeType = VkImportSemaphoreWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, flags, handleType, handle, name ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( handle == rhs.handle ) && ( name == rhs.name ); +# endif + } + + bool operator!=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + }; + + template <> + struct CppType + { + using Type = ImportSemaphoreWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImportSemaphoreZirconHandleInfoFUCHSIA + { + using NativeType = VkImportSemaphoreZirconHandleInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = 
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + zx_handle_t zirconHandle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , zirconHandle( zirconHandle_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImportSemaphoreZirconHandleInfoFUCHSIA( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setZirconHandle( zx_handle_t zirconHandle_ ) VULKAN_HPP_NOEXCEPT + { + zirconHandle = zirconHandle_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, flags, handleType, zirconHandle ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = semaphore <=> rhs.semaphore; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ) == 0 ); + } + + bool operator!=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + zx_handle_t zirconHandle = {}; + }; + + template <> + struct CppType + { + using Type = ImportSemaphoreZirconHandleInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct IndirectCommandsLayoutTokenNV + { + using NativeType = VkIndirectCommandsLayoutTokenNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, + uint32_t stream_ = {}, + uint32_t offset_ = {}, + uint32_t vertexBindingUnit_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, + uint32_t pushconstantOffset_ = {}, + uint32_t pushconstantSize_ = {}, + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, + uint32_t indexTypeCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ = {}, + const uint32_t * pIndexTypeValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , tokenType( tokenType_ ) + , stream( stream_ ) + , offset( offset_ ) + , vertexBindingUnit( vertexBindingUnit_ ) + , vertexDynamicStride( vertexDynamicStride_ ) + , pushconstantPipelineLayout( pushconstantPipelineLayout_ ) + , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ) + , pushconstantOffset( pushconstantOffset_ ) + , pushconstantSize( pushconstantSize_ ) + , indirectStateFlags( indirectStateFlags_ ) + , indexTypeCount( indexTypeCount_ ) + , pIndexTypes( pIndexTypes_ ) + , pIndexTypeValues( pIndexTypeValues_ ) + { + } + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutTokenNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_, + uint32_t stream_, + uint32_t offset_, + uint32_t vertexBindingUnit_, + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_, + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags 
pushconstantShaderStageFlags_, + uint32_t pushconstantOffset_, + uint32_t pushconstantSize_, + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , tokenType( tokenType_ ) + , stream( stream_ ) + , offset( offset_ ) + , vertexBindingUnit( vertexBindingUnit_ ) + , vertexDynamicStride( vertexDynamicStride_ ) + , pushconstantPipelineLayout( pushconstantPipelineLayout_ ) + , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ) + , pushconstantOffset( pushconstantOffset_ ) + , pushconstantSize( pushconstantSize_ ) + , indirectStateFlags( indirectStateFlags_ ) + , indexTypeCount( static_cast( indexTypes_.size() ) ) + , pIndexTypes( indexTypes_.data() ) + , pIndexTypeValues( indexTypeValues_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() ); +# else + if ( indexTypes_.size() != indexTypeValues_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT + { + tokenType = tokenType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT + { + stream = stream_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingUnit = vertexBindingUnit_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexDynamicStride = vertexDynamicStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setPushconstantPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantPipelineLayout = pushconstantPipelineLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setPushconstantShaderStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantShaderStageFlags = pushconstantShaderStageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantOffset = 
pushconstantOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT + { + pushconstantSize = pushconstantSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT + { + indirectStateFlags = indirectStateFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeCount = indexTypeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ ) VULKAN_HPP_NOEXCEPT + { + pIndexTypes = pIndexTypes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV & + setIndexTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeCount = static_cast( indexTypes_.size() ); + pIndexTypes = indexTypes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT + { + pIndexTypeValues = pIndexTypeValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV & + setIndexTypeValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeCount = static_cast( indexTypeValues_.size() ); + pIndexTypeValues = indexTypeValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + tokenType, + stream, + offset, + vertexBindingUnit, + vertexDynamicStride, + pushconstantPipelineLayout, + pushconstantShaderStageFlags, + pushconstantOffset, + pushconstantSize, + indirectStateFlags, + indexTypeCount, + pIndexTypes, + pIndexTypeValues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutTokenNV const & ) const = default; +#else + bool operator==( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tokenType == rhs.tokenType ) && ( stream == rhs.stream ) && ( offset == rhs.offset ) && + ( vertexBindingUnit == rhs.vertexBindingUnit ) && ( vertexDynamicStride == rhs.vertexDynamicStride ) && + ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) && ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) && + ( pushconstantOffset == rhs.pushconstantOffset ) && ( pushconstantSize == rhs.pushconstantSize ) && + ( indirectStateFlags == rhs.indirectStateFlags ) && ( indexTypeCount == rhs.indexTypeCount ) && ( pIndexTypes == rhs.pIndexTypes ) && + ( pIndexTypeValues == rhs.pIndexTypeValues ); +# endif + } + 
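+    // Note: the comparison above checks pIndexTypes / pIndexTypeValues by pointer identity
+    // only; two tokens describing identical index-type arrays stored at different addresses
+    // compare unequal.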
+ bool operator!=( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup; + uint32_t stream = {}; + uint32_t offset = {}; + uint32_t vertexBindingUnit = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {}; + uint32_t pushconstantOffset = {}; + uint32_t pushconstantSize = {}; + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {}; + uint32_t indexTypeCount = {}; + const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes = {}; + const uint32_t * pIndexTypeValues = {}; + }; + + template <> + struct CppType + { + using Type = IndirectCommandsLayoutTokenNV; + }; + + struct IndirectCommandsLayoutCreateInfoNV + { + using NativeType = VkIndirectCommandsLayoutCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t tokenCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ = {}, + uint32_t streamCount_ = {}, + const uint32_t * pStreamStrides_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , tokenCount( tokenCount_ ) + , pTokens( pTokens_ ) + , streamCount( streamCount_ ) + , pStreamStrides( pStreamStrides_ ) + { + } + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tokens_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streamStrides_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , tokenCount( static_cast( tokens_.size() ) ) + , pTokens( tokens_.data() ) + , streamCount( static_cast( streamStrides_.size() ) ) + , pStreamStrides( streamStrides_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IndirectCommandsLayoutCreateInfoNV & operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
  struct IndirectCommandsLayoutCreateInfoNV
  {
    using NativeType = VkIndirectCommandsLayoutCreateInfoNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {},
                                                             VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
                                                             uint32_t tokenCount_ = {},
                                                             const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ = {},
                                                             uint32_t streamCount_ = {},
                                                             const uint32_t * pStreamStrides_ = {},
                                                             const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ ), flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), tokenCount( tokenCount_ ),
        pTokens( pTokens_ ), streamCount( streamCount_ ), pStreamStrides( pStreamStrides_ )
    {
    }

    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast<IndirectCommandsLayoutCreateInfoNV const *>( &rhs ) )
    {
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_,
                                        VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_,
                                        VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_,
                                        VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ = {},
                                        const void * pNext_ = nullptr )
      : pNext( pNext_ ), flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ),
        tokenCount( static_cast<uint32_t>( tokens_.size() ) ), pTokens( tokens_.data() ),
        streamCount( static_cast<uint32_t>( streamStrides_.size() ) ), pStreamStrides( streamStrides_.data() )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    IndirectCommandsLayoutCreateInfoNV & operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBindPoint = pipelineBindPoint_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT
    {
      tokenCount = tokenCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ ) VULKAN_HPP_NOEXCEPT
    {
      pTokens = pTokens_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    IndirectCommandsLayoutCreateInfoNV &
      setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_ ) VULKAN_HPP_NOEXCEPT
    {
      tokenCount = static_cast<uint32_t>( tokens_.size() );
      pTokens = tokens_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
    {
      streamCount = streamCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t * pStreamStrides_ ) VULKAN_HPP_NOEXCEPT
    {
      pStreamStrides = pStreamStrides_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    IndirectCommandsLayoutCreateInfoNV &
      setStreamStrides( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ ) VULKAN_HPP_NOEXCEPT
    {
      streamCount = static_cast<uint32_t>( streamStrides_.size() );
      pStreamStrides = streamStrides_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( this );
    }

    operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV const &,
               VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * const &,
               uint32_t const &, const uint32_t * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pipelineBindPoint, tokenCount, pTokens, streamCount, pStreamStrides );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IndirectCommandsLayoutCreateInfoNV const & ) const = default;
#else
    bool operator==( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) &&
             ( tokenCount == rhs.tokenCount ) && ( pTokens == rhs.pTokens ) && ( streamCount == rhs.streamCount ) && ( pStreamStrides == rhs.pStreamStrides );
# endif
    }

    bool operator!=( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {};
    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
    uint32_t tokenCount = {};
    const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens = {};
    uint32_t streamCount = {};
    const uint32_t * pStreamStrides = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eIndirectCommandsLayoutCreateInfoNV>
  {
    using Type = IndirectCommandsLayoutCreateInfoNV;
  };

  struct InitializePerformanceApiInfoINTEL
  {
    using NativeType = VkInitializePerformanceApiInfoINTEL;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInitializePerformanceApiInfoINTEL;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ ), pUserData( pUserData_ )
    {
    }

    VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : InitializePerformanceApiInfoINTEL( *reinterpret_cast<InitializePerformanceApiInfoINTEL const *>( &rhs ) )
    {
    }

    InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
    {
      pUserData = pUserData_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( this );
    }

    operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkInitializePerformanceApiInfoINTEL *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, void * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, pUserData );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( InitializePerformanceApiInfoINTEL const & ) const = default;
#else
    bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pUserData == rhs.pUserData );
# endif
    }

    bool operator!=( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL;
    const void * pNext = {};
    void * pUserData = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eInitializePerformanceApiInfoINTEL>
  {
    using Type = InitializePerformanceApiInfoINTEL;
  };
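With a token in hand, the enhanced-mode ArrayProxy constructor above derives tokenCount and streamCount from the containers it is given. A sketch under the assumption that VK_NV_device_generated_commands is enabled on `device` and that a dispatch loader carrying the extension entry points is in use; `token` is the sketch from earlier and the stride value is illustrative:

    std::array<uint32_t, 1> strides = { 16 };   // byte stride of the single input stream
    vk::IndirectCommandsLayoutCreateInfoNV layoutInfo( {}, vk::PipelineBindPoint::eGraphics, token, strides );
    vk::IndirectCommandsLayoutNV commandsLayout = device.createIndirectCommandsLayoutNV( layoutInfo );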
  struct InputAttachmentAspectReference
  {
    using NativeType = VkInputAttachmentAspectReference;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = {},
                                                         uint32_t inputAttachmentIndex_ = {},
                                                         VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
      : subpass( subpass_ ), inputAttachmentIndex( inputAttachmentIndex_ ), aspectMask( aspectMask_ )
    {
    }

    VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
      : InputAttachmentAspectReference( *reinterpret_cast<InputAttachmentAspectReference const *>( &rhs ) )
    {
    }

    InputAttachmentAspectReference & operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
    {
      subpass = subpass_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      inputAttachmentIndex = inputAttachmentIndex_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkInputAttachmentAspectReference *>( this );
    }

    operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkInputAttachmentAspectReference *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::ImageAspectFlags const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( subpass, inputAttachmentIndex, aspectMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( InputAttachmentAspectReference const & ) const = default;
#else
    bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( subpass == rhs.subpass ) && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) && ( aspectMask == rhs.aspectMask );
# endif
    }

    bool operator!=( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t subpass = {};
    uint32_t inputAttachmentIndex = {};
    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
  };

  using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
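InputAttachmentAspectReference carries no sType of its own; it is consumed through vk::RenderPassInputAttachmentAspectCreateInfo (defined elsewhere in this header), chained into a render-pass create info. A hedged sketch, with the surrounding render-pass setup omitted:

    vk::InputAttachmentAspectReference aspectRef( 0 /*subpass*/, 0 /*inputAttachmentIndex*/, vk::ImageAspectFlagBits::eColor );
    vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( 1, &aspectRef );
    vk::RenderPassCreateInfo renderPassInfo{};
    renderPassInfo.pNext = &aspectInfo;   // attachments/subpasses would be filled in as usual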
  struct InstanceCreateInfo
  {
    using NativeType = VkInstanceCreateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {},
                                             const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ = {},
                                             uint32_t enabledLayerCount_ = {},
                                             const char * const * ppEnabledLayerNames_ = {},
                                             uint32_t enabledExtensionCount_ = {},
                                             const char * const * ppEnabledExtensionNames_ = {},
                                             const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ ), flags( flags_ ), pApplicationInfo( pApplicationInfo_ ),
        enabledLayerCount( enabledLayerCount_ ), ppEnabledLayerNames( ppEnabledLayerNames_ ),
        enabledExtensionCount( enabledExtensionCount_ ), ppEnabledExtensionNames( ppEnabledExtensionNames_ )
    {
    }

    VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : InstanceCreateInfo( *reinterpret_cast<InstanceCreateInfo const *>( &rhs ) ) {}

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_,
                        const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_,
                        VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_,
                        VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {},
                        const void * pNext_ = nullptr )
      : pNext( pNext_ ), flags( flags_ ), pApplicationInfo( pApplicationInfo_ ),
        enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ),
        enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InstanceCreateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pApplicationInfo = pApplicationInfo_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledLayerCount = enabledLayerCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
    {
      ppEnabledLayerNames = ppEnabledLayerNames_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    InstanceCreateInfo &
      setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
      ppEnabledLayerNames = pEnabledLayerNames_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledExtensionCount = enabledExtensionCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
    {
      ppEnabledExtensionNames = ppEnabledExtensionNames_;
      return *this;
    }
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    InstanceCreateInfo &
      setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
      ppEnabledExtensionNames = pEnabledExtensionNames_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkInstanceCreateInfo *>( this );
    }

    operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkInstanceCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::InstanceCreateFlags const &,
               const VULKAN_HPP_NAMESPACE::ApplicationInfo * const &, uint32_t const &, const char * const * const &,
               uint32_t const &, const char * const * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pApplicationInfo, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    std::strong_ordering operator<=>( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
        return cmp;
      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
        return cmp;
      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
        return cmp;
      if ( auto cmp = pApplicationInfo <=> rhs.pApplicationInfo; cmp != 0 )
        return cmp;
      if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 )
        return cmp;
      for ( size_t i = 0; i < enabledLayerCount; ++i )
      {
        if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] )
          if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 )
            return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
      }
      if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 )
        return cmp;
      for ( size_t i = 0; i < enabledExtensionCount; ++i )
      {
        if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] )
          if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 )
            return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater;
      }

      return std::strong_ordering::equivalent;
    }
#endif
    bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pApplicationInfo == rhs.pApplicationInfo ) &&
             ( enabledLayerCount == rhs.enabledLayerCount ) &&
             std::equal( ppEnabledLayerNames,
                         ppEnabledLayerNames + enabledLayerCount,
                         rhs.ppEnabledLayerNames,
                         []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) &&
             ( enabledExtensionCount == rhs.enabledExtensionCount ) &&
             std::equal( ppEnabledExtensionNames,
                         ppEnabledExtensionNames + enabledExtensionCount,
                         rhs.ppEnabledExtensionNames,
                         []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } );
    }

    bool operator!=( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {};
    const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo = {};
    uint32_t enabledLayerCount = {};
    const char * const * ppEnabledLayerNames = {};
    uint32_t enabledExtensionCount = {};
    const char * const * ppEnabledExtensionNames = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eInstanceCreateInfo>
  {
    using Type = InstanceCreateInfo;
  };
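Note that InstanceCreateInfo compares its layer and extension names by strcmp, not by pointer, which is why it carries hand-written comparison operators instead of the defaulted ones used by most structs here. In practice the enhanced-mode constructor is the convenient entry point, since it derives the count/pointer pairs from containers. A minimal sketch, assuming <vulkan/vulkan.hpp> is included and the default vk namespace alias is in effect:

    std::vector<char const *> layers = { "VK_LAYER_KHRONOS_validation" };
    std::vector<char const *> extensions = { VK_KHR_SURFACE_EXTENSION_NAME };
    vk::ApplicationInfo appInfo( "demo-app", 1, "no-engine", 1, VK_API_VERSION_1_1 );
    vk::InstanceCreateInfo createInfo( {}, &appInfo, layers, extensions );   // counts filled from the vectors
    vk::Instance instance = vk::createInstance( createInfo );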
  struct LatencySleepInfoNV
  {
    using NativeType = VkLatencySleepInfoNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySleepInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR LatencySleepInfoNV( VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ ), signalSemaphore( signalSemaphore_ ), value( value_ )
    {
    }

    VULKAN_HPP_CONSTEXPR LatencySleepInfoNV( LatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    LatencySleepInfoNV( VkLatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : LatencySleepInfoNV( *reinterpret_cast<LatencySleepInfoNV const *>( &rhs ) ) {}

    LatencySleepInfoNV & operator=( LatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    LatencySleepInfoNV & operator=( VkLatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LatencySleepInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setSignalSemaphore( VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphore = signalSemaphore_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
    {
      value = value_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkLatencySleepInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLatencySleepInfoNV *>( this );
    }

    operator VkLatencySleepInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLatencySleepInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, uint64_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, signalSemaphore, value );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( LatencySleepInfoNV const & ) const = default;
#else
    bool operator==( LatencySleepInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( signalSemaphore == rhs.signalSemaphore ) && ( value == rhs.value );
# endif
    }

    bool operator!=( LatencySleepInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySleepInfoNV;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore = {};
    uint64_t value = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eLatencySleepInfoNV>
  {
    using Type = LatencySleepInfoNV;
  };

  struct LatencySleepModeInfoNV
  {
    using NativeType = VkLatencySleepModeInfoNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySleepModeInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR LatencySleepModeInfoNV( VULKAN_HPP_NAMESPACE::Bool32 lowLatencyMode_ = {},
                                                 VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost_ = {},
                                                 uint32_t minimumIntervalUs_ = {},
                                                 const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ ), lowLatencyMode( lowLatencyMode_ ), lowLatencyBoost( lowLatencyBoost_ ), minimumIntervalUs( minimumIntervalUs_ )
    {
    }

    VULKAN_HPP_CONSTEXPR LatencySleepModeInfoNV( LatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    LatencySleepModeInfoNV( VkLatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : LatencySleepModeInfoNV( *reinterpret_cast<LatencySleepModeInfoNV const *>( &rhs ) )
    {
    }

    LatencySleepModeInfoNV & operator=( LatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    LatencySleepModeInfoNV & operator=( VkLatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setLowLatencyMode( VULKAN_HPP_NAMESPACE::Bool32 lowLatencyMode_ ) VULKAN_HPP_NOEXCEPT
    {
      lowLatencyMode = lowLatencyMode_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setLowLatencyBoost( VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost_ ) VULKAN_HPP_NOEXCEPT
    {
      lowLatencyBoost = lowLatencyBoost_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setMinimumIntervalUs( uint32_t minimumIntervalUs_ ) VULKAN_HPP_NOEXCEPT
    {
      minimumIntervalUs = minimumIntervalUs_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkLatencySleepModeInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLatencySleepModeInfoNV *>( this );
    }

    operator VkLatencySleepModeInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLatencySleepModeInfoNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
               VULKAN_HPP_NAMESPACE::Bool32 const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, lowLatencyMode, lowLatencyBoost, minimumIntervalUs );
    }
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( LatencySleepModeInfoNV const & ) const = default;
#else
    bool operator==( LatencySleepModeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lowLatencyMode == rhs.lowLatencyMode ) && ( lowLatencyBoost == rhs.lowLatencyBoost ) &&
             ( minimumIntervalUs == rhs.minimumIntervalUs );
# endif
    }

    bool operator!=( LatencySleepModeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySleepModeInfoNV;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 lowLatencyMode = {};
    VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost = {};
    uint32_t minimumIntervalUs = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eLatencySleepModeInfoNV>
  {
    using Type = LatencySleepModeInfoNV;
  };
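These two structs belong to VK_NV_low_latency2: LatencySleepModeInfoNV opts a swapchain into low-latency mode, and LatencySleepInfoNV paces the CPU before starting a frame. A hedged sketch, assuming the extension is enabled, the entry points are loaded through a dynamic dispatcher, and `swapchain`, `timelineSemaphore`, and `frameValue` were created elsewhere:

    vk::LatencySleepModeInfoNV mode( VK_TRUE /*lowLatencyMode*/, VK_TRUE /*lowLatencyBoost*/, 0 /*minimumIntervalUs*/ );
    device.setLatencySleepModeNV( swapchain, mode );

    vk::LatencySleepInfoNV sleepInfo( timelineSemaphore, frameValue );
    device.latencySleepNV( swapchain, sleepInfo );   // driver signals the semaphore when the frame should begin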
  struct LatencySubmissionPresentIdNV
  {
    using NativeType = VkLatencySubmissionPresentIdNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySubmissionPresentIdNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR LatencySubmissionPresentIdNV( uint64_t presentID_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ ), presentID( presentID_ )
    {
    }

    VULKAN_HPP_CONSTEXPR LatencySubmissionPresentIdNV( LatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    LatencySubmissionPresentIdNV( VkLatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : LatencySubmissionPresentIdNV( *reinterpret_cast<LatencySubmissionPresentIdNV const *>( &rhs ) )
    {
    }

    LatencySubmissionPresentIdNV & operator=( LatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    LatencySubmissionPresentIdNV & operator=( VkLatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LatencySubmissionPresentIdNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV & setPresentID( uint64_t presentID_ ) VULKAN_HPP_NOEXCEPT
    {
      presentID = presentID_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkLatencySubmissionPresentIdNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLatencySubmissionPresentIdNV *>( this );
    }

    operator VkLatencySubmissionPresentIdNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLatencySubmissionPresentIdNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentID );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( LatencySubmissionPresentIdNV const & ) const = default;
#else
    bool operator==( LatencySubmissionPresentIdNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID );
# endif
    }

    bool operator!=( LatencySubmissionPresentIdNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySubmissionPresentIdNV;
    const void * pNext = {};
    uint64_t presentID = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eLatencySubmissionPresentIdNV>
  {
    using Type = LatencySubmissionPresentIdNV;
  };
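As I understand VK_NV_low_latency2, this struct is chained into a queue submission's pNext to associate the submission with the present it contributes to; treat the exact chaining target as an assumption to verify against the extension spec. Sketch (vk::SubmitInfo2 is defined later in this header; `frameId` is a placeholder):

    vk::LatencySubmissionPresentIdNV presentIdInfo( frameId );
    vk::SubmitInfo2 submitInfo{};
    submitInfo.pNext = &presentIdInfo;   // tag this submit with its present ID before queue.submit2(...)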
  struct LatencySurfaceCapabilitiesNV
  {
    using NativeType = VkLatencySurfaceCapabilitiesNV;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySurfaceCapabilitiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR LatencySurfaceCapabilitiesNV( uint32_t presentModeCount_ = {},
                                                       VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {},
                                                       const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ ), presentModeCount( presentModeCount_ ), pPresentModes( pPresentModes_ )
    {
    }

    VULKAN_HPP_CONSTEXPR LatencySurfaceCapabilitiesNV( LatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    LatencySurfaceCapabilitiesNV( VkLatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : LatencySurfaceCapabilitiesNV( *reinterpret_cast<LatencySurfaceCapabilitiesNV const *>( &rhs ) )
    {
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    LatencySurfaceCapabilitiesNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_,
                                  const void * pNext_ = nullptr )
      : pNext( pNext_ ), presentModeCount( static_cast<uint32_t>( presentModes_.size() ) ), pPresentModes( presentModes_.data() )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    LatencySurfaceCapabilitiesNV & operator=( LatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    LatencySurfaceCapabilitiesNV & operator=( VkLatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LatencySurfaceCapabilitiesNV const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      presentModeCount = presentModeCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPPresentModes( VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT
    {
      pPresentModes = pPresentModes_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    LatencySurfaceCapabilitiesNV &
      setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PresentModeKHR> const & presentModes_ ) VULKAN_HPP_NOEXCEPT
    {
      presentModeCount = static_cast<uint32_t>( presentModes_.size() );
      pPresentModes = presentModes_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkLatencySurfaceCapabilitiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLatencySurfaceCapabilitiesNV *>( this );
    }

    operator VkLatencySurfaceCapabilitiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLatencySurfaceCapabilitiesNV *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::PresentModeKHR * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, presentModeCount, pPresentModes );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( LatencySurfaceCapabilitiesNV const & ) const = default;
#else
    bool operator==( LatencySurfaceCapabilitiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeCount == rhs.presentModeCount ) && ( pPresentModes == rhs.pPresentModes );
# endif
    }

    bool operator!=( LatencySurfaceCapabilitiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySurfaceCapabilitiesNV;
    const void * pNext = {};
    uint32_t presentModeCount = {};
    VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eLatencySurfaceCapabilitiesNV>
  {
    using Type = LatencySurfaceCapabilitiesNV;
  };

  struct LayerProperties
  {
    using NativeType = VkLayerProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 LayerProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & layerName_ = {},
                                             uint32_t specVersion_ = {},
                                             uint32_t implementationVersion_ = {},
                                             std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {} ) VULKAN_HPP_NOEXCEPT
      : layerName( layerName_ ), specVersion( specVersion_ ), implementationVersion( implementationVersion_ ), description( description_ )
    {
    }

    VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT : LayerProperties( *reinterpret_cast<LayerProperties const *>( &rhs ) ) {}

    LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>( &rhs );
      return *this;
    }

    operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLayerProperties *>( this );
    }

    operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLayerProperties *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &,
               uint32_t const &,
               uint32_t const &,
               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( layerName, specVersion, implementationVersion, description );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( LayerProperties const & ) const = default;
#else
    bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( layerName == rhs.layerName ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) &&
             ( description == rhs.description );
# endif
    }

    bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layerName = {};
    uint32_t specVersion = {};
    uint32_t implementationVersion = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
  };
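LayerProperties is a returned-only struct (no sType, no setters); it is filled by layer enumeration. A minimal sketch, assuming <vulkan/vulkan.hpp> and <cstdio> are included:

    for ( vk::LayerProperties const & lp : vk::enumerateInstanceLayerProperties() )
    {
      // ArrayWrapper1D wraps the fixed-size char arrays; data() yields a C string
      std::printf( "%s (spec %u, impl %u): %s\n",
                   lp.layerName.data(), lp.specVersion, lp.implementationVersion, lp.description.data() );
    }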
  struct LayerSettingEXT
  {
    using NativeType = VkLayerSettingEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR LayerSettingEXT( const char * pLayerName_ = {},
                                          const char * pSettingName_ = {},
                                          VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_ = VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT::eBool32,
                                          uint32_t valueCount_ = {},
                                          const void * pValues_ = {} ) VULKAN_HPP_NOEXCEPT
      : pLayerName( pLayerName_ ), pSettingName( pSettingName_ ), type( type_ ), valueCount( valueCount_ ), pValues( pValues_ )
    {
    }

    VULKAN_HPP_CONSTEXPR LayerSettingEXT( LayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    LayerSettingEXT( VkLayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT : LayerSettingEXT( *reinterpret_cast<LayerSettingEXT const *>( &rhs ) ) {}

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    LayerSettingEXT( const char * pLayerName_,
                     const char * pSettingName_,
                     VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_,
                     VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & values_ )
      : pLayerName( pLayerName_ )
      , pSettingName( pSettingName_ )
      , type( type_ )
      , valueCount( static_cast<uint32_t>( values_.size() * sizeof( T ) ) )
      , pValues( values_.data() )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    LayerSettingEXT & operator=( LayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    LayerSettingEXT & operator=( VkLayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerSettingEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setPLayerName( const char * pLayerName_ ) VULKAN_HPP_NOEXCEPT
    {
      pLayerName = pLayerName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setPSettingName( const char * pSettingName_ ) VULKAN_HPP_NOEXCEPT
    {
      pSettingName = pSettingName_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setType( VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setValueCount( uint32_t valueCount_ ) VULKAN_HPP_NOEXCEPT
    {
      valueCount = valueCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setPValues( const void * pValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pValues = pValues_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & values_ ) VULKAN_HPP_NOEXCEPT
    {
      valueCount = static_cast<uint32_t>( values_.size() * sizeof( T ) );
      pValues = values_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkLayerSettingEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLayerSettingEXT *>( this );
    }

    operator VkLayerSettingEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLayerSettingEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<const char * const &, const char * const &, VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT const &, uint32_t const &, const void * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( pLayerName, pSettingName, type, valueCount, pValues );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    std::strong_ordering operator<=>( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      if ( pLayerName != rhs.pLayerName )
        if ( auto cmp = strcmp( pLayerName, rhs.pLayerName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( pSettingName != rhs.pSettingName )
        if ( auto cmp = strcmp( pSettingName, rhs.pSettingName ); cmp != 0 )
          return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
      if ( auto cmp = type <=> rhs.type; cmp != 0 )
        return cmp;
      if ( auto cmp = valueCount <=> rhs.valueCount; cmp != 0 )
        return cmp;
      if ( auto cmp = pValues <=> rhs.pValues; cmp != 0 )
        return cmp;

      return std::strong_ordering::equivalent;
    }
#endif
    bool operator==( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( ( pLayerName == rhs.pLayerName ) || ( strcmp( pLayerName, rhs.pLayerName ) == 0 ) ) &&
             ( ( pSettingName == rhs.pSettingName ) || ( strcmp( pSettingName, rhs.pSettingName ) == 0 ) ) && ( type == rhs.type ) &&
             ( valueCount == rhs.valueCount ) && ( pValues == rhs.pValues );
    }

    bool operator!=( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }

  public:
    const char * pLayerName = {};
    const char * pSettingName = {};
    VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type = VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT::eBool32;
    uint32_t valueCount = {};
    const void * pValues = {};
  };
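LayerSettingEXT is the per-setting record of VK_EXT_layer_settings; an array of them is wrapped in the LayerSettingsCreateInfoEXT defined next and chained into instance creation. A hedged sketch; the setting key "gpuav_enable" is illustrative only (the valid keys are documented by each layer):

    const vk::Bool32 enableGpuAv = VK_TRUE;
    vk::LayerSettingEXT setting( "VK_LAYER_KHRONOS_validation", "gpuav_enable",
                                 vk::LayerSettingTypeEXT::eBool32, 1, &enableGpuAv );
    vk::LayerSettingsCreateInfoEXT settingsInfo( 1, &setting );
    // chain into vk::InstanceCreateInfo::pNext before calling vk::createInstance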
  struct LayerSettingsCreateInfoEXT
  {
    using NativeType = VkLayerSettingsCreateInfoEXT;

    static const bool allowDuplicate = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLayerSettingsCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR LayerSettingsCreateInfoEXT( uint32_t settingCount_ = {},
                                                     const VULKAN_HPP_NAMESPACE::LayerSettingEXT * pSettings_ = {},
                                                     const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ ), settingCount( settingCount_ ), pSettings( pSettings_ )
    {
    }

    VULKAN_HPP_CONSTEXPR LayerSettingsCreateInfoEXT( LayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    LayerSettingsCreateInfoEXT( VkLayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : LayerSettingsCreateInfoEXT( *reinterpret_cast<LayerSettingsCreateInfoEXT const *>( &rhs ) )
    {
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    LayerSettingsCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::LayerSettingEXT> const & settings_,
                                const void * pNext_ = nullptr )
      : pNext( pNext_ ), settingCount( static_cast<uint32_t>( settings_.size() ) ), pSettings( settings_.data() )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    LayerSettingsCreateInfoEXT & operator=( LayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    LayerSettingsCreateInfoEXT & operator=( VkLayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerSettingsCreateInfoEXT const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setSettingCount( uint32_t settingCount_ ) VULKAN_HPP_NOEXCEPT
    {
      settingCount = settingCount_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setPSettings( const VULKAN_HPP_NAMESPACE::LayerSettingEXT * pSettings_ ) VULKAN_HPP_NOEXCEPT
    {
      pSettings = pSettings_;
      return *this;
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    LayerSettingsCreateInfoEXT &
      setSettings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::LayerSettingEXT> const & settings_ ) VULKAN_HPP_NOEXCEPT
    {
      settingCount = static_cast<uint32_t>( settings_.size() );
      pSettings = settings_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkLayerSettingsCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLayerSettingsCreateInfoEXT *>( this );
    }

    operator VkLayerSettingsCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLayerSettingsCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::LayerSettingEXT * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, settingCount, pSettings );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( LayerSettingsCreateInfoEXT const & ) const = default;
#else
    bool operator==( LayerSettingsCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( settingCount == rhs.settingCount ) && ( pSettings == rhs.pSettings );
# endif
    }

    bool operator!=( LayerSettingsCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLayerSettingsCreateInfoEXT;
    const void * pNext = {};
    uint32_t settingCount = {};
    const VULKAN_HPP_NAMESPACE::LayerSettingEXT * pSettings = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eLayerSettingsCreateInfoEXT>
  {
    using Type = LayerSettingsCreateInfoEXT;
  };
#if defined( VK_USE_PLATFORM_MACOS_MVK )
  struct MacOSSurfaceCreateInfoMVK
  {
    using NativeType = VkMacOSSurfaceCreateInfoMVK;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK;

# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {},
                                                    const void * pView_ = {},
                                                    const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ ), flags( flags_ ), pView( pView_ )
    {
    }

    VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
      : MacOSSurfaceCreateInfoMVK( *reinterpret_cast<MacOSSurfaceCreateInfoMVK const *>( &rhs ) )
    {
    }

    MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const *>( &rhs );
      return *this;
    }

# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT
    {
      pView = pView_;
      return *this;
    }
# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( this );
    }

    operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK *>( this );
    }

# if defined( VULKAN_HPP_USE_REFLECT )
#  if 14 <= VULKAN_HPP_CPP_VERSION
    auto
#  else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK const &, const void * const &>
#  endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, pView );
    }
# endif

# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MacOSSurfaceCreateInfoMVK const & ) const = default;
# else
    bool operator==( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView );
#  endif
    }

    bool operator!=( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
# endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {};
    const void * pView = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eMacosSurfaceCreateInfoMVK>
  {
    using Type = MacOSSurfaceCreateInfoMVK;
  };
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
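This struct only exists when the header is compiled with VK_USE_PLATFORM_MACOS_MVK (relevant to the macOS/MoltenVK path MacroLibX's Makefile targets). A hedged sketch; `nsViewPointer` stands in for a CAMetalLayer-backed NSView* obtained from the windowing layer:

    vk::MacOSSurfaceCreateInfoMVK surfaceInfo( {}, nsViewPointer );
    vk::SurfaceKHR surface = instance.createMacOSSurfaceMVK( surfaceInfo );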
  struct MappedMemoryRange
  {
    using NativeType = VkMappedMemoryRange;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
                                            VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
                                            VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
                                            const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ ), memory( memory_ ), offset( offset_ ), size( size_ )
    {
    }

    VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT : MappedMemoryRange( *reinterpret_cast<MappedMemoryRange const *>( &rhs ) ) {}

    MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MappedMemoryRange const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMappedMemoryRange *>( this );
    }

    operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMappedMemoryRange *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &,
               VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, memory, offset, size );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MappedMemoryRange const & ) const = default;
#else
    bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && ( size == rhs.size );
# endif
    }

    bool operator!=( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eMappedMemoryRange>
  {
    using Type = MappedMemoryRange;
  };
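A MappedMemoryRange is what you hand to flushMappedMemoryRanges / invalidateMappedMemoryRanges for memory that is host-visible but not HOST_COHERENT. A minimal sketch, assuming <cstring> and a `vk::DeviceMemory memory` plus `srcData`/`srcSize` placeholders:

    void * mapped = device.mapMemory( memory, 0, VK_WHOLE_SIZE );
    std::memcpy( mapped, srcData, srcSize );
    vk::MappedMemoryRange range( memory, 0, VK_WHOLE_SIZE );
    device.flushMappedMemoryRanges( range );   // no-op requirement-wise on HOST_COHERENT heaps
    device.unmapMemory( memory );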
  struct MemoryAllocateFlagsInfo
  {
    using NativeType = VkMemoryAllocateFlagsInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {},
                                                  uint32_t deviceMask_ = {},
                                                  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ ), flags( flags_ ), deviceMask( deviceMask_ )
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryAllocateFlagsInfo( *reinterpret_cast<MemoryAllocateFlagsInfo const *>( &rhs ) )
    {
    }

    MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMask = deviceMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryAllocateFlagsInfo *>( this );
    }

    operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryAllocateFlagsInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::MemoryAllocateFlags const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, deviceMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryAllocateFlagsInfo const & ) const = default;
#else
    bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( deviceMask == rhs.deviceMask );
# endif
    }

    bool operator!=( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {};
    uint32_t deviceMask = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eMemoryAllocateFlagsInfo>
  {
    using Type = MemoryAllocateFlagsInfo;
  };

  using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;

  struct MemoryAllocateInfo
  {
    using NativeType = VkMemoryAllocateInfo;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {},
                                             uint32_t memoryTypeIndex_ = {},
                                             const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ ), allocationSize( allocationSize_ ), memoryTypeIndex( memoryTypeIndex_ )
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryAllocateInfo( *reinterpret_cast<MemoryAllocateInfo const *>( &rhs ) ) {}

    MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
    {
      allocationSize = allocationSize_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryTypeIndex = memoryTypeIndex_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryAllocateInfo *>( this );
    }

    operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryAllocateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, allocationSize, memoryTypeIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryAllocateInfo const & ) const = default;
#else
    bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeIndex == rhs.memoryTypeIndex );
# endif
    }

    bool operator!=( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
    uint32_t memoryTypeIndex = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eMemoryAllocateInfo>
  {
    using Type = MemoryAllocateInfo;
  };
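MemoryAllocateFlagsInfo extends MemoryAllocateInfo through the pNext chain, which is exactly what vk::StructureChain automates. A sketch, assuming `memReqs` and `memoryTypeIndex` were obtained from the usual requirements query:

    vk::StructureChain<vk::MemoryAllocateInfo, vk::MemoryAllocateFlagsInfo> chain(
      vk::MemoryAllocateInfo( memReqs.size, memoryTypeIndex ),
      vk::MemoryAllocateFlagsInfo( vk::MemoryAllocateFlagBits::eDeviceAddress ) );
    vk::DeviceMemory memory = device.allocateMemory( chain.get<vk::MemoryAllocateInfo>() );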
  struct MemoryBarrier
  {
    using NativeType = VkMemoryBarrier;

    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
                                        VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
                                        const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext( pNext_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ )
    {
    }

    VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) ) {}

    MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

    operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryBarrier *>( this );
    }

    operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryBarrier *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, srcAccessMask, dstAccessMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryBarrier const & ) const = default;
#else
    bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask );
# endif
    }

    bool operator!=( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
    const void * pNext = {};
    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
  };

  template <>
  struct CppType<StructureType, StructureType::eMemoryBarrier>
  {
    using Type = MemoryBarrier;
  };
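A global memory barrier is recorded with vk::CommandBuffer::pipelineBarrier; the ArrayProxy parameters accept a single barrier directly. Sketch, with `cmd` a command buffer in the recording state:

    vk::MemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite, vk::AccessFlagBits::eShaderRead );
    cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,        // producer stage
                         vk::PipelineStageFlagBits::eFragmentShader,  // consumer stage
                         {},                                          // dependency flags
                         barrier, {}, {} );                           // memory / buffer / image barriers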
MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image, buffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryDedicatedAllocateInfo const & ) const = default; +#else + bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + + template <> + struct CppType + { + using Type = MemoryDedicatedAllocateInfo; + }; + + using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; + + struct MemoryDedicatedRequirements + { + using NativeType = VkMemoryDedicatedRequirements; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , prefersDedicatedAllocation( prefersDedicatedAllocation_ ) + , requiresDedicatedAllocation( requiresDedicatedAllocation_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryDedicatedRequirements( *reinterpret_cast( &rhs ) ) + { + } + + MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) 
VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, prefersDedicatedAllocation, requiresDedicatedAllocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryDedicatedRequirements const & ) const = default; +#else + bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) && + ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation ); +# endif + } + + bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {}; + VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {}; + }; + + template <> + struct CppType + { + using Type = MemoryDedicatedRequirements; + }; + + using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; + + struct MemoryFdPropertiesKHR + { + using NativeType = VkMemoryFdPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memoryTypeBits( memoryTypeBits_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryFdPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryTypeBits ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryFdPropertiesKHR const & ) const = default; +#else + bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# 
endif + } + + bool operator!=( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = MemoryFdPropertiesKHR; + }; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct MemoryGetAndroidHardwareBufferInfoANDROID + { + using NativeType = VkMemoryGetAndroidHardwareBufferInfoANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memory( memory_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetAndroidHardwareBufferInfoANDROID( *reinterpret_cast( &rhs ) ) + { + } + + MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const & ) const = default; +# else + bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); +# endif + } + + bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + }; + + template <> + struct CppType + { + using Type = MemoryGetAndroidHardwareBufferInfoANDROID; + 
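+    // Usage sketch (illustrative comment): under VK_ANDROID_external_memory_android_hardware_buffer,
+    // the struct above feeds Device::getMemoryAndroidHardwareBufferANDROID; `device` and
+    // `memory` are assumed to exist, with enhanced mode enabled:
+    //   AHardwareBuffer * ahb = device.getMemoryAndroidHardwareBufferANDROID(
+    //     vk::MemoryGetAndroidHardwareBufferInfoANDROID{ memory } );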
}; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct MemoryGetFdInfoKHR + { + using NativeType = VkMemoryGetFdInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memory( memory_ ) + , handleType( handleType_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryGetFdInfoKHR( *reinterpret_cast( &rhs ) ) {} + + MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetFdInfoKHR const & ) const = default; +#else + bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = MemoryGetFdInfoKHR; + }; + + struct MemoryGetRemoteAddressInfoNV + { + using NativeType = VkMemoryGetRemoteAddressInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetRemoteAddressInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
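+    // Usage sketch (illustrative comment): the MemoryGetFdInfoKHR struct above is the
+    // input to Device::getMemoryFdKHR from VK_KHR_external_memory_fd; `device` and
+    // `memory` are assumed to exist, with enhanced mode enabled:
+    //   int fd = device.getMemoryFdKHR(
+    //     vk::MemoryGetFdInfoKHR{ memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd } );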
MemoryGetRemoteAddressInfoNV( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memory( memory_ ) + , handleType( handleType_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetRemoteAddressInfoNV( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetRemoteAddressInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + MemoryGetRemoteAddressInfoNV & operator=( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryGetRemoteAddressInfoNV & operator=( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMemoryGetRemoteAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetRemoteAddressInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetRemoteAddressInfoNV const & ) const = default; +#else + bool operator==( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetRemoteAddressInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = MemoryGetRemoteAddressInfoNV; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct MemoryGetWin32HandleInfoKHR + { + using NativeType = VkMemoryGetWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + 
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memory( memory_ ) + , handleType( handleType_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = MemoryGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct MemoryGetZirconHandleInfoFUCHSIA + { + using NativeType = VkMemoryGetZirconHandleInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits 
handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memory( memory_ ) + , handleType( handleType_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetZirconHandleInfoFUCHSIA( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + MemoryGetZirconHandleInfoFUCHSIA & operator=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryGetZirconHandleInfoFUCHSIA & operator=( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetZirconHandleInfoFUCHSIA const & ) const = default; +# else + bool operator==( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = MemoryGetZirconHandleInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct MemoryHeap + { + using NativeType = VkMemoryHeap; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : size( size_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryHeap( 
*reinterpret_cast<MemoryHeap const *>( &rhs ) ) {}
+
+    MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHeap const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryHeap *>( this );
+    }
+
+    operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryHeap *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::MemoryHeapFlags const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( size, flags );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryHeap const & ) const = default;
+#else
+    bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( size == rhs.size ) && ( flags == rhs.flags );
+#  endif
+    }
+
+    bool operator!=( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize      size  = {};
+    VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {};
+  };
+
+  struct MemoryHostPointerPropertiesEXT
+  {
+    using NativeType = VkMemoryHostPointerPropertiesEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , memoryTypeBits( memoryTypeBits_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MemoryHostPointerPropertiesEXT( *reinterpret_cast<MemoryHostPointerPropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT *>( this );
+    }
+
+    operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memoryTypeBits );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryHostPointerPropertiesEXT const & ) const = default;
+#else
+    bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
+#  endif
+    }
+
+    bool operator!=( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eMemoryHostPointerPropertiesEXT; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = MemoryHostPointerPropertiesEXT; + }; + + struct MemoryMapInfoKHR + { + using NativeType = VkMemoryMapInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMapInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryMapInfoKHR( VULKAN_HPP_NAMESPACE::MemoryMapFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , memory( memory_ ) + , offset( offset_ ) + , size( size_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryMapInfoKHR( MemoryMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryMapInfoKHR( VkMemoryMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryMapInfoKHR( *reinterpret_cast( &rhs ) ) {} + + MemoryMapInfoKHR & operator=( MemoryMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryMapInfoKHR & operator=( VkMemoryMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::MemoryMapFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfoKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfoKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMemoryMapInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryMapInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, memory, offset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryMapInfoKHR const & ) const = default; +#else + bool operator==( MemoryMapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && + ( size == rhs.size ); +# endif + } + + bool operator!=( MemoryMapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryMapInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + 
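+    // Usage sketch (illustrative comment): MemoryMapInfoKHR drives Device::mapMemory2KHR
+    // from VK_KHR_map_memory2; `device`, `memory` and `size` are assumed to exist, with
+    // enhanced mode enabled:
+    //   void * data = device.mapMemory2KHR( vk::MemoryMapInfoKHR{ {}, memory, 0, size } );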
VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = MemoryMapInfoKHR; + }; + + struct MemoryOpaqueCaptureAddressAllocateInfo + { + using NativeType = VkMemoryOpaqueCaptureAddressAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , opaqueCaptureAddress( opaqueCaptureAddress_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryOpaqueCaptureAddressAllocateInfo( *reinterpret_cast( &rhs ) ) + { + } + + MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryOpaqueCaptureAddressAllocateInfo & operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT + { + opaqueCaptureAddress = opaqueCaptureAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, opaqueCaptureAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const & ) const = default; +#else + bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); +# endif + } + + bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; + const void * pNext = {}; + uint64_t opaqueCaptureAddress = {}; + }; + + template <> + struct CppType + { + using Type = MemoryOpaqueCaptureAddressAllocateInfo; + }; + + using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo; + + struct MemoryPriorityAllocateInfoEXT + { + using NativeType = VkMemoryPriorityAllocateInfoEXT; + + static const bool allowDuplicate = false; + 
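+    // Usage sketch (illustrative comment): the MemoryRequirements and MemoryType structs
+    // defined below are typically combined to pick a memoryTypeIndex for an allocation;
+    // `device`, `physicalDevice` and `buffer` are assumed to exist:
+    //   vk::MemoryRequirements reqs = device.getBufferMemoryRequirements( buffer );
+    //   vk::PhysicalDeviceMemoryProperties props = physicalDevice.getMemoryProperties();
+    //   uint32_t typeIndex = 0;
+    //   for ( uint32_t i = 0; i < props.memoryTypeCount; ++i )
+    //     if ( ( reqs.memoryTypeBits & ( 1u << i ) ) &&
+    //          ( props.memoryTypes[i].propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible ) )
+    //     {
+    //       typeIndex = i;
+    //       break;
+    //     }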
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , priority( priority_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryPriorityAllocateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT + { + priority = priority_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, priority ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryPriorityAllocateInfoEXT const & ) const = default; +#else + bool operator==( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priority == rhs.priority ); +# endif + } + + bool operator!=( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT; + const void * pNext = {}; + float priority = {}; + }; + + template <> + struct CppType + { + using Type = MemoryPriorityAllocateInfoEXT; + }; + + struct MemoryRequirements + { + using NativeType = VkMemoryRequirements; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, + uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : size( size_ ) + , alignment( alignment_ ) + , memoryTypeBits( memoryTypeBits_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements( *reinterpret_cast( &rhs ) ) {} + + MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + 
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements *>( this );
+    }
+
+    operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( size, alignment, memoryTypeBits );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryRequirements const & ) const = default;
+#else
+    bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( size == rhs.size ) && ( alignment == rhs.alignment ) && ( memoryTypeBits == rhs.memoryTypeBits );
+#  endif
+    }
+
+    bool operator!=( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize size           = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize alignment      = {};
+    uint32_t                         memoryTypeBits = {};
+  };
+
+  struct MemoryRequirements2
+  {
+    using NativeType = VkMemoryRequirements2;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , memoryRequirements( memoryRequirements_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements2( *reinterpret_cast<MemoryRequirements2 const *>( &rhs ) )
+    {
+    }
+
+    MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements2 *>( this );
+    }
+
+    operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::MemoryRequirements const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memoryRequirements );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryRequirements2 const & ) const = default;
+#else
+    bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements );
+#  endif
+    }
+
+    bool operator!=( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType      sType              = StructureType::eMemoryRequirements2;
+    void *                                   pNext              = {};
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eMemoryRequirements2>
+  {
+    using Type = MemoryRequirements2;
+  };
+
+  using MemoryRequirements2KHR = MemoryRequirements2;
+
+  struct MemoryType
+  {
+    using NativeType = VkMemoryType;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : propertyFlags( propertyFlags_ )
+      , heapIndex( heapIndex_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryType( *reinterpret_cast<MemoryType const *>( &rhs ) ) {}
+
+    MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryType *>( this );
+    }
+
+    operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryType *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::MemoryPropertyFlags const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( propertyFlags, heapIndex );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryType const & ) const = default;
+#else
+    bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( propertyFlags == rhs.propertyFlags ) && ( heapIndex == rhs.heapIndex );
+#  endif
+    }
+
+    bool operator!=( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
+    uint32_t                                  heapIndex     = {};
+  };
+
+  struct MemoryUnmapInfoKHR
+  {
+    using NativeType = VkMemoryUnmapInfoKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryUnmapInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryUnmapInfoKHR( VULKAN_HPP_NAMESPACE::MemoryUnmapFlagsKHR flags_ = {},
+                                             VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+                                             const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , memory( memory_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR MemoryUnmapInfoKHR( MemoryUnmapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryUnmapInfoKHR( VkMemoryUnmapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryUnmapInfoKHR( *reinterpret_cast<MemoryUnmapInfoKHR const *>( &rhs ) ) {}
+
+    MemoryUnmapInfoKHR & operator=( MemoryUnmapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MemoryUnmapInfoKHR & operator=( VkMemoryUnmapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::MemoryUnmapFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkMemoryUnmapInfoKHR const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryUnmapInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, memory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryUnmapInfoKHR const & ) const = default; +#else + bool operator==( MemoryUnmapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( memory == rhs.memory ); +# endif + } + + bool operator!=( MemoryUnmapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryUnmapInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryUnmapFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + }; + + template <> + struct CppType + { + using Type = MemoryUnmapInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct MemoryWin32HandlePropertiesKHR + { + using NativeType = VkMemoryWin32HandlePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memoryTypeBits( memoryTypeBits_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryWin32HandlePropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryTypeBits ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryWin32HandlePropertiesKHR const & ) const = default; +# else + bool operator==( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eMemoryWin32HandlePropertiesKHR; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = MemoryWin32HandlePropertiesKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct MemoryZirconHandlePropertiesFUCHSIA + { + using NativeType = VkMemoryZirconHandlePropertiesFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memoryTypeBits( memoryTypeBits_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryZirconHandlePropertiesFUCHSIA( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryZirconHandlePropertiesFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + MemoryZirconHandlePropertiesFUCHSIA & operator=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryZirconHandlePropertiesFUCHSIA & operator=( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryTypeBits ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryZirconHandlePropertiesFUCHSIA const & ) const = default; +# else + bool operator==( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = MemoryZirconHandlePropertiesFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct MetalSurfaceCreateInfoEXT + { + using NativeType = VkMetalSurfaceCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, + const CAMetalLayer * pLayer_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , pLayer( pLayer_ ) + { + } + + VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( 
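+    // Usage sketch (illustrative comment): with VK_EXT_metal_surface this struct is passed
+    // to Instance::createMetalSurfaceEXT; `instance` and a CAMetalLayer * `layer` are
+    // assumed to exist, with enhanced mode enabled:
+    //   vk::SurfaceKHR surface =
+    //     instance.createMetalSurfaceEXT( vk::MetalSurfaceCreateInfoEXT{ {}, layer } );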
MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MetalSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer * pLayer_ ) VULKAN_HPP_NOEXCEPT + { + pLayer = pLayer_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pLayer ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MetalSurfaceCreateInfoEXT const & ) const = default; +# else + bool operator==( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pLayer == rhs.pLayer ); +# endif + } + + bool operator!=( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {}; + const CAMetalLayer * pLayer = {}; + }; + + template <> + struct CppType + { + using Type = MetalSurfaceCreateInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + struct MicromapBuildInfoEXT + { + using NativeType = VkMicromapBuildInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap, + VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT::eBuild, + VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_ = {}, + uint32_t usageCountsCount_ = {}, + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {}, + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR 
triangleArray_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , type( type_ ) + , flags( flags_ ) + , mode( mode_ ) + , dstMicromap( dstMicromap_ ) + , usageCountsCount( usageCountsCount_ ) + , pUsageCounts( pUsageCounts_ ) + , ppUsageCounts( ppUsageCounts_ ) + , data( data_ ) + , scratchData( scratchData_ ) + , triangleArray( triangleArray_ ) + , triangleArrayStride( triangleArrayStride_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MicromapBuildInfoEXT( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MicromapBuildInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MicromapBuildInfoEXT( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_, + VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_, + VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , type( type_ ) + , flags( flags_ ) + , mode( mode_ ) + , dstMicromap( dstMicromap_ ) + , usageCountsCount( static_cast( !usageCounts_.empty() ? usageCounts_.size() : pUsageCounts_.size() ) ) + , pUsageCounts( usageCounts_.data() ) + , ppUsageCounts( pUsageCounts_.data() ) + , data( data_ ) + , scratchData( scratchData_ ) + , triangleArray( triangleArray_ ) + , triangleArrayStride( triangleArrayStride_ ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 ); +# else + if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::MicromapBuildInfoEXT::MicromapBuildInfoEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + MicromapBuildInfoEXT & operator=( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MicromapBuildInfoEXT & operator=( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setType( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setMode( VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setDstMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_ ) VULKAN_HPP_NOEXCEPT + { + dstMicromap = dstMicromap_; + return *this; + } + 
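+    // Usage sketch (illustrative comment): the enhanced-mode constructor above accepts the
+    // usage counts either as a flat array (usageCounts_) or as an array of pointers
+    // (pUsageCounts_), but never both at once; `micromap` and a
+    // std::vector<vk::MicromapUsageEXT> named `counts` are assumed to exist:
+    //   vk::MicromapBuildInfoEXT info{ vk::MicromapTypeEXT::eOpacityMicromap, {},
+    //                                  vk::BuildMicromapModeEXT::eBuild, micromap, counts };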
+ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = usageCountsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + pUsageCounts = pUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MicromapBuildInfoEXT & + setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( usageCounts_.size() ); + pUsageCounts = usageCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + ppUsageCounts = ppUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MicromapBuildInfoEXT & setPUsageCounts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( pUsageCounts_.size() ); + ppUsageCounts = pUsageCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT + { + scratchData = scratchData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & + setTriangleArray( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & triangleArray_ ) VULKAN_HPP_NOEXCEPT + { + triangleArray = triangleArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setTriangleArrayStride( VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ ) VULKAN_HPP_NOEXCEPT + { + triangleArrayStride = triangleArrayStride_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMicromapBuildInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapBuildInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, type, flags, mode, dstMicromap, usageCountsCount, pUsageCounts, ppUsageCounts, data, scratchData, triangleArray, triangleArrayStride ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapBuildInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MicromapTypeEXT type = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap; + VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT::eBuild; + VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap = {}; + uint32_t usageCountsCount = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR 
triangleArray = {}; + VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride = {}; + }; + + template <> + struct CppType + { + using Type = MicromapBuildInfoEXT; + }; + + struct MicromapBuildSizesInfoEXT + { + using NativeType = VkMicromapBuildSizesInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildSizesInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( VULKAN_HPP_NAMESPACE::DeviceSize micromapSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 discardable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , micromapSize( micromapSize_ ) + , buildScratchSize( buildScratchSize_ ) + , discardable( discardable_ ) + { + } + + VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MicromapBuildSizesInfoEXT( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MicromapBuildSizesInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MicromapBuildSizesInfoEXT & operator=( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MicromapBuildSizesInfoEXT & operator=( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setMicromapSize( VULKAN_HPP_NAMESPACE::DeviceSize micromapSize_ ) VULKAN_HPP_NOEXCEPT + { + micromapSize = micromapSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT + { + buildScratchSize = buildScratchSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setDiscardable( VULKAN_HPP_NAMESPACE::Bool32 discardable_ ) VULKAN_HPP_NOEXCEPT + { + discardable = discardable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMicromapBuildSizesInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapBuildSizesInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, micromapSize, buildScratchSize, discardable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MicromapBuildSizesInfoEXT const & ) const = default; +#else + bool operator==( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( micromapSize == rhs.micromapSize ) && ( buildScratchSize == rhs.buildScratchSize ) && + ( discardable == rhs.discardable ); +# endif + } + + bool operator!=( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapBuildSizesInfoEXT; + const void * 
pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize micromapSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 discardable = {}; + }; + + template <> + struct CppType + { + using Type = MicromapBuildSizesInfoEXT; + }; + + struct MicromapCreateInfoEXT + { + using NativeType = VkMicromapCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap, + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , createFlags( createFlags_ ) + , buffer( buffer_ ) + , offset( offset_ ) + , size( size_ ) + , type( type_ ) + , deviceAddress( deviceAddress_ ) + { + } + + VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MicromapCreateInfoEXT( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MicromapCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MicromapCreateInfoEXT & operator=( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MicromapCreateInfoEXT & operator=( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setCreateFlags( VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags_ ) VULKAN_HPP_NOEXCEPT + { + createFlags = createFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setType( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMicromapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress ); + } +#endif + 
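    // [Editor's note: illustration only, not part of the vendored header.] A micromap is
    // created over a region of an existing buffer. A minimal sketch, assuming `device` is
    // a vk::Device with VK_EXT_opacity_micromap enabled, `backingBuffer` was created with
    // vk::BufferUsageFlagBits::eMicromapStorageEXT, and `requiredSize` came from
    // device.getMicromapBuildSizesEXT:
    //
    //   auto createInfo = vk::MicromapCreateInfoEXT{}
    //                       .setBuffer( backingBuffer )
    //                       .setOffset( 0 )
    //                       .setSize( requiredSize )
    //                       .setType( vk::MicromapTypeEXT::eOpacityMicromap );
    //   vk::MicromapEXT micromap = device.createMicromapEXT( createInfo );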
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MicromapCreateInfoEXT const & ) const = default; +#else + bool operator==( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && + ( size == rhs.size ) && ( type == rhs.type ) && ( deviceAddress == rhs.deviceAddress ); +# endif + } + + bool operator!=( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::MicromapTypeEXT type = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + }; + + template <> + struct CppType + { + using Type = MicromapCreateInfoEXT; + }; + + struct MicromapTriangleEXT + { + using NativeType = VkMicromapTriangleEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MicromapTriangleEXT( uint32_t dataOffset_ = {}, uint16_t subdivisionLevel_ = {}, uint16_t format_ = {} ) VULKAN_HPP_NOEXCEPT + : dataOffset( dataOffset_ ) + , subdivisionLevel( subdivisionLevel_ ) + , format( format_ ) + { + } + + VULKAN_HPP_CONSTEXPR MicromapTriangleEXT( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MicromapTriangleEXT( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapTriangleEXT( *reinterpret_cast( &rhs ) ) + { + } + + MicromapTriangleEXT & operator=( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MicromapTriangleEXT & operator=( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setDataOffset( uint32_t dataOffset_ ) VULKAN_HPP_NOEXCEPT + { + dataOffset = dataOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setSubdivisionLevel( uint16_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT + { + subdivisionLevel = subdivisionLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setFormat( uint16_t format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMicromapTriangleEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapTriangleEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( dataOffset, subdivisionLevel, format ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MicromapTriangleEXT const & ) const = default; +#else + bool operator==( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( dataOffset == 
rhs.dataOffset ) && ( subdivisionLevel == rhs.subdivisionLevel ) && ( format == rhs.format );
+# endif
+    }
+
+    bool operator!=( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t dataOffset       = {};
+    uint16_t subdivisionLevel = {};
+    uint16_t format           = {};
+  };
+
+  struct MicromapVersionInfoEXT
+  {
+    using NativeType = VkMicromapVersionInfoEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapVersionInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , pVersionData( pVersionData_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MicromapVersionInfoEXT( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MicromapVersionInfoEXT( *reinterpret_cast<MicromapVersionInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    MicromapVersionInfoEXT & operator=( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    MicromapVersionInfoEXT & operator=( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVersionData = pVersionData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkMicromapVersionInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMicromapVersionInfoEXT *>( this );
+    }
+
+    operator VkMicromapVersionInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMicromapVersionInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const uint8_t * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pVersionData );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MicromapVersionInfoEXT const & ) const = default;
+#else
+    bool operator==( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData );
+# endif
+    }
+
+    bool operator!=( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::eMicromapVersionInfoEXT;
+    const void *                        pNext        = {};
+    const uint8_t *                     pVersionData = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eMicromapVersionInfoEXT>
+  {
+    using Type = MicromapVersionInfoEXT;
+  };
+
+  struct MultiDrawIndexedInfoEXT
+  {
+    using NativeType = VkMultiDrawIndexedInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( uint32_t firstIndex_ = {}, uint32_t indexCount_ = {}, int32_t vertexOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+      : firstIndex( firstIndex_ )
+      , indexCount( indexCount_ )
+      , vertexOffset( vertexOffset_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( MultiDrawIndexedInfoEXT const & rhs )
VULKAN_HPP_NOEXCEPT = default; + + MultiDrawIndexedInfoEXT( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MultiDrawIndexedInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MultiDrawIndexedInfoEXT & operator=( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MultiDrawIndexedInfoEXT & operator=( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT + { + firstIndex = firstIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT + { + indexCount = indexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT + { + vertexOffset = vertexOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMultiDrawIndexedInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultiDrawIndexedInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( firstIndex, indexCount, vertexOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultiDrawIndexedInfoEXT const & ) const = default; +#else + bool operator==( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( firstIndex == rhs.firstIndex ) && ( indexCount == rhs.indexCount ) && ( vertexOffset == rhs.vertexOffset ); +# endif + } + + bool operator!=( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t firstIndex = {}; + uint32_t indexCount = {}; + int32_t vertexOffset = {}; + }; + + struct MultiDrawInfoEXT + { + using NativeType = VkMultiDrawInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( uint32_t firstVertex_ = {}, uint32_t vertexCount_ = {} ) VULKAN_HPP_NOEXCEPT + : firstVertex( firstVertex_ ) + , vertexCount( vertexCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultiDrawInfoEXT( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MultiDrawInfoEXT( *reinterpret_cast( &rhs ) ) {} + + MultiDrawInfoEXT & operator=( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MultiDrawInfoEXT & operator=( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + { + firstVertex = firstVertex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexCount = vertexCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMultiDrawInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkMultiDrawInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( firstVertex, vertexCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultiDrawInfoEXT const & ) const = default; +#else + bool operator==( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( firstVertex == rhs.firstVertex ) && ( vertexCount == rhs.vertexCount ); +# endif + } + + bool operator!=( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t firstVertex = {}; + uint32_t vertexCount = {}; + }; + + struct MultisamplePropertiesEXT + { + using NativeType = VkMultisamplePropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxSampleLocationGridSize( maxSampleLocationGridSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MultisamplePropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxSampleLocationGridSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultisamplePropertiesEXT const & ) const = default; +#else + bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); +# endif + } + + bool operator!=( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; + }; + + template <> + struct CppType + { + using Type = MultisamplePropertiesEXT; + }; + + struct MultisampledRenderToSingleSampledInfoEXT + { + using NativeType = VkMultisampledRenderToSingleSampledInfoEXT; + + static const bool allowDuplicate = 
false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisampledRenderToSingleSampledInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + MultisampledRenderToSingleSampledInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , multisampledRenderToSingleSampledEnable( multisampledRenderToSingleSampledEnable_ ) + , rasterizationSamples( rasterizationSamples_ ) + { + } + + VULKAN_HPP_CONSTEXPR MultisampledRenderToSingleSampledInfoEXT( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultisampledRenderToSingleSampledInfoEXT( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MultisampledRenderToSingleSampledInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MultisampledRenderToSingleSampledInfoEXT & operator=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MultisampledRenderToSingleSampledInfoEXT & operator=( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & + setMultisampledRenderToSingleSampledEnable( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable_ ) VULKAN_HPP_NOEXCEPT + { + multisampledRenderToSingleSampledEnable = multisampledRenderToSingleSampledEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & + setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationSamples = rasterizationSamples_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMultisampledRenderToSingleSampledInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultisampledRenderToSingleSampledInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multisampledRenderToSingleSampledEnable, rasterizationSamples ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultisampledRenderToSingleSampledInfoEXT const & ) const = default; +#else + bool operator==( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multisampledRenderToSingleSampledEnable == rhs.multisampledRenderToSingleSampledEnable ) && + ( rasterizationSamples == rhs.rasterizationSamples ); +# endif + } + + bool operator!=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eMultisampledRenderToSingleSampledInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + }; + + template <> + struct CppType + { + using Type = MultisampledRenderToSingleSampledInfoEXT; + }; + + struct MultiviewPerViewAttributesInfoNVX + { + using NativeType = VkMultiviewPerViewAttributesInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultiviewPerViewAttributesInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , perViewAttributes( perViewAttributes_ ) + , perViewAttributesPositionXOnly( perViewAttributesPositionXOnly_ ) + { + } + + VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultiviewPerViewAttributesInfoNVX( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : MultiviewPerViewAttributesInfoNVX( *reinterpret_cast( &rhs ) ) + { + } + + MultiviewPerViewAttributesInfoNVX & operator=( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MultiviewPerViewAttributesInfoNVX & operator=( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributes( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ ) VULKAN_HPP_NOEXCEPT + { + perViewAttributes = perViewAttributes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & + setPerViewAttributesPositionXOnly( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ ) VULKAN_HPP_NOEXCEPT + { + perViewAttributesPositionXOnly = perViewAttributesPositionXOnly_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMultiviewPerViewAttributesInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultiviewPerViewAttributesInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, perViewAttributes, perViewAttributesPositionXOnly ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultiviewPerViewAttributesInfoNVX const & ) const = default; +#else + bool operator==( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewAttributes == rhs.perViewAttributes ) && + ( perViewAttributesPositionXOnly == rhs.perViewAttributesPositionXOnly ); +# endif + } + + bool operator!=( 
MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultiviewPerViewAttributesInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes = {}; + VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly = {}; + }; + + template <> + struct CppType + { + using Type = MultiviewPerViewAttributesInfoNVX; + }; + + struct MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM + { + using NativeType = VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( uint32_t perViewRenderAreaCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pPerViewRenderAreas_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , perViewRenderAreaCount( perViewRenderAreaCount_ ) + , pPerViewRenderAreas( pPerViewRenderAreas_ ) + { + } + + VULKAN_HPP_CONSTEXPR + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & perViewRenderAreas_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), perViewRenderAreaCount( static_cast( perViewRenderAreas_.size() ) ), pPerViewRenderAreas( perViewRenderAreas_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & + operator=( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & operator=( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & + setPerViewRenderAreaCount( uint32_t perViewRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT + { + perViewRenderAreaCount = perViewRenderAreaCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & + setPPerViewRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pPerViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + pPerViewRenderAreas = pPerViewRenderAreas_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & + setPerViewRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & perViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + perViewRenderAreaCount = static_cast( perViewRenderAreas_.size() ); + pPerViewRenderAreas = perViewRenderAreas_.data(); + return *this; 
+ } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, perViewRenderAreaCount, pPerViewRenderAreas ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & ) const = default; +#else + bool operator==( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewRenderAreaCount == rhs.perViewRenderAreaCount ) && + ( pPerViewRenderAreas == rhs.pPerViewRenderAreas ); +# endif + } + + bool operator!=( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + const void * pNext = {}; + uint32_t perViewRenderAreaCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pPerViewRenderAreas = {}; + }; + + template <> + struct CppType + { + using Type = MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + }; + + struct MutableDescriptorTypeListEXT + { + using NativeType = VkMutableDescriptorTypeListEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( uint32_t descriptorTypeCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : descriptorTypeCount( descriptorTypeCount_ ) + , pDescriptorTypes( pDescriptorTypes_ ) + { + } + + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MutableDescriptorTypeListEXT( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MutableDescriptorTypeListEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeListEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorTypes_ ) + : descriptorTypeCount( static_cast( descriptorTypes_.size() ) ), pDescriptorTypes( descriptorTypes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + MutableDescriptorTypeListEXT & operator=( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MutableDescriptorTypeListEXT & operator=( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorTypeCount = descriptorTypeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & + setPDescriptorTypes( const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorTypes = pDescriptorTypes_; + 
return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeListEXT & setDescriptorTypes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorTypes_ ) VULKAN_HPP_NOEXCEPT + { + descriptorTypeCount = static_cast( descriptorTypes_.size() ); + pDescriptorTypes = descriptorTypes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMutableDescriptorTypeListEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMutableDescriptorTypeListEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( descriptorTypeCount, pDescriptorTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MutableDescriptorTypeListEXT const & ) const = default; +#else + bool operator==( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( descriptorTypeCount == rhs.descriptorTypeCount ) && ( pDescriptorTypes == rhs.pDescriptorTypes ); +# endif + } + + bool operator!=( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t descriptorTypeCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes = {}; + }; + + using MutableDescriptorTypeListVALVE = MutableDescriptorTypeListEXT; + + struct MutableDescriptorTypeCreateInfoEXT + { + using NativeType = VkMutableDescriptorTypeCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMutableDescriptorTypeCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( uint32_t mutableDescriptorTypeListCount_ = {}, + const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , mutableDescriptorTypeListCount( mutableDescriptorTypeListCount_ ) + , pMutableDescriptorTypeLists( pMutableDescriptorTypeLists_ ) + { + } + + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MutableDescriptorTypeCreateInfoEXT( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MutableDescriptorTypeCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mutableDescriptorTypeLists_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , mutableDescriptorTypeListCount( static_cast( mutableDescriptorTypeLists_.size() ) ) + , pMutableDescriptorTypeLists( mutableDescriptorTypeLists_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + MutableDescriptorTypeCreateInfoEXT & operator=( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MutableDescriptorTypeCreateInfoEXT & operator=( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } 
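    // [Editor's note: illustration only, not part of the vendored header.] One
    // MutableDescriptorTypeListEXT is supplied per binding of the descriptor set layout,
    // and the create info is chained through the layout's pNext. A minimal sketch for a
    // single eMutableEXT binding; the binding setup itself is elided:
    //
    //   std::array<vk::DescriptorType, 2> kinds = { vk::DescriptorType::eSampledImage,
    //                                               vk::DescriptorType::eStorageImage };
    //   vk::MutableDescriptorTypeListEXT       list{ kinds };   // types binding 0 may assume
    //   vk::MutableDescriptorTypeCreateInfoEXT mutableInfo{ list };
    //   vk::DescriptorSetLayoutCreateInfo      layoutInfo{ /* bindings using eMutableEXT */ };
    //   layoutInfo.setPNext( &mutableInfo );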
+ +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & + setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) VULKAN_HPP_NOEXCEPT + { + mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & + setPMutableDescriptorTypeLists( const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT + { + pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeCreateInfoEXT & setMutableDescriptorTypeLists( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mutableDescriptorTypeLists_ ) + VULKAN_HPP_NOEXCEPT + { + mutableDescriptorTypeListCount = static_cast( mutableDescriptorTypeLists_.size() ); + pMutableDescriptorTypeLists = mutableDescriptorTypeLists_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMutableDescriptorTypeCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMutableDescriptorTypeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mutableDescriptorTypeListCount, pMutableDescriptorTypeLists ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MutableDescriptorTypeCreateInfoEXT const & ) const = default; +#else + bool operator==( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount ) && + ( pMutableDescriptorTypeLists == rhs.pMutableDescriptorTypeLists ); +# endif + } + + bool operator!=( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoEXT; + const void * pNext = {}; + uint32_t mutableDescriptorTypeListCount = {}; + const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists = {}; + }; + + template <> + struct CppType + { + using Type = MutableDescriptorTypeCreateInfoEXT; + }; + + using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT; + + struct OpaqueCaptureDescriptorDataCreateInfoEXT + { + using NativeType = VkOpaqueCaptureDescriptorDataCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpaqueCaptureDescriptorDataCreateInfoEXT( const void * opaqueCaptureDescriptorData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , opaqueCaptureDescriptorData( opaqueCaptureDescriptorData_ ) + { + } + + VULKAN_HPP_CONSTEXPR 
OpaqueCaptureDescriptorDataCreateInfoEXT( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpaqueCaptureDescriptorDataCreateInfoEXT( VkOpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : OpaqueCaptureDescriptorDataCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + OpaqueCaptureDescriptorDataCreateInfoEXT & operator=( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + OpaqueCaptureDescriptorDataCreateInfoEXT & operator=( VkOpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDescriptorDataCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDescriptorDataCreateInfoEXT & + setOpaqueCaptureDescriptorData( const void * opaqueCaptureDescriptorData_ ) VULKAN_HPP_NOEXCEPT + { + opaqueCaptureDescriptorData = opaqueCaptureDescriptorData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkOpaqueCaptureDescriptorDataCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpaqueCaptureDescriptorDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, opaqueCaptureDescriptorData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpaqueCaptureDescriptorDataCreateInfoEXT const & ) const = default; +#else + bool operator==( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureDescriptorData == rhs.opaqueCaptureDescriptorData ); +# endif + } + + bool operator!=( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT; + const void * pNext = {}; + const void * opaqueCaptureDescriptorData = {}; + }; + + template <> + struct CppType + { + using Type = OpaqueCaptureDescriptorDataCreateInfoEXT; + }; + + struct OpticalFlowExecuteInfoNV + { + using NativeType = VkOpticalFlowExecuteInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowExecuteInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_ = {}, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pRegions_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + { + } + + VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpticalFlowExecuteInfoNV( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OpticalFlowExecuteInfoNV( *reinterpret_cast( 
&rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + OpticalFlowExecuteInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + OpticalFlowExecuteInfoNV & operator=( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + OpticalFlowExecuteInfoNV & operator=( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setFlags( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + OpticalFlowExecuteInfoNV & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkOpticalFlowExecuteInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowExecuteInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpticalFlowExecuteInfoNV const & ) const = default; +#else + bool operator==( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowExecuteInfoNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = OpticalFlowExecuteInfoNV; + }; + + struct OpticalFlowImageFormatInfoNV + { + using NativeType = VkOpticalFlowImageFormatInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatInfoNV; + +#if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , usage( usage_ ) + { + } + + VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpticalFlowImageFormatInfoNV( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OpticalFlowImageFormatInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + OpticalFlowImageFormatInfoNV & operator=( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + OpticalFlowImageFormatInfoNV & operator=( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setUsage( VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkOpticalFlowImageFormatInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowImageFormatInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, usage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpticalFlowImageFormatInfoNV const & ) const = default; +#else + bool operator==( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage ); +# endif + } + + bool operator!=( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowImageFormatInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage = {}; + }; + + template <> + struct CppType + { + using Type = OpticalFlowImageFormatInfoNV; + }; + + struct OpticalFlowImageFormatPropertiesNV + { + using NativeType = VkOpticalFlowImageFormatPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , format( format_ ) + { + } + + VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpticalFlowImageFormatPropertiesNV( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OpticalFlowImageFormatPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + OpticalFlowImageFormatPropertiesNV & operator=( 
OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + OpticalFlowImageFormatPropertiesNV & operator=( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkOpticalFlowImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, format ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpticalFlowImageFormatPropertiesNV const & ) const = default; +#else + bool operator==( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ); +# endif + } + + bool operator!=( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowImageFormatPropertiesNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = OpticalFlowImageFormatPropertiesNV; + }; + + struct OpticalFlowSessionCreateInfoNV + { + using NativeType = VkOpticalFlowSessionCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV( + uint32_t width_ = {}, + uint32_t height_ = {}, + VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format flowVectorFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format costFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize_ = {}, + VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize_ = {}, + VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , width( width_ ) + , height( height_ ) + , imageFormat( imageFormat_ ) + , flowVectorFormat( flowVectorFormat_ ) + , costFormat( costFormat_ ) + , outputGridSize( outputGridSize_ ) + , hintGridSize( hintGridSize_ ) + , performanceLevel( performanceLevel_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpticalFlowSessionCreateInfoNV( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OpticalFlowSessionCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + OpticalFlowSessionCreateInfoNV & operator=( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + 
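    // [Editor's note: illustration only, not part of the vendored header.] A minimal
    // sketch of describing a VK_NV_optical_flow session; the 1920x1080 extent is an
    // arbitrary example, and `inputFormat` / `flowFormat` stand in for formats the
    // implementation actually reports as supported:
    //
    //   auto sessionInfo = vk::OpticalFlowSessionCreateInfoNV{}
    //                        .setWidth( 1920 )
    //                        .setHeight( 1080 )
    //                        .setImageFormat( inputFormat )
    //                        .setFlowVectorFormat( flowFormat )
    //                        .setOutputGridSize( vk::OpticalFlowGridSizeFlagBitsNV::e4X4 )
    //                        .setPerformanceLevel( vk::OpticalFlowPerformanceLevelNV::eFast );
    //   vk::OpticalFlowSessionNV session = device.createOpticalFlowSessionNV( sessionInfo );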
+
+  struct OpticalFlowSessionCreateInfoNV
+  {
+    using NativeType = VkOpticalFlowSessionCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV(
+      uint32_t width_ = {}, uint32_t height_ = {},
+      VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+      VULKAN_HPP_NAMESPACE::Format flowVectorFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+      VULKAN_HPP_NAMESPACE::Format costFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+      VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize_ = {},
+      VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize_ = {},
+      VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown,
+      VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ = {},
+      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), width( width_ ), height( height_ ), imageFormat( imageFormat_ ), flowVectorFormat( flowVectorFormat_ ),
+        costFormat( costFormat_ ), outputGridSize( outputGridSize_ ), hintGridSize( hintGridSize_ ), performanceLevel( performanceLevel_ ), flags( flags_ ) {}
+
+    VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    OpticalFlowSessionCreateInfoNV( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : OpticalFlowSessionCreateInfoNV( *reinterpret_cast<OpticalFlowSessionCreateInfoNV const *>( &rhs ) ) {}
+
+    OpticalFlowSessionCreateInfoNV & operator=( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    OpticalFlowSessionCreateInfoNV & operator=( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT { width = width_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT { height = height_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT { imageFormat = imageFormat_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlowVectorFormat( VULKAN_HPP_NAMESPACE::Format flowVectorFormat_ ) VULKAN_HPP_NOEXCEPT { flowVectorFormat = flowVectorFormat_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setCostFormat( VULKAN_HPP_NAMESPACE::Format costFormat_ ) VULKAN_HPP_NOEXCEPT { costFormat = costFormat_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setOutputGridSize( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize_ ) VULKAN_HPP_NOEXCEPT { outputGridSize = outputGridSize_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setHintGridSize( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize_ ) VULKAN_HPP_NOEXCEPT { hintGridSize = hintGridSize_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setPerformanceLevel( VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ ) VULKAN_HPP_NOEXCEPT { performanceLevel = performanceLevel_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkOpticalFlowSessionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( this ); }
+    operator VkOpticalFlowSessionCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkOpticalFlowSessionCreateInfoNV *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Format const &,
+               VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &,
+               VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &, VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV const &,
+               VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, width, height, imageFormat, flowVectorFormat, costFormat, outputGridSize, hintGridSize, performanceLevel, flags );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( OpticalFlowSessionCreateInfoNV const & ) const = default;
+#else
+    bool operator==( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( width == rhs.width ) && ( height == rhs.height ) && ( imageFormat == rhs.imageFormat ) &&
+             ( flowVectorFormat == rhs.flowVectorFormat ) && ( costFormat == rhs.costFormat ) && ( outputGridSize == rhs.outputGridSize ) &&
+             ( hintGridSize == rhs.hintGridSize ) && ( performanceLevel == rhs.performanceLevel ) && ( flags == rhs.flags );
+# endif
+    }
+
+    bool operator!=( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowSessionCreateInfoNV;
+    void * pNext = {};
+    uint32_t width = {};
+    uint32_t height = {};
+    VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Format flowVectorFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Format costFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize = {};
+    VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize = {};
+    VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown;
+    VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eOpticalFlowSessionCreateInfoNV>
+  {
+    using Type = OpticalFlowSessionCreateInfoNV;
+  };
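Usage sketch (editor's illustration, not part of the vendored header): each setter above returns *this, so a create-info can be filled in one chained expression. The width, height, format, and performance level below are placeholder values; a real application should choose formats reported for the device (e.g. via vkGetPhysicalDeviceOpticalFlowImageFormatsNV).

    #include <vulkan/vulkan.hpp>

    vk::OpticalFlowSessionCreateInfoNV makeFlowSessionInfo()
    {
      // Hypothetical parameter values; the chaining pattern is the point here.
      return vk::OpticalFlowSessionCreateInfoNV{}
        .setWidth( 1920 )
        .setHeight( 1080 )
        .setImageFormat( vk::Format::eB8G8R8A8Unorm )
        .setPerformanceLevel( vk::OpticalFlowPerformanceLevelNV::eFast );
    }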
+
+  struct OpticalFlowSessionCreatePrivateDataInfoNV
+  {
+    using NativeType = VkOpticalFlowSessionCreatePrivateDataInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( uint32_t id_ = {}, uint32_t size_ = {}, const void * pPrivateData_ = {},
+                                                                    void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), id( id_ ), size( size_ ), pPrivateData( pPrivateData_ ) {}
+
+    VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    OpticalFlowSessionCreatePrivateDataInfoNV( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : OpticalFlowSessionCreatePrivateDataInfoNV( *reinterpret_cast<OpticalFlowSessionCreatePrivateDataInfoNV const *>( &rhs ) ) {}
+
+    OpticalFlowSessionCreatePrivateDataInfoNV & operator=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    OpticalFlowSessionCreatePrivateDataInfoNV & operator=( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setId( uint32_t id_ ) VULKAN_HPP_NOEXCEPT { id = id_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT { size = size_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPPrivateData( const void * pPrivateData_ ) VULKAN_HPP_NOEXCEPT { pPrivateData = pPrivateData_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkOpticalFlowSessionCreatePrivateDataInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkOpticalFlowSessionCreatePrivateDataInfoNV *>( this ); }
+    operator VkOpticalFlowSessionCreatePrivateDataInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkOpticalFlowSessionCreatePrivateDataInfoNV *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, const void * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, id, size, pPrivateData );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( OpticalFlowSessionCreatePrivateDataInfoNV const & ) const = default;
+#else
+    bool operator==( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( id == rhs.id ) && ( size == rhs.size ) && ( pPrivateData == rhs.pPrivateData );
+# endif
+    }
+
+    bool operator!=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV;
+    void * pNext = {};
+    uint32_t id = {};
+    uint32_t size = {};
+    const void * pPrivateData = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV>
+  {
+    using Type = OpticalFlowSessionCreatePrivateDataInfoNV;
+  };
+
+  struct OutOfBandQueueTypeInfoNV
+  {
+    using NativeType = VkOutOfBandQueueTypeInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOutOfBandQueueTypeInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR OutOfBandQueueTypeInfoNV( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType_ = VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV::eRender,
+                                                   const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), queueType( queueType_ ) {}
+
+    VULKAN_HPP_CONSTEXPR OutOfBandQueueTypeInfoNV( OutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    OutOfBandQueueTypeInfoNV( VkOutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : OutOfBandQueueTypeInfoNV( *reinterpret_cast<OutOfBandQueueTypeInfoNV const *>( &rhs ) ) {}
+
+    OutOfBandQueueTypeInfoNV & operator=( OutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    OutOfBandQueueTypeInfoNV & operator=( VkOutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV & setQueueType( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType_ ) VULKAN_HPP_NOEXCEPT { queueType = queueType_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkOutOfBandQueueTypeInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( this ); }
+    operator VkOutOfBandQueueTypeInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkOutOfBandQueueTypeInfoNV *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, queueType );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( OutOfBandQueueTypeInfoNV const & ) const = default;
+#else
+    bool operator==( OutOfBandQueueTypeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueType == rhs.queueType );
+# endif
+    }
+
+    bool operator!=( OutOfBandQueueTypeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOutOfBandQueueTypeInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType = VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV::eRender;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eOutOfBandQueueTypeInfoNV>
+  {
+    using Type = OutOfBandQueueTypeInfoNV;
+  };
+
+  struct PastPresentationTimingGOOGLE
+  {
+    using NativeType = VkPastPresentationTimingGOOGLE;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}, uint64_t actualPresentTime_ = {},
+                                                       uint64_t earliestPresentTime_ = {}, uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT
+      : presentID( presentID_ ), desiredPresentTime( desiredPresentTime_ ), actualPresentTime( actualPresentTime_ ),
+        earliestPresentTime( earliestPresentTime_ ), presentMargin( presentMargin_ ) {}
+
+    VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PastPresentationTimingGOOGLE( *reinterpret_cast<PastPresentationTimingGOOGLE const *>( &rhs ) ) {}
+
+    PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPastPresentationTimingGOOGLE *>( this ); }
+    operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPastPresentationTimingGOOGLE *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<uint32_t const &, uint64_t const &, uint64_t const &, uint64_t const &, uint64_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( presentID, desiredPresentTime, actualPresentTime, earliestPresentTime, presentMargin );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PastPresentationTimingGOOGLE const & ) const = default;
+#else
+    bool operator==( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ) && ( actualPresentTime == rhs.actualPresentTime ) &&
+             ( earliestPresentTime == rhs.earliestPresentTime ) && ( presentMargin == rhs.presentMargin );
+# endif
+    }
+
+    bool operator!=( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    uint32_t presentID = {};
+    uint64_t desiredPresentTime = {};
+    uint64_t actualPresentTime = {};
+    uint64_t earliestPresentTime = {};
+    uint64_t presentMargin = {};
+  };
+
+  struct PerformanceConfigurationAcquireInfoINTEL
+  {
+    using NativeType = VkPerformanceConfigurationAcquireInfoINTEL;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(
+      VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated,
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), type( type_ ) {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceConfigurationAcquireInfoINTEL( *reinterpret_cast<PerformanceConfigurationAcquireInfoINTEL const *>( &rhs ) ) {}
+
+    PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT { type = type_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( this ); }
+    operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, type );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const & ) const = default;
+#else
+    bool operator==( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type );
+# endif
+    }
+
+    bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceConfigurationAcquireInfoINTEL>
+  {
+    using Type = PerformanceConfigurationAcquireInfoINTEL;
+  };
+
+  struct PerformanceCounterDescriptionKHR
+  {
+    using NativeType = VkPerformanceCounterDescriptionKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {},
+                                                              std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_ = {},
+                                                              std::array<char, VK_MAX_DESCRIPTION_SIZE> const & category_ = {},
+                                                              std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
+                                                              void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), flags( flags_ ), name( name_ ), category( category_ ), description( description_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceCounterDescriptionKHR( *reinterpret_cast<PerformanceCounterDescriptionKHR const *>( &rhs ) ) {}
+
+    PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR *>( this ); }
+    operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, name, category, description );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PerformanceCounterDescriptionKHR const & ) const = default;
+#else
+    bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( name == rhs.name ) && ( category == rhs.category ) &&
+             ( description == rhs.description );
+# endif
+    }
+
+    bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> category = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceCounterDescriptionKHR>
+  {
+    using Type = PerformanceCounterDescriptionKHR;
+  };
+
+  struct PerformanceCounterKHR
+  {
+    using NativeType = VkPerformanceCounterKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric,
+                                                   VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer,
+                                                   VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32,
+                                                   std::array<uint8_t, VK_UUID_SIZE> const & uuid_ = {},
+                                                   void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), unit( unit_ ), scope( scope_ ), storage( storage_ ), uuid( uuid_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceCounterKHR( *reinterpret_cast<PerformanceCounterKHR const *>( &rhs ) ) {}
+
+    PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPerformanceCounterKHR *>( this ); }
+    operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPerformanceCounterKHR *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR const &,
+               VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR const &, VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, unit, scope, storage, uuid );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PerformanceCounterKHR const & ) const = default;
+#else
+    bool operator==( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( unit == rhs.unit ) && ( scope == rhs.scope ) && ( storage == rhs.storage ) &&
+             ( uuid == rhs.uuid );
+# endif
+    }
+
+    bool operator!=( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric;
+    VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer;
+    VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> uuid = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceCounterKHR>
+  {
+    using Type = PerformanceCounterKHR;
+  };
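Usage sketch (editor's illustration, assuming a device exposing VK_KHR_performance_query and an initialized dispatcher): the two structs above come back in matching pairs from vulkan.hpp's counter-enumeration call.

    #include <vulkan/vulkan.hpp>
    #include <cstddef>
    #include <cstdio>

    void listCounters( vk::PhysicalDevice gpu, uint32_t queueFamilyIndex )
    {
      // Returns a pair of vectors: one PerformanceCounterKHR and one
      // PerformanceCounterDescriptionKHR per counter, index-aligned.
      auto [counters, descriptions] = gpu.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
      for ( std::size_t i = 0; i < counters.size(); ++i )
        // name/category are ArrayWrapper1D<char, N>; the spec null-terminates them.
        std::printf( "%s / %s\n", descriptions[i].category.data(), descriptions[i].name.data() );
    }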
+
+  union PerformanceCounterResultKHR
+  {
+    using NativeType = VkPerformanceCounterResultKHR;
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int32_t int32_ = {} ) : int32( int32_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int64_t int64_ ) : int64( int64_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint32_t uint32_ ) : uint32( uint32_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint64_t uint64_ ) : uint64( uint64_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( float float32_ ) : float32( float32_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( double float64_ ) : float64( float64_ ) {}
+#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT { int32 = int32_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT { int64 = int64_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT { uint32 = uint32_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT { uint64 = uint64_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT { float32 = float32_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT { float64 = float64_; return *this; }
+#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+    operator VkPerformanceCounterResultKHR const &() const { return *reinterpret_cast<const VkPerformanceCounterResultKHR *>( this ); }
+    operator VkPerformanceCounterResultKHR &() { return *reinterpret_cast<VkPerformanceCounterResultKHR *>( this ); }
+
+    int32_t int32;
+    int64_t int64;
+    uint32_t uint32;
+    uint64_t uint64;
+    float float32;
+    double float64;
+  };
+
+  struct PerformanceMarkerInfoINTEL
+  {
+    using NativeType = VkPerformanceMarkerInfoINTEL;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), marker( marker_ ) {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceMarkerInfoINTEL( *reinterpret_cast<PerformanceMarkerInfoINTEL const *>( &rhs ) ) {}
+
+    PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT { marker = marker_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( this ); }
+    operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPerformanceMarkerInfoINTEL *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, marker );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PerformanceMarkerInfoINTEL const & ) const = default;
+#else
+    bool operator==( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker );
+# endif
+    }
+
+    bool operator!=( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
+    const void * pNext = {};
+    uint64_t marker = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
+  {
+    using Type = PerformanceMarkerInfoINTEL;
+  };
+
+  struct PerformanceOverrideInfoINTEL
+  {
+    using NativeType = VkPerformanceOverrideInfoINTEL;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL(
+      VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware,
+      VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, uint64_t parameter_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), type( type_ ), enable( enable_ ), parameter( parameter_ ) {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceOverrideInfoINTEL( *reinterpret_cast<PerformanceOverrideInfoINTEL const *>( &rhs ) ) {}
+
+    PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT { type = type_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT { enable = enable_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT { parameter = parameter_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( this ); }
+    operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPerformanceOverrideInfoINTEL *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, uint64_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, type, enable, parameter );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PerformanceOverrideInfoINTEL const & ) const = default;
+#else
+    bool operator==( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( enable == rhs.enable ) && ( parameter == rhs.parameter );
+# endif
+    }
+
+    bool operator!=( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
+    VULKAN_HPP_NAMESPACE::Bool32 enable = {};
+    uint64_t parameter = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceOverrideInfoINTEL>
+  {
+    using Type = PerformanceOverrideInfoINTEL;
+  };
+
+  struct PerformanceQuerySubmitInfoKHR
+  {
+    using NativeType = VkPerformanceQuerySubmitInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), counterPassIndex( counterPassIndex_ ) {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceQuerySubmitInfoKHR( *reinterpret_cast<PerformanceQuerySubmitInfoKHR const *>( &rhs ) ) {}
+
+    PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT { counterPassIndex = counterPassIndex_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR *>( this ); }
+    operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, counterPassIndex );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default;
+#else
+    bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( counterPassIndex == rhs.counterPassIndex );
+# endif
+    }
+
+    bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR;
+    const void * pNext = {};
+    uint32_t counterPassIndex = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceQuerySubmitInfoKHR>
+  {
+    using Type = PerformanceQuerySubmitInfoKHR;
+  };
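Usage sketch (editor's illustration): PerformanceQuerySubmitInfoKHR extends VkSubmitInfo through the pNext chain, and vk::StructureChain wires that chain up automatically, so the counter pass can be selected per submission.

    #include <vulkan/vulkan.hpp>

    void submitCounterPass( vk::Queue queue, vk::CommandBuffer cmd, uint32_t pass )
    {
      // StructureChain links SubmitInfo.pNext to the performance-query struct.
      vk::StructureChain<vk::SubmitInfo, vk::PerformanceQuerySubmitInfoKHR> chain{
        vk::SubmitInfo{}.setCommandBufferCount( 1 ).setPCommandBuffers( &cmd ),
        vk::PerformanceQuerySubmitInfoKHR{}.setCounterPassIndex( pass ) };
      queue.submit( chain.get<vk::SubmitInfo>() );
    }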
+
+  struct PerformanceStreamMarkerInfoINTEL
+  {
+    using NativeType = VkPerformanceStreamMarkerInfoINTEL;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), marker( marker_ ) {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast<PerformanceStreamMarkerInfoINTEL const *>( &rhs ) ) {}
+
+    PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT { marker = marker_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( this ); }
+    operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, marker );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PerformanceStreamMarkerInfoINTEL const & ) const = default;
+#else
+    bool operator==( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker );
+# endif
+    }
+
+    bool operator!=( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+    const void * pNext = {};
+    uint32_t marker = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceStreamMarkerInfoINTEL>
+  {
+    using Type = PerformanceStreamMarkerInfoINTEL;
+  };
+
+  union PerformanceValueDataINTEL
+  {
+    using NativeType = VkPerformanceValueDataINTEL;
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint32_t value32_ = {} ) : value32( value32_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint64_t value64_ ) : value64( value64_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( float valueFloat_ ) : valueFloat( valueFloat_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( const char * valueString_ ) : valueString( valueString_ ) {}
+#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT { value32 = value32_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT { value64 = value64_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT { valueFloat = valueFloat_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT { valueBool = valueBool_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueString( const char * valueString_ ) VULKAN_HPP_NOEXCEPT { valueString = valueString_; return *this; }
+#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+    operator VkPerformanceValueDataINTEL const &() const { return *reinterpret_cast<const VkPerformanceValueDataINTEL *>( this ); }
+    operator VkPerformanceValueDataINTEL &() { return *reinterpret_cast<VkPerformanceValueDataINTEL *>( this ); }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    uint32_t value32;
+    uint64_t value64;
+    float valueFloat;
+    VULKAN_HPP_NAMESPACE::Bool32 valueBool;
+    const char * valueString;
+#else
+    uint32_t value32;
+    uint64_t value64;
+    float valueFloat;
+    VkBool32 valueBool;
+    const char * valueString;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+
+  struct PerformanceValueINTEL
+  {
+    using NativeType = VkPerformanceValueINTEL;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32,
+                                                   VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT
+      : type( type_ ), data( data_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceValueINTEL( *reinterpret_cast<PerformanceValueINTEL const *>( &rhs ) ) {}
+
+    PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT { type = type_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & data_ ) VULKAN_HPP_NOEXCEPT { data = data_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPerformanceValueINTEL *>( this ); }
+    operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPerformanceValueINTEL *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL const &, VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( type, data );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32;
+    VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {};
+  };
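Usage sketch (editor's illustration): PerformanceValueDataINTEL is an untagged union, so the `type` member of the enclosing PerformanceValueINTEL decides which field may legally be read. A small reader, covering only a few of the cases:

    #include <vulkan/vulkan.hpp>
    #include <cstdio>

    void printValue( vk::PerformanceValueINTEL const & v )
    {
      switch ( v.type )  // the discriminant lives outside the union
      {
        case vk::PerformanceValueTypeINTEL::eUint32: std::printf( "%u\n", v.data.value32 ); break;
        case vk::PerformanceValueTypeINTEL::eFloat:  std::printf( "%f\n", v.data.valueFloat ); break;
        default: break;  // remaining cases (uint64 / bool / string) omitted in this sketch
      }
    }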
+
+  struct PhysicalDevice16BitStorageFeatures
+  {
+    using NativeType = VkPhysicalDevice16BitStorageFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
+                                                             VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
+                                                             VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
+                                                             VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {},
+                                                             void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), storageBuffer16BitAccess( storageBuffer16BitAccess_ ), uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ),
+        storagePushConstant16( storagePushConstant16_ ), storageInputOutput16( storageInputOutput16_ ) {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevice16BitStorageFeatures( *reinterpret_cast<PhysicalDevice16BitStorageFeatures const *>( &rhs ) ) {}
+
+    PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevice16BitStorageFeatures & operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT { storageBuffer16BitAccess = storageBuffer16BitAccess_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT { storagePushConstant16 = storagePushConstant16_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT { storageInputOutput16 = storageInputOutput16_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures *>( this ); }
+    operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16 );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevice16BitStorageFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) &&
+             ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && ( storagePushConstant16 == rhs.storagePushConstant16 ) &&
+             ( storageInputOutput16 == rhs.storageInputOutput16 );
+# endif
+    }
+
+    bool operator!=( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevice16BitStorageFeatures>
+  {
+    using Type = PhysicalDevice16BitStorageFeatures;
+  };
+
+  using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
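Usage sketch (editor's illustration): feature structs like the one above are normally queried by chaining them behind vk::PhysicalDeviceFeatures2 with getFeatures2.

    #include <vulkan/vulkan.hpp>

    bool has16BitStorage( vk::PhysicalDevice gpu )
    {
      // getFeatures2 fills every struct in the chain in one call.
      auto chain = gpu.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDevice16BitStorageFeatures>();
      return chain.get<vk::PhysicalDevice16BitStorageFeatures>().storageBuffer16BitAccess == VK_TRUE;
    }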
+
+  struct PhysicalDevice4444FormatsFeaturesEXT
+  {
+    using NativeType = VkPhysicalDevice4444FormatsFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {},
+                                                               VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {},
+                                                               void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), formatA4R4G4B4( formatA4R4G4B4_ ), formatA4B4G4R4( formatA4B4G4R4_ ) {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs ) ) {}
+
+    PhysicalDevice4444FormatsFeaturesEXT & operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevice4444FormatsFeaturesEXT & operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT { formatA4R4G4B4 = formatA4R4G4B4_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT { formatA4B4G4R4 = formatA4B4G4R4_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDevice4444FormatsFeaturesEXT *>( this ); }
+    operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDevice4444FormatsFeaturesEXT *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, formatA4R4G4B4, formatA4B4G4R4 );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 ) && ( formatA4B4G4R4 == rhs.formatA4B4G4R4 );
+# endif
+    }
+
+    bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevice4444FormatsFeaturesEXT>
+  {
+    using Type = PhysicalDevice4444FormatsFeaturesEXT;
+  };
+
+  struct PhysicalDevice8BitStorageFeatures
+  {
+    using NativeType = VkPhysicalDevice8BitStorageFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
+                                                            VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
+                                                            VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {},
+                                                            void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), storageBuffer8BitAccess( storageBuffer8BitAccess_ ), uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ),
+        storagePushConstant8( storagePushConstant8_ ) {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevice8BitStorageFeatures( *reinterpret_cast<PhysicalDevice8BitStorageFeatures const *>( &rhs ) ) {}
+
+    PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT { storageBuffer8BitAccess = storageBuffer8BitAccess_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT { storagePushConstant8 = storagePushConstant8_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeatures *>( this ); }
+    operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDevice8BitStorageFeatures *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8 );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevice8BitStorageFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) &&
+             ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && ( storagePushConstant8 == rhs.storagePushConstant8 );
+# endif
+    }
+
+    bool operator!=( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevice8BitStorageFeatures>
+  {
+    using Type = PhysicalDevice8BitStorageFeatures;
+  };
+
+  using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures;
+
+  struct PhysicalDeviceASTCDecodeFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceASTCDecodeFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {},
+                                                              void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), decodeModeSharedExponent( decodeModeSharedExponent_ ) {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast<PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs ) ) {}
+
+    PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT { decodeModeSharedExponent = decodeModeSharedExponent_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this ); }
+    operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, decodeModeSharedExponent );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT>
+  {
+    using Type = PhysicalDeviceASTCDecodeFeaturesEXT;
+  };
+
+  struct PhysicalDeviceAccelerationStructureFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceAccelerationStructureFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ = {},
+                                                                         VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ = {},
+                                                                         VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ = {},
+                                                                         VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ = {},
+                                                                         VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {},
+                                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), accelerationStructure( accelerationStructure_ ), accelerationStructureCaptureReplay( accelerationStructureCaptureReplay_ ),
+        accelerationStructureIndirectBuild( accelerationStructureIndirectBuild_ ), accelerationStructureHostCommands( accelerationStructureHostCommands_ ),
+        descriptorBindingAccelerationStructureUpdateAfterBind( descriptorBindingAccelerationStructureUpdateAfterBind_ ) {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceAccelerationStructureFeaturesKHR( *reinterpret_cast<PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs ) ) {}
+
+    PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ ) VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT { accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureIndirectBuild( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ ) VULKAN_HPP_NOEXCEPT { accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureHostCommands( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ ) VULKAN_HPP_NOEXCEPT { accelerationStructureHostCommands = accelerationStructureHostCommands_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setDescriptorBindingAccelerationStructureUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructureFeaturesKHR *>( this ); }
+    operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceAccelerationStructureFeaturesKHR *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, accelerationStructure, accelerationStructureCaptureReplay, accelerationStructureIndirectBuild,
+                       accelerationStructureHostCommands, descriptorBindingAccelerationStructureUpdateAfterBind );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceAccelerationStructureFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) &&
+             ( accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay ) &&
+             ( accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild ) &&
+             ( accelerationStructureHostCommands == rhs.accelerationStructureHostCommands ) &&
+             ( descriptorBindingAccelerationStructureUpdateAfterBind == rhs.descriptorBindingAccelerationStructureUpdateAfterBind );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure = {};
+    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild = {};
+    VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR>
+  {
+    using Type = PhysicalDeviceAccelerationStructureFeaturesKHR;
+  };
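Usage sketch (editor's illustration): the same feature struct doubles as an enable request when chained into vk::DeviceCreateInfo; enabling VK_KHR_acceleration_structure in ppEnabledExtensionNames and its dependencies is assumed to happen elsewhere.

    #include <vulkan/vulkan.hpp>

    vk::Device makeDevice( vk::PhysicalDevice gpu, vk::DeviceQueueCreateInfo const & queueInfo )
    {
      vk::PhysicalDeviceAccelerationStructureFeaturesKHR asFeatures{};
      asFeatures.setAccelerationStructure( VK_TRUE );  // request the feature at device creation
      vk::DeviceCreateInfo info{};
      info.setQueueCreateInfoCount( 1 ).setPQueueCreateInfos( &queueInfo ).setPNext( &asFeatures );
      return gpu.createDevice( info );
    }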
+  struct PhysicalDeviceAccelerationStructurePropertiesKHR
+  {
+    using NativeType = VkPhysicalDeviceAccelerationStructurePropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( uint64_t maxGeometryCount_ = {},
+                                                                           uint64_t maxInstanceCount_ = {},
+                                                                           uint64_t maxPrimitiveCount_ = {},
+                                                                           uint32_t maxPerStageDescriptorAccelerationStructures_ = {},
+                                                                           uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {},
+                                                                           uint32_t maxDescriptorSetAccelerationStructures_ = {},
+                                                                           uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {},
+                                                                           uint32_t minAccelerationStructureScratchOffsetAlignment_ = {},
+                                                                           void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , maxGeometryCount( maxGeometryCount_ )
+      , maxInstanceCount( maxInstanceCount_ )
+      , maxPrimitiveCount( maxPrimitiveCount_ )
+      , maxPerStageDescriptorAccelerationStructures( maxPerStageDescriptorAccelerationStructures_ )
+      , maxPerStageDescriptorUpdateAfterBindAccelerationStructures( maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ )
+      , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
+      , maxDescriptorSetUpdateAfterBindAccelerationStructures( maxDescriptorSetUpdateAfterBindAccelerationStructures_ )
+      , minAccelerationStructureScratchOffsetAlignment( minAccelerationStructureScratchOffsetAlignment_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceAccelerationStructurePropertiesKHR( *reinterpret_cast<PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructurePropertiesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceAccelerationStructurePropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               uint64_t const &,
+               uint64_t const &,
+               uint64_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       maxGeometryCount,
+                       maxInstanceCount,
+                       maxPrimitiveCount,
+                       maxPerStageDescriptorAccelerationStructures,
+                       maxPerStageDescriptorUpdateAfterBindAccelerationStructures,
+                       maxDescriptorSetAccelerationStructures,
+                       maxDescriptorSetUpdateAfterBindAccelerationStructures,
+                       minAccelerationStructureScratchOffsetAlignment );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceAccelerationStructurePropertiesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGeometryCount == rhs.maxGeometryCount ) && ( maxInstanceCount == rhs.maxInstanceCount ) &&
+             ( maxPrimitiveCount == rhs.maxPrimitiveCount ) &&
+             ( maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures ) &&
+             ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures == rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures ) &&
+             ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ) &&
+             ( maxDescriptorSetUpdateAfterBindAccelerationStructures == rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures ) &&
+             ( minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;
+    void * pNext = {};
+    uint64_t maxGeometryCount = {};
+    uint64_t maxInstanceCount = {};
+    uint64_t maxPrimitiveCount = {};
+    uint32_t maxPerStageDescriptorAccelerationStructures = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {};
+    uint32_t maxDescriptorSetAccelerationStructures = {};
+    uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures = {};
+    uint32_t minAccelerationStructureScratchOffsetAlignment = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR>
+  {
+    using Type = PhysicalDeviceAccelerationStructurePropertiesKHR;
+  };
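As a usage aside (not part of the vendored header): `Properties` structs are read-only snapshots, which is why they carry no setters; they are filled through `vk::PhysicalDeviceProperties2`. A minimal sketch under the same `physicalDevice` assumption as above:

```cpp
vk::PhysicalDeviceAccelerationStructurePropertiesKHR asProps{};
vk::PhysicalDeviceProperties2 props2{};
props2.pNext = &asProps;            // chain the extension struct
physicalDevice.getProperties2( &props2 );

uint64_t maxInstances = asProps.maxInstanceCount;  // e.g. cap TLAS instance counts
```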
+  struct PhysicalDeviceAddressBindingReportFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceAddressBindingReportFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAddressBindingReportFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), reportAddressBinding( reportAddressBinding_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAddressBindingReportFeaturesEXT( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceAddressBindingReportFeaturesEXT( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceAddressBindingReportFeaturesEXT( *reinterpret_cast<PhysicalDeviceAddressBindingReportFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & setReportAddressBinding( VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      reportAddressBinding = reportAddressBinding_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceAddressBindingReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceAddressBindingReportFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceAddressBindingReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceAddressBindingReportFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, reportAddressBinding );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceAddressBindingReportFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reportAddressBinding == rhs.reportAddressBinding );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT>
+  {
+    using Type = PhysicalDeviceAddressBindingReportFeaturesEXT;
+  };
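As a usage aside (not part of the vendored header): the comparison operators above are memberwise, including `pNext`, implemented via `operator<=>` when available and otherwise via `reflect()` or the explicit member comparison. A minimal sketch (`<cassert>` assumed):

```cpp
vk::PhysicalDeviceAddressBindingReportFeaturesEXT a{};
vk::PhysicalDeviceAddressBindingReportFeaturesEXT b{};
b.reportAddressBinding = VK_TRUE;
assert( a != b );  // differs in reportAddressBinding
b.reportAddressBinding = VK_FALSE;
assert( a == b );  // all members (sType, pNext, reportAddressBinding) now match
```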
+  struct PhysicalDeviceAmigoProfilingFeaturesSEC
+  {
+    using NativeType = VkPhysicalDeviceAmigoProfilingFeaturesSEC;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), amigoProfiling( amigoProfiling_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceAmigoProfilingFeaturesSEC( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceAmigoProfilingFeaturesSEC( *reinterpret_cast<PhysicalDeviceAmigoProfilingFeaturesSEC const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setAmigoProfiling( VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      amigoProfiling = amigoProfiling_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceAmigoProfilingFeaturesSEC const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceAmigoProfilingFeaturesSEC *>( this );
+    }
+
+    operator VkPhysicalDeviceAmigoProfilingFeaturesSEC &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceAmigoProfilingFeaturesSEC *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, amigoProfiling );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>(
PhysicalDeviceAmigoProfilingFeaturesSEC const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( amigoProfiling == rhs.amigoProfiling );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC>
+  {
+    using Type = PhysicalDeviceAmigoProfilingFeaturesSEC;
+  };
+
+  struct PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopDynamicState_ = {},
+                                                                                      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), attachmentFeedbackLoopDynamicState( attachmentFeedbackLoopDynamicState_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & operator=( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT &
+      setAttachmentFeedbackLoopDynamicState( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopDynamicState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentFeedbackLoopDynamicState = attachmentFeedbackLoopDynamicState_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext,
attachmentFeedbackLoopDynamicState );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentFeedbackLoopDynamicState == rhs.attachmentFeedbackLoopDynamicState );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopDynamicState = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT>
+  {
+    using Type = PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;
+  };
+
+  struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout_ = {},
+                                                                                void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), attachmentFeedbackLoopLayout( attachmentFeedbackLoopLayout_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( *reinterpret_cast<PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & operator=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & operator=( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT &
+      setAttachmentFeedbackLoopLayout( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentFeedbackLoopLayout = attachmentFeedbackLoopLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, attachmentFeedbackLoopLayout );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentFeedbackLoopLayout == rhs.attachmentFeedbackLoopLayout );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT>
+  {
+    using Type = PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+  };
+
+  struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {},
+                                                                          void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceBlendOperationAdvancedFeaturesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT &
+      setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, advancedBlendCoherentOperations );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT>
+  {
+    using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+  };
+
+  struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = {},
+                                                                            VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {},
+                                                                            VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {},
+                                                                            VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {},
+                                                                            VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {},
+                                                                            VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {},
+                                                                            void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ )
+      , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ )
+      , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ )
+      , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ )
+      , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ )
+      , advancedBlendAllOperations( advancedBlendAllOperations_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceBlendOperationAdvancedPropertiesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       advancedBlendMaxColorAttachments,
+                       advancedBlendIndependentBlend,
+                       advancedBlendNonPremultipliedSrcColor,
+                       advancedBlendNonPremultipliedDstColor,
+                       advancedBlendCorrelatedOverlap,
+                       advancedBlendAllOperations );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) &&
+             ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) &&
+             ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) &&
+             ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) &&
+             ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+    void * pNext = {};
+    uint32_t advancedBlendMaxColorAttachments = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT>
+  {
+    using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+  };
+
+  struct PhysicalDeviceBorderColorSwizzleFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceBorderColorSwizzleFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ = {},
+                                                                      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), borderColorSwizzle( borderColorSwizzle_ ), borderColorSwizzleFromImage( borderColorSwizzleFromImage_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceBorderColorSwizzleFeaturesEXT( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceBorderColorSwizzleFeaturesEXT( *reinterpret_cast<PhysicalDeviceBorderColorSwizzleFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=(
VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setBorderColorSwizzle( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      borderColorSwizzle = borderColorSwizzle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT &
+      setBorderColorSwizzleFromImage( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      borderColorSwizzleFromImage = borderColorSwizzleFromImage_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBorderColorSwizzleFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBorderColorSwizzleFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, borderColorSwizzle, borderColorSwizzleFromImage );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( borderColorSwizzle == rhs.borderColorSwizzle ) &&
+             ( borderColorSwizzleFromImage == rhs.borderColorSwizzleFromImage );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle = {};
+    VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT>
+  {
+    using Type = PhysicalDeviceBorderColorSwizzleFeaturesEXT;
+  };
+
+  struct PhysicalDeviceBufferDeviceAddressFeatures
+  {
+    using NativeType = VkPhysicalDeviceBufferDeviceAddressFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
+                                                                    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
+                                                                    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {},
+                                                                    void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , bufferDeviceAddress( bufferDeviceAddress_ )
+      , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
+      , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceBufferDeviceAddressFeatures( *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceBufferDeviceAddressFeatures & operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddress = bufferDeviceAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures &
+      setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures &
+      setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
+             ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) &&
+             ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeatures>
+  {
+    using Type = PhysicalDeviceBufferDeviceAddressFeatures;
+  };
+
+  using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures;
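As a usage aside (not part of the vendored header): the `using` alias above makes the KHR extension name and the Vulkan 1.2 core name interchangeable, so both spellings compile against the same type. A minimal sketch (`<type_traits>` assumed, C++17 as in this project's Makefile):

```cpp
static_assert( std::is_same_v<vk::PhysicalDeviceBufferDeviceAddressFeaturesKHR,
                              vk::PhysicalDeviceBufferDeviceAddressFeatures>,
               "the KHR name is a pure alias for the core struct" );

vk::PhysicalDeviceBufferDeviceAddressFeatures bdaFeatures{};
bdaFeatures.setBufferDeviceAddress( VK_TRUE );
// chain &bdaFeatures into vk::DeviceCreateInfo::pNext as in the earlier sketch
```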
+  struct PhysicalDeviceBufferDeviceAddressFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceBufferDeviceAddressFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
+                                                                       VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
+                                                                       VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {},
+                                                                       void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , bufferDeviceAddress( bufferDeviceAddress_ )
+      , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
+      , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceBufferDeviceAddressFeaturesEXT( *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddress = bufferDeviceAddress_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT &
+      setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT &
+      setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
+             ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) &&
+             ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT>
+  {
+    using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+  };
+
+  using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+
+  struct PhysicalDeviceClusterCullingShaderFeaturesHUAWEI
+  {
+    using NativeType = VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader_ = {},
+                                                                           VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader_ = {},
+                                                                           void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), clustercullingShader( clustercullingShader_ ), multiviewClusterCullingShader( multiviewClusterCullingShader_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & operator=( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI &
+      setClustercullingShader( VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clustercullingShader = clustercullingShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI &
+      setMultiviewClusterCullingShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewClusterCullingShader = multiviewClusterCullingShader_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI *>( this );
+    }
+
+    operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, clustercullingShader, multiviewClusterCullingShader );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( clustercullingShader == rhs.clustercullingShader ) &&
+             ( multiviewClusterCullingShader == rhs.multiviewClusterCullingShader );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI>
+  {
+    using Type = PhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
+  };
+
+  struct PhysicalDeviceClusterCullingShaderPropertiesHUAWEI
+  {
+    using NativeType = VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( std::array<uint32_t, 3> const & maxWorkGroupCount_ = {},
+                                                                                std::array<uint32_t, 3> const & maxWorkGroupSize_ = {},
+                                                                                uint32_t maxOutputClusterCount_ = {},
+                                                                                VULKAN_HPP_NAMESPACE::DeviceSize indirectBufferOffsetAlignment_ = {},
+                                                                                void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , maxWorkGroupCount( maxWorkGroupCount_ )
+      , maxWorkGroupSize( maxWorkGroupSize_ )
+      , maxOutputClusterCount( maxOutputClusterCount_ )
+      , indirectBufferOffsetAlignment( indirectBufferOffsetAlignment_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( *reinterpret_cast<PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceClusterCullingShaderPropertiesHUAWEI & operator=( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceClusterCullingShaderPropertiesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI *>( this );
+    }
+
+    operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxWorkGroupCount, maxWorkGroupSize, maxOutputClusterCount, indirectBufferOffsetAlignment );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxWorkGroupCount == rhs.maxWorkGroupCount ) &&
+             ( maxWorkGroupSize == rhs.maxWorkGroupSize ) && ( maxOutputClusterCount == rhs.maxOutputClusterCount ) &&
+             ( indirectBufferOffsetAlignment == rhs.indirectBufferOffsetAlignment );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxWorkGroupCount = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxWorkGroupSize = {};
+    uint32_t maxOutputClusterCount = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize indirectBufferOffsetAlignment = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI>
+  {
+    using Type = PhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
+  };
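As a usage aside (not part of the vendored header): `ArrayWrapper1D<uint32_t, 3>` indexes like a `std::array`, so the HUAWEI work-group limits read naturally once the struct has been filled through a `vk::PhysicalDeviceProperties2` chain as in the earlier properties sketch:

```cpp
vk::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI ccProps{};
// ... chain &ccProps into props2.pNext and call physicalDevice.getProperties2( &props2 ) ...
uint32_t maxX = ccProps.maxWorkGroupCount[0];
uint32_t maxY = ccProps.maxWorkGroupCount[1];
uint32_t maxZ = ccProps.maxWorkGroupCount[2];
```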
+  struct PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI
+  {
+    using NativeType = VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 clusterShadingRate_ = {},
+                                                                              void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), clusterShadingRate( clusterShadingRate_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & operator=( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI &
setClusterShadingRate( VULKAN_HPP_NAMESPACE::Bool32 clusterShadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clusterShadingRate = clusterShadingRate_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI *>( this );
+    }
+
+    operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, clusterShadingRate );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( clusterShadingRate == rhs.clusterShadingRate );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 clusterShadingRate = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI>
+  {
+    using Type = PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI;
+  };
+
+  struct PhysicalDeviceCoherentMemoryFeaturesAMD
+  {
+    using NativeType = VkPhysicalDeviceCoherentMemoryFeaturesAMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), deviceCoherentMemory( deviceCoherentMemory_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceCoherentMemoryFeaturesAMD( *reinterpret_cast<PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceCoherentMemory = deviceCoherentMemory_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &()
const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD *>( this );
+    }
+
+    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, deviceCoherentMemory );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceCoherentMemory == rhs.deviceCoherentMemory );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD>
+  {
+    using Type = PhysicalDeviceCoherentMemoryFeaturesAMD;
+  };
+
+  struct PhysicalDeviceColorWriteEnableFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceColorWriteEnableFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ ), colorWriteEnable( colorWriteEnable_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceColorWriteEnableFeaturesEXT( *reinterpret_cast<PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setColorWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorWriteEnable = colorWriteEnable_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceColorWriteEnableFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceColorWriteEnableFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, colorWriteEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceColorWriteEnableFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorWriteEnable == rhs.colorWriteEnable ); +# endif + } + + bool operator!=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT> + { + using Type = PhysicalDeviceColorWriteEnableFeaturesEXT; + }; + + struct PhysicalDeviceComputeShaderDerivativesFeaturesNV + { + using NativeType = VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ) + , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceComputeShaderDerivativesFeaturesNV( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceComputeShaderDerivativesFeaturesNV( *reinterpret_cast<PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>( &rhs ) ) + { + } + + PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & + setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT + { + computeDerivativeGroupQuads = computeDerivativeGroupQuads_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & + setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT + { + computeDerivativeGroupLinear = computeDerivativeGroupLinear_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *>( this ); + 
} + + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, computeDerivativeGroupQuads, computeDerivativeGroupLinear ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) && + ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear ); +# endif + } + + bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV> + { + using Type = PhysicalDeviceComputeShaderDerivativesFeaturesNV; + }; + + struct PhysicalDeviceConditionalRenderingFeaturesEXT + { + using NativeType = VkPhysicalDeviceConditionalRenderingFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , conditionalRendering( conditionalRendering_ ) + , inheritedConditionalRendering( inheritedConditionalRendering_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceConditionalRenderingFeaturesEXT( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceConditionalRenderingFeaturesEXT( *reinterpret_cast<PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs ) ) + { + } + + PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & + setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT + { + conditionalRendering = conditionalRendering_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceConditionalRenderingFeaturesEXT & + setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT + { + inheritedConditionalRendering = inheritedConditionalRendering_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT *>( this ); + } + + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, conditionalRendering, inheritedConditionalRendering ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRendering == rhs.conditionalRendering ) && + ( inheritedConditionalRendering == rhs.inheritedConditionalRendering ); +# endif + } + + bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT> + { + using Type = PhysicalDeviceConditionalRenderingFeaturesEXT; + }; + + struct PhysicalDeviceConservativeRasterizationPropertiesEXT + { + using NativeType = VkPhysicalDeviceConservativeRasterizationPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = {}, + float maxExtraPrimitiveOverestimationSize_ = {}, + float extraPrimitiveOverestimationSizeGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , primitiveOverestimationSize( primitiveOverestimationSize_ ) + , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ) + , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ) + , primitiveUnderestimation( primitiveUnderestimation_ ) + , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ) + , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ) + , degenerateLinesRasterized( degenerateLinesRasterized_ ) + , 
fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ) + , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceConservativeRasterizationPropertiesEXT( *reinterpret_cast<PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs ) ) + { + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT & + operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs ); + return *this; + } + + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT *>( this ); + } + + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, float const &, float const &, float const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + primitiveOverestimationSize, + maxExtraPrimitiveOverestimationSize, + extraPrimitiveOverestimationSizeGranularity, + primitiveUnderestimation, + conservativePointAndLineRasterization, + degenerateTrianglesRasterized, + degenerateLinesRasterized, + fullyCoveredFragmentShaderInputVariable, + conservativeRasterizationPostDepthCoverage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) && + ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) && + ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) && + ( primitiveUnderestimation == rhs.primitiveUnderestimation ) && + ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) && + ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) && + ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) && + ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage ); +# endif + } + + bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + void * pNext = {}; + float primitiveOverestimationSize = {}; + float maxExtraPrimitiveOverestimationSize = {}; + float extraPrimitiveOverestimationSizeGranularity = {}; + VULKAN_HPP_NAMESPACE::Bool32 
primitiveUnderestimation = {}; + VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {}; + VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {}; + VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {}; + VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT; + }; + + struct PhysicalDeviceCooperativeMatrixFeaturesKHR + { + using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , cooperativeMatrix( cooperativeMatrix_ ) + , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesKHR( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixFeaturesKHR( VkPhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrixFeaturesKHR & operator=( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrixFeaturesKHR & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & + setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrix = cooperativeMatrix_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & + setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) 
const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) && + ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixFeaturesKHR; + }; + + struct PhysicalDeviceCooperativeMatrixFeaturesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , cooperativeMatrix( cooperativeMatrix_ ) + , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & + setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrix = cooperativeMatrix_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & + setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + 
return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) && + ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixFeaturesNV; + }; + + struct PhysicalDeviceCooperativeMatrixPropertiesKHR + { + using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesKHR( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrixPropertiesKHR & operator=( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrixPropertiesKHR & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cooperativeMatrixSupportedStages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( 
cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixPropertiesKHR; + }; + + struct PhysicalDeviceCooperativeMatrixPropertiesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cooperativeMatrixSupportedStages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixPropertiesNV; + }; + + struct PhysicalDeviceCopyMemoryIndirectFeaturesNV + { + using NativeType = 
VkPhysicalDeviceCopyMemoryIndirectFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 indirectCopy_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , indirectCopy( indirectCopy_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCopyMemoryIndirectFeaturesNV( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCopyMemoryIndirectFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setIndirectCopy( VULKAN_HPP_NAMESPACE::Bool32 indirectCopy_ ) VULKAN_HPP_NOEXCEPT + { + indirectCopy = indirectCopy_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, indirectCopy ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indirectCopy == rhs.indirectCopy ); +# endif + } + + bool operator!=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 indirectCopy = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCopyMemoryIndirectFeaturesNV; + }; + + struct PhysicalDeviceCopyMemoryIndirectPropertiesNV + { + using NativeType = VkPhysicalDeviceCopyMemoryIndirectPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesNV( 
VULKAN_HPP_NAMESPACE::QueueFlags supportedQueues_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , supportedQueues( supportedQueues_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesNV( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCopyMemoryIndirectPropertiesNV( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCopyMemoryIndirectPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCopyMemoryIndirectPropertiesNV & operator=( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCopyMemoryIndirectPropertiesNV & operator=( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportedQueues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedQueues == rhs.supportedQueues ); +# endif + } + + bool operator!=( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueueFlags supportedQueues = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCopyMemoryIndirectPropertiesNV; + }; + + struct PhysicalDeviceCornerSampledImageFeaturesNV + { + using NativeType = VkPhysicalDeviceCornerSampledImageFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , cornerSampledImage( cornerSampledImage_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCornerSampledImageFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + 
PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & + setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT + { + cornerSampledImage = cornerSampledImage_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cornerSampledImage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cornerSampledImage == rhs.cornerSampledImage ); +# endif + } + + bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCornerSampledImageFeaturesNV; + }; + + struct PhysicalDeviceCoverageReductionModeFeaturesNV + { + using NativeType = VkPhysicalDeviceCoverageReductionModeFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , coverageReductionMode( coverageReductionMode_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCoverageReductionModeFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & + setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT + { + coverageReductionMode = coverageReductionMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, coverageReductionMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ); +# endif + } + + bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCoverageReductionModeFeaturesNV; + }; + + struct PhysicalDeviceCubicClampFeaturesQCOM + { + using NativeType = VkPhysicalDeviceCubicClampFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCubicClampFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicClampFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 cubicRangeClamp_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , cubicRangeClamp( cubicRangeClamp_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicClampFeaturesQCOM( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCubicClampFeaturesQCOM( VkPhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCubicClampFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCubicClampFeaturesQCOM & operator=( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCubicClampFeaturesQCOM & operator=( VkPhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicClampFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicClampFeaturesQCOM & setCubicRangeClamp( VULKAN_HPP_NAMESPACE::Bool32 cubicRangeClamp_ ) VULKAN_HPP_NOEXCEPT + { + cubicRangeClamp = 
cubicRangeClamp_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceCubicClampFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCubicClampFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cubicRangeClamp ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCubicClampFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicRangeClamp == rhs.cubicRangeClamp ); +# endif + } + + bool operator!=( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCubicClampFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cubicRangeClamp = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCubicClampFeaturesQCOM; + }; + + struct PhysicalDeviceCubicWeightsFeaturesQCOM + { + using NativeType = VkPhysicalDeviceCubicWeightsFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicWeightsFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 selectableCubicWeights_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , selectableCubicWeights( selectableCubicWeights_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicWeightsFeaturesQCOM( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCubicWeightsFeaturesQCOM( VkPhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCubicWeightsFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCubicWeightsFeaturesQCOM & operator=( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCubicWeightsFeaturesQCOM & operator=( VkPhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicWeightsFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicWeightsFeaturesQCOM & + setSelectableCubicWeights( VULKAN_HPP_NAMESPACE::Bool32 selectableCubicWeights_ ) VULKAN_HPP_NOEXCEPT + { + selectableCubicWeights = selectableCubicWeights_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceCubicWeightsFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCubicWeightsFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, selectableCubicWeights ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCubicWeightsFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( selectableCubicWeights == rhs.selectableCubicWeights ); +# endif + } + + bool operator!=( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 selectableCubicWeights = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCubicWeightsFeaturesQCOM; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceCudaKernelLaunchFeaturesNV + { + using NativeType = VkPhysicalDeviceCudaKernelLaunchFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , cudaKernelLaunchFeatures( cudaKernelLaunchFeatures_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchFeaturesNV( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCudaKernelLaunchFeaturesNV( VkPhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCudaKernelLaunchFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCudaKernelLaunchFeaturesNV & operator=( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCudaKernelLaunchFeaturesNV & operator=( VkPhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV & + setCudaKernelLaunchFeatures( VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures_ ) VULKAN_HPP_NOEXCEPT + { + cudaKernelLaunchFeatures = cudaKernelLaunchFeatures_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cudaKernelLaunchFeatures ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceCudaKernelLaunchFeaturesNV const & ) const = default; +# else + bool operator==( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cudaKernelLaunchFeatures == rhs.cudaKernelLaunchFeatures ); +# endif + } + + bool operator!=( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCudaKernelLaunchFeaturesNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceCudaKernelLaunchPropertiesNV + { + using NativeType = VkPhysicalDeviceCudaKernelLaunchPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchPropertiesNV( uint32_t computeCapabilityMinor_ = {}, + uint32_t computeCapabilityMajor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , computeCapabilityMinor( computeCapabilityMinor_ ) + , computeCapabilityMajor( computeCapabilityMajor_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchPropertiesNV( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCudaKernelLaunchPropertiesNV( VkPhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCudaKernelLaunchPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCudaKernelLaunchPropertiesNV & operator=( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCudaKernelLaunchPropertiesNV & operator=( VkPhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, computeCapabilityMinor, computeCapabilityMajor ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCudaKernelLaunchPropertiesNV const & ) const = default; +# else + bool operator==( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( computeCapabilityMinor == rhs.computeCapabilityMinor ) && + ( computeCapabilityMajor == rhs.computeCapabilityMajor ); +# endif + } + + bool operator!=( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + 
return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV; + void * pNext = {}; + uint32_t computeCapabilityMinor = {}; + uint32_t computeCapabilityMajor = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCudaKernelLaunchPropertiesNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct PhysicalDeviceCustomBorderColorFeaturesEXT + { + using NativeType = VkPhysicalDeviceCustomBorderColorFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , customBorderColors( customBorderColors_ ) + , customBorderColorWithoutFormat( customBorderColorWithoutFormat_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCustomBorderColorFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & + setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColors = customBorderColors_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & + setCustomBorderColorWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColorWithoutFormat = customBorderColorWithoutFormat_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, customBorderColors, customBorderColorWithoutFormat ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == 
rhs.sType ) && ( pNext == rhs.pNext ) && ( customBorderColors == rhs.customBorderColors ) && + ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat ); +# endif + } + + bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {}; + VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCustomBorderColorFeaturesEXT; + }; + + struct PhysicalDeviceCustomBorderColorPropertiesEXT + { + using NativeType = VkPhysicalDeviceCustomBorderColorPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( uint32_t maxCustomBorderColorSamplers_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCustomBorderColorPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxCustomBorderColorSamplers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers ); +# endif + } + + bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; + void * pNext = {}; + uint32_t maxCustomBorderColorSamplers = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCustomBorderColorPropertiesEXT; + }; + + struct 
PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV + { + using NativeType = VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT + { + dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dedicatedAllocationImageAliasing ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing ); +# endif + } + + bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 
dedicatedAllocationImageAliasing = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + }; + + struct PhysicalDeviceDepthBiasControlFeaturesEXT + { + using NativeType = VkPhysicalDeviceDepthBiasControlFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthBiasControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 leastRepresentableValueForceUnormRepresentation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 floatRepresentation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , depthBiasControl( depthBiasControl_ ) + , leastRepresentableValueForceUnormRepresentation( leastRepresentableValueForceUnormRepresentation_ ) + , floatRepresentation( floatRepresentation_ ) + , depthBiasExact( depthBiasExact_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthBiasControlFeaturesEXT( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthBiasControlFeaturesEXT( VkPhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthBiasControlFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthBiasControlFeaturesEXT & operator=( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDepthBiasControlFeaturesEXT & operator=( VkPhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & + setDepthBiasControl( VULKAN_HPP_NAMESPACE::Bool32 depthBiasControl_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasControl = depthBiasControl_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & + setLeastRepresentableValueForceUnormRepresentation( VULKAN_HPP_NAMESPACE::Bool32 leastRepresentableValueForceUnormRepresentation_ ) VULKAN_HPP_NOEXCEPT + { + leastRepresentableValueForceUnormRepresentation = leastRepresentableValueForceUnormRepresentation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & + setFloatRepresentation( VULKAN_HPP_NAMESPACE::Bool32 floatRepresentation_ ) VULKAN_HPP_NOEXCEPT + { + floatRepresentation = floatRepresentation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setDepthBiasExact( VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasExact = depthBiasExact_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDepthBiasControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthBiasControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + 
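+    // [editor's note] Hedged usage sketch, not part of the generated header: feature structs
+    // such as PhysicalDeviceDepthBiasControlFeaturesEXT are normally queried through a pNext
+    // chain hanging off vk::PhysicalDeviceFeatures2; `physicalDevice` below is an assumed
+    // vk::PhysicalDevice handle.
+    //
+    //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
+    //                                            vk::PhysicalDeviceDepthBiasControlFeaturesEXT>();
+    //   vk::Bool32 supported = chain.get<vk::PhysicalDeviceDepthBiasControlFeaturesEXT>().depthBiasControl;
+    //
+    // The same struct, chained into vk::DeviceCreateInfo::pNext, requests the feature at device
+    // creation once the VK_EXT_depth_bias_control extension has been enabled.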
reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthBiasControl, leastRepresentableValueForceUnormRepresentation, floatRepresentation, depthBiasExact ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthBiasControlFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthBiasControl == rhs.depthBiasControl ) && + ( leastRepresentableValueForceUnormRepresentation == rhs.leastRepresentableValueForceUnormRepresentation ) && + ( floatRepresentation == rhs.floatRepresentation ) && ( depthBiasExact == rhs.depthBiasExact ); +# endif + } + + bool operator!=( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 leastRepresentableValueForceUnormRepresentation = {}; + VULKAN_HPP_NAMESPACE::Bool32 floatRepresentation = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthBiasControlFeaturesEXT; + }; + + struct PhysicalDeviceDepthClampZeroOneFeaturesEXT + { + using NativeType = VkPhysicalDeviceDepthClampZeroOneFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , depthClampZeroOne( depthClampZeroOne_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesEXT( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClampZeroOneFeaturesEXT( VkPhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthClampZeroOneFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthClampZeroOneFeaturesEXT & operator=( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDepthClampZeroOneFeaturesEXT & operator=( VkPhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesEXT & + setDepthClampZeroOne( VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne_ ) VULKAN_HPP_NOEXCEPT + { + depthClampZeroOne = depthClampZeroOne_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDepthClampZeroOneFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClampZeroOneFeaturesEXT &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClampZeroOne ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampZeroOne == rhs.depthClampZeroOne ); +# endif + } + + bool operator!=( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClampZeroOneFeaturesEXT; + }; + + struct PhysicalDeviceDepthClipControlFeaturesEXT + { + using NativeType = VkPhysicalDeviceDepthClipControlFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , depthClipControl( depthClipControl_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClipControlFeaturesEXT( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthClipControlFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthClipControlFeaturesEXT & operator=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDepthClipControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & + setDepthClipControl( VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ ) VULKAN_HPP_NOEXCEPT + { + depthClipControl = depthClipControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDepthClipControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClipControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClipControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + 
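+    // [editor's note] When the compiler offers C++20's three-way comparison, the generated
+    // structs simply default operator<=>; otherwise the #else branch below supplies a memberwise
+    // operator== and operator!=. Illustrative sketch (hypothetical values):
+    //
+    //   vk::PhysicalDeviceDepthClipControlFeaturesEXT a, b;
+    //   a.depthClipControl = VK_TRUE;  // b keeps the zero-initialized default
+    //   assert( a != b );              // memberwise comparison differs on depthClipControl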
auto operator<=>( PhysicalDeviceDepthClipControlFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipControl == rhs.depthClipControl ); +# endif + } + + bool operator!=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClipControlFeaturesEXT; + }; + + struct PhysicalDeviceDepthClipEnableFeaturesEXT + { + using NativeType = VkPhysicalDeviceDepthClipEnableFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , depthClipEnable( depthClipEnable_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthClipEnableFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthClipEnable = depthClipEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClipEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipEnable == 
rhs.depthClipEnable ); +# endif + } + + bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClipEnableFeaturesEXT; + }; + + struct PhysicalDeviceDepthStencilResolveProperties + { + using NativeType = VkPhysicalDeviceDepthStencilResolveProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , supportedDepthResolveModes( supportedDepthResolveModes_ ) + , supportedStencilResolveModes( supportedStencilResolveModes_ ) + , independentResolveNone( independentResolveNone_ ) + , independentResolve( independentResolve_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthStencilResolveProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDepthStencilResolveProperties & operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && + ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && ( independentResolveNone == rhs.independentResolveNone ) && + ( independentResolve == rhs.independentResolve ); +# endif + } + + bool operator!=( PhysicalDeviceDepthStencilResolveProperties 
const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthStencilResolveProperties; + }; + + using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties; + + struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT + { + using NativeType = VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( size_t combinedImageSamplerDensityMapDescriptorSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , combinedImageSamplerDensityMapDescriptorSize( combinedImageSamplerDensityMapDescriptorSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT & + operator=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT & operator=( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, combinedImageSamplerDensityMapDescriptorSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( combinedImageSamplerDensityMapDescriptorSize == rhs.combinedImageSamplerDensityMapDescriptorSize ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType 
sType = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + void * pNext = {}; + size_t combinedImageSamplerDensityMapDescriptorSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + }; + + struct PhysicalDeviceDescriptorBufferFeaturesEXT + { + using NativeType = VkPhysicalDeviceDescriptorBufferFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 descriptorBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , descriptorBuffer( descriptorBuffer_ ) + , descriptorBufferCaptureReplay( descriptorBufferCaptureReplay_ ) + , descriptorBufferImageLayoutIgnored( descriptorBufferImageLayoutIgnored_ ) + , descriptorBufferPushDescriptors( descriptorBufferPushDescriptors_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorBufferFeaturesEXT( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorBufferFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & + setDescriptorBuffer( VULKAN_HPP_NAMESPACE::Bool32 descriptorBuffer_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBuffer = descriptorBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & + setDescriptorBufferCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBufferCaptureReplay = descriptorBufferCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & + setDescriptorBufferImageLayoutIgnored( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBufferImageLayoutIgnored = descriptorBufferImageLayoutIgnored_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & + setDescriptorBufferPushDescriptors( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBufferPushDescriptors = descriptorBufferPushDescriptors_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDescriptorBufferFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceDescriptorBufferFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorBuffer, descriptorBufferCaptureReplay, descriptorBufferImageLayoutIgnored, descriptorBufferPushDescriptors ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorBufferFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorBuffer == rhs.descriptorBuffer ) && + ( descriptorBufferCaptureReplay == rhs.descriptorBufferCaptureReplay ) && + ( descriptorBufferImageLayoutIgnored == rhs.descriptorBufferImageLayoutIgnored ) && + ( descriptorBufferPushDescriptors == rhs.descriptorBufferPushDescriptors ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorBufferFeaturesEXT; + }; + + struct PhysicalDeviceDescriptorBufferPropertiesEXT + { + using NativeType = VkPhysicalDeviceDescriptorBufferPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 combinedImageSamplerDescriptorSingleArray_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferlessPushDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 allowSamplerImageViewPostSubmitCreation_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferOffsetAlignment_ = {}, + uint32_t maxDescriptorBufferBindings_ = {}, + uint32_t maxResourceDescriptorBufferBindings_ = {}, + uint32_t maxSamplerDescriptorBufferBindings_ = {}, + uint32_t maxEmbeddedImmutableSamplerBindings_ = {}, + uint32_t maxEmbeddedImmutableSamplers_ = {}, + size_t bufferCaptureReplayDescriptorDataSize_ = {}, + size_t imageCaptureReplayDescriptorDataSize_ = {}, + size_t imageViewCaptureReplayDescriptorDataSize_ = {}, + size_t samplerCaptureReplayDescriptorDataSize_ = {}, + size_t accelerationStructureCaptureReplayDescriptorDataSize_ = {}, + size_t samplerDescriptorSize_ = {}, + size_t combinedImageSamplerDescriptorSize_ = {}, + size_t sampledImageDescriptorSize_ = {}, + size_t storageImageDescriptorSize_ = {}, + size_t uniformTexelBufferDescriptorSize_ = {}, + size_t robustUniformTexelBufferDescriptorSize_ = {}, + size_t storageTexelBufferDescriptorSize_ = {}, + size_t robustStorageTexelBufferDescriptorSize_ = {}, + size_t uniformBufferDescriptorSize_ = {}, + size_t robustUniformBufferDescriptorSize_ = {}, + size_t 
storageBufferDescriptorSize_ = {}, + size_t robustStorageBufferDescriptorSize_ = {}, + size_t inputAttachmentDescriptorSize_ = {}, + size_t accelerationStructureDescriptorSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxSamplerDescriptorBufferRange_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceDescriptorBufferRange_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize samplerDescriptorBufferAddressSpaceSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize resourceDescriptorBufferAddressSpaceSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferAddressSpaceSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , combinedImageSamplerDescriptorSingleArray( combinedImageSamplerDescriptorSingleArray_ ) + , bufferlessPushDescriptors( bufferlessPushDescriptors_ ) + , allowSamplerImageViewPostSubmitCreation( allowSamplerImageViewPostSubmitCreation_ ) + , descriptorBufferOffsetAlignment( descriptorBufferOffsetAlignment_ ) + , maxDescriptorBufferBindings( maxDescriptorBufferBindings_ ) + , maxResourceDescriptorBufferBindings( maxResourceDescriptorBufferBindings_ ) + , maxSamplerDescriptorBufferBindings( maxSamplerDescriptorBufferBindings_ ) + , maxEmbeddedImmutableSamplerBindings( maxEmbeddedImmutableSamplerBindings_ ) + , maxEmbeddedImmutableSamplers( maxEmbeddedImmutableSamplers_ ) + , bufferCaptureReplayDescriptorDataSize( bufferCaptureReplayDescriptorDataSize_ ) + , imageCaptureReplayDescriptorDataSize( imageCaptureReplayDescriptorDataSize_ ) + , imageViewCaptureReplayDescriptorDataSize( imageViewCaptureReplayDescriptorDataSize_ ) + , samplerCaptureReplayDescriptorDataSize( samplerCaptureReplayDescriptorDataSize_ ) + , accelerationStructureCaptureReplayDescriptorDataSize( accelerationStructureCaptureReplayDescriptorDataSize_ ) + , samplerDescriptorSize( samplerDescriptorSize_ ) + , combinedImageSamplerDescriptorSize( combinedImageSamplerDescriptorSize_ ) + , sampledImageDescriptorSize( sampledImageDescriptorSize_ ) + , storageImageDescriptorSize( storageImageDescriptorSize_ ) + , uniformTexelBufferDescriptorSize( uniformTexelBufferDescriptorSize_ ) + , robustUniformTexelBufferDescriptorSize( robustUniformTexelBufferDescriptorSize_ ) + , storageTexelBufferDescriptorSize( storageTexelBufferDescriptorSize_ ) + , robustStorageTexelBufferDescriptorSize( robustStorageTexelBufferDescriptorSize_ ) + , uniformBufferDescriptorSize( uniformBufferDescriptorSize_ ) + , robustUniformBufferDescriptorSize( robustUniformBufferDescriptorSize_ ) + , storageBufferDescriptorSize( storageBufferDescriptorSize_ ) + , robustStorageBufferDescriptorSize( robustStorageBufferDescriptorSize_ ) + , inputAttachmentDescriptorSize( inputAttachmentDescriptorSize_ ) + , accelerationStructureDescriptorSize( accelerationStructureDescriptorSize_ ) + , maxSamplerDescriptorBufferRange( maxSamplerDescriptorBufferRange_ ) + , maxResourceDescriptorBufferRange( maxResourceDescriptorBufferRange_ ) + , samplerDescriptorBufferAddressSpaceSize( samplerDescriptorBufferAddressSpaceSize_ ) + , resourceDescriptorBufferAddressSpaceSize( resourceDescriptorBufferAddressSpaceSize_ ) + , descriptorBufferAddressSpaceSize( descriptorBufferAddressSpaceSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferPropertiesEXT( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorBufferPropertiesEXT( VkPhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorBufferPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + 
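+      // [editor's note] Properties structs like this one expose no setters: the implementation
+      // fills them in. A minimal query sketch, assuming a vk::PhysicalDevice named
+      // `physicalDevice`:
+      //
+      //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
+      //                                              vk::PhysicalDeviceDescriptorBufferPropertiesEXT>();
+      //   auto const & props = chain.get<vk::PhysicalDeviceDescriptorBufferPropertiesEXT>();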
} + + PhysicalDeviceDescriptorBufferPropertiesEXT & operator=( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorBufferPropertiesEXT & operator=( VkPhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDescriptorBufferPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorBufferPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + combinedImageSamplerDescriptorSingleArray, + bufferlessPushDescriptors, + allowSamplerImageViewPostSubmitCreation, + descriptorBufferOffsetAlignment, + maxDescriptorBufferBindings, + maxResourceDescriptorBufferBindings, + maxSamplerDescriptorBufferBindings, + maxEmbeddedImmutableSamplerBindings, + maxEmbeddedImmutableSamplers, + bufferCaptureReplayDescriptorDataSize, + imageCaptureReplayDescriptorDataSize, + imageViewCaptureReplayDescriptorDataSize, + samplerCaptureReplayDescriptorDataSize, + accelerationStructureCaptureReplayDescriptorDataSize, + samplerDescriptorSize, + combinedImageSamplerDescriptorSize, + sampledImageDescriptorSize, + storageImageDescriptorSize, + uniformTexelBufferDescriptorSize, + robustUniformTexelBufferDescriptorSize, + storageTexelBufferDescriptorSize, + robustStorageTexelBufferDescriptorSize, + uniformBufferDescriptorSize, + robustUniformBufferDescriptorSize, + storageBufferDescriptorSize, + robustStorageBufferDescriptorSize, + inputAttachmentDescriptorSize, + accelerationStructureDescriptorSize, + maxSamplerDescriptorBufferRange, + maxResourceDescriptorBufferRange, + samplerDescriptorBufferAddressSpaceSize, + resourceDescriptorBufferAddressSpaceSize, + descriptorBufferAddressSpaceSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorBufferPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( combinedImageSamplerDescriptorSingleArray == rhs.combinedImageSamplerDescriptorSingleArray ) && + ( bufferlessPushDescriptors == rhs.bufferlessPushDescriptors ) && + ( allowSamplerImageViewPostSubmitCreation == rhs.allowSamplerImageViewPostSubmitCreation ) && + ( descriptorBufferOffsetAlignment == rhs.descriptorBufferOffsetAlignment ) && ( maxDescriptorBufferBindings == rhs.maxDescriptorBufferBindings ) && + ( maxResourceDescriptorBufferBindings == rhs.maxResourceDescriptorBufferBindings ) && + ( maxSamplerDescriptorBufferBindings == rhs.maxSamplerDescriptorBufferBindings ) && + ( maxEmbeddedImmutableSamplerBindings == rhs.maxEmbeddedImmutableSamplerBindings ) && + ( maxEmbeddedImmutableSamplers == rhs.maxEmbeddedImmutableSamplers ) && + ( bufferCaptureReplayDescriptorDataSize == rhs.bufferCaptureReplayDescriptorDataSize ) && + ( imageCaptureReplayDescriptorDataSize == rhs.imageCaptureReplayDescriptorDataSize ) && + ( imageViewCaptureReplayDescriptorDataSize == rhs.imageViewCaptureReplayDescriptorDataSize ) && + ( 
samplerCaptureReplayDescriptorDataSize == rhs.samplerCaptureReplayDescriptorDataSize ) && + ( accelerationStructureCaptureReplayDescriptorDataSize == rhs.accelerationStructureCaptureReplayDescriptorDataSize ) && + ( samplerDescriptorSize == rhs.samplerDescriptorSize ) && ( combinedImageSamplerDescriptorSize == rhs.combinedImageSamplerDescriptorSize ) && + ( sampledImageDescriptorSize == rhs.sampledImageDescriptorSize ) && ( storageImageDescriptorSize == rhs.storageImageDescriptorSize ) && + ( uniformTexelBufferDescriptorSize == rhs.uniformTexelBufferDescriptorSize ) && + ( robustUniformTexelBufferDescriptorSize == rhs.robustUniformTexelBufferDescriptorSize ) && + ( storageTexelBufferDescriptorSize == rhs.storageTexelBufferDescriptorSize ) && + ( robustStorageTexelBufferDescriptorSize == rhs.robustStorageTexelBufferDescriptorSize ) && + ( uniformBufferDescriptorSize == rhs.uniformBufferDescriptorSize ) && + ( robustUniformBufferDescriptorSize == rhs.robustUniformBufferDescriptorSize ) && + ( storageBufferDescriptorSize == rhs.storageBufferDescriptorSize ) && + ( robustStorageBufferDescriptorSize == rhs.robustStorageBufferDescriptorSize ) && + ( inputAttachmentDescriptorSize == rhs.inputAttachmentDescriptorSize ) && + ( accelerationStructureDescriptorSize == rhs.accelerationStructureDescriptorSize ) && + ( maxSamplerDescriptorBufferRange == rhs.maxSamplerDescriptorBufferRange ) && + ( maxResourceDescriptorBufferRange == rhs.maxResourceDescriptorBufferRange ) && + ( samplerDescriptorBufferAddressSpaceSize == rhs.samplerDescriptorBufferAddressSpaceSize ) && + ( resourceDescriptorBufferAddressSpaceSize == rhs.resourceDescriptorBufferAddressSpaceSize ) && + ( descriptorBufferAddressSpaceSize == rhs.descriptorBufferAddressSpaceSize ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 combinedImageSamplerDescriptorSingleArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferlessPushDescriptors = {}; + VULKAN_HPP_NAMESPACE::Bool32 allowSamplerImageViewPostSubmitCreation = {}; + VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferOffsetAlignment = {}; + uint32_t maxDescriptorBufferBindings = {}; + uint32_t maxResourceDescriptorBufferBindings = {}; + uint32_t maxSamplerDescriptorBufferBindings = {}; + uint32_t maxEmbeddedImmutableSamplerBindings = {}; + uint32_t maxEmbeddedImmutableSamplers = {}; + size_t bufferCaptureReplayDescriptorDataSize = {}; + size_t imageCaptureReplayDescriptorDataSize = {}; + size_t imageViewCaptureReplayDescriptorDataSize = {}; + size_t samplerCaptureReplayDescriptorDataSize = {}; + size_t accelerationStructureCaptureReplayDescriptorDataSize = {}; + size_t samplerDescriptorSize = {}; + size_t combinedImageSamplerDescriptorSize = {}; + size_t sampledImageDescriptorSize = {}; + size_t storageImageDescriptorSize = {}; + size_t uniformTexelBufferDescriptorSize = {}; + size_t robustUniformTexelBufferDescriptorSize = {}; + size_t storageTexelBufferDescriptorSize = {}; + size_t robustStorageTexelBufferDescriptorSize = {}; + size_t uniformBufferDescriptorSize = {}; + size_t robustUniformBufferDescriptorSize = {}; + size_t storageBufferDescriptorSize = {}; + size_t robustStorageBufferDescriptorSize = {}; + size_t inputAttachmentDescriptorSize = {}; + size_t accelerationStructureDescriptorSize = {}; + 
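+    // [editor's note] The *DescriptorSize members above give the byte size of one descriptor of
+    // each type inside a descriptor buffer, while descriptorBufferOffsetAlignment (a power of
+    // two) constrains where a bound range may start. Hedged sizing sketch, reusing the `props`
+    // variable from the query sketch above:
+    //
+    //   VkDeviceSize stride = props.combinedImageSamplerDescriptorSize;  // one descriptor
+    //   VkDeviceSize offset = ( n * stride + props.descriptorBufferOffsetAlignment - 1 ) &
+    //                         ~( props.descriptorBufferOffsetAlignment - 1 );  // align up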
VULKAN_HPP_NAMESPACE::DeviceSize maxSamplerDescriptorBufferRange = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceDescriptorBufferRange = {}; + VULKAN_HPP_NAMESPACE::DeviceSize samplerDescriptorBufferAddressSpaceSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize resourceDescriptorBufferAddressSpaceSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferAddressSpaceSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorBufferPropertiesEXT; + }; + + struct PhysicalDeviceDescriptorIndexingFeatures + { + using NativeType = VkPhysicalDeviceDescriptorIndexingFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ) + , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) + , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) + , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ) + , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ) + , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ) + , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ) + , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ) + , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ) + , shaderStorageTexelBufferArrayNonUniformIndexing( 
shaderStorageTexelBufferArrayNonUniformIndexing_ ) + , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ) + , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ) + , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ) + , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ) + , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ) + , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ) + , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ) + , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ) + , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ) + , runtimeDescriptorArray( runtimeDescriptorArray_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorIndexingFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorIndexingFeatures & operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampledImageArrayNonUniformIndexing = 
shaderSampledImageArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind(
+      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+      setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+      setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+      setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
+      setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
+    {
+      runtimeDescriptorArray = runtimeDescriptorArray_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       shaderInputAttachmentArrayDynamicIndexing,
+                       shaderUniformTexelBufferArrayDynamicIndexing,
+                       shaderStorageTexelBufferArrayDynamicIndexing,
+                       shaderUniformBufferArrayNonUniformIndexing,
+                       shaderSampledImageArrayNonUniformIndexing,
+                       shaderStorageBufferArrayNonUniformIndexing,
+                       shaderStorageImageArrayNonUniformIndexing,
+                       shaderInputAttachmentArrayNonUniformIndexing,
+                       shaderUniformTexelBufferArrayNonUniformIndexing,
+                       shaderStorageTexelBufferArrayNonUniformIndexing,
+                       descriptorBindingUniformBufferUpdateAfterBind,
+                       descriptorBindingSampledImageUpdateAfterBind,
+                       descriptorBindingStorageImageUpdateAfterBind,
+                       descriptorBindingStorageBufferUpdateAfterBind,
+                       descriptorBindingUniformTexelBufferUpdateAfterBind,
+                       descriptorBindingStorageTexelBufferUpdateAfterBind,
+                       descriptorBindingUpdateUnusedWhilePending,
+                       descriptorBindingPartiallyBound,
+                       descriptorBindingVariableDescriptorCount,
+                       runtimeDescriptorArray );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) &&
+             ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) &&
+             ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) &&
+             ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) &&
+             ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) &&
+             ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) &&
+             ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) &&
+             ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) &&
+             ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) &&
+             ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) &&
+             ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) &&
+             ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) &&
+             ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) &&
+             ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) &&
+             ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) &&
+             ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) &&
+             ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) &&
+             ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) &&
+             ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) &&
+             ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
+    void *                       pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
+    VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingFeatures>
+  {
+    using Type = PhysicalDeviceDescriptorIndexingFeatures;
+  };
+
+  using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures;
+
+  struct PhysicalDeviceDescriptorIndexingProperties
+  {
+    using NativeType = VkPhysicalDeviceDescriptorIndexingProperties;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
+                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
+                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
+                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
+                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
+                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
+                                                                     uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
+                                                                     uint32_t maxPerStageUpdateAfterBindResources_ = {},
+                                                                     uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
+                                                                     uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
+                                                                     uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
+                                                                     uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
+                                                                     uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
+                                                                     uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
+                                                                     uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
+                                                                     uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {},
+                                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
+      , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ )
+      , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ )
+      , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ )
+      , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ )
+      , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ )
+      , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ )
+      , quadDivergentImplicitLod( quadDivergentImplicitLod_ )
+      , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ )
+      , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ )
+      ,
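// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the generated header): the feature struct
// above is consumed through a pNext chain. `physicalDevice` is an assumed,
// already-retrieved vk::PhysicalDevice.
//
//   vk::PhysicalDeviceDescriptorIndexingFeatures indexingFeatures{};
//   vk::PhysicalDeviceFeatures2                  features2{};
//   features2.pNext = &indexingFeatures;        // chain the extension struct
//   physicalDevice.getFeatures2( &features2 );  // driver fills both structs
//
//   // To enable what was reported, pass the same chain to device creation:
//   vk::DeviceCreateInfo deviceInfo{};
//   deviceInfo.pNext = &features2;              // features2.features set as needed
// ---------------------------------------------------------------------------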
maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) + , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) + , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) + , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) + , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) + , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) + , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorIndexingProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorIndexingProperties & operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxUpdateAfterBindDescriptorsInAllPools, + shaderUniformBufferArrayNonUniformIndexingNative, + shaderSampledImageArrayNonUniformIndexingNative, + shaderStorageBufferArrayNonUniformIndexingNative, + shaderStorageImageArrayNonUniformIndexingNative, + shaderInputAttachmentArrayNonUniformIndexingNative, + robustBufferAccessUpdateAfterBind, + quadDivergentImplicitLod, + maxPerStageDescriptorUpdateAfterBindSamplers, + maxPerStageDescriptorUpdateAfterBindUniformBuffers, + maxPerStageDescriptorUpdateAfterBindStorageBuffers, + maxPerStageDescriptorUpdateAfterBindSampledImages, + maxPerStageDescriptorUpdateAfterBindStorageImages, + maxPerStageDescriptorUpdateAfterBindInputAttachments, + maxPerStageUpdateAfterBindResources, + maxDescriptorSetUpdateAfterBindSamplers, + maxDescriptorSetUpdateAfterBindUniformBuffers, + maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, + maxDescriptorSetUpdateAfterBindStorageBuffers, + maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, + 
maxDescriptorSetUpdateAfterBindSampledImages, + maxDescriptorSetUpdateAfterBindStorageImages, + maxDescriptorSetUpdateAfterBindInputAttachments ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && + ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && + ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) && + ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && + ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) && + ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && + ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && + ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && + ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && + ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && + ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && + ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && + ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && + ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && + ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; + void * pNext = {}; + uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 
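// Illustrative sketch: the update-after-bind limits in the properties struct
// above are read back through vk::PhysicalDeviceProperties2 (pNext chain);
// `physicalDevice` is assumed.
//
//   vk::PhysicalDeviceDescriptorIndexingProperties indexingProps{};
//   vk::PhysicalDeviceProperties2                  props2{};
//   props2.pNext = &indexingProps;
//   physicalDevice.getProperties2( &props2 );
//   // e.g. size a bindless texture array against the reported limit:
//   uint32_t maxBindlessImages = indexingProps.maxDescriptorSetUpdateAfterBindSampledImages;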
shaderSampledImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; + uint32_t maxPerStageUpdateAfterBindResources = {}; + uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorIndexingProperties; + }; + + using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; + + struct PhysicalDeviceDescriptorPoolOverallocationFeaturesNV + { + using NativeType = VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 descriptorPoolOverallocation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , descriptorPoolOverallocation( descriptorPoolOverallocation_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & + operator=( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & operator=( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & + setDescriptorPoolOverallocation( VULKAN_HPP_NAMESPACE::Bool32 descriptorPoolOverallocation_ ) 
VULKAN_HPP_NOEXCEPT + { + descriptorPoolOverallocation = descriptorPoolOverallocation_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorPoolOverallocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPoolOverallocation == rhs.descriptorPoolOverallocation ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorPoolOverallocation = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + }; + + struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE + { + using NativeType = VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , descriptorSetHostMapping( descriptorSetHostMapping_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & + operator=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & operator=( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & + setDescriptorSetHostMapping( VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ ) 
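// Illustrative sketch for the NV overallocation feature above; the flag names
// are assumed from VK_NV_descriptor_pool_overallocation, and the feature must
// have been queried and enabled first.
//
//   vk::DescriptorPoolCreateInfo poolInfo{};
//   poolInfo.flags = vk::DescriptorPoolCreateFlagBits::eAllowOverallocationSetsNV |
//                    vk::DescriptorPoolCreateFlagBits::eAllowOverallocationPoolsNV;
//   // with these bits set, the pool may exceed maxSets / pool-size counts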
VULKAN_HPP_NOEXCEPT + { + descriptorSetHostMapping = descriptorSetHostMapping_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorSetHostMapping ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetHostMapping == rhs.descriptorSetHostMapping ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + }; + + struct PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , deviceGeneratedCompute( deviceGeneratedCompute_ ) + , deviceGeneratedComputePipelines( deviceGeneratedComputePipelines_ ) + , deviceGeneratedComputeCaptureReplay( deviceGeneratedComputeCaptureReplay_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + operator=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + operator=( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
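// Illustrative sketch for descriptorSetHostMapping above; the entry point name
// is assumed from VK_VALVE_descriptor_set_host_mapping, and `device` /
// `descriptorSet` are assumed to exist.
//
//   // after confirming the feature bit, map a descriptor set's host memory:
//   void * data = device.getDescriptorSetHostMappingVALVE( descriptorSet );
//   // writes through `data` go directly into driver-owned descriptor memory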
PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + setDeviceGeneratedCompute( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedCompute = deviceGeneratedCompute_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + setDeviceGeneratedComputePipelines( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedComputePipelines = deviceGeneratedComputePipelines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + setDeviceGeneratedComputeCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedComputeCaptureReplay = deviceGeneratedComputeCaptureReplay_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceGeneratedCompute, deviceGeneratedComputePipelines, deviceGeneratedComputeCaptureReplay ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCompute == rhs.deviceGeneratedCompute ) && + ( deviceGeneratedComputePipelines == rhs.deviceGeneratedComputePipelines ) && + ( deviceGeneratedComputeCaptureReplay == rhs.deviceGeneratedComputeCaptureReplay ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + }; + + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , deviceGeneratedCommands( 
deviceGeneratedCommands_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & + setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedCommands = deviceGeneratedCommands_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceGeneratedCommands ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + }; + + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( uint32_t maxGraphicsShaderGroupCount_ = {}, + uint32_t maxIndirectSequenceCount_ = {}, + uint32_t maxIndirectCommandsTokenCount_ = {}, + uint32_t maxIndirectCommandsStreamCount_ = {}, + uint32_t maxIndirectCommandsTokenOffset_ = {}, + uint32_t maxIndirectCommandsStreamStride_ = {}, + 
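// Illustrative sketch: the NV device-generated-commands feature structs above
// can be queried in one call via a structure chain (enhanced-mode vulkan.hpp
// assumed; `physicalDevice` assumed).
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>();
//   bool dgcSupported =
//     chain.get<vk::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>().deviceGeneratedCommands == VK_TRUE;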
uint32_t minSequencesCountBufferOffsetAlignment_ = {}, + uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, + uint32_t minIndirectCommandsBufferOffsetAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ ) + , maxIndirectSequenceCount( maxIndirectSequenceCount_ ) + , maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ ) + , maxIndirectCommandsStreamCount( maxIndirectCommandsStreamCount_ ) + , maxIndirectCommandsTokenOffset( maxIndirectCommandsTokenOffset_ ) + , maxIndirectCommandsStreamStride( maxIndirectCommandsStreamStride_ ) + , minSequencesCountBufferOffsetAlignment( minSequencesCountBufferOffsetAlignment_ ) + , minSequencesIndexBufferOffsetAlignment( minSequencesIndexBufferOffsetAlignment_ ) + , minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & + operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxGraphicsShaderGroupCount, + maxIndirectSequenceCount, + maxIndirectCommandsTokenCount, + maxIndirectCommandsStreamCount, + maxIndirectCommandsTokenOffset, + maxIndirectCommandsStreamStride, + minSequencesCountBufferOffsetAlignment, + minSequencesIndexBufferOffsetAlignment, + minIndirectCommandsBufferOffsetAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) && + ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) && + ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) && + ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) && + ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) && + ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment ) && + ( minSequencesIndexBufferOffsetAlignment == 
rhs.minSequencesIndexBufferOffsetAlignment ) && + ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + void * pNext = {}; + uint32_t maxGraphicsShaderGroupCount = {}; + uint32_t maxIndirectSequenceCount = {}; + uint32_t maxIndirectCommandsTokenCount = {}; + uint32_t maxIndirectCommandsStreamCount = {}; + uint32_t maxIndirectCommandsTokenOffset = {}; + uint32_t maxIndirectCommandsStreamStride = {}; + uint32_t minSequencesCountBufferOffsetAlignment = {}; + uint32_t minSequencesIndexBufferOffsetAlignment = {}; + uint32_t minIndirectCommandsBufferOffsetAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + }; + + struct PhysicalDeviceDeviceMemoryReportFeaturesEXT + { + using NativeType = VkPhysicalDeviceDeviceMemoryReportFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , deviceMemoryReport( deviceMemoryReport_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceMemoryReportFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & + setDeviceMemoryReport( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT + { + deviceMemoryReport = deviceMemoryReport_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceMemoryReport ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & ) const = default; +#else + bool operator==( 
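// Illustrative sketch: once deviceMemoryReport (struct above) is supported, a
// callback is registered at device creation via the EXT create-info struct;
// the callback body and the `deviceInfo` chain are assumptions of this sketch.
//
//   vk::DeviceDeviceMemoryReportCreateInfoEXT reportInfo{};
//   reportInfo.pfnUserCallback =
//     []( const VkDeviceMemoryReportCallbackDataEXT * pData, void * /*user*/ )
//     { /* log pData->type, pData->size, pData->objectType */ };
//   reportInfo.pUserData = nullptr;
//   deviceInfo.pNext     = &reportInfo;   // chained into vk::DeviceCreateInfo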
PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMemoryReport == rhs.deviceMemoryReport ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT; + }; + + struct PhysicalDeviceDiagnosticsConfigFeaturesNV + { + using NativeType = VkPhysicalDeviceDiagnosticsConfigFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , diagnosticsConfig( diagnosticsConfig_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDiagnosticsConfigFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & + setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT + { + diagnosticsConfig = diagnosticsConfig_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, diagnosticsConfig ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig ); +# endif + } + + bool operator!=( 
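// Illustrative sketch: with diagnosticsConfig supported (NV struct above),
// extra diagnostics are switched on at device creation; flag names assumed
// from VK_NV_device_diagnostics_config, `deviceInfo` assumed.
//
//   vk::DeviceDiagnosticsConfigCreateInfoNV diagInfo{};
//   diagInfo.flags = vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo |
//                    vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking |
//                    vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints;
//   deviceInfo.pNext = &diagInfo;   // chained into vk::DeviceCreateInfo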
PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV; + }; + + struct PhysicalDeviceDiscardRectanglePropertiesEXT + { + using NativeType = VkPhysicalDeviceDiscardRectanglePropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxDiscardRectangles( maxDiscardRectangles_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDiscardRectanglePropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxDiscardRectangles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); +# endif + } + + bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + void * pNext = {}; + uint32_t maxDiscardRectangles = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDiscardRectanglePropertiesEXT; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceDisplacementMicromapFeaturesNV + { + using NativeType = VkPhysicalDeviceDisplacementMicromapFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV; + +# if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 displacementMicromap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , displacementMicromap( displacementMicromap_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapFeaturesNV( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDisplacementMicromapFeaturesNV( VkPhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDisplacementMicromapFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDisplacementMicromapFeaturesNV & operator=( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDisplacementMicromapFeaturesNV & operator=( VkPhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & + setDisplacementMicromap( VULKAN_HPP_NAMESPACE::Bool32 displacementMicromap_ ) VULKAN_HPP_NOEXCEPT + { + displacementMicromap = displacementMicromap_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDisplacementMicromapFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDisplacementMicromapFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, displacementMicromap ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDisplacementMicromapFeaturesNV const & ) const = default; +# else + bool operator==( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displacementMicromap == rhs.displacementMicromap ); +# endif + } + + bool operator!=( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 displacementMicromap = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDisplacementMicromapFeaturesNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceDisplacementMicromapPropertiesNV + { + using NativeType = VkPhysicalDeviceDisplacementMicromapPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapPropertiesNV( uint32_t 
maxDisplacementMicromapSubdivisionLevel_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxDisplacementMicromapSubdivisionLevel( maxDisplacementMicromapSubdivisionLevel_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDisplacementMicromapPropertiesNV( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDisplacementMicromapPropertiesNV( VkPhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDisplacementMicromapPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDisplacementMicromapPropertiesNV & operator=( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDisplacementMicromapPropertiesNV & operator=( VkPhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDisplacementMicromapPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDisplacementMicromapPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxDisplacementMicromapSubdivisionLevel ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDisplacementMicromapPropertiesNV const & ) const = default; +# else + bool operator==( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDisplacementMicromapSubdivisionLevel == rhs.maxDisplacementMicromapSubdivisionLevel ); +# endif + } + + bool operator!=( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV; + void * pNext = {}; + uint32_t maxDisplacementMicromapSubdivisionLevel = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDisplacementMicromapPropertiesNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct PhysicalDeviceDriverProperties + { + using NativeType = VkPhysicalDeviceDriverProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, + std::array const & driverName_ = {}, + std::array const & driverInfo_ = {}, + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , driverID( driverID_ ) + , driverName( driverName_ ) + , driverInfo( driverInfo_ ) + , conformanceVersion( conformanceVersion_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDriverProperties( 
VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDriverProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ConformanceVersion const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, driverID, driverName, driverInfo, conformanceVersion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDriverProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( driverName == rhs.driverName ) && + ( driverInfo == rhs.driverInfo ) && ( conformanceVersion == rhs.conformanceVersion ); +# endif + } + + bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDriverProperties; + }; + + using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties; + + struct PhysicalDeviceDrmPropertiesEXT + { + using NativeType = VkPhysicalDeviceDrmPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDrmPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 hasPrimary_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hasRender_ = {}, + int64_t primaryMajor_ = {}, + int64_t primaryMinor_ = {}, + int64_t renderMajor_ = {}, + int64_t renderMinor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , hasPrimary( hasPrimary_ ) + , hasRender( hasRender_ ) + , primaryMajor( primaryMajor_ ) + , primaryMinor( primaryMinor_ ) + , renderMajor( renderMajor_ ) + , renderMinor( renderMinor_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDrmPropertiesEXT( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDrmPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDrmPropertiesEXT & 
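// Illustrative sketch: the driver identity above is read through a properties
// chain; useful for logging which ICD is in use (`physicalDevice` assumed).
//
//   vk::PhysicalDeviceDriverProperties driverProps{};
//   vk::PhysicalDeviceProperties2      props2{};
//   props2.pNext = &driverProps;
//   physicalDevice.getProperties2( &props2 );
//   std::printf( "driver: %s (%s)\n", driverProps.driverName.data(), driverProps.driverInfo.data() );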
operator=( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDrmPropertiesEXT & operator=( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDrmPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDrmPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hasPrimary, hasRender, primaryMajor, primaryMinor, renderMajor, renderMinor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDrmPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasPrimary == rhs.hasPrimary ) && ( hasRender == rhs.hasRender ) && + ( primaryMajor == rhs.primaryMajor ) && ( primaryMinor == rhs.primaryMinor ) && ( renderMajor == rhs.renderMajor ) && + ( renderMinor == rhs.renderMinor ); +# endif + } + + bool operator!=( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDrmPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasPrimary = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasRender = {}; + int64_t primaryMajor = {}; + int64_t primaryMinor = {}; + int64_t renderMajor = {}; + int64_t renderMinor = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDrmPropertiesEXT; + }; + + struct PhysicalDeviceDynamicRenderingFeatures + { + using NativeType = VkPhysicalDeviceDynamicRenderingFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , dynamicRendering( dynamicRendering_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDynamicRenderingFeatures( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDynamicRenderingFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDynamicRenderingFeatures & operator=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDynamicRenderingFeatures & operator=( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceDynamicRenderingFeatures & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT + { + dynamicRendering = dynamicRendering_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDynamicRenderingFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDynamicRenderingFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dynamicRendering ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDynamicRenderingFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRendering == rhs.dynamicRendering ); +# endif + } + + bool operator!=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDynamicRenderingFeatures; + }; + + using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures; + + struct PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT + { + using NativeType = VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingUnusedAttachments_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , dynamicRenderingUnusedAttachments( dynamicRenderingUnusedAttachments_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & + operator=( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & + operator=( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + 
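// Illustrative sketch: dynamicRendering (struct above; core in Vulkan 1.3)
// replaces render-pass objects. `cmd`, `swapchainImageView`, and `extent` are
// assumptions of this sketch; a minimal single-color-attachment setup:
//
//   vk::RenderingAttachmentInfo colorAttachment{};
//   colorAttachment.imageView   = swapchainImageView;
//   colorAttachment.imageLayout = vk::ImageLayout::eColorAttachmentOptimal;
//   colorAttachment.loadOp      = vk::AttachmentLoadOp::eClear;
//   colorAttachment.storeOp     = vk::AttachmentStoreOp::eStore;
//
//   vk::RenderingInfo renderingInfo{};
//   renderingInfo.renderArea           = vk::Rect2D{ { 0, 0 }, extent };
//   renderingInfo.layerCount           = 1;
//   renderingInfo.colorAttachmentCount = 1;
//   renderingInfo.pColorAttachments    = &colorAttachment;
//
//   cmd.beginRendering( renderingInfo );   // record draw calls ...
//   cmd.endRendering();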
+
+  struct PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingUnusedAttachments_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , dynamicRenderingUnusedAttachments( dynamicRenderingUnusedAttachments_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( *reinterpret_cast<PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & operator=( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & operator=( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & setDynamicRenderingUnusedAttachments( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingUnusedAttachments_ ) VULKAN_HPP_NOEXCEPT { dynamicRenderingUnusedAttachments = dynamicRenderingUnusedAttachments_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT *>( this ); }
+    operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, dynamicRenderingUnusedAttachments );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRenderingUnusedAttachments == rhs.dynamicRenderingUnusedAttachments );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingUnusedAttachments = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT>
+  {
+    using Type = PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT;
+  };
+
+  struct PhysicalDeviceExclusiveScissorFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceExclusiveScissorFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , exclusiveScissor( exclusiveScissor_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExclusiveScissorFeaturesNV( *reinterpret_cast<PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT { exclusiveScissor = exclusiveScissor_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV *>( this ); }
+    operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, exclusiveScissor );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissor == rhs.exclusiveScissor );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV>
+  {
+    using Type = PhysicalDeviceExclusiveScissorFeaturesNV;
+  };
+
+  struct PhysicalDeviceExtendedDynamicState2FeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceExtendedDynamicState2FeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ = {},
+                                                                         VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ = {},
+                                                                         VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {},
+                                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , extendedDynamicState2( extendedDynamicState2_ )
+      , extendedDynamicState2LogicOp( extendedDynamicState2LogicOp_ )
+      , extendedDynamicState2PatchControlPoints( extendedDynamicState2PatchControlPoints_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExtendedDynamicState2FeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState2 = extendedDynamicState2_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2LogicOp( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2PatchControlPoints( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *>( this ); }
+    operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState2 == rhs.extendedDynamicState2 ) &&
+             ( extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp ) &&
+             ( extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT>
+  {
+    using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT;
+  };
+
+  struct PhysicalDeviceExtendedDynamicState3FeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceExtendedDynamicState3FeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3FeaturesEXT(
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable_ = {},
+      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , extendedDynamicState3TessellationDomainOrigin( extendedDynamicState3TessellationDomainOrigin_ )
+      , extendedDynamicState3DepthClampEnable( extendedDynamicState3DepthClampEnable_ )
+      , extendedDynamicState3PolygonMode( extendedDynamicState3PolygonMode_ )
+      , extendedDynamicState3RasterizationSamples( extendedDynamicState3RasterizationSamples_ )
+      , extendedDynamicState3SampleMask( extendedDynamicState3SampleMask_ )
+      , extendedDynamicState3AlphaToCoverageEnable( extendedDynamicState3AlphaToCoverageEnable_ )
+      , extendedDynamicState3AlphaToOneEnable( extendedDynamicState3AlphaToOneEnable_ )
+      , extendedDynamicState3LogicOpEnable( extendedDynamicState3LogicOpEnable_ )
+      , extendedDynamicState3ColorBlendEnable( extendedDynamicState3ColorBlendEnable_ )
+      , extendedDynamicState3ColorBlendEquation( extendedDynamicState3ColorBlendEquation_ )
+      , extendedDynamicState3ColorWriteMask( extendedDynamicState3ColorWriteMask_ )
+      , extendedDynamicState3RasterizationStream( extendedDynamicState3RasterizationStream_ )
+      , extendedDynamicState3ConservativeRasterizationMode( extendedDynamicState3ConservativeRasterizationMode_ )
+      , extendedDynamicState3ExtraPrimitiveOverestimationSize( extendedDynamicState3ExtraPrimitiveOverestimationSize_ )
+      , extendedDynamicState3DepthClipEnable( extendedDynamicState3DepthClipEnable_ )
+      , extendedDynamicState3SampleLocationsEnable( extendedDynamicState3SampleLocationsEnable_ )
+      , extendedDynamicState3ColorBlendAdvanced( extendedDynamicState3ColorBlendAdvanced_ )
+      , extendedDynamicState3ProvokingVertexMode( extendedDynamicState3ProvokingVertexMode_ )
+      , extendedDynamicState3LineRasterizationMode( extendedDynamicState3LineRasterizationMode_ )
+      , extendedDynamicState3LineStippleEnable( extendedDynamicState3LineStippleEnable_ )
+      , extendedDynamicState3DepthClipNegativeOneToOne( extendedDynamicState3DepthClipNegativeOneToOne_ )
+      , extendedDynamicState3ViewportWScalingEnable( extendedDynamicState3ViewportWScalingEnable_ )
+      , extendedDynamicState3ViewportSwizzle( extendedDynamicState3ViewportSwizzle_ )
+      , extendedDynamicState3CoverageToColorEnable( extendedDynamicState3CoverageToColorEnable_ )
+      , extendedDynamicState3CoverageToColorLocation( extendedDynamicState3CoverageToColorLocation_ )
+      , extendedDynamicState3CoverageModulationMode( extendedDynamicState3CoverageModulationMode_ )
+      , extendedDynamicState3CoverageModulationTableEnable( extendedDynamicState3CoverageModulationTableEnable_ )
+      , extendedDynamicState3CoverageModulationTable( extendedDynamicState3CoverageModulationTable_ )
+      , extendedDynamicState3CoverageReductionMode( extendedDynamicState3CoverageReductionMode_ )
+      , extendedDynamicState3RepresentativeFragmentTestEnable( extendedDynamicState3RepresentativeFragmentTestEnable_ )
+      , extendedDynamicState3ShadingRateImageEnable( extendedDynamicState3ShadingRateImageEnable_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3FeaturesEXT( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExtendedDynamicState3FeaturesEXT( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExtendedDynamicState3FeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState3FeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3TessellationDomainOrigin( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3TessellationDomainOrigin = extendedDynamicState3TessellationDomainOrigin_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3DepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3DepthClampEnable = extendedDynamicState3DepthClampEnable_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3PolygonMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RasterizationSamples( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3SampleMask( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3SampleMask = extendedDynamicState3SampleMask_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3AlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3AlphaToCoverageEnable = extendedDynamicState3AlphaToCoverageEnable_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3AlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3AlphaToOneEnable = extendedDynamicState3AlphaToOneEnable_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3LogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3LogicOpEnable = extendedDynamicState3LogicOpEnable_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3ColorBlendEnable = extendedDynamicState3ColorBlendEnable_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorBlendEquation( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3ColorBlendEquation = extendedDynamicState3ColorBlendEquation_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorWriteMask( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3ColorWriteMask = extendedDynamicState3ColorWriteMask_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RasterizationStream( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3RasterizationStream = extendedDynamicState3RasterizationStream_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3ConservativeRasterizationMode = extendedDynamicState3ConservativeRasterizationMode_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ExtraPrimitiveOverestimationSize( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3ExtraPrimitiveOverestimationSize = extendedDynamicState3ExtraPrimitiveOverestimationSize_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3DepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3DepthClipEnable = extendedDynamicState3DepthClipEnable_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3SampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3SampleLocationsEnable = extendedDynamicState3SampleLocationsEnable_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorBlendAdvanced( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3ColorBlendAdvanced = extendedDynamicState3ColorBlendAdvanced_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ProvokingVertexMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3ProvokingVertexMode = extendedDynamicState3ProvokingVertexMode_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3LineRasterizationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3LineRasterizationMode = extendedDynamicState3LineRasterizationMode_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3LineStippleEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3LineStippleEnable = extendedDynamicState3LineStippleEnable_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3DepthClipNegativeOneToOne( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3DepthClipNegativeOneToOne = extendedDynamicState3DepthClipNegativeOneToOne_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3ViewportWScalingEnable = extendedDynamicState3ViewportWScalingEnable_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ViewportSwizzle( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3ViewportSwizzle = extendedDynamicState3ViewportSwizzle_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageToColorEnable = extendedDynamicState3CoverageToColorEnable_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageToColorLocation( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageToColorLocation = extendedDynamicState3CoverageToColorLocation_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageModulationMode = extendedDynamicState3CoverageModulationMode_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageModulationTableEnable = extendedDynamicState3CoverageModulationTableEnable_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationTable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageModulationTable = extendedDynamicState3CoverageModulationTable_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageReductionMode = extendedDynamicState3CoverageReductionMode_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3RepresentativeFragmentTestEnable = extendedDynamicState3RepresentativeFragmentTestEnable_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState3ShadingRateImageEnable = extendedDynamicState3ShadingRateImageEnable_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3FeaturesEXT *>( this ); }
+    operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3FeaturesEXT *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       extendedDynamicState3TessellationDomainOrigin,
+                       extendedDynamicState3DepthClampEnable,
+                       extendedDynamicState3PolygonMode,
+                       extendedDynamicState3RasterizationSamples,
+                       extendedDynamicState3SampleMask,
+                       extendedDynamicState3AlphaToCoverageEnable,
+                       extendedDynamicState3AlphaToOneEnable,
+                       extendedDynamicState3LogicOpEnable,
+                       extendedDynamicState3ColorBlendEnable,
+                       extendedDynamicState3ColorBlendEquation,
+                       extendedDynamicState3ColorWriteMask,
+                       extendedDynamicState3RasterizationStream,
+                       extendedDynamicState3ConservativeRasterizationMode,
+                       extendedDynamicState3ExtraPrimitiveOverestimationSize,
+                       extendedDynamicState3DepthClipEnable,
+                       extendedDynamicState3SampleLocationsEnable,
+                       extendedDynamicState3ColorBlendAdvanced,
+                       extendedDynamicState3ProvokingVertexMode,
+                       extendedDynamicState3LineRasterizationMode,
+                       extendedDynamicState3LineStippleEnable,
+                       extendedDynamicState3DepthClipNegativeOneToOne,
+                       extendedDynamicState3ViewportWScalingEnable,
+                       extendedDynamicState3ViewportSwizzle,
+                       extendedDynamicState3CoverageToColorEnable,
+                       extendedDynamicState3CoverageToColorLocation,
+                       extendedDynamicState3CoverageModulationMode,
+                       extendedDynamicState3CoverageModulationTableEnable,
+                       extendedDynamicState3CoverageModulationTable,
+                       extendedDynamicState3CoverageReductionMode,
+                       extendedDynamicState3RepresentativeFragmentTestEnable,
+                       extendedDynamicState3ShadingRateImageEnable );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
+             ( extendedDynamicState3TessellationDomainOrigin == rhs.extendedDynamicState3TessellationDomainOrigin ) &&
+             ( extendedDynamicState3DepthClampEnable == rhs.extendedDynamicState3DepthClampEnable ) &&
+             ( extendedDynamicState3PolygonMode == rhs.extendedDynamicState3PolygonMode ) &&
+             ( extendedDynamicState3RasterizationSamples == rhs.extendedDynamicState3RasterizationSamples ) &&
+             ( extendedDynamicState3SampleMask == rhs.extendedDynamicState3SampleMask ) &&
+             ( extendedDynamicState3AlphaToCoverageEnable == rhs.extendedDynamicState3AlphaToCoverageEnable ) &&
+             ( extendedDynamicState3AlphaToOneEnable == rhs.extendedDynamicState3AlphaToOneEnable ) &&
+             ( extendedDynamicState3LogicOpEnable == rhs.extendedDynamicState3LogicOpEnable ) &&
+             ( extendedDynamicState3ColorBlendEnable == rhs.extendedDynamicState3ColorBlendEnable ) &&
+             ( extendedDynamicState3ColorBlendEquation == rhs.extendedDynamicState3ColorBlendEquation ) &&
+             ( extendedDynamicState3ColorWriteMask == rhs.extendedDynamicState3ColorWriteMask ) &&
+             ( extendedDynamicState3RasterizationStream == rhs.extendedDynamicState3RasterizationStream ) &&
+             ( extendedDynamicState3ConservativeRasterizationMode == rhs.extendedDynamicState3ConservativeRasterizationMode ) &&
+             ( extendedDynamicState3ExtraPrimitiveOverestimationSize == rhs.extendedDynamicState3ExtraPrimitiveOverestimationSize ) &&
+             ( extendedDynamicState3DepthClipEnable == rhs.extendedDynamicState3DepthClipEnable ) &&
+             ( extendedDynamicState3SampleLocationsEnable == rhs.extendedDynamicState3SampleLocationsEnable ) &&
+             ( extendedDynamicState3ColorBlendAdvanced == rhs.extendedDynamicState3ColorBlendAdvanced ) &&
+             ( extendedDynamicState3ProvokingVertexMode == rhs.extendedDynamicState3ProvokingVertexMode ) &&
+             ( extendedDynamicState3LineRasterizationMode == rhs.extendedDynamicState3LineRasterizationMode ) &&
+             ( extendedDynamicState3LineStippleEnable == rhs.extendedDynamicState3LineStippleEnable ) &&
+             ( extendedDynamicState3DepthClipNegativeOneToOne == rhs.extendedDynamicState3DepthClipNegativeOneToOne ) &&
+             ( extendedDynamicState3ViewportWScalingEnable == rhs.extendedDynamicState3ViewportWScalingEnable ) &&
+             ( extendedDynamicState3ViewportSwizzle == rhs.extendedDynamicState3ViewportSwizzle ) &&
+             ( extendedDynamicState3CoverageToColorEnable == rhs.extendedDynamicState3CoverageToColorEnable ) &&
+             ( extendedDynamicState3CoverageToColorLocation == rhs.extendedDynamicState3CoverageToColorLocation ) &&
+             ( extendedDynamicState3CoverageModulationMode == rhs.extendedDynamicState3CoverageModulationMode ) &&
+             ( extendedDynamicState3CoverageModulationTableEnable == rhs.extendedDynamicState3CoverageModulationTableEnable ) &&
+             ( extendedDynamicState3CoverageModulationTable == rhs.extendedDynamicState3CoverageModulationTable ) &&
+             ( extendedDynamicState3CoverageReductionMode == rhs.extendedDynamicState3CoverageReductionMode ) &&
+             ( extendedDynamicState3RepresentativeFragmentTestEnable == rhs.extendedDynamicState3RepresentativeFragmentTestEnable ) &&
+             ( extendedDynamicState3ShadingRateImageEnable == rhs.extendedDynamicState3ShadingRateImageEnable );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT>
+  {
+    using Type = PhysicalDeviceExtendedDynamicState3FeaturesEXT;
+  };
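+
+  // Illustrative usage sketch (not part of the generated header): support for the
+  // individual extended-dynamic-state-3 toggles is usually queried through a
+  // PhysicalDeviceFeatures2 chain before the corresponding dynamic-state commands are
+  // recorded. Names are hypothetical; Vulkan 1.1+ and a valid vk::PhysicalDevice are assumed.
+  //
+  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT eds3Features{};
+  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features2{};
+  //   features2.setPNext( &eds3Features );
+  //   physicalDevice.getFeatures2( &features2 );
+  //   if ( eds3Features.extendedDynamicState3PolygonMode )
+  //   {
+  //     // the device can switch polygon mode dynamically at command-buffer recording time
+  //   }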
+
+  struct PhysicalDeviceExtendedDynamicState3PropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceExtendedDynamicState3PropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3PropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , dynamicPrimitiveTopologyUnrestricted( dynamicPrimitiveTopologyUnrestricted_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3PropertiesEXT( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExtendedDynamicState3PropertiesEXT( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExtendedDynamicState3PropertiesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicState3PropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3PropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3PropertiesEXT & setDynamicPrimitiveTopologyUnrestricted( VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted_ ) VULKAN_HPP_NOEXCEPT { dynamicPrimitiveTopologyUnrestricted = dynamicPrimitiveTopologyUnrestricted_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState3PropertiesEXT *>( this ); }
+    operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState3PropertiesEXT *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, dynamicPrimitiveTopologyUnrestricted );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicPrimitiveTopologyUnrestricted == rhs.dynamicPrimitiveTopologyUnrestricted );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT>
+  {
+    using Type = PhysicalDeviceExtendedDynamicState3PropertiesEXT;
+  };
+
+  struct PhysicalDeviceExtendedDynamicStateFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceExtendedDynamicStateFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , extendedDynamicState( extendedDynamicState_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExtendedDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT { extendedDynamicState = extendedDynamicState_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *>( this ); }
+    operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, extendedDynamicState );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState == rhs.extendedDynamicState );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT>
+  {
+    using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT;
+  };
+
+  struct PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 extendedSparseAddressSpace_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , extendedSparseAddressSpace( extendedSparseAddressSpace_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( *reinterpret_cast<PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & operator=( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & operator=( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & setExtendedSparseAddressSpace( VULKAN_HPP_NAMESPACE::Bool32 extendedSparseAddressSpace_ ) VULKAN_HPP_NOEXCEPT { extendedSparseAddressSpace = extendedSparseAddressSpace_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV *>( this ); }
+    operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, extendedSparseAddressSpace );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedSparseAddressSpace == rhs.extendedSparseAddressSpace );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 extendedSparseAddressSpace = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV>
+  {
+    using Type = PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;
+  };
+
+  struct PhysicalDeviceExtendedSparseAddressSpacePropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( VULKAN_HPP_NAMESPACE::DeviceSize extendedSparseAddressSpaceSize_ = {},
+                                                                               VULKAN_HPP_NAMESPACE::ImageUsageFlags extendedSparseImageUsageFlags_ = {},
+                                                                               VULKAN_HPP_NAMESPACE::BufferUsageFlags extendedSparseBufferUsageFlags_ = {},
+                                                                               void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , extendedSparseAddressSpaceSize( extendedSparseAddressSpaceSize_ )
+      , extendedSparseImageUsageFlags( extendedSparseImageUsageFlags_ )
+      , extendedSparseBufferUsageFlags( extendedSparseBufferUsageFlags_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( *reinterpret_cast<PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceExtendedSparseAddressSpacePropertiesNV & operator=( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExtendedSparseAddressSpacePropertiesNV & operator=( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV *>( this ); }
+    operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &,
+               VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, VULKAN_HPP_NAMESPACE::BufferUsageFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, extendedSparseAddressSpaceSize, extendedSparseImageUsageFlags, extendedSparseBufferUsageFlags );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedSparseAddressSpaceSize == rhs.extendedSparseAddressSpaceSize ) &&
+             ( extendedSparseImageUsageFlags == rhs.extendedSparseImageUsageFlags ) && ( extendedSparseBufferUsageFlags == rhs.extendedSparseBufferUsageFlags );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize extendedSparseAddressSpaceSize = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags extendedSparseImageUsageFlags = {};
+    VULKAN_HPP_NAMESPACE::BufferUsageFlags extendedSparseBufferUsageFlags = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV>
+  {
+    using Type = PhysicalDeviceExtendedSparseAddressSpacePropertiesNV;
+  };
+
+  struct PhysicalDeviceExternalBufferInfo
+  {
+    using NativeType = VkPhysicalDeviceExternalBufferInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo(
+      VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
+      VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
+      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , usage( usage_ )
+      , handleType( handleType_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExternalBufferInfo( *reinterpret_cast<PhysicalDeviceExternalBufferInfo const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( this ); }
+    operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::BufferCreateFlags const &,
+               VULKAN_HPP_NAMESPACE::BufferUsageFlags const &, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, usage, handleType );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceExternalBufferInfo const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( handleType == rhs.handleType );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalBufferInfo>
+  {
+    using Type = PhysicalDeviceExternalBufferInfo;
+  };
+
+  using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
+
+  struct PhysicalDeviceExternalFenceInfo
+  {
+    using NativeType = VkPhysicalDeviceExternalFenceInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo(
+      VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , handleType( handleType_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExternalFenceInfo( *reinterpret_cast<PhysicalDeviceExternalFenceInfo const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( this ); }
+    operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, handleType );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceExternalFenceInfo const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFenceInfo>
+  {
+    using Type = PhysicalDeviceExternalFenceInfo;
+  };
+
+  using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;
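+
+  // Illustrative usage sketch (not part of the generated header): these *Info structs are
+  // inputs to the external-memory / external-fence capability queries. A hypothetical query
+  // for an exportable transfer-source buffer might look like this:
+  //
+  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo externalBufferInfo{};
+  //   externalBufferInfo.setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlagBits::eTransferSrc )
+  //                     .setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+  //   VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties =
+  //     physicalDevice.getExternalBufferProperties( externalBufferInfo );  // assumes a valid vk::PhysicalDevice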
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  struct PhysicalDeviceExternalFormatResolveFeaturesANDROID
+  {
+    using NativeType = VkPhysicalDeviceExternalFormatResolveFeaturesANDROID;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolveFeaturesANDROID( VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , externalFormatResolve( externalFormatResolve_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolveFeaturesANDROID( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceExternalFormatResolveFeaturesANDROID( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceExternalFormatResolveFeaturesANDROID( *reinterpret_cast<PhysicalDeviceExternalFormatResolveFeaturesANDROID const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceExternalFormatResolveFeaturesANDROID & operator=( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceExternalFormatResolveFeaturesANDROID & operator=( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID & setExternalFormatResolve( VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve_ ) VULKAN_HPP_NOEXCEPT { externalFormatResolve = externalFormatResolve_; return *this; }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *>( this ); }
+    operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *>( this ); }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, externalFormatResolve );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & ) const = default;
+# else
+    bool operator==( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormatResolve == rhs.externalFormatResolve );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID>
+  {
+    using Type = PhysicalDeviceExternalFormatResolveFeaturesANDROID;
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct PhysicalDeviceExternalFormatResolvePropertiesANDROID + { + using NativeType = VkPhysicalDeviceExternalFormatResolvePropertiesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolvePropertiesANDROID( + VULKAN_HPP_NAMESPACE::Bool32 nullColorAttachmentWithExternalFormatResolve_ = {}, + VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetX_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetY_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , nullColorAttachmentWithExternalFormatResolve( nullColorAttachmentWithExternalFormatResolve_ ) + , externalFormatResolveChromaOffsetX( externalFormatResolveChromaOffsetX_ ) + , externalFormatResolveChromaOffsetY( externalFormatResolveChromaOffsetY_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalFormatResolvePropertiesANDROID( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalFormatResolvePropertiesANDROID( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalFormatResolvePropertiesANDROID( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalFormatResolvePropertiesANDROID & + operator=( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExternalFormatResolvePropertiesANDROID & operator=( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, nullColorAttachmentWithExternalFormatResolve, externalFormatResolveChromaOffsetX, externalFormatResolveChromaOffsetY ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & ) const = default; +# else + bool operator==( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( nullColorAttachmentWithExternalFormatResolve == rhs.nullColorAttachmentWithExternalFormatResolve ) && + ( externalFormatResolveChromaOffsetX == rhs.externalFormatResolveChromaOffsetX ) && + ( externalFormatResolveChromaOffsetY == rhs.externalFormatResolveChromaOffsetY ); +# endif + } + + bool operator!=( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + 
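+    // Properties structs such as this one are outputs only, which is why the generator emits
+    // no set*() members for the payload fields below. A minimal usage sketch, assuming
+    // <vulkan/vulkan.hpp> is included and 'physicalDevice' is a valid vk::PhysicalDevice
+    // (Vulkan 1.1+ with VK_ANDROID_external_format_resolve available); both names are
+    // assumptions of this sketch, not part of the header:
+    //
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID resolveProps{};
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 props2{};
+    //   props2.pNext = &resolveProps;                 // chain the extension struct
+    //   physicalDevice.getProperties2( &props2 );     // the implementation fills both structs
+    //   // resolveProps.nullColorAttachmentWithExternalFormatResolve is now valid to read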
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 nullColorAttachmentWithExternalFormatResolve = {}; + VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetX = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetY = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalFormatResolvePropertiesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct PhysicalDeviceExternalImageFormatInfo + { + using NativeType = VkPhysicalDeviceExternalImageFormatInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , handleType( handleType_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalImageFormatInfo( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExternalImageFormatInfo & operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
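+    // This input struct extends PhysicalDeviceImageFormatInfo2 through its pNext chain. A hedged
+    // sketch of the usual external-memory capability query ('physicalDevice' is an assumed
+    // vk::PhysicalDevice handle, not defined in this header):
+    //
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo extInfo{};
+    //   extInfo.setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 info{};
+    //   info.setPNext( &extInfo )
+    //       .setFormat( VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm )
+    //       .setType( VULKAN_HPP_NAMESPACE::ImageType::e2D )
+    //       .setTiling( VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal )
+    //       .setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlagBits::eSampled );
+    //   VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties extProps{};
+    //   VULKAN_HPP_NAMESPACE::ImageFormatProperties2 props{};
+    //   props.pNext = &extProps;
+    //   if ( physicalDevice.getImageFormatProperties2( &info, &props ) == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    //   {
+    //     // extProps.externalMemoryProperties describes import/export support for eOpaqueFd
+    //   }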
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalImageFormatInfo; + }; + + using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; + + struct PhysicalDeviceExternalMemoryHostPropertiesEXT + { + using NativeType = VkPhysicalDeviceExternalMemoryHostPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , minImportedHostPointerAlignment( minImportedHostPointerAlignment_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalMemoryHostPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minImportedHostPointerAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT; + }; + + struct PhysicalDeviceExternalMemoryRDMAFeaturesNV + { + using NativeType = VkPhysicalDeviceExternalMemoryRDMAFeaturesNV; + + static const bool allowDuplicate = false; + 
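+    // Feature structs like this one serve two roles: chained into PhysicalDeviceFeatures2 they
+    // are filled by getFeatures2() as a capability query, and chained into DeviceCreateInfo::pNext
+    // they request the feature at device creation. Minimal sketch ('physicalDevice' is an assumed
+    // vk::PhysicalDevice handle):
+    //
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV rdmaFeatures{};
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features2{};
+    //   features2.pNext = &rdmaFeatures;
+    //   physicalDevice.getFeatures2( &features2 );    // query support
+    //   if ( rdmaFeatures.externalMemoryRDMA )
+    //   {
+    //     // re-use the same pNext chain in DeviceCreateInfo when creating the logical device
+    //   }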
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , externalMemoryRDMA( externalMemoryRDMA_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalMemoryRDMAFeaturesNV( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalMemoryRDMAFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & + setExternalMemoryRDMA( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ ) VULKAN_HPP_NOEXCEPT + { + externalMemoryRDMA = externalMemoryRDMA_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, externalMemoryRDMA ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryRDMA == rhs.externalMemoryRDMA ); +# endif + } + + bool operator!=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalMemoryRDMAFeaturesNV; + }; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + struct PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX + { + using NativeType = VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , screenBufferImport( screenBufferImport_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & + operator=( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & operator=( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & + setScreenBufferImport( VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport_ ) VULKAN_HPP_NOEXCEPT + { + screenBufferImport = screenBufferImport_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, screenBufferImport ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & ) const = default; +# else + bool operator==( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( screenBufferImport == rhs.screenBufferImport ); +# endif + } + + bool operator!=( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + struct PhysicalDeviceExternalSemaphoreInfo + { + using NativeType = VkPhysicalDeviceExternalSemaphoreInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits 
handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , handleType( handleType_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceExternalSemaphoreInfo & operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalSemaphoreInfo; + }; + + using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; + + struct PhysicalDeviceFaultFeaturesEXT + { + using NativeType = VkPhysicalDeviceFaultFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFaultFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 deviceFault_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , deviceFault( deviceFault_ ) + , deviceFaultVendorBinary( deviceFaultVendorBinary_ ) + { + } + + VULKAN_HPP_CONSTEXPR 
PhysicalDeviceFaultFeaturesEXT( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFaultFeaturesEXT( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFaultFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFaultFeaturesEXT & operator=( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFaultFeaturesEXT & operator=( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setDeviceFault( VULKAN_HPP_NAMESPACE::Bool32 deviceFault_ ) VULKAN_HPP_NOEXCEPT + { + deviceFault = deviceFault_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & + setDeviceFaultVendorBinary( VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary_ ) VULKAN_HPP_NOEXCEPT + { + deviceFaultVendorBinary = deviceFaultVendorBinary_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceFaultFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFaultFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceFault, deviceFaultVendorBinary ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFaultFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceFault == rhs.deviceFault ) && + ( deviceFaultVendorBinary == rhs.deviceFaultVendorBinary ); +# endif + } + + bool operator!=( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFaultFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceFault = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFaultFeaturesEXT; + }; + + struct PhysicalDeviceFeatures2 + { + using NativeType = VkPhysicalDeviceFeatures2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , features( features_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFeatures2( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFeatures2 
& operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT + { + features = features_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, features ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFeatures2 const & ) const = default; +#else + bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( features == rhs.features ); +# endif + } + + bool operator!=( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFeatures2; + }; + + using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; + + struct PhysicalDeviceFloatControlsProperties + { + using NativeType = VkPhysicalDeviceFloatControlsProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 
shaderRoundingModeRTEFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , denormBehaviorIndependence( denormBehaviorIndependence_ ) + , roundingModeIndependence( roundingModeIndependence_ ) + , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) + , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) + , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) + , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) + , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) + , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) + , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) + , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) + , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) + , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) + , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) + , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) + , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) + , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) + , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFloatControlsProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFloatControlsProperties & operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + denormBehaviorIndependence, + roundingModeIndependence, + shaderSignedZeroInfNanPreserveFloat16, + shaderSignedZeroInfNanPreserveFloat32, + shaderSignedZeroInfNanPreserveFloat64, + shaderDenormPreserveFloat16, + shaderDenormPreserveFloat32, + shaderDenormPreserveFloat64, + shaderDenormFlushToZeroFloat16, + shaderDenormFlushToZeroFloat32, + shaderDenormFlushToZeroFloat64, + shaderRoundingModeRTEFloat16, + shaderRoundingModeRTEFloat32, + shaderRoundingModeRTEFloat64, + shaderRoundingModeRTZFloat16, + shaderRoundingModeRTZFloat32, + shaderRoundingModeRTZFloat64 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFloatControlsProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); 
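+      // With VULKAN_HPP_USE_REFLECT defined, equality rides on std::tuple's memberwise
+      // operator==: reflect() returns std::tie over all nineteen members, so this one-line
+      // comparison is exactly the long field-by-field chain in the #else branch below.
+      // Illustrative equivalent (a sketch, not generated code):
+      //
+      //   auto view = []( PhysicalDeviceFloatControlsProperties const & p )
+      //   { return std::tie( p.sType, p.pNext, p.denormBehaviorIndependence /* , ...and so on */ ); };
+      //   bool equal = ( view( *this ) == view( rhs ) );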
+# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && + ( roundingModeIndependence == rhs.roundingModeIndependence ) && + ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && + ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && + ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) && + ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) && + ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) && + ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && + ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) && + ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && + ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && + ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ); +# endif + } + + bool operator!=( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFloatControlsProperties; + }; + + using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties; + + struct PhysicalDeviceFragmentDensityMap2FeaturesEXT + { + using NativeType = VkPhysicalDeviceFragmentDensityMap2FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
PhysicalDeviceFragmentDensityMap2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , fragmentDensityMapDeferred( fragmentDensityMapDeferred_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMap2FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & + setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapDeferred = fragmentDensityMapDeferred_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityMapDeferred ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT; + }; + + struct PhysicalDeviceFragmentDensityMap2PropertiesEXT + { + using NativeType = VkPhysicalDeviceFragmentDensityMap2PropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 
subsampledCoarseReconstructionEarlyAccess_ = {}, + uint32_t maxSubsampledArrayLayers_ = {}, + uint32_t maxDescriptorSetSubsampledSamplers_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , subsampledLoads( subsampledLoads_ ) + , subsampledCoarseReconstructionEarlyAccess( subsampledCoarseReconstructionEarlyAccess_ ) + , maxSubsampledArrayLayers( maxSubsampledArrayLayers_ ) + , maxDescriptorSetSubsampledSamplers( maxDescriptorSetSubsampledSamplers_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentDensityMap2PropertiesEXT( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMap2PropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subsampledLoads, subsampledCoarseReconstructionEarlyAccess, maxSubsampledArrayLayers, maxDescriptorSetSubsampledSamplers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subsampledLoads == rhs.subsampledLoads ) && + ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess ) && + ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers ) && ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {}; + uint32_t maxSubsampledArrayLayers = {}; + uint32_t maxDescriptorSetSubsampledSamplers = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT; + }; + + struct PhysicalDeviceFragmentDensityMapFeaturesEXT + { + using NativeType = VkPhysicalDeviceFragmentDensityMapFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; + +#if 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , fragmentDensityMap( fragmentDensityMap_ ) + , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ ) + , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & + setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMap = fragmentDensityMap_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & + setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapDynamic = fragmentDensityMapDynamic_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & + setFragmentDensityMapNonSubsampledImages( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityMap, fragmentDensityMapDynamic, fragmentDensityMapNonSubsampledImages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMap == rhs.fragmentDensityMap ) && + ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic ) && + ( fragmentDensityMapNonSubsampledImages == 
rhs.fragmentDensityMapNonSubsampledImages ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT; + }; + + struct PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM + { + using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , fragmentDensityMapOffset( fragmentDensityMapOffset_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & + operator=( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & operator=( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & + setFragmentDensityMapOffset( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapOffset = fragmentDensityMapOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityMapOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == 
rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapOffset == rhs.fragmentDensityMapOffset ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + }; + + struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM + { + using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , fragmentDensityOffsetGranularity( fragmentDensityOffsetGranularity_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM & + operator=( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM & operator=( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityOffsetGranularity ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityOffsetGranularity == rhs.fragmentDensityOffsetGranularity ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity = {}; + 
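+    // Fragment density map offsets supplied at render-pass end (via
+    // VkSubpassFragmentDensityMapOffsetEndInfoQCOM) must be integer multiples of this
+    // granularity. A hedged validity check, where 'offset' is an assumed
+    // VULKAN_HPP_NAMESPACE::Offset2D, not part of this header:
+    //
+    //   bool aligned = ( offset.x % static_cast<int32_t>( fragmentDensityOffsetGranularity.width ) == 0 ) &&
+    //                  ( offset.y % static_cast<int32_t>( fragmentDensityOffsetGranularity.height ) == 0 );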
}; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + }; + + struct PhysicalDeviceFragmentDensityMapPropertiesEXT + { + using NativeType = VkPhysicalDeviceFragmentDensityMapPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ) + , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ) + , fragmentDensityInvocations( fragmentDensityInvocations_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minFragmentDensityTexelSize, maxFragmentDensityTexelSize, fragmentDensityInvocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) && + ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {}; + }; + + template <> + struct CppType + { + using Type = 
PhysicalDeviceFragmentDensityMapPropertiesEXT; + }; + + struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHR + { + using NativeType = VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , fragmentShaderBarycentric( fragmentShaderBarycentric_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & + operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & + setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderBarycentric = fragmentShaderBarycentric_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentShaderBarycentric ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; 
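+    // Upstream, these specializations are declared as
+    // CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR>;
+    // the mapping lets StructureChain recover the C++ struct type from an sType enumerant and
+    // type-check pNext chains. Hedged sketch of a chained feature query ('physicalDevice' is an
+    // assumed vk::PhysicalDevice handle):
+    //
+    //   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
+    //                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR>();
+    //   bool barycentrics =
+    //     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR>().fragmentShaderBarycentric;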
+ }; + + using PhysicalDeviceFragmentShaderBarycentricFeaturesNV = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + + struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHR + { + using NativeType = VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 triStripVertexOrderIndependentOfProvokingVertex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , triStripVertexOrderIndependentOfProvokingVertex( triStripVertexOrderIndependentOfProvokingVertex_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & + operator=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, triStripVertexOrderIndependentOfProvokingVertex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( triStripVertexOrderIndependentOfProvokingVertex == rhs.triStripVertexOrderIndependentOfProvokingVertex ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 triStripVertexOrderIndependentOfProvokingVertex = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + }; + + struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT + { + using NativeType = VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ) + , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ) + , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShaderInterlockFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentShaderSampleInterlock, fragmentShaderPixelInterlock, fragmentShaderShadingRateInterlock ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == 
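+      /* Editor's illustrative sketch, not part of the generated header (assumes
+         VK_EXT_fragment_shader_interlock is supported): the same feature struct is passed
+         back at device creation to enable the selected features.
+           VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT interlockFeatures;
+           interlockFeatures.fragmentShaderPixelInterlock = VK_TRUE;
+           VULKAN_HPP_NAMESPACE::DeviceCreateInfo deviceCreateInfo;
+           deviceCreateInfo.pNext = &interlockFeatures;   // enable via the pNext chain
+      */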
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) && + ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock ) && + ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT; + }; + + struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV + { + using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , fragmentShadingRateEnums( fragmentShadingRateEnums_ ) + , supersampleFragmentShadingRates( supersampleFragmentShadingRates_ ) + , noInvocationFragmentShadingRates( noInvocationFragmentShadingRates_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + setFragmentShadingRateEnums( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShadingRateEnums = fragmentShadingRateEnums_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + setSupersampleFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT + { + supersampleFragmentShadingRates = supersampleFragmentShadingRates_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + 
setNoInvocationFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT + { + noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentShadingRateEnums, supersampleFragmentShadingRates, noInvocationFragmentShadingRates ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums ) && + ( supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates ) && + ( noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums = {}; + VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates = {}; + VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + }; + + struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV + { + using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxFragmentShadingRateInvocationCount( maxFragmentShadingRateInvocationCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & + operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( 
VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & + setMaxFragmentShadingRateInvocationCount( VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ ) VULKAN_HPP_NOEXCEPT + { + maxFragmentShadingRateInvocationCount = maxFragmentShadingRateInvocationCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxFragmentShadingRateInvocationCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + }; + + struct PhysicalDeviceFragmentShadingRateFeaturesKHR + { + using NativeType = VkPhysicalDeviceFragmentShadingRateFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pipelineFragmentShadingRate( pipelineFragmentShadingRate_ ) + , primitiveFragmentShadingRate( primitiveFragmentShadingRate_ ) + , attachmentFragmentShadingRate( attachmentFragmentShadingRate_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : 
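+      /* Editor's illustrative sketch, not part of the generated header (assumes the
+         pipelineFragmentShadingRate feature was enabled and `commandBuffer` exists):
+         selecting a 2x2 per-draw shading rate with the C entry point.
+           VkExtent2D fragmentSize{ 2, 2 };
+           VkFragmentShadingRateCombinerOpKHR ops[2] = { VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
+                                                         VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR };
+           vkCmdSetFragmentShadingRateKHR( commandBuffer, &fragmentSize, ops );
+      */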
PhysicalDeviceFragmentShadingRateFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & + setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + pipelineFragmentShadingRate = pipelineFragmentShadingRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & + setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + primitiveFragmentShadingRate = primitiveFragmentShadingRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & + setAttachmentFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + attachmentFragmentShadingRate = attachmentFragmentShadingRate_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineFragmentShadingRate, primitiveFragmentShadingRate, attachmentFragmentShadingRate ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate ) && + ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate ) && ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR; + }; + + struct PhysicalDeviceFragmentShadingRateKHR + { + using NativeType = VkPhysicalDeviceFragmentShadingRateKHR; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , sampleCounts( sampleCounts_ ) + , fragmentSize( fragmentSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShadingRateKHR & operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateKHR & operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, sampleCounts, fragmentSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleCounts == rhs.sampleCounts ) && ( fragmentSize == rhs.fragmentSize ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateKHR; + }; + + struct PhysicalDeviceFragmentShadingRatePropertiesKHR + { + using NativeType = VkPhysicalDeviceFragmentShadingRatePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( + VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize_ = {}, + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps_ = 
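+      /* Editor's illustrative sketch, not part of the generated header: the supported
+         shading rates that populate PhysicalDeviceFragmentShadingRateKHR are enumerated
+         with the usual two-call pattern (`physicalDevice` is assumed; needs <vector>).
+           uint32_t count = 0;
+           vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, &count, nullptr );
+           std::vector<VkPhysicalDeviceFragmentShadingRateKHR> rates(
+             count, VkPhysicalDeviceFragmentShadingRateKHR{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR } );
+           vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, &count, rates.data() );
+      */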
{}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize_ = {}, + uint32_t maxFragmentSizeAspectRatio_ = {}, + uint32_t maxFragmentShadingRateCoverageSamples_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , minFragmentShadingRateAttachmentTexelSize( minFragmentShadingRateAttachmentTexelSize_ ) + , maxFragmentShadingRateAttachmentTexelSize( maxFragmentShadingRateAttachmentTexelSize_ ) + , maxFragmentShadingRateAttachmentTexelSizeAspectRatio( maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ ) + , primitiveFragmentShadingRateWithMultipleViewports( primitiveFragmentShadingRateWithMultipleViewports_ ) + , layeredShadingRateAttachments( layeredShadingRateAttachments_ ) + , fragmentShadingRateNonTrivialCombinerOps( fragmentShadingRateNonTrivialCombinerOps_ ) + , maxFragmentSize( maxFragmentSize_ ) + , maxFragmentSizeAspectRatio( maxFragmentSizeAspectRatio_ ) + , maxFragmentShadingRateCoverageSamples( maxFragmentShadingRateCoverageSamples_ ) + , maxFragmentShadingRateRasterizationSamples( maxFragmentShadingRateRasterizationSamples_ ) + , fragmentShadingRateWithShaderDepthStencilWrites( fragmentShadingRateWithShaderDepthStencilWrites_ ) + , fragmentShadingRateWithSampleMask( fragmentShadingRateWithSampleMask_ ) + , fragmentShadingRateWithShaderSampleMask( fragmentShadingRateWithShaderSampleMask_ ) + , fragmentShadingRateWithConservativeRasterization( fragmentShadingRateWithConservativeRasterization_ ) + , fragmentShadingRateWithFragmentShaderInterlock( fragmentShadingRateWithFragmentShaderInterlock_ ) + , fragmentShadingRateWithCustomSampleLocations( fragmentShadingRateWithCustomSampleLocations_ ) + , fragmentShadingRateStrictMultiplyCombiner( fragmentShadingRateStrictMultiplyCombiner_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShadingRatePropertiesKHR( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRatePropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + minFragmentShadingRateAttachmentTexelSize, + maxFragmentShadingRateAttachmentTexelSize, + maxFragmentShadingRateAttachmentTexelSizeAspectRatio, + primitiveFragmentShadingRateWithMultipleViewports, + layeredShadingRateAttachments, + fragmentShadingRateNonTrivialCombinerOps, + maxFragmentSize, + maxFragmentSizeAspectRatio, + maxFragmentShadingRateCoverageSamples, + maxFragmentShadingRateRasterizationSamples, + fragmentShadingRateWithShaderDepthStencilWrites, + fragmentShadingRateWithSampleMask, + fragmentShadingRateWithShaderSampleMask, + fragmentShadingRateWithConservativeRasterization, + fragmentShadingRateWithFragmentShaderInterlock, + fragmentShadingRateWithCustomSampleLocations, + fragmentShadingRateStrictMultiplyCombiner ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize ) && + ( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize ) && + ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio ) && + ( primitiveFragmentShadingRateWithMultipleViewports == rhs.primitiveFragmentShadingRateWithMultipleViewports ) && + ( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments ) && + ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps ) && ( maxFragmentSize == rhs.maxFragmentSize ) && + ( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio ) && + ( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples ) && + ( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples ) && + ( fragmentShadingRateWithShaderDepthStencilWrites == rhs.fragmentShadingRateWithShaderDepthStencilWrites ) && + ( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask ) && + ( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask ) && + ( fragmentShadingRateWithConservativeRasterization == rhs.fragmentShadingRateWithConservativeRasterization ) && + ( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock ) && + ( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations ) && + ( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {}; + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {}; + VULKAN_HPP_NAMESPACE::Bool32 
primitiveFragmentShadingRateWithMultipleViewports = {}; + VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize = {}; + uint32_t maxFragmentSizeAspectRatio = {}; + uint32_t maxFragmentShadingRateCoverageSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR; + }; + + struct PhysicalDeviceFrameBoundaryFeaturesEXT + { + using NativeType = VkPhysicalDeviceFrameBoundaryFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFrameBoundaryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 frameBoundary_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , frameBoundary( frameBoundary_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFrameBoundaryFeaturesEXT( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFrameBoundaryFeaturesEXT( VkPhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFrameBoundaryFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFrameBoundaryFeaturesEXT & operator=( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceFrameBoundaryFeaturesEXT & operator=( VkPhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFrameBoundaryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFrameBoundaryFeaturesEXT & setFrameBoundary( VULKAN_HPP_NAMESPACE::Bool32 frameBoundary_ ) VULKAN_HPP_NOEXCEPT + { + frameBoundary = frameBoundary_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceFrameBoundaryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFrameBoundaryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, frameBoundary ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFrameBoundaryFeaturesEXT const & ) const = default; +#else + bool operator==( 
PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( frameBoundary == rhs.frameBoundary ); +# endif + } + + bool operator!=( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 frameBoundary = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFrameBoundaryFeaturesEXT; + }; + + struct PhysicalDeviceGlobalPriorityQueryFeaturesKHR + { + using NativeType = VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , globalPriorityQuery( globalPriorityQuery_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGlobalPriorityQueryFeaturesKHR( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGlobalPriorityQueryFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceGlobalPriorityQueryFeaturesKHR & operator=( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceGlobalPriorityQueryFeaturesKHR & operator=( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesKHR & + setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT + { + globalPriorityQuery = globalPriorityQuery_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, globalPriorityQuery ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriorityQuery == rhs.globalPriorityQuery 
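+      /* Editor's illustrative sketch, not part of the generated header (assumes
+         VK_KHR_global_priority): requesting a high-priority queue; with globalPriorityQuery
+         enabled, an unsupported priority is reported instead of failing device creation.
+           VkDeviceQueueGlobalPriorityCreateInfoKHR priorityInfo{ VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR };
+           priorityInfo.globalPriority = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR;
+           VkDeviceQueueCreateInfo queueInfo{ VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO };
+           queueInfo.pNext = &priorityInfo;
+      */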
); +# endif + } + + bool operator!=( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceGlobalPriorityQueryFeaturesKHR; + }; + + using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeaturesKHR; + + struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT + { + using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , graphicsPipelineLibrary( graphicsPipelineLibrary_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & + setGraphicsPipelineLibrary( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ ) VULKAN_HPP_NOEXCEPT + { + graphicsPipelineLibrary = graphicsPipelineLibrary_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, graphicsPipelineLibrary ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( graphicsPipelineLibrary == rhs.graphicsPipelineLibrary ); +# endif + } + + bool 
operator!=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + }; + + struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT + { + using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , graphicsPipelineLibraryFastLinking( graphicsPipelineLibraryFastLinking_ ) + , graphicsPipelineLibraryIndependentInterpolationDecoration( graphicsPipelineLibraryIndependentInterpolationDecoration_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & + operator=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & + setGraphicsPipelineLibraryFastLinking( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking_ ) VULKAN_HPP_NOEXCEPT + { + graphicsPipelineLibraryFastLinking = graphicsPipelineLibraryFastLinking_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & setGraphicsPipelineLibraryIndependentInterpolationDecoration( + VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ ) VULKAN_HPP_NOEXCEPT + { + graphicsPipelineLibraryIndependentInterpolationDecoration = graphicsPipelineLibraryIndependentInterpolationDecoration_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# 
endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, graphicsPipelineLibraryFastLinking, graphicsPipelineLibraryIndependentInterpolationDecoration ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( graphicsPipelineLibraryFastLinking == rhs.graphicsPipelineLibraryFastLinking ) && + ( graphicsPipelineLibraryIndependentInterpolationDecoration == rhs.graphicsPipelineLibraryIndependentInterpolationDecoration ); +# endif + } + + bool operator!=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking = {}; + VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + }; + + struct PhysicalDeviceGroupProperties + { + using NativeType = VkPhysicalDeviceGroupProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = {}, + std::array const & physicalDevices_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , physicalDeviceCount( physicalDeviceCount_ ) + , physicalDevices( physicalDevices_ ) + , subsetAllocation( subsetAllocation_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGroupProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, physicalDeviceCount, physicalDevices, subsetAllocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceGroupProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceGroupProperties const & rhs ) const 
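+      /* Editor's illustrative sketch, not part of the generated header: device groups are
+         enumerated from the instance (`instance` is assumed; needs <vector>); each returned
+         PhysicalDeviceGroupProperties lists physicalDeviceCount usable handles.
+           uint32_t groupCount = 0;
+           vkEnumeratePhysicalDeviceGroups( instance, &groupCount, nullptr );
+           std::vector<VkPhysicalDeviceGroupProperties> groups(
+             groupCount, VkPhysicalDeviceGroupProperties{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES } );
+           vkEnumeratePhysicalDeviceGroups( instance, &groupCount, groups.data() );
+      */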
VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && + ( physicalDevices == rhs.physicalDevices ) && ( subsetAllocation == rhs.subsetAllocation ); +# endif + } + + bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties; + void * pNext = {}; + uint32_t physicalDeviceCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D physicalDevices = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceGroupProperties; + }; + + using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; + + struct PhysicalDeviceHostImageCopyFeaturesEXT + { + using NativeType = VkPhysicalDeviceHostImageCopyFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , hostImageCopy( hostImageCopy_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeaturesEXT( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostImageCopyFeaturesEXT( VkPhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHostImageCopyFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceHostImageCopyFeaturesEXT & operator=( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceHostImageCopyFeaturesEXT & operator=( VkPhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeaturesEXT & setHostImageCopy( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ ) VULKAN_HPP_NOEXCEPT + { + hostImageCopy = hostImageCopy_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceHostImageCopyFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostImageCopyFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hostImageCopy ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHostImageCopyFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostImageCopy 
== rhs.hostImageCopy ); +# endif + } + + bool operator!=( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostImageCopyFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceHostImageCopyFeaturesEXT; + }; + + struct PhysicalDeviceHostImageCopyPropertiesEXT + { + using NativeType = VkPhysicalDeviceHostImageCopyPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT( uint32_t copySrcLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ = {}, + uint32_t copyDstLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ = {}, + std::array const & optimalTilingLayoutUUID_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , copySrcLayoutCount( copySrcLayoutCount_ ) + , pCopySrcLayouts( pCopySrcLayouts_ ) + , copyDstLayoutCount( copyDstLayoutCount_ ) + , pCopyDstLayouts( pCopyDstLayouts_ ) + , optimalTilingLayoutUUID( optimalTilingLayoutUUID_ ) + , identicalMemoryTypeRequirements( identicalMemoryTypeRequirements_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostImageCopyPropertiesEXT( VkPhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHostImageCopyPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceHostImageCopyPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copySrcLayouts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copyDstLayouts_ = {}, + std::array const & optimalTilingLayoutUUID_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ) + , copySrcLayoutCount( static_cast( copySrcLayouts_.size() ) ) + , pCopySrcLayouts( copySrcLayouts_.data() ) + , copyDstLayoutCount( static_cast( copyDstLayouts_.size() ) ) + , pCopyDstLayouts( copyDstLayouts_.data() ) + , optimalTilingLayoutUUID( optimalTilingLayoutUUID_ ) + , identicalMemoryTypeRequirements( identicalMemoryTypeRequirements_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PhysicalDeviceHostImageCopyPropertiesEXT & operator=( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceHostImageCopyPropertiesEXT & operator=( VkPhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & setCopySrcLayoutCount( uint32_t copySrcLayoutCount_ ) VULKAN_HPP_NOEXCEPT + { + copySrcLayoutCount = copySrcLayoutCount_; + return 
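+      /* Editor's illustrative sketch, not part of the generated header (assumes
+         VK_EXT_host_image_copy was enabled; `device` and the source buffer `pixels` are
+         hypothetical, and dstImage/dstImageLayout setup is omitted for brevity): copying
+         host memory straight into an image without a staging buffer or command buffer.
+           VkMemoryToImageCopyEXT region{ VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT };
+           region.pHostPointer = pixels;
+           VkCopyMemoryToImageInfoEXT copyInfo{ VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT };
+           copyInfo.regionCount = 1;
+           copyInfo.pRegions    = &region;
+           vkCopyMemoryToImageEXT( device, &copyInfo );
+      */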
*this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & + setPCopySrcLayouts( VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pCopySrcLayouts = pCopySrcLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceHostImageCopyPropertiesEXT & + setCopySrcLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copySrcLayouts_ ) VULKAN_HPP_NOEXCEPT + { + copySrcLayoutCount = static_cast( copySrcLayouts_.size() ); + pCopySrcLayouts = copySrcLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & setCopyDstLayoutCount( uint32_t copyDstLayoutCount_ ) VULKAN_HPP_NOEXCEPT + { + copyDstLayoutCount = copyDstLayoutCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & + setPCopyDstLayouts( VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pCopyDstLayouts = pCopyDstLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceHostImageCopyPropertiesEXT & + setCopyDstLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copyDstLayouts_ ) VULKAN_HPP_NOEXCEPT + { + copyDstLayoutCount = static_cast( copyDstLayouts_.size() ); + pCopyDstLayouts = copyDstLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & + setOptimalTilingLayoutUUID( std::array optimalTilingLayoutUUID_ ) VULKAN_HPP_NOEXCEPT + { + optimalTilingLayoutUUID = optimalTilingLayoutUUID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & + setIdenticalMemoryTypeRequirements( VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ ) VULKAN_HPP_NOEXCEPT + { + identicalMemoryTypeRequirements = identicalMemoryTypeRequirements_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceHostImageCopyPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostImageCopyPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, copySrcLayoutCount, pCopySrcLayouts, copyDstLayoutCount, pCopyDstLayouts, optimalTilingLayoutUUID, identicalMemoryTypeRequirements ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHostImageCopyPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( copySrcLayoutCount == rhs.copySrcLayoutCount ) && + ( pCopySrcLayouts == rhs.pCopySrcLayouts ) && ( copyDstLayoutCount == rhs.copyDstLayoutCount ) && ( pCopyDstLayouts == rhs.pCopyDstLayouts ) && + ( optimalTilingLayoutUUID == rhs.optimalTilingLayoutUUID ) && ( identicalMemoryTypeRequirements == rhs.identicalMemoryTypeRequirements ); +# endif + } + + bool operator!=( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs 
+
+  struct PhysicalDeviceHostQueryResetFeatures
+  {
+    using NativeType = VkPhysicalDeviceHostQueryResetFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , hostQueryReset( hostQueryReset_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast<PhysicalDeviceHostQueryResetFeatures const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceHostQueryResetFeatures & operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hostQueryReset = hostQueryReset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, hostQueryReset );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceHostQueryResetFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostQueryReset == rhs.hostQueryReset );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset =
{}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceHostQueryResetFeatures; + }; + + using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures; + + struct PhysicalDeviceIDProperties + { + using NativeType = VkPhysicalDeviceIDProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( std::array const & deviceUUID_ = {}, + std::array const & driverUUID_ = {}, + std::array const & deviceLUID_ = {}, + uint32_t deviceNodeMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , deviceUUID( deviceUUID_ ) + , driverUUID( driverUUID_ ) + , deviceLUID( deviceLUID_ ) + , deviceNodeMask( deviceNodeMask_ ) + , deviceLUIDValid( deviceLUIDValid_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceIDProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceIDProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && ( driverUUID == rhs.driverUUID ) && + ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ); +# endif + } + + bool operator!=( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; + uint32_t deviceNodeMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceIDProperties; + }; + + using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; + + struct 
PhysicalDeviceImage2DViewOf3DFeaturesEXT + { + using NativeType = VkPhysicalDeviceImage2DViewOf3DFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , image2DViewOf3D( image2DViewOf3D_ ) + , sampler2DViewOf3D( sampler2DViewOf3D_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImage2DViewOf3DFeaturesEXT( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImage2DViewOf3DFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setImage2DViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT + { + image2DViewOf3D = image2DViewOf3D_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & + setSampler2DViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT + { + sampler2DViewOf3D = sampler2DViewOf3D_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image2DViewOf3D, sampler2DViewOf3D ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image2DViewOf3D == rhs.image2DViewOf3D ) && ( sampler2DViewOf3D == rhs.sampler2DViewOf3D ); +# endif + } + + bool operator!=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D = {}; + }; + + template <> + struct CppType + { + 
using Type = PhysicalDeviceImage2DViewOf3DFeaturesEXT; + }; + + struct PhysicalDeviceImageCompressionControlFeaturesEXT + { + using NativeType = VkPhysicalDeviceImageCompressionControlFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , imageCompressionControl( imageCompressionControl_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageCompressionControlFeaturesEXT( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageCompressionControlFeaturesEXT( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageCompressionControlFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & + setImageCompressionControl( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl_ ) VULKAN_HPP_NOEXCEPT + { + imageCompressionControl = imageCompressionControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceImageCompressionControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageCompressionControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageCompressionControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageCompressionControlFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionControl == rhs.imageCompressionControl ); +# endif + } + + bool operator!=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageCompressionControlFeaturesEXT; + }; + + struct 
PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT + { + using NativeType = VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , imageCompressionControlSwapchain( imageCompressionControlSwapchain_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & + operator=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & + operator=( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & + setImageCompressionControlSwapchain( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain_ ) VULKAN_HPP_NOEXCEPT + { + imageCompressionControlSwapchain = imageCompressionControlSwapchain_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageCompressionControlSwapchain ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionControlSwapchain == rhs.imageCompressionControlSwapchain ); +# endif + } + + bool operator!=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + }; + + struct PhysicalDeviceImageDrmFormatModifierInfoEXT + { + using NativeType = VkPhysicalDeviceImageDrmFormatModifierInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , drmFormatModifier( drmFormatModifier_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageDrmFormatModifierInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , drmFormatModifier( drmFormatModifier_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifier = drmFormatModifier_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT + { + sharingMode = sharingMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceImageDrmFormatModifierInfoEXT & + setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifier, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && ( sharingMode == rhs.sharingMode ) && + ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); +# endif + } + + bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; + const void * pNext = {}; + uint64_t drmFormatModifier = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT; + }; + + struct PhysicalDeviceImageFormatInfo2 + { + using NativeType = VkPhysicalDeviceImageFormatInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , format( format_ ) + , type( type_ ) + , tiling( tiling_ ) + , usage( usage_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageFormatInfo2( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, format, type, tiling, usage, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageFormatInfo2 const & ) const = default; +#else + bool operator==( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && ( tiling == rhs.tiling ) && + ( usage == rhs.usage ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageFormatInfo2; + }; + + using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; + + struct PhysicalDeviceImageProcessing2FeaturesQCOM + { + using NativeType = VkPhysicalDeviceImageProcessing2FeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2FeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , textureBlockMatch2( textureBlockMatch2_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2FeaturesQCOM( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageProcessing2FeaturesQCOM( VkPhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageProcessing2FeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageProcessing2FeaturesQCOM & operator=( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageProcessing2FeaturesQCOM & operator=( VkPhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessing2FeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessing2FeaturesQCOM & + setTextureBlockMatch2( VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch2_ ) VULKAN_HPP_NOEXCEPT + { + textureBlockMatch2 = textureBlockMatch2_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceImageProcessing2FeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessing2FeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, textureBlockMatch2 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageProcessing2FeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureBlockMatch2 == rhs.textureBlockMatch2 ); +# endif + } + + bool operator!=( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch2 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageProcessing2FeaturesQCOM; + }; + + struct PhysicalDeviceImageProcessing2PropertiesQCOM + { + using NativeType = VkPhysicalDeviceImageProcessing2PropertiesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2PropertiesQCOM( VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchWindow_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxBlockMatchWindow( maxBlockMatchWindow_ ) + { + } + + VULKAN_HPP_CONSTEXPR 
PhysicalDeviceImageProcessing2PropertiesQCOM( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageProcessing2PropertiesQCOM( VkPhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageProcessing2PropertiesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageProcessing2PropertiesQCOM & operator=( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageProcessing2PropertiesQCOM & operator=( VkPhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceImageProcessing2PropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessing2PropertiesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxBlockMatchWindow ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageProcessing2PropertiesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxBlockMatchWindow == rhs.maxBlockMatchWindow ); +# endif + } + + bool operator!=( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchWindow = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageProcessing2PropertiesQCOM; + }; + + struct PhysicalDeviceImageProcessingFeaturesQCOM + { + using NativeType = VkPhysicalDeviceImageProcessingFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , textureSampleWeighted( textureSampleWeighted_ ) + , textureBoxFilter( textureBoxFilter_ ) + , textureBlockMatch( textureBlockMatch_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageProcessingFeaturesQCOM( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageProcessingFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageProcessingFeaturesQCOM & operator=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + 
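+    // [editorial note — illustrative, not generated code] The assignment operator below is
+    // vulkan.hpp's usual C-interop hook: a raw VkPhysicalDeviceImageProcessingFeaturesQCOM
+    // filled through the C API can be assigned straight to this wrapper, since the two types
+    // are layout-compatible. Sketch (queryRawFeatures() is a hypothetical helper):
+    //
+    //   VkPhysicalDeviceImageProcessingFeaturesQCOM raw = queryRawFeatures();
+    //   vk::PhysicalDeviceImageProcessingFeaturesQCOM features;
+    //   features = raw; // memberwise copy via reinterpret_cast
+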
PhysicalDeviceImageProcessingFeaturesQCOM & operator=( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & + setTextureSampleWeighted( VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted_ ) VULKAN_HPP_NOEXCEPT + { + textureSampleWeighted = textureSampleWeighted_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & + setTextureBoxFilter( VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter_ ) VULKAN_HPP_NOEXCEPT + { + textureBoxFilter = textureBoxFilter_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & + setTextureBlockMatch( VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch_ ) VULKAN_HPP_NOEXCEPT + { + textureBlockMatch = textureBlockMatch_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceImageProcessingFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessingFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, textureSampleWeighted, textureBoxFilter, textureBlockMatch ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageProcessingFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureSampleWeighted == rhs.textureSampleWeighted ) && + ( textureBoxFilter == rhs.textureBoxFilter ) && ( textureBlockMatch == rhs.textureBlockMatch ); +# endif + } + + bool operator!=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageProcessingFeaturesQCOM; + }; + + struct PhysicalDeviceImageProcessingPropertiesQCOM + { + using NativeType = VkPhysicalDeviceImageProcessingPropertiesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( uint32_t maxWeightFilterPhases_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxWeightFilterDimension_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchRegion_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxBoxFilterBlockSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxWeightFilterPhases( 
maxWeightFilterPhases_ ) + , maxWeightFilterDimension( maxWeightFilterDimension_ ) + , maxBlockMatchRegion( maxBlockMatchRegion_ ) + , maxBoxFilterBlockSize( maxBoxFilterBlockSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageProcessingPropertiesQCOM( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageProcessingPropertiesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageProcessingPropertiesQCOM & operator=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageProcessingPropertiesQCOM & operator=( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceImageProcessingPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessingPropertiesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxWeightFilterPhases, maxWeightFilterDimension, maxBlockMatchRegion, maxBoxFilterBlockSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageProcessingPropertiesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxWeightFilterPhases == rhs.maxWeightFilterPhases ) && + ( maxWeightFilterDimension == rhs.maxWeightFilterDimension ) && ( maxBlockMatchRegion == rhs.maxBlockMatchRegion ) && + ( maxBoxFilterBlockSize == rhs.maxBoxFilterBlockSize ); +# endif + } + + bool operator!=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM; + void * pNext = {}; + uint32_t maxWeightFilterPhases = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxWeightFilterDimension = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchRegion = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxBoxFilterBlockSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageProcessingPropertiesQCOM; + }; + + struct PhysicalDeviceImageRobustnessFeatures + { + using NativeType = VkPhysicalDeviceImageRobustnessFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageRobustnessFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , robustImageAccess( robustImageAccess_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
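+    // [editorial sketch — not generated code] Feature structs such as this one are queried
+    // through PhysicalDeviceFeatures2 and then enabled by chaining them into
+    // DeviceCreateInfo::pNext. A minimal sketch (queue setup etc. assumed done elsewhere):
+    //
+    //   vk::PhysicalDeviceImageRobustnessFeatures robustness{};
+    //   robustness.setRobustImageAccess( VK_TRUE );
+    //   vk::DeviceCreateInfo createInfo{};
+    //   createInfo.setPNext( &robustness );
+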
PhysicalDeviceImageRobustnessFeatures( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageRobustnessFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageRobustnessFeatures & operator=( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageRobustnessFeatures & operator=( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT + { + robustImageAccess = robustImageAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceImageRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageRobustnessFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, robustImageAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageRobustnessFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ); +# endif + } + + bool operator!=( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageRobustnessFeatures; + }; + + using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures; + + struct PhysicalDeviceImageSlicedViewOf3DFeaturesEXT + { + using NativeType = VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageSlicedViewOf3D_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , imageSlicedViewOf3D( imageSlicedViewOf3D_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & operator=( 
PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & operator=( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & + setImageSlicedViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 imageSlicedViewOf3D_ ) VULKAN_HPP_NOEXCEPT + { + imageSlicedViewOf3D = imageSlicedViewOf3D_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageSlicedViewOf3D ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageSlicedViewOf3D == rhs.imageSlicedViewOf3D ); +# endif + } + + bool operator!=( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageSlicedViewOf3D = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + }; + + struct PhysicalDeviceImageViewImageFormatInfoEXT + { + using NativeType = VkPhysicalDeviceImageViewImageFormatInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , imageViewType( imageViewType_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageViewImageFormatInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & + setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT + { + imageViewType = imageViewType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageViewType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageViewType == rhs.imageViewType ); +# endif + } + + bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageViewImageFormatInfoEXT; + }; + + struct PhysicalDeviceImageViewMinLodFeaturesEXT + { + using NativeType = VkPhysicalDeviceImageViewMinLodFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 minLod_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , minLod( minLod_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageViewMinLodFeaturesEXT( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageViewMinLodFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & 
setMinLod( VULKAN_HPP_NAMESPACE::Bool32 minLod_ ) VULKAN_HPP_NOEXCEPT + { + minLod = minLod_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageViewMinLodFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minLod ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageViewMinLodFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minLod == rhs.minLod ); +# endif + } + + bool operator!=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 minLod = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageViewMinLodFeaturesEXT; + }; + + struct PhysicalDeviceImagelessFramebufferFeatures + { + using NativeType = VkPhysicalDeviceImagelessFramebufferFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , imagelessFramebuffer( imagelessFramebuffer_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImagelessFramebufferFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImagelessFramebufferFeatures & operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & + setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT + { + imagelessFramebuffer = imagelessFramebuffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImagelessFramebufferFeatures 
&() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imagelessFramebuffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ); +# endif + } + + bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImagelessFramebufferFeatures; + }; + + using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures; + + struct PhysicalDeviceIndexTypeUint8FeaturesEXT + { + using NativeType = VkPhysicalDeviceIndexTypeUint8FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , indexTypeUint8( indexTypeUint8_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceIndexTypeUint8FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeUint8 = indexTypeUint8_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, indexTypeUint8 ); + 
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indexTypeUint8 == rhs.indexTypeUint8 );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
+    void *                              pNext          = {};
+    VULKAN_HPP_NAMESPACE::Bool32        indexTypeUint8 = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT>
+  {
+    using Type = PhysicalDeviceIndexTypeUint8FeaturesEXT;
+  };
+
+  struct PhysicalDeviceInheritedViewportScissorFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceInheritedViewportScissorFeaturesNV;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ = {},
+                                                                           void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , inheritedViewportScissor2D( inheritedViewportScissor2D_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceInheritedViewportScissorFeaturesNV( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceInheritedViewportScissorFeaturesNV( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceInheritedViewportScissorFeaturesNV( *reinterpret_cast<PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV &
+      setInheritedViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inheritedViewportScissor2D = inheritedViewportScissor2D_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceInheritedViewportScissorFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceInheritedViewportScissorFeaturesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, inheritedViewportScissor2D );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceInheritedViewportScissorFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inheritedViewportScissor2D == rhs.inheritedViewportScissor2D );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                      = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;
+    void *                              pNext                      = {};
+    VULKAN_HPP_NAMESPACE::Bool32        inheritedViewportScissor2D = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV>
+  {
+    using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV;
+  };
+
+  struct PhysicalDeviceInlineUniformBlockFeatures
+  {
+    using NativeType = VkPhysicalDeviceInlineUniformBlockFeatures;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_                                 = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {},
+                                                                   void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , inlineUniformBlock( inlineUniformBlock_ )
+      , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceInlineUniformBlockFeatures( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceInlineUniformBlockFeatures( *reinterpret_cast<PhysicalDeviceInlineUniformBlockFeatures const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceInlineUniformBlockFeatures & operator=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceInlineUniformBlockFeatures & operator=( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures &
+      setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inlineUniformBlock = inlineUniformBlock_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setDescriptorBindingInlineUniformBlockUpdateAfterBind(
+      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceInlineUniformBlockFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceInlineUniformBlockFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInlineUniformBlockFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inlineUniformBlock == rhs.inlineUniformBlock ) && + ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ); +# endif + } + + bool operator!=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceInlineUniformBlockFeatures; + }; + + using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures; + + struct PhysicalDeviceInlineUniformBlockProperties + { + using NativeType = VkPhysicalDeviceInlineUniformBlockProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( uint32_t maxInlineUniformBlockSize_ = {}, + uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ) + , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ) + , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ) + , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ) + , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockProperties( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInlineUniformBlockProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceInlineUniformBlockProperties & operator=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceInlineUniformBlockProperties & operator=( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceInlineUniformBlockProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + 
operator VkPhysicalDeviceInlineUniformBlockProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxInlineUniformBlockSize, + maxPerStageDescriptorInlineUniformBlocks, + maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, + maxDescriptorSetInlineUniformBlocks, + maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInlineUniformBlockProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) && + ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) && + ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) && + ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) && + ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); +# endif + } + + bool operator!=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockProperties; + void * pNext = {}; + uint32_t maxInlineUniformBlockSize = {}; + uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; + uint32_t maxDescriptorSetInlineUniformBlocks = {}; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceInlineUniformBlockProperties; + }; + + using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties; + + struct PhysicalDeviceInvocationMaskFeaturesHUAWEI + { + using NativeType = VkPhysicalDeviceInvocationMaskFeaturesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , invocationMask( invocationMask_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInvocationMaskFeaturesHUAWEI( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInvocationMaskFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + 
*this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setInvocationMask( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ ) VULKAN_HPP_NOEXCEPT + { + invocationMask = invocationMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, invocationMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( invocationMask == rhs.invocationMask ); +# endif + } + + bool operator!=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 invocationMask = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceInvocationMaskFeaturesHUAWEI; + }; + + struct PhysicalDeviceLayeredDriverPropertiesMSFT + { + using NativeType = VkPhysicalDeviceLayeredDriverPropertiesMSFT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLayeredDriverPropertiesMSFT( + VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT underlyingAPI_ = VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT::eNone, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , underlyingAPI( underlyingAPI_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLayeredDriverPropertiesMSFT( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLayeredDriverPropertiesMSFT( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLayeredDriverPropertiesMSFT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLayeredDriverPropertiesMSFT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredDriverPropertiesMSFT &() VULKAN_HPP_NOEXCEPT + { + 
return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, underlyingAPI ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLayeredDriverPropertiesMSFT const & ) const = default; +#else + bool operator==( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( underlyingAPI == rhs.underlyingAPI ); +# endif + } + + bool operator!=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT underlyingAPI = VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT::eNone; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLayeredDriverPropertiesMSFT; + }; + + struct PhysicalDeviceLegacyDitheringFeaturesEXT + { + using NativeType = VkPhysicalDeviceLegacyDitheringFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , legacyDithering( legacyDithering_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLegacyDitheringFeaturesEXT( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLegacyDitheringFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setLegacyDithering( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ ) VULKAN_HPP_NOEXCEPT + { + legacyDithering = legacyDithering_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceLegacyDitheringFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyDitheringFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, legacyDithering ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLegacyDitheringFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( legacyDithering == rhs.legacyDithering ); +# endif + } + + bool operator!=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 legacyDithering = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLegacyDitheringFeaturesEXT; + }; + + struct PhysicalDeviceLimits + { + using NativeType = VkPhysicalDeviceLimits; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {}, + uint32_t maxImageDimension2D_ = {}, + uint32_t maxImageDimension3D_ = {}, + uint32_t maxImageDimensionCube_ = {}, + uint32_t maxImageArrayLayers_ = {}, + uint32_t maxTexelBufferElements_ = {}, + uint32_t maxUniformBufferRange_ = {}, + uint32_t maxStorageBufferRange_ = {}, + uint32_t maxPushConstantsSize_ = {}, + uint32_t maxMemoryAllocationCount_ = {}, + uint32_t maxSamplerAllocationCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, + uint32_t maxBoundDescriptorSets_ = {}, + uint32_t maxPerStageDescriptorSamplers_ = {}, + uint32_t maxPerStageDescriptorUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorSampledImages_ = {}, + uint32_t maxPerStageDescriptorStorageImages_ = {}, + uint32_t maxPerStageDescriptorInputAttachments_ = {}, + uint32_t maxPerStageResources_ = {}, + uint32_t maxDescriptorSetSamplers_ = {}, + uint32_t maxDescriptorSetUniformBuffers_ = {}, + uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetStorageBuffers_ = {}, + uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetSampledImages_ = {}, + uint32_t maxDescriptorSetStorageImages_ = {}, + uint32_t maxDescriptorSetInputAttachments_ = {}, + uint32_t maxVertexInputAttributes_ = {}, + uint32_t maxVertexInputBindings_ = {}, + uint32_t maxVertexInputAttributeOffset_ = {}, + uint32_t maxVertexInputBindingStride_ = {}, + uint32_t maxVertexOutputComponents_ = {}, + uint32_t maxTessellationGenerationLevel_ = {}, + uint32_t maxTessellationPatchSize_ = {}, + uint32_t maxTessellationControlPerVertexInputComponents_ = {}, + uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, + uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, + uint32_t maxTessellationControlTotalOutputComponents_ = {}, + uint32_t maxTessellationEvaluationInputComponents_ = {}, + uint32_t maxTessellationEvaluationOutputComponents_ = {}, + uint32_t maxGeometryShaderInvocations_ = {}, + uint32_t maxGeometryInputComponents_ = {}, + uint32_t maxGeometryOutputComponents_ = {}, + uint32_t maxGeometryOutputVertices_ = {}, + uint32_t maxGeometryTotalOutputComponents_ = {}, + uint32_t maxFragmentInputComponents_ = {}, + uint32_t maxFragmentOutputAttachments_ = {}, + uint32_t maxFragmentDualSrcAttachments_ = {}, + uint32_t maxFragmentCombinedOutputResources_ = {}, + 
uint32_t maxComputeSharedMemorySize_ = {}, + std::array const & maxComputeWorkGroupCount_ = {}, + uint32_t maxComputeWorkGroupInvocations_ = {}, + std::array const & maxComputeWorkGroupSize_ = {}, + uint32_t subPixelPrecisionBits_ = {}, + uint32_t subTexelPrecisionBits_ = {}, + uint32_t mipmapPrecisionBits_ = {}, + uint32_t maxDrawIndexedIndexValue_ = {}, + uint32_t maxDrawIndirectCount_ = {}, + float maxSamplerLodBias_ = {}, + float maxSamplerAnisotropy_ = {}, + uint32_t maxViewports_ = {}, + std::array const & maxViewportDimensions_ = {}, + std::array const & viewportBoundsRange_ = {}, + uint32_t viewportSubPixelBits_ = {}, + size_t minMemoryMapAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, + int32_t minTexelOffset_ = {}, + uint32_t maxTexelOffset_ = {}, + int32_t minTexelGatherOffset_ = {}, + uint32_t maxTexelGatherOffset_ = {}, + float minInterpolationOffset_ = {}, + float maxInterpolationOffset_ = {}, + uint32_t subPixelInterpolationOffsetBits_ = {}, + uint32_t maxFramebufferWidth_ = {}, + uint32_t maxFramebufferHeight_ = {}, + uint32_t maxFramebufferLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, + uint32_t maxColorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, + uint32_t maxSampleMaskWords_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, + float timestampPeriod_ = {}, + uint32_t maxClipDistances_ = {}, + uint32_t maxCullDistances_ = {}, + uint32_t maxCombinedClipAndCullDistances_ = {}, + uint32_t discreteQueuePriorities_ = {}, + std::array const & pointSizeRange_ = {}, + std::array const & lineWidthRange_ = {}, + float pointSizeGranularity_ = {}, + float lineWidthGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxImageDimension1D( maxImageDimension1D_ ) + , maxImageDimension2D( maxImageDimension2D_ ) + , maxImageDimension3D( maxImageDimension3D_ ) + , maxImageDimensionCube( maxImageDimensionCube_ ) + , maxImageArrayLayers( maxImageArrayLayers_ ) + , maxTexelBufferElements( maxTexelBufferElements_ ) + , maxUniformBufferRange( maxUniformBufferRange_ ) + , maxStorageBufferRange( maxStorageBufferRange_ ) + , maxPushConstantsSize( maxPushConstantsSize_ ) + , maxMemoryAllocationCount( maxMemoryAllocationCount_ ) + , maxSamplerAllocationCount( maxSamplerAllocationCount_ ) + , bufferImageGranularity( bufferImageGranularity_ ) + , sparseAddressSpaceSize( sparseAddressSpaceSize_ ) + , maxBoundDescriptorSets( maxBoundDescriptorSets_ ) 
+ , maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ ) + , maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ ) + , maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ ) + , maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ ) + , maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ ) + , maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ ) + , maxPerStageResources( maxPerStageResources_ ) + , maxDescriptorSetSamplers( maxDescriptorSetSamplers_ ) + , maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ ) + , maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ ) + , maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ ) + , maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ ) + , maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ ) + , maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ ) + , maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ ) + , maxVertexInputAttributes( maxVertexInputAttributes_ ) + , maxVertexInputBindings( maxVertexInputBindings_ ) + , maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ ) + , maxVertexInputBindingStride( maxVertexInputBindingStride_ ) + , maxVertexOutputComponents( maxVertexOutputComponents_ ) + , maxTessellationGenerationLevel( maxTessellationGenerationLevel_ ) + , maxTessellationPatchSize( maxTessellationPatchSize_ ) + , maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ ) + , maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ ) + , maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ ) + , maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ ) + , maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ ) + , maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ ) + , maxGeometryShaderInvocations( maxGeometryShaderInvocations_ ) + , maxGeometryInputComponents( maxGeometryInputComponents_ ) + , maxGeometryOutputComponents( maxGeometryOutputComponents_ ) + , maxGeometryOutputVertices( maxGeometryOutputVertices_ ) + , maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ ) + , maxFragmentInputComponents( maxFragmentInputComponents_ ) + , maxFragmentOutputAttachments( maxFragmentOutputAttachments_ ) + , maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ ) + , maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ ) + , maxComputeSharedMemorySize( maxComputeSharedMemorySize_ ) + , maxComputeWorkGroupCount( maxComputeWorkGroupCount_ ) + , maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ ) + , maxComputeWorkGroupSize( maxComputeWorkGroupSize_ ) + , subPixelPrecisionBits( subPixelPrecisionBits_ ) + , subTexelPrecisionBits( subTexelPrecisionBits_ ) + , mipmapPrecisionBits( mipmapPrecisionBits_ ) + , maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ ) + , maxDrawIndirectCount( maxDrawIndirectCount_ ) + , maxSamplerLodBias( maxSamplerLodBias_ ) + , maxSamplerAnisotropy( maxSamplerAnisotropy_ ) + , maxViewports( maxViewports_ ) + , maxViewportDimensions( maxViewportDimensions_ ) + , viewportBoundsRange( viewportBoundsRange_ ) + , viewportSubPixelBits( viewportSubPixelBits_ ) + , minMemoryMapAlignment( minMemoryMapAlignment_ ) + , 
minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ ) + , minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ ) + , minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ ) + , minTexelOffset( minTexelOffset_ ) + , maxTexelOffset( maxTexelOffset_ ) + , minTexelGatherOffset( minTexelGatherOffset_ ) + , maxTexelGatherOffset( maxTexelGatherOffset_ ) + , minInterpolationOffset( minInterpolationOffset_ ) + , maxInterpolationOffset( maxInterpolationOffset_ ) + , subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ ) + , maxFramebufferWidth( maxFramebufferWidth_ ) + , maxFramebufferHeight( maxFramebufferHeight_ ) + , maxFramebufferLayers( maxFramebufferLayers_ ) + , framebufferColorSampleCounts( framebufferColorSampleCounts_ ) + , framebufferDepthSampleCounts( framebufferDepthSampleCounts_ ) + , framebufferStencilSampleCounts( framebufferStencilSampleCounts_ ) + , framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ ) + , maxColorAttachments( maxColorAttachments_ ) + , sampledImageColorSampleCounts( sampledImageColorSampleCounts_ ) + , sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ ) + , sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ ) + , sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ ) + , storageImageSampleCounts( storageImageSampleCounts_ ) + , maxSampleMaskWords( maxSampleMaskWords_ ) + , timestampComputeAndGraphics( timestampComputeAndGraphics_ ) + , timestampPeriod( timestampPeriod_ ) + , maxClipDistances( maxClipDistances_ ) + , maxCullDistances( maxCullDistances_ ) + , maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ ) + , discreteQueuePriorities( discreteQueuePriorities_ ) + , pointSizeRange( pointSizeRange_ ) + , lineWidthRange( lineWidthRange_ ) + , pointSizeGranularity( pointSizeGranularity_ ) + , lineWidthGranularity( lineWidthGranularity_ ) + , strictLines( strictLines_ ) + , standardSampleLocations( standardSampleLocations_ ) + , optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ ) + , optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ ) + , nonCoherentAtomSize( nonCoherentAtomSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLimits( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + float const &, + float const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + size_t const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize 
const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + int32_t const &, + uint32_t const &, + int32_t const &, + uint32_t const &, + float const &, + float const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + float const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + float const &, + float const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( maxImageDimension1D, + maxImageDimension2D, + maxImageDimension3D, + maxImageDimensionCube, + maxImageArrayLayers, + maxTexelBufferElements, + maxUniformBufferRange, + maxStorageBufferRange, + maxPushConstantsSize, + maxMemoryAllocationCount, + maxSamplerAllocationCount, + bufferImageGranularity, + sparseAddressSpaceSize, + maxBoundDescriptorSets, + maxPerStageDescriptorSamplers, + maxPerStageDescriptorUniformBuffers, + maxPerStageDescriptorStorageBuffers, + maxPerStageDescriptorSampledImages, + maxPerStageDescriptorStorageImages, + maxPerStageDescriptorInputAttachments, + maxPerStageResources, + maxDescriptorSetSamplers, + maxDescriptorSetUniformBuffers, + maxDescriptorSetUniformBuffersDynamic, + maxDescriptorSetStorageBuffers, + maxDescriptorSetStorageBuffersDynamic, + maxDescriptorSetSampledImages, + maxDescriptorSetStorageImages, + maxDescriptorSetInputAttachments, + maxVertexInputAttributes, + maxVertexInputBindings, + maxVertexInputAttributeOffset, + maxVertexInputBindingStride, + maxVertexOutputComponents, + maxTessellationGenerationLevel, + maxTessellationPatchSize, + maxTessellationControlPerVertexInputComponents, + maxTessellationControlPerVertexOutputComponents, + maxTessellationControlPerPatchOutputComponents, + maxTessellationControlTotalOutputComponents, + maxTessellationEvaluationInputComponents, + maxTessellationEvaluationOutputComponents, + maxGeometryShaderInvocations, + maxGeometryInputComponents, + maxGeometryOutputComponents, + maxGeometryOutputVertices, + maxGeometryTotalOutputComponents, + maxFragmentInputComponents, + maxFragmentOutputAttachments, + maxFragmentDualSrcAttachments, + maxFragmentCombinedOutputResources, + maxComputeSharedMemorySize, + maxComputeWorkGroupCount, + maxComputeWorkGroupInvocations, + maxComputeWorkGroupSize, + subPixelPrecisionBits, + subTexelPrecisionBits, + mipmapPrecisionBits, + maxDrawIndexedIndexValue, + maxDrawIndirectCount, + maxSamplerLodBias, + maxSamplerAnisotropy, + maxViewports, + maxViewportDimensions, + viewportBoundsRange, + viewportSubPixelBits, + minMemoryMapAlignment, + minTexelBufferOffsetAlignment, + minUniformBufferOffsetAlignment, + minStorageBufferOffsetAlignment, + minTexelOffset, + maxTexelOffset, + minTexelGatherOffset, + maxTexelGatherOffset, + 
minInterpolationOffset, + maxInterpolationOffset, + subPixelInterpolationOffsetBits, + maxFramebufferWidth, + maxFramebufferHeight, + maxFramebufferLayers, + framebufferColorSampleCounts, + framebufferDepthSampleCounts, + framebufferStencilSampleCounts, + framebufferNoAttachmentsSampleCounts, + maxColorAttachments, + sampledImageColorSampleCounts, + sampledImageIntegerSampleCounts, + sampledImageDepthSampleCounts, + sampledImageStencilSampleCounts, + storageImageSampleCounts, + maxSampleMaskWords, + timestampComputeAndGraphics, + timestampPeriod, + maxClipDistances, + maxCullDistances, + maxCombinedClipAndCullDistances, + discreteQueuePriorities, + pointSizeRange, + lineWidthRange, + pointSizeGranularity, + lineWidthGranularity, + strictLines, + standardSampleLocations, + optimalBufferCopyOffsetAlignment, + optimalBufferCopyRowPitchAlignment, + nonCoherentAtomSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLimits const & ) const = default; +#else + bool operator==( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( maxImageDimension1D == rhs.maxImageDimension1D ) && ( maxImageDimension2D == rhs.maxImageDimension2D ) && + ( maxImageDimension3D == rhs.maxImageDimension3D ) && ( maxImageDimensionCube == rhs.maxImageDimensionCube ) && + ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( maxTexelBufferElements == rhs.maxTexelBufferElements ) && + ( maxUniformBufferRange == rhs.maxUniformBufferRange ) && ( maxStorageBufferRange == rhs.maxStorageBufferRange ) && + ( maxPushConstantsSize == rhs.maxPushConstantsSize ) && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) && + ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) && ( bufferImageGranularity == rhs.bufferImageGranularity ) && + ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) && + ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) && + ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) && + ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) && + ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) && + ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) && + ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) && ( maxPerStageResources == rhs.maxPerStageResources ) && + ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) && + ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) && + ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) && + ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) && + ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) && + ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) && + ( maxVertexInputBindings == rhs.maxVertexInputBindings ) && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) && + ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) && ( maxVertexOutputComponents == 
rhs.maxVertexOutputComponents ) && + ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) && + ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) && + ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents ) && + ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) && + ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) && + ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) && + ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) && + ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) && + ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) && + ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) && + ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) && + ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) && + ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) && ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) && + ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) && ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) && + ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) && + ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) && + ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) && + ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) && ( maxViewports == rhs.maxViewports ) && + ( maxViewportDimensions == rhs.maxViewportDimensions ) && ( viewportBoundsRange == rhs.viewportBoundsRange ) && + ( viewportSubPixelBits == rhs.viewportSubPixelBits ) && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) && + ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) && + ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) && + ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) && ( minTexelOffset == rhs.minTexelOffset ) && + ( maxTexelOffset == rhs.maxTexelOffset ) && ( minTexelGatherOffset == rhs.minTexelGatherOffset ) && + ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) && ( minInterpolationOffset == rhs.minInterpolationOffset ) && + ( maxInterpolationOffset == rhs.maxInterpolationOffset ) && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) && + ( maxFramebufferWidth == rhs.maxFramebufferWidth ) && ( maxFramebufferHeight == rhs.maxFramebufferHeight ) && + ( maxFramebufferLayers == rhs.maxFramebufferLayers ) && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) && + ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) && + ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) && ( 
maxColorAttachments == rhs.maxColorAttachments ) && + ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) && + ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) && + ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) && + ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) && ( storageImageSampleCounts == rhs.storageImageSampleCounts ) && + ( maxSampleMaskWords == rhs.maxSampleMaskWords ) && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) && + ( timestampPeriod == rhs.timestampPeriod ) && ( maxClipDistances == rhs.maxClipDistances ) && ( maxCullDistances == rhs.maxCullDistances ) && + ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) && ( discreteQueuePriorities == rhs.discreteQueuePriorities ) && + ( pointSizeRange == rhs.pointSizeRange ) && ( lineWidthRange == rhs.lineWidthRange ) && ( pointSizeGranularity == rhs.pointSizeGranularity ) && + ( lineWidthGranularity == rhs.lineWidthGranularity ) && ( strictLines == rhs.strictLines ) && + ( standardSampleLocations == rhs.standardSampleLocations ) && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) && + ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize ); +# endif + } + + bool operator!=( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t maxImageDimension1D = {}; + uint32_t maxImageDimension2D = {}; + uint32_t maxImageDimension3D = {}; + uint32_t maxImageDimensionCube = {}; + uint32_t maxImageArrayLayers = {}; + uint32_t maxTexelBufferElements = {}; + uint32_t maxUniformBufferRange = {}; + uint32_t maxStorageBufferRange = {}; + uint32_t maxPushConstantsSize = {}; + uint32_t maxMemoryAllocationCount = {}; + uint32_t maxSamplerAllocationCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {}; + uint32_t maxBoundDescriptorSets = {}; + uint32_t maxPerStageDescriptorSamplers = {}; + uint32_t maxPerStageDescriptorUniformBuffers = {}; + uint32_t maxPerStageDescriptorStorageBuffers = {}; + uint32_t maxPerStageDescriptorSampledImages = {}; + uint32_t maxPerStageDescriptorStorageImages = {}; + uint32_t maxPerStageDescriptorInputAttachments = {}; + uint32_t maxPerStageResources = {}; + uint32_t maxDescriptorSetSamplers = {}; + uint32_t maxDescriptorSetUniformBuffers = {}; + uint32_t maxDescriptorSetUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetStorageBuffers = {}; + uint32_t maxDescriptorSetStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetSampledImages = {}; + uint32_t maxDescriptorSetStorageImages = {}; + uint32_t maxDescriptorSetInputAttachments = {}; + uint32_t maxVertexInputAttributes = {}; + uint32_t maxVertexInputBindings = {}; + uint32_t maxVertexInputAttributeOffset = {}; + uint32_t maxVertexInputBindingStride = {}; + uint32_t maxVertexOutputComponents = {}; + uint32_t maxTessellationGenerationLevel = {}; + uint32_t maxTessellationPatchSize = {}; + uint32_t maxTessellationControlPerVertexInputComponents = {}; + uint32_t maxTessellationControlPerVertexOutputComponents = {}; + uint32_t maxTessellationControlPerPatchOutputComponents = {}; + uint32_t maxTessellationControlTotalOutputComponents = {}; + uint32_t maxTessellationEvaluationInputComponents = {}; + uint32_t maxTessellationEvaluationOutputComponents = {}; + uint32_t 
maxGeometryShaderInvocations = {}; + uint32_t maxGeometryInputComponents = {}; + uint32_t maxGeometryOutputComponents = {}; + uint32_t maxGeometryOutputVertices = {}; + uint32_t maxGeometryTotalOutputComponents = {}; + uint32_t maxFragmentInputComponents = {}; + uint32_t maxFragmentOutputAttachments = {}; + uint32_t maxFragmentDualSrcAttachments = {}; + uint32_t maxFragmentCombinedOutputResources = {}; + uint32_t maxComputeSharedMemorySize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupCount = {}; + uint32_t maxComputeWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxComputeWorkGroupSize = {}; + uint32_t subPixelPrecisionBits = {}; + uint32_t subTexelPrecisionBits = {}; + uint32_t mipmapPrecisionBits = {}; + uint32_t maxDrawIndexedIndexValue = {}; + uint32_t maxDrawIndirectCount = {}; + float maxSamplerLodBias = {}; + float maxSamplerAnisotropy = {}; + uint32_t maxViewports = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxViewportDimensions = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D viewportBoundsRange = {}; + uint32_t viewportSubPixelBits = {}; + size_t minMemoryMapAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {}; + int32_t minTexelOffset = {}; + uint32_t maxTexelOffset = {}; + int32_t minTexelGatherOffset = {}; + uint32_t maxTexelGatherOffset = {}; + float minInterpolationOffset = {}; + float maxInterpolationOffset = {}; + uint32_t subPixelInterpolationOffsetBits = {}; + uint32_t maxFramebufferWidth = {}; + uint32_t maxFramebufferHeight = {}; + uint32_t maxFramebufferLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {}; + uint32_t maxColorAttachments = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {}; + uint32_t maxSampleMaskWords = {}; + VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {}; + float timestampPeriod = {}; + uint32_t maxClipDistances = {}; + uint32_t maxCullDistances = {}; + uint32_t maxCombinedClipAndCullDistances = {}; + uint32_t discreteQueuePriorities = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pointSizeRange = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D lineWidthRange = {}; + float pointSizeGranularity = {}; + float lineWidthGranularity = {}; + VULKAN_HPP_NAMESPACE::Bool32 strictLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {}; + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {}; + }; + + struct PhysicalDeviceLineRasterizationFeaturesEXT + { + using NativeType = VkPhysicalDeviceLineRasterizationFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , rectangularLines( rectangularLines_ ) + , bresenhamLines( bresenhamLines_ ) + , smoothLines( smoothLines_ ) + , stippledRectangularLines( stippledRectangularLines_ ) + , stippledBresenhamLines( stippledBresenhamLines_ ) + , stippledSmoothLines( stippledSmoothLines_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLineRasterizationFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLineRasterizationFeaturesEXT & operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceLineRasterizationFeaturesEXT & operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & + setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT + { + rectangularLines = rectangularLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + bresenhamLines = bresenhamLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT + { + smoothLines = smoothLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & + setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledRectangularLines = stippledRectangularLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & + setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledBresenhamLines = stippledBresenhamLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & + setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledSmoothLines = stippledSmoothLines_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceLineRasterizationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + 
+#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rectangularLines, bresenhamLines, smoothLines, stippledRectangularLines, stippledBresenhamLines, stippledSmoothLines ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rectangularLines == rhs.rectangularLines ) && ( bresenhamLines == rhs.bresenhamLines ) && + ( smoothLines == rhs.smoothLines ) && ( stippledRectangularLines == rhs.stippledRectangularLines ) && + ( stippledBresenhamLines == rhs.stippledBresenhamLines ) && ( stippledSmoothLines == rhs.stippledSmoothLines ); +# endif + } + + bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLineRasterizationFeaturesEXT; + }; + + struct PhysicalDeviceLineRasterizationPropertiesEXT + { + using NativeType = VkPhysicalDeviceLineRasterizationPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLineRasterizationPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLineRasterizationPropertiesEXT & operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceLineRasterizationPropertiesEXT & operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLineRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() 
const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, lineSubPixelPrecisionBits ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits ); +# endif + } + + bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT; + void * pNext = {}; + uint32_t lineSubPixelPrecisionBits = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLineRasterizationPropertiesEXT; + }; + + struct PhysicalDeviceLinearColorAttachmentFeaturesNV + { + using NativeType = VkPhysicalDeviceLinearColorAttachmentFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , linearColorAttachment( linearColorAttachment_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceLinearColorAttachmentFeaturesNV( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLinearColorAttachmentFeaturesNV( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLinearColorAttachmentFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & + setLinearColorAttachment( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ ) VULKAN_HPP_NOEXCEPT + { + linearColorAttachment = linearColorAttachment_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, linearColorAttachment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceLinearColorAttachmentFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearColorAttachment == rhs.linearColorAttachment ); +# endif + } + + bool operator!=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLinearColorAttachmentFeaturesNV; + }; + + struct PhysicalDeviceMaintenance3Properties + { + using NativeType = VkPhysicalDeviceMaintenance3Properties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxPerSetDescriptors( maxPerSetDescriptors_ ) + , maxMemoryAllocationSize( maxMemoryAllocationSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance3Properties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance3Properties & operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxPerSetDescriptors, maxMemoryAllocationSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance3Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) && + ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties; + void * 
pNext = {};
+    uint32_t maxPerSetDescriptors = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance3Properties>
+  {
+    using Type = PhysicalDeviceMaintenance3Properties;
+  };
+
+  using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
+
+  struct PhysicalDeviceMaintenance4Features
+  {
+    using NativeType = VkPhysicalDeviceMaintenance4Features;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Features;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , maintenance4( maintenance4_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMaintenance4Features( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMaintenance4Features( *reinterpret_cast<PhysicalDeviceMaintenance4Features const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceMaintenance4Features & operator=( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMaintenance4Features & operator=( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maintenance4 = maintenance4_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceMaintenance4Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMaintenance4Features *>( this );
+    }
+
+    operator VkPhysicalDeviceMaintenance4Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMaintenance4Features *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maintenance4 );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMaintenance4Features const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance4 == rhs.maintenance4 );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance4Features>
+  {
+    using Type = PhysicalDeviceMaintenance4Features;
+  };
+
+  using PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features;
+
+  struct PhysicalDeviceMaintenance4Properties
+  {
+    using NativeType = VkPhysicalDeviceMaintenance4Properties;
+
+    static const bool allowDuplicate = false;
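+    // Usage sketch (illustrative comment, not part of the generated header;
+    // assumes a valid vk::PhysicalDevice named physicalDevice, Vulkan 1.1+):
+    // properties structs like this one are read back by chaining them into
+    // PhysicalDeviceProperties2 through pNext:
+    //
+    //   vk::PhysicalDeviceMaintenance4Properties maintenance4Props{};
+    //   vk::PhysicalDeviceProperties2 props2{};
+    //   props2.pNext = &maintenance4Props;
+    //   physicalDevice.getProperties2( &props2 );
+    //   vk::DeviceSize maxBuf = maintenance4Props.maxBufferSize;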
+ static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Properties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxBufferSize( maxBufferSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance4Properties( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance4Properties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance4Properties & operator=( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance4Properties & operator=( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance4Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance4Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxBufferSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance4Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxBufferSize == rhs.maxBufferSize ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance4Properties; + }; + + using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties; + + struct PhysicalDeviceMaintenance5FeaturesKHR + { + using NativeType = VkPhysicalDeviceMaintenance5FeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5FeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maintenance5( maintenance5_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5FeaturesKHR( PhysicalDeviceMaintenance5FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance5FeaturesKHR( VkPhysicalDeviceMaintenance5FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance5FeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance5FeaturesKHR & operator=( PhysicalDeviceMaintenance5FeaturesKHR const & rhs 
) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance5FeaturesKHR & operator=( VkPhysicalDeviceMaintenance5FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5FeaturesKHR & setMaintenance5( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ ) VULKAN_HPP_NOEXCEPT + { + maintenance5 = maintenance5_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceMaintenance5FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance5FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maintenance5 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance5FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance5FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance5 == rhs.maintenance5 ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance5FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance5 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance5FeaturesKHR; + }; + + struct PhysicalDeviceMaintenance5PropertiesKHR + { + using NativeType = VkPhysicalDeviceMaintenance5PropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5PropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5PropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , earlyFragmentMultisampleCoverageAfterSampleCounting( earlyFragmentMultisampleCoverageAfterSampleCounting_ ) + , earlyFragmentSampleMaskTestBeforeSampleCounting( earlyFragmentSampleMaskTestBeforeSampleCounting_ ) + , depthStencilSwizzleOneSupport( depthStencilSwizzleOneSupport_ ) + , polygonModePointSize( polygonModePointSize_ ) + , nonStrictSinglePixelWideLinesUseParallelogram( nonStrictSinglePixelWideLinesUseParallelogram_ ) + , nonStrictWideLinesUseParallelogram( nonStrictWideLinesUseParallelogram_ ) + { + } + + VULKAN_HPP_CONSTEXPR 
PhysicalDeviceMaintenance5PropertiesKHR( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance5PropertiesKHR( VkPhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance5PropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance5PropertiesKHR & operator=( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance5PropertiesKHR & operator=( VkPhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance5PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance5PropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + earlyFragmentMultisampleCoverageAfterSampleCounting, + earlyFragmentSampleMaskTestBeforeSampleCounting, + depthStencilSwizzleOneSupport, + polygonModePointSize, + nonStrictSinglePixelWideLinesUseParallelogram, + nonStrictWideLinesUseParallelogram ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance5PropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( earlyFragmentMultisampleCoverageAfterSampleCounting == rhs.earlyFragmentMultisampleCoverageAfterSampleCounting ) && + ( earlyFragmentSampleMaskTestBeforeSampleCounting == rhs.earlyFragmentSampleMaskTestBeforeSampleCounting ) && + ( depthStencilSwizzleOneSupport == rhs.depthStencilSwizzleOneSupport ) && ( polygonModePointSize == rhs.polygonModePointSize ) && + ( nonStrictSinglePixelWideLinesUseParallelogram == rhs.nonStrictSinglePixelWideLinesUseParallelogram ) && + ( nonStrictWideLinesUseParallelogram == rhs.nonStrictWideLinesUseParallelogram ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5PropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting = {}; + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport = {}; + VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance5PropertiesKHR; + }; + + struct PhysicalDeviceMaintenance6FeaturesKHR + { + using NativeType = VkPhysicalDeviceMaintenance6FeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance6FeaturesKHR; + +#if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maintenance6( maintenance6_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6FeaturesKHR( PhysicalDeviceMaintenance6FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance6FeaturesKHR( VkPhysicalDeviceMaintenance6FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance6FeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance6FeaturesKHR & operator=( PhysicalDeviceMaintenance6FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance6FeaturesKHR & operator=( VkPhysicalDeviceMaintenance6FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6FeaturesKHR & setMaintenance6( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ ) VULKAN_HPP_NOEXCEPT + { + maintenance6 = maintenance6_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceMaintenance6FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance6FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maintenance6 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance6FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance6FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance6 == rhs.maintenance6 ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance6FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance6FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance6 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance6FeaturesKHR; + }; + + struct PhysicalDeviceMaintenance6PropertiesKHR + { + using NativeType = VkPhysicalDeviceMaintenance6PropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance6PropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6PropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers_ = {}, + uint32_t maxCombinedImageSamplerDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , blockTexelViewCompatibleMultipleLayers( blockTexelViewCompatibleMultipleLayers_ ) + , 
maxCombinedImageSamplerDescriptorCount( maxCombinedImageSamplerDescriptorCount_ ) + , fragmentShadingRateClampCombinerInputs( fragmentShadingRateClampCombinerInputs_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6PropertiesKHR( PhysicalDeviceMaintenance6PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance6PropertiesKHR( VkPhysicalDeviceMaintenance6PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance6PropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance6PropertiesKHR & operator=( PhysicalDeviceMaintenance6PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance6PropertiesKHR & operator=( VkPhysicalDeviceMaintenance6PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance6PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance6PropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, blockTexelViewCompatibleMultipleLayers, maxCombinedImageSamplerDescriptorCount, fragmentShadingRateClampCombinerInputs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance6PropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance6PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( blockTexelViewCompatibleMultipleLayers == rhs.blockTexelViewCompatibleMultipleLayers ) && + ( maxCombinedImageSamplerDescriptorCount == rhs.maxCombinedImageSamplerDescriptorCount ) && + ( fragmentShadingRateClampCombinerInputs == rhs.fragmentShadingRateClampCombinerInputs ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance6PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance6PropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers = {}; + uint32_t maxCombinedImageSamplerDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance6PropertiesKHR; + }; + + struct PhysicalDeviceMemoryBudgetPropertiesEXT + { + using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( std::array const & heapBudget_ = {}, + std::array const & heapUsage_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , heapBudget( heapBudget_ ) + , heapUsage( heapUsage_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
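+    // Usage sketch (illustrative comment, not part of the generated header;
+    // assumes a valid vk::PhysicalDevice named physicalDevice and that the
+    // VK_EXT_memory_budget device extension is available): per-heap budget
+    // and usage are queried by chaining this struct into memory properties:
+    //
+    //   vk::PhysicalDeviceMemoryBudgetPropertiesEXT budgetProps{};
+    //   vk::PhysicalDeviceMemoryProperties2 memProps2{};
+    //   memProps2.pNext = &budgetProps;
+    //   physicalDevice.getMemoryProperties2( &memProps2 );
+    //   vk::DeviceSize heap0Budget = budgetProps.heapBudget[0];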
PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMemoryBudgetPropertiesEXT( *reinterpret_cast<PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, heapBudget, heapUsage );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( heapBudget == rhs.heapBudget ) && ( heapUsage == rhs.heapUsage );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapBudget = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapUsage = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT>
+  {
+    using Type = PhysicalDeviceMemoryBudgetPropertiesEXT;
+  };
+
+  struct PhysicalDeviceMemoryDecompressionFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceMemoryDecompressionFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression_ = {},
+                                                                      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , memoryDecompression( memoryDecompression_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesNV( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMemoryDecompressionFeaturesNV( VkPhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMemoryDecompressionFeaturesNV( *reinterpret_cast<PhysicalDeviceMemoryDecompressionFeaturesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceMemoryDecompressionFeaturesNV & operator=( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMemoryDecompressionFeaturesNV & operator=( VkPhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14
PhysicalDeviceMemoryDecompressionFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesNV & + setMemoryDecompression( VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression_ ) VULKAN_HPP_NOEXCEPT + { + memoryDecompression = memoryDecompression_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceMemoryDecompressionFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryDecompressionFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryDecompression ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryDecompressionFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryDecompression == rhs.memoryDecompression ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryDecompressionFeaturesNV; + }; + + struct PhysicalDeviceMemoryDecompressionPropertiesNV + { + using NativeType = VkPhysicalDeviceMemoryDecompressionPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionPropertiesNV( VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethods_ = {}, + uint64_t maxDecompressionIndirectCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , decompressionMethods( decompressionMethods_ ) + , maxDecompressionIndirectCount( maxDecompressionIndirectCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMemoryDecompressionPropertiesNV( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryDecompressionPropertiesNV( VkPhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryDecompressionPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryDecompressionPropertiesNV & operator=( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMemoryDecompressionPropertiesNV & operator=( VkPhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMemoryDecompressionPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceMemoryDecompressionPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, decompressionMethods, maxDecompressionIndirectCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryDecompressionPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decompressionMethods == rhs.decompressionMethods ) && + ( maxDecompressionIndirectCount == rhs.maxDecompressionIndirectCount ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethods = {}; + uint64_t maxDecompressionIndirectCount = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryDecompressionPropertiesNV; + }; + + struct PhysicalDeviceMemoryPriorityFeaturesEXT + { + using NativeType = VkPhysicalDeviceMemoryPriorityFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memoryPriority( memoryPriority_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryPriorityFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT + { + memoryPriority = memoryPriority_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryPriority ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryPriority == rhs.memoryPriority ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryPriorityFeaturesEXT; + }; + + struct PhysicalDeviceMemoryProperties + { + using NativeType = VkPhysicalDeviceMemoryProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceMemoryProperties( uint32_t memoryTypeCount_ = {}, + std::array const & memoryTypes_ = {}, + uint32_t memoryHeapCount_ = {}, + std::array const & memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryTypeCount( memoryTypeCount_ ) + , memoryTypes( memoryTypes_ ) + , memoryHeapCount( memoryHeapCount_ ) + , memoryHeaps( memoryHeaps_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( memoryTypeCount, memoryTypes, memoryHeapCount, memoryHeaps ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( memoryTypeCount == rhs.memoryTypeCount ) && ( memoryTypes == rhs.memoryTypes ) && ( memoryHeapCount == rhs.memoryHeapCount ) && + ( memoryHeaps == rhs.memoryHeaps ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t memoryTypeCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D 
memoryTypes = {}; + uint32_t memoryHeapCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryHeaps = {}; + }; + + struct PhysicalDeviceMemoryProperties2 + { + using NativeType = VkPhysicalDeviceMemoryProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memoryProperties( memoryProperties_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryProperties2( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryProperties2 const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryProperties == rhs.memoryProperties ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryProperties2; + }; + + using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; + + struct PhysicalDeviceMeshShaderFeaturesEXT + { + using NativeType = VkPhysicalDeviceMeshShaderFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , taskShader( taskShader_ ) 
+ , meshShader( meshShader_ ) + , multiviewMeshShader( multiviewMeshShader_ ) + , primitiveFragmentShadingRateMeshShader( primitiveFragmentShadingRateMeshShader_ ) + , meshShaderQueries( meshShaderQueries_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderFeaturesEXT( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMeshShaderFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMeshShaderFeaturesEXT & operator=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMeshShaderFeaturesEXT & operator=( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT + { + taskShader = taskShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT + { + meshShader = meshShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & + setMultiviewMeshShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewMeshShader = multiviewMeshShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & + setPrimitiveFragmentShadingRateMeshShader( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader_ ) VULKAN_HPP_NOEXCEPT + { + primitiveFragmentShadingRateMeshShader = primitiveFragmentShadingRateMeshShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShaderQueries( VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries_ ) VULKAN_HPP_NOEXCEPT + { + meshShaderQueries = meshShaderQueries_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceMeshShaderFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, taskShader, meshShader, multiviewMeshShader, primitiveFragmentShadingRateMeshShader, meshShaderQueries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) && ( meshShader == rhs.meshShader ) && + ( multiviewMeshShader == rhs.multiviewMeshShader ) && ( primitiveFragmentShadingRateMeshShader == rhs.primitiveFragmentShadingRateMeshShader ) && + ( meshShaderQueries == rhs.meshShaderQueries ); +# endif + } + + 
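+    // Usage sketch (illustrative comment, not part of the generated header;
+    // assumes VK_EXT_mesh_shader is supported and deviceCreateInfo is a
+    // vk::DeviceCreateInfo being assembled): feature structs such as this one
+    // are enabled at device creation by chaining them into pNext:
+    //
+    //   vk::PhysicalDeviceMeshShaderFeaturesEXT meshFeatures{};
+    //   meshFeatures.setTaskShader( VK_TRUE ).setMeshShader( VK_TRUE );
+    //   deviceCreateInfo.pNext = &meshFeatures;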
bool operator!=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesEXT>
+  {
+    using Type = PhysicalDeviceMeshShaderFeaturesEXT;
+  };
+
+  struct PhysicalDeviceMeshShaderFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceMeshShaderFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {},
+                                                             VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {},
+                                                             void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , taskShader( taskShader_ )
+      , meshShader( meshShader_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMeshShaderFeaturesNV( *reinterpret_cast<PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMeshShaderFeaturesNV & operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      taskShader = taskShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      meshShader = meshShader_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, taskShader, meshShader );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) && ( meshShader == rhs.meshShader );
+#  endif
+    }
+
+    bool
operator!=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 taskShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 meshShader = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderFeaturesNV; + }; + + struct PhysicalDeviceMeshShaderPropertiesEXT + { + using NativeType = VkPhysicalDeviceMeshShaderPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( uint32_t maxTaskWorkGroupTotalCount_ = {}, + std::array const & maxTaskWorkGroupCount_ = {}, + uint32_t maxTaskWorkGroupInvocations_ = {}, + std::array const & maxTaskWorkGroupSize_ = {}, + uint32_t maxTaskPayloadSize_ = {}, + uint32_t maxTaskSharedMemorySize_ = {}, + uint32_t maxTaskPayloadAndSharedMemorySize_ = {}, + uint32_t maxMeshWorkGroupTotalCount_ = {}, + std::array const & maxMeshWorkGroupCount_ = {}, + uint32_t maxMeshWorkGroupInvocations_ = {}, + std::array const & maxMeshWorkGroupSize_ = {}, + uint32_t maxMeshSharedMemorySize_ = {}, + uint32_t maxMeshPayloadAndSharedMemorySize_ = {}, + uint32_t maxMeshOutputMemorySize_ = {}, + uint32_t maxMeshPayloadAndOutputMemorySize_ = {}, + uint32_t maxMeshOutputComponents_ = {}, + uint32_t maxMeshOutputVertices_ = {}, + uint32_t maxMeshOutputPrimitives_ = {}, + uint32_t maxMeshOutputLayers_ = {}, + uint32_t maxMeshMultiviewViewCount_ = {}, + uint32_t meshOutputPerVertexGranularity_ = {}, + uint32_t meshOutputPerPrimitiveGranularity_ = {}, + uint32_t maxPreferredTaskWorkGroupInvocations_ = {}, + uint32_t maxPreferredMeshWorkGroupInvocations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationVertexOutput_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationPrimitiveOutput_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersCompactVertexOutput_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersCompactPrimitiveOutput_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxTaskWorkGroupTotalCount( maxTaskWorkGroupTotalCount_ ) + , maxTaskWorkGroupCount( maxTaskWorkGroupCount_ ) + , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ) + , maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ) + , maxTaskPayloadSize( maxTaskPayloadSize_ ) + , maxTaskSharedMemorySize( maxTaskSharedMemorySize_ ) + , maxTaskPayloadAndSharedMemorySize( maxTaskPayloadAndSharedMemorySize_ ) + , maxMeshWorkGroupTotalCount( maxMeshWorkGroupTotalCount_ ) + , maxMeshWorkGroupCount( maxMeshWorkGroupCount_ ) + , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ) + , maxMeshWorkGroupSize( maxMeshWorkGroupSize_ ) + , maxMeshSharedMemorySize( maxMeshSharedMemorySize_ ) + , maxMeshPayloadAndSharedMemorySize( maxMeshPayloadAndSharedMemorySize_ ) + , maxMeshOutputMemorySize( maxMeshOutputMemorySize_ ) + , maxMeshPayloadAndOutputMemorySize( maxMeshPayloadAndOutputMemorySize_ ) + , maxMeshOutputComponents( maxMeshOutputComponents_ ) + , maxMeshOutputVertices( maxMeshOutputVertices_ ) + , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ) + , maxMeshOutputLayers( maxMeshOutputLayers_ ) + , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ) + , meshOutputPerVertexGranularity( 
meshOutputPerVertexGranularity_ ) + , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ ) + , maxPreferredTaskWorkGroupInvocations( maxPreferredTaskWorkGroupInvocations_ ) + , maxPreferredMeshWorkGroupInvocations( maxPreferredMeshWorkGroupInvocations_ ) + , prefersLocalInvocationVertexOutput( prefersLocalInvocationVertexOutput_ ) + , prefersLocalInvocationPrimitiveOutput( prefersLocalInvocationPrimitiveOutput_ ) + , prefersCompactVertexOutput( prefersCompactVertexOutput_ ) + , prefersCompactPrimitiveOutput( prefersCompactPrimitiveOutput_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderPropertiesEXT( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMeshShaderPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMeshShaderPropertiesEXT & operator=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMeshShaderPropertiesEXT & operator=( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMeshShaderPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxTaskWorkGroupTotalCount, + maxTaskWorkGroupCount, + maxTaskWorkGroupInvocations, + maxTaskWorkGroupSize, + maxTaskPayloadSize, + maxTaskSharedMemorySize, + maxTaskPayloadAndSharedMemorySize, + maxMeshWorkGroupTotalCount, + maxMeshWorkGroupCount, + maxMeshWorkGroupInvocations, + maxMeshWorkGroupSize, + maxMeshSharedMemorySize, + maxMeshPayloadAndSharedMemorySize, + maxMeshOutputMemorySize, + maxMeshPayloadAndOutputMemorySize, + maxMeshOutputComponents, + maxMeshOutputVertices, + maxMeshOutputPrimitives, + maxMeshOutputLayers, + maxMeshMultiviewViewCount, + meshOutputPerVertexGranularity, + meshOutputPerPrimitiveGranularity, + maxPreferredTaskWorkGroupInvocations, + maxPreferredMeshWorkGroupInvocations, + prefersLocalInvocationVertexOutput, + prefersLocalInvocationPrimitiveOutput, + prefersCompactVertexOutput, + prefersCompactPrimitiveOutput ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTaskWorkGroupTotalCount == rhs.maxTaskWorkGroupTotalCount ) && + ( maxTaskWorkGroupCount == rhs.maxTaskWorkGroupCount ) && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) && + ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && ( maxTaskPayloadSize == rhs.maxTaskPayloadSize ) && + ( maxTaskSharedMemorySize == rhs.maxTaskSharedMemorySize ) && ( maxTaskPayloadAndSharedMemorySize == rhs.maxTaskPayloadAndSharedMemorySize ) && + ( maxMeshWorkGroupTotalCount == rhs.maxMeshWorkGroupTotalCount ) && ( maxMeshWorkGroupCount == rhs.maxMeshWorkGroupCount ) && + ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) && + ( maxMeshSharedMemorySize == rhs.maxMeshSharedMemorySize ) && ( maxMeshPayloadAndSharedMemorySize == rhs.maxMeshPayloadAndSharedMemorySize ) && + ( maxMeshOutputMemorySize == rhs.maxMeshOutputMemorySize ) && ( maxMeshPayloadAndOutputMemorySize == rhs.maxMeshPayloadAndOutputMemorySize ) && + ( maxMeshOutputComponents == rhs.maxMeshOutputComponents ) && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) && + ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && ( maxMeshOutputLayers == rhs.maxMeshOutputLayers ) && + ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) && + ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ) && + ( maxPreferredTaskWorkGroupInvocations == rhs.maxPreferredTaskWorkGroupInvocations ) && + ( maxPreferredMeshWorkGroupInvocations == rhs.maxPreferredMeshWorkGroupInvocations ) && + ( prefersLocalInvocationVertexOutput == rhs.prefersLocalInvocationVertexOutput ) && + ( prefersLocalInvocationPrimitiveOutput == rhs.prefersLocalInvocationPrimitiveOutput ) && + ( prefersCompactVertexOutput == rhs.prefersCompactVertexOutput ) && ( prefersCompactPrimitiveOutput == rhs.prefersCompactPrimitiveOutput ); +# endif + } + + bool operator!=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT; + void * pNext = {}; + uint32_t maxTaskWorkGroupTotalCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupCount = {}; + uint32_t maxTaskWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupSize = {}; + uint32_t maxTaskPayloadSize = {}; + uint32_t maxTaskSharedMemorySize = {}; + uint32_t maxTaskPayloadAndSharedMemorySize = {}; + uint32_t maxMeshWorkGroupTotalCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupCount = {}; + uint32_t maxMeshWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupSize = {}; + uint32_t maxMeshSharedMemorySize = {}; + uint32_t maxMeshPayloadAndSharedMemorySize = {}; + uint32_t maxMeshOutputMemorySize = {}; + uint32_t maxMeshPayloadAndOutputMemorySize = {}; + uint32_t maxMeshOutputComponents = {}; + uint32_t maxMeshOutputVertices = {}; + uint32_t maxMeshOutputPrimitives = {}; + uint32_t maxMeshOutputLayers = {}; + uint32_t maxMeshMultiviewViewCount = {}; + uint32_t meshOutputPerVertexGranularity = {}; + uint32_t meshOutputPerPrimitiveGranularity = {}; + uint32_t maxPreferredTaskWorkGroupInvocations = {}; + uint32_t 
maxPreferredMeshWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationVertexOutput = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationPrimitiveOutput = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersCompactVertexOutput = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersCompactPrimitiveOutput = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderPropertiesEXT; + }; + + struct PhysicalDeviceMeshShaderPropertiesNV + { + using NativeType = VkPhysicalDeviceMeshShaderPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {}, + uint32_t maxTaskWorkGroupInvocations_ = {}, + std::array const & maxTaskWorkGroupSize_ = {}, + uint32_t maxTaskTotalMemorySize_ = {}, + uint32_t maxTaskOutputCount_ = {}, + uint32_t maxMeshWorkGroupInvocations_ = {}, + std::array const & maxMeshWorkGroupSize_ = {}, + uint32_t maxMeshTotalMemorySize_ = {}, + uint32_t maxMeshOutputVertices_ = {}, + uint32_t maxMeshOutputPrimitives_ = {}, + uint32_t maxMeshMultiviewViewCount_ = {}, + uint32_t meshOutputPerVertexGranularity_ = {}, + uint32_t meshOutputPerPrimitiveGranularity_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxDrawMeshTasksCount( maxDrawMeshTasksCount_ ) + , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ) + , maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ) + , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ ) + , maxTaskOutputCount( maxTaskOutputCount_ ) + , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ) + , maxMeshWorkGroupSize( maxMeshWorkGroupSize_ ) + , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ ) + , maxMeshOutputVertices( maxMeshOutputVertices_ ) + , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ) + , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ) + , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ) + , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMeshShaderPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMeshShaderPropertiesNV & operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &> +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxDrawMeshTasksCount, + maxTaskWorkGroupInvocations, + maxTaskWorkGroupSize, + maxTaskTotalMemorySize, + maxTaskOutputCount, + maxMeshWorkGroupInvocations, + maxMeshWorkGroupSize, + maxMeshTotalMemorySize, + maxMeshOutputVertices, + maxMeshOutputPrimitives, + maxMeshMultiviewViewCount, + meshOutputPerVertexGranularity, + meshOutputPerPrimitiveGranularity ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) && + ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && + ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) && ( maxTaskOutputCount == rhs.maxTaskOutputCount ) && + ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) && + ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) && + ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) && + ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) && + ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ); +# endif + } + + bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; + void * pNext = {}; + uint32_t maxDrawMeshTasksCount = {}; + uint32_t maxTaskWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupSize = {}; + uint32_t maxTaskTotalMemorySize = {}; + uint32_t maxTaskOutputCount = {}; + uint32_t maxMeshWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupSize = {}; + uint32_t maxMeshTotalMemorySize = {}; + uint32_t maxMeshOutputVertices = {}; + uint32_t maxMeshOutputPrimitives = {}; + uint32_t maxMeshMultiviewViewCount = {}; + uint32_t meshOutputPerVertexGranularity = {}; + uint32_t meshOutputPerPrimitiveGranularity = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderPropertiesNV; + }; + + struct PhysicalDeviceMultiDrawFeaturesEXT + { + using NativeType = VkPhysicalDeviceMultiDrawFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , multiDraw( multiDraw_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiDrawFeaturesEXT( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiDrawFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + 
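+    // Illustrative usage sketch (not part of the generated header; assumes a valid
+    // vk::PhysicalDevice named `physicalDevice` and Vulkan 1.1+): feature structs like
+    // this one are typically chained into a vk::PhysicalDeviceFeatures2 query, e.g.
+    //
+    //   vk::PhysicalDeviceMultiDrawFeaturesEXT multiDrawFeatures;
+    //   vk::PhysicalDeviceFeatures2            features2;
+    //   features2.pNext = &multiDrawFeatures;
+    //   physicalDevice.getFeatures2( &features2 );
+    //
+    // Afterwards, multiDrawFeatures.multiDraw reports whether VK_EXT_multi_draw is usable.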
PhysicalDeviceMultiDrawFeaturesEXT & operator=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMultiDrawFeaturesEXT & operator=( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setMultiDraw( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ ) VULKAN_HPP_NOEXCEPT + { + multiDraw = multiDraw_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceMultiDrawFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiDrawFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multiDraw ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiDrawFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiDraw == rhs.multiDraw ); +# endif + } + + bool operator!=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiDraw = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiDrawFeaturesEXT; + }; + + struct PhysicalDeviceMultiDrawPropertiesEXT + { + using NativeType = VkPhysicalDeviceMultiDrawPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( uint32_t maxMultiDrawCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxMultiDrawCount( maxMultiDrawCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiDrawPropertiesEXT( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiDrawPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiDrawPropertiesEXT & operator=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMultiDrawPropertiesEXT & operator=( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMultiDrawPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiDrawPropertiesEXT &() 
VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiDrawPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxMultiDrawCount );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMultiDrawPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiDrawCount == rhs.maxMultiDrawCount );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;
+    void *                              pNext             = {};
+    uint32_t                            maxMultiDrawCount = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiDrawPropertiesEXT>
+  {
+    using Type = PhysicalDeviceMultiDrawPropertiesEXT;
+  };
+
+  struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled_ = {},
+                                                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , multisampledRenderToSingleSampled( multisampledRenderToSingleSampled_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT(
+          *reinterpret_cast<PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &
+      operator=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &
+      operator=( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &
+      setMultisampledRenderToSingleSampled( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multisampledRenderToSingleSampled = multisampledRenderToSingleSampled_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT *>( this );
+    }
+
+    operator
VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multisampledRenderToSingleSampled ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multisampledRenderToSingleSampled == rhs.multisampledRenderToSingleSampled ); +# endif + } + + bool operator!=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + }; + + struct PhysicalDeviceMultiviewFeatures + { + using NativeType = VkPhysicalDeviceMultiviewFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , multiview( multiview_ ) + , multiviewGeometryShader( multiviewGeometryShader_ ) + , multiviewTessellationShader( multiviewTessellationShader_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT + { + multiview = multiview_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & + setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewGeometryShader = multiviewGeometryShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & + 
setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewTessellationShader = multiviewTessellationShader_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multiview, multiviewGeometryShader, multiviewTessellationShader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiview == rhs.multiview ) && ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && + ( multiviewTessellationShader == rhs.multiviewTessellationShader ); +# endif + } + + bool operator!=( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewFeatures; + }; + + using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; + + struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX + { + using NativeType = VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , perViewPositionAllComponents( perViewPositionAllComponents_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & + operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, perViewPositionAllComponents ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents ); +# endif + } + + bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + }; + + struct PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM + { + using NativeType = VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewRenderAreas_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , multiviewPerViewRenderAreas( multiviewPerViewRenderAreas_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & + operator=( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & operator=( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & + setMultiviewPerViewRenderAreas( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + multiviewPerViewRenderAreas = multiviewPerViewRenderAreas_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator 
VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multiviewPerViewRenderAreas ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiviewPerViewRenderAreas == rhs.multiviewPerViewRenderAreas ); +# endif + } + + bool operator!=( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewRenderAreas = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + }; + + struct PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM + { + using NativeType = VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , multiviewPerViewViewports( multiviewPerViewViewports_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & + operator=( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & operator=( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & + setMultiviewPerViewViewports( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports_ ) VULKAN_HPP_NOEXCEPT + { + multiviewPerViewViewports = multiviewPerViewViewports_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator 
VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multiviewPerViewViewports ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiviewPerViewViewports == rhs.multiviewPerViewViewports ); +# endif + } + + bool operator!=( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + }; + + struct PhysicalDeviceMultiviewProperties + { + using NativeType = VkPhysicalDeviceMultiviewProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {}, + uint32_t maxMultiviewInstanceIndex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxMultiviewViewCount( maxMultiviewViewCount_ ) + , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxMultiviewViewCount, maxMultiviewInstanceIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { 
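+      // When VULKAN_HPP_USE_REFLECT is defined, equality is delegated to the member
+      // tuples produced by reflect(); otherwise each member is compared explicitly below.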
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) &&
+             ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                     = StructureType::ePhysicalDeviceMultiviewProperties;
+    void *                              pNext                     = {};
+    uint32_t                            maxMultiviewViewCount     = {};
+    uint32_t                            maxMultiviewInstanceIndex = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewProperties>
+  {
+    using Type = PhysicalDeviceMultiviewProperties;
+  };
+
+  using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
+
+  struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {},
+                                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , mutableDescriptorType( mutableDescriptorType_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceMutableDescriptorTypeFeaturesEXT( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMutableDescriptorTypeFeaturesEXT( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMutableDescriptorTypeFeaturesEXT( *reinterpret_cast<PhysicalDeviceMutableDescriptorTypeFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT &
+      setMutableDescriptorType( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mutableDescriptorType = mutableDescriptorType_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, mutableDescriptorType );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
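+      // Note: in both comparison branches pNext participates as a raw pointer value,
+      // so structs with equivalent but distinct pNext chains still compare unequal.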
return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorType == rhs.mutableDescriptorType ); +# endif + } + + bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMutableDescriptorTypeFeaturesEXT; + }; + + using PhysicalDeviceMutableDescriptorTypeFeaturesVALVE = PhysicalDeviceMutableDescriptorTypeFeaturesEXT; + + struct PhysicalDeviceNestedCommandBufferFeaturesEXT + { + using NativeType = VkPhysicalDeviceNestedCommandBufferFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferRendering_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferSimultaneousUse_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , nestedCommandBuffer( nestedCommandBuffer_ ) + , nestedCommandBufferRendering( nestedCommandBufferRendering_ ) + , nestedCommandBufferSimultaneousUse( nestedCommandBufferSimultaneousUse_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferFeaturesEXT( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceNestedCommandBufferFeaturesEXT( VkPhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceNestedCommandBufferFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceNestedCommandBufferFeaturesEXT & operator=( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceNestedCommandBufferFeaturesEXT & operator=( VkPhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & + setNestedCommandBuffer( VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBuffer_ ) VULKAN_HPP_NOEXCEPT + { + nestedCommandBuffer = nestedCommandBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & + setNestedCommandBufferRendering( VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferRendering_ ) VULKAN_HPP_NOEXCEPT + { + nestedCommandBufferRendering = nestedCommandBufferRendering_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & + setNestedCommandBufferSimultaneousUse( VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferSimultaneousUse_ ) VULKAN_HPP_NOEXCEPT + { + nestedCommandBufferSimultaneousUse = nestedCommandBufferSimultaneousUse_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator 
VkPhysicalDeviceNestedCommandBufferFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, nestedCommandBuffer, nestedCommandBufferRendering, nestedCommandBufferSimultaneousUse ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceNestedCommandBufferFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nestedCommandBuffer == rhs.nestedCommandBuffer ) && + ( nestedCommandBufferRendering == rhs.nestedCommandBufferRendering ) && + ( nestedCommandBufferSimultaneousUse == rhs.nestedCommandBufferSimultaneousUse ); +# endif + } + + bool operator!=( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferRendering = {}; + VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferSimultaneousUse = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceNestedCommandBufferFeaturesEXT; + }; + + struct PhysicalDeviceNestedCommandBufferPropertiesEXT + { + using NativeType = VkPhysicalDeviceNestedCommandBufferPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferPropertiesEXT( uint32_t maxCommandBufferNestingLevel_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxCommandBufferNestingLevel( maxCommandBufferNestingLevel_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceNestedCommandBufferPropertiesEXT( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceNestedCommandBufferPropertiesEXT( VkPhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceNestedCommandBufferPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceNestedCommandBufferPropertiesEXT & operator=( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceNestedCommandBufferPropertiesEXT & operator=( VkPhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferPropertiesEXT & + setMaxCommandBufferNestingLevel( uint32_t 
maxCommandBufferNestingLevel_ ) VULKAN_HPP_NOEXCEPT + { + maxCommandBufferNestingLevel = maxCommandBufferNestingLevel_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxCommandBufferNestingLevel ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceNestedCommandBufferPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxCommandBufferNestingLevel == rhs.maxCommandBufferNestingLevel ); +# endif + } + + bool operator!=( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT; + void * pNext = {}; + uint32_t maxCommandBufferNestingLevel = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceNestedCommandBufferPropertiesEXT; + }; + + struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT + { + using NativeType = VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , nonSeamlessCubeMap( nonSeamlessCubeMap_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & + setNonSeamlessCubeMap( VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap_ ) VULKAN_HPP_NOEXCEPT + { + nonSeamlessCubeMap = nonSeamlessCubeMap_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT 
const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, nonSeamlessCubeMap ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nonSeamlessCubeMap == rhs.nonSeamlessCubeMap ); +# endif + } + + bool operator!=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + }; + + struct PhysicalDeviceOpacityMicromapFeaturesEXT + { + using NativeType = VkPhysicalDeviceOpacityMicromapFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 micromap_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , micromap( micromap_ ) + , micromapCaptureReplay( micromapCaptureReplay_ ) + , micromapHostCommands( micromapHostCommands_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceOpacityMicromapFeaturesEXT( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceOpacityMicromapFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromap( VULKAN_HPP_NAMESPACE::Bool32 micromap_ ) VULKAN_HPP_NOEXCEPT + { + micromap = micromap_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & + setMicromapCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + micromapCaptureReplay = micromapCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceOpacityMicromapFeaturesEXT & + setMicromapHostCommands( VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands_ ) VULKAN_HPP_NOEXCEPT + { + micromapHostCommands = micromapHostCommands_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceOpacityMicromapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpacityMicromapFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, micromap, micromapCaptureReplay, micromapHostCommands ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceOpacityMicromapFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( micromap == rhs.micromap ) && ( micromapCaptureReplay == rhs.micromapCaptureReplay ) && + ( micromapHostCommands == rhs.micromapHostCommands ); +# endif + } + + bool operator!=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 micromap = {}; + VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceOpacityMicromapFeaturesEXT; + }; + + struct PhysicalDeviceOpacityMicromapPropertiesEXT + { + using NativeType = VkPhysicalDeviceOpacityMicromapPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( uint32_t maxOpacity2StateSubdivisionLevel_ = {}, + uint32_t maxOpacity4StateSubdivisionLevel_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxOpacity2StateSubdivisionLevel( maxOpacity2StateSubdivisionLevel_ ) + , maxOpacity4StateSubdivisionLevel( maxOpacity4StateSubdivisionLevel_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceOpacityMicromapPropertiesEXT( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceOpacityMicromapPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceOpacityMicromapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceOpacityMicromapPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxOpacity2StateSubdivisionLevel, maxOpacity4StateSubdivisionLevel ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceOpacityMicromapPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxOpacity2StateSubdivisionLevel == rhs.maxOpacity2StateSubdivisionLevel ) && + ( maxOpacity4StateSubdivisionLevel == rhs.maxOpacity4StateSubdivisionLevel ); +# endif + } + + bool operator!=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT; + void * pNext = {}; + uint32_t maxOpacity2StateSubdivisionLevel = {}; + uint32_t maxOpacity4StateSubdivisionLevel = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceOpacityMicromapPropertiesEXT; + }; + + struct PhysicalDeviceOpticalFlowFeaturesNV + { + using NativeType = VkPhysicalDeviceOpticalFlowFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , opticalFlow( opticalFlow_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceOpticalFlowFeaturesNV( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceOpticalFlowFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceOpticalFlowFeaturesNV & operator=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceOpticalFlowFeaturesNV & operator=( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setOpticalFlow( VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ ) VULKAN_HPP_NOEXCEPT + { + opticalFlow = opticalFlow_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceOpticalFlowFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpticalFlowFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + 
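+      // std::tie packs the members into a tuple of references, which the comparison
+      // operators below reuse when VULKAN_HPP_USE_REFLECT is enabled.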
+
+  struct PhysicalDeviceOpticalFlowFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceOpticalFlowFeaturesNV;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , opticalFlow( opticalFlow_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceOpticalFlowFeaturesNV( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceOpticalFlowFeaturesNV( *reinterpret_cast<PhysicalDeviceOpticalFlowFeaturesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceOpticalFlowFeaturesNV & operator=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceOpticalFlowFeaturesNV & operator=( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setOpticalFlow( VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ ) VULKAN_HPP_NOEXCEPT
+    {
+      opticalFlow = opticalFlow_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceOpticalFlowFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceOpticalFlowFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceOpticalFlowFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceOpticalFlowFeaturesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, opticalFlow );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceOpticalFlowFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opticalFlow == rhs.opticalFlow );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType       = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV;
+    void *                              pNext       = {};
+    VULKAN_HPP_NAMESPACE::Bool32        opticalFlow = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceOpticalFlowFeaturesNV>
+  {
+    using Type = PhysicalDeviceOpticalFlowFeaturesNV;
+  };
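+
+  // NOTE (editorial, not part of the generated header): the generated setters return
+  // *this, so structs can be filled fluently; vulkan.hpp's StructureChain can also
+  // assemble the pNext chain at compile time. A minimal sketch, assuming Vulkan >= 1.1
+  // and a valid `physicalDevice` handle:
+  //
+  //   vk::StructureChain<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceOpticalFlowFeaturesNV> chain;
+  //   physicalDevice.getFeatures2( &chain.get<vk::PhysicalDeviceFeatures2>() );
+  //   bool hasOpticalFlow = chain.get<vk::PhysicalDeviceOpticalFlowFeaturesNV>().opticalFlow;
+  //
+  //   // or, when filling a struct by hand:
+  //   auto features = vk::PhysicalDeviceOpticalFlowFeaturesNV{}.setOpticalFlow( VK_TRUE );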
+
+  struct PhysicalDeviceOpticalFlowPropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceOpticalFlowPropertiesNV;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedOutputGridSizes_ = {},
+                                                                VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedHintGridSizes_ = {},
+                                                                VULKAN_HPP_NAMESPACE::Bool32                     hintSupported_ = {},
+                                                                VULKAN_HPP_NAMESPACE::Bool32                     costSupported_ = {},
+                                                                VULKAN_HPP_NAMESPACE::Bool32                     bidirectionalFlowSupported_ = {},
+                                                                VULKAN_HPP_NAMESPACE::Bool32                     globalFlowSupported_ = {},
+                                                                uint32_t                                         minWidth_ = {},
+                                                                uint32_t                                         minHeight_ = {},
+                                                                uint32_t                                         maxWidth_ = {},
+                                                                uint32_t                                         maxHeight_ = {},
+                                                                uint32_t                                         maxNumRegionsOfInterest_ = {},
+                                                                void *                                           pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , supportedOutputGridSizes( supportedOutputGridSizes_ )
+      , supportedHintGridSizes( supportedHintGridSizes_ )
+      , hintSupported( hintSupported_ )
+      , costSupported( costSupported_ )
+      , bidirectionalFlowSupported( bidirectionalFlowSupported_ )
+      , globalFlowSupported( globalFlowSupported_ )
+      , minWidth( minWidth_ )
+      , minHeight( minHeight_ )
+      , maxWidth( maxWidth_ )
+      , maxHeight( maxHeight_ )
+      , maxNumRegionsOfInterest( maxNumRegionsOfInterest_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceOpticalFlowPropertiesNV( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceOpticalFlowPropertiesNV( *reinterpret_cast<PhysicalDeviceOpticalFlowPropertiesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceOpticalFlowPropertiesNV & operator=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceOpticalFlowPropertiesNV & operator=( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceOpticalFlowPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceOpticalFlowPropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceOpticalFlowPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceOpticalFlowPropertiesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &,
+               VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       supportedOutputGridSizes,
+                       supportedHintGridSizes,
+                       hintSupported,
+                       costSupported,
+                       bidirectionalFlowSupported,
+                       globalFlowSupported,
+                       minWidth,
+                       minHeight,
+                       maxWidth,
+                       maxHeight,
+                       maxNumRegionsOfInterest );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceOpticalFlowPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedOutputGridSizes == rhs.supportedOutputGridSizes ) &&
+             ( supportedHintGridSizes == rhs.supportedHintGridSizes ) && ( hintSupported == rhs.hintSupported ) && ( costSupported == rhs.costSupported ) &&
+             ( bidirectionalFlowSupported == rhs.bidirectionalFlowSupported ) && ( globalFlowSupported == rhs.globalFlowSupported ) &&
+             ( minWidth == rhs.minWidth ) && ( minHeight == rhs.minHeight ) && ( maxWidth == rhs.maxWidth ) && ( maxHeight == rhs.maxHeight ) &&
+             ( maxNumRegionsOfInterest == rhs.maxNumRegionsOfInterest );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType              sType                      = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV;
+    void *                                           pNext                      = {};
+    VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedOutputGridSizes   = {};
+    VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedHintGridSizes     = {};
+    VULKAN_HPP_NAMESPACE::Bool32                     hintSupported              = {};
+    VULKAN_HPP_NAMESPACE::Bool32                     costSupported              = {};
+    VULKAN_HPP_NAMESPACE::Bool32                     bidirectionalFlowSupported = {};
+    VULKAN_HPP_NAMESPACE::Bool32                     globalFlowSupported        = {};
+    uint32_t                                         minWidth                   = {};
+    uint32_t                                         minHeight                  = {};
+    uint32_t                                         maxWidth                   = {};
+    uint32_t                                         maxHeight                  = {};
+    uint32_t                                         maxNumRegionsOfInterest    = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceOpticalFlowPropertiesNV>
+  {
+    using Type = PhysicalDeviceOpticalFlowPropertiesNV;
+  };
+
+  struct PhysicalDevicePCIBusInfoPropertiesEXT
+  {
+    using NativeType = VkPhysicalDevicePCIBusInfoPropertiesEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT(
+      uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , pciDomain( pciDomain_ )
+      , pciBus( pciBus_ )
+      , pciDevice( pciDevice_ )
+      , pciFunction( pciFunction_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePCIBusInfoPropertiesEXT( *reinterpret_cast<PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDevicePCIBusInfoPropertiesEXT const
&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pciDomain, pciBus, pciDevice, pciFunction ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pciDomain == rhs.pciDomain ) && ( pciBus == rhs.pciBus ) && ( pciDevice == rhs.pciDevice ) && + ( pciFunction == rhs.pciFunction ); +# endif + } + + bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; + void * pNext = {}; + uint32_t pciDomain = {}; + uint32_t pciBus = {}; + uint32_t pciDevice = {}; + uint32_t pciFunction = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePCIBusInfoPropertiesEXT; + }; + + struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT + { + using NativeType = VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pageableDeviceLocalMemory( pageableDeviceLocalMemory_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & + operator=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & operator=( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & + setPageableDeviceLocalMemory( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ ) VULKAN_HPP_NOEXCEPT + { + pageableDeviceLocalMemory = pageableDeviceLocalMemory_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT 
const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pageableDeviceLocalMemory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pageableDeviceLocalMemory == rhs.pageableDeviceLocalMemory ); +# endif + } + + bool operator!=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + }; + + struct PhysicalDevicePerStageDescriptorSetFeaturesNV + { + using NativeType = VkPhysicalDevicePerStageDescriptorSetFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePerStageDescriptorSetFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 perStageDescriptorSet_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicPipelineLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , perStageDescriptorSet( perStageDescriptorSet_ ) + , dynamicPipelineLayout( dynamicPipelineLayout_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePerStageDescriptorSetFeaturesNV( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerStageDescriptorSetFeaturesNV( VkPhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePerStageDescriptorSetFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePerStageDescriptorSetFeaturesNV & operator=( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePerStageDescriptorSetFeaturesNV & operator=( VkPhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & + setPerStageDescriptorSet( VULKAN_HPP_NAMESPACE::Bool32 perStageDescriptorSet_ ) VULKAN_HPP_NOEXCEPT + { + perStageDescriptorSet = perStageDescriptorSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & + setDynamicPipelineLayout( VULKAN_HPP_NAMESPACE::Bool32 
dynamicPipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + dynamicPipelineLayout = dynamicPipelineLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, perStageDescriptorSet, dynamicPipelineLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePerStageDescriptorSetFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perStageDescriptorSet == rhs.perStageDescriptorSet ) && + ( dynamicPipelineLayout == rhs.dynamicPipelineLayout ); +# endif + } + + bool operator!=( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 perStageDescriptorSet = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicPipelineLayout = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePerStageDescriptorSetFeaturesNV; + }; + + struct PhysicalDevicePerformanceQueryFeaturesKHR + { + using NativeType = VkPhysicalDevicePerformanceQueryFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , performanceCounterQueryPools( performanceCounterQueryPools_ ) + , performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePerformanceQueryFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & + 
setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT + { + performanceCounterQueryPools = performanceCounterQueryPools_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & + setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT + { + performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, performanceCounterQueryPools, performanceCounterMultipleQueryPools ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCounterQueryPools == rhs.performanceCounterQueryPools ) && + ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools ); +# endif + } + + bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePerformanceQueryFeaturesKHR; + }; + + struct PhysicalDevicePerformanceQueryPropertiesKHR + { + using NativeType = VkPhysicalDevicePerformanceQueryPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePerformanceQueryPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, allowCommandBufferQueryCopies ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies ); +# endif + } + + bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePerformanceQueryPropertiesKHR; + }; + + struct PhysicalDevicePipelineCreationCacheControlFeatures + { + using NativeType = VkPhysicalDevicePipelineCreationCacheControlFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pipelineCreationCacheControl( pipelineCreationCacheControl_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineCreationCacheControlFeatures( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineCreationCacheControlFeatures( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineCreationCacheControlFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineCreationCacheControlFeatures & + operator=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePipelineCreationCacheControlFeatures & operator=( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & + setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCreationCacheControl = pipelineCreationCacheControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator 
VkPhysicalDevicePipelineCreationCacheControlFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineCreationCacheControlFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineCreationCacheControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeatures const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ); +# endif + } + + bool operator!=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineCreationCacheControlFeatures; + }; + + using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures; + + struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR + { + using NativeType = VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pipelineExecutableInfo( pipelineExecutableInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT + { + 
pipelineExecutableInfo = pipelineExecutableInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineExecutableInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineExecutableInfo == rhs.pipelineExecutableInfo ); +# endif + } + + bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + }; + + struct PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT + { + using NativeType = VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineLibraryGroupHandles_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pipelineLibraryGroupHandles( pipelineLibraryGroupHandles_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & + operator=( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & operator=( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & + setPipelineLibraryGroupHandles( VULKAN_HPP_NAMESPACE::Bool32 pipelineLibraryGroupHandles_ ) VULKAN_HPP_NOEXCEPT + { + 
pipelineLibraryGroupHandles = pipelineLibraryGroupHandles_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineLibraryGroupHandles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineLibraryGroupHandles == rhs.pipelineLibraryGroupHandles ); +# endif + } + + bool operator!=( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineLibraryGroupHandles = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + }; + + struct PhysicalDevicePipelinePropertiesFeaturesEXT + { + using NativeType = VkPhysicalDevicePipelinePropertiesFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pipelinePropertiesIdentifier( pipelinePropertiesIdentifier_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelinePropertiesFeaturesEXT( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelinePropertiesFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT & + setPipelinePropertiesIdentifier( VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier_ ) VULKAN_HPP_NOEXCEPT + { + pipelinePropertiesIdentifier = pipelinePropertiesIdentifier_; + return *this; + } +#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDevicePipelinePropertiesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelinePropertiesFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelinePropertiesIdentifier ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelinePropertiesFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelinePropertiesIdentifier == rhs.pipelinePropertiesIdentifier ); +# endif + } + + bool operator!=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelinePropertiesFeaturesEXT; + }; + + struct PhysicalDevicePipelineProtectedAccessFeaturesEXT + { + using NativeType = VkPhysicalDevicePipelineProtectedAccessFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineProtectedAccessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pipelineProtectedAccess( pipelineProtectedAccess_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineProtectedAccessFeaturesEXT( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineProtectedAccessFeaturesEXT( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineProtectedAccessFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineProtectedAccessFeaturesEXT & operator=( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePipelineProtectedAccessFeaturesEXT & operator=( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeaturesEXT & + setPipelineProtectedAccess( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ ) VULKAN_HPP_NOEXCEPT + { + pipelineProtectedAccess = pipelineProtectedAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDevicePipelineProtectedAccessFeaturesEXT const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineProtectedAccessFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineProtectedAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineProtectedAccess == rhs.pipelineProtectedAccess ); +# endif + } + + bool operator!=( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineProtectedAccessFeaturesEXT; + }; + + struct PhysicalDevicePipelineRobustnessFeaturesEXT + { + using NativeType = VkPhysicalDevicePipelineRobustnessFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pipelineRobustness( pipelineRobustness_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeaturesEXT( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineRobustnessFeaturesEXT( VkPhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineRobustnessFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineRobustnessFeaturesEXT & operator=( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePipelineRobustnessFeaturesEXT & operator=( VkPhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeaturesEXT & + setPipelineRobustness( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ ) VULKAN_HPP_NOEXCEPT + { + pipelineRobustness = pipelineRobustness_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDevicePipelineRobustnessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) 
+# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineRobustness ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineRobustnessFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineRobustness == rhs.pipelineRobustness ); +# endif + } + + bool operator!=( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineRobustnessFeaturesEXT; + }; + + struct PhysicalDevicePipelineRobustnessPropertiesEXT + { + using NativeType = VkPhysicalDevicePipelineRobustnessPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineRobustnessPropertiesEXT( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessStorageBuffers_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessUniformBuffers_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessVertexInputs_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT defaultRobustnessImages_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , defaultRobustnessStorageBuffers( defaultRobustnessStorageBuffers_ ) + , defaultRobustnessUniformBuffers( defaultRobustnessUniformBuffers_ ) + , defaultRobustnessVertexInputs( defaultRobustnessVertexInputs_ ) + , defaultRobustnessImages( defaultRobustnessImages_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineRobustnessPropertiesEXT( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineRobustnessPropertiesEXT( VkPhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineRobustnessPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineRobustnessPropertiesEXT & operator=( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePipelineRobustnessPropertiesEXT & operator=( VkPhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePipelineRobustnessPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineRobustnessPropertiesEXT &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, defaultRobustnessStorageBuffers, defaultRobustnessUniformBuffers, defaultRobustnessVertexInputs, defaultRobustnessImages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineRobustnessPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( defaultRobustnessStorageBuffers == rhs.defaultRobustnessStorageBuffers ) && + ( defaultRobustnessUniformBuffers == rhs.defaultRobustnessUniformBuffers ) && + ( defaultRobustnessVertexInputs == rhs.defaultRobustnessVertexInputs ) && ( defaultRobustnessImages == rhs.defaultRobustnessImages ); +# endif + } + + bool operator!=( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessStorageBuffers = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessUniformBuffers = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessVertexInputs = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT defaultRobustnessImages = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineRobustnessPropertiesEXT; + }; + + struct PhysicalDevicePointClippingProperties + { + using NativeType = VkPhysicalDevicePointClippingProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pointClippingBehavior( pointClippingBehavior_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePointClippingProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePointClippingProperties & operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; 
+ } + + operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pointClippingBehavior ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePointClippingProperties const & ) const = default; +#else + bool operator==( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pointClippingBehavior == rhs.pointClippingBehavior ); +# endif + } + + bool operator!=( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePointClippingProperties; + }; + + using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDevicePortabilitySubsetFeaturesKHR + { + using NativeType = VkPhysicalDevicePortabilitySubsetFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 events_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , constantAlphaColorBlendFactors( constantAlphaColorBlendFactors_ ) + , events( events_ ) + , imageViewFormatReinterpretation( imageViewFormatReinterpretation_ ) + , imageViewFormatSwizzle( imageViewFormatSwizzle_ ) + , imageView2DOn3DImage( imageView2DOn3DImage_ ) + , multisampleArrayImage( multisampleArrayImage_ ) + , mutableComparisonSamplers( mutableComparisonSamplers_ ) + , pointPolygons( pointPolygons_ ) + , samplerMipLodBias( samplerMipLodBias_ ) + , separateStencilMaskRef( separateStencilMaskRef_ ) + , shaderSampleRateInterpolationFunctions( 
+                                                  shaderSampleRateInterpolationFunctions_ )
+      , tessellationIsolines( tessellationIsolines_ )
+      , tessellationPointMode( tessellationPointMode_ )
+      , triangleFans( triangleFans_ )
+      , vertexAttributeAccessBeyondStride( vertexAttributeAccessBeyondStride_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePortabilitySubsetFeaturesKHR( *reinterpret_cast<PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setConstantAlphaColorBlendFactors( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT
+    {
+      events = events_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setImageViewFormatReinterpretation( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageViewFormatReinterpretation = imageViewFormatReinterpretation_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageViewFormatSwizzle = imageViewFormatSwizzle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView2DOn3DImage = imageView2DOn3DImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multisampleArrayImage = multisampleArrayImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mutableComparisonSamplers = mutableComparisonSamplers_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pointPolygons = pointPolygons_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setSamplerMipLodBias( VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerMipLodBias = samplerMipLodBias_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT
+    {
+      separateStencilMaskRef = separateStencilMaskRef_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setShaderSampleRateInterpolationFunctions( VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tessellationIsolines = tessellationIsolines_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tessellationPointMode = tessellationPointMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT
+    {
+      triangleFans = triangleFans_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
+      setVertexAttributeAccessBeyondStride( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_;
+      return *this;
+    }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetFeaturesKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       constantAlphaColorBlendFactors,
+                       events,
+                       imageViewFormatReinterpretation,
+                       imageViewFormatSwizzle,
+                       imageView2DOn3DImage,
+                       multisampleArrayImage,
+                       mutableComparisonSamplers,
+                       pointPolygons,
+                       samplerMipLodBias,
+                       separateStencilMaskRef,
+                       shaderSampleRateInterpolationFunctions,
+                       tessellationIsolines,
+                       tessellationPointMode,
+                       triangleFans,
+                       vertexAttributeAccessBeyondStride );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const & ) const = default;
+# else
+    bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors ) &&
+             ( events == rhs.events ) && ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation ) &&
+             ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle ) && ( imageView2DOn3DImage == rhs.imageView2DOn3DImage ) &&
+             ( multisampleArrayImage == rhs.multisampleArrayImage ) && ( mutableComparisonSamplers == rhs.mutableComparisonSamplers ) &&
+             ( pointPolygons == rhs.pointPolygons ) && ( samplerMipLodBias == rhs.samplerMipLodBias ) &&
+             ( separateStencilMaskRef == rhs.separateStencilMaskRef ) &&
+             ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions ) && ( tessellationIsolines == rhs.tessellationIsolines ) &&
+             ( tessellationPointMode == rhs.tessellationPointMode ) && ( triangleFans == rhs.triangleFans ) &&
+             ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride );
+# endif
+    }
+
+    bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {};
+    VULKAN_HPP_NAMESPACE::Bool32 events = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage = {};
+    VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pointPolygons = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias = {};
+    VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions = {};
+    VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode = {};
+    VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR>
+  {
+    using Type = PhysicalDevicePortabilitySubsetFeaturesKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct PhysicalDevicePortabilitySubsetPropertiesKHR
+  {
+    using NativeType = VkPhysicalDevicePortabilitySubsetPropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( uint32_t minVertexInputBindingStrideAlignment_ = {},
+                                                                       void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , minVertexInputBindingStrideAlignment( minVertexInputBindingStrideAlignment_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePortabilitySubsetPropertiesKHR( *reinterpret_cast<PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR &
+      setMinVertexInputBindingStrideAlignment( uint32_t minVertexInputBindingStrideAlignment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minVertexInputBindingStrideAlignment = minVertexInputBindingStrideAlignment_;
+      return *this;
+    }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this );
+    }
+
+    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, minVertexInputBindingStrideAlignment );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const & ) const = default;
+# else
+    bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment );
+# endif
+    }
+
+    bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
+    void * pNext = {};
+    uint32_t minVertexInputBindingStrideAlignment = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR>
+  {
+    using Type = PhysicalDevicePortabilitySubsetPropertiesKHR;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  struct PhysicalDevicePresentBarrierFeaturesNV
+  {
+    using NativeType = VkPhysicalDevicePresentBarrierFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrier_ = {},
+                                                                 void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , presentBarrier( presentBarrier_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePresentBarrierFeaturesNV( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePresentBarrierFeaturesNV( *reinterpret_cast<PhysicalDevicePresentBarrierFeaturesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDevicePresentBarrierFeaturesNV & operator=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePresentBarrierFeaturesNV & operator=( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPresentBarrier( VULKAN_HPP_NAMESPACE::Bool32 presentBarrier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentBarrier = presentBarrier_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDevicePresentBarrierFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePresentBarrierFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDevicePresentBarrierFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePresentBarrierFeaturesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, presentBarrier );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePresentBarrierFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrier == rhs.presentBarrier );
+# endif
+    }
+
+    bool operator!=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 presentBarrier = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePresentBarrierFeaturesNV>
+  {
+    using Type = PhysicalDevicePresentBarrierFeaturesNV;
+  };
+
+  struct PhysicalDevicePresentIdFeaturesKHR
+  {
+    using NativeType = VkPhysicalDevicePresentIdFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentIdFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentId_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , presentId( presentId_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePresentIdFeaturesKHR( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePresentIdFeaturesKHR( *reinterpret_cast<PhysicalDevicePresentIdFeaturesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDevicePresentIdFeaturesKHR & operator=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePresentIdFeaturesKHR & operator=( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPresentId( VULKAN_HPP_NAMESPACE::Bool32 presentId_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentId = presentId_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDevicePresentIdFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePresentIdFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDevicePresentIdFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePresentIdFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, presentId );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePresentIdFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentId == rhs.presentId );
+# endif
+    }
+
+    bool operator!=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentIdFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 presentId = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePresentIdFeaturesKHR>
+  {
+    using Type = PhysicalDevicePresentIdFeaturesKHR;
+  };
+
+  struct PhysicalDevicePresentWaitFeaturesKHR
+  {
+    using NativeType = VkPhysicalDevicePresentWaitFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , presentWait( presentWait_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePresentWaitFeaturesKHR( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePresentWaitFeaturesKHR( *reinterpret_cast<PhysicalDevicePresentWaitFeaturesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDevicePresentWaitFeaturesKHR & operator=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePresentWaitFeaturesKHR & operator=( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPresentWait( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentWait = presentWait_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDevicePresentWaitFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePresentWaitFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDevicePresentWaitFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePresentWaitFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, presentWait );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePresentWaitFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentWait == rhs.presentWait );
+# endif
+    }
+
+    bool operator!=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 presentWait = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePresentWaitFeaturesKHR>
+  {
+    using Type = PhysicalDevicePresentWaitFeaturesKHR;
+  };
+  struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT
+  {
+    using NativeType = VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ = {},
+                                                                                VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ = {},
+                                                                                void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , primitiveTopologyListRestart( primitiveTopologyListRestart_ )
+      , primitiveTopologyPatchListRestart( primitiveTopologyPatchListRestart_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( *reinterpret_cast<PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &
+      operator=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & operator=( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &
+      setPrimitiveTopologyListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveTopologyListRestart = primitiveTopologyListRestart_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &
+      setPrimitiveTopologyPatchListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveTopologyPatchListRestart = primitiveTopologyPatchListRestart_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, primitiveTopologyListRestart, primitiveTopologyPatchListRestart );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitiveTopologyListRestart == rhs.primitiveTopologyListRestart ) &&
+             ( primitiveTopologyPatchListRestart == rhs.primitiveTopologyPatchListRestart );
+# endif
+    }
+
+    bool operator!=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>
+  {
+    using Type = PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
+  };
+
+  struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT
+  {
+    using NativeType = VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_ = {},
+                                                                            VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ = {},
+                                                                            VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ = {},
+                                                                            void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , primitivesGeneratedQuery( primitivesGeneratedQuery_ )
+      , primitivesGeneratedQueryWithRasterizerDiscard( primitivesGeneratedQueryWithRasterizerDiscard_ )
+      , primitivesGeneratedQueryWithNonZeroStreams( primitivesGeneratedQueryWithNonZeroStreams_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( *reinterpret_cast<PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &
+      operator=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & operator=( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &
+      setPrimitivesGeneratedQuery( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitivesGeneratedQuery = primitivesGeneratedQuery_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &
+      setPrimitivesGeneratedQueryWithRasterizerDiscard( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitivesGeneratedQueryWithRasterizerDiscard = primitivesGeneratedQueryWithRasterizerDiscard_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &
+      setPrimitivesGeneratedQueryWithNonZeroStreams( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitivesGeneratedQueryWithNonZeroStreams = primitivesGeneratedQueryWithNonZeroStreams_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, primitivesGeneratedQuery, primitivesGeneratedQueryWithRasterizerDiscard, primitivesGeneratedQueryWithNonZeroStreams );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitivesGeneratedQuery == rhs.primitivesGeneratedQuery ) &&
+             ( primitivesGeneratedQueryWithRasterizerDiscard == rhs.primitivesGeneratedQueryWithRasterizerDiscard ) &&
+             ( primitivesGeneratedQueryWithNonZeroStreams == rhs.primitivesGeneratedQueryWithNonZeroStreams );
+# endif
+    }
+
+    bool operator!=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>
+  {
+    using Type = PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+  };
+
+  struct PhysicalDevicePrivateDataFeatures
+  {
+    using NativeType = VkPhysicalDevicePrivateDataFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , privateData( privateData_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePrivateDataFeatures( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePrivateDataFeatures( *reinterpret_cast<PhysicalDevicePrivateDataFeatures const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDevicePrivateDataFeatures & operator=( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePrivateDataFeatures & operator=( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      privateData = privateData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDevicePrivateDataFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeatures *>( this );
+    }
+
+    operator VkPhysicalDevicePrivateDataFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePrivateDataFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, privateData );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePrivateDataFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateData == rhs.privateData );
+# endif
+    }
+
+    bool operator!=( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePrivateDataFeatures>
+  {
+    using Type = PhysicalDevicePrivateDataFeatures;
+  };
+
+  using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures;
+
+  struct PhysicalDeviceSparseProperties
+  {
+    using NativeType = VkPhysicalDeviceSparseProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT
+      : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ )
+      , residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ )
+      , residencyStandard3DBlockShape( residencyStandard3DBlockShape_ )
+      , residencyAlignedMipSize( residencyAlignedMipSize_ )
+      , residencyNonResidentStrict( residencyNonResidentStrict_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSparseProperties( *reinterpret_cast<PhysicalDeviceSparseProperties const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSparseProperties *>( this );
+    }
+
+    operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSparseProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( residencyStandard2DBlockShape,
+                       residencyStandard2DMultisampleBlockShape,
+                       residencyStandard3DBlockShape,
+                       residencyAlignedMipSize,
+                       residencyNonResidentStrict );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) &&
+             ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) &&
+             ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) &&
+             ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
+  };
+
+  struct PhysicalDeviceProperties
+  {
+    using NativeType = VkPhysicalDeviceProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( uint32_t apiVersion_ = {},
+                                                      uint32_t driverVersion_ = {},
+                                                      uint32_t vendorID_ = {},
+                                                      uint32_t deviceID_ = {},
+                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther,
+                                                      std::array<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const & deviceName_ = {},
+                                                      std::array<uint8_t, VK_UUID_SIZE> const & pipelineCacheUUID_ = {},
+                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {},
+                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : apiVersion( apiVersion_ )
+      , driverVersion( driverVersion_ )
+      , vendorID( vendorID_ )
+      , deviceID( deviceID_ )
+      , deviceType( deviceType_ )
+      , deviceName( deviceName_ )
+      , pipelineCacheUUID( pipelineCacheUUID_ )
+      , limits( limits_ )
+      , sparseProperties( sparseProperties_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProperties( *reinterpret_cast<PhysicalDeviceProperties const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProperties *>( this );
+    }
+
+    operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::PhysicalDeviceType const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const &,
+               VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( apiVersion, driverVersion, vendorID, deviceID, deviceType, deviceName, pipelineCacheUUID, limits, sparseProperties );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) &&
+             ( deviceType == rhs.deviceType ) && ( deviceName == rhs.deviceName ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) &&
+             ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t apiVersion = {};
+    uint32_t driverVersion = {};
+    uint32_t vendorID = {};
+    uint32_t deviceID = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> deviceName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {};
+  };
+
+  struct PhysicalDeviceProperties2
+  {
+    using NativeType = VkPhysicalDeviceProperties2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {},
+                                                       void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , properties( properties_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProperties2( *reinterpret_cast<PhysicalDeviceProperties2 const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProperties2 *>( this );
+    }
+
+    operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProperties2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, properties );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceProperties2 const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProperties2>
+  {
+    using Type = PhysicalDeviceProperties2;
+  };
+
+  using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
+
+  struct PhysicalDeviceProtectedMemoryFeatures
+  {
+    using NativeType = VkPhysicalDeviceProtectedMemoryFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {},
+                                                                void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , protectedMemory( protectedMemory_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProtectedMemoryFeatures( *reinterpret_cast<PhysicalDeviceProtectedMemoryFeatures const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceProtectedMemoryFeatures & operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      protectedMemory = protectedMemory_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, protectedMemory );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedMemory == rhs.protectedMemory );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryFeatures>
+  {
+    using Type = PhysicalDeviceProtectedMemoryFeatures;
+  };
+
+  struct PhysicalDeviceProtectedMemoryProperties
+  {
+    using NativeType = VkPhysicalDeviceProtectedMemoryProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {},
+                                                                  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , protectedNoFault( protectedNoFault_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProtectedMemoryProperties( *reinterpret_cast<PhysicalDeviceProtectedMemoryProperties const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceProtectedMemoryProperties & operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties *>( this );
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, protectedNoFault );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceProtectedMemoryProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedNoFault == rhs.protectedNoFault );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryProperties>
+  {
+    using Type = PhysicalDeviceProtectedMemoryProperties;
+  };
+
+  struct PhysicalDeviceProvokingVertexFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceProvokingVertexFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ = {},
+                                                                   void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , provokingVertexLast( provokingVertexLast_ )
+      , transformFeedbackPreservesProvokingVertex( transformFeedbackPreservesProvokingVertex_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProvokingVertexFeaturesEXT( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProvokingVertexFeaturesEXT( *reinterpret_cast<PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceProvokingVertexFeaturesEXT & operator=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceProvokingVertexFeaturesEXT & operator=( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT &
+      setProvokingVertexLast( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ ) VULKAN_HPP_NOEXCEPT
+    {
+      provokingVertexLast = provokingVertexLast_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT &
+      setTransformFeedbackPreservesProvokingVertex( VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProvokingVertexFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, provokingVertexLast, transformFeedbackPreservesProvokingVertex );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceProvokingVertexFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexLast == rhs.provokingVertexLast ) &&
+             ( transformFeedbackPreservesProvokingVertex == rhs.transformFeedbackPreservesProvokingVertex );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT>
+  {
+    using Type = PhysicalDeviceProvokingVertexFeaturesEXT;
+  };
+
+  struct PhysicalDeviceProvokingVertexPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceProvokingVertexPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceProvokingVertexPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline_ = {},
+                                                  VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {},
+                                                  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , provokingVertexModePerPipeline( provokingVertexModePerPipeline_ )
+      , transformFeedbackPreservesTriangleFanProvokingVertex( transformFeedbackPreservesTriangleFanProvokingVertex_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceProvokingVertexPropertiesEXT( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceProvokingVertexPropertiesEXT( *reinterpret_cast<PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceProvokingVertexPropertiesEXT & operator=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceProvokingVertexPropertiesEXT & operator=( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, provokingVertexModePerPipeline, transformFeedbackPreservesTriangleFanProvokingVertex );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceProvokingVertexPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline ) &&
+             ( transformFeedbackPreservesTriangleFanProvokingVertex == rhs.transformFeedbackPreservesTriangleFanProvokingVertex );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline = {};
+    VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT>
+  {
+    using Type = PhysicalDeviceProvokingVertexPropertiesEXT;
+  };
+
+  struct PhysicalDevicePushDescriptorPropertiesKHR
+  {
+    using NativeType = VkPhysicalDevicePushDescriptorPropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , maxPushDescriptors( maxPushDescriptors_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDevicePushDescriptorPropertiesKHR( *reinterpret_cast<PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDevicePushDescriptorPropertiesKHR & operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDevicePushDescriptorPropertiesKHR & operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDevicePushDescriptorPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR *>( this );
+    }
+
+    operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxPushDescriptors );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPushDescriptors == rhs.maxPushDescriptors );
+# endif
+    }
+
+    bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+    void * pNext = {};
+    uint32_t maxPushDescriptors = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDevicePushDescriptorPropertiesKHR>
+  {
+    using Type = PhysicalDevicePushDescriptorPropertiesKHR;
+  };
+
+  struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ = {},
+                                                                   void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , formatRgba10x6WithoutYCbCrSampler( formatRgba10x6WithoutYCbCrSampler_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRGBA10X6FormatsFeaturesEXT( *reinterpret_cast<PhysicalDeviceRGBA10X6FormatsFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT &
+      setFormatRgba10x6WithoutYCbCrSampler( VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      formatRgba10x6WithoutYCbCrSampler = formatRgba10x6WithoutYCbCrSampler_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, formatRgba10x6WithoutYCbCrSampler );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatRgba10x6WithoutYCbCrSampler == rhs.formatRgba10x6WithoutYCbCrSampler );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT>
+  {
+    using Type = PhysicalDeviceRGBA10X6FormatsFeaturesEXT;
+  };
+
+  struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ = {},
+                                                                   void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , rasterizationOrderColorAttachmentAccess( rasterizationOrderColorAttachmentAccess_ )
+      , rasterizationOrderDepthAttachmentAccess( rasterizationOrderDepthAttachmentAccess_ )
+      , rasterizationOrderStencilAttachmentAccess( rasterizationOrderStencilAttachmentAccess_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT(
+          *reinterpret_cast<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &
PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & + operator=( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & + setRasterizationOrderColorAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationOrderColorAttachmentAccess = rasterizationOrderColorAttachmentAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & + setRasterizationOrderDepthAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationOrderDepthAttachmentAccess = rasterizationOrderDepthAttachmentAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & + setRasterizationOrderStencilAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationOrderStencilAttachmentAccess = rasterizationOrderStencilAttachmentAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, rasterizationOrderColorAttachmentAccess, rasterizationOrderDepthAttachmentAccess, rasterizationOrderStencilAttachmentAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrderColorAttachmentAccess == rhs.rasterizationOrderColorAttachmentAccess ) && + ( rasterizationOrderDepthAttachmentAccess == rhs.rasterizationOrderDepthAttachmentAccess ) && + ( rasterizationOrderStencilAttachmentAccess == rhs.rasterizationOrderStencilAttachmentAccess ); +# endif + } + + bool operator!=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 
rasterizationOrderStencilAttachmentAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + }; + + using PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + + struct PhysicalDeviceRayQueryFeaturesKHR + { + using NativeType = VkPhysicalDeviceRayQueryFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , rayQuery( rayQuery_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayQueryFeaturesKHR( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayQueryFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayQueryFeaturesKHR & operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayQueryFeaturesKHR & operator=( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT + { + rayQuery = rayQuery_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayQuery ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayQueryFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayQuery == rhs.rayQuery ); +# endif + } + + bool operator!=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayQueryFeaturesKHR; + }; + + struct PhysicalDeviceRayTracingInvocationReorderFeaturesNV + { + using NativeType = VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
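+    // Feature structs such as PhysicalDeviceRayQueryFeaturesKHR above are
+    // filled in by the implementation when chained through getFeatures2. A
+    // minimal sketch, assuming `gpu` is an existing vk::PhysicalDevice and
+    // VK_KHR_ray_query is advertised by the driver:
+    //
+    //   vk::PhysicalDeviceRayQueryFeaturesKHR rayQueryFeatures{};
+    //   vk::PhysicalDeviceFeatures2           features2{};
+    //   features2.pNext = &rayQueryFeatures;
+    //   gpu.getFeatures2( &features2 );
+    //   bool rayQuerySupported = ( rayQueryFeatures.rayQuery == VK_TRUE );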
StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , rayTracingInvocationReorder( rayTracingInvocationReorder_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingInvocationReorderFeaturesNV( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingInvocationReorderFeaturesNV( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingInvocationReorderFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingInvocationReorderFeaturesNV & + operator=( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingInvocationReorderFeaturesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV & + setRayTracingInvocationReorder( VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingInvocationReorder = rayTracingInvocationReorder_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingInvocationReorder ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingInvocationReorder == rhs.rayTracingInvocationReorder ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingInvocationReorderFeaturesNV; + }; + + struct PhysicalDeviceRayTracingInvocationReorderPropertiesNV + { + using NativeType = VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderPropertiesNV( + VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV rayTracingInvocationReorderReorderingHint_ = + VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV::eNone, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , rayTracingInvocationReorderReorderingHint( rayTracingInvocationReorderReorderingHint_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingInvocationReorderPropertiesNV( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingInvocationReorderPropertiesNV( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingInvocationReorderPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingInvocationReorderPropertiesNV & + operator=( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingInvocationReorderPropertiesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingInvocationReorderReorderingHint ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingInvocationReorderReorderingHint == rhs.rayTracingInvocationReorderReorderingHint ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV rayTracingInvocationReorderReorderingHint = + VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV::eNone; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingInvocationReorderPropertiesNV; + }; + + struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR + { + using NativeType = VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMaintenance1FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 
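+    // Properties structs such as PhysicalDeviceRayTracingInvocationReorderPropertiesNV
+    // above are read-only outputs; vulkan.hpp can fill a whole chain of them in
+    // one call. Sketch, assuming `gpu` is a vk::PhysicalDevice exposing the extension:
+    //
+    //   auto chain = gpu.getProperties2<vk::PhysicalDeviceProperties2,
+    //                                   vk::PhysicalDeviceRayTracingInvocationReorderPropertiesNV>();
+    //   auto hint  = chain.get<vk::PhysicalDeviceRayTracingInvocationReorderPropertiesNV>()
+    //                  .rayTracingInvocationReorderReorderingHint;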
rayTracingMaintenance1_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , rayTracingMaintenance1( rayTracingMaintenance1_ ) + , rayTracingPipelineTraceRaysIndirect2( rayTracingPipelineTraceRaysIndirect2_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingMaintenance1FeaturesKHR( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingMaintenance1FeaturesKHR( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingMaintenance1FeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & + setRayTracingMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingMaintenance1 = rayTracingMaintenance1_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & + setRayTracingPipelineTraceRaysIndirect2( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineTraceRaysIndirect2 = rayTracingPipelineTraceRaysIndirect2_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingMaintenance1, rayTracingPipelineTraceRaysIndirect2 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingMaintenance1 == rhs.rayTracingMaintenance1 ) && + ( rayTracingPipelineTraceRaysIndirect2 == rhs.rayTracingPipelineTraceRaysIndirect2 ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1 = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2 = {}; + }; + + template <> + struct CppType + { + using Type 
= PhysicalDeviceRayTracingMaintenance1FeaturesKHR; + }; + + struct PhysicalDeviceRayTracingMotionBlurFeaturesNV + { + using NativeType = VkPhysicalDeviceRayTracingMotionBlurFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , rayTracingMotionBlur( rayTracingMotionBlur_ ) + , rayTracingMotionBlurPipelineTraceRaysIndirect( rayTracingMotionBlurPipelineTraceRaysIndirect_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingMotionBlurFeaturesNV( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingMotionBlurFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & + setRayTracingMotionBlur( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingMotionBlur = rayTracingMotionBlur_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & + setRayTracingMotionBlurPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingMotionBlurPipelineTraceRaysIndirect = rayTracingMotionBlurPipelineTraceRaysIndirect_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingMotionBlur, rayTracingMotionBlurPipelineTraceRaysIndirect ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingMotionBlur == rhs.rayTracingMotionBlur ) && + ( rayTracingMotionBlurPipelineTraceRaysIndirect == 
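+    // The setters all return *this, so feature structs can be filled fluently
+    // before being hooked into a pNext chain. Sketch:
+    //
+    //   auto motionBlur = vk::PhysicalDeviceRayTracingMotionBlurFeaturesNV{}
+    //                       .setRayTracingMotionBlur( VK_TRUE )
+    //                       .setRayTracingMotionBlurPipelineTraceRaysIndirect( VK_TRUE );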
rhs.rayTracingMotionBlurPipelineTraceRaysIndirect ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingMotionBlurFeaturesNV; + }; + + struct PhysicalDeviceRayTracingPipelineFeaturesKHR + { + using NativeType = VkPhysicalDeviceRayTracingPipelineFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , rayTracingPipeline( rayTracingPipeline_ ) + , rayTracingPipelineShaderGroupHandleCaptureReplay( rayTracingPipelineShaderGroupHandleCaptureReplay_ ) + , rayTracingPipelineShaderGroupHandleCaptureReplayMixed( rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) + , rayTracingPipelineTraceRaysIndirect( rayTracingPipelineTraceRaysIndirect_ ) + , rayTraversalPrimitiveCulling( rayTraversalPrimitiveCulling_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPipelineFeaturesKHR( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPipelineFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTracingPipeline( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipeline = rayTracingPipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTracingPipelineShaderGroupHandleCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineShaderGroupHandleCaptureReplay = rayTracingPipelineShaderGroupHandleCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & 
setRayTracingPipelineShaderGroupHandleCaptureReplayMixed( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineShaderGroupHandleCaptureReplayMixed = rayTracingPipelineShaderGroupHandleCaptureReplayMixed_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTracingPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineTraceRaysIndirect = rayTracingPipelineTraceRaysIndirect_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTraversalPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT + { + rayTraversalPrimitiveCulling = rayTraversalPrimitiveCulling_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + rayTracingPipeline, + rayTracingPipelineShaderGroupHandleCaptureReplay, + rayTracingPipelineShaderGroupHandleCaptureReplayMixed, + rayTracingPipelineTraceRaysIndirect, + rayTraversalPrimitiveCulling ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPipelineFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingPipeline == rhs.rayTracingPipeline ) && + ( rayTracingPipelineShaderGroupHandleCaptureReplay == rhs.rayTracingPipelineShaderGroupHandleCaptureReplay ) && + ( rayTracingPipelineShaderGroupHandleCaptureReplayMixed == rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed ) && + ( rayTracingPipelineTraceRaysIndirect == rhs.rayTracingPipelineTraceRaysIndirect ) && + ( rayTraversalPrimitiveCulling == rhs.rayTraversalPrimitiveCulling ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPipelineFeaturesKHR; + }; + + struct PhysicalDeviceRayTracingPipelinePropertiesKHR + { + using NativeType = VkPhysicalDeviceRayTracingPipelinePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR; + 
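+    // Queried feature structs double as inputs at device creation: chain them
+    // into DeviceCreateInfo::pNext with the desired members set to VK_TRUE.
+    // Sketch, assuming `gpu`, `queueInfos` and `extensions` already exist:
+    //
+    //   vk::PhysicalDeviceRayTracingPipelineFeaturesKHR rtFeatures{};
+    //   rtFeatures.rayTracingPipeline = VK_TRUE;
+    //   vk::DeviceCreateInfo createInfo{};
+    //   createInfo.setQueueCreateInfos( queueInfos ).setPEnabledExtensionNames( extensions );
+    //   createInfo.pNext = &rtFeatures;
+    //   vk::Device device = gpu.createDevice( createInfo );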
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR( uint32_t shaderGroupHandleSize_ = {}, + uint32_t maxRayRecursionDepth_ = {}, + uint32_t maxShaderGroupStride_ = {}, + uint32_t shaderGroupBaseAlignment_ = {}, + uint32_t shaderGroupHandleCaptureReplaySize_ = {}, + uint32_t maxRayDispatchInvocationCount_ = {}, + uint32_t shaderGroupHandleAlignment_ = {}, + uint32_t maxRayHitAttributeSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderGroupHandleSize( shaderGroupHandleSize_ ) + , maxRayRecursionDepth( maxRayRecursionDepth_ ) + , maxShaderGroupStride( maxShaderGroupStride_ ) + , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ) + , shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ ) + , maxRayDispatchInvocationCount( maxRayDispatchInvocationCount_ ) + , shaderGroupHandleAlignment( shaderGroupHandleAlignment_ ) + , maxRayHitAttributeSize( maxRayHitAttributeSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingPipelinePropertiesKHR( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPipelinePropertiesKHR( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPipelinePropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderGroupHandleSize, + maxRayRecursionDepth, + maxShaderGroupStride, + shaderGroupBaseAlignment, + shaderGroupHandleCaptureReplaySize, + maxRayDispatchInvocationCount, + shaderGroupHandleAlignment, + maxRayHitAttributeSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPipelinePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) && + ( maxRayRecursionDepth == rhs.maxRayRecursionDepth ) && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) && + ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize ) && + ( maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount ) && ( shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment ) && + ( maxRayHitAttributeSize == rhs.maxRayHitAttributeSize ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR; + void * pNext = {}; + uint32_t shaderGroupHandleSize = {}; + uint32_t maxRayRecursionDepth = {}; + uint32_t maxShaderGroupStride = {}; + uint32_t shaderGroupBaseAlignment = {}; + uint32_t shaderGroupHandleCaptureReplaySize = {}; + uint32_t maxRayDispatchInvocationCount = {}; + uint32_t shaderGroupHandleAlignment = {}; + uint32_t maxRayHitAttributeSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPipelinePropertiesKHR; + }; + + struct PhysicalDeviceRayTracingPositionFetchFeaturesKHR + { + using NativeType = VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPositionFetchFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPositionFetch_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , rayTracingPositionFetch( rayTracingPositionFetch_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingPositionFetchFeaturesKHR( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPositionFetchFeaturesKHR( VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPositionFetchFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingPositionFetchFeaturesKHR & operator=( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingPositionFetchFeaturesKHR & operator=( VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPositionFetchFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPositionFetchFeaturesKHR & + setRayTracingPositionFetch( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPositionFetch_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPositionFetch = rayTracingPositionFetch_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingPositionFetch ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( 
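+    // The shaderGroupHandleSize / shaderGroupHandleAlignment / shaderGroupBaseAlignment
+    // limits in PhysicalDeviceRayTracingPipelinePropertiesKHR above drive
+    // shader-binding-table layout. A common sizing step, sketched assuming
+    // `props` holds the queried struct:
+    //
+    //   auto alignUp = []( uint32_t value, uint32_t alignment )
+    //   { return ( value + alignment - 1 ) & ~( alignment - 1 ); };
+    //   uint32_t handleSizeAligned = alignUp( props.shaderGroupHandleSize, props.shaderGroupHandleAlignment );
+    //   // e.g. a 32-byte handle with a 64-byte alignment occupies a 64-byte record stride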
rayTracingPositionFetch == rhs.rayTracingPositionFetch ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPositionFetch = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPositionFetchFeaturesKHR; + }; + + struct PhysicalDeviceRayTracingPropertiesNV + { + using NativeType = VkPhysicalDeviceRayTracingPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {}, + uint32_t maxRecursionDepth_ = {}, + uint32_t maxShaderGroupStride_ = {}, + uint32_t shaderGroupBaseAlignment_ = {}, + uint64_t maxGeometryCount_ = {}, + uint64_t maxInstanceCount_ = {}, + uint64_t maxTriangleCount_ = {}, + uint32_t maxDescriptorSetAccelerationStructures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderGroupHandleSize( shaderGroupHandleSize_ ) + , maxRecursionDepth( maxRecursionDepth_ ) + , maxShaderGroupStride( maxShaderGroupStride_ ) + , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ) + , maxGeometryCount( maxGeometryCount_ ) + , maxInstanceCount( maxInstanceCount_ ) + , maxTriangleCount( maxTriangleCount_ ) + , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingPropertiesNV & operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderGroupHandleSize, + maxRecursionDepth, + maxShaderGroupStride, + shaderGroupBaseAlignment, + maxGeometryCount, + maxInstanceCount, + maxTriangleCount, + maxDescriptorSetAccelerationStructures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderGroupHandleSize == 
rhs.shaderGroupHandleSize ) && + ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) && + ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && ( maxGeometryCount == rhs.maxGeometryCount ) && + ( maxInstanceCount == rhs.maxInstanceCount ) && ( maxTriangleCount == rhs.maxTriangleCount ) && + ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV; + void * pNext = {}; + uint32_t shaderGroupHandleSize = {}; + uint32_t maxRecursionDepth = {}; + uint32_t maxShaderGroupStride = {}; + uint32_t shaderGroupBaseAlignment = {}; + uint64_t maxGeometryCount = {}; + uint64_t maxInstanceCount = {}; + uint64_t maxTriangleCount = {}; + uint32_t maxDescriptorSetAccelerationStructures = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPropertiesNV; + }; + + struct PhysicalDeviceRelaxedLineRasterizationFeaturesIMG + { + using NativeType = VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( VULKAN_HPP_NAMESPACE::Bool32 relaxedLineRasterization_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , relaxedLineRasterization( relaxedLineRasterization_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & + operator=( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & operator=( VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & + setRelaxedLineRasterization( VULKAN_HPP_NAMESPACE::Bool32 relaxedLineRasterization_ ) VULKAN_HPP_NOEXCEPT + { + relaxedLineRasterization = relaxedLineRasterization_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
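+    // With VULKAN_HPP_USE_REFLECT defined, operator== compares the std::tie'd
+    // member tuple returned by reflect(); otherwise it falls back to the
+    // explicit member-by-member comparison. The two are equivalent:
+    //
+    //   vk::PhysicalDeviceRayTracingPropertiesNV a{}, b{};
+    //   bool same = ( a == b );  // true: every member is value-initialized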
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, relaxedLineRasterization ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & ) const = default; +#else + bool operator==( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( relaxedLineRasterization == rhs.relaxedLineRasterization ); +# endif + } + + bool operator!=( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 relaxedLineRasterization = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + }; + + struct PhysicalDeviceRenderPassStripedFeaturesARM + { + using NativeType = VkPhysicalDeviceRenderPassStripedFeaturesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRenderPassStripedFeaturesARM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 renderPassStriped_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , renderPassStriped( renderPassStriped_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedFeaturesARM( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRenderPassStripedFeaturesARM( VkPhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRenderPassStripedFeaturesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRenderPassStripedFeaturesARM & operator=( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRenderPassStripedFeaturesARM & operator=( VkPhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRenderPassStripedFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRenderPassStripedFeaturesARM & + setRenderPassStriped( VULKAN_HPP_NAMESPACE::Bool32 renderPassStriped_ ) VULKAN_HPP_NOEXCEPT + { + renderPassStriped = renderPassStriped_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceRenderPassStripedFeaturesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRenderPassStripedFeaturesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, renderPassStriped ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRenderPassStripedFeaturesARM const & ) const = default; +#else + bool 
operator==( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPassStriped == rhs.renderPassStriped ); +# endif + } + + bool operator!=( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRenderPassStripedFeaturesARM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 renderPassStriped = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRenderPassStripedFeaturesARM; + }; + + struct PhysicalDeviceRenderPassStripedPropertiesARM + { + using NativeType = VkPhysicalDeviceRenderPassStripedPropertiesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRenderPassStripedPropertiesARM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedPropertiesARM( VULKAN_HPP_NAMESPACE::Extent2D renderPassStripeGranularity_ = {}, + uint32_t maxRenderPassStripes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , renderPassStripeGranularity( renderPassStripeGranularity_ ) + , maxRenderPassStripes( maxRenderPassStripes_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedPropertiesARM( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRenderPassStripedPropertiesARM( VkPhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRenderPassStripedPropertiesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRenderPassStripedPropertiesARM & operator=( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRenderPassStripedPropertiesARM & operator=( VkPhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRenderPassStripedPropertiesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRenderPassStripedPropertiesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, renderPassStripeGranularity, maxRenderPassStripes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRenderPassStripedPropertiesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPassStripeGranularity == rhs.renderPassStripeGranularity ) && + ( maxRenderPassStripes == rhs.maxRenderPassStripes ); +# endif + } + + bool operator!=( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceRenderPassStripedPropertiesARM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D renderPassStripeGranularity = {}; + uint32_t maxRenderPassStripes = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRenderPassStripedPropertiesARM; + }; + + struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV + { + using NativeType = VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , representativeFragmentTest( representativeFragmentTest_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRepresentativeFragmentTestFeaturesNV( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRepresentativeFragmentTestFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT + { + representativeFragmentTest = representativeFragmentTest_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, representativeFragmentTest ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( representativeFragmentTest == rhs.representativeFragmentTest ); +# endif + } + + bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
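+    // The CppType specialisation emitted after each struct maps a StructureType
+    // enumerant back to its C++ type at compile time, which is what lets
+    // vk::StructureChain validate its links. Illustration:
+    //
+    //   using T = vk::CppType<vk::StructureType,
+    //                         vk::StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV>::Type;
+    //   static_assert( std::is_same<T, vk::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value, "" );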
StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV; + }; + + struct PhysicalDeviceRobustness2FeaturesEXT + { + using NativeType = VkPhysicalDeviceRobustness2FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , robustBufferAccess2( robustBufferAccess2_ ) + , robustImageAccess2( robustImageAccess2_ ) + , nullDescriptor( nullDescriptor_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRobustness2FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRobustness2FeaturesEXT & operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRobustness2FeaturesEXT & operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & + setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT + { + robustBufferAccess2 = robustBufferAccess2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT + { + robustImageAccess2 = robustImageAccess2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + nullDescriptor = nullDescriptor_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceRobustness2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, robustBufferAccess2, robustImageAccess2, nullDescriptor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( robustBufferAccess2 == rhs.robustBufferAccess2 ) && + ( robustImageAccess2 == rhs.robustImageAccess2 ) && ( nullDescriptor == rhs.nullDescriptor ); +# endif + } + + bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRobustness2FeaturesEXT; + }; + + struct PhysicalDeviceRobustness2PropertiesEXT + { + using NativeType = VkPhysicalDeviceRobustness2PropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ ) + , robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRobustness2PropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRobustness2PropertiesEXT & operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceRobustness2PropertiesEXT & operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRobustness2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, robustStorageBufferAccessSizeAlignment, robustUniformBufferAccessSizeAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment ) && + ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
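+    // robustBufferAccess2 / robustImageAccess2 tighten the out-of-bounds
+    // guarantees of the base robustBufferAccess feature, and nullDescriptor
+    // makes binding VK_NULL_HANDLE descriptors legal. A conditional enable,
+    // sketched assuming `supported` was filled via a getFeatures2 chain as
+    // shown earlier:
+    //
+    //   vk::PhysicalDeviceRobustness2FeaturesEXT robustness{};
+    //   robustness.nullDescriptor = supported.nullDescriptor;  // request only what exists
+    //   // then chain &robustness into DeviceCreateInfo::pNext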
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRobustness2PropertiesEXT; + }; + + struct PhysicalDeviceSampleLocationsPropertiesEXT + { + using NativeType = VkPhysicalDeviceSampleLocationsPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, + std::array const & sampleLocationCoordinateRange_ = {}, + uint32_t sampleLocationSubPixelBits_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , sampleLocationSampleCounts( sampleLocationSampleCounts_ ) + , maxSampleLocationGridSize( maxSampleLocationGridSize_ ) + , sampleLocationCoordinateRange( sampleLocationCoordinateRange_ ) + , sampleLocationSubPixelBits( sampleLocationSubPixelBits_ ) + , variableSampleLocations( variableSampleLocations_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSampleLocationsPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + sampleLocationSampleCounts, + maxSampleLocationGridSize, + sampleLocationCoordinateRange, + sampleLocationSubPixelBits, + variableSampleLocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) && + ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) && ( sampleLocationCoordinateRange == 
+  struct PhysicalDeviceSamplerFilterMinmaxProperties
+  {
+    using NativeType = VkPhysicalDeviceSamplerFilterMinmaxProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {},
+                                                                      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
+      , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSamplerFilterMinmaxProperties( *reinterpret_cast<PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties *>( this );
+    }
+
+    operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, filterMinmaxSingleComponentFormats, filterMinmaxImageComponentMapping );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) &&
+             ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties>
+  {
+    using Type = PhysicalDeviceSamplerFilterMinmaxProperties;
+  };
+
+  using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties;
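+  // Reading the two limits above (sketch; `gpu` is a valid vk::PhysicalDevice):
+  //
+  //   auto chain = gpu.getProperties2<vk::PhysicalDeviceProperties2,
+  //                                   vk::PhysicalDeviceSamplerFilterMinmaxProperties>();
+  //   auto const & mm = chain.get<vk::PhysicalDeviceSamplerFilterMinmaxProperties>();
+  //   // mm.filterMinmaxSingleComponentFormats: min/max reduction samplers work
+  //   // on single-component formats; mm.filterMinmaxImageComponentMapping:
+  //   // they also honour non-identity component swizzles.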
+  struct PhysicalDeviceSamplerYcbcrConversionFeatures
+  {
+    using NativeType = VkPhysicalDeviceSamplerYcbcrConversionFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , samplerYcbcrConversion( samplerYcbcrConversion_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSamplerYcbcrConversionFeatures( *reinterpret_cast<PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures &
+      setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerYcbcrConversion = samplerYcbcrConversion_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, samplerYcbcrConversion );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures>
+  {
+    using Type = PhysicalDeviceSamplerYcbcrConversionFeatures;
+  };
+
+  using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;
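+  // Feature structs such as the one above are also used to *enable* features:
+  // chain them into DeviceCreateInfo::pNext. Sketch, where `gpu` and
+  // `queueInfo` (a vk::DeviceQueueCreateInfo) are assumed to exist already:
+  //
+  //   vk::PhysicalDeviceSamplerYcbcrConversionFeatures ycbcrFeatures{ VK_TRUE };
+  //   vk::DeviceCreateInfo deviceInfo{};
+  //   deviceInfo.setQueueCreateInfos( queueInfo ).setPNext( &ycbcrFeatures );
+  //   vk::Device device = gpu.createDevice( deviceInfo );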
+  struct PhysicalDeviceScalarBlockLayoutFeatures
+  {
+    using NativeType = VkPhysicalDeviceScalarBlockLayoutFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , scalarBlockLayout( scalarBlockLayout_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceScalarBlockLayoutFeatures( *reinterpret_cast<PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceScalarBlockLayoutFeatures & operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scalarBlockLayout = scalarBlockLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, scalarBlockLayout );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalarBlockLayout == rhs.scalarBlockLayout );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceScalarBlockLayoutFeatures>
+  {
+    using Type = PhysicalDeviceScalarBlockLayoutFeatures;
+  };
+
+  using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures;
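+  // The generated setters return *this, so feature toggles compose fluently
+  // (sketch; the variable name is illustrative):
+  //
+  //   auto scalarLayout = vk::PhysicalDeviceScalarBlockLayoutFeatures{}
+  //                         .setScalarBlockLayout( VK_TRUE );
+  //   // chain &scalarLayout into DeviceCreateInfo::pNext as shown above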
+  struct PhysicalDeviceSchedulingControlsFeaturesARM
+  {
+    using NativeType = VkPhysicalDeviceSchedulingControlsFeaturesARM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 schedulingControls_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , schedulingControls( schedulingControls_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsFeaturesARM( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSchedulingControlsFeaturesARM( VkPhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSchedulingControlsFeaturesARM( *reinterpret_cast<PhysicalDeviceSchedulingControlsFeaturesARM const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceSchedulingControlsFeaturesARM & operator=( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSchedulingControlsFeaturesARM & operator=( VkPhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsFeaturesARM & setSchedulingControls( VULKAN_HPP_NAMESPACE::Bool32 schedulingControls_ ) VULKAN_HPP_NOEXCEPT
+    {
+      schedulingControls = schedulingControls_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceSchedulingControlsFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSchedulingControlsFeaturesARM *>( this );
+    }
+
+    operator VkPhysicalDeviceSchedulingControlsFeaturesARM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSchedulingControlsFeaturesARM *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, schedulingControls );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSchedulingControlsFeaturesARM const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( schedulingControls == rhs.schedulingControls );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 schedulingControls = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM>
+  {
+    using Type = PhysicalDeviceSchedulingControlsFeaturesARM;
+  };
+  struct PhysicalDeviceSchedulingControlsPropertiesARM
+  {
+    using NativeType = VkPhysicalDeviceSchedulingControlsPropertiesARM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceSchedulingControlsPropertiesARM( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags_ = {},
+                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , schedulingControlsFlags( schedulingControlsFlags_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsPropertiesARM( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSchedulingControlsPropertiesARM( VkPhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSchedulingControlsPropertiesARM( *reinterpret_cast<PhysicalDeviceSchedulingControlsPropertiesARM const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceSchedulingControlsPropertiesARM & operator=( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSchedulingControlsPropertiesARM & operator=( VkPhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsPropertiesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsPropertiesARM &
+      setSchedulingControlsFlags( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      schedulingControlsFlags = schedulingControlsFlags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceSchedulingControlsPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSchedulingControlsPropertiesARM *>( this );
+    }
+
+    operator VkPhysicalDeviceSchedulingControlsPropertiesARM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSchedulingControlsPropertiesARM *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, schedulingControlsFlags );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSchedulingControlsPropertiesARM const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( schedulingControlsFlags == rhs.schedulingControlsFlags );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM>
+  {
+    using Type = PhysicalDeviceSchedulingControlsPropertiesARM;
+  };
+  struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures
+  {
+    using NativeType = VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {},
+                                                                            void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , separateDepthStencilLayouts( separateDepthStencilLayouts_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceSeparateDepthStencilLayoutsFeatures( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSeparateDepthStencilLayoutsFeatures( *reinterpret_cast<PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceSeparateDepthStencilLayoutsFeatures &
+      operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures &
+      setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      separateDepthStencilLayouts = separateDepthStencilLayouts_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, separateDepthStencilLayouts );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures>
+  {
+    using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+  };
+
+  using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
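+  // Sketch: once separateDepthStencilLayouts is enabled, the depth and stencil
+  // aspects of an image may sit in different layouts:
+  //
+  //   vk::PhysicalDeviceSeparateDepthStencilLayoutsFeatures sdsl{ VK_TRUE };
+  //   // chain &sdsl into DeviceCreateInfo::pNext; per-aspect layouts such as
+  //   // vk::ImageLayout::eDepthReadOnlyOptimal combined with
+  //   // vk::ImageLayout::eStencilAttachmentOptimal then become valid choices.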
+  struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ = {},
+                                                                      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderBufferFloat16Atomics( shaderBufferFloat16Atomics_ )
+      , shaderBufferFloat16AtomicAdd( shaderBufferFloat16AtomicAdd_ )
+      , shaderBufferFloat16AtomicMinMax( shaderBufferFloat16AtomicMinMax_ )
+      , shaderBufferFloat32AtomicMinMax( shaderBufferFloat32AtomicMinMax_ )
+      , shaderBufferFloat64AtomicMinMax( shaderBufferFloat64AtomicMinMax_ )
+      , shaderSharedFloat16Atomics( shaderSharedFloat16Atomics_ )
+      , shaderSharedFloat16AtomicAdd( shaderSharedFloat16AtomicAdd_ )
+      , shaderSharedFloat16AtomicMinMax( shaderSharedFloat16AtomicMinMax_ )
+      , shaderSharedFloat32AtomicMinMax( shaderSharedFloat32AtomicMinMax_ )
+      , shaderSharedFloat64AtomicMinMax( shaderSharedFloat64AtomicMinMax_ )
+      , shaderImageFloat32AtomicMinMax( shaderImageFloat32AtomicMinMax_ )
+      , sparseImageFloat32AtomicMinMax( sparseImageFloat32AtomicMinMax_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderAtomicFloat2FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderAtomicFloat2FeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+      setShaderBufferFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat16Atomics = shaderBufferFloat16Atomics_;
+      return *this;
+    }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+      setShaderBufferFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat16AtomicAdd = shaderBufferFloat16AtomicAdd_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+      setShaderBufferFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat16AtomicMinMax = shaderBufferFloat16AtomicMinMax_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+      setShaderBufferFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat32AtomicMinMax = shaderBufferFloat32AtomicMinMax_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+      setShaderBufferFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat64AtomicMinMax = shaderBufferFloat64AtomicMinMax_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+      setShaderSharedFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat16Atomics = shaderSharedFloat16Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+      setShaderSharedFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat16AtomicAdd = shaderSharedFloat16AtomicAdd_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+      setShaderSharedFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat16AtomicMinMax = shaderSharedFloat16AtomicMinMax_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+      setShaderSharedFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat32AtomicMinMax = shaderSharedFloat32AtomicMinMax_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+      setShaderSharedFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat64AtomicMinMax = shaderSharedFloat64AtomicMinMax_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+      setShaderImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageFloat32AtomicMinMax = shaderImageFloat32AtomicMinMax_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT &
+      setSparseImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseImageFloat32AtomicMinMax = sparseImageFloat32AtomicMinMax_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       shaderBufferFloat16Atomics,
+                       shaderBufferFloat16AtomicAdd,
+                       shaderBufferFloat16AtomicMinMax,
+                       shaderBufferFloat32AtomicMinMax,
+                       shaderBufferFloat64AtomicMinMax,
+                       shaderSharedFloat16Atomics,
+                       shaderSharedFloat16AtomicAdd,
+                       shaderSharedFloat16AtomicMinMax,
+                       shaderSharedFloat32AtomicMinMax,
+                       shaderSharedFloat64AtomicMinMax,
+                       shaderImageFloat32AtomicMinMax,
+                       sparseImageFloat32AtomicMinMax );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferFloat16Atomics == rhs.shaderBufferFloat16Atomics ) &&
+             ( shaderBufferFloat16AtomicAdd == rhs.shaderBufferFloat16AtomicAdd ) &&
+             ( shaderBufferFloat16AtomicMinMax == rhs.shaderBufferFloat16AtomicMinMax ) &&
+             ( shaderBufferFloat32AtomicMinMax == rhs.shaderBufferFloat32AtomicMinMax ) &&
+             ( shaderBufferFloat64AtomicMinMax == rhs.shaderBufferFloat64AtomicMinMax ) && ( shaderSharedFloat16Atomics == rhs.shaderSharedFloat16Atomics ) &&
+             ( shaderSharedFloat16AtomicAdd == rhs.shaderSharedFloat16AtomicAdd ) &&
+             ( shaderSharedFloat16AtomicMinMax == rhs.shaderSharedFloat16AtomicMinMax ) &&
+             ( shaderSharedFloat32AtomicMinMax == rhs.shaderSharedFloat32AtomicMinMax ) &&
+             ( shaderSharedFloat64AtomicMinMax == rhs.shaderSharedFloat64AtomicMinMax ) &&
+             ( shaderImageFloat32AtomicMinMax == rhs.shaderImageFloat32AtomicMinMax ) &&
+             ( sparseImageFloat32AtomicMinMax == rhs.sparseImageFloat32AtomicMinMax );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT>
+  {
+    using Type = PhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+  };
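+  // Sketch: requesting a subset of the float16 atomic features with the
+  // fluent setters (the variable name is illustrative):
+  //
+  //   auto atomicFloat2 = vk::PhysicalDeviceShaderAtomicFloat2FeaturesEXT{}
+  //                         .setShaderBufferFloat16Atomics( VK_TRUE )
+  //                         .setShaderBufferFloat16AtomicAdd( VK_TRUE );
+  //   // chain &atomicFloat2 into DeviceCreateInfo::pNext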
+  struct PhysicalDeviceShaderAtomicFloatFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderAtomicFloatFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {},
+                                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ )
+      , shaderBufferFloat32AtomicAdd( shaderBufferFloat32AtomicAdd_ )
+      , shaderBufferFloat64Atomics( shaderBufferFloat64Atomics_ )
+      , shaderBufferFloat64AtomicAdd( shaderBufferFloat64AtomicAdd_ )
+      , shaderSharedFloat32Atomics( shaderSharedFloat32Atomics_ )
+      , shaderSharedFloat32AtomicAdd( shaderSharedFloat32AtomicAdd_ )
+      , shaderSharedFloat64Atomics( shaderSharedFloat64Atomics_ )
+      , shaderSharedFloat64AtomicAdd( shaderSharedFloat64AtomicAdd_ )
+      , shaderImageFloat32Atomics( shaderImageFloat32Atomics_ )
+      , shaderImageFloat32AtomicAdd( shaderImageFloat32AtomicAdd_ )
+      , sparseImageFloat32Atomics( sparseImageFloat32Atomics_ )
+      , sparseImageFloat32AtomicAdd( sparseImageFloat32AtomicAdd_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderAtomicFloatFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
+      setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
+      setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
+      setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
+      setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_;
+      return *this;
+    }
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
+      setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
+      setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
+      setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
+      setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
+      setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageFloat32Atomics = shaderImageFloat32Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
+      setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
+      setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseImageFloat32Atomics = sparseImageFloat32Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
+      setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloatFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       shaderBufferFloat32Atomics,
+                       shaderBufferFloat32AtomicAdd,
+                       shaderBufferFloat64Atomics,
+                       shaderBufferFloat64AtomicAdd,
+                       shaderSharedFloat32Atomics,
+                       shaderSharedFloat32AtomicAdd,
+                       shaderSharedFloat64Atomics,
+                       shaderSharedFloat64AtomicAdd,
+                       shaderImageFloat32Atomics,
+                       shaderImageFloat32AtomicAdd,
+                       sparseImageFloat32Atomics,
+                       sparseImageFloat32AtomicAdd );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics ) &&
+             ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd ) && ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics ) &&
+             ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd ) && ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics ) &&
+             ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd ) && ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics ) &&
+             ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd ) && ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics ) &&
+             ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd ) && ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics ) &&
+             ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT>
+  {
+    using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT;
+  };
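+  // Sketch: both atomic-float structs can be filled in one StructureChain
+  // query (assumes a valid vk::PhysicalDevice `gpu`):
+  //
+  //   auto chain = gpu.getFeatures2<vk::PhysicalDeviceFeatures2,
+  //                                 vk::PhysicalDeviceShaderAtomicFloatFeaturesEXT,
+  //                                 vk::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>();
+  //   bool bufferAdd = chain.get<vk::PhysicalDeviceShaderAtomicFloatFeaturesEXT>()
+  //                      .shaderBufferFloat32AtomicAdd == VK_TRUE;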
+  struct PhysicalDeviceShaderAtomicInt64Features
+  {
+    using NativeType = VkPhysicalDeviceShaderAtomicInt64Features;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
+                                                                  VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {},
+                                                                  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
+      , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderAtomicInt64Features( *reinterpret_cast<PhysicalDeviceShaderAtomicInt64Features const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderAtomicInt64Features & operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features &
+      setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features &
+      setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderBufferInt64Atomics, shaderSharedInt64Atomics );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) &&
+             ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicInt64Features>
+  {
+    using Type = PhysicalDeviceShaderAtomicInt64Features;
+  };
+
+  using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;
+  struct PhysicalDeviceShaderClockFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceShaderClockFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {},
+                                                               VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {},
+                                                               void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderSubgroupClock( shaderSubgroupClock_ )
+      , shaderDeviceClock( shaderDeviceClock_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderClockFeaturesKHR & operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupClock = shaderSubgroupClock_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDeviceClock = shaderDeviceClock_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderSubgroupClock, shaderDeviceClock );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupClock == rhs.shaderSubgroupClock ) &&
+             ( shaderDeviceClock == rhs.shaderDeviceClock );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderClockFeaturesKHR>
+  {
+    using Type = PhysicalDeviceShaderClockFeaturesKHR;
+  };
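+  // Sketch (VK_KHR_shader_clock): shaderSubgroupClock gates OpReadClockKHR at
+  // subgroup scope, shaderDeviceClock at device scope. Assumes a valid
+  // vk::PhysicalDevice `gpu`:
+  //
+  //   vk::PhysicalDeviceShaderClockFeaturesKHR clockFeatures{};
+  //   vk::PhysicalDeviceFeatures2 features2{};
+  //   features2.pNext = &clockFeatures;
+  //   gpu.getFeatures2( &features2 );
+  //   bool deviceScope = clockFeatures.shaderDeviceClock == VK_TRUE;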
+  struct PhysicalDeviceShaderCoreBuiltinsFeaturesARM
+  {
+    using NativeType = VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderCoreBuiltins( shaderCoreBuiltins_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsFeaturesARM( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderCoreBuiltinsFeaturesARM( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderCoreBuiltinsFeaturesARM( *reinterpret_cast<PhysicalDeviceShaderCoreBuiltinsFeaturesARM const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderCoreBuiltinsFeaturesARM & operator=( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderCoreBuiltinsFeaturesARM & operator=( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreBuiltinsFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreBuiltinsFeaturesARM & setShaderCoreBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderCoreBuiltins = shaderCoreBuiltins_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderCoreBuiltins );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreBuiltins == rhs.shaderCoreBuiltins );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM>
+  {
+    using Type = PhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+  };
+  struct PhysicalDeviceShaderCoreBuiltinsPropertiesARM
+  {
+    using NativeType = VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsPropertiesARM( uint64_t shaderCoreMask_ = {},
+                                                                        uint32_t shaderCoreCount_ = {},
+                                                                        uint32_t shaderWarpsPerCore_ = {},
+                                                                        void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderCoreMask( shaderCoreMask_ )
+      , shaderCoreCount( shaderCoreCount_ )
+      , shaderWarpsPerCore( shaderWarpsPerCore_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsPropertiesARM( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderCoreBuiltinsPropertiesARM( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderCoreBuiltinsPropertiesARM( *reinterpret_cast<PhysicalDeviceShaderCoreBuiltinsPropertiesARM const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderCoreBuiltinsPropertiesARM & operator=( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderCoreBuiltinsPropertiesARM & operator=( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &, uint32_t const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderCoreMask, shaderCoreCount, shaderWarpsPerCore );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreMask == rhs.shaderCoreMask ) && ( shaderCoreCount == rhs.shaderCoreCount ) &&
+             ( shaderWarpsPerCore == rhs.shaderWarpsPerCore );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+    void * pNext = {};
+    uint64_t shaderCoreMask = {};
+    uint32_t shaderCoreCount = {};
+    uint32_t shaderWarpsPerCore = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM>
+  {
+    using Type = PhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+  };
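+  // Sketch: shaderCoreMask is a bitmask of usable shader cores, so the number
+  // of enabled cores can be counted from it (illustrative; `gpu` is a valid
+  // vk::PhysicalDevice):
+  //
+  //   auto chain = gpu.getProperties2<vk::PhysicalDeviceProperties2,
+  //                                   vk::PhysicalDeviceShaderCoreBuiltinsPropertiesARM>();
+  //   auto const & cb = chain.get<vk::PhysicalDeviceShaderCoreBuiltinsPropertiesARM>();
+  //   size_t enabledCores = std::bitset<64>( cb.shaderCoreMask ).count();
+  //   // typically matches cb.shaderCoreCount on conformant drivers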
+  struct PhysicalDeviceShaderCoreProperties2AMD
+  {
+    using NativeType = VkPhysicalDeviceShaderCoreProperties2AMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {},
+                                                                 uint32_t activeComputeUnitCount_ = {},
+                                                                 void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderCoreFeatures( shaderCoreFeatures_ )
+      , activeComputeUnitCount( activeComputeUnitCount_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderCoreProperties2AMD( *reinterpret_cast<PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderCoreProperties2AMD & operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderCoreFeatures, activeComputeUnitCount );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreFeatures == rhs.shaderCoreFeatures ) &&
+             ( activeComputeUnitCount == rhs.activeComputeUnitCount );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {};
+    uint32_t activeComputeUnitCount = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreProperties2AMD>
+  {
+    using Type = PhysicalDeviceShaderCoreProperties2AMD;
+  };
+  struct PhysicalDeviceShaderCorePropertiesAMD
+  {
+    using NativeType = VkPhysicalDeviceShaderCorePropertiesAMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {},
+                                                                uint32_t shaderArraysPerEngineCount_ = {},
+                                                                uint32_t computeUnitsPerShaderArray_ = {},
+                                                                uint32_t simdPerComputeUnit_ = {},
+                                                                uint32_t wavefrontsPerSimd_ = {},
+                                                                uint32_t wavefrontSize_ = {},
+                                                                uint32_t sgprsPerSimd_ = {},
+                                                                uint32_t minSgprAllocation_ = {},
+                                                                uint32_t maxSgprAllocation_ = {},
+                                                                uint32_t sgprAllocationGranularity_ = {},
+                                                                uint32_t vgprsPerSimd_ = {},
+                                                                uint32_t minVgprAllocation_ = {},
+                                                                uint32_t maxVgprAllocation_ = {},
+                                                                uint32_t vgprAllocationGranularity_ = {},
+                                                                void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderEngineCount( shaderEngineCount_ )
+      , shaderArraysPerEngineCount( shaderArraysPerEngineCount_ )
+      , computeUnitsPerShaderArray( computeUnitsPerShaderArray_ )
+      , simdPerComputeUnit( simdPerComputeUnit_ )
+      , wavefrontsPerSimd( wavefrontsPerSimd_ )
+      , wavefrontSize( wavefrontSize_ )
+      , sgprsPerSimd( sgprsPerSimd_ )
+      , minSgprAllocation( minSgprAllocation_ )
+      , maxSgprAllocation( maxSgprAllocation_ )
+      , sgprAllocationGranularity( sgprAllocationGranularity_ )
+      , vgprsPerSimd( vgprsPerSimd_ )
+      , minVgprAllocation( minVgprAllocation_ )
+      , maxVgprAllocation( maxVgprAllocation_ )
+      , vgprAllocationGranularity( vgprAllocationGranularity_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderCorePropertiesAMD( *reinterpret_cast<PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderCorePropertiesAMD & operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD *>( this );
+    }
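+  // Sketch: the AMD limits in this struct combine into theoretical occupancy
+  // figures, e.g. maximum resident wavefronts per compute unit (illustrative
+  // arithmetic, not a vendor-endorsed formula; `gpu` is a vk::PhysicalDevice):
+  //
+  //   auto chain = gpu.getProperties2<vk::PhysicalDeviceProperties2,
+  //                                   vk::PhysicalDeviceShaderCorePropertiesAMD>();
+  //   auto const & sc = chain.get<vk::PhysicalDeviceShaderCorePropertiesAMD>();
+  //   uint32_t maxWavesPerCU = sc.simdPerComputeUnit * sc.wavefrontsPerSimd;
+  //   uint32_t lanesPerCU = maxWavesPerCU * sc.wavefrontSize;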
return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderEngineCount, + shaderArraysPerEngineCount, + computeUnitsPerShaderArray, + simdPerComputeUnit, + wavefrontsPerSimd, + wavefrontSize, + sgprsPerSimd, + minSgprAllocation, + maxSgprAllocation, + sgprAllocationGranularity, + vgprsPerSimd, + minVgprAllocation, + maxVgprAllocation, + vgprAllocationGranularity ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEngineCount == rhs.shaderEngineCount ) && + ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) && + ( simdPerComputeUnit == rhs.simdPerComputeUnit ) && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) && ( wavefrontSize == rhs.wavefrontSize ) && + ( sgprsPerSimd == rhs.sgprsPerSimd ) && ( minSgprAllocation == rhs.minSgprAllocation ) && ( maxSgprAllocation == rhs.maxSgprAllocation ) && + ( sgprAllocationGranularity == rhs.sgprAllocationGranularity ) && ( vgprsPerSimd == rhs.vgprsPerSimd ) && + ( minVgprAllocation == rhs.minVgprAllocation ) && ( maxVgprAllocation == rhs.maxVgprAllocation ) && + ( vgprAllocationGranularity == rhs.vgprAllocationGranularity ); +# endif + } + + bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + void * pNext = {}; + uint32_t shaderEngineCount = {}; + uint32_t shaderArraysPerEngineCount = {}; + uint32_t computeUnitsPerShaderArray = {}; + uint32_t simdPerComputeUnit = {}; + uint32_t wavefrontsPerSimd = {}; + uint32_t wavefrontSize = {}; + uint32_t sgprsPerSimd = {}; + uint32_t minSgprAllocation = {}; + uint32_t maxSgprAllocation = {}; + uint32_t sgprAllocationGranularity = {}; + uint32_t vgprsPerSimd = {}; + uint32_t minVgprAllocation = {}; + uint32_t maxVgprAllocation = {}; + uint32_t vgprAllocationGranularity = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCorePropertiesAMD; + }; + + struct PhysicalDeviceShaderCorePropertiesARM + { + using NativeType = VkPhysicalDeviceShaderCorePropertiesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesARM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesARM( uint32_t pixelRate_ = {}, + uint32_t texelRate_ = {}, + uint32_t fmaRate_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pixelRate( pixelRate_ ) + , texelRate( texelRate_ ) + , fmaRate( fmaRate_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesARM( PhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
PhysicalDeviceShaderCorePropertiesARM( VkPhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCorePropertiesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderCorePropertiesARM & operator=( PhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderCorePropertiesARM & operator=( VkPhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderCorePropertiesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCorePropertiesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pixelRate, texelRate, fmaRate ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCorePropertiesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderCorePropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pixelRate == rhs.pixelRate ) && ( texelRate == rhs.texelRate ) && ( fmaRate == rhs.fmaRate ); +# endif + } + + bool operator!=( PhysicalDeviceShaderCorePropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesARM; + void * pNext = {}; + uint32_t pixelRate = {}; + uint32_t texelRate = {}; + uint32_t fmaRate = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCorePropertiesARM; + }; + + struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures + { + using NativeType = VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderDemoteToHelperInvocationFeatures( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderDemoteToHelperInvocationFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderDemoteToHelperInvocationFeatures & + operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderDemoteToHelperInvocationFeatures & operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if 
!defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & + setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT + { + shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderDemoteToHelperInvocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ); +# endif + } + + bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + }; + + using PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + + struct PhysicalDeviceShaderDrawParametersFeatures + { + using NativeType = VkPhysicalDeviceShaderDrawParametersFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderDrawParameters( shaderDrawParameters_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderDrawParametersFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderDrawParametersFeatures & operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + 
} + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & + setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT + { + shaderDrawParameters = shaderDrawParameters_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderDrawParameters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDrawParameters == rhs.shaderDrawParameters ); +# endif + } + + bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderDrawParametersFeatures; + }; + + using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; + + struct PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD + { + using NativeType = VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderEarlyAndLateFragmentTests( shaderEarlyAndLateFragmentTests_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & + operator=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & + operator=( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & + setShaderEarlyAndLateFragmentTests( VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests_ ) VULKAN_HPP_NOEXCEPT + { + shaderEarlyAndLateFragmentTests = shaderEarlyAndLateFragmentTests_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderEarlyAndLateFragmentTests ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEarlyAndLateFragmentTests == rhs.shaderEarlyAndLateFragmentTests ); +# endif + } + + bool operator!=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceShaderEnqueueFeaturesAMDX + { + using NativeType = VkPhysicalDeviceShaderEnqueueFeaturesAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderEnqueue( shaderEnqueue_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderEnqueueFeaturesAMDX( VkPhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderEnqueueFeaturesAMDX( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderEnqueueFeaturesAMDX & operator=( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderEnqueueFeaturesAMDX & operator=( VkPhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setShaderEnqueue( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ ) VULKAN_HPP_NOEXCEPT + { + shaderEnqueue = shaderEnqueue_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderEnqueue ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderEnqueueFeaturesAMDX const & ) const = default; +# else + bool operator==( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEnqueue == rhs.shaderEnqueue ); +# endif + } + + bool operator!=( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderEnqueueFeaturesAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceShaderEnqueuePropertiesAMDX + { + using NativeType = VkPhysicalDeviceShaderEnqueuePropertiesAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueuePropertiesAMDX( uint32_t maxExecutionGraphDepth_ = {}, + uint32_t maxExecutionGraphShaderOutputNodes_ = {}, + uint32_t maxExecutionGraphShaderPayloadSize_ = {}, + uint32_t maxExecutionGraphShaderPayloadCount_ = {}, + uint32_t executionGraphDispatchAddressAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxExecutionGraphDepth( maxExecutionGraphDepth_ ) + , maxExecutionGraphShaderOutputNodes( maxExecutionGraphShaderOutputNodes_ ) + , maxExecutionGraphShaderPayloadSize( maxExecutionGraphShaderPayloadSize_ ) + , maxExecutionGraphShaderPayloadCount( maxExecutionGraphShaderPayloadCount_ ) + , executionGraphDispatchAddressAlignment( executionGraphDispatchAddressAlignment_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueuePropertiesAMDX( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderEnqueuePropertiesAMDX( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderEnqueuePropertiesAMDX( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderEnqueuePropertiesAMDX & operator=( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif 
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderEnqueuePropertiesAMDX & operator=( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & setMaxExecutionGraphDepth( uint32_t maxExecutionGraphDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphDepth = maxExecutionGraphDepth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphShaderOutputNodes( uint32_t maxExecutionGraphShaderOutputNodes_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphShaderOutputNodes = maxExecutionGraphShaderOutputNodes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphShaderPayloadSize( uint32_t maxExecutionGraphShaderPayloadSize_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphShaderPayloadSize = maxExecutionGraphShaderPayloadSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphShaderPayloadCount( uint32_t maxExecutionGraphShaderPayloadCount_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphShaderPayloadCount = maxExecutionGraphShaderPayloadCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setExecutionGraphDispatchAddressAlignment( uint32_t executionGraphDispatchAddressAlignment_ ) VULKAN_HPP_NOEXCEPT + { + executionGraphDispatchAddressAlignment = executionGraphDispatchAddressAlignment_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxExecutionGraphDepth, + maxExecutionGraphShaderOutputNodes, + maxExecutionGraphShaderPayloadSize, + maxExecutionGraphShaderPayloadCount, + executionGraphDispatchAddressAlignment ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderEnqueuePropertiesAMDX const & ) const = default; +# else + bool operator==( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxExecutionGraphDepth == rhs.maxExecutionGraphDepth ) && + ( maxExecutionGraphShaderOutputNodes == rhs.maxExecutionGraphShaderOutputNodes ) && + ( maxExecutionGraphShaderPayloadSize == rhs.maxExecutionGraphShaderPayloadSize ) && + ( maxExecutionGraphShaderPayloadCount == rhs.maxExecutionGraphShaderPayloadCount ) && + ( executionGraphDispatchAddressAlignment == rhs.executionGraphDispatchAddressAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX; + void * pNext = {}; + uint32_t maxExecutionGraphDepth = {}; + uint32_t maxExecutionGraphShaderOutputNodes = {}; + uint32_t maxExecutionGraphShaderPayloadSize = {}; + uint32_t maxExecutionGraphShaderPayloadCount = {}; + uint32_t executionGraphDispatchAddressAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderEnqueuePropertiesAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct PhysicalDeviceShaderFloat16Int8Features + { + using NativeType = VkPhysicalDeviceShaderFloat16Int8Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat16Int8Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderFloat16( shaderFloat16_ ) + , shaderInt8( shaderInt8_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderFloat16Int8Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderFloat16Int8Features & operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat16 = shaderFloat16_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt8 = shaderInt8_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderFloat16, shaderInt8 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16 == rhs.shaderFloat16 ) && ( shaderInt8 == rhs.shaderInt8 ); +# endif + } + + bool operator!=( PhysicalDeviceShaderFloat16Int8Features 
const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderFloat16Int8Features; + }; + + using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + + struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderImageInt64Atomics( shaderImageInt64Atomics_ ) + , sparseImageInt64Atomics( sparseImageInt64Atomics_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & + setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageInt64Atomics = shaderImageInt64Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & + setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageInt64Atomics = sparseImageInt64Atomics_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderImageInt64Atomics, sparseImageInt64Atomics ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
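Feature structs such as PhysicalDeviceShaderFloat16Int8Features are not filled in by hand; an application chains them into a features query through pNext and reads back what the driver reports. A minimal usage sketch against the Vulkan-Hpp API above, assuming Vulkan >= 1.1 and a valid vk::PhysicalDevice named physicalDevice (the variable names are illustrative, not part of the vendored header):

    // Chain the feature struct into a getFeatures2 query; the driver fills it in.
    auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceShaderFloat16Int8Features>();
    const auto & f16i8 = chain.get<vk::PhysicalDeviceShaderFloat16Int8Features>();
    if ( f16i8.shaderFloat16 && f16i8.shaderInt8 )
    {
      // 16-bit float and 8-bit int shader arithmetic can be enabled at device creation
    }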
+  struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {},
+                                                                          VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {},
+                                                                          void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderImageInt64Atomics( shaderImageInt64Atomics_ )
+      , sparseImageInt64Atomics( sparseImageInt64Atomics_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
+      setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderImageInt64Atomics = shaderImageInt64Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
+      setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseImageInt64Atomics = sparseImageInt64Atomics_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderImageInt64Atomics, sparseImageInt64Atomics );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics ) &&
+             ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT>
+  {
+    using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+  };
+
+  struct PhysicalDeviceShaderImageFootprintFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceShaderImageFootprintFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {},
+                                                                       void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , imageFootprint( imageFootprint_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderImageFootprintFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageFootprint = imageFootprint_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageFootprint );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFootprint == rhs.imageFootprint );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV>
+  {
+    using Type = PhysicalDeviceShaderImageFootprintFeaturesNV;
+  };
+
+  struct PhysicalDeviceShaderIntegerDotProductFeatures
+  {
+    using NativeType = VkPhysicalDeviceShaderIntegerDotProductFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {},
+                                                                        void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderIntegerDotProduct( shaderIntegerDotProduct_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceShaderIntegerDotProductFeatures( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderIntegerDotProductFeatures( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderIntegerDotProductFeatures( *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductFeatures const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderIntegerDotProductFeatures & operator=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderIntegerDotProductFeatures & operator=( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures &
+      setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderIntegerDotProduct = shaderIntegerDotProduct_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderIntegerDotProductFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderIntegerDotProductFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderIntegerDotProduct );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderIntegerDotProductFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures>
+  {
+    using Type = PhysicalDeviceShaderIntegerDotProductFeatures;
+  };
+
+  using PhysicalDeviceShaderIntegerDotProductFeaturesKHR = PhysicalDeviceShaderIntegerDotProductFeatures;
+
+  struct PhysicalDeviceShaderIntegerDotProductProperties
+  {
+    using NativeType = VkPhysicalDeviceShaderIntegerDotProductProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties(
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {},
+      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ )
+      , integerDotProduct8BitSignedAccelerated( integerDotProduct8BitSignedAccelerated_ )
+      , integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ )
+      , integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ )
+      , integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ )
+      , integerDotProduct4x8BitPackedMixedSignednessAccelerated( integerDotProduct4x8BitPackedMixedSignednessAccelerated_ )
+      , integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ )
+      , integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ )
+      , integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ )
+      , integerDotProduct32BitUnsignedAccelerated( integerDotProduct32BitUnsignedAccelerated_ )
+      , integerDotProduct32BitSignedAccelerated( integerDotProduct32BitSignedAccelerated_ )
+      , integerDotProduct32BitMixedSignednessAccelerated( integerDotProduct32BitMixedSignednessAccelerated_ )
+      , integerDotProduct64BitUnsignedAccelerated( integerDotProduct64BitUnsignedAccelerated_ )
+      , integerDotProduct64BitSignedAccelerated( integerDotProduct64BitSignedAccelerated_ )
+      , integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ )
+      , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating8BitSignedAccelerated( integerDotProductAccumulatingSaturating8BitSignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ )
+      , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated(
+          integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ )
+      , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating16BitSignedAccelerated( integerDotProductAccumulatingSaturating16BitSignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ )
+      , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating32BitSignedAccelerated( integerDotProductAccumulatingSaturating32BitSignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ )
+      , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating64BitSignedAccelerated( integerDotProductAccumulatingSaturating64BitSignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceShaderIntegerDotProductProperties( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderIntegerDotProductProperties( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderIntegerDotProductProperties( *reinterpret_cast<PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderIntegerDotProductProperties & operator=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderIntegerDotProductProperties & operator=( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderIntegerDotProductProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductProperties *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderIntegerDotProductProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       integerDotProduct8BitUnsignedAccelerated,
+                       integerDotProduct8BitSignedAccelerated,
+                       integerDotProduct8BitMixedSignednessAccelerated,
+                       integerDotProduct4x8BitPackedUnsignedAccelerated,
+                       integerDotProduct4x8BitPackedSignedAccelerated,
+                       integerDotProduct4x8BitPackedMixedSignednessAccelerated,
+                       integerDotProduct16BitUnsignedAccelerated,
+                       integerDotProduct16BitSignedAccelerated,
+                       integerDotProduct16BitMixedSignednessAccelerated,
+                       integerDotProduct32BitUnsignedAccelerated,
+                       integerDotProduct32BitSignedAccelerated,
+                       integerDotProduct32BitMixedSignednessAccelerated,
+                       integerDotProduct64BitUnsignedAccelerated,
+                       integerDotProduct64BitSignedAccelerated,
+                       integerDotProduct64BitMixedSignednessAccelerated,
+                       integerDotProductAccumulatingSaturating8BitUnsignedAccelerated,
+                       integerDotProductAccumulatingSaturating8BitSignedAccelerated,
+                       integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated,
+                       integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated,
+                       integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated,
+                       integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated,
+                       integerDotProductAccumulatingSaturating16BitUnsignedAccelerated,
+                       integerDotProductAccumulatingSaturating16BitSignedAccelerated,
+                       integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated,
+                       integerDotProductAccumulatingSaturating32BitUnsignedAccelerated,
+                       integerDotProductAccumulatingSaturating32BitSignedAccelerated,
+                       integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated,
+                       integerDotProductAccumulatingSaturating64BitUnsignedAccelerated,
+                       integerDotProductAccumulatingSaturating64BitSignedAccelerated,
+                       integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderIntegerDotProductProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) &&
+             ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) &&
+             ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated ) &&
+             ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) &&
+             ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) &&
+             ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) &&
+             ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) &&
+             ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) &&
+             ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated ) &&
+             ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) &&
+             ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) &&
+             ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated ) &&
+             ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) &&
+             ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated ) &&
+             ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated ) &&
+             ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) &&
+             ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) &&
+             ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ==
+               rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) &&
+             ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ==
+               rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ) &&
+             ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ==
+               rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) &&
+             ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ==
+               rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) &&
+             ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) &&
+             ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) &&
+             ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ==
+               rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) &&
+             ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) &&
+             ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) &&
+             ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ==
+               rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) &&
+             ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) &&
+             ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) &&
+             ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ==
+               rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerDotProductProperties>
+  {
+    using Type = PhysicalDeviceShaderIntegerDotProductProperties;
+  };
+
+  using PhysicalDeviceShaderIntegerDotProductPropertiesKHR = PhysicalDeviceShaderIntegerDotProductProperties;
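Property structs like PhysicalDeviceShaderIntegerDotProductProperties are read-only capability reports: the implementation fills them in, which is why they carry no setters. A minimal sketch of reading them back through the analogous getProperties2 chain, again assuming an illustrative physicalDevice handle:

    // Read back which integer dot product paths the implementation accelerates.
    auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceShaderIntegerDotProductProperties>();
    const auto & dot = chain.get<vk::PhysicalDeviceShaderIntegerDotProductProperties>();
    bool fastPackedDot = dot.integerDotProduct4x8BitPackedUnsignedAccelerated;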
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>
+  {
+    using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+  };
+
+  struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderModuleIdentifierFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier_ = {},
+                                                                          void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderModuleIdentifier( shaderModuleIdentifier_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceShaderModuleIdentifierFeaturesEXT( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderModuleIdentifierFeaturesEXT( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderModuleIdentifierFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderModuleIdentifierFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT &
+      setShaderModuleIdentifier( VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderModuleIdentifier = shaderModuleIdentifier_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderModuleIdentifier );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderModuleIdentifier == rhs.shaderModuleIdentifier );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT>
+  {
+    using Type = PhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+  };
+
+  struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14
+      PhysicalDeviceShaderModuleIdentifierPropertiesEXT( std::array<uint8_t, VK_UUID_SIZE> const & shaderModuleIdentifierAlgorithmUUID_ = {},
+                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderModuleIdentifierAlgorithmUUID( shaderModuleIdentifierAlgorithmUUID_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14
+      PhysicalDeviceShaderModuleIdentifierPropertiesEXT( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderModuleIdentifierPropertiesEXT( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderModuleIdentifierPropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderModuleIdentifierPropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderModuleIdentifierPropertiesEXT &
+      operator=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderModuleIdentifierPropertiesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderModuleIdentifierAlgorithmUUID );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderModuleIdentifierAlgorithmUUID == rhs.shaderModuleIdentifierAlgorithmUUID );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> shaderModuleIdentifierAlgorithmUUID = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT>
+  {
+    using Type = PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+  };
+
+  struct PhysicalDeviceShaderObjectFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderObjectFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderObject_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderObject( shaderObject_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderObjectFeaturesEXT( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderObjectFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderObjectFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderObjectFeaturesEXT & operator=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderObjectFeaturesEXT & operator=( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setShaderObject( VULKAN_HPP_NAMESPACE::Bool32 shaderObject_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderObject = shaderObject_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderObjectFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderObjectFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderObjectFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderObjectFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderObject );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderObjectFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderObject == rhs.shaderObject );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderObject = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderObjectFeaturesEXT>
+  {
+    using Type = PhysicalDeviceShaderObjectFeaturesEXT;
+  };
+
+  struct PhysicalDeviceShaderObjectPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderObjectPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderObjectPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( std::array<uint8_t, VK_UUID_SIZE> const & shaderBinaryUUID_ = {},
+                                                                     uint32_t shaderBinaryVersion_ = {},
+                                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderBinaryUUID( shaderBinaryUUID_ )
+      , shaderBinaryVersion( shaderBinaryVersion_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderObjectPropertiesEXT( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderObjectPropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderObjectPropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderObjectPropertiesEXT & operator=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderObjectPropertiesEXT & operator=( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderObjectPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderObjectPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderObjectPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderObjectPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::
+      tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderBinaryUUID, shaderBinaryVersion );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderObjectPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBinaryUUID == rhs.shaderBinaryUUID ) &&
+             ( shaderBinaryVersion == rhs.shaderBinaryVersion );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderObjectPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> shaderBinaryUUID = {};
+    uint32_t shaderBinaryVersion = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderObjectPropertiesEXT>
+  {
+    using Type = PhysicalDeviceShaderObjectPropertiesEXT;
+  };
+
+  struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceShaderSMBuiltinsFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {},
+                                                                   void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderSMBuiltins( shaderSMBuiltins_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderSMBuiltinsFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSMBuiltins = shaderSMBuiltins_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderSMBuiltins );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMBuiltins == rhs.shaderSMBuiltins );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV>
+  {
+    using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV;
+  };
+
+  struct PhysicalDeviceShaderSMBuiltinsPropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceShaderSMBuiltinsPropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {}, uint32_t shaderWarpsPerSM_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderSMCount( shaderSMCount_ )
+      , shaderWarpsPerSM( shaderWarpsPerSM_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderSMBuiltinsPropertiesNV( *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderSMCount, shaderWarpsPerSM );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMCount == rhs.shaderSMCount ) && ( shaderWarpsPerSM == rhs.shaderWarpsPerSM );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
+    void * pNext = {};
+    uint32_t shaderSMCount = {};
+    uint32_t shaderWarpsPerSM = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV>
+  {
+    using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV;
+  };
+
+  struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures
+  {
+    using NativeType = VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {},
+                                                                            void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceShaderSubgroupExtendedTypesFeatures( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderSubgroupExtendedTypesFeatures( *reinterpret_cast<PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderSubgroupExtendedTypesFeatures &
+      operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures &
+      setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderSubgroupExtendedTypes );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures>
+  {
+    using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+  };
+
+  using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
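+
+  // Editorial note, not part of the generated header: feature structs like the one above are
+  // typically queried by chaining them through pNext. A minimal sketch using vulkan.hpp's
+  // StructureChain (names of the local variables are illustrative):
+  //
+  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
+  //                                            vk::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>();
+  //   bool supported = chain.get<vk::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>().shaderSubgroupExtendedTypes;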
+
+  struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ = {},
+                                                                                    void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderSubgroupUniformControlFlow( shaderSubgroupUniformControlFlow_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs )
+      VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
+          *reinterpret_cast<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
+      operator=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
+      operator=( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
+      setShaderSubgroupUniformControlFlow( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSubgroupUniformControlFlow = shaderSubgroupUniformControlFlow_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderSubgroupUniformControlFlow );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupUniformControlFlow == rhs.shaderSubgroupUniformControlFlow );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>
+  {
+    using Type = PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+  };
+
+  struct PhysicalDeviceShaderTerminateInvocationFeatures
+  {
+    using NativeType = VkPhysicalDeviceShaderTerminateInvocationFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {},
+                                                                          void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderTerminateInvocation( shaderTerminateInvocation_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceShaderTerminateInvocationFeatures( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderTerminateInvocationFeatures( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderTerminateInvocationFeatures( *reinterpret_cast<PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderTerminateInvocationFeatures & operator=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderTerminateInvocationFeatures & operator=( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures &
+      setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderTerminateInvocation = shaderTerminateInvocation_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderTerminateInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderTerminateInvocationFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderTerminateInvocationFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderTerminateInvocation );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderTerminateInvocationFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures>
+  {
+    using Type = PhysicalDeviceShaderTerminateInvocationFeatures;
+  };
+
+  using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures;
+
+  struct PhysicalDeviceShaderTileImageFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderTileImageFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess_ = {},
+                                                                   void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderTileImageColorReadAccess( shaderTileImageColorReadAccess_ )
+      , shaderTileImageDepthReadAccess( shaderTileImageDepthReadAccess_ )
+      , shaderTileImageStencilReadAccess( shaderTileImageStencilReadAccess_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderTileImageFeaturesEXT( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderTileImageFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderTileImageFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderTileImageFeaturesEXT & operator=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderTileImageFeaturesEXT & operator=( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT &
+      setShaderTileImageColorReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderTileImageColorReadAccess = shaderTileImageColorReadAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT &
+      setShaderTileImageDepthReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderTileImageDepthReadAccess = shaderTileImageDepthReadAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT &
+      setShaderTileImageStencilReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderTileImageStencilReadAccess = shaderTileImageStencilReadAccess_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShaderTileImageFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderTileImageFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderTileImageFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderTileImageFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderTileImageColorReadAccess, shaderTileImageDepthReadAccess, shaderTileImageStencilReadAccess );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderTileImageFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTileImageColorReadAccess == rhs.shaderTileImageColorReadAccess ) &&
+             ( shaderTileImageDepthReadAccess == rhs.shaderTileImageDepthReadAccess ) &&
+             ( shaderTileImageStencilReadAccess == rhs.shaderTileImageStencilReadAccess );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT>
+  {
+    using Type = PhysicalDeviceShaderTileImageFeaturesEXT;
+  };
+
+  struct PhysicalDeviceShaderTileImagePropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceShaderTileImagePropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageCoherentReadAccelerated_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadSampleFromPixelRateInvocation_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadFromHelperInvocation_ = {},
+                                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderTileImageCoherentReadAccelerated( shaderTileImageCoherentReadAccelerated_ )
+      , shaderTileImageReadSampleFromPixelRateInvocation( shaderTileImageReadSampleFromPixelRateInvocation_ )
+      , shaderTileImageReadFromHelperInvocation( shaderTileImageReadFromHelperInvocation_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderTileImagePropertiesEXT( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderTileImagePropertiesEXT( *reinterpret_cast<PhysicalDeviceShaderTileImagePropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderTileImagePropertiesEXT & operator=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderTileImagePropertiesEXT & operator=( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShaderTileImagePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderTileImagePropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderTileImagePropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderTileImagePropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie(
+        sType, pNext, shaderTileImageCoherentReadAccelerated, shaderTileImageReadSampleFromPixelRateInvocation, shaderTileImageReadFromHelperInvocation );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShaderTileImagePropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTileImageCoherentReadAccelerated == rhs.shaderTileImageCoherentReadAccelerated ) &&
+             ( shaderTileImageReadSampleFromPixelRateInvocation == rhs.shaderTileImageReadSampleFromPixelRateInvocation ) &&
+             ( shaderTileImageReadFromHelperInvocation == rhs.shaderTileImageReadFromHelperInvocation );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageCoherentReadAccelerated = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadSampleFromPixelRateInvocation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadFromHelperInvocation = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT>
+  {
+    using Type = PhysicalDeviceShaderTileImagePropertiesEXT;
+  };
+
+  struct PhysicalDeviceShadingRateImageFeaturesNV
+  {
+    using NativeType = VkPhysicalDeviceShadingRateImageFeaturesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {},
+                                                                   void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shadingRateImage( shadingRateImage_ )
+      , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShadingRateImageFeaturesNV( *reinterpret_cast<PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShadingRateImageFeaturesNV & operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateImage = shadingRateImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV &
+      setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shadingRateImage, shadingRateCoarseSampleOrder );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImage == rhs.shadingRateImage ) &&
+             ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImageFeaturesNV>
+  {
+    using Type = PhysicalDeviceShadingRateImageFeaturesNV;
+  };
+
+  struct PhysicalDeviceShadingRateImagePropertiesNV
+  {
+    using NativeType = VkPhysicalDeviceShadingRateImagePropertiesNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {},
+                                                                     uint32_t shadingRatePaletteSize_ = {},
+                                                                     uint32_t shadingRateMaxCoarseSamples_ = {},
+                                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shadingRateTexelSize( shadingRateTexelSize_ )
+      , shadingRatePaletteSize( shadingRatePaletteSize_ )
+      , shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShadingRateImagePropertiesNV( *reinterpret_cast<PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceShadingRateImagePropertiesNV & operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV *>( this );
+    }
+
+    operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &, uint32_t const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shadingRateTexelSize, shadingRatePaletteSize, shadingRateMaxCoarseSamples );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateTexelSize == rhs.shadingRateTexelSize ) &&
+             ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {};
+    uint32_t shadingRatePaletteSize = {};
+    uint32_t shadingRateMaxCoarseSamples = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImagePropertiesNV>
+  {
+    using Type = PhysicalDeviceShadingRateImagePropertiesNV;
+  };
+
+  struct PhysicalDeviceSparseImageFormatInfo2
+  {
+    using NativeType = VkPhysicalDeviceSparseImageFormatInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceSparseImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                            VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+                                            VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+                                            VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
+                                            VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
+                                            const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , format( format_ )
+      , type( type_ )
+      , samples( samples_ )
+      , usage( usage_ )
+      , tiling( tiling_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSparseImageFormatInfo2( *reinterpret_cast<PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSparseImageFormatInfo2 & operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tiling = tiling_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( this );
+    }
+
+    operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::Format const &,
+               VULKAN_HPP_NAMESPACE::ImageType const &,
+               VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &,
+               VULKAN_HPP_NAMESPACE::ImageUsageFlags const &,
+               VULKAN_HPP_NAMESPACE::ImageTiling const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, format, type, samples, usage, tiling );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && ( samples == rhs.samples ) &&
+             ( usage == rhs.usage ) && ( tiling == rhs.tiling );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSparseImageFormatInfo2>
+  {
+    using Type = PhysicalDeviceSparseImageFormatInfo2;
+  };
+
+  using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;
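+
+  // Editorial note, not part of the generated header: unlike the feature/property structs around
+  // it, PhysicalDeviceSparseImageFormatInfo2 is an *input* struct. An illustrative query sketch
+  // (format/usage values chosen arbitrarily for the example):
+  //
+  //   vk::PhysicalDeviceSparseImageFormatInfo2 info( vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D,
+  //                                                  vk::SampleCountFlagBits::e1,
+  //                                                  vk::ImageUsageFlagBits::eSampled,
+  //                                                  vk::ImageTiling::eOptimal );
+  //   auto props = physicalDevice.getSparseImageFormatProperties2( info );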
+
+  struct PhysicalDeviceSubgroupProperties
+  {
+    using NativeType = VkPhysicalDeviceSubgroupProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( uint32_t subgroupSize_ = {},
+                                                           VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {},
+                                                           VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {},
+                                                           VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {},
+                                                           void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , subgroupSize( subgroupSize_ )
+      , supportedStages( supportedStages_ )
+      , supportedOperations( supportedOperations_ )
+      , quadOperationsInAllStages( quadOperationsInAllStages_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSubgroupProperties( *reinterpret_cast<PhysicalDeviceSubgroupProperties const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties *>( this );
+    }
+
+    operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::ShaderStageFlags const &,
+               VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, subgroupSize, supportedStages, supportedOperations, quadOperationsInAllStages );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSubgroupProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSize == rhs.subgroupSize ) && ( supportedStages == rhs.supportedStages ) &&
+             ( supportedOperations == rhs.supportedOperations ) && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
+    void * pNext = {};
+    uint32_t subgroupSize = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {};
+    VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {};
+    VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupProperties>
+  {
+    using Type = PhysicalDeviceSubgroupProperties;
+  };
+
+  struct PhysicalDeviceSubgroupSizeControlFeatures
+  {
+    using NativeType = VkPhysicalDeviceSubgroupSizeControlFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {},
+                                                                    VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {},
+                                                                    void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , subgroupSizeControl( subgroupSizeControl_ )
+      , computeFullSubgroups( computeFullSubgroups_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubgroupSizeControlFeatures( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSubgroupSizeControlFeatures( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlFeatures const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceSubgroupSizeControlFeatures & operator=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSubgroupSizeControlFeatures & operator=( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures &
+      setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subgroupSizeControl = subgroupSizeControl_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures &
+      setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeFullSubgroups = computeFullSubgroups_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceSubgroupSizeControlFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceSubgroupSizeControlFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, subgroupSizeControl, computeFullSubgroups );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSubgroupSizeControlFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSizeControl == rhs.subgroupSizeControl ) &&
+             ( computeFullSubgroups == rhs.computeFullSubgroups );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
+    VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlFeatures>
+  {
+    using Type = PhysicalDeviceSubgroupSizeControlFeatures;
+  };
+
+  using PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures;
+
+  struct PhysicalDeviceSubgroupSizeControlProperties
+  {
+    using NativeType = VkPhysicalDeviceSubgroupSizeControlProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( uint32_t minSubgroupSize_ = {},
+                                                                      uint32_t maxSubgroupSize_ = {},
+                                                                      uint32_t maxComputeWorkgroupSubgroups_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {},
+                                                                      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , minSubgroupSize( minSubgroupSize_ )
+      , maxSubgroupSize( maxSubgroupSize_ )
+      , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ )
+      , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSubgroupSizeControlProperties( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSubgroupSizeControlProperties( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlProperties const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceSubgroupSizeControlProperties & operator=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceSubgroupSizeControlProperties & operator=( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSubgroupSizeControlProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlProperties *>( this );
+    }
+
+    operator VkPhysicalDeviceSubgroupSizeControlProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::ShaderStageFlags const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSubgroupSizeControlProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && ( maxSubgroupSize == rhs.maxSubgroupSize ) &&
+             ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages );
+#  endif
+    }
struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT + { + using NativeType = VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , subpassMergeFeedback( subpassMergeFeedback_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( *reinterpret_cast<PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const *>( &rhs ) ) + { + } + + PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & + setSubpassMergeFeedback( VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback_ ) VULKAN_HPP_NOEXCEPT + { + subpassMergeFeedback = subpassMergeFeedback_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT *>( this ); + } + + operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subpassMergeFeedback ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassMergeFeedback == rhs.subpassMergeFeedback ); +# endif + } + + bool operator!=( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const 
& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT> + { + using Type = PhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + }; + + struct PhysicalDeviceSubpassShadingFeaturesHUAWEI + { + using NativeType = VkPhysicalDeviceSubpassShadingFeaturesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , subpassShading( subpassShading_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubpassShadingFeaturesHUAWEI( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubpassShadingFeaturesHUAWEI( *reinterpret_cast<PhysicalDeviceSubpassShadingFeaturesHUAWEI const *>( &rhs ) ) + { + } + + PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setSubpassShading( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ ) VULKAN_HPP_NOEXCEPT + { + subpassShading = subpassShading_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceSubpassShadingFeaturesHUAWEI *>( this ); + } + + operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceSubpassShadingFeaturesHUAWEI *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subpassShading ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassShading == rhs.subpassShading ); +# endif + } + + bool operator!=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subpassShading = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI> + { + using Type = 
PhysicalDeviceSubpassShadingFeaturesHUAWEI; + }; + + struct PhysicalDeviceSubpassShadingPropertiesHUAWEI + { + using NativeType = VkPhysicalDeviceSubpassShadingPropertiesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( uint32_t maxSubpassShadingWorkgroupSizeAspectRatio_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxSubpassShadingWorkgroupSizeAspectRatio( maxSubpassShadingWorkgroupSizeAspectRatio_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubpassShadingPropertiesHUAWEI( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubpassShadingPropertiesHUAWEI( *reinterpret_cast<PhysicalDeviceSubpassShadingPropertiesHUAWEI const *>( &rhs ) ) + { + } + + PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI const *>( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceSubpassShadingPropertiesHUAWEI *>( this ); + } + + operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceSubpassShadingPropertiesHUAWEI *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxSubpassShadingWorkgroupSizeAspectRatio ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSubpassShadingWorkgroupSizeAspectRatio == rhs.maxSubpassShadingWorkgroupSizeAspectRatio ); +# endif + } + + bool operator!=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI; + void * pNext = {}; + uint32_t maxSubpassShadingWorkgroupSizeAspectRatio = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI> + { + using Type = PhysicalDeviceSubpassShadingPropertiesHUAWEI; + }; + + struct PhysicalDeviceSurfaceInfo2KHR + { + using NativeType = VkPhysicalDeviceSurfaceInfo2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , surface( surface_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( 
PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSurfaceInfo2KHR( *reinterpret_cast<PhysicalDeviceSurfaceInfo2KHR const *>( &rhs ) ) + { + } + + PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( this ); + } + + operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SurfaceKHR const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, surface ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const & ) const = default; +#else + bool operator==( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surface == rhs.surface ); +# endif + } + + bool operator!=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceSurfaceInfo2KHR> + { + using Type = PhysicalDeviceSurfaceInfo2KHR; + };
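 + + // Usage sketch (illustrative only): PhysicalDeviceSurfaceInfo2KHR wraps the surface handle for the VK_KHR_get_surface_capabilities2 queries; assumes a valid vk::SurfaceKHR named surface and a physicalDevice whose dispatcher has that extension loaded. + // VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR surfaceInfo{}; + // surfaceInfo.setSurface( surface ); + // VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR caps = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo ); + // // caps.surfaceCapabilities.currentExtent etc. are then available; further structs can be chained through caps.pNext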
struct PhysicalDeviceSwapchainMaintenance1FeaturesEXT + { + using NativeType = VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSwapchainMaintenance1FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , swapchainMaintenance1( swapchainMaintenance1_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSwapchainMaintenance1FeaturesEXT( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSwapchainMaintenance1FeaturesEXT( VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSwapchainMaintenance1FeaturesEXT( *reinterpret_cast<PhysicalDeviceSwapchainMaintenance1FeaturesEXT const *>( &rhs ) ) + { + } + + PhysicalDeviceSwapchainMaintenance1FeaturesEXT & operator=( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSwapchainMaintenance1FeaturesEXT & operator=( VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSwapchainMaintenance1FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSwapchainMaintenance1FeaturesEXT & + setSwapchainMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1_ ) VULKAN_HPP_NOEXCEPT + { + swapchainMaintenance1 = swapchainMaintenance1_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT *>( this ); + } + + operator VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainMaintenance1 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainMaintenance1 == rhs.swapchainMaintenance1 ); +# endif + } + + bool operator!=( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1 = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT> + { + using Type = PhysicalDeviceSwapchainMaintenance1FeaturesEXT; + }; + + struct PhysicalDeviceSynchronization2Features + { + using NativeType = VkPhysicalDeviceSynchronization2Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSynchronization2Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , synchronization2( synchronization2_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSynchronization2Features( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSynchronization2Features( *reinterpret_cast<PhysicalDeviceSynchronization2Features const *>( &rhs ) ) + { + } + + PhysicalDeviceSynchronization2Features & operator=( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSynchronization2Features & operator=( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceSynchronization2Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT + { + synchronization2 = synchronization2_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceSynchronization2Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceSynchronization2Features *>( this ); + } + + operator VkPhysicalDeviceSynchronization2Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceSynchronization2Features *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, synchronization2 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSynchronization2Features const & ) const = default; +#else + bool operator==( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( synchronization2 == rhs.synchronization2 ); +# endif + } + + bool operator!=( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSynchronization2Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceSynchronization2Features> + { + using Type = PhysicalDeviceSynchronization2Features; + }; + + using PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features;
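 + + // Usage sketch (illustrative only): feature structs such as PhysicalDeviceSynchronization2Features are enabled by chaining them into DeviceCreateInfo::pNext at device creation; queueCreateInfo is an assumed, already-filled vk::DeviceQueueCreateInfo. + // VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features sync2Features{}; + // sync2Features.setSynchronization2( VK_TRUE ); + // VULKAN_HPP_NAMESPACE::DeviceCreateInfo deviceInfo{}; + // deviceInfo.setQueueCreateInfos( queueCreateInfo ).setPNext( &sync2Features ); + // VULKAN_HPP_NAMESPACE::Device device = physicalDevice.createDevice( deviceInfo );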
struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT + { + using NativeType = VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , texelBufferAlignment( texelBufferAlignment_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTexelBufferAlignmentFeaturesEXT( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs ) ) + { + } + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & + setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT + { + texelBufferAlignment = texelBufferAlignment_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>( this ); + } + + operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, texelBufferAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( texelBufferAlignment == rhs.texelBufferAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT> + { + using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT; + }; + + struct PhysicalDeviceTexelBufferAlignmentProperties + { + using NativeType = VkPhysicalDeviceTexelBufferAlignmentProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ) + , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ) + , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ) + , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTexelBufferAlignmentProperties( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTexelBufferAlignmentProperties( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentProperties const *>( &rhs ) ) + { + } + + PhysicalDeviceTexelBufferAlignmentProperties & operator=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceTexelBufferAlignmentProperties & operator=( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) 
VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties const *>( &rhs ); + return *this; + } + + operator VkPhysicalDeviceTexelBufferAlignmentProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentProperties *>( this ); + } + + operator VkPhysicalDeviceTexelBufferAlignmentProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentProperties *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + storageTexelBufferOffsetAlignmentBytes, + storageTexelBufferOffsetSingleTexelAlignment, + uniformTexelBufferOffsetAlignmentBytes, + uniformTexelBufferOffsetSingleTexelAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTexelBufferAlignmentProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) && + ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) && + ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) && + ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentProperties> + { + using Type = PhysicalDeviceTexelBufferAlignmentProperties; + }; + + using PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties; + + struct PhysicalDeviceTextureCompressionASTCHDRFeatures + { + using NativeType = VkPhysicalDeviceTextureCompressionASTCHDRFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , textureCompressionASTC_HDR( textureCompressionASTC_HDR_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceTextureCompressionASTCHDRFeatures( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTextureCompressionASTCHDRFeatures( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTextureCompressionASTCHDRFeatures( *reinterpret_cast<PhysicalDeviceTextureCompressionASTCHDRFeatures const *>( &rhs ) ) + { + } + + PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & + setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionASTC_HDR = textureCompressionASTC_HDR_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeatures *>( this ); + } + + operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeatures *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, textureCompressionASTC_HDR ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ); +# endif + } + + bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures> + { + using Type = PhysicalDeviceTextureCompressionASTCHDRFeatures; + }; + + using PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures; + + struct PhysicalDeviceTilePropertiesFeaturesQCOM + { + using NativeType = VkPhysicalDeviceTilePropertiesFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 tileProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , tileProperties( tileProperties_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTilePropertiesFeaturesQCOM( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTilePropertiesFeaturesQCOM( *reinterpret_cast<PhysicalDeviceTilePropertiesFeaturesQCOM const *>( &rhs ) ) + { + } + + PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + 
PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setTileProperties( VULKAN_HPP_NAMESPACE::Bool32 tileProperties_ ) VULKAN_HPP_NOEXCEPT + { + tileProperties = tileProperties_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceTilePropertiesFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceTilePropertiesFeaturesQCOM *>( this ); + } + + operator VkPhysicalDeviceTilePropertiesFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceTilePropertiesFeaturesQCOM *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, tileProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTilePropertiesFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileProperties == rhs.tileProperties ); +# endif + } + + bool operator!=( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileProperties = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM> + { + using Type = PhysicalDeviceTilePropertiesFeaturesQCOM; + }; + + struct PhysicalDeviceTimelineSemaphoreFeatures + { + using NativeType = VkPhysicalDeviceTimelineSemaphoreFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , timelineSemaphore( timelineSemaphore_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTimelineSemaphoreFeatures( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs ) ) + { + } + + PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceTimelineSemaphoreFeatures & operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + 
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & + setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT + { + timelineSemaphore = timelineSemaphore_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures *>( this ); + } + + operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, timelineSemaphore ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timelineSemaphore == rhs.timelineSemaphore ); +# endif + } + + bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreFeatures> + { + using Type = PhysicalDeviceTimelineSemaphoreFeatures; + }; + + using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures;
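 + + // Usage sketch (illustrative only): support is usually checked before enabling, by chaining the feature struct into PhysicalDeviceFeatures2; physicalDevice is an assumed valid handle. + // VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures timelineFeatures{}; + // VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features2{}; + // features2.pNext = &timelineFeatures; + // physicalDevice.getFeatures2( &features2 ); + // if ( timelineFeatures.timelineSemaphore ) { /* timeline semaphores (SemaphoreTypeCreateInfo with eTimeline) may be used */ }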
struct PhysicalDeviceTimelineSemaphoreProperties + { + using NativeType = VkPhysicalDeviceTimelineSemaphoreProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTimelineSemaphoreProperties( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs ) ) + { + } + + PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceTimelineSemaphoreProperties & operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs ); + return *this; + } + + operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties *>( this ); + } + + operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint64_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxTimelineSemaphoreValueDifference ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ); +# endif + } + + bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties; + void * pNext = {}; + uint64_t maxTimelineSemaphoreValueDifference = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreProperties> + { + using Type = PhysicalDeviceTimelineSemaphoreProperties; + }; + + using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties; + + struct PhysicalDeviceToolProperties + { + using NativeType = VkPhysicalDeviceToolProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & name_ = {}, + std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & version_ = {}, + VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {}, + std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {}, + std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & layer_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , name( name_ ) + , version( version_ ) + , purposes( purposes_ ) + , description( description_ ) + , layer( layer_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceToolProperties( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceToolProperties( *reinterpret_cast<PhysicalDeviceToolProperties const *>( &rhs ) ) + { + } + + PhysicalDeviceToolProperties & operator=( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceToolProperties & operator=( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties const *>( &rhs ); + return *this; + } + + operator VkPhysicalDeviceToolProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceToolProperties *>( this ); + } + + operator VkPhysicalDeviceToolProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceToolProperties *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, + void * const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &, + VULKAN_HPP_NAMESPACE::ToolPurposeFlags const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, name, version, purposes, description, layer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceToolProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( version == rhs.version ) && ( purposes == rhs.purposes ) && + ( description == rhs.description ) && ( layer == rhs.layer ); +# endif + } + + bool operator!=( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> version = {}; + VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layer = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceToolProperties> + { + using Type = PhysicalDeviceToolProperties; + }; + + using PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties;
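 + + // Usage sketch (illustrative only; assumes Vulkan 1.3 or VK_EXT_tooling_info via the EXT alias above, and an exception-enabled vulkan.hpp): active tools (validation layers, profilers, ...) can be enumerated directly. + // std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> tools = physicalDevice.getToolProperties(); + // for ( auto const & tool : tools ) + // { + // std::string name = tool.name; // ArrayWrapper1D<char, N> converts to std::string + // }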
struct PhysicalDeviceTransformFeedbackFeaturesEXT + { + using NativeType = VkPhysicalDeviceTransformFeedbackFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , transformFeedback( transformFeedback_ ) + , geometryStreams( geometryStreams_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTransformFeedbackFeaturesEXT( *reinterpret_cast<PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs ) ) + { + } + + PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & + setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT + { + transformFeedback = transformFeedback_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT + { + geometryStreams = geometryStreams_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT *>( this ); + } + + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, transformFeedback, geometryStreams ); + } +#endif + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transformFeedback == rhs.transformFeedback ) && ( geometryStreams == rhs.geometryStreams ); +# endif + } + + bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT> + { + using Type = PhysicalDeviceTransformFeedbackFeaturesEXT; + }; + + struct PhysicalDeviceTransformFeedbackPropertiesEXT + { + using NativeType = VkPhysicalDeviceTransformFeedbackPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( uint32_t maxTransformFeedbackStreams_ = {}, + uint32_t maxTransformFeedbackBuffers_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, + uint32_t maxTransformFeedbackStreamDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataStride_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ) + , maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ) + , maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ) + , maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ ) + , maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ ) + , maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ ) + , transformFeedbackQueries( transformFeedbackQueries_ ) + , transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ ) + , transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ ) + , transformFeedbackDraw( transformFeedbackDraw_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTransformFeedbackPropertiesEXT( *reinterpret_cast<PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs ) ) + { + } + + PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs ); + return *this; + } + + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT *>( this ); + } + + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &, uint32_t const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxTransformFeedbackStreams, + maxTransformFeedbackBuffers, + maxTransformFeedbackBufferSize, + maxTransformFeedbackStreamDataSize, + maxTransformFeedbackBufferDataSize, + maxTransformFeedbackBufferDataStride, + transformFeedbackQueries, + transformFeedbackStreamsLinesTriangles, + transformFeedbackRasterizationStreamSelect, + transformFeedbackDraw ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) && + ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) && ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) && + ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize ) && + ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize ) && + ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride ) && + ( transformFeedbackQueries == rhs.transformFeedbackQueries ) && + ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles ) && + ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect ) && + ( transformFeedbackDraw == rhs.transformFeedbackDraw ); +# endif + } + + bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; + void * pNext = {}; + uint32_t maxTransformFeedbackStreams = {}; + uint32_t maxTransformFeedbackBuffers = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {}; + uint32_t maxTransformFeedbackStreamDataSize = {}; + uint32_t maxTransformFeedbackBufferDataSize = {}; + uint32_t maxTransformFeedbackBufferDataStride = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT> + { + using Type = PhysicalDeviceTransformFeedbackPropertiesEXT; + };
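 + + // Usage sketch (illustrative only): vulkan.hpp can return several chained structs at once through a StructureChain, e.g. to read the transform-feedback limits above; assumes VK_EXT_transform_feedback is supported by physicalDevice. + // auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2, VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>(); + // auto const & xfbProps = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>(); + // uint32_t maxStreams = xfbProps.maxTransformFeedbackStreams;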
struct PhysicalDeviceUniformBufferStandardLayoutFeatures + { + using NativeType = VkPhysicalDeviceUniformBufferStandardLayoutFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , uniformBufferStandardLayout( uniformBufferStandardLayout_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceUniformBufferStandardLayoutFeatures( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceUniformBufferStandardLayoutFeatures( *reinterpret_cast<PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs ) ) + { + } + + PhysicalDeviceUniformBufferStandardLayoutFeatures & + operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & + setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT + { + uniformBufferStandardLayout = uniformBufferStandardLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeatures *>( this ); + } + + operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeatures *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, uniformBufferStandardLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ); +# endif + } + + bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures> + { + using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures; + }; + + using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures; + + struct PhysicalDeviceVariablePointersFeatures + { + using NativeType = VkPhysicalDeviceVariablePointersFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVariablePointersFeatures; + +#if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , variablePointersStorageBuffer( variablePointersStorageBuffer_ ) + , variablePointers( variablePointers_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVariablePointersFeatures( *reinterpret_cast<PhysicalDeviceVariablePointersFeatures const *>( &rhs ) ) + { + } + + PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVariablePointersFeatures & operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & + setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT + { + variablePointersStorageBuffer = variablePointersStorageBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT + { + variablePointers = variablePointers_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures *>( this ); + } + + operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, variablePointersStorageBuffer, variablePointers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVariablePointersFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && + ( variablePointers == rhs.variablePointers ); +# endif + } + + bool operator!=( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceVariablePointersFeatures> + { + using Type = PhysicalDeviceVariablePointersFeatures; + }; + + using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; + using 
+  using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
+  using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
+
+  struct PhysicalDeviceVertexAttributeDivisorFeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {},
+                                                                          VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {},
+                                                                          void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ )
+      , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceVertexAttributeDivisorFeaturesKHR( PhysicalDeviceVertexAttributeDivisorFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesKHR( VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVertexAttributeDivisorFeaturesKHR( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorFeaturesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesKHR & operator=( PhysicalDeviceVertexAttributeDivisorFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVertexAttributeDivisorFeaturesKHR & operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesKHR &
+      setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesKHR &
+      setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, vertexAttributeInstanceRateDivisor, vertexAttributeInstanceRateZeroDivisor );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) &&
+             ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesKHR>
+  {
+    using Type = PhysicalDeviceVertexAttributeDivisorFeaturesKHR;
+  };
+
+  using PhysicalDeviceVertexAttributeDivisorFeaturesEXT = PhysicalDeviceVertexAttributeDivisorFeaturesKHR;
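+  // NOTE (editorial, hedged): a minimal sketch of the fluent setter style these feature
+  // structs support; the chained temporary below is illustrative only, not part of the
+  // generated header:
+  //
+  //   auto divisorFeatures = VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesKHR{}
+  //                            .setVertexAttributeInstanceRateDivisor( VK_TRUE )
+  //                            .setVertexAttributeInstanceRateZeroDivisor( VK_TRUE );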
+  struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , maxVertexAttribDivisor( maxVertexAttribDivisor_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVertexAttributeDivisorPropertiesEXT( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesEXT &
+      operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxVertexAttribDivisor );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+    void * pNext = {};
+    uint32_t maxVertexAttribDivisor = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT>
+  {
+    using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+  };
+
+  struct PhysicalDeviceVertexAttributeDivisorPropertiesKHR
+  {
+    using NativeType = VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesKHR( uint32_t maxVertexAttribDivisor_ = {},
+                                                                            VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance_ = {},
+                                                                            void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , maxVertexAttribDivisor( maxVertexAttribDivisor_ )
+      , supportsNonZeroFirstInstance( supportsNonZeroFirstInstance_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceVertexAttributeDivisorPropertiesKHR( PhysicalDeviceVertexAttributeDivisorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesKHR( VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVertexAttributeDivisorPropertiesKHR( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorPropertiesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesKHR &
+      operator=( PhysicalDeviceVertexAttributeDivisorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVertexAttributeDivisorPropertiesKHR & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxVertexAttribDivisor, supportsNonZeroFirstInstance );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ) &&
+             ( supportsNonZeroFirstInstance == rhs.supportsNonZeroFirstInstance );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesKHR;
+    void * pNext = {};
+    uint32_t maxVertexAttribDivisor = {};
+    VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesKHR>
+  {
+    using Type = PhysicalDeviceVertexAttributeDivisorPropertiesKHR;
+  };
+  struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {},
+                                                                           void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , vertexInputDynamicState( vertexInputDynamicState_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceVertexInputDynamicStateFeaturesEXT( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVertexInputDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT &
+      setVertexInputDynamicState( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexInputDynamicState = vertexInputDynamicState_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, vertexInputDynamicState );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexInputDynamicState == rhs.vertexInputDynamicState );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT>
+  {
+    using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+  };
+
+  struct VideoProfileInfoKHR
+  {
+    using NativeType = VkVideoProfileInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR(
+      VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eNone,
+      VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ = {},
+      VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ = {},
+      VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ = {},
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , videoCodecOperation( videoCodecOperation_ )
+      , chromaSubsampling( chromaSubsampling_ )
+      , lumaBitDepth( lumaBitDepth_ )
+      , chromaBitDepth( chromaBitDepth_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoProfileInfoKHR( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoProfileInfoKHR( *reinterpret_cast<VideoProfileInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoProfileInfoKHR & operator=( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoProfileInfoKHR & operator=( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR &
+      setVideoCodecOperation( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      videoCodecOperation = videoCodecOperation_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR &
+      setChromaSubsampling( VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      chromaSubsampling = chromaSubsampling_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setLumaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      lumaBitDepth = lumaBitDepth_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setChromaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      chromaBitDepth = chromaBitDepth_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkVideoProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoProfileInfoKHR *>( this );
+    }
+
+    operator VkVideoProfileInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoProfileInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR const &,
+               VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR const &,
+               VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR const &,
+               VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, videoCodecOperation, chromaSubsampling, lumaBitDepth, chromaBitDepth );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoProfileInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperation == rhs.videoCodecOperation ) &&
+             ( chromaSubsampling == rhs.chromaSubsampling ) && ( lumaBitDepth == rhs.lumaBitDepth ) && ( chromaBitDepth == rhs.chromaBitDepth );
+# endif
+    }
+
+    bool operator!=( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eNone;
+    VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling = {};
+    VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth = {};
+    VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoProfileInfoKHR>
+  {
+    using Type = VideoProfileInfoKHR;
+  };
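+  // NOTE (editorial, hedged): a minimal sketch of describing an H.264 decode profile with
+  // this struct; the codec-specific pNext structs a real profile also needs are omitted:
+  //
+  //   auto profile = VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR{}
+  //                    .setVideoCodecOperation( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eDecodeH264 )
+  //                    .setChromaSubsampling( VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagBitsKHR::e420 )
+  //                    .setLumaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagBitsKHR::e8 )
+  //                    .setChromaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagBitsKHR::e8 );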
+  struct PhysicalDeviceVideoEncodeQualityLevelInfoKHR
+  {
+    using NativeType = VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQualityLevelInfoKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ = {},
+                                                                       uint32_t qualityLevel_ = {},
+                                                                       const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , pVideoProfile( pVideoProfile_ )
+      , qualityLevel( qualityLevel_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQualityLevelInfoKHR( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVideoEncodeQualityLevelInfoKHR( VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVideoEncodeQualityLevelInfoKHR( *reinterpret_cast<PhysicalDeviceVideoEncodeQualityLevelInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVideoEncodeQualityLevelInfoKHR & operator=( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVideoEncodeQualityLevelInfoKHR & operator=( VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQualityLevelInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQualityLevelInfoKHR &
+      setPVideoProfile( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVideoProfile = pVideoProfile_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQualityLevelInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qualityLevel = qualityLevel_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pVideoProfile, qualityLevel );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVideoProfile == rhs.pVideoProfile ) && ( qualityLevel == rhs.qualityLevel );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR;
+    const void * pNext = {};
+    const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile = {};
+    uint32_t qualityLevel = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR>
+  {
+    using Type = PhysicalDeviceVideoEncodeQualityLevelInfoKHR;
+  };
+
+  struct PhysicalDeviceVideoFormatInfoKHR
+  {
+    using NativeType = VkPhysicalDeviceVideoFormatInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoFormatInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {},
+                                                           const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , imageUsage( imageUsage_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVideoFormatInfoKHR( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVideoFormatInfoKHR( *reinterpret_cast<PhysicalDeviceVideoFormatInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVideoFormatInfoKHR & operator=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVideoFormatInfoKHR & operator=( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageUsage = imageUsage_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceVideoFormatInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceVideoFormatInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVideoFormatInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageUsage );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVideoFormatInfoKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageUsage == rhs.imageUsage );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoFormatInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVideoFormatInfoKHR>
+  {
+    using Type = PhysicalDeviceVideoFormatInfoKHR;
+  };
+
+  struct PhysicalDeviceVideoMaintenance1FeaturesKHR
+  {
+    using NativeType = VkPhysicalDeviceVideoMaintenance1FeaturesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance1FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance1_ = {},
+                                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , videoMaintenance1( videoMaintenance1_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance1FeaturesKHR( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVideoMaintenance1FeaturesKHR( VkPhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVideoMaintenance1FeaturesKHR( *reinterpret_cast<PhysicalDeviceVideoMaintenance1FeaturesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVideoMaintenance1FeaturesKHR & operator=( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVideoMaintenance1FeaturesKHR & operator=( VkPhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance1FeaturesKHR &
+      setVideoMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance1_ ) VULKAN_HPP_NOEXCEPT
+    {
+      videoMaintenance1 = videoMaintenance1_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVideoMaintenance1FeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVideoMaintenance1FeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, videoMaintenance1 );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVideoMaintenance1FeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoMaintenance1 == rhs.videoMaintenance1 );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance1 = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR>
+  {
+    using Type = PhysicalDeviceVideoMaintenance1FeaturesKHR;
+  };
+
+  struct PhysicalDeviceVulkan11Features
+  {
+    using NativeType = VkPhysicalDeviceVulkan11Features;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {},
+                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , storageBuffer16BitAccess( storageBuffer16BitAccess_ )
+      , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
+      , storagePushConstant16( storagePushConstant16_ )
+      , storageInputOutput16( storageInputOutput16_ )
+      , multiview( multiview_ )
+      , multiviewGeometryShader( multiviewGeometryShader_ )
+      , multiviewTessellationShader( multiviewTessellationShader_ )
+      , variablePointersStorageBuffer( variablePointersStorageBuffer_ )
+      , variablePointers( variablePointers_ )
+      , protectedMemory( protectedMemory_ )
+      , samplerYcbcrConversion( samplerYcbcrConversion_ )
+      , shaderDrawParameters( shaderDrawParameters_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkan11Features( *reinterpret_cast<PhysicalDeviceVulkan11Features const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
+      setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer16BitAccess = storageBuffer16BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
+      setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant16 = storagePushConstant16_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageInputOutput16 = storageInputOutput16_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiview = multiview_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
+      setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewGeometryShader = multiviewGeometryShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
+      setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewTessellationShader = multiviewTessellationShader_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
+      setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variablePointersStorageBuffer = variablePointersStorageBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variablePointers = variablePointers_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      protectedMemory = protectedMemory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
+      setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerYcbcrConversion = samplerYcbcrConversion_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDrawParameters = shaderDrawParameters_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan11Features *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan11Features *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       storageBuffer16BitAccess,
+                       uniformAndStorageBuffer16BitAccess,
+                       storagePushConstant16,
+                       storageInputOutput16,
+                       multiview,
+                       multiviewGeometryShader,
+                       multiviewTessellationShader,
+                       variablePointersStorageBuffer,
+                       variablePointers,
+                       protectedMemory,
+                       samplerYcbcrConversion,
+                       shaderDrawParameters );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVulkan11Features const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) &&
+             ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && ( storagePushConstant16 == rhs.storagePushConstant16 ) &&
+             ( storageInputOutput16 == rhs.storageInputOutput16 ) && ( multiview == rhs.multiview ) &&
+             ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && ( multiviewTessellationShader == rhs.multiviewTessellationShader ) &&
+             ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && ( variablePointers == rhs.variablePointers ) &&
+             ( protectedMemory == rhs.protectedMemory ) && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ) &&
+             ( shaderDrawParameters == rhs.shaderDrawParameters );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Features>
+  {
+    using Type = PhysicalDeviceVulkan11Features;
+  };
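+  // NOTE (editorial, hedged): a minimal sketch of chaining this struct into device
+  // creation via pNext; creating the queue infos and the device itself is omitted:
+  //
+  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features vk11;
+  //   vk11.setMultiview( VK_TRUE ).setShaderDrawParameters( VK_TRUE );
+  //   VULKAN_HPP_NAMESPACE::DeviceCreateInfo createInfo;
+  //   createInfo.setPNext( &vk11 );  // the chained struct enables the features at creation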
+  struct PhysicalDeviceVulkan11Properties
+  {
+    using NativeType = VkPhysicalDeviceVulkan11Properties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties(
+      std::array<uint8_t, VK_UUID_SIZE> const & deviceUUID_ = {},
+      std::array<uint8_t, VK_UUID_SIZE> const & driverUUID_ = {},
+      std::array<uint8_t, VK_LUID_SIZE> const & deviceLUID_ = {},
+      uint32_t deviceNodeMask_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {},
+      uint32_t subgroupSize_ = {},
+      VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {},
+      VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {},
+      VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes,
+      uint32_t maxMultiviewViewCount_ = {},
+      uint32_t maxMultiviewInstanceIndex_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {},
+      uint32_t maxPerSetDescriptors_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {},
+      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , deviceUUID( deviceUUID_ )
+      , driverUUID( driverUUID_ )
+      , deviceLUID( deviceLUID_ )
+      , deviceNodeMask( deviceNodeMask_ )
+      , deviceLUIDValid( deviceLUIDValid_ )
+      , subgroupSize( subgroupSize_ )
+      , subgroupSupportedStages( subgroupSupportedStages_ )
+      , subgroupSupportedOperations( subgroupSupportedOperations_ )
+      , subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ )
+      , pointClippingBehavior( pointClippingBehavior_ )
+      , maxMultiviewViewCount( maxMultiviewViewCount_ )
+      , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
+      , protectedNoFault( protectedNoFault_ )
+      , maxPerSetDescriptors( maxPerSetDescriptors_ )
+      , maxMemoryAllocationSize( maxMemoryAllocationSize_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkan11Properties( *reinterpret_cast<PhysicalDeviceVulkan11Properties const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan11Properties *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan11Properties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::ShaderStageFlags const &,
+               VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::PointClippingBehavior const &,
+               uint32_t const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       deviceUUID,
+                       driverUUID,
+                       deviceLUID,
+                       deviceNodeMask,
+                       deviceLUIDValid,
+                       subgroupSize,
+                       subgroupSupportedStages,
+                       subgroupSupportedOperations,
+                       subgroupQuadOperationsInAllStages,
+                       pointClippingBehavior,
+                       maxMultiviewViewCount,
+                       maxMultiviewInstanceIndex,
+                       protectedNoFault,
+                       maxPerSetDescriptors,
+                       maxMemoryAllocationSize );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVulkan11Properties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && ( driverUUID == rhs.driverUUID ) &&
+             ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ) &&
+             ( subgroupSize == rhs.subgroupSize ) && ( subgroupSupportedStages == rhs.subgroupSupportedStages ) &&
+             ( subgroupSupportedOperations == rhs.subgroupSupportedOperations ) &&
+             ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages ) && ( pointClippingBehavior == rhs.pointClippingBehavior ) &&
+             ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ) &&
+             ( protectedNoFault == rhs.protectedNoFault ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) &&
+             ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
+    uint32_t deviceNodeMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
+    uint32_t subgroupSize = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {};
+    VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {};
+    VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
+    uint32_t maxMultiviewViewCount = {};
+    uint32_t maxMultiviewInstanceIndex = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
+    uint32_t maxPerSetDescriptors = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Properties>
+  {
+    using Type = PhysicalDeviceVulkan11Properties;
+  };
+
+  struct PhysicalDeviceVulkan12Features
+  {
+    using NativeType = VkPhysicalDeviceVulkan12Features;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {},
+                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , samplerMirrorClampToEdge( samplerMirrorClampToEdge_ )
+      , drawIndirectCount( drawIndirectCount_ )
+      , storageBuffer8BitAccess( storageBuffer8BitAccess_ )
+      , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
+      , storagePushConstant8( storagePushConstant8_ )
+      , shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
+      , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
+      , shaderFloat16( shaderFloat16_ )
+      , shaderInt8( shaderInt8_ )
+      , descriptorIndexing( descriptorIndexing_ )
+      , shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
+      , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
+      , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
+      , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
+      , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
+      , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
+      , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
+      , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
+      , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
+      , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
+      , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
+      , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
+      , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
+      , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
+      , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
+      , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
+      , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
+      , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
+      , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
+      , runtimeDescriptorArray( runtimeDescriptorArray_ )
+      , samplerFilterMinmax( samplerFilterMinmax_ )
+      , scalarBlockLayout( scalarBlockLayout_ )
+      , imagelessFramebuffer( imagelessFramebuffer_ )
+      , uniformBufferStandardLayout( uniformBufferStandardLayout_ )
+      , shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
+      , separateDepthStencilLayouts( separateDepthStencilLayouts_ )
+      , hostQueryReset( hostQueryReset_ )
+      , timelineSemaphore( timelineSemaphore_ )
+      , bufferDeviceAddress( bufferDeviceAddress_ )
+      , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
+      , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+      , vulkanMemoryModel( vulkanMemoryModel_ )
+      , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
+      , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
+      , shaderOutputViewportIndex( shaderOutputViewportIndex_ )
+      , shaderOutputLayer( shaderOutputLayer_ )
+      , subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkan12Features( *reinterpret_cast<PhysicalDeviceVulkan12Features const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerMirrorClampToEdge = samplerMirrorClampToEdge_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drawIndirectCount = drawIndirectCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer8BitAccess = storageBuffer8BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant8 = storagePushConstant8_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderFloat16 = shaderFloat16_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInt8 = shaderInt8_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorIndexing = descriptorIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind(
+      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind(
+      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
+    {
+      runtimeDescriptorArray = runtimeDescriptorArray_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samplerFilterMinmax = samplerFilterMinmax_;
+      return *this;
+    }
+
PhysicalDeviceVulkan12Features & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + { + scalarBlockLayout = scalarBlockLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT + { + imagelessFramebuffer = imagelessFramebuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT + { + uniformBufferStandardLayout = uniformBufferStandardLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT + { + separateDepthStencilLayouts = separateDepthStencilLayouts_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT + { + hostQueryReset = hostQueryReset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT + { + timelineSemaphore = timelineSemaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddress = bufferDeviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModel = vulkanMemoryModel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT + { + shaderOutputViewportIndex = shaderOutputViewportIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & 
setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderOutputLayer = shaderOutputLayer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
+      setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan12Features *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan12Features *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       samplerMirrorClampToEdge,
+                       drawIndirectCount,
+                       storageBuffer8BitAccess,
+                       uniformAndStorageBuffer8BitAccess,
+                       storagePushConstant8,
+                       shaderBufferInt64Atomics,
+                       shaderSharedInt64Atomics,
+                       shaderFloat16,
+                       shaderInt8,
+                       descriptorIndexing,
+                       shaderInputAttachmentArrayDynamicIndexing,
+                       shaderUniformTexelBufferArrayDynamicIndexing,
+                       shaderStorageTexelBufferArrayDynamicIndexing,
+                       shaderUniformBufferArrayNonUniformIndexing,
+                       shaderSampledImageArrayNonUniformIndexing,
+                       shaderStorageBufferArrayNonUniformIndexing,
+                       shaderStorageImageArrayNonUniformIndexing,
+                       shaderInputAttachmentArrayNonUniformIndexing,
+                       shaderUniformTexelBufferArrayNonUniformIndexing,
+                       shaderStorageTexelBufferArrayNonUniformIndexing,
+                       descriptorBindingUniformBufferUpdateAfterBind,
+                       descriptorBindingSampledImageUpdateAfterBind,
+                       descriptorBindingStorageImageUpdateAfterBind,
+                       descriptorBindingStorageBufferUpdateAfterBind,
+                       descriptorBindingUniformTexelBufferUpdateAfterBind,
+                       descriptorBindingStorageTexelBufferUpdateAfterBind,
+                       descriptorBindingUpdateUnusedWhilePending,
+                       descriptorBindingPartiallyBound,
+                       descriptorBindingVariableDescriptorCount,
+                       runtimeDescriptorArray,
+                       samplerFilterMinmax,
+                       scalarBlockLayout,
+                       imagelessFramebuffer,
+                       uniformBufferStandardLayout,
+                       shaderSubgroupExtendedTypes,
+                       separateDepthStencilLayouts,
+                       hostQueryReset,
+                       timelineSemaphore,
+                       bufferDeviceAddress,
+                       bufferDeviceAddressCaptureReplay,
+                       bufferDeviceAddressMultiDevice,
+                       vulkanMemoryModel,
+                       vulkanMemoryModelDeviceScope,
+                       vulkanMemoryModelAvailabilityVisibilityChains,
+                       shaderOutputViewportIndex,
+                       shaderOutputLayer,
+                       subgroupBroadcastDynamicId );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVulkan12Features const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge ) &&
+             ( drawIndirectCount == rhs.drawIndirectCount ) && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) &&
+             ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && ( storagePushConstant8 == rhs.storagePushConstant8 ) &&
+             ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ) &&
+             ( shaderFloat16 == rhs.shaderFloat16 ) && ( shaderInt8 == rhs.shaderInt8 ) && ( descriptorIndexing == rhs.descriptorIndexing ) &&
+             ( shaderInputAttachmentArrayDynamicIndexing
== rhs.shaderInputAttachmentArrayDynamicIndexing ) && + ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) && + ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) && + ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) && + ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) && + ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) && + ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) && + ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) && + ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) && + ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) && + ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) && + ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) && + ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) && + ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) && + ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) && + ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) && + ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) && + ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) && + ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) && + ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ) && ( samplerFilterMinmax == rhs.samplerFilterMinmax ) && + ( scalarBlockLayout == rhs.scalarBlockLayout ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ) && + ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ) && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ) && + ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ) && ( hostQueryReset == rhs.hostQueryReset ) && + ( timelineSemaphore == rhs.timelineSemaphore ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) && + ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) && + ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ) && + ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex ) && ( shaderOutputLayer == rhs.shaderOutputLayer ) && + ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId ); +# endif + } + + bool operator!=( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {}; + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 
storageBuffer8BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
+    VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
+    VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {};
+    VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
+    VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
+    VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
+    VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Features>
+  {
+    using Type = PhysicalDeviceVulkan12Features;
+  };
+
+  struct PhysicalDeviceVulkan12Properties
+  {
+    using NativeType = VkPhysicalDeviceVulkan12Properties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14
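+    // Editorial sketch (not part of the generated header): PhysicalDeviceVulkan12Features
+    // defined above is typically filled via its chained setters and handed to device
+    // creation through the pNext chain. A minimal sketch, assuming a device-creation
+    // context; everything outside the vk:: types shown in this file is illustrative.
+    //
+    //   vk::PhysicalDeviceVulkan12Features vulkan12Features{};
+    //   vulkan12Features.setDescriptorIndexing( VK_TRUE )
+    //                   .setRuntimeDescriptorArray( VK_TRUE )
+    //                   .setBufferDeviceAddress( VK_TRUE );
+    //   vk::DeviceCreateInfo deviceCreateInfo{};
+    //   deviceCreateInfo.setPNext( &vulkan12Features );  // picked up by vkCreateDevice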
PhysicalDeviceVulkan12Properties(
+      VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
+      std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {},
+      std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {},
+      VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
+      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {},
+      uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
+      uint32_t maxPerStageUpdateAfterBindResources_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {},
+      VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {},
+      VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32
filterMinmaxSingleComponentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, + uint64_t maxTimelineSemaphoreValueDifference_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , driverID( driverID_ ) + , driverName( driverName_ ) + , driverInfo( driverInfo_ ) + , conformanceVersion( conformanceVersion_ ) + , denormBehaviorIndependence( denormBehaviorIndependence_ ) + , roundingModeIndependence( roundingModeIndependence_ ) + , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) + , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) + , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) + , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) + , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) + , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) + , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) + , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) + , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) + , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) + , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) + , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) + , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) + , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) + , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) + , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) + , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) + , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) + , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) + , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) + , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) + , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) + , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) + , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) + , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) + , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) + , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) + , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) + , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) + , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) + , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) + , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) + , 
maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ )
+      , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ )
+      , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ )
+      , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
+      , supportedDepthResolveModes( supportedDepthResolveModes_ )
+      , supportedStencilResolveModes( supportedStencilResolveModes_ )
+      , independentResolveNone( independentResolveNone_ )
+      , independentResolve( independentResolve_ )
+      , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
+      , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
+      , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
+      , framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkan12Properties( *reinterpret_cast<PhysicalDeviceVulkan12Properties const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan12Properties *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan12Properties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::DriverId const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ConformanceVersion const &,
+               VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &,
+               VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::ResolveModeFlags const &,
+               VULKAN_HPP_NAMESPACE::ResolveModeFlags const &,
+               VULKAN_HPP_NAMESPACE::Bool32
const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + uint64_t const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + driverID, + driverName, + driverInfo, + conformanceVersion, + denormBehaviorIndependence, + roundingModeIndependence, + shaderSignedZeroInfNanPreserveFloat16, + shaderSignedZeroInfNanPreserveFloat32, + shaderSignedZeroInfNanPreserveFloat64, + shaderDenormPreserveFloat16, + shaderDenormPreserveFloat32, + shaderDenormPreserveFloat64, + shaderDenormFlushToZeroFloat16, + shaderDenormFlushToZeroFloat32, + shaderDenormFlushToZeroFloat64, + shaderRoundingModeRTEFloat16, + shaderRoundingModeRTEFloat32, + shaderRoundingModeRTEFloat64, + shaderRoundingModeRTZFloat16, + shaderRoundingModeRTZFloat32, + shaderRoundingModeRTZFloat64, + maxUpdateAfterBindDescriptorsInAllPools, + shaderUniformBufferArrayNonUniformIndexingNative, + shaderSampledImageArrayNonUniformIndexingNative, + shaderStorageBufferArrayNonUniformIndexingNative, + shaderStorageImageArrayNonUniformIndexingNative, + shaderInputAttachmentArrayNonUniformIndexingNative, + robustBufferAccessUpdateAfterBind, + quadDivergentImplicitLod, + maxPerStageDescriptorUpdateAfterBindSamplers, + maxPerStageDescriptorUpdateAfterBindUniformBuffers, + maxPerStageDescriptorUpdateAfterBindStorageBuffers, + maxPerStageDescriptorUpdateAfterBindSampledImages, + maxPerStageDescriptorUpdateAfterBindStorageImages, + maxPerStageDescriptorUpdateAfterBindInputAttachments, + maxPerStageUpdateAfterBindResources, + maxDescriptorSetUpdateAfterBindSamplers, + maxDescriptorSetUpdateAfterBindUniformBuffers, + maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, + maxDescriptorSetUpdateAfterBindStorageBuffers, + maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, + maxDescriptorSetUpdateAfterBindSampledImages, + maxDescriptorSetUpdateAfterBindStorageImages, + maxDescriptorSetUpdateAfterBindInputAttachments, + supportedDepthResolveModes, + supportedStencilResolveModes, + independentResolveNone, + independentResolve, + filterMinmaxSingleComponentFormats, + filterMinmaxImageComponentMapping, + maxTimelineSemaphoreValueDifference, + framebufferIntegerColorSampleCounts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan12Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( driverName == rhs.driverName ) && + ( driverInfo == rhs.driverInfo ) && ( conformanceVersion == rhs.conformanceVersion ) && + ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && ( roundingModeIndependence == rhs.roundingModeIndependence ) && + ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && + ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && + ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) && + ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) && + ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && ( shaderDenormFlushToZeroFloat16 == 
rhs.shaderDenormFlushToZeroFloat16 ) && + ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && + ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) && + ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && + ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && + ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ) && + ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && + ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && + ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) && + ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && + ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) && + ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && + ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && + ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && + ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && + ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && + ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && + ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && + ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && + ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && + ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ) && + ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && + ( independentResolveNone == rhs.independentResolveNone ) && ( independentResolve == rhs.independentResolve ) && + ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) && + ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) && + ( 
maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) &&
+             ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
+    VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
+    uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
+    uint32_t maxPerStageUpdateAfterBindResources = {};
+    uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
+    uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
+    uint32_t
maxDescriptorSetUpdateAfterBindStorageImages = {};
+    uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
+    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
+    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
+    uint64_t maxTimelineSemaphoreValueDifference = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Properties>
+  {
+    using Type = PhysicalDeviceVulkan12Properties;
+  };
+
+  struct PhysicalDeviceVulkan13Features
+  {
+    using NativeType = VkPhysicalDeviceVulkan13Features;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Features;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {},
+                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , robustImageAccess( robustImageAccess_ )
+      , inlineUniformBlock( inlineUniformBlock_ )
+      , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
+      , pipelineCreationCacheControl( pipelineCreationCacheControl_ )
+      , privateData( privateData_ )
+      , shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
+      , shaderTerminateInvocation( shaderTerminateInvocation_ )
+      , subgroupSizeControl( subgroupSizeControl_ )
+      , computeFullSubgroups( computeFullSubgroups_ )
+      , synchronization2( synchronization2_ )
+      , textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
+      , shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ )
+      , dynamicRendering( dynamicRendering_ )
+      , shaderIntegerDotProduct( shaderIntegerDotProduct_ )
+      , maintenance4( maintenance4_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan13Features( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkan13Features( *reinterpret_cast<PhysicalDeviceVulkan13Features const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVulkan13Features & operator=( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkan13Features & operator=( VkPhysicalDeviceVulkan13Features const & rhs )
VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      robustImageAccess = robustImageAccess_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inlineUniformBlock = inlineUniformBlock_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDescriptorBindingInlineUniformBlockUpdateAfterBind(
+      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
+      setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineCreationCacheControl = pipelineCreationCacheControl_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      privateData = privateData_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
+      setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
+      setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderTerminateInvocation = shaderTerminateInvocation_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subgroupSizeControl = subgroupSizeControl_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeFullSubgroups = computeFullSubgroups_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
+    {
+      synchronization2 = synchronization2_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
+      setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
+      setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dynamicRendering = dynamicRendering_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features
&
+      setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderIntegerDotProduct = shaderIntegerDotProduct_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maintenance4 = maintenance4_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceVulkan13Features const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan13Features *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan13Features &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan13Features *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       robustImageAccess,
+                       inlineUniformBlock,
+                       descriptorBindingInlineUniformBlockUpdateAfterBind,
+                       pipelineCreationCacheControl,
+                       privateData,
+                       shaderDemoteToHelperInvocation,
+                       shaderTerminateInvocation,
+                       subgroupSizeControl,
+                       computeFullSubgroups,
+                       synchronization2,
+                       textureCompressionASTC_HDR,
+                       shaderZeroInitializeWorkgroupMemory,
+                       dynamicRendering,
+                       shaderIntegerDotProduct,
+                       maintenance4 );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceVulkan13Features const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ) &&
+             ( inlineUniformBlock == rhs.inlineUniformBlock ) &&
+             ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ) &&
+             ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ) && ( privateData == rhs.privateData ) &&
+             ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ) && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation ) &&
+             ( subgroupSizeControl == rhs.subgroupSizeControl ) && ( computeFullSubgroups == rhs.computeFullSubgroups ) &&
+             ( synchronization2 == rhs.synchronization2 ) && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ) &&
+             ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory ) && ( dynamicRendering == rhs.dynamicRendering ) &&
+             ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct ) && ( maintenance4 == rhs.maintenance4 );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Features;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
+    VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
+    VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
+    VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR
= {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {};
+    VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan13Features>
+  {
+    using Type = PhysicalDeviceVulkan13Features;
+  };
+
+  struct PhysicalDeviceVulkan13Properties
+  {
+    using NativeType = VkPhysicalDeviceVulkan13Properties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Properties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceVulkan13Properties( uint32_t minSubgroupSize_ = {},
+                                        uint32_t maxSubgroupSize_ = {},
+                                        uint32_t maxComputeWorkgroupSubgroups_ = {},
+                                        VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {},
+                                        uint32_t maxInlineUniformBlockSize_ = {},
+                                        uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {},
+                                        uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {},
+                                        uint32_t maxDescriptorSetInlineUniformBlocks_ = {},
+                                        uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {},
+                                        uint32_t maxInlineUniformTotalSize_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {},
+                                        VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {},
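+                                        // Editorial sketch (not part of the generated header): features such as
+                                        // PhysicalDeviceVulkan13Features above are normally queried through a
+                                        // structure chain before being enabled; the dispatcher setup is assumed.
+                                        //
+                                        //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
+                                        //                                            vk::PhysicalDeviceVulkan13Features>();
+                                        //   auto vk13  = chain.get<vk::PhysicalDeviceVulkan13Features>();
+                                        //   if ( vk13.dynamicRendering ) { /* take the dynamic-rendering path */ }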
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , minSubgroupSize( minSubgroupSize_ ) + , maxSubgroupSize( maxSubgroupSize_ ) + , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ) + , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ ) + , maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ) + , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ) + , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ) + , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ) + , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ) + , maxInlineUniformTotalSize( maxInlineUniformTotalSize_ ) + , integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ ) + , integerDotProduct8BitSignedAccelerated( integerDotProduct8BitSignedAccelerated_ ) + , integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ ) + , integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ ) + , integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ ) + , integerDotProduct4x8BitPackedMixedSignednessAccelerated( integerDotProduct4x8BitPackedMixedSignednessAccelerated_ ) + , integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ ) + , integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ ) + , integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ ) + , integerDotProduct32BitUnsignedAccelerated( integerDotProduct32BitUnsignedAccelerated_ ) + , integerDotProduct32BitSignedAccelerated( integerDotProduct32BitSignedAccelerated_ ) + , integerDotProduct32BitMixedSignednessAccelerated( integerDotProduct32BitMixedSignednessAccelerated_ ) + , integerDotProduct64BitUnsignedAccelerated( integerDotProduct64BitUnsignedAccelerated_ ) + , integerDotProduct64BitSignedAccelerated( integerDotProduct64BitSignedAccelerated_ ) + , integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ ) + , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ ) + , integerDotProductAccumulatingSaturating8BitSignedAccelerated( integerDotProductAccumulatingSaturating8BitSignedAccelerated_ ) + , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( 
integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ )
+      , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated(
+          integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ )
+      , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating16BitSignedAccelerated( integerDotProductAccumulatingSaturating16BitSignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ )
+      , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating32BitSignedAccelerated( integerDotProductAccumulatingSaturating32BitSignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ )
+      , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating64BitSignedAccelerated( integerDotProductAccumulatingSaturating64BitSignedAccelerated_ )
+      , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ )
+      , storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ )
+      , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ )
+      , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ )
+      , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
+      , maxBufferSize( maxBufferSize_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceVulkan13Properties( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVulkan13Properties( *reinterpret_cast<PhysicalDeviceVulkan13Properties const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceVulkan13Properties & operator=( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceVulkan13Properties & operator=( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceVulkan13Properties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceVulkan13Properties *>( this );
+    }
+
+    operator VkPhysicalDeviceVulkan13Properties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceVulkan13Properties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::ShaderStageFlags const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       minSubgroupSize,
+                       maxSubgroupSize,
+                       maxComputeWorkgroupSubgroups,
+                       requiredSubgroupSizeStages,
+                       maxInlineUniformBlockSize,
+                       maxPerStageDescriptorInlineUniformBlocks,
+                       maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks,
maxDescriptorSetInlineUniformBlocks, + maxDescriptorSetUpdateAfterBindInlineUniformBlocks, + maxInlineUniformTotalSize, + integerDotProduct8BitUnsignedAccelerated, + integerDotProduct8BitSignedAccelerated, + integerDotProduct8BitMixedSignednessAccelerated, + integerDotProduct4x8BitPackedUnsignedAccelerated, + integerDotProduct4x8BitPackedSignedAccelerated, + integerDotProduct4x8BitPackedMixedSignednessAccelerated, + integerDotProduct16BitUnsignedAccelerated, + integerDotProduct16BitSignedAccelerated, + integerDotProduct16BitMixedSignednessAccelerated, + integerDotProduct32BitUnsignedAccelerated, + integerDotProduct32BitSignedAccelerated, + integerDotProduct32BitMixedSignednessAccelerated, + integerDotProduct64BitUnsignedAccelerated, + integerDotProduct64BitSignedAccelerated, + integerDotProduct64BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating8BitSignedAccelerated, + integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating16BitSignedAccelerated, + integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating32BitSignedAccelerated, + integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating64BitSignedAccelerated, + integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated, + storageTexelBufferOffsetAlignmentBytes, + storageTexelBufferOffsetSingleTexelAlignment, + uniformTexelBufferOffsetAlignmentBytes, + uniformTexelBufferOffsetSingleTexelAlignment, + maxBufferSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan13Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && ( maxSubgroupSize == rhs.maxSubgroupSize ) && + ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ) && + ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) && + ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) && + ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) && + ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) && + ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ) && + ( maxInlineUniformTotalSize == rhs.maxInlineUniformTotalSize ) && + ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) && + ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) && + ( integerDotProduct8BitMixedSignednessAccelerated == 
rhs.integerDotProduct8BitMixedSignednessAccelerated ) && + ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) && + ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) && + ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) && + ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) && + ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) && + ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated ) && + ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) && + ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) && + ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated ) && + ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) && + ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated ) && + ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == + 
rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ) && + ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) && + ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) && + ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) && + ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ) && ( maxBufferSize == rhs.maxBufferSize ); +# endif + } + + bool operator!=( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Properties; + void * pNext = {}; + uint32_t minSubgroupSize = {}; + uint32_t maxSubgroupSize = {}; + uint32_t maxComputeWorkgroupSubgroups = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {}; + uint32_t maxInlineUniformBlockSize = {}; + uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; + uint32_t maxDescriptorSetInlineUniformBlocks = {}; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; + uint32_t maxInlineUniformTotalSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 
integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan13Properties; + }; + + struct PhysicalDeviceVulkanMemoryModelFeatures + { + using NativeType = VkPhysicalDeviceVulkanMemoryModelFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , vulkanMemoryModel( vulkanMemoryModel_ ) + , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ) + , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkanMemoryModelFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVulkanMemoryModelFeatures & operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & + setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModel = vulkanMemoryModel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & + setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & + setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 
vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) && + ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) && + ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ); +# endif + } + + bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkanMemoryModelFeatures; + }; + + using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures; + + struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR + { + using NativeType = VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , workgroupMemoryExplicitLayout( workgroupMemoryExplicitLayout_ ) + , workgroupMemoryExplicitLayoutScalarBlockLayout( workgroupMemoryExplicitLayoutScalarBlockLayout_ ) + , workgroupMemoryExplicitLayout8BitAccess( workgroupMemoryExplicitLayout8BitAccess_ ) + , workgroupMemoryExplicitLayout16BitAccess( workgroupMemoryExplicitLayout16BitAccess_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + 
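// Usage sketch (illustrative only, not part of the generated header): like every
+    // Vk*Features struct in this file, this one is typically chained into a
+    // vk::PhysicalDeviceFeatures2 query before device creation; `physicalDevice` is an
+    // assumed, valid VULKAN_HPP_NAMESPACE::PhysicalDevice.
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR wmelFeatures{};
+    //   VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features2{};
+    //   features2.pNext = &wmelFeatures;
+    //   physicalDevice.getFeatures2( &features2 );  // members of wmelFeatures now report support
+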
PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + operator=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + operator=( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setWorkgroupMemoryExplicitLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setWorkgroupMemoryExplicitLayoutScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayoutScalarBlockLayout = workgroupMemoryExplicitLayoutScalarBlockLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setWorkgroupMemoryExplicitLayout8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayout8BitAccess = workgroupMemoryExplicitLayout8BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setWorkgroupMemoryExplicitLayout16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayout16BitAccess = workgroupMemoryExplicitLayout16BitAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + workgroupMemoryExplicitLayout, + workgroupMemoryExplicitLayoutScalarBlockLayout, + workgroupMemoryExplicitLayout8BitAccess, + workgroupMemoryExplicitLayout16BitAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout ) && + ( workgroupMemoryExplicitLayoutScalarBlockLayout == rhs.workgroupMemoryExplicitLayoutScalarBlockLayout ) && + ( 
workgroupMemoryExplicitLayout8BitAccess == rhs.workgroupMemoryExplicitLayout8BitAccess ) && + ( workgroupMemoryExplicitLayout16BitAccess == rhs.workgroupMemoryExplicitLayout16BitAccess ); +# endif + } + + bool operator!=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + }; + + struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT + { + using NativeType = VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , ycbcr2plane444Formats( ycbcr2plane444Formats_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & + setYcbcr2plane444Formats( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ ) VULKAN_HPP_NOEXCEPT + { + ycbcr2plane444Formats = ycbcr2plane444Formats_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, ycbcr2plane444Formats ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & ) const = default; +#else + bool operator==( 
PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcr2plane444Formats == rhs.ycbcr2plane444Formats ); +# endif + } + + bool operator!=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + }; + + struct PhysicalDeviceYcbcrDegammaFeaturesQCOM + { + using NativeType = VkPhysicalDeviceYcbcrDegammaFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrDegammaFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 ycbcrDegamma_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , ycbcrDegamma( ycbcrDegamma_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrDegammaFeaturesQCOM( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceYcbcrDegammaFeaturesQCOM( VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceYcbcrDegammaFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceYcbcrDegammaFeaturesQCOM & operator=( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceYcbcrDegammaFeaturesQCOM & operator=( VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrDegammaFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrDegammaFeaturesQCOM & setYcbcrDegamma( VULKAN_HPP_NAMESPACE::Bool32 ycbcrDegamma_ ) VULKAN_HPP_NOEXCEPT + { + ycbcrDegamma = ycbcrDegamma_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, ycbcrDegamma ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrDegamma == rhs.ycbcrDegamma ); +# endif + } + + bool operator!=( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
!operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM;
+    void *                              pNext        = {};
+    VULKAN_HPP_NAMESPACE::Bool32        ycbcrDegamma = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM>
+  {
+    using Type = PhysicalDeviceYcbcrDegammaFeaturesQCOM;
+  };
+
+  struct PhysicalDeviceYcbcrImageArraysFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceYcbcrImageArraysFeaturesEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {},
+                                                                    void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , ycbcrImageArrays( ycbcrImageArrays_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceYcbcrImageArraysFeaturesEXT( *reinterpret_cast<PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT &
+      setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrImageArrays = ycbcrImageArrays_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, ycbcrImageArrays );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrImageArrays == rhs.ycbcrImageArrays );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
+    void *                              pNext            = {};
+    VULKAN_HPP_NAMESPACE::Bool32        ycbcrImageArrays = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT>
+  {
+    using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT;
+  };
+
+  struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures
+  {
+    using NativeType = VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {},
+                                                                              void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( *reinterpret_cast<PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &
+      operator=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &
+      setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderZeroInitializeWorkgroupMemory );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                               = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+    void *                              pNext                               = {};
+    VULKAN_HPP_NAMESPACE::Bool32        shaderZeroInitializeWorkgroupMemory = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>
+  {
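+    // These CppType specializations give a compile-time mapping from a StructureType
+    // enumerant back to the C++ struct that carries it, so generic code can recover
+    // the struct type from an sType value.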
+ using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + }; + + using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + + struct PipelineCacheCreateInfo + { + using NativeType = VkPipelineCacheCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, + size_t initialDataSize_ = {}, + const void * pInitialData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , initialDataSize( initialDataSize_ ) + , pInitialData( pInitialData_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCacheCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT + { + initialDataSize = initialDataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT + { + pInitialData = pInitialData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineCacheCreateInfo & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) VULKAN_HPP_NOEXCEPT + { + initialDataSize = initialData_.size() * sizeof( T ); + pInitialData = initialData_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, initialDataSize, pInitialData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PipelineCacheCreateInfo const & ) const = default; +#else + bool operator==( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( initialDataSize == rhs.initialDataSize ) && + ( pInitialData == rhs.pInitialData ); +# endif + } + + bool operator!=( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {}; + size_t initialDataSize = {}; + const void * pInitialData = {}; + }; + + template <> + struct CppType + { + using Type = PipelineCacheCreateInfo; + }; + + struct PipelineCacheHeaderVersionOne + { + using NativeType = VkPipelineCacheHeaderVersionOne; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PipelineCacheHeaderVersionOne( uint32_t headerSize_ = {}, + VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne, + uint32_t vendorID_ = {}, + uint32_t deviceID_ = {}, + std::array const & pipelineCacheUUID_ = {} ) VULKAN_HPP_NOEXCEPT + : headerSize( headerSize_ ) + , headerVersion( headerVersion_ ) + , vendorID( vendorID_ ) + , deviceID( deviceID_ ) + , pipelineCacheUUID( pipelineCacheUUID_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCacheHeaderVersionOne( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCacheHeaderVersionOne( *reinterpret_cast( &rhs ) ) + { + } + + PipelineCacheHeaderVersionOne & operator=( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineCacheHeaderVersionOne & operator=( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT + { + headerSize = headerSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & + setHeaderVersion( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ ) VULKAN_HPP_NOEXCEPT + { + headerVersion = headerVersion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT + { + vendorID = vendorID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT + { + deviceID = deviceID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setPipelineCacheUUID( std::array pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCacheUUID = pipelineCacheUUID_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineCacheHeaderVersionOne const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCacheHeaderVersionOne &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# 
endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( headerSize, headerVersion, vendorID, deviceID, pipelineCacheUUID ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCacheHeaderVersionOne const & ) const = default; +#else + bool operator==( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && + ( pipelineCacheUUID == rhs.pipelineCacheUUID ); +# endif + } + + bool operator!=( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t headerSize = {}; + VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne; + uint32_t vendorID = {}; + uint32_t deviceID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; + }; + + struct PipelineColorBlendAdvancedStateCreateInfoEXT + { + using NativeType = VkPipelineColorBlendAdvancedStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineColorBlendAdvancedStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , srcPremultiplied( srcPremultiplied_ ) + , dstPremultiplied( dstPremultiplied_ ) + , blendOverlap( blendOverlap_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorBlendAdvancedStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & + setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT + { + srcPremultiplied = srcPremultiplied_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & + setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT + { + dstPremultiplied = dstPremultiplied_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & + setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) 
VULKAN_HPP_NOEXCEPT + { + blendOverlap = blendOverlap_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcPremultiplied, dstPremultiplied, blendOverlap ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcPremultiplied == rhs.srcPremultiplied ) && ( dstPremultiplied == rhs.dstPremultiplied ) && + ( blendOverlap == rhs.blendOverlap ); +# endif + } + + bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; + }; + + template <> + struct CppType + { + using Type = PipelineColorBlendAdvancedStateCreateInfoEXT; + }; + + struct PipelineColorWriteCreateInfoEXT + { + using NativeType = VkPipelineColorWriteCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorWriteCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , attachmentCount( attachmentCount_ ) + , pColorWriteEnables( pColorWriteEnables_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineColorWriteCreateInfoEXT( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorWriteCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorWriteCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorWriteEnables_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), attachmentCount( static_cast( colorWriteEnables_.size() ) ), pColorWriteEnables( colorWriteEnables_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineColorWriteCreateInfoEXT & operator=( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineColorWriteCreateInfoEXT & operator=( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
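// Usage sketch (illustrative only; `enables` is an assumed std::array<vk::Bool32, 2>
+    // matching the pipeline's attachment count): the setters below chain fluently, e.g.
+    //   auto colorWriteInfo =
+    //     VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT{}.setColorWriteEnables( enables );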
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT &
+      setPColorWriteEnables( const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorWriteEnables = pColorWriteEnables_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineColorWriteCreateInfoEXT &
+      setColorWriteEnables( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount    = static_cast<uint32_t>( colorWriteEnables_.size() );
+      pColorWriteEnables = colorWriteEnables_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineColorWriteCreateInfoEXT *>( this );
+    }
+
+    operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineColorWriteCreateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Bool32 * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, attachmentCount, pColorWriteEnables );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineColorWriteCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && ( pColorWriteEnables == rhs.pColorWriteEnables );
+# endif
+    }
+
+    bool operator!=( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType  sType              = StructureType::ePipelineColorWriteCreateInfoEXT;
+    const void *                         pNext              = {};
+    uint32_t                             attachmentCount    = {};
+    const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineColorWriteCreateInfoEXT>
+  {
+    using Type = PipelineColorWriteCreateInfoEXT;
+  };
+
+  struct PipelineCompilerControlCreateInfoAMD
+  {
+    using NativeType = VkPipelineCompilerControlCreateInfoAMD;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCompilerControlCreateInfoAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {},
+                                                               const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , compilerControlFlags( compilerControlFlags_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCompilerControlCreateInfoAMD( *reinterpret_cast<PipelineCompilerControlCreateInfoAMD const *>( &rhs ) )
+    {
+    }
+
+    PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineCompilerControlCreateInfoAMD & operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & + setCompilerControlFlags( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT + { + compilerControlFlags = compilerControlFlags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, compilerControlFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCompilerControlCreateInfoAMD const & ) const = default; +#else + bool operator==( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compilerControlFlags == rhs.compilerControlFlags ); +# endif + } + + bool operator!=( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {}; + }; + + template <> + struct CppType + { + using Type = PipelineCompilerControlCreateInfoAMD; + }; + + struct PipelineCoverageModulationStateCreateInfoNV + { + using NativeType = VkPipelineCoverageModulationStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, + uint32_t coverageModulationTableCount_ = {}, + const float * pCoverageModulationTable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , coverageModulationMode( coverageModulationMode_ ) + , coverageModulationTableEnable( coverageModulationTableEnable_ ) + , coverageModulationTableCount( coverageModulationTableCount_ ) + , pCoverageModulationTable( pCoverageModulationTable_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs 
) VULKAN_HPP_NOEXCEPT
+      : PipelineCoverageModulationStateCreateInfoNV( *reinterpret_cast<PipelineCoverageModulationStateCreateInfoNV const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_,
+                                                 VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_,
+                                                 VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_,
+                                                 const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , coverageModulationMode( coverageModulationMode_ )
+      , coverageModulationTableEnable( coverageModulationTableEnable_ )
+      , coverageModulationTableCount( static_cast<uint32_t>( coverageModulationTable_.size() ) )
+      , pCoverageModulationTable( coverageModulationTable_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineCoverageModulationStateCreateInfoNV & operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
+      setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
+      setCoverageModulationMode( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageModulationMode = coverageModulationMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
+      setCoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageModulationTableEnable = coverageModulationTableEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
+      setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageModulationTableCount = coverageModulationTableCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
+      setPCoverageModulationTable( const float * pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCoverageModulationTable = pCoverageModulationTable_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineCoverageModulationStateCreateInfoNV &
+      setCoverageModulationTable( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageModulationTableCount = static_cast<uint32_t>( coverageModulationTable_.size() );
+      pCoverageModulationTable     = coverageModulationTable_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV *>( this );
+    }
+
+    operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV const &,
+               VULKAN_HPP_NAMESPACE::CoverageModulationModeNV const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               uint32_t const &,
+               const float * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, coverageModulationMode, coverageModulationTableEnable, coverageModulationTableCount, pCoverageModulationTable );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const & ) const = default;
+#else
+    bool operator==( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageModulationMode == rhs.coverageModulationMode ) &&
+             ( coverageModulationTableEnable == rhs.coverageModulationTableEnable ) && ( coverageModulationTableCount == rhs.coverageModulationTableCount ) &&
+             ( pCoverageModulationTable == rhs.pCoverageModulationTable );
+# endif
+    }
+
+    bool operator!=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone;
+    VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {};
+    uint32_t coverageModulationTableCount = {};
+    const float * pCoverageModulationTable = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCoverageModulationStateCreateInfoNV>
+  {
+    using Type = PipelineCoverageModulationStateCreateInfoNV;
+  };
+
+  struct PipelineCoverageReductionStateCreateInfoNV
+  {
+    using NativeType = VkPipelineCoverageReductionStateCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV(
+      VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {},
+      VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge,
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , coverageReductionMode( coverageReductionMode_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCoverageReductionStateCreateInfoNV( *reinterpret_cast<PipelineCoverageReductionStateCreateInfoNV const *>( &rhs ) )
+    {
+    }
+
+    PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineCoverageReductionStateCreateInfoNV & operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV &
+      setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV &
+      setCoverageReductionMode( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageReductionMode = coverageReductionMode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV *>( this );
+    }
+
+    operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV const &,
+               VULKAN_HPP_NAMESPACE::CoverageReductionModeNV const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, coverageReductionMode );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const & ) const = default;
+#else
+    bool operator==( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageReductionMode == rhs.coverageReductionMode );
+# endif
+    }
+
+    bool operator!=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCoverageReductionStateCreateInfoNV>
+  {
+    using Type = PipelineCoverageReductionStateCreateInfoNV;
+  };
+
+  struct PipelineCoverageToColorStateCreateInfoNV
+  {
+    using NativeType = VkPipelineCoverageToColorStateCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {},
+                                                                   uint32_t coverageToColorLocation_ = {},
+                                                                   const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , coverageToColorEnable( coverageToColorEnable_ )
+      , coverageToColorLocation( coverageToColorLocation_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCoverageToColorStateCreateInfoNV( *reinterpret_cast<PipelineCoverageToColorStateCreateInfoNV const *>( &rhs ) )
+    {
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineCoverageToColorStateCreateInfoNV & operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV &
+      setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV &
+      setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageToColorEnable = coverageToColorEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageToColorLocation = coverageToColorLocation_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV *>( this );
+    }
+
+    operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, coverageToColorEnable, coverageToColorLocation );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const & ) const = default;
+#else
+    bool operator==( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageToColorEnable == rhs.coverageToColorEnable ) &&
+             ( coverageToColorLocation == rhs.coverageToColorLocation );
+# endif
+    }
+
+    bool operator!=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {};
+    uint32_t coverageToColorLocation = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCoverageToColorStateCreateInfoNV>
+  {
+    using Type = PipelineCoverageToColorStateCreateInfoNV;
+  };
+
+  struct PipelineCreateFlags2CreateInfoKHR
+  {
+    using NativeType = VkPipelineCreateFlags2CreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreateFlags2CreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR flags_ = {},
+                                                            const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfoKHR( PipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCreateFlags2CreateInfoKHR( VkPipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCreateFlags2CreateInfoKHR( *reinterpret_cast<PipelineCreateFlags2CreateInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    PipelineCreateFlags2CreateInfoKHR & operator=( PipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineCreateFlags2CreateInfoKHR & operator=( VkPipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineCreateFlags2CreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCreateFlags2CreateInfoKHR *>( this );
+    }
+
+    operator VkPipelineCreateFlags2CreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCreateFlags2CreateInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineCreateFlags2CreateInfoKHR const & ) const = default;
+#else
+    bool operator==( PipelineCreateFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
+# endif
+    }
+
+    bool operator!=( PipelineCreateFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreateFlags2CreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR flags = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCreateFlags2CreateInfoKHR>
+  {
+    using Type = PipelineCreateFlags2CreateInfoKHR;
+  };
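+  // Example (an illustrative sketch, not part of the generated header; `pipelineInfo` is an
+  // assumed GraphicsPipelineCreateInfo and VK_KHR_maintenance5 must be enabled): the struct
+  // is chained through pNext to supply 64-bit create flags in place of the legacy 32-bit field.
+  //
+  //   VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR flags2{
+  //     VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits2KHR::eDisableOptimization };
+  //   pipelineInfo.pNext = &flags2;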
+  struct PipelineCreationFeedback
+  {
+    using NativeType = VkPipelineCreationFeedback;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags_ = {},
+                                                   uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , duration( duration_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCreationFeedback( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCreationFeedback( *reinterpret_cast<PipelineCreationFeedback const *>( &rhs ) )
+    {
+    }
+
+    PipelineCreationFeedback & operator=( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineCreationFeedback & operator=( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPipelineCreationFeedback const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCreationFeedback *>( this );
+    }
+
+    operator VkPipelineCreationFeedback &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCreationFeedback *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags const &, uint64_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( flags, duration );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineCreationFeedback const & ) const = default;
+#else
+    bool operator==( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( flags == rhs.flags ) && ( duration == rhs.duration );
+# endif
+    }
+
+    bool operator!=( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags = {};
+    uint64_t duration = {};
+  };
+
+  using PipelineCreationFeedbackEXT = PipelineCreationFeedback;
+
+  struct PipelineCreationFeedbackCreateInfo
+  {
+    using NativeType = VkPipelineCreationFeedbackCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreationFeedbackCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ = {},
+                                                             uint32_t pipelineStageCreationFeedbackCount_ = {},
+                                                             VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ = {},
+                                                             const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , pPipelineCreationFeedback( pPipelineCreationFeedback_ )
+      , pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ )
+      , pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineCreationFeedbackCreateInfo( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineCreationFeedbackCreateInfo( *reinterpret_cast<PipelineCreationFeedbackCreateInfo const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineCreationFeedbackCreateInfo(
+      VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback> const & pipelineStageCreationFeedbacks_,
+      const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , pPipelineCreationFeedback( pPipelineCreationFeedback_ )
+      , pipelineStageCreationFeedbackCount( static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() ) )
+      , pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    PipelineCreationFeedbackCreateInfo & operator=( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineCreationFeedbackCreateInfo & operator=( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo &
+      setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPipelineCreationFeedback = pPipelineCreationFeedback_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo &
+      setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo &
+      setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineCreationFeedbackCreateInfo & setPipelineStageCreationFeedbacks(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback> const & pipelineStageCreationFeedbacks_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStageCreationFeedbackCount = static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() );
+      pPipelineStageCreationFeedbacks    = pipelineStageCreationFeedbacks_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineCreationFeedbackCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCreationFeedbackCreateInfo *>( this );
+    }
+
+    operator VkPipelineCreationFeedbackCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCreationFeedbackCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pPipelineCreationFeedback, pipelineStageCreationFeedbackCount, pPipelineStageCreationFeedbacks );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineCreationFeedbackCreateInfo const & ) const = default;
+#else
+    bool operator==( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback ) &&
+             ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount ) &&
+             ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks );
+# endif
+    }
+
+    bool operator!=( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback = {};
+    uint32_t pipelineStageCreationFeedbackCount = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineCreationFeedbackCreateInfo>
+  {
+    using Type = PipelineCreationFeedbackCreateInfo;
+  };
+
+  using PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo;
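+  // Example (an illustrative sketch; `device` and `pipelineInfo` are assumed to exist):
+  // chaining the feedback struct lets the driver report creation timing; `duration` is in
+  // nanoseconds and is meaningful once the eValid flag bit is set.
+  //
+  //   VULKAN_HPP_NAMESPACE::PipelineCreationFeedback whole{}, stage{};
+  //   VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo feedback{ &whole, 1, &stage };
+  //   pipelineInfo.pNext = &feedback;
+  //   auto pipeline = device.createGraphicsPipelineUnique( nullptr, pipelineInfo ).value;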
+  struct PipelineDiscardRectangleStateCreateInfoEXT
+  {
+    using NativeType = VkPipelineDiscardRectangleStateCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT(
+      VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {},
+      VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive,
+      uint32_t discardRectangleCount_ = {},
+      const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ = {},
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , discardRectangleMode( discardRectangleMode_ )
+      , discardRectangleCount( discardRectangleCount_ )
+      , pDiscardRectangles( pDiscardRectangles_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineDiscardRectangleStateCreateInfoEXT( *reinterpret_cast<PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_,
+                                                VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_,
+                                                VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_,
+                                                const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , discardRectangleMode( discardRectangleMode_ )
+      , discardRectangleCount( static_cast<uint32_t>( discardRectangles_.size() ) )
+      , pDiscardRectangles( discardRectangles_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineDiscardRectangleStateCreateInfoEXT & operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &
+      setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &
+      setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      discardRectangleMode = discardRectangleMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      discardRectangleCount = discardRectangleCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &
+      setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDiscardRectangles = pDiscardRectangles_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineDiscardRectangleStateCreateInfoEXT &
+      setDiscardRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      discardRectangleCount = static_cast<uint32_t>( discardRectangles_.size() );
+      pDiscardRectangles    = discardRectangles_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT *>( this );
+    }
+
+    operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT const &,
+               VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::Rect2D * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, discardRectangleMode, discardRectangleCount, pDiscardRectangles );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( discardRectangleMode == rhs.discardRectangleMode ) &&
+             ( discardRectangleCount == rhs.discardRectangleCount ) && ( pDiscardRectangles == rhs.pDiscardRectangles );
+# endif
+    }
+
+    bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive;
+    uint32_t discardRectangleCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineDiscardRectangleStateCreateInfoEXT>
+  {
+    using Type = PipelineDiscardRectangleStateCreateInfoEXT;
+  };
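+  // Example (an illustrative sketch; VK_EXT_discard_rectangles assumed enabled): the
+  // ArrayProxyNoTemporaries setter keeps discardRectangleCount and pDiscardRectangles
+  // consistent from a single call.
+  //
+  //   std::array<VULKAN_HPP_NAMESPACE::Rect2D, 1> rects = { VULKAN_HPP_NAMESPACE::Rect2D{ { 0, 0 }, { 640, 480 } } };
+  //   VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT discardState{};
+  //   discardState.setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eExclusive )
+  //               .setDiscardRectangles( rects );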
+  struct PipelineExecutableInfoKHR
+  {
+    using NativeType = VkPipelineExecutableInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {},
+                                                    uint32_t executableIndex_ = {},
+                                                    const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , pipeline( pipeline_ )
+      , executableIndex( executableIndex_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineExecutableInfoKHR( *reinterpret_cast<PipelineExecutableInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      executableIndex = executableIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableInfoKHR *>( this );
+    }
+
+    operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipeline, executableIndex );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineExecutableInfoKHR const & ) const = default;
+#else
+    bool operator==( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ) && ( executableIndex == rhs.executableIndex );
+# endif
+    }
+
+    bool operator!=( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+    uint32_t executableIndex = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineExecutableInfoKHR>
+  {
+    using Type = PipelineExecutableInfoKHR;
+  };
+
+  struct PipelineExecutableInternalRepresentationKHR
+  {
+    using NativeType = VkPipelineExecutableInternalRepresentationKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInternalRepresentationKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_ = {},
+                                                                         std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
+                                                                         VULKAN_HPP_NAMESPACE::Bool32 isText_ = {},
+                                                                         size_t dataSize_ = {},
+                                                                         void * pData_ = {},
+                                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , name( name_ )
+      , description( description_ )
+      , isText( isText_ )
+      , dataSize( dataSize_ )
+      , pData( pData_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14
+      PipelineExecutableInternalRepresentationKHR( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineExecutableInternalRepresentationKHR( *reinterpret_cast<PipelineExecutableInternalRepresentationKHR const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    PipelineExecutableInternalRepresentationKHR( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_,
+                                                 std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 isText_,
+                                                 VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_,
+                                                 void * pNext_ = nullptr )
+      : pNext( pNext_ ), name( name_ ), description( description_ ), isText( isText_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineExecutableInternalRepresentationKHR & operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR *>( this );
+    }
+
+    operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               size_t const &,
+               void * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, name, description, isText, dataSize, pData );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineExecutableInternalRepresentationKHR const & ) const = default;
+#else
+    bool operator==( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( description == rhs.description ) && ( isText == rhs.isText ) &&
+             ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
+# endif
+    }
+
+    bool operator!=( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    VULKAN_HPP_NAMESPACE::Bool32 isText = {};
+    size_t dataSize = {};
+    void * pData = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineExecutableInternalRepresentationKHR>
+  {
+    using Type = PipelineExecutableInternalRepresentationKHR;
+  };
+
+  struct PipelineExecutablePropertiesKHR
+  {
+    using NativeType = VkPipelineExecutablePropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {},
+                                                             std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_ = {},
+                                                             std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
+                                                             uint32_t subgroupSize_ = {},
+                                                             void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , stages( stages_ )
+      , name( name_ )
+      , description( description_ )
+      , subgroupSize( subgroupSize_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineExecutablePropertiesKHR( *reinterpret_cast<PipelineExecutablePropertiesKHR const *>( &rhs ) )
+    {
+    }
+
+    PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutablePropertiesKHR *>( this );
+    }
+
+    operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::ShaderStageFlags const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
+               uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stages, name, description, subgroupSize );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineExecutablePropertiesKHR const & ) const = default;
+#else
+    bool operator==( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stages == rhs.stages ) && ( name == rhs.name ) && ( description == rhs.description ) &&
+             ( subgroupSize == rhs.subgroupSize );
+# endif
+    }
+
+    bool operator!=( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    uint32_t subgroupSize = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineExecutablePropertiesKHR>
+  {
+    using Type = PipelineExecutablePropertiesKHR;
+  };
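+  // Example (an illustrative sketch; `device` and `pipeline` are assumed handles, and the
+  // pipelineExecutableInfo feature of VK_KHR_pipeline_executable_properties is enabled):
+  //
+  //   auto executables = device.getPipelineExecutablePropertiesKHR(
+  //     VULKAN_HPP_NAMESPACE::PipelineInfoKHR{ pipeline } );
+  //   auto statistics  = device.getPipelineExecutableStatisticsKHR(
+  //     VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR{ pipeline, 0 } );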
+  union PipelineExecutableStatisticValueKHR
+  {
+    using NativeType = VkPipelineExecutableStatisticValueKHR;
+#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} ) : b32( b32_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( int64_t i64_ ) : i64( i64_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( uint64_t u64_ ) : u64( u64_ ) {}
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( double f64_ ) : f64( f64_ ) {}
+#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/
+
+#if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      b32 = b32_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      i64 = i64_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      u64 = u64_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      f64 = f64_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_UNION_SETTERS*/
+
+    operator VkPipelineExecutableStatisticValueKHR const &() const
+    {
+      return *reinterpret_cast<const VkPipelineExecutableStatisticValueKHR *>( this );
+    }
+
+    operator VkPipelineExecutableStatisticValueKHR &()
+    {
+      return *reinterpret_cast<VkPipelineExecutableStatisticValueKHR *>( this );
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::Bool32 b32;
+    int64_t i64;
+    uint64_t u64;
+    double f64;
+#else
+    VkBool32 b32;
+    int64_t i64;
+    uint64_t u64;
+    double f64;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+
+  struct PipelineExecutableStatisticKHR
+  {
+    using NativeType = VkPipelineExecutableStatisticKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR(
+      std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_ = {},
+      std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
+      VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32,
+      VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {},
+      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , name( name_ )
+      , description( description_ )
+      , format( format_ )
+      , value( value_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineExecutableStatisticKHR( *reinterpret_cast<PipelineExecutableStatisticKHR const *>( &rhs ) )
+    {
+    }
+
+    PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPipelineExecutableStatisticKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableStatisticKHR *>( this );
+    }
+
+    operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableStatisticKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR const &,
+               VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, name, description, format, value );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineExecutableStatisticKHR>
+  {
+    using Type = PipelineExecutableStatisticKHR;
+  };
+
+  struct PipelineFragmentShadingRateEnumStateCreateInfoNV
+  {
+    using NativeType = VkPipelineFragmentShadingRateEnumStateCreateInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV(
+      VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize,
+      VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel,
+      std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &
+        combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } },
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , shadingRateType( shadingRateType_ )
+      , shadingRate( shadingRate_ )
+      , combinerOps( combinerOps_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14
+      PipelineFragmentShadingRateEnumStateCreateInfoNV( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineFragmentShadingRateEnumStateCreateInfoNV( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineFragmentShadingRateEnumStateCreateInfoNV( *reinterpret_cast<PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs ) )
+    {
+    }
+
+    PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV &
+      setShadingRateType( VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateType = shadingRateType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV &
+      setShadingRate( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRate = shadingRate_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV &
+      setCombinerOps( std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
+    {
+      combinerOps = combinerOps_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineFragmentShadingRateEnumStateCreateInfoNV *>( this );
+    }
+
+    operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineFragmentShadingRateEnumStateCreateInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV const &,
+               VULKAN_HPP_NAMESPACE::FragmentShadingRateNV const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shadingRateType, shadingRate, combinerOps );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineFragmentShadingRateEnumStateCreateInfoNV const & ) const = default;
+#else
+    bool operator==( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateType == rhs.shadingRateType ) && ( shadingRate == rhs.shadingRate ) &&
+             ( combinerOps == rhs.combinerOps );
+# endif
+    }
+
+    bool operator!=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize;
+    VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel;
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV>
+  {
+    using Type = PipelineFragmentShadingRateEnumStateCreateInfoNV;
+  };
+
+  struct PipelineFragmentShadingRateStateCreateInfoKHR
+  {
+    using NativeType = VkPipelineFragmentShadingRateStateCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR(
+      VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {},
+      std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &
+        combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } },
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , fragmentSize( fragmentSize_ )
+      , combinerOps( combinerOps_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14
+      PipelineFragmentShadingRateStateCreateInfoKHR( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineFragmentShadingRateStateCreateInfoKHR( *reinterpret_cast<PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    PipelineFragmentShadingRateStateCreateInfoKHR & operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineFragmentShadingRateStateCreateInfoKHR & operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR &
+      setFragmentSize( VULKAN_HPP_NAMESPACE::Extent2D const & fragmentSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentSize = fragmentSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR &
+      setCombinerOps( std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
+    {
+      combinerOps = combinerOps_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineFragmentShadingRateStateCreateInfoKHR *>( this );
+    }
+
+    operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineFragmentShadingRateStateCreateInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::Extent2D const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, fragmentSize, combinerOps );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineFragmentShadingRateStateCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentSize == rhs.fragmentSize ) && ( combinerOps == rhs.combinerOps );
+# endif
+    }
+
+    bool operator!=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR>
+  {
+    using Type = PipelineFragmentShadingRateStateCreateInfoKHR;
+  };
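+  // Example (an illustrative sketch; VK_KHR_fragment_shading_rate assumed enabled): request
+  // a 2x2 pipeline fragment size and let a primitive/attachment rate replace it.
+  //
+  //   VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR shadingRate{
+  //     VULKAN_HPP_NAMESPACE::Extent2D{ 2, 2 },
+  //     { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep,
+  //       VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eReplace } };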
+  struct PipelineIndirectDeviceAddressInfoNV
+  {
+    using NativeType = VkPipelineIndirectDeviceAddressInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineIndirectDeviceAddressInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PipelineIndirectDeviceAddressInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+                                           VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {},
+                                           const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , pipelineBindPoint( pipelineBindPoint_ )
+      , pipeline( pipeline_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PipelineIndirectDeviceAddressInfoNV( PipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineIndirectDeviceAddressInfoNV( VkPipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineIndirectDeviceAddressInfoNV( *reinterpret_cast<PipelineIndirectDeviceAddressInfoNV const *>( &rhs ) )
+    {
+    }
+
+    PipelineIndirectDeviceAddressInfoNV & operator=( PipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineIndirectDeviceAddressInfoNV & operator=( VkPipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV &
+      setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineIndirectDeviceAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( this );
+    }
+
+    operator VkPipelineIndirectDeviceAddressInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineIndirectDeviceAddressInfoNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::PipelineBindPoint const &,
+               VULKAN_HPP_NAMESPACE::Pipeline const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipelineBindPoint, pipeline );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineIndirectDeviceAddressInfoNV const & ) const = default;
+#else
+    bool operator==( PipelineIndirectDeviceAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline );
+# endif
+    }
+
+    bool operator!=( PipelineIndirectDeviceAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineIndirectDeviceAddressInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineIndirectDeviceAddressInfoNV>
+  {
+    using Type = PipelineIndirectDeviceAddressInfoNV;
+  };
+
+  struct PipelineInfoKHR
+  {
+    using NativeType = VkPipelineInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , pipeline( pipeline_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PipelineInfoKHR( *reinterpret_cast<PipelineInfoKHR const *>( &rhs ) ) {}
+
+    PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineInfoKHR *>( this );
+    }
+
+    operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pipeline );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineInfoKHR const & ) const = default;
+#else
+    bool operator==( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline );
+# endif
+    }
+
+    bool operator!=( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineInfoKHR>
+  {
+    using Type = PipelineInfoKHR;
+  };
+
+  using PipelineInfoEXT = PipelineInfoKHR;
+
+  struct PushConstantRange
+  {
+    using NativeType = VkPushConstantRange;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT
+      : stageFlags( stageFlags_ )
+      , offset( offset_ )
+      , size( size_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT : PushConstantRange( *reinterpret_cast<PushConstantRange const *>( &rhs ) ) {}
+
+    PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPushConstantRange *>( this );
+    }
+
+    operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPushConstantRange *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, uint32_t const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( stageFlags, offset, size );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PushConstantRange const & ) const = default;
+#else
+    bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && ( size == rhs.size );
+# endif
+    }
+
+    bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+    uint32_t offset = {};
+    uint32_t size = {};
+  };
+
+  struct PipelineLayoutCreateInfo
+  {
+    using NativeType = VkPipelineLayoutCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {},
+                                                   uint32_t setLayoutCount_ = {},
+                                                   const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {},
+                                                   uint32_t pushConstantRangeCount_ = {},
+                                                   const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {},
+                                                   const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , setLayoutCount( setLayoutCount_ )
+      , pSetLayouts( pSetLayouts_ )
+      , pushConstantRangeCount( pushConstantRangeCount_ )
+      , pPushConstantRanges( pPushConstantRanges_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineLayoutCreateInfo( *reinterpret_cast<PipelineLayoutCreateInfo const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_,
+                              VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_,
+                              VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ = {},
+                              const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) )
+      , pSetLayouts( setLayouts_.data() )
+      , pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) )
+      , pPushConstantRanges( pushConstantRanges_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      setLayoutCount = setLayoutCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSetLayouts = pSetLayouts_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineLayoutCreateInfo &
+      setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
+      pSetLayouts    = setLayouts_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushConstantRangeCount = pushConstantRangeCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo &
+      setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPushConstantRanges = pPushConstantRanges_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PipelineLayoutCreateInfo & setPushConstantRanges(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
+      pPushConstantRanges    = pushConstantRanges_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineLayoutCreateInfo *>( this );
+    }
+
+    operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineLayoutCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::PushConstantRange * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, setLayoutCount, pSetLayouts, pushConstantRangeCount, pPushConstantRanges );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineLayoutCreateInfo const & ) const = default;
+#else
+    bool operator==( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( setLayoutCount == rhs.setLayoutCount ) &&
+             ( pSetLayouts == rhs.pSetLayouts ) && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) &&
+             ( pPushConstantRanges == rhs.pPushConstantRanges );
+# endif
+    }
+
+    bool operator!=( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {};
+    uint32_t setLayoutCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {};
+    uint32_t pushConstantRangeCount = {};
+    const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineLayoutCreateInfo>
+  {
+    using Type = PipelineLayoutCreateInfo;
+  };
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, + float extraPrimitiveOverestimationSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , conservativeRasterizationMode( conservativeRasterizationMode_ ) + , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationConservativeStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRasterizationConservativeStateCreateInfoEXT & + operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & + setConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + { + conservativeRasterizationMode = conservativeRasterizationMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & + setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT + { + extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, conservativeRasterizationMode, extraPrimitiveOverestimationSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( 
pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( conservativeRasterizationMode == rhs.conservativeRasterizationMode ) && + ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize ); +# endif + } + + bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled; + float extraPrimitiveOverestimationSize = {}; + }; + + template <> + struct CppType + { + using Type = PipelineRasterizationConservativeStateCreateInfoEXT; + }; + + struct PipelineRasterizationDepthClipStateCreateInfoEXT + { + using NativeType = VkPipelineRasterizationDepthClipStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , depthClipEnable( depthClipEnable_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationDepthClipStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & + setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthClipEnable = depthClipEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, depthClipEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthClipEnable == rhs.depthClipEnable ); +# endif + } + + bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + }; + + template <> + struct CppType + { + using Type = PipelineRasterizationDepthClipStateCreateInfoEXT; + }; + + struct PipelineRasterizationLineStateCreateInfoEXT + { + using NativeType = VkPipelineRasterizationLineStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, + VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, + uint32_t lineStippleFactor_ = {}, + uint16_t lineStipplePattern_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , lineRasterizationMode( lineRasterizationMode_ ) + , stippledLineEnable( stippledLineEnable_ ) + , lineStippleFactor( lineStippleFactor_ ) + , lineStipplePattern( lineStipplePattern_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationLineStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRasterizationLineStateCreateInfoEXT & operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRasterizationLineStateCreateInfoEXT & operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & + setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + { + lineRasterizationMode = lineRasterizationMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & + setStippledLineEnable( 
VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT + { + stippledLineEnable = stippledLineEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT + { + lineStippleFactor = lineStippleFactor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT + { + lineStipplePattern = lineStipplePattern_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineRasterizationLineStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, lineRasterizationMode, stippledLineEnable, lineStippleFactor, lineStipplePattern ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineRasterizationMode == rhs.lineRasterizationMode ) && + ( stippledLineEnable == rhs.stippledLineEnable ) && ( lineStippleFactor == rhs.lineStippleFactor ) && + ( lineStipplePattern == rhs.lineStipplePattern ); +# endif + } + + bool operator!=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault; + VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {}; + uint32_t lineStippleFactor = {}; + uint16_t lineStipplePattern = {}; + }; + + template <> + struct CppType + { + using Type = PipelineRasterizationLineStateCreateInfoEXT; + }; + + struct PipelineRasterizationProvokingVertexStateCreateInfoEXT + { + using NativeType = VkPipelineRasterizationProvokingVertexStateCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , provokingVertexMode( provokingVertexMode_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PipelineRasterizationProvokingVertexStateCreateInfoEXT( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : 
PipelineRasterizationProvokingVertexStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    PipelineRasterizationProvokingVertexStateCreateInfoEXT &
+      operator=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineRasterizationProvokingVertexStateCreateInfoEXT &
+      operator=( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT &
+      setProvokingVertexMode( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      provokingVertexMode = provokingVertexMode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *>( this );
+    }
+
+    operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, provokingVertexMode );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexMode == rhs.provokingVertexMode );
+#  endif
+    }
+
+    bool operator!=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT>
+  {
+    using Type = PipelineRasterizationProvokingVertexStateCreateInfoEXT;
+  };
+
+  struct PipelineRasterizationStateRasterizationOrderAMD
+  {
+    using NativeType = VkPipelineRasterizationStateRasterizationOrderAMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD(
+      VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict,
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , rasterizationOrder( rasterizationOrder_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PipelineRasterizationStateRasterizationOrderAMD( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
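+    // Note: this struct belongs to VK_AMD_rasterization_order; RasterizationOrderAMD::eRelaxed
+    // lets the implementation rasterize primitives out of submission order, which can help
+    // performance when the result is order-independent (e.g. opaque geometry under depth testing).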
+    PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineRasterizationStateRasterizationOrderAMD( *reinterpret_cast<PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs ) )
+    {
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineRasterizationStateRasterizationOrderAMD & operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD &
+      setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationOrder = rasterizationOrder_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD *>( this );
+    }
+
+    operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RasterizationOrderAMD const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, rasterizationOrder );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const & ) const = default;
+#else
+    bool operator==( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrder == rhs.rasterizationOrder );
+#  endif
+    }
+
+    bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineRasterizationStateRasterizationOrderAMD>
+  {
+    using Type = PipelineRasterizationStateRasterizationOrderAMD;
+  };
+
+  struct PipelineRasterizationStateStreamCreateInfoEXT
+  {
+    using NativeType = VkPipelineRasterizationStateStreamCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {},
+                                                                        uint32_t rasterizationStream_ = {},
+                                                                        const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , rasterizationStream( rasterizationStream_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    
PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationStateStreamCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRasterizationStateStreamCreateInfoEXT & operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationStream = rasterizationStream_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, rasterizationStream ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rasterizationStream == rhs.rasterizationStream ); +# endif + } + + bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {}; + uint32_t rasterizationStream = {}; + }; + + template <> + struct CppType + { + using Type = PipelineRasterizationStateStreamCreateInfoEXT; + }; + + struct PipelineRenderingCreateInfo + { + using NativeType = VkPipelineRenderingCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRenderingCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, 
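+                                                     // Note: with dynamic rendering, the formats above generally have to match
+                                                     // the formats of the attachments bound via VkRenderingInfo at draw time.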
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachmentFormats( pColorAttachmentFormats_ ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRenderingCreateInfo( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRenderingCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineRenderingCreateInfo( uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) + , pColorAttachmentFormats( colorAttachmentFormats_.data() ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineRenderingCreateInfo & operator=( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRenderingCreateInfo & operator=( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & + setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachmentFormats = pColorAttachmentFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineRenderingCreateInfo & setColorAttachmentFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); + pColorAttachmentFormats = colorAttachmentFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + depthAttachmentFormat = depthAttachmentFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & + setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + stencilAttachmentFormat = stencilAttachmentFormat_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineRenderingCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkPipelineRenderingCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRenderingCreateInfo const & ) const = default; +#else + bool operator==( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && + ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ); +# endif + } + + bool operator!=( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRenderingCreateInfo; + const void * pNext = {}; + uint32_t viewMask = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {}; + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = PipelineRenderingCreateInfo; + }; + + using PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo; + + struct PipelineRepresentativeFragmentTestStateCreateInfoNV + { + using NativeType = VkPipelineRepresentativeFragmentTestStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , representativeFragmentTestEnable( representativeFragmentTestEnable_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PipelineRepresentativeFragmentTestStateCreateInfoNV( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRepresentativeFragmentTestStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRepresentativeFragmentTestStateCreateInfoNV & + operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & + setRepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + representativeFragmentTestEnable = representativeFragmentTestEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, representativeFragmentTestEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable ); +# endif + } + + bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {}; + }; + + template <> + struct CppType + { + using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV; + }; + + struct PipelineRobustnessCreateInfoEXT + { + using NativeType = VkPipelineRobustnessCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRobustnessCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT storageBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT uniformBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , storageBuffers( storageBuffers_ ) + , uniformBuffers( uniformBuffers_ ) + , vertexInputs( vertexInputs_ ) + , images( images_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfoEXT( PipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineRobustnessCreateInfoEXT( VkPipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRobustnessCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineRobustnessCreateInfoEXT & operator=( PipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT 
= default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineRobustnessCreateInfoEXT & operator=( VkPipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & + setStorageBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT storageBuffers_ ) VULKAN_HPP_NOEXCEPT + { + storageBuffers = storageBuffers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & + setUniformBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT uniformBuffers_ ) VULKAN_HPP_NOEXCEPT + { + uniformBuffers = uniformBuffers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & + setVertexInputs( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs_ ) VULKAN_HPP_NOEXCEPT + { + vertexInputs = vertexInputs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & setImages( VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images_ ) VULKAN_HPP_NOEXCEPT + { + images = images_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineRobustnessCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRobustnessCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, storageBuffers, uniformBuffers, vertexInputs, images ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineRobustnessCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineRobustnessCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffers == rhs.storageBuffers ) && ( uniformBuffers == rhs.uniformBuffers ) && + ( vertexInputs == rhs.vertexInputs ) && ( images == rhs.images ); +# endif + } + + bool operator!=( PipelineRobustnessCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRobustnessCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT storageBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT uniformBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault; + }; + + template <> + struct CppType + { + using Type = PipelineRobustnessCreateInfoEXT; + }; + + struct PipelineSampleLocationsStateCreateInfoEXT + { + using NativeType = VkPipelineSampleLocationsStateCreateInfoEXT; + + static const bool 
allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , sampleLocationsEnable( sampleLocationsEnable_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineSampleLocationsStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineSampleLocationsStateCreateInfoEXT & operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & + setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsEnable = sampleLocationsEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & + setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, sampleLocationsEnable, sampleLocationsInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationsEnable == rhs.sampleLocationsEnable ) && + ( sampleLocationsInfo == rhs.sampleLocationsInfo ); +# endif + } + + bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT 
sampleLocationsInfo = {}; + }; + + template <> + struct CppType + { + using Type = PipelineSampleLocationsStateCreateInfoEXT; + }; + + struct PipelineShaderStageModuleIdentifierCreateInfoEXT + { + using NativeType = VkPipelineShaderStageModuleIdentifierCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineShaderStageModuleIdentifierCreateInfoEXT( uint32_t identifierSize_ = {}, + const uint8_t * pIdentifier_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , identifierSize( identifierSize_ ) + , pIdentifier( pIdentifier_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PipelineShaderStageModuleIdentifierCreateInfoEXT( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageModuleIdentifierCreateInfoEXT( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageModuleIdentifierCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineShaderStageModuleIdentifierCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & identifier_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), identifierSize( static_cast( identifier_.size() ) ), pIdentifier( identifier_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setIdentifierSize( uint32_t identifierSize_ ) VULKAN_HPP_NOEXCEPT + { + identifierSize = identifierSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPIdentifier( const uint8_t * pIdentifier_ ) VULKAN_HPP_NOEXCEPT + { + pIdentifier = pIdentifier_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineShaderStageModuleIdentifierCreateInfoEXT & + setIdentifier( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & identifier_ ) VULKAN_HPP_NOEXCEPT + { + identifierSize = static_cast( identifier_.size() ); + pIdentifier = identifier_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, identifierSize, pIdentifier ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineShaderStageModuleIdentifierCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) && ( pIdentifier == rhs.pIdentifier ); +# endif + } + + bool operator!=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT; + const void * pNext = {}; + uint32_t identifierSize = {}; + const uint8_t * pIdentifier = {}; + }; + + template <> + struct CppType + { + using Type = PipelineShaderStageModuleIdentifierCreateInfoEXT; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PipelineShaderStageNodeCreateInfoAMDX + { + using NativeType = VkPipelineShaderStageNodeCreateInfoAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageNodeCreateInfoAMDX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineShaderStageNodeCreateInfoAMDX( const char * pName_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pName( pName_ ) + , index( index_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineShaderStageNodeCreateInfoAMDX( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageNodeCreateInfoAMDX( VkPipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageNodeCreateInfoAMDX( *reinterpret_cast( &rhs ) ) + { + } + + PipelineShaderStageNodeCreateInfoAMDX & operator=( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineShaderStageNodeCreateInfoAMDX & operator=( VkPipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + { + pName = pName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineShaderStageNodeCreateInfoAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageNodeCreateInfoAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pName, index ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> 
rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = index <=> rhs.index; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( index == rhs.index ); + } + + bool operator!=( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageNodeCreateInfoAMDX; + const void * pNext = {}; + const char * pName = {}; + uint32_t index = {}; + }; + + template <> + struct CppType + { + using Type = PipelineShaderStageNodeCreateInfoAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct PipelineShaderStageRequiredSubgroupSizeCreateInfo + { + using NativeType = VkPipelineShaderStageRequiredSubgroupSizeCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo( uint32_t requiredSubgroupSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , requiredSubgroupSize( requiredSubgroupSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PipelineShaderStageRequiredSubgroupSizeCreateInfo( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageRequiredSubgroupSizeCreateInfo( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageRequiredSubgroupSizeCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PipelineShaderStageRequiredSubgroupSizeCreateInfo & + operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineShaderStageRequiredSubgroupSizeCreateInfo & operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, requiredSubgroupSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & ) const = default; +#else + bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( requiredSubgroupSize == rhs.requiredSubgroupSize ); +# endif + } + + bool 
operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo;
+    void * pNext = {};
+    uint32_t requiredSubgroupSize = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo>
+  {
+    using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
+  };
+
+  using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
+  using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
+
+  struct PipelineTessellationDomainOriginStateCreateInfo
+  {
+    using NativeType = VkPipelineTessellationDomainOriginStateCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo(
+      VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft,
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , domainOrigin( domainOrigin_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PipelineTessellationDomainOriginStateCreateInfo( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PipelineTessellationDomainOriginStateCreateInfo( *reinterpret_cast<PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs ) )
+    {
+    }
+
+    PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    PipelineTessellationDomainOriginStateCreateInfo & operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo &
+      setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT
+    {
+      domainOrigin = domainOrigin_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo *>( this );
+    }
+
+    operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::TessellationDomainOrigin const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, domainOrigin );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const & ) const = default;
+#else
+    bool operator==( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( domainOrigin == rhs.domainOrigin );
+# endif
+    }
+
+    bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePipelineTessellationDomainOriginStateCreateInfo>
+  {
+    using Type = PipelineTessellationDomainOriginStateCreateInfo;
+  };
+
+  using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
+
+  struct VertexInputBindingDivisorDescriptionKHR
+  {
+    using NativeType = VkVertexInputBindingDivisorDescriptionKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionKHR( uint32_t binding_ = {}, uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT
+      : binding( binding_ )
+      , divisor( divisor_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionKHR( VertexInputBindingDivisorDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VertexInputBindingDivisorDescriptionKHR( VkVertexInputBindingDivisorDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VertexInputBindingDivisorDescriptionKHR( *reinterpret_cast<VertexInputBindingDivisorDescriptionKHR const *>( &rhs ) )
+    {
+    }
+
+    VertexInputBindingDivisorDescriptionKHR & operator=( VertexInputBindingDivisorDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VertexInputBindingDivisorDescriptionKHR & operator=( VkVertexInputBindingDivisorDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionKHR & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionKHR & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      divisor = divisor_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkVertexInputBindingDivisorDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionKHR *>( this );
+    }
+
+    operator VkVertexInputBindingDivisorDescriptionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputBindingDivisorDescriptionKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<uint32_t const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( binding, divisor );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VertexInputBindingDivisorDescriptionKHR const & ) const = default;
+#else
+    bool operator==( VertexInputBindingDivisorDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( binding == rhs.binding ) && ( divisor == rhs.divisor );
+# endif
+    }
+
+    bool operator!=( VertexInputBindingDivisorDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t binding = {};
+    uint32_t divisor = {};
+  };
+
+  using VertexInputBindingDivisorDescriptionEXT = VertexInputBindingDivisorDescriptionKHR;
+
+  struct PipelineVertexInputDivisorStateCreateInfoKHR
+  {
+    using NativeType = VkPipelineVertexInputDivisorStateCreateInfoKHR;
+
+    static const
bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineVertexInputDivisorStateCreateInfoKHR( uint32_t vertexBindingDivisorCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR * pVertexBindingDivisors_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , vertexBindingDivisorCount( vertexBindingDivisorCount_ ) + , pVertexBindingDivisors( pVertexBindingDivisors_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoKHR( PipelineVertexInputDivisorStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineVertexInputDivisorStateCreateInfoKHR( VkPipelineVertexInputDivisorStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineVertexInputDivisorStateCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputDivisorStateCreateInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDivisors_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , vertexBindingDivisorCount( static_cast( vertexBindingDivisors_.size() ) ) + , pVertexBindingDivisors( vertexBindingDivisors_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineVertexInputDivisorStateCreateInfoKHR & operator=( PipelineVertexInputDivisorStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineVertexInputDivisorStateCreateInfoKHR & operator=( VkPipelineVertexInputDivisorStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoKHR & + setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingDivisorCount = vertexBindingDivisorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoKHR & + setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR * pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT + { + pVertexBindingDivisors = pVertexBindingDivisors_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputDivisorStateCreateInfoKHR & setVertexBindingDivisors( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDivisors_ ) + VULKAN_HPP_NOEXCEPT + { + vertexBindingDivisorCount = static_cast( vertexBindingDivisors_.size() ); + pVertexBindingDivisors = vertexBindingDivisors_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineVertexInputDivisorStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineVertexInputDivisorStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vertexBindingDivisorCount, 
pVertexBindingDivisors ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineVertexInputDivisorStateCreateInfoKHR const & ) const = default; +#else + bool operator==( PipelineVertexInputDivisorStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) && + ( pVertexBindingDivisors == rhs.pVertexBindingDivisors ); +# endif + } + + bool operator!=( PipelineVertexInputDivisorStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoKHR; + const void * pNext = {}; + uint32_t vertexBindingDivisorCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR * pVertexBindingDivisors = {}; + }; + + template <> + struct CppType + { + using Type = PipelineVertexInputDivisorStateCreateInfoKHR; + }; + + using PipelineVertexInputDivisorStateCreateInfoEXT = PipelineVertexInputDivisorStateCreateInfoKHR; + + struct PipelineViewportCoarseSampleOrderStateCreateInfoNV + { + using NativeType = VkPipelineViewportCoarseSampleOrderStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, + uint32_t customSampleOrderCount_ = {}, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , sampleOrderType( sampleOrderType_ ) + , customSampleOrderCount( customSampleOrderCount_ ) + , pCustomSampleOrders( pCustomSampleOrders_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PipelineViewportCoarseSampleOrderStateCreateInfoNV( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportCoarseSampleOrderStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & customSampleOrders_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , sampleOrderType( sampleOrderType_ ) + , customSampleOrderCount( static_cast( customSampleOrders_.size() ) ) + , pCustomSampleOrders( customSampleOrders_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineViewportCoarseSampleOrderStateCreateInfoNV & + operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT + { + sampleOrderType = sampleOrderType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT + { + customSampleOrderCount = customSampleOrderCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setPCustomSampleOrders( const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT + { + pCustomSampleOrders = pCustomSampleOrders_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & customSampleOrders_ ) VULKAN_HPP_NOEXCEPT + { + customSampleOrderCount = static_cast( customSampleOrders_.size() ); + pCustomSampleOrders = customSampleOrders_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleOrderType == rhs.sampleOrderType ) && + ( customSampleOrderCount == rhs.customSampleOrderCount ) && ( pCustomSampleOrders == rhs.pCustomSampleOrders ); +# endif + } + + bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault; + uint32_t customSampleOrderCount = {}; + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV; + }; + + struct PipelineViewportDepthClipControlCreateInfoEXT + { + using NativeType = VkPipelineViewportDepthClipControlCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT; + 
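+    // NOTE (annotation added in this diff, not upstream Vulkan-Hpp text): this wrapper backs
+    // VK_EXT_depth_clip_control. Chained into VkPipelineViewportStateCreateInfo::pNext with
+    // negativeOneToOne = VK_TRUE, it selects the OpenGL-style [-1, 1] clip-space depth range.
+    // A minimal, hedged sketch (viewportState is an assumed pre-existing variable):
+    //   vk::PipelineViewportDepthClipControlCreateInfoEXT depthClipControl{};
+    //   depthClipControl.setNegativeOneToOne( VK_TRUE ); // setter defined below
+    //   viewportState.pNext = &depthClipControl;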
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , negativeOneToOne( negativeOneToOne_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PipelineViewportDepthClipControlCreateInfoEXT( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportDepthClipControlCreateInfoEXT( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportDepthClipControlCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineViewportDepthClipControlCreateInfoEXT & operator=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineViewportDepthClipControlCreateInfoEXT & operator=( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & + setNegativeOneToOne( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ ) VULKAN_HPP_NOEXCEPT + { + negativeOneToOne = negativeOneToOne_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineViewportDepthClipControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportDepthClipControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, negativeOneToOne ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportDepthClipControlCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( negativeOneToOne == rhs.negativeOneToOne ); +# endif + } + + bool operator!=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportDepthClipControlCreateInfoEXT; + }; + + struct PipelineViewportExclusiveScissorStateCreateInfoNV + { + using NativeType = VkPipelineViewportExclusiveScissorStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ = {}, 
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , exclusiveScissorCount( exclusiveScissorCount_ ) + , pExclusiveScissors( pExclusiveScissors_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PipelineViewportExclusiveScissorStateCreateInfoNV( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportExclusiveScissorStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportExclusiveScissorStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), exclusiveScissorCount( static_cast( exclusiveScissors_.size() ) ), pExclusiveScissors( exclusiveScissors_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineViewportExclusiveScissorStateCreateInfoNV & + operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT + { + exclusiveScissorCount = exclusiveScissorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & + setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT + { + pExclusiveScissors = pExclusiveScissors_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportExclusiveScissorStateCreateInfoNV & + setExclusiveScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_ ) VULKAN_HPP_NOEXCEPT + { + exclusiveScissorCount = static_cast( exclusiveScissors_.size() ); + pExclusiveScissors = exclusiveScissors_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, exclusiveScissorCount, pExclusiveScissors ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissorCount == 
rhs.exclusiveScissorCount ) && + ( pExclusiveScissors == rhs.pExclusiveScissors ); +# endif + } + + bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; + const void * pNext = {}; + uint32_t exclusiveScissorCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportExclusiveScissorStateCreateInfoNV; + }; + + struct ShadingRatePaletteNV + { + using NativeType = VkShadingRatePaletteNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ) + , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ ) + { + } + + VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ShadingRatePaletteNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShadingRatePaletteNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePaletteEntries_ ) + : shadingRatePaletteEntryCount( static_cast( shadingRatePaletteEntries_.size() ) ) + , pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & + setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT + { + pShadingRatePaletteEntries = pShadingRatePaletteEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShadingRatePaletteNV & setShadingRatePaletteEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePaletteEntries_ ) + VULKAN_HPP_NOEXCEPT + { + shadingRatePaletteEntryCount = static_cast( shadingRatePaletteEntries_.size() ); + pShadingRatePaletteEntries = shadingRatePaletteEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( shadingRatePaletteEntryCount, pShadingRatePaletteEntries ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShadingRatePaletteNV const & ) const = default; +#else + bool operator==( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries ); +# endif + } + + bool operator!=( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t shadingRatePaletteEntryCount = {}; + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries = {}; + }; + + struct PipelineViewportShadingRateImageStateCreateInfoNV + { + using NativeType = VkPipelineViewportShadingRateImageStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shadingRateImageEnable( shadingRateImageEnable_ ) + , viewportCount( viewportCount_ ) + , pShadingRatePalettes( pShadingRatePalettes_ ) + { + } + + VULKAN_HPP_CONSTEXPR + PipelineViewportShadingRateImageStateCreateInfoNV( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportShadingRateImageStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportShadingRateImageStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePalettes_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , shadingRateImageEnable( shadingRateImageEnable_ ) + , viewportCount( static_cast( shadingRatePalettes_.size() ) ) + , pShadingRatePalettes( shadingRatePalettes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineViewportShadingRateImageStateCreateInfoNV & + operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & + setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateImageEnable = shadingRateImageEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + { + 
viewportCount = viewportCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & + setPShadingRatePalettes( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT + { + pShadingRatePalettes = pShadingRatePalettes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( shadingRatePalettes_.size() ); + pShadingRatePalettes = shadingRatePalettes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shadingRateImageEnable, viewportCount, pShadingRatePalettes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImageEnable == rhs.shadingRateImageEnable ) && + ( viewportCount == rhs.viewportCount ) && ( pShadingRatePalettes == rhs.pShadingRatePalettes ); +# endif + } + + bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportShadingRateImageStateCreateInfoNV; + }; + + struct ViewportSwizzleNV + { + using NativeType = VkViewportSwizzleNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT + : x( x_ ) + , y( y_ ) + , z( z_ ) + , w( w_ ) + { + } + + VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT : ViewportSwizzleNV( *reinterpret_cast( &rhs ) ) {} + + ViewportSwizzleNV & operator=( ViewportSwizzleNV const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT + { + z = z_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT + { + w = w_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( x, y, z, w ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ViewportSwizzleNV const & ) const = default; +#else + bool operator==( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ) && ( w == rhs.w ); +# endif + } + + bool operator!=( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + }; + + struct PipelineViewportSwizzleStateCreateInfoNV + { + using NativeType = VkPipelineViewportSwizzleStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , viewportCount( viewportCount_ ) + , pViewportSwizzles( pViewportSwizzles_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportSwizzleStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportSwizzleStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportSwizzles_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), viewportCount( static_cast( viewportSwizzles_.size() ) ), pViewportSwizzles( viewportSwizzles_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineViewportSwizzleStateCreateInfoNV & operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineViewportSwizzleStateCreateInfoNV & operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = viewportCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & + setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT + { + pViewportSwizzles = pViewportSwizzles_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( viewportSwizzles_.size() ); + pViewportSwizzles = viewportSwizzles_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, viewportCount, pViewportSwizzles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewportCount == rhs.viewportCount ) && + ( pViewportSwizzles == rhs.pViewportSwizzles ); +# endif + } + + bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {}; 
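+    // NOTE (annotation added in this diff, not upstream Vulkan-Hpp text): pViewportSwizzles must
+    // point to viewportCount ViewportSwizzleNV entries, one per viewport (VK_NV_viewport_swizzle);
+    // the ArrayProxy constructor and setViewportSwizzles() above keep the count and pointer in sync.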
+ uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportSwizzleStateCreateInfoNV; + }; + + struct ViewportWScalingNV + { + using NativeType = VkViewportWScalingNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {}, float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT + : xcoeff( xcoeff_ ) + , ycoeff( ycoeff_ ) + { + } + + VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT : ViewportWScalingNV( *reinterpret_cast( &rhs ) ) {} + + ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT + { + xcoeff = xcoeff_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT + { + ycoeff = ycoeff_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( xcoeff, ycoeff ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ViewportWScalingNV const & ) const = default; +#else + bool operator==( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( xcoeff == rhs.xcoeff ) && ( ycoeff == rhs.ycoeff ); +# endif + } + + bool operator!=( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float xcoeff = {}; + float ycoeff = {}; + }; + + struct PipelineViewportWScalingStateCreateInfoNV + { + using NativeType = VkPipelineViewportWScalingStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , viewportWScalingEnable( viewportWScalingEnable_ ) + , viewportCount( viewportCount_ ) + , pViewportWScalings( pViewportWScalings_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportWScalingStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if 
!defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportWScalingStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportWScalings_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , viewportWScalingEnable( viewportWScalingEnable_ ) + , viewportCount( static_cast( viewportWScalings_.size() ) ) + , pViewportWScalings( viewportWScalings_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineViewportWScalingStateCreateInfoNV & operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & + setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT + { + viewportWScalingEnable = viewportWScalingEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = viewportCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & + setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT + { + pViewportWScalings = pViewportWScalings_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportWScalings_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( viewportWScalings_.size() ); + pViewportWScalings = viewportWScalings_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, viewportWScalingEnable, viewportCount, pViewportWScalings ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportWScalingEnable == rhs.viewportWScalingEnable ) && + ( viewportCount == rhs.viewportCount ) && ( pViewportWScalings == rhs.pViewportWScalings ); +# endif + } + + bool operator!=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineViewportWScalingStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportWScalingStateCreateInfoNV; + }; + +#if defined( VK_USE_PLATFORM_GGP ) + struct PresentFrameTokenGGP + { + using NativeType = VkPresentFrameTokenGGP; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , frameToken( frameToken_ ) + { + } + + VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentFrameTokenGGP( *reinterpret_cast( &rhs ) ) + { + } + + PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) VULKAN_HPP_NOEXCEPT + { + frameToken = frameToken_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, frameToken ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 ); + } + + bool operator!=( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP; + const void * pNext = {}; + GgpFrameToken frameToken = {}; + }; + + template <> + struct CppType + { + using Type = PresentFrameTokenGGP; + }; +#endif /*VK_USE_PLATFORM_GGP*/ + + struct PresentIdKHR + { + using NativeType = VkPresentIdKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentIdKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentIdKHR( uint32_t swapchainCount_ = {}, const uint64_t * pPresentIds_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , swapchainCount( swapchainCount_ ) + , pPresentIds( pPresentIds_ ) + { + } + + VULKAN_HPP_CONSTEXPR PresentIdKHR( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentIdKHR( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentIdKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentIdKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentIds_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( presentIds_.size() ) ), pPresentIds( presentIds_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentIdKHR & operator=( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PresentIdKHR & operator=( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPPresentIds( const uint64_t * pPresentIds_ ) VULKAN_HPP_NOEXCEPT + { + pPresentIds = pPresentIds_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentIdKHR & setPresentIds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentIds_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( presentIds_.size() ); + pPresentIds = presentIds_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPresentIdKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentIdKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainCount, pPresentIds ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentIdKHR const & ) const = default; +#else + bool operator==( PresentIdKHR const & rhs 
) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pPresentIds == rhs.pPresentIds ); +# endif + } + + bool operator!=( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentIdKHR; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const uint64_t * pPresentIds = {}; + }; + + template <> + struct CppType + { + using Type = PresentIdKHR; + }; + + struct PresentInfoKHR + { + using NativeType = VkPresentInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ = {}, + const uint32_t * pImageIndices_ = {}, + VULKAN_HPP_NAMESPACE::Result * pResults_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , swapchainCount( swapchainCount_ ) + , pSwapchains( pSwapchains_ ) + , pImageIndices( pImageIndices_ ) + , pResults( pResults_ ) + { + } + + VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentInfoKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & swapchains_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & results_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , swapchainCount( static_cast( swapchains_.size() ) ) + , pSwapchains( swapchains_.data() ) + , pImageIndices( imageIndices_.data() ) + , pResults( results_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() ); + VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) ); + VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) ); +# else + if ( swapchains_.size() != imageIndices_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" ); + } + if ( !results_.empty() && ( swapchains_.size() != results_.size() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" ); + } + if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentInfoKHR & operator=( PresentInfoKHR const & rhs 
) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & + setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT + { + pSwapchains = pSwapchains_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & + setSwapchains( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & swapchains_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( swapchains_.size() ); + pSwapchains = swapchains_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT + { + pImageIndices = pImageIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( imageIndices_.size() ); + pImageIndices = imageIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result * pResults_ ) VULKAN_HPP_NOEXCEPT + { + pResults = pResults_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setResults( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & results_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( results_.size() ); + pResults = results_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, swapchainCount, pSwapchains, pImageIndices, pResults ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentInfoKHR const & ) const = default; +#else + bool operator==( 
PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( swapchainCount == rhs.swapchainCount ) && ( pSwapchains == rhs.pSwapchains ) && + ( pImageIndices == rhs.pImageIndices ) && ( pResults == rhs.pResults ); +# endif + } + + bool operator!=( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains = {}; + const uint32_t * pImageIndices = {}; + VULKAN_HPP_NAMESPACE::Result * pResults = {}; + }; + + template <> + struct CppType + { + using Type = PresentInfoKHR; + }; + + struct RectLayerKHR + { + using NativeType = VkRectLayerKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT + : offset( offset_ ) + , extent( extent_ ) + , layer( layer_ ) + { + } + + VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT : RectLayerKHR( *reinterpret_cast( &rhs ) ) {} + + explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} ) : offset( rect2D.offset ), extent( rect2D.extent ), layer( layer_ ) {} + + RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT + { + layer = layer_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( offset, extent, layer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RectLayerKHR const & ) const = default; +#else + bool operator==( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( layer == rhs.layer ); +# endif + } + + bool operator!=( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + 
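+    // Usage sketch for PresentInfoKHR (not part of the generated header; kept in
+    // comments so the header text is unaffected). `presentQueue`, `renderFinished`,
+    // `swapchain` and `imageIndex` are assumed to exist in the calling code:
+    //
+    //   vk::PresentInfoKHR presentInfo{};
+    //   presentInfo.setWaitSemaphores( renderFinished )  // also fills waitSemaphoreCount
+    //              .setSwapchains( swapchain )           // also fills swapchainCount
+    //              .setPImageIndices( &imageIndex );
+    //   vk::Result result = presentQueue.presentKHR( presentInfo );  // may return eSuboptimalKHR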
public: + VULKAN_HPP_NAMESPACE::Offset2D offset = {}; + VULKAN_HPP_NAMESPACE::Extent2D extent = {}; + uint32_t layer = {}; + }; + + struct PresentRegionKHR + { + using NativeType = VkPresentRegionKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT + : rectangleCount( rectangleCount_ ) + , pRectangles( pRectangles_ ) + { + } + + VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentRegionKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & rectangles_ ) + : rectangleCount( static_cast( rectangles_.size() ) ), pRectangles( rectangles_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT + { + rectangleCount = rectangleCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ ) VULKAN_HPP_NOEXCEPT + { + pRectangles = pRectangles_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionKHR & + setRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & rectangles_ ) VULKAN_HPP_NOEXCEPT + { + rectangleCount = static_cast( rectangles_.size() ); + pRectangles = rectangles_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( rectangleCount, pRectangles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentRegionKHR const & ) const = default; +#else + bool operator==( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( rectangleCount == rhs.rectangleCount ) && ( pRectangles == rhs.pRectangles ); +# endif + } + + bool operator!=( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t rectangleCount = {}; + const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles = {}; + }; + + struct PresentRegionsKHR + { + using NativeType = VkPresentRegionsKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentRegionKHR * 
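+    // Usage sketch for RectLayerKHR / PresentRegionKHR (an assumption, for
+    // VK_KHR_incremental_present): one dirty rectangle on layer 0 of the
+    // swapchain image, wrapped in a present region:
+    //
+    //   vk::RectLayerKHR     dirty( vk::Offset2D( 0, 0 ), vk::Extent2D( 256, 256 ), 0 );
+    //   vk::PresentRegionKHR region( 1, &dirty );  // rectangleCount, pRectangles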
pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , swapchainCount( swapchainCount_ ) + , pRegions( pRegions_ ) + { + } + + VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentRegionsKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionsKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionsKHR & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentRegionsKHR const & ) const = default; +#else + bool operator==( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = PresentRegionsKHR; + }; + + struct PresentTimeGOOGLE + { + using NativeType = VkPresentTimeGOOGLE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT + : presentID( presentID_ ) + , desiredPresentTime( 
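+    // Continuing the sketch above (VK_KHR_incremental_present assumed enabled):
+    // PresentRegionsKHR is chained into PresentInfoKHR through pNext, with one
+    // PresentRegionKHR per swapchain being presented:
+    //
+    //   vk::PresentRegionsKHR regions( 1, &region );  // swapchainCount must match the present info
+    //   presentInfo.setPNext( &regions );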
desiredPresentTime_ ) + { + } + + VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT : PresentTimeGOOGLE( *reinterpret_cast( &rhs ) ) {} + + PresentTimeGOOGLE & operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) VULKAN_HPP_NOEXCEPT + { + presentID = presentID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT + { + desiredPresentTime = desiredPresentTime_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( presentID, desiredPresentTime ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentTimeGOOGLE const & ) const = default; +#else + bool operator==( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ); +# endif + } + + bool operator!=( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t presentID = {}; + uint64_t desiredPresentTime = {}; + }; + + struct PresentTimesInfoGOOGLE + { + using NativeType = VkPresentTimesInfoGOOGLE; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , swapchainCount( swapchainCount_ ) + , pTimes( pTimes_ ) + { + } + + VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentTimesInfoGOOGLE( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentTimesInfoGOOGLE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & times_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( times_.size() ) ), pTimes( times_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentTimesInfoGOOGLE & operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ ) VULKAN_HPP_NOEXCEPT + { + pTimes = pTimes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentTimesInfoGOOGLE & + setTimes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & times_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( times_.size() ); + pTimes = times_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainCount, pTimes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentTimesInfoGOOGLE const & ) const = default; +#else + bool operator==( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pTimes == rhs.pTimes ); +# endif + } + + bool operator!=( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes = {}; + }; + + template <> + struct CppType + { + using Type = PresentTimesInfoGOOGLE; + }; + + struct PrivateDataSlotCreateInfo + { + using NativeType = VkPrivateDataSlotCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PrivateDataSlotCreateInfo( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PrivateDataSlotCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + PrivateDataSlotCreateInfo & operator=( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PrivateDataSlotCreateInfo & operator=( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; 
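+    // Usage sketch for PresentTimeGOOGLE / PresentTimesInfoGOOGLE (assumes
+    // VK_GOOGLE_display_timing is enabled; `presentID` and `desiredPresentTimeNs`
+    // are caller-provided): request a target present time via the pNext chain:
+    //
+    //   vk::PresentTimeGOOGLE      time( presentID, desiredPresentTimeNs );
+    //   vk::PresentTimesInfoGOOGLE times( 1, &time );
+    //   presentInfo.setPNext( &times );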
+ } + + VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPrivateDataSlotCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPrivateDataSlotCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PrivateDataSlotCreateInfo const & ) const = default; +#else + bool operator==( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags = {}; + }; + + template <> + struct CppType + { + using Type = PrivateDataSlotCreateInfo; + }; + + using PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo; + + struct ProtectedSubmitInfo + { + using NativeType = VkProtectedSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , protectedSubmit( protectedSubmit_ ) + { + } + + VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ProtectedSubmitInfo( *reinterpret_cast( &rhs ) ) + { + } + + ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT + { + protectedSubmit = protectedSubmit_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, protectedSubmit ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
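+    // Usage sketch for PrivateDataSlotCreateInfo (Vulkan 1.3 private data; the
+    // object names are assumptions): create a slot and attach a caller-defined
+    // 64-bit value to any Vulkan object:
+    //
+    //   vk::PrivateDataSlot slot = device.createPrivateDataSlot( vk::PrivateDataSlotCreateInfo{} );
+    //   device.setPrivateData( objectType, objectHandle, slot, /*data*/ 42u );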
ProtectedSubmitInfo const & ) const = default; +#else + bool operator==( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedSubmit == rhs.protectedSubmit ); +# endif + } + + bool operator!=( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {}; + }; + + template <> + struct CppType + { + using Type = ProtectedSubmitInfo; + }; + + struct PushConstantsInfoKHR + { + using NativeType = VkPushConstantsInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushConstantsInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PushConstantsInfoKHR( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + uint32_t offset_ = {}, + uint32_t size_ = {}, + const void * pValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , layout( layout_ ) + , stageFlags( stageFlags_ ) + , offset( offset_ ) + , size( size_ ) + , pValues( pValues_ ) + { + } + + VULKAN_HPP_CONSTEXPR PushConstantsInfoKHR( PushConstantsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PushConstantsInfoKHR( VkPushConstantsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PushConstantsInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PushConstantsInfoKHR( VULKAN_HPP_NAMESPACE::PipelineLayout layout_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + uint32_t offset_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , layout( layout_ ) + , stageFlags( stageFlags_ ) + , offset( offset_ ) + , size( static_cast( values_.size() * sizeof( T ) ) ) + , pValues( values_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PushConstantsInfoKHR & operator=( PushConstantsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PushConstantsInfoKHR & operator=( VkPushConstantsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfoKHR & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfoKHR & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfoKHR & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfoKHR & setPValues( const void * pValues_ ) VULKAN_HPP_NOEXCEPT + { + pValues = pValues_; + return *this; + } + +# if 
!defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PushConstantsInfoKHR & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + size = static_cast( values_.size() * sizeof( T ) ); + pValues = values_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPushConstantsInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushConstantsInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, layout, stageFlags, offset, size, pValues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PushConstantsInfoKHR const & ) const = default; +#else + bool operator==( PushConstantsInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( layout == rhs.layout ) && ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && + ( size == rhs.size ) && ( pValues == rhs.pValues ); +# endif + } + + bool operator!=( PushConstantsInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushConstantsInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + uint32_t offset = {}; + uint32_t size = {}; + const void * pValues = {}; + }; + + template <> + struct CppType + { + using Type = PushConstantsInfoKHR; + }; + + struct WriteDescriptorSet + { + using NativeType = VkWriteDescriptorSet; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {}, + const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( descriptorCount_ ) + , descriptorType( descriptorType_ ) + , pImageInfo( pImageInfo_ ) + , pBufferInfo( pBufferInfo_ ) + , pTexelBufferView( pTexelBufferView_ ) + { + } + + VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : WriteDescriptorSet( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, + uint32_t dstBinding_, + uint32_t dstArrayElement_, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageInfo_, 
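+    // Usage sketch for PushConstantsInfoKHR (assumes VK_KHR_maintenance6 and an
+    // existing `pipelineLayout` and command buffer `cmd`): push a small constant
+    // block without naming a bind point:
+    //
+    //   struct PushData { float scale[2]; float offset[2]; };
+    //   PushData data{ { 1.0f, 1.0f }, { 0.0f, 0.0f } };
+    //   vk::PushConstantsInfoKHR info( pipelineLayout, vk::ShaderStageFlagBits::eVertex,
+    //                                  /*offset*/ 0, sizeof( data ), &data );
+    //   cmd.pushConstants2KHR( info );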
+ VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferInfo_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & texelBufferView_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( static_cast( !imageInfo_.empty() ? imageInfo_.size() + : !bufferInfo_.empty() ? bufferInfo_.size() + : texelBufferView_.size() ) ) + , descriptorType( descriptorType_ ) + , pImageInfo( imageInfo_.data() ) + , pBufferInfo( bufferInfo_.data() ) + , pTexelBufferView( texelBufferView_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) <= 1 ); +# else + if ( 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT + { + dstSet = dstSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT + { + dstBinding = dstBinding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT + { + dstArrayElement = dstArrayElement_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT + { + pImageInfo = pImageInfo_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & + setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageInfo_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( imageInfo_.size() ); + pImageInfo = imageInfo_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT + { + pBufferInfo = pBufferInfo_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & + setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( bufferInfo_.size() ); + pBufferInfo = bufferInfo_.data(); + return *this; + } +# endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT + { + pTexelBufferView = pTexelBufferView_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & + setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( texelBufferView_.size() ); + pTexelBufferView = texelBufferView_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dstSet, dstBinding, dstArrayElement, descriptorCount, descriptorType, pImageInfo, pBufferInfo, pTexelBufferView ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSet const & ) const = default; +#else + bool operator==( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) && + ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) && + ( pImageInfo == rhs.pImageInfo ) && ( pBufferInfo == rhs.pBufferInfo ) && ( pTexelBufferView == rhs.pTexelBufferView ); +# endif + } + + bool operator!=( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo = {}; + const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo = {}; + const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView = {}; + }; + + template <> + struct CppType + { + using Type = WriteDescriptorSet; + }; + + struct PushDescriptorSetInfoKHR + { + using NativeType = VkPushDescriptorSetInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushDescriptorSetInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PushDescriptorSetInfoKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t set_ = {}, + uint32_t descriptorWriteCount_ = {}, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stageFlags( stageFlags_ ) + , layout( layout_ ) + , set( set_ ) + , descriptorWriteCount( descriptorWriteCount_ ) + , pDescriptorWrites( pDescriptorWrites_ ) + { + } + + VULKAN_HPP_CONSTEXPR 
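+    // Usage sketch for WriteDescriptorSet (names assumed): update one
+    // uniform-buffer binding. The enhanced-mode constructor derives
+    // descriptorCount from whichever of the image/buffer/texel-buffer arrays is
+    // non-empty, and exactly one of them may be:
+    //
+    //   vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, sizeof( Ubo ) );
+    //   vk::WriteDescriptorSet   write( descriptorSet, /*binding*/ 0, /*arrayElement*/ 0,
+    //                                   vk::DescriptorType::eUniformBuffer, {}, bufferInfo );
+    //   device.updateDescriptorSets( write, {} );  // no copies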
PushDescriptorSetInfoKHR( PushDescriptorSetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PushDescriptorSetInfoKHR( VkPushDescriptorSetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PushDescriptorSetInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PushDescriptorSetInfoKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_, + uint32_t set_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorWrites_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stageFlags( stageFlags_ ) + , layout( layout_ ) + , set( set_ ) + , descriptorWriteCount( static_cast( descriptorWrites_.size() ) ) + , pDescriptorWrites( descriptorWrites_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PushDescriptorSetInfoKHR & operator=( PushDescriptorSetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PushDescriptorSetInfoKHR & operator=( VkPushDescriptorSetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfoKHR & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfoKHR & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT + { + set = set_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfoKHR & setDescriptorWriteCount( uint32_t descriptorWriteCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorWriteCount = descriptorWriteCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfoKHR & + setPDescriptorWrites( const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorWrites = pDescriptorWrites_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PushDescriptorSetInfoKHR & setDescriptorWrites( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorWrites_ ) VULKAN_HPP_NOEXCEPT + { + descriptorWriteCount = static_cast( descriptorWrites_.size() ); + pDescriptorWrites = descriptorWrites_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPushDescriptorSetInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushDescriptorSetInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stageFlags, layout, set, descriptorWriteCount, pDescriptorWrites ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PushDescriptorSetInfoKHR const & ) const = default; +#else + bool operator==( PushDescriptorSetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( 
pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( set == rhs.set ) && + ( descriptorWriteCount == rhs.descriptorWriteCount ) && ( pDescriptorWrites == rhs.pDescriptorWrites ); +# endif + } + + bool operator!=( PushDescriptorSetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushDescriptorSetInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + uint32_t set = {}; + uint32_t descriptorWriteCount = {}; + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites = {}; + }; + + template <> + struct CppType + { + using Type = PushDescriptorSetInfoKHR; + }; + + struct PushDescriptorSetWithTemplateInfoKHR + { + using NativeType = VkPushDescriptorSetWithTemplateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushDescriptorSetWithTemplateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PushDescriptorSetWithTemplateInfoKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t set_ = {}, + const void * pData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , descriptorUpdateTemplate( descriptorUpdateTemplate_ ) + , layout( layout_ ) + , set( set_ ) + , pData( pData_ ) + { + } + + VULKAN_HPP_CONSTEXPR PushDescriptorSetWithTemplateInfoKHR( PushDescriptorSetWithTemplateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PushDescriptorSetWithTemplateInfoKHR( VkPushDescriptorSetWithTemplateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PushDescriptorSetWithTemplateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + PushDescriptorSetWithTemplateInfoKHR & operator=( PushDescriptorSetWithTemplateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PushDescriptorSetWithTemplateInfoKHR & operator=( VkPushDescriptorSetWithTemplateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfoKHR & + setDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate_ ) VULKAN_HPP_NOEXCEPT + { + descriptorUpdateTemplate = descriptorUpdateTemplate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfoKHR & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT + { + set = set_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfoKHR & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPushDescriptorSetWithTemplateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushDescriptorSetWithTemplateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return 
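+    // Usage sketch for PushDescriptorSetInfoKHR (assumes VK_KHR_maintenance6 plus
+    // push descriptors, reusing the `write` from the WriteDescriptorSet sketch above):
+    //
+    //   vk::PushDescriptorSetInfoKHR pushInfo( vk::ShaderStageFlagBits::eCompute,
+    //                                          pipelineLayout, /*set*/ 0, write );
+    //   cmd.pushDescriptorSet2KHR( pushInfo );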
*reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorUpdateTemplate, layout, set, pData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PushDescriptorSetWithTemplateInfoKHR const & ) const = default; +#else + bool operator==( PushDescriptorSetWithTemplateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorUpdateTemplate == rhs.descriptorUpdateTemplate ) && ( layout == rhs.layout ) && + ( set == rhs.set ) && ( pData == rhs.pData ); +# endif + } + + bool operator!=( PushDescriptorSetWithTemplateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushDescriptorSetWithTemplateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + uint32_t set = {}; + const void * pData = {}; + }; + + template <> + struct CppType + { + using Type = PushDescriptorSetWithTemplateInfoKHR; + }; + + struct QueryLowLatencySupportNV + { + using NativeType = VkQueryLowLatencySupportNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryLowLatencySupportNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryLowLatencySupportNV( void * pQueriedLowLatencyData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pQueriedLowLatencyData( pQueriedLowLatencyData_ ) + { + } + + VULKAN_HPP_CONSTEXPR QueryLowLatencySupportNV( QueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryLowLatencySupportNV( VkQueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryLowLatencySupportNV( *reinterpret_cast( &rhs ) ) + { + } + + QueryLowLatencySupportNV & operator=( QueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueryLowLatencySupportNV & operator=( VkQueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryLowLatencySupportNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryLowLatencySupportNV & setPQueriedLowLatencyData( void * pQueriedLowLatencyData_ ) VULKAN_HPP_NOEXCEPT + { + pQueriedLowLatencyData = pQueriedLowLatencyData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkQueryLowLatencySupportNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryLowLatencySupportNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pQueriedLowLatencyData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryLowLatencySupportNV const & ) const = default; +#else + bool 
operator==( QueryLowLatencySupportNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pQueriedLowLatencyData == rhs.pQueriedLowLatencyData ); +# endif + } + + bool operator!=( QueryLowLatencySupportNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryLowLatencySupportNV; + const void * pNext = {}; + void * pQueriedLowLatencyData = {}; + }; + + template <> + struct CppType + { + using Type = QueryLowLatencySupportNV; + }; + + struct QueryPoolCreateInfo + { + using NativeType = VkQueryPoolCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, + uint32_t queryCount_ = {}, + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , queryType( queryType_ ) + , queryCount( queryCount_ ) + , pipelineStatistics( pipelineStatistics_ ) + { + } + + VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : QueryPoolCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT + { + queryType = queryType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT + { + queryCount = queryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & + setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatistics = pipelineStatistics_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, queryType, queryCount, pipelineStatistics ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
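+    // Usage sketch for QueryLowLatencySupportNV (VK_NV_low_latency; the format of
+    // the pointed-to data is driver-defined, so this is loosely hedged): the
+    // struct only carries a caller pointer through a pNext chain for the driver
+    // to write support data back through:
+    //
+    //   uint32_t supportData = 0;
+    //   vk::QueryLowLatencySupportNV lowLatency( &supportData );
+    //   someCreateInfo.setPNext( &lowLatency );  // chained on the structure being extended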
QueryPoolCreateInfo const & ) const = default; +#else + bool operator==( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queryType == rhs.queryType ) && ( queryCount == rhs.queryCount ) && + ( pipelineStatistics == rhs.pipelineStatistics ); +# endif + } + + bool operator!=( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion; + uint32_t queryCount = {}; + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; + }; + + template <> + struct CppType + { + using Type = QueryPoolCreateInfo; + }; + + struct QueryPoolPerformanceCreateInfoKHR + { + using NativeType = VkQueryPoolPerformanceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {}, + uint32_t counterIndexCount_ = {}, + const uint32_t * pCounterIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , counterIndexCount( counterIndexCount_ ) + , pCounterIndices( pCounterIndices_ ) + { + } + + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryPoolPerformanceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & counterIndices_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , counterIndexCount( static_cast( counterIndices_.size() ) ) + , pCounterIndices( counterIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndex = queueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + counterIndexCount = counterIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & 
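+    // Usage sketch for QueryPoolCreateInfo (names assumed): a two-query timestamp
+    // pool for simple GPU timing:
+    //
+    //   vk::QueryPoolCreateInfo poolInfo( {}, vk::QueryType::eTimestamp, /*queryCount*/ 2 );
+    //   vk::QueryPool           pool = device.createQueryPool( poolInfo );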
setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT + { + pCounterIndices = pCounterIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + QueryPoolPerformanceCreateInfoKHR & + setCounterIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & counterIndices_ ) VULKAN_HPP_NOEXCEPT + { + counterIndexCount = static_cast( counterIndices_.size() ); + pCounterIndices = counterIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, queueFamilyIndex, counterIndexCount, pCounterIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolPerformanceCreateInfoKHR const & ) const = default; +#else + bool operator==( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && + ( counterIndexCount == rhs.counterIndexCount ) && ( pCounterIndices == rhs.pCounterIndices ); +# endif + } + + bool operator!=( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR; + const void * pNext = {}; + uint32_t queueFamilyIndex = {}; + uint32_t counterIndexCount = {}; + const uint32_t * pCounterIndices = {}; + }; + + template <> + struct CppType + { + using Type = QueryPoolPerformanceCreateInfoKHR; + }; + + struct QueryPoolPerformanceQueryCreateInfoINTEL + { + using NativeType = VkQueryPoolPerformanceQueryCreateInfoINTEL; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , performanceCountersSampling( performanceCountersSampling_ ) + { + } + + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryPoolPerformanceQueryCreateInfoINTEL( *reinterpret_cast( &rhs ) ) + { + } + + QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueryPoolPerformanceQueryCreateInfoINTEL & operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & + setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT + { + performanceCountersSampling = performanceCountersSampling_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, performanceCountersSampling ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const & ) const = default; +#else + bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCountersSampling == rhs.performanceCountersSampling ); +# endif + } + + bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual; + }; + + template <> + struct CppType + { + using Type = QueryPoolPerformanceQueryCreateInfoINTEL; + }; + + using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL; + + struct QueryPoolVideoEncodeFeedbackCreateInfoKHR + { + using NativeType = VkQueryPoolVideoEncodeFeedbackCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolVideoEncodeFeedbackCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolVideoEncodeFeedbackCreateInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , encodeFeedbackFlags( encodeFeedbackFlags_ ) + { + } + + VULKAN_HPP_CONSTEXPR QueryPoolVideoEncodeFeedbackCreateInfoKHR( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueryPoolVideoEncodeFeedbackCreateInfoKHR( VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryPoolVideoEncodeFeedbackCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + QueryPoolVideoEncodeFeedbackCreateInfoKHR & operator=( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueryPoolVideoEncodeFeedbackCreateInfoKHR & operator=( VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
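+    // Usage sketch for QueryPoolPerformanceCreateInfoKHR (assumes
+    // VK_KHR_performance_query and counter indices previously enumerated from the
+    // queue family): the KHR create info is chained into QueryPoolCreateInfo:
+    //
+    //   uint32_t counterIndices[] = { 0, 1 };
+    //   vk::QueryPoolPerformanceCreateInfoKHR perfInfo( queueFamilyIndex, counterIndices );
+    //   vk::QueryPoolCreateInfo perfPoolInfo( {}, vk::QueryType::ePerformanceQueryKHR,
+    //                                         /*queryCount*/ 1, {}, &perfInfo );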
VULKAN_HPP_CONSTEXPR_14 QueryPoolVideoEncodeFeedbackCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 QueryPoolVideoEncodeFeedbackCreateInfoKHR & + setEncodeFeedbackFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags_ ) VULKAN_HPP_NOEXCEPT + { + encodeFeedbackFlags = encodeFeedbackFlags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, encodeFeedbackFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & ) const = default; +#else + bool operator==( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( encodeFeedbackFlags == rhs.encodeFeedbackFlags ); +# endif + } + + bool operator!=( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolVideoEncodeFeedbackCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags = {}; + }; + + template <> + struct CppType + { + using Type = QueryPoolVideoEncodeFeedbackCreateInfoKHR; + }; + + struct QueueFamilyCheckpointProperties2NV + { + using NativeType = VkQueueFamilyCheckpointProperties2NV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointProperties2NV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , checkpointExecutionStageMask( checkpointExecutionStageMask_ ) + { + } + + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyCheckpointProperties2NV( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyCheckpointProperties2NV( *reinterpret_cast( &rhs ) ) + { + } + + QueueFamilyCheckpointProperties2NV & operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueueFamilyCheckpointProperties2NV & operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + 
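+    // Usage sketch (editorial illustration): the video-encode feedback struct
+    // completed above is likewise consumed through the QueryPoolCreateInfo pNext
+    // chain. A real encode query pool additionally needs a VideoProfileInfoKHR in
+    // the chain; `device` with VK_KHR_video_encode_queue enabled is assumed.
+    //
+    //   vk::QueryPoolVideoEncodeFeedbackCreateInfoKHR feedbackInfo{};
+    //   feedbackInfo.setEncodeFeedbackFlags( vk::VideoEncodeFeedbackFlagBitsKHR::eBitstreamBytesWritten );
+    //   vk::QueryPoolCreateInfo poolInfo{};
+    //   poolInfo.setQueryType( vk::QueryType::eVideoEncodeFeedbackKHR ).setQueryCount( 1 ).setPNext( &feedbackInfo );
+    //   vk::QueryPool pool = device.createQueryPool( poolInfo );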
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, checkpointExecutionStageMask );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( QueueFamilyCheckpointProperties2NV const & ) const = default;
+#else
+    bool operator==( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
+# endif
+    }
+
+    bool operator!=( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType       sType                        = StructureType::eQueueFamilyCheckpointProperties2NV;
+    void *                                    pNext                        = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eQueueFamilyCheckpointProperties2NV>
+  {
+    using Type = QueueFamilyCheckpointProperties2NV;
+  };
+
+  struct QueueFamilyCheckpointPropertiesNV
+  {
+    using NativeType = VkQueueFamilyCheckpointPropertiesNV;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointPropertiesNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {},
+                                                            void *                                   pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , checkpointExecutionStageMask( checkpointExecutionStageMask_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueueFamilyCheckpointPropertiesNV( *reinterpret_cast<QueueFamilyCheckpointPropertiesNV const *>( &rhs ) )
+    {
+    }
+
+    QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV *>( this );
+    }
+
+    operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PipelineStageFlags const &>
+# endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, checkpointExecutionStageMask );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( QueueFamilyCheckpointPropertiesNV const & ) const = default;
+#else
+    bool operator==( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
+# endif
+    }
+
+    bool operator!=( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType      sType                        = StructureType::eQueueFamilyCheckpointPropertiesNV;
+    void *                                   pNext                        = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eQueueFamilyCheckpointPropertiesNV>
+  {
+    using Type = QueueFamilyCheckpointPropertiesNV;
+  };
+
+  struct QueueFamilyGlobalPriorityPropertiesKHR
+  {
+    using NativeType = VkQueueFamilyGlobalPriorityPropertiesKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14
+      QueueFamilyGlobalPriorityPropertiesKHR( uint32_t priorityCount_ = {},
+                                              std::array<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR, VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> const &
+                                                priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+                                                                  VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+                                                                  VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+                                                                  VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+                                                                  VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+                                                                  VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+                                                                  VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+                                                                  VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+                                                                  VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+                                                                  VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+                                                                  VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+                                                                  VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+                                                                  VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+                                                                  VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+                                                                  VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow,
+                                                                  VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow } },
+                                              void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , priorityCount( priorityCount_ )
+      , priorities( priorities_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyGlobalPriorityPropertiesKHR( VkQueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueueFamilyGlobalPriorityPropertiesKHR( *reinterpret_cast<QueueFamilyGlobalPriorityPropertiesKHR const *>( &rhs ) )
+    {
+    }
+
+    QueueFamilyGlobalPriorityPropertiesKHR & operator=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    QueueFamilyGlobalPriorityPropertiesKHR & operator=( VkQueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPriorityCount( uint32_t priorityCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      priorityCount = priorityCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR &
+      setPriorities( std::array<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR, VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> priorities_ ) VULKAN_HPP_NOEXCEPT
+    {
+      priorities = priorities_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkQueueFamilyGlobalPriorityPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyGlobalPriorityPropertiesKHR *>( this );
+    }
+
+    operator VkQueueFamilyGlobalPriorityPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyGlobalPriorityPropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR, VK_MAX_GLOBAL_PRIORITY_SIZE_KHR> const &>
+# endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, priorityCount, priorities );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
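+    // Usage sketch (editorial illustration): the checkpoint and global-priority
+    // structs above are read-only outputs; they are filled in through the pNext
+    // chain of QueueFamilyProperties2. `physicalDevice` is assumed to be a valid
+    // vk::PhysicalDevice with the matching extension available.
+    //
+    //   using Chain = vk::StructureChain<vk::QueueFamilyProperties2, vk::QueueFamilyCheckpointPropertiesNV>;
+    //   std::vector<Chain> chains = physicalDevice.getQueueFamilyProperties2<Chain>();
+    //   vk::PipelineStageFlags stages = chains[0].get<vk::QueueFamilyCheckpointPropertiesNV>().checkpointExecutionStageMask;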
+ auto operator<=>( QueueFamilyGlobalPriorityPropertiesKHR const & ) const = default; +#else + bool operator==( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priorityCount == rhs.priorityCount ) && ( priorities == rhs.priorities ); +# endif + } + + bool operator!=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR; + void * pNext = {}; + uint32_t priorityCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D priorities = {}; + }; + + template <> + struct CppType + { + using Type = QueueFamilyGlobalPriorityPropertiesKHR; + }; + + using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityPropertiesKHR; + + struct QueueFamilyProperties + { + using NativeType = VkQueueFamilyProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, + uint32_t queueCount_ = {}, + uint32_t timestampValidBits_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT + : queueFlags( queueFlags_ ) + , queueCount( queueCount_ ) + , timestampValidBits( timestampValidBits_ ) + , minImageTransferGranularity( minImageTransferGranularity_ ) + { + } + + VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyProperties( *reinterpret_cast( &rhs ) ) + { + } + + QueueFamilyProperties & operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( queueFlags, queueCount, timestampValidBits, minImageTransferGranularity ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyProperties const & ) const = default; +#else + bool operator==( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( queueFlags == rhs.queueFlags ) && ( queueCount == rhs.queueCount ) && ( timestampValidBits == rhs.timestampValidBits ) && + ( minImageTransferGranularity == rhs.minImageTransferGranularity ); +# endif + } + + bool operator!=( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {}; + uint32_t queueCount = {}; + uint32_t timestampValidBits = {}; + VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {}; + }; + + struct QueueFamilyProperties2 + { + using NativeType = VkQueueFamilyProperties2; + + 
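+    // Usage sketch (editorial illustration): QueueFamilyProperties, defined above,
+    // is what the classic queue-family selection loop iterates over. Assumes a
+    // valid vk::PhysicalDevice `physicalDevice`.
+    //
+    //   std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
+    //   uint32_t graphicsFamily = 0;
+    //   for ( uint32_t i = 0; i < families.size(); ++i )
+    //   {
+    //     if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
+    //     {
+    //       graphicsFamily = i;
+    //       break;
+    //     }
+    //   }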
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {},
+                                                 void *                                      pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , queueFamilyProperties( queueFamilyProperties_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueueFamilyProperties2( *reinterpret_cast<QueueFamilyProperties2 const *>( &rhs ) )
+    {
+    }
+
+    QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>( &rhs );
+      return *this;
+    }
+
+    operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyProperties2 *>( this );
+    }
+
+    operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyProperties2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::QueueFamilyProperties const &>
+# endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, queueFamilyProperties );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( QueueFamilyProperties2 const & ) const = default;
+#else
+    bool operator==( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyProperties == rhs.queueFamilyProperties );
+# endif
+    }
+
+    bool operator!=( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType         sType                 = StructureType::eQueueFamilyProperties2;
+    void *                                      pNext                 = {};
+    VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eQueueFamilyProperties2>
+  {
+    using Type = QueueFamilyProperties2;
+  };
+
+  using QueueFamilyProperties2KHR = QueueFamilyProperties2;
+
+  struct QueueFamilyQueryResultStatusPropertiesKHR
+  {
+    using NativeType = VkQueueFamilyQueryResultStatusPropertiesKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport_ = {},
+                                                                    void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , queryResultStatusSupport( queryResultStatusSupport_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    QueueFamilyQueryResultStatusPropertiesKHR( VkQueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : QueueFamilyQueryResultStatusPropertiesKHR( *reinterpret_cast<QueueFamilyQueryResultStatusPropertiesKHR const *>( &rhs ) )
+    {
+    }
+
+    QueueFamilyQueryResultStatusPropertiesKHR & operator=( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
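+
+    // Usage sketch (editorial illustration): like the checkpoint structs earlier,
+    // this is an output structure; chain it behind QueueFamilyProperties2 to learn
+    // whether a video queue family supports result-status queries. Assumes a valid
+    // vk::PhysicalDevice `physicalDevice` with VK_KHR_video_queue available.
+    //
+    //   using Chain = vk::StructureChain<vk::QueueFamilyProperties2, vk::QueueFamilyQueryResultStatusPropertiesKHR>;
+    //   auto chains = physicalDevice.getQueueFamilyProperties2<Chain>();
+    //   vk::Bool32 supported = chains[0].get<vk::QueueFamilyQueryResultStatusPropertiesKHR>().queryResultStatusSupport;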
+ + QueueFamilyQueryResultStatusPropertiesKHR & operator=( VkQueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkQueueFamilyQueryResultStatusPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyQueryResultStatusPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, queryResultStatusSupport ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyQueryResultStatusPropertiesKHR const & ) const = default; +#else + bool operator==( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queryResultStatusSupport == rhs.queryResultStatusSupport ); +# endif + } + + bool operator!=( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport = {}; + }; + + template <> + struct CppType + { + using Type = QueueFamilyQueryResultStatusPropertiesKHR; + }; + + struct QueueFamilyVideoPropertiesKHR + { + using NativeType = VkQueueFamilyVideoPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyVideoPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , videoCodecOperations( videoCodecOperations_ ) + { + } + + VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyVideoPropertiesKHR( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyVideoPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + QueueFamilyVideoPropertiesKHR & operator=( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + QueueFamilyVideoPropertiesKHR & operator=( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkQueueFamilyVideoPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueueFamilyVideoPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoCodecOperations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyVideoPropertiesKHR const & ) const = default; +#else + bool operator==( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( 
VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperations == rhs.videoCodecOperations ); +# endif + } + + bool operator!=( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyVideoPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations = {}; + }; + + template <> + struct CppType + { + using Type = QueueFamilyVideoPropertiesKHR; + }; + + struct RayTracingShaderGroupCreateInfoKHR + { + using NativeType = VkRayTracingShaderGroupCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, + uint32_t generalShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, + uint32_t closestHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, + uint32_t anyHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, + uint32_t intersectionShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, + const void * pShaderGroupCaptureReplayHandle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , type( type_ ) + , generalShader( generalShader_ ) + , closestHitShader( closestHitShader_ ) + , anyHitShader( anyHitShader_ ) + , intersectionShader( intersectionShader_ ) + , pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ ) + { + } + + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingShaderGroupCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RayTracingShaderGroupCreateInfoKHR & operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT + { + generalShader = generalShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT + { + closestHitShader = closestHitShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + { + anyHitShader = anyHitShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( 
uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT + { + intersectionShader = intersectionShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & + setPShaderGroupCaptureReplayHandle( const void * pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT + { + pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader, pShaderGroupCaptureReplayHandle ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingShaderGroupCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( generalShader == rhs.generalShader ) && + ( closestHitShader == rhs.closestHitShader ) && ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ) && + ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle ); +# endif + } + + bool operator!=( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR; + uint32_t closestHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR; + uint32_t anyHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR; + uint32_t intersectionShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR; + const void * pShaderGroupCaptureReplayHandle = {}; + }; + + template <> + struct CppType + { + using Type = RayTracingShaderGroupCreateInfoKHR; + }; + + struct RayTracingPipelineInterfaceCreateInfoKHR + { + using NativeType = VkRayTracingPipelineInterfaceCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( uint32_t maxPipelineRayPayloadSize_ = {}, + uint32_t maxPipelineRayHitAttributeSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxPipelineRayPayloadSize( maxPipelineRayPayloadSize_ ) + , maxPipelineRayHitAttributeSize( maxPipelineRayHitAttributeSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineInterfaceCreateInfoKHR( *reinterpret_cast( &rhs 
) ) + { + } + + RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RayTracingPipelineInterfaceCreateInfoKHR & operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setMaxPipelineRayPayloadSize( uint32_t maxPipelineRayPayloadSize_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayPayloadSize = maxPipelineRayPayloadSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & + setMaxPipelineRayHitAttributeSize( uint32_t maxPipelineRayHitAttributeSize_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayHitAttributeSize = maxPipelineRayHitAttributeSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxPipelineRayPayloadSize, maxPipelineRayHitAttributeSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize ) && + ( maxPipelineRayHitAttributeSize == rhs.maxPipelineRayHitAttributeSize ); +# endif + } + + bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; + const void * pNext = {}; + uint32_t maxPipelineRayPayloadSize = {}; + uint32_t maxPipelineRayHitAttributeSize = {}; + }; + + template <> + struct CppType + { + using Type = RayTracingPipelineInterfaceCreateInfoKHR; + }; + + struct RayTracingPipelineCreateInfoKHR + { + using NativeType = VkRayTracingPipelineCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ = {}, + uint32_t maxPipelineRayRecursionDepth_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + const 
VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , groupCount( groupCount_ ) + , pGroups( pGroups_ ) + , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ) + , pLibraryInfo( pLibraryInfo_ ) + , pLibraryInterface( pLibraryInterface_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } + + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ = {}, + uint32_t maxPipelineRayRecursionDepth_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ) + , pLibraryInfo( pLibraryInfo_ ) + , pLibraryInterface( pLibraryInterface_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return 
*this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR & + setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR & setGroups( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setMaxPipelineRayRecursionDepth( uint32_t maxPipelineRayRecursionDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayRecursionDepth = maxPipelineRayRecursionDepth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInfo = pLibraryInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPLibraryInterface( const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInterface = pLibraryInterface_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicState = pDynamicState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + stageCount, + pStages, + groupCount, + pGroups, + maxPipelineRayRecursionDepth, + pLibraryInfo, + pLibraryInterface, + pDynamicState, + layout, + basePipelineHandle, + basePipelineIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingPipelineCreateInfoKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && ( maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth ) && + ( pLibraryInfo == rhs.pLibraryInfo ) && ( pLibraryInterface == rhs.pLibraryInterface ) && ( pDynamicState == rhs.pDynamicState ) && + ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); +# endif + } + + bool operator!=( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups = {}; + uint32_t maxPipelineRayRecursionDepth = {}; + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {}; + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface = {}; + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + + template <> + struct CppType + { + using Type = RayTracingPipelineCreateInfoKHR; + }; + + struct RayTracingShaderGroupCreateInfoNV + { + using NativeType = VkRayTracingShaderGroupCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, + uint32_t generalShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, + uint32_t closestHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, + uint32_t anyHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, + uint32_t intersectionShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , type( type_ ) + , generalShader( generalShader_ ) + , closestHitShader( closestHitShader_ ) + , anyHitShader( anyHitShader_ ) + , intersectionShader( intersectionShader_ ) + { + } + + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingShaderGroupCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
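+    // Usage sketch (editorial illustration): a minimal KHR ray-tracing pipeline
+    // with one raygen group, using the ArrayProxy constructors defined above.
+    // `device` with VK_KHR_ray_tracing_pipeline enabled and valid `raygenStage`
+    // (a vk::PipelineShaderStageCreateInfo lvalue) and `rtLayout` are assumed.
+    //
+    //   vk::RayTracingShaderGroupCreateInfoKHR group{};
+    //   group.setType( vk::RayTracingShaderGroupTypeKHR::eGeneral ).setGeneralShader( 0 );
+    //   vk::RayTracingPipelineCreateInfoKHR createInfo{};
+    //   createInfo.setStages( raygenStage ).setGroups( group ).setMaxPipelineRayRecursionDepth( 1 ).setLayout( rtLayout );
+    //   auto result = device.createRayTracingPipelineKHR( {}, {}, createInfo );
+    //   vk::Pipeline pipeline = result.value;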
VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT + { + generalShader = generalShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT + { + closestHitShader = closestHitShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + { + anyHitShader = anyHitShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT + { + intersectionShader = intersectionShader_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingShaderGroupCreateInfoNV const & ) const = default; +#else + bool operator==( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( generalShader == rhs.generalShader ) && + ( closestHitShader == rhs.closestHitShader ) && ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ); +# endif + } + + bool operator!=( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV; + uint32_t closestHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV; + uint32_t anyHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV; + uint32_t intersectionShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV; + }; + + template <> + struct CppType + { + using Type = RayTracingShaderGroupCreateInfoNV; + }; + + struct RayTracingPipelineCreateInfoNV + { + using NativeType = VkRayTracingPipelineCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const 
VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ = {}, + uint32_t maxRecursionDepth_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , groupCount( groupCount_ ) + , pGroups( pGroups_ ) + , maxRecursionDepth( maxRecursionDepth_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } + + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ = {}, + uint32_t maxRecursionDepth_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , maxRecursionDepth( maxRecursionDepth_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoNV & + setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + 
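+    // Usage sketch (editorial illustration): the enhanced-mode setStages / setGroups
+    // overloads keep stageCount / groupCount in sync with the pointers, so they are
+    // preferable to the raw set*Count + setP* pairs when the arrays are lvalues.
+    // `layout` is an assumed, valid vk::PipelineLayout.
+    //
+    //   std::vector<vk::PipelineShaderStageCreateInfo> stages( 1 );
+    //   std::vector<vk::RayTracingShaderGroupCreateInfoNV> groups( 1 );
+    //   vk::RayTracingPipelineCreateInfoNV info{};
+    //   info.setStages( stages ).setGroups( groups ).setMaxRecursionDepth( 1 ).setLayout( layout );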
VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & + setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoNV & setGroups( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxRecursionDepth = maxRecursionDepth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, stageCount, pStages, groupCount, pGroups, maxRecursionDepth, layout, basePipelineHandle, basePipelineIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingPipelineCreateInfoNV const & ) const = default; +#else + bool operator==( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( layout == rhs.layout ) && + ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); +# endif + } + + bool operator!=( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups = {}; + uint32_t maxRecursionDepth = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + + template <> + struct CppType + { + using Type = RayTracingPipelineCreateInfoNV; + }; + + struct RefreshCycleDurationGOOGLE + { + using NativeType = 
VkRefreshCycleDurationGOOGLE; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT : refreshDuration( refreshDuration_ ) {} + + VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + : RefreshCycleDurationGOOGLE( *reinterpret_cast( &rhs ) ) + { + } + + RefreshCycleDurationGOOGLE & operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( refreshDuration ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RefreshCycleDurationGOOGLE const & ) const = default; +#else + bool operator==( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( refreshDuration == rhs.refreshDuration ); +# endif + } + + bool operator!=( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint64_t refreshDuration = {}; + }; + + struct ReleaseSwapchainImagesInfoEXT + { + using NativeType = VkReleaseSwapchainImagesInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eReleaseSwapchainImagesInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ReleaseSwapchainImagesInfoEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, + uint32_t imageIndexCount_ = {}, + const uint32_t * pImageIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , swapchain( swapchain_ ) + , imageIndexCount( imageIndexCount_ ) + , pImageIndices( pImageIndices_ ) + { + } + + VULKAN_HPP_CONSTEXPR ReleaseSwapchainImagesInfoEXT( ReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ReleaseSwapchainImagesInfoEXT( VkReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ReleaseSwapchainImagesInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ReleaseSwapchainImagesInfoEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchain( swapchain_ ), imageIndexCount( static_cast( imageIndices_.size() ) ), pImageIndices( imageIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ReleaseSwapchainImagesInfoEXT & operator=( ReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ReleaseSwapchainImagesInfoEXT & operator=( VkReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this 
= *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setImageIndexCount( uint32_t imageIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + imageIndexCount = imageIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT + { + pImageIndices = pImageIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ReleaseSwapchainImagesInfoEXT & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ ) VULKAN_HPP_NOEXCEPT + { + imageIndexCount = static_cast( imageIndices_.size() ); + pImageIndices = imageIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkReleaseSwapchainImagesInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkReleaseSwapchainImagesInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchain, imageIndexCount, pImageIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ReleaseSwapchainImagesInfoEXT const & ) const = default; +#else + bool operator==( ReleaseSwapchainImagesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( imageIndexCount == rhs.imageIndexCount ) && + ( pImageIndices == rhs.pImageIndices ); +# endif + } + + bool operator!=( ReleaseSwapchainImagesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eReleaseSwapchainImagesInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint32_t imageIndexCount = {}; + const uint32_t * pImageIndices = {}; + }; + + template <> + struct CppType + { + using Type = ReleaseSwapchainImagesInfoEXT; + }; + + struct RenderPassAttachmentBeginInfo + { + using NativeType = VkRenderPassAttachmentBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + { + } + + VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) 
VULKAN_HPP_NOEXCEPT + : RenderPassAttachmentBeginInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassAttachmentBeginInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), attachmentCount( static_cast( attachments_.size() ) ), pAttachments( attachments_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassAttachmentBeginInfo & + setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, attachmentCount, pAttachments ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassAttachmentBeginInfo const & ) const = default; +#else + bool operator==( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ); +# endif + } + + bool operator!=( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo; + const void * pNext = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassAttachmentBeginInfo; + }; + + using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo; + + struct RenderPassBeginInfo + { + using NativeType = VkRenderPassBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
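+ // [Editorial sketch, not part of the generated header] RenderPassAttachmentBeginInfo
+ // supplies the concrete image views for an imageless framebuffer
+ // (VK_KHR_imageless_framebuffer); it is chained into the pNext of the RenderPassBeginInfo
+ // used when recording, e.g. with a hypothetical `view`:
+ //   vk::RenderPassAttachmentBeginInfo attachBegin( 1, &view );
+ //   // then: renderPassBeginInfo.setPNext( &attachBegin );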
VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, + uint32_t clearValueCount_ = {}, + const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , renderPass( renderPass_ ) + , framebuffer( framebuffer_ ) + , renderArea( renderArea_ ) + , clearValueCount( clearValueCount_ ) + , pClearValues( pClearValues_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderPassBeginInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & clearValues_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , renderPass( renderPass_ ) + , framebuffer( framebuffer_ ) + , renderArea( renderArea_ ) + , clearValueCount( static_cast( clearValues_.size() ) ) + , pClearValues( clearValues_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + { + renderPass = renderPass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT + { + framebuffer = framebuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT + { + renderArea = renderArea_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT + { + clearValueCount = clearValueCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT + { + pClearValues = pClearValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassBeginInfo & + setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & clearValues_ ) VULKAN_HPP_NOEXCEPT + { + clearValueCount = static_cast( clearValues_.size() ); + pClearValues = clearValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
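+ // [Editorial sketch, not part of the generated header] Typical RenderPassBeginInfo use,
+ // assuming hypothetical `renderPass`, `framebuffer`, `extent`, and command buffer `cmd`:
+ //   vk::ClearValue clear( vk::ClearColorValue( std::array<float, 4>{ 0.f, 0.f, 0.f, 1.f } ) );
+ //   vk::RenderPassBeginInfo rpBegin( renderPass, framebuffer, vk::Rect2D( { 0, 0 }, extent ), 1, &clear );
+ //   cmd.beginRenderPass( rpBegin, vk::SubpassContents::eInline );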
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, renderPass, framebuffer, renderArea, clearValueCount, pClearValues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassBeginInfo const & ) const = default; +#else + bool operator==( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( framebuffer == rhs.framebuffer ) && + ( renderArea == rhs.renderArea ) && ( clearValueCount == rhs.clearValueCount ) && ( pClearValues == rhs.pClearValues ); +# endif + } + + bool operator!=( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; + VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; + uint32_t clearValueCount = {}; + const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassBeginInfo; + }; + + struct SubpassDescription + { + using NativeType = VkSubpassDescription; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t inputAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, + uint32_t preserveAttachmentCount_ = {}, + const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , inputAttachmentCount( inputAttachmentCount_ ) + , pInputAttachments( pInputAttachments_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachments( pColorAttachments_ ) + , pResolveAttachments( pResolveAttachments_ ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( preserveAttachmentCount_ ) + , pPreserveAttachments( pPreserveAttachments_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDescription( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ = {} ) + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , inputAttachmentCount( static_cast( 
inputAttachments_.size() ) ) + , pInputAttachments( inputAttachments_.data() ) + , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) + , pColorAttachments( colorAttachments_.data() ) + , pResolveAttachments( resolveAttachments_.data() ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( static_cast( preserveAttachments_.size() ) ) + , pPreserveAttachments( preserveAttachments_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) ); +# else + if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubpassDescription & operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = inputAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pInputAttachments = pInputAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setInputAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = static_cast( inputAttachments_.size() ); + pInputAttachments = inputAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachments = pColorAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setColorAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pResolveAttachments = pResolveAttachments_; + 
return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setResolveAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( resolveAttachments_.size() ); + pResolveAttachments = resolveAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = preserveAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pPreserveAttachments = pPreserveAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & + setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = static_cast( preserveAttachments_.size() ); + pPreserveAttachments = preserveAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( flags, + pipelineBindPoint, + inputAttachmentCount, + pInputAttachments, + colorAttachmentCount, + pColorAttachments, + pResolveAttachments, + pDepthStencilAttachment, + preserveAttachmentCount, + pPreserveAttachments ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescription const & ) const = default; +#else + bool operator==( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( inputAttachmentCount == rhs.inputAttachmentCount ) && + ( pInputAttachments == rhs.pInputAttachments ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachments == rhs.pColorAttachments ) && ( pResolveAttachments == rhs.pResolveAttachments ) && + ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && + ( pPreserveAttachments == rhs.pPreserveAttachments ); +# endif + } + + bool operator!=( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments = {}; + const 
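+ // [Editorial sketch, not part of the generated header] A minimal single-color-attachment
+ // subpass built from this struct (attachment 0 of the render pass):
+ //   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
+ //   vk::SubpassDescription subpass( {}, vk::PipelineBindPoint::eGraphics, 0, nullptr, 1, &colorRef );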
VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t * pPreserveAttachments = {}; + }; + + struct SubpassDependency + { + using NativeType = VkSubpassDependency; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDependency( uint32_t srcSubpass_ = {}, + uint32_t dstSubpass_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubpass( srcSubpass_ ) + , dstSubpass( dstSubpass_ ) + , srcStageMask( srcStageMask_ ) + , dstStageMask( dstStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , dependencyFlags( dependencyFlags_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDependency( *reinterpret_cast( &rhs ) ) {} + + SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT + { + srcSubpass = srcSubpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT + { + dstSubpass = dstSubpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + { + dependencyFlags = dependencyFlags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
SubpassDependency const & ) const = default; +#else + bool operator==( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) && ( srcStageMask == rhs.srcStageMask ) && + ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) && + ( dependencyFlags == rhs.dependencyFlags ); +# endif + } + + bool operator!=( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t srcSubpass = {}; + uint32_t dstSubpass = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + }; + + struct RenderPassCreateInfo + { + using NativeType = VkRenderPassCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ = {}, + uint32_t subpassCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ = {}, + uint32_t dependencyCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , subpassCount( subpassCount_ ) + , pSubpasses( pSubpasses_ ) + , dependencyCount( dependencyCount_ ) + , pDependencies( pDependencies_ ) + { + } + + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , subpassCount( static_cast( subpasses_.size() ) ) + , pSubpasses( subpasses_.data() ) + , dependencyCount( static_cast( dependencies_.size() ) ) + , pDependencies( dependencies_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return 
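+ // [Editorial sketch, not part of the generated header] The classic external-to-first-subpass
+ // dependency for a color pass:
+ //   vk::SubpassDependency dep( VK_SUBPASS_EXTERNAL, 0,
+ //                              vk::PipelineStageFlagBits::eColorAttachmentOutput,
+ //                              vk::PipelineStageFlagBits::eColorAttachmentOutput,
+ //                              {}, vk::AccessFlagBits::eColorAttachmentWrite );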
*this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = subpassCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT + { + pSubpasses = pSubpasses_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo & + setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = static_cast( subpasses_.size() ); + pSubpasses = subpasses_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT + { + pDependencies = pDependencies_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo & + setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( dependencies_.size() ); + pDependencies = dependencies_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassCreateInfo const & ) const = default; +#else + bool operator==( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( attachmentCount == rhs.attachmentCount ) && + ( pAttachments == rhs.pAttachments ) && ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) && + ( dependencyCount == rhs.dependencyCount ) && ( 
pDependencies == rhs.pDependencies ); +# endif + } + + bool operator!=( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments = {}; + uint32_t subpassCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses = {}; + uint32_t dependencyCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassCreateInfo; + }; + + struct SubpassDescription2 + { + using NativeType = VkSubpassDescription2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t viewMask_ = {}, + uint32_t inputAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, + uint32_t preserveAttachmentCount_ = {}, + const uint32_t * pPreserveAttachments_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , viewMask( viewMask_ ) + , inputAttachmentCount( inputAttachmentCount_ ) + , pInputAttachments( pInputAttachments_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachments( pColorAttachments_ ) + , pResolveAttachments( pResolveAttachments_ ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( preserveAttachmentCount_ ) + , pPreserveAttachments( pPreserveAttachments_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDescription2( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , viewMask( viewMask_ ) + , inputAttachmentCount( static_cast( inputAttachments_.size() ) ) + , pInputAttachments( inputAttachments_.data() ) + , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) + , 
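+ // [Editorial sketch, not part of the generated header] Tying the pieces together, assuming
+ // a hypothetical vk::AttachmentDescription `colorAttachment` plus the `subpass` and `dep`
+ // variables from the notes above:
+ //   vk::RenderPassCreateInfo rpInfo( {}, 1, &colorAttachment, 1, &subpass, 1, &dep );
+ //   vk::RenderPass renderPass = device.createRenderPass( rpInfo );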
pColorAttachments( colorAttachments_.data() ) + , pResolveAttachments( resolveAttachments_.data() ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( static_cast( preserveAttachments_.size() ) ) + , pPreserveAttachments( preserveAttachments_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) ); +# else + if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubpassDescription2 & operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = inputAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pInputAttachments = pInputAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setInputAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = static_cast( inputAttachments_.size() ); + pInputAttachments = inputAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachments = pColorAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setColorAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + 
setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pResolveAttachments = pResolveAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setResolveAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( resolveAttachments_.size() ); + pResolveAttachments = resolveAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = preserveAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pPreserveAttachments = pPreserveAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & + setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = static_cast( preserveAttachments_.size() ); + pPreserveAttachments = preserveAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + pipelineBindPoint, + viewMask, + inputAttachmentCount, + pInputAttachments, + colorAttachmentCount, + pColorAttachments, + pResolveAttachments, + pDepthStencilAttachment, + preserveAttachmentCount, + pPreserveAttachments ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescription2 const & ) const = default; +#else + bool operator==( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( viewMask == rhs.viewMask ) && ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) && + ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) && + ( pResolveAttachments == rhs.pResolveAttachments ) && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && + ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && ( pPreserveAttachments == rhs.pPreserveAttachments ); +# endif + } + + bool operator!=( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2; + const void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t viewMask = {}; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t * pPreserveAttachments = {}; + }; + + template <> + struct CppType + { + using Type = SubpassDescription2; + }; + + using SubpassDescription2KHR = SubpassDescription2; + + struct SubpassDependency2 + { + using NativeType = VkSubpassDependency2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t srcSubpass_ = {}, + uint32_t dstSubpass_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, + int32_t viewOffset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , srcSubpass( srcSubpass_ ) + , dstSubpass( dstSubpass_ ) + , srcStageMask( srcStageMask_ ) + , dstStageMask( dstStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , dependencyFlags( dependencyFlags_ ) + , viewOffset( viewOffset_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDependency2( *reinterpret_cast( &rhs ) ) {} + + SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT + { + srcSubpass = srcSubpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT + { + dstSubpass = dstSubpass_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
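+ // [Editorial sketch, not part of the generated header] SubpassDescription2 adds viewMask
+ // (multiview) and a pNext chain over the original struct; a subpass rendering views 0 and 1
+ // with one color attachment might look like:
+ //   vk::AttachmentReference2 colorRef2( 0, vk::ImageLayout::eColorAttachmentOptimal );
+ //   vk::SubpassDescription2 subpass2( {}, vk::PipelineBindPoint::eGraphics, 0b11, 0, nullptr, 1, &colorRef2 );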
SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + { + dependencyFlags = dependencyFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT + { + viewOffset = viewOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags, viewOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDependency2 const & ) const = default; +#else + bool operator==( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) && + ( srcStageMask == rhs.srcStageMask ) && ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( dependencyFlags == rhs.dependencyFlags ) && ( viewOffset == rhs.viewOffset ); +# endif + } + + bool operator!=( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2; + const void * pNext = {}; + uint32_t srcSubpass = {}; + uint32_t dstSubpass = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + int32_t viewOffset = {}; + }; + + template <> + struct CppType + { + using Type = SubpassDependency2; + }; + + using SubpassDependency2KHR = SubpassDependency2; + + struct RenderPassCreateInfo2 + { + using NativeType = VkRenderPassCreateInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ = {}, + uint32_t subpassCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ = {}, + uint32_t dependencyCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ = {}, + uint32_t correlatedViewMaskCount_ = {}, + const uint32_t * pCorrelatedViewMasks_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , subpassCount( 
subpassCount_ ) + , pSubpasses( pSubpasses_ ) + , dependencyCount( dependencyCount_ ) + , pDependencies( pDependencies_ ) + , correlatedViewMaskCount( correlatedViewMaskCount_ ) + , pCorrelatedViewMasks( pCorrelatedViewMasks_ ) + { + } + + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreateInfo2( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlatedViewMasks_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , subpassCount( static_cast( subpasses_.size() ) ) + , pSubpasses( subpasses_.data() ) + , dependencyCount( static_cast( dependencies_.size() ) ) + , pDependencies( dependencies_.data() ) + , correlatedViewMaskCount( static_cast( correlatedViewMasks_.size() ) ) + , pCorrelatedViewMasks( correlatedViewMasks_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassCreateInfo2 & operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = subpassCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT + { + pSubpasses = pSubpasses_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & + setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = static_cast( subpasses_.size() ); + pSubpasses = subpasses_.data(); + return 
*this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT + { + pDependencies = pDependencies_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & + setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( dependencies_.size() ); + pDependencies = dependencies_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT + { + correlatedViewMaskCount = correlatedViewMaskCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCorrelatedViewMasks = pCorrelatedViewMasks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & + setCorrelatedViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT + { + correlatedViewMaskCount = static_cast( correlatedViewMasks_.size() ); + pCorrelatedViewMasks = correlatedViewMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + attachmentCount, + pAttachments, + subpassCount, + pSubpasses, + dependencyCount, + pDependencies, + correlatedViewMaskCount, + pCorrelatedViewMasks ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassCreateInfo2 const & ) const = default; +#else + bool operator==( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( attachmentCount == rhs.attachmentCount ) && + ( pAttachments == rhs.pAttachments ) && ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) && + ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ) && + ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks ); +# endif + } + + bool operator!=( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments = {}; + uint32_t subpassCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses = {}; + 
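+ // [Editorial sketch, not part of the generated header] createRenderPass2 consumes this
+ // struct directly (core Vulkan 1.2, or the KHR alias below with VK_KHR_create_renderpass2);
+ // `colorAttachment2` and `dep2` are hypothetical AttachmentDescription2 / SubpassDependency2
+ // instances:
+ //   vk::RenderPassCreateInfo2 rpInfo2( {}, 1, &colorAttachment2, 1, &subpass2, 1, &dep2 );
+ //   vk::RenderPass renderPass = device.createRenderPass2( rpInfo2 );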
uint32_t dependencyCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies = {}; + uint32_t correlatedViewMaskCount = {}; + const uint32_t * pCorrelatedViewMasks = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassCreateInfo2; + }; + + using RenderPassCreateInfo2KHR = RenderPassCreateInfo2; + + struct RenderPassCreationControlEXT + { + using NativeType = VkRenderPassCreationControlEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreationControlEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT( VULKAN_HPP_NAMESPACE::Bool32 disallowMerging_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , disallowMerging( disallowMerging_ ) + { + } + + VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreationControlEXT( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreationControlEXT( *reinterpret_cast( &rhs ) ) + { + } + + RenderPassCreationControlEXT & operator=( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassCreationControlEXT & operator=( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setDisallowMerging( VULKAN_HPP_NAMESPACE::Bool32 disallowMerging_ ) VULKAN_HPP_NOEXCEPT + { + disallowMerging = disallowMerging_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderPassCreationControlEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreationControlEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, disallowMerging ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassCreationControlEXT const & ) const = default; +#else + bool operator==( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disallowMerging == rhs.disallowMerging ); +# endif + } + + bool operator!=( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreationControlEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 disallowMerging = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassCreationControlEXT; + }; + + struct RenderPassCreationFeedbackInfoEXT + { + using NativeType = VkRenderPassCreationFeedbackInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT( uint32_t postMergeSubpassCount_ = {} ) VULKAN_HPP_NOEXCEPT + : 
postMergeSubpassCount( postMergeSubpassCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreationFeedbackInfoEXT( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreationFeedbackInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + RenderPassCreationFeedbackInfoEXT & operator=( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassCreationFeedbackInfoEXT & operator=( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkRenderPassCreationFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreationFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( postMergeSubpassCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassCreationFeedbackInfoEXT const & ) const = default; +#else + bool operator==( RenderPassCreationFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( postMergeSubpassCount == rhs.postMergeSubpassCount ); +# endif + } + + bool operator!=( RenderPassCreationFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t postMergeSubpassCount = {}; + }; + + struct RenderPassCreationFeedbackCreateInfoEXT + { + using NativeType = VkRenderPassCreationFeedbackCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreationFeedbackCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pRenderPassFeedback( pRenderPassFeedback_ ) + { + } + + VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreationFeedbackCreateInfoEXT( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreationFeedbackCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + RenderPassCreationFeedbackCreateInfoEXT & operator=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassCreationFeedbackCreateInfoEXT & operator=( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT & + setPRenderPassFeedback( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ ) 
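+ // [Editorial sketch, not part of the generated header] VK_EXT_subpass_merge_feedback: chain
+ // the feedback struct into the RenderPassCreateInfo2 pNext chain, create the render pass,
+ // then read back how many subpasses survived the driver's merging:
+ //   vk::RenderPassCreationFeedbackInfoEXT feedback{};
+ //   vk::RenderPassCreationFeedbackCreateInfoEXT feedbackCI( &feedback );
+ //   rpInfo2.setPNext( &feedbackCI );
+ //   // after creation, inspect feedback.postMergeSubpassCount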
VULKAN_HPP_NOEXCEPT + { + pRenderPassFeedback = pRenderPassFeedback_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderPassCreationFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pRenderPassFeedback ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassCreationFeedbackCreateInfoEXT const & ) const = default; +#else + bool operator==( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pRenderPassFeedback == rhs.pRenderPassFeedback ); +# endif + } + + bool operator!=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreationFeedbackCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassCreationFeedbackCreateInfoEXT; + }; + + struct RenderPassFragmentDensityMapCreateInfoEXT + { + using NativeType = VkRenderPassFragmentDensityMapCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , fragmentDensityMapAttachment( fragmentDensityMapAttachment_ ) + { + } + + VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassFragmentDensityMapCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassFragmentDensityMapCreateInfoEXT & operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & + setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapAttachment = fragmentDensityMapAttachment_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator 
VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityMapAttachment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const & ) const = default; +#else + bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment ); +# endif + } + + bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassFragmentDensityMapCreateInfoEXT; + }; + + struct RenderPassInputAttachmentAspectCreateInfo + { + using NativeType = VkRenderPassInputAttachmentAspectCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = {}, + const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , aspectReferenceCount( aspectReferenceCount_ ) + , pAspectReferences( pAspectReferences_ ) + { + } + + VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassInputAttachmentAspectCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassInputAttachmentAspectCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & aspectReferences_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), aspectReferenceCount( static_cast( aspectReferences_.size() ) ), pAspectReferences( aspectReferences_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassInputAttachmentAspectCreateInfo & operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT + { + aspectReferenceCount = aspectReferenceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & + setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT + { + pAspectReferences = pAspectReferences_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassInputAttachmentAspectCreateInfo & setAspectReferences( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & aspectReferences_ ) VULKAN_HPP_NOEXCEPT + { + aspectReferenceCount = static_cast( aspectReferences_.size() ); + pAspectReferences = aspectReferences_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, aspectReferenceCount, pAspectReferences ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const & ) const = default; +#else + bool operator==( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aspectReferenceCount == rhs.aspectReferenceCount ) && + ( pAspectReferences == rhs.pAspectReferences ); +# endif + } + + bool operator!=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; + const void * pNext = {}; + uint32_t aspectReferenceCount = {}; + const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassInputAttachmentAspectCreateInfo; + }; + + using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; + + struct RenderPassMultiviewCreateInfo + { + using NativeType = VkRenderPassMultiviewCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = {}, + const uint32_t * pViewMasks_ = {}, + uint32_t dependencyCount_ = {}, + const int32_t * pViewOffsets_ = {}, + uint32_t correlationMaskCount_ = {}, + const uint32_t * pCorrelationMasks_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , subpassCount( subpassCount_ ) + , pViewMasks( pViewMasks_ ) + , dependencyCount( dependencyCount_ ) + , pViewOffsets( pViewOffsets_ ) + , correlationMaskCount( correlationMaskCount_ ) + , pCorrelationMasks( pCorrelationMasks_ ) + { + } + + VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + + RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassMultiviewCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewMasks_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlationMasks_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , subpassCount( static_cast( viewMasks_.size() ) ) + , pViewMasks( viewMasks_.data() ) + , dependencyCount( static_cast( viewOffsets_.size() ) ) + , pViewOffsets( viewOffsets_.data() ) + , correlationMaskCount( static_cast( correlationMasks_.size() ) ) + , pCorrelationMasks( correlationMasks_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = subpassCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT + { + pViewMasks = pViewMasks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo & setViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewMasks_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = static_cast( viewMasks_.size() ); + pViewMasks = viewMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t * pViewOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pViewOffsets = pViewOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo & setViewOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( viewOffsets_.size() ); + pViewOffsets = viewOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT + { + correlationMaskCount = correlationMaskCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCorrelationMasks = pCorrelationMasks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo & + setCorrelationMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT + { + correlationMaskCount = 
static_cast( correlationMasks_.size() ); + pCorrelationMasks = correlationMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subpassCount, pViewMasks, dependencyCount, pViewOffsets, correlationMaskCount, pCorrelationMasks ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassMultiviewCreateInfo const & ) const = default; +#else + bool operator==( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassCount == rhs.subpassCount ) && ( pViewMasks == rhs.pViewMasks ) && + ( dependencyCount == rhs.dependencyCount ) && ( pViewOffsets == rhs.pViewOffsets ) && ( correlationMaskCount == rhs.correlationMaskCount ) && + ( pCorrelationMasks == rhs.pCorrelationMasks ); +# endif + } + + bool operator!=( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo; + const void * pNext = {}; + uint32_t subpassCount = {}; + const uint32_t * pViewMasks = {}; + uint32_t dependencyCount = {}; + const int32_t * pViewOffsets = {}; + uint32_t correlationMaskCount = {}; + const uint32_t * pCorrelationMasks = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassMultiviewCreateInfo; + }; + + using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; + + struct SubpassSampleLocationsEXT + { + using NativeType = VkSubpassSampleLocationsEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : subpassIndex( subpassIndex_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassSampleLocationsEXT( *reinterpret_cast( &rhs ) ) + { + } + + SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT + { + subpassIndex = subpassIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & + setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } +#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( subpassIndex, sampleLocationsInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassSampleLocationsEXT const & ) const = default; +#else + bool operator==( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( subpassIndex == rhs.subpassIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); +# endif + } + + bool operator!=( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t subpassIndex = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + }; + + struct RenderPassSampleLocationsBeginInfoEXT + { + using NativeType = VkRenderPassSampleLocationsBeginInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ = {}, + uint32_t postSubpassSampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ) + , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ) + , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ) + , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ ) + { + } + + VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassSampleLocationsBeginInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassSampleLocationsBeginInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentInitialSampleLocations_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & postSubpassSampleLocations_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , attachmentInitialSampleLocationsCount( static_cast( attachmentInitialSampleLocations_.size() ) ) + , pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() ) + , postSubpassSampleLocationsCount( static_cast( postSubpassSampleLocations_.size() ) ) + , pPostSubpassSampleLocations( postSubpassSampleLocations_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassSampleLocationsBeginInfoEXT & operator=( 
VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & + setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + { + attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & + setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentInitialSampleLocations_ ) + VULKAN_HPP_NOEXCEPT + { + attachmentInitialSampleLocationsCount = static_cast( attachmentInitialSampleLocations_.size() ); + pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & + setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + { + postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & + setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pPostSubpassSampleLocations = pPostSubpassSampleLocations_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & postSubpassSampleLocations_ ) + VULKAN_HPP_NOEXCEPT + { + postSubpassSampleLocationsCount = static_cast( postSubpassSampleLocations_.size() ); + pPostSubpassSampleLocations = postSubpassSampleLocations_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, attachmentInitialSampleLocationsCount, pAttachmentInitialSampleLocations, postSubpassSampleLocationsCount, pPostSubpassSampleLocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const & ) const = default; +#else + bool operator==( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentInitialSampleLocationsCount == 
rhs.attachmentInitialSampleLocationsCount ) && + ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) && + ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations ); +# endif + } + + bool operator!=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; + const void * pNext = {}; + uint32_t attachmentInitialSampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations = {}; + uint32_t postSubpassSampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassSampleLocationsBeginInfoEXT; + }; + + struct RenderPassStripeInfoARM + { + using NativeType = VkRenderPassStripeInfoARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassStripeInfoARM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassStripeInfoARM( VULKAN_HPP_NAMESPACE::Rect2D stripeArea_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stripeArea( stripeArea_ ) + { + } + + VULKAN_HPP_CONSTEXPR RenderPassStripeInfoARM( RenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassStripeInfoARM( VkRenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassStripeInfoARM( *reinterpret_cast( &rhs ) ) + { + } + + RenderPassStripeInfoARM & operator=( RenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassStripeInfoARM & operator=( VkRenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeInfoARM & setStripeArea( VULKAN_HPP_NAMESPACE::Rect2D const & stripeArea_ ) VULKAN_HPP_NOEXCEPT + { + stripeArea = stripeArea_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderPassStripeInfoARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassStripeInfoARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stripeArea ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassStripeInfoARM const & ) const = default; +#else + bool operator==( RenderPassStripeInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stripeArea == rhs.stripeArea ); +# endif + } + + bool operator!=( RenderPassStripeInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eRenderPassStripeInfoARM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Rect2D stripeArea = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassStripeInfoARM; + }; + + struct RenderPassStripeBeginInfoARM + { + using NativeType = VkRenderPassStripeBeginInfoARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassStripeBeginInfoARM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassStripeBeginInfoARM( uint32_t stripeInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM * pStripeInfos_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stripeInfoCount( stripeInfoCount_ ) + , pStripeInfos( pStripeInfos_ ) + { + } + + VULKAN_HPP_CONSTEXPR RenderPassStripeBeginInfoARM( RenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassStripeBeginInfoARM( VkRenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassStripeBeginInfoARM( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassStripeBeginInfoARM( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stripeInfos_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), stripeInfoCount( static_cast( stripeInfos_.size() ) ), pStripeInfos( stripeInfos_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderPassStripeBeginInfoARM & operator=( RenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassStripeBeginInfoARM & operator=( VkRenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeBeginInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeBeginInfoARM & setStripeInfoCount( uint32_t stripeInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + stripeInfoCount = stripeInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeBeginInfoARM & + setPStripeInfos( const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM * pStripeInfos_ ) VULKAN_HPP_NOEXCEPT + { + pStripeInfos = pStripeInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassStripeBeginInfoARM & setStripeInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stripeInfos_ ) VULKAN_HPP_NOEXCEPT + { + stripeInfoCount = static_cast( stripeInfos_.size() ); + pStripeInfos = stripeInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderPassStripeBeginInfoARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassStripeBeginInfoARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stripeInfoCount, pStripeInfos ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassStripeBeginInfoARM const & ) const = default; +#else + bool operator==( RenderPassStripeBeginInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( 
VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stripeInfoCount == rhs.stripeInfoCount ) && ( pStripeInfos == rhs.pStripeInfos );
+# endif
+    }
+
+    bool operator!=( RenderPassStripeBeginInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassStripeBeginInfoARM;
+    const void * pNext = {};
+    uint32_t stripeInfoCount = {};
+    const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM * pStripeInfos = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderPassStripeBeginInfoARM>
+  {
+    using Type = RenderPassStripeBeginInfoARM;
+  };
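RenderPassStripeInfoARM and RenderPassStripeBeginInfoARM come from VK_ARM_render_pass_striping, which splits the render area into stripes that the GPU can complete and signal independently. A minimal sketch, assuming the extension is enabled and a vk::RenderingInfo named renderingInfo (defined elsewhere in this header) already describes the pass; the names and extents are illustrative:

    // Sketch only: split a 1024x1024 render area into two 512-wide stripes.
    std::array<vk::RenderPassStripeInfoARM, 2> stripes = { {
      vk::RenderPassStripeInfoARM{ vk::Rect2D{ { 0, 0 }, { 512, 1024 } } },
      vk::RenderPassStripeInfoARM{ vk::Rect2D{ { 512, 0 }, { 512, 1024 } } },
    } };
    vk::RenderPassStripeBeginInfoARM stripeBegin{ stripes };  // ArrayProxy ctor fills count and pointer
    renderingInfo.pNext = &stripeBegin;                       // renderingInfo is assumed to exist
    // At submit time, RenderPassStripeSubmitInfoARM (further below) supplies one
    // SemaphoreSubmitInfo per stripe so each stripe signals its own completion.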
+  struct SemaphoreSubmitInfo
+  {
+    using NativeType = VkSemaphoreSubmitInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+                                              uint64_t value_ = {},
+                                              VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ = {},
+                                              uint32_t deviceIndex_ = {},
+                                              const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , semaphore( semaphore_ )
+      , value( value_ )
+      , stageMask( stageMask_ )
+      , deviceIndex( deviceIndex_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreSubmitInfo( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreSubmitInfo( *reinterpret_cast<SemaphoreSubmitInfo const *>( &rhs ) )
+    {
+    }
+
+    SemaphoreSubmitInfo & operator=( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SemaphoreSubmitInfo & operator=( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
+    {
+      value = value_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageMask = stageMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceIndex = deviceIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreSubmitInfo *>( this );
+    }
+
+    operator VkSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSemaphoreSubmitInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::Semaphore const &,
+               uint64_t const &,
+               VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &,
+               uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphore, value, stageMask, deviceIndex );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SemaphoreSubmitInfo const & ) const = default;
+#else
+    bool operator==( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( value == rhs.value ) && ( stageMask == rhs.stageMask ) &&
+             ( deviceIndex == rhs.deviceIndex );
+# endif
+    }
+
+    bool operator!=( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSubmitInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    uint64_t value = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask = {};
+    uint32_t deviceIndex = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreSubmitInfo>
+  {
+    using Type = SemaphoreSubmitInfo;
+  };
+
+  using SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo;
+
+  struct RenderPassStripeSubmitInfoARM
+  {
+    using NativeType = VkRenderPassStripeSubmitInfoARM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassStripeSubmitInfoARM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR RenderPassStripeSubmitInfoARM( uint32_t stripeSemaphoreInfoCount_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pStripeSemaphoreInfos_ = {},
+                                                        const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , stripeSemaphoreInfoCount( stripeSemaphoreInfoCount_ )
+      , pStripeSemaphoreInfos( pStripeSemaphoreInfos_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR RenderPassStripeSubmitInfoARM( RenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderPassStripeSubmitInfoARM( VkRenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderPassStripeSubmitInfoARM( *reinterpret_cast<RenderPassStripeSubmitInfoARM const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassStripeSubmitInfoARM( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & stripeSemaphoreInfos_,
+                                   const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , stripeSemaphoreInfoCount( static_cast<uint32_t>( stripeSemaphoreInfos_.size() ) )
+      , pStripeSemaphoreInfos( stripeSemaphoreInfos_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    RenderPassStripeSubmitInfoARM & operator=( RenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderPassStripeSubmitInfoARM & operator=( VkRenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassStripeSubmitInfoARM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderPassStripeSubmitInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassStripeSubmitInfoARM & setStripeSemaphoreInfoCount( uint32_t stripeSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stripeSemaphoreInfoCount = stripeSemaphoreInfoCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassStripeSubmitInfoARM &
+      setPStripeSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pStripeSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStripeSemaphoreInfos = pStripeSemaphoreInfos_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderPassStripeSubmitInfoARM & setStripeSemaphoreInfos(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo> const & stripeSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stripeSemaphoreInfoCount = static_cast<uint32_t>( stripeSemaphoreInfos_.size() );
+      pStripeSemaphoreInfos =
stripeSemaphoreInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderPassStripeSubmitInfoARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassStripeSubmitInfoARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stripeSemaphoreInfoCount, pStripeSemaphoreInfos ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassStripeSubmitInfoARM const & ) const = default; +#else + bool operator==( RenderPassStripeSubmitInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stripeSemaphoreInfoCount == rhs.stripeSemaphoreInfoCount ) && + ( pStripeSemaphoreInfos == rhs.pStripeSemaphoreInfos ); +# endif + } + + bool operator!=( RenderPassStripeSubmitInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassStripeSubmitInfoARM; + const void * pNext = {}; + uint32_t stripeSemaphoreInfoCount = {}; + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pStripeSemaphoreInfos = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassStripeSubmitInfoARM; + }; + + struct RenderPassSubpassFeedbackInfoEXT + { + using NativeType = VkRenderPassSubpassFeedbackInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + RenderPassSubpassFeedbackInfoEXT( VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus_ = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged, + std::array const & description_ = {}, + uint32_t postMergeIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : subpassMergeStatus( subpassMergeStatus_ ) + , description( description_ ) + , postMergeIndex( postMergeIndex_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackInfoEXT( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassSubpassFeedbackInfoEXT( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassSubpassFeedbackInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + RenderPassSubpassFeedbackInfoEXT & operator=( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassSubpassFeedbackInfoEXT & operator=( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkRenderPassSubpassFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassSubpassFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple const &, uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( subpassMergeStatus, description, postMergeIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassSubpassFeedbackInfoEXT const & ) const = default; +#else + bool operator==( 
RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( subpassMergeStatus == rhs.subpassMergeStatus ) && ( description == rhs.description ) && ( postMergeIndex == rhs.postMergeIndex ); +# endif + } + + bool operator!=( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + uint32_t postMergeIndex = {}; + }; + + struct RenderPassSubpassFeedbackCreateInfoEXT + { + using NativeType = VkRenderPassSubpassFeedbackCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pSubpassFeedback( pSubpassFeedback_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassSubpassFeedbackCreateInfoEXT( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassSubpassFeedbackCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + RenderPassSubpassFeedbackCreateInfoEXT & operator=( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassSubpassFeedbackCreateInfoEXT & operator=( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT & + setPSubpassFeedback( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ ) VULKAN_HPP_NOEXCEPT + { + pSubpassFeedback = pSubpassFeedback_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderPassSubpassFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassSubpassFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pSubpassFeedback ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassSubpassFeedbackCreateInfoEXT const & ) const = default; +#else + bool operator==( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pSubpassFeedback == rhs.pSubpassFeedback ); +# endif + } + + bool operator!=( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassSubpassFeedbackCreateInfoEXT; + }; + + struct RenderPassTransformBeginInfoQCOM + { + using NativeType = VkRenderPassTransformBeginInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , transform( transform_ ) + { + } + + VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassTransformBeginInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + RenderPassTransformBeginInfoQCOM & operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, transform ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassTransformBeginInfoQCOM const & ) const = default; +#else + bool operator==( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ); +# endif + } + + bool operator!=( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + }; + + template <> + struct CppType + { + using Type = RenderPassTransformBeginInfoQCOM; + }; + + struct RenderingAreaInfoKHR + { + using NativeType 
= VkRenderingAreaInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAreaInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderingAreaInfoKHR( uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachmentFormats( pColorAttachmentFormats_ ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + { + } + + VULKAN_HPP_CONSTEXPR RenderingAreaInfoKHR( RenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingAreaInfoKHR( VkRenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingAreaInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingAreaInfoKHR( uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) + , pColorAttachmentFormats( colorAttachmentFormats_.data() ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderingAreaInfoKHR & operator=( RenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderingAreaInfoKHR & operator=( VkRenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & + setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachmentFormats = pColorAttachmentFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingAreaInfoKHR & setColorAttachmentFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); + pColorAttachmentFormats = colorAttachmentFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format 
depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + depthAttachmentFormat = depthAttachmentFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + stencilAttachmentFormat = stencilAttachmentFormat_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderingAreaInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingAreaInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderingAreaInfoKHR const & ) const = default; +#else + bool operator==( RenderingAreaInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && + ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ); +# endif + } + + bool operator!=( RenderingAreaInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAreaInfoKHR; + const void * pNext = {}; + uint32_t viewMask = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {}; + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = RenderingAreaInfoKHR; + }; + + struct RenderingAttachmentInfo + { + using NativeType = VkRenderingAttachmentInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAttachmentInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , imageView( imageView_ ) + , imageLayout( imageLayout_ ) + , resolveMode( resolveMode_ ) + , resolveImageView( resolveImageView_ ) + , resolveImageLayout( resolveImageLayout_ ) + , loadOp( loadOp_ ) + , storeOp( storeOp_ ) + , 
clearValue( clearValue_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderingAttachmentInfo( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderingAttachmentInfo( *reinterpret_cast<RenderingAttachmentInfo const *>( &rhs ) )
+    {
+    }
+
+    RenderingAttachmentInfo & operator=( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderingAttachmentInfo & operator=( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageLayout = imageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      resolveMode = resolveMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageView( VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      resolveImageView = resolveImageView_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      resolveImageLayout = resolveImageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      loadOp = loadOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storeOp = storeOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValue = clearValue_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkRenderingAttachmentInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderingAttachmentInfo *>( this );
+    }
+
+    operator VkRenderingAttachmentInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderingAttachmentInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::ImageView const &,
+               VULKAN_HPP_NAMESPACE::ImageLayout const &,
+               VULKAN_HPP_NAMESPACE::ResolveModeFlagBits const &,
+               VULKAN_HPP_NAMESPACE::ImageView const &,
+               VULKAN_HPP_NAMESPACE::ImageLayout const &,
+               VULKAN_HPP_NAMESPACE::AttachmentLoadOp const &,
+               VULKAN_HPP_NAMESPACE::AttachmentStoreOp const &,
+               VULKAN_HPP_NAMESPACE::ClearValue const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageView, imageLayout, resolveMode, resolveImageView, resolveImageLayout, loadOp, storeOp, clearValue );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAttachmentInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
+    VULKAN_HPP_NAMESPACE::ImageView resolveImageView = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+    VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderingAttachmentInfo>
+  {
+    using Type = RenderingAttachmentInfo;
+  };
+
+  using RenderingAttachmentInfoKHR = RenderingAttachmentInfo;
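RenderingAttachmentInfo is the per-attachment description used by dynamic rendering (vkCmdBeginRendering / vk::CommandBuffer::beginRendering). A minimal sketch, assuming a vk::CommandBuffer named cmd, a colour vk::ImageView named colorView and a vk::Rect2D named renderArea (all illustrative); vk::RenderingInfo is defined elsewhere in this header:

    // Sketch only: one clear-on-load colour attachment for dynamic rendering.
    vk::RenderingAttachmentInfo colorAttachment{};
    colorAttachment.setImageView( colorView )  // colorView is assumed to exist
      .setImageLayout( vk::ImageLayout::eColorAttachmentOptimal )
      .setLoadOp( vk::AttachmentLoadOp::eClear )
      .setStoreOp( vk::AttachmentStoreOp::eStore )
      .setClearValue( vk::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );
    vk::RenderingInfo renderingInfo{};
    renderingInfo.setRenderArea( renderArea ).setLayerCount( 1 ).setColorAttachments( colorAttachment );
    cmd.beginRendering( renderingInfo );
    // ... record draw calls ...
    cmd.endRendering();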
+  struct RenderingFragmentDensityMapAttachmentInfoEXT
+  {
+    using NativeType = VkRenderingFragmentDensityMapAttachmentInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      RenderingFragmentDensityMapAttachmentInfoEXT( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
+                                                    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                                    const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , imageView( imageView_ )
+      , imageLayout( imageLayout_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderingFragmentDensityMapAttachmentInfoEXT( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderingFragmentDensityMapAttachmentInfoEXT( *reinterpret_cast<RenderingFragmentDensityMapAttachmentInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    RenderingFragmentDensityMapAttachmentInfoEXT & operator=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderingFragmentDensityMapAttachmentInfoEXT & operator=( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageLayout = imageLayout_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkRenderingFragmentDensityMapAttachmentInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderingFragmentDensityMapAttachmentInfoEXT *>( this );
+    }
+
+    operator VkRenderingFragmentDensityMapAttachmentInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderingFragmentDensityMapAttachmentInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &,
+               VULKAN_HPP_NAMESPACE::ImageLayout const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageView, imageLayout );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderingFragmentDensityMapAttachmentInfoEXT const & ) const = default;
+#else
+    bool operator==( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout );
+# endif
+    }
+
+    bool operator!=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT>
+  {
+    using Type = RenderingFragmentDensityMapAttachmentInfoEXT;
+  };
+
+  struct RenderingFragmentShadingRateAttachmentInfoKHR
+  {
+    using NativeType = VkRenderingFragmentShadingRateAttachmentInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      RenderingFragmentShadingRateAttachmentInfoKHR( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
+                                                     VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                                     VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {},
+                                                     const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , imageView( imageView_ )
+      , imageLayout( imageLayout_ )
+      , shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      RenderingFragmentShadingRateAttachmentInfoKHR( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderingFragmentShadingRateAttachmentInfoKHR( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : RenderingFragmentShadingRateAttachmentInfoKHR( *reinterpret_cast<RenderingFragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    RenderingFragmentShadingRateAttachmentInfoKHR & operator=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderingFragmentShadingRateAttachmentInfoKHR & operator=( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageLayout = imageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR &
+      setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkRenderingFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderingFragmentShadingRateAttachmentInfoKHR *>( this );
+    }
+
+    operator VkRenderingFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderingFragmentShadingRateAttachmentInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &,
+               VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, imageView, imageLayout, shadingRateAttachmentTexelSize );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderingFragmentShadingRateAttachmentInfoKHR const & ) const = default;
+#else
+    bool operator==( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout ) &&
+             ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
+# endif
+    }
+
+    bool operator!=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR>
+  {
+    using Type = RenderingFragmentShadingRateAttachmentInfoKHR;
+  };
+
+  struct RenderingInfo
+  {
+    using NativeType = VkRenderingInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {},
+                                           VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {},
+                                           uint32_t layerCount_ = {},
+                                           uint32_t viewMask_ = {},
+                                           uint32_t colorAttachmentCount_ = {},
+                                           const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ = {},
+                                           const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {},
+                                           const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {},
+                                           const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , renderArea( renderArea_ )
+      , layerCount( layerCount_ )
+      , viewMask( viewMask_ )
+      , colorAttachmentCount( colorAttachmentCount_ )
+      , pColorAttachments( pColorAttachments_ )
+      , pDepthAttachment( pDepthAttachment_ )
+      , pStencilAttachment( pStencilAttachment_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    RenderingInfo( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderingInfo( *reinterpret_cast<RenderingInfo const *>( &rhs ) ) {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_,
+                   VULKAN_HPP_NAMESPACE::Rect2D renderArea_,
+                   uint32_t layerCount_,
+                   uint32_t viewMask_,
+                   VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo> const & colorAttachments_,
+                   const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {},
+                   const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {},
+                   const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , renderArea( renderArea_ )
+      , layerCount( layerCount_ )
+      , viewMask( viewMask_ )
+      , colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) )
+      , pColorAttachments( colorAttachments_.data() )
+      , pDepthAttachment( pDepthAttachment_ )
+      , pStencilAttachment( pStencilAttachment_ )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    RenderingInfo & operator=( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    RenderingInfo & operator=( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderingInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderArea = renderArea_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewMask = viewMask_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = colorAttachmentCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPColorAttachments( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorAttachments = pColorAttachments_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    RenderingInfo & setColorAttachments(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
+      pColorAttachments    = colorAttachments_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPDepthAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthAttachment = pDepthAttachment_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 RenderingInfo &
+      setPStencilAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStencilAttachment = pStencilAttachment_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkRenderingInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderingInfo *>( this );
+    }
+
+    operator VkRenderingInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderingInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RenderingFlags const &,
+               VULKAN_HPP_NAMESPACE::Rect2D const &, uint32_t const &, uint32_t const &, uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &,
+               const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, renderArea, layerCount, viewMask, colorAttachmentCount, pColorAttachments, pDepthAttachment, pStencilAttachment );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( RenderingInfo const & ) const = default;
+#else
+    bool operator==( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( renderArea == rhs.renderArea ) &&
+             ( layerCount == rhs.layerCount ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) &&
+             ( pColorAttachments == rhs.pColorAttachments ) && ( pDepthAttachment == rhs.pDepthAttachment ) && ( pStencilAttachment == rhs.pStencilAttachment );
+# endif
+    }
+
+    bool operator!=( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderingFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
+    uint32_t layerCount = {};
+    uint32_t viewMask = {};
+    uint32_t colorAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments = {};
+    const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment = {};
+    const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eRenderingInfo>
+  {
+    using Type = RenderingInfo;
+  };
+
+  using RenderingInfoKHR = RenderingInfo;
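+  // --- editorial note (not part of the generated Vulkan-Hpp source) -------------------------
+  // Sketch of how RenderingInfo ties the attachments together; assumes `cmd` is a
+  // vk::CommandBuffer recording with the dynamicRendering feature enabled, and that
+  // `colorAttachment` and `extent` exist as in the sketch above:
+  //
+  //   vk::RenderingInfo renderingInfo{};
+  //   renderingInfo.setRenderArea( vk::Rect2D( vk::Offset2D( 0, 0 ), extent ) )
+  //       .setLayerCount( 1 )
+  //       .setColorAttachments( colorAttachment );  // fills colorAttachmentCount + pColorAttachments
+  //   cmd.beginRendering( renderingInfo );
+  //   // ... draw calls ...
+  //   cmd.endRendering();
+  // -------------------------------------------------------------------------------------------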
+  struct ResolveImageInfo2
+  {
+    using NativeType = VkResolveImageInfo2;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ResolveImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {},
+                                            VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                            VULKAN_HPP_NAMESPACE::Image dstImage_ = {},
+                                            VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                            uint32_t regionCount_ = {},
+                                            const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ = {},
+                                            const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , srcImage( srcImage_ )
+      , srcImageLayout( srcImageLayout_ )
+      , dstImage( dstImage_ )
+      , dstImageLayout( dstImageLayout_ )
+      , regionCount( regionCount_ )
+      , pRegions( pRegions_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ResolveImageInfo2( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ResolveImageInfo2( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : ResolveImageInfo2( *reinterpret_cast<ResolveImageInfo2 const *>( &rhs ) ) {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ResolveImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_,
+                       VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
+                       VULKAN_HPP_NAMESPACE::Image dstImage_,
+                       VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
+                       VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2> const & regions_,
+                       const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , srcImage( srcImage_ )
+      , srcImageLayout( srcImageLayout_ )
+      , dstImage( dstImage_ )
+      , dstImageLayout( dstImageLayout_ )
+      , regionCount( static_cast<uint32_t>( regions_.size() ) )
+      , pRegions( regions_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    ResolveImageInfo2 & operator=( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ResolveImageInfo2 & operator=( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ResolveImageInfo2 const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImage = srcImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcImageLayout = srcImageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImage = dstImage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstImageLayout = dstImageLayout_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = regionCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ResolveImageInfo2 &
+      setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2> const & regions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      regionCount = static_cast<uint32_t>( regions_.size() );
+      pRegions    = regions_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkResolveImageInfo2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkResolveImageInfo2 *>( this );
+    }
+
+    operator VkResolveImageInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkResolveImageInfo2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Image const &,
+               VULKAN_HPP_NAMESPACE::ImageLayout const &, VULKAN_HPP_NAMESPACE::Image const &, VULKAN_HPP_NAMESPACE::ImageLayout const &,
+               uint32_t const &, const VULKAN_HPP_NAMESPACE::ImageResolve2 * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ResolveImageInfo2 const & ) const = default;
+#else
+    bool operator==( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
+             ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
+# endif
+    }
+
+    bool operator!=( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Image srcImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::Image dstImage = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t regionCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eResolveImageInfo2>
+  {
+    using Type = ResolveImageInfo2;
+  };
+
+  using ResolveImageInfo2KHR = ResolveImageInfo2;
+
+  struct SamplerBlockMatchWindowCreateInfoQCOM
+  {
+    using NativeType = VkSamplerBlockMatchWindowCreateInfoQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerBlockMatchWindowCreateInfoQCOM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerBlockMatchWindowCreateInfoQCOM(
+      VULKAN_HPP_NAMESPACE::Extent2D windowExtent_ = {},
+      VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM windowCompareMode_ = VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM::eMin,
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , windowExtent( windowExtent_ )
+      , windowCompareMode( windowCompareMode_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SamplerBlockMatchWindowCreateInfoQCOM( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerBlockMatchWindowCreateInfoQCOM( VkSamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerBlockMatchWindowCreateInfoQCOM( *reinterpret_cast<SamplerBlockMatchWindowCreateInfoQCOM const *>( &rhs ) )
+    {
+    }
+
+    SamplerBlockMatchWindowCreateInfoQCOM & operator=( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerBlockMatchWindowCreateInfoQCOM & operator=( VkSamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerBlockMatchWindowCreateInfoQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerBlockMatchWindowCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerBlockMatchWindowCreateInfoQCOM & setWindowExtent( VULKAN_HPP_NAMESPACE::Extent2D const & windowExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      windowExtent = windowExtent_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerBlockMatchWindowCreateInfoQCOM &
+      setWindowCompareMode( VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM windowCompareMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      windowCompareMode = windowCompareMode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSamplerBlockMatchWindowCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerBlockMatchWindowCreateInfoQCOM *>( this );
+    }
+
+    operator VkSamplerBlockMatchWindowCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerBlockMatchWindowCreateInfoQCOM *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &,
+               VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, windowExtent, windowCompareMode );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerBlockMatchWindowCreateInfoQCOM const & ) const = default;
+#else
+    bool operator==( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( windowExtent == rhs.windowExtent ) && ( windowCompareMode == rhs.windowCompareMode );
+# endif
+    }
+
+    bool operator!=( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerBlockMatchWindowCreateInfoQCOM;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D windowExtent = {};
+    VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM windowCompareMode = VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM::eMin;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerBlockMatchWindowCreateInfoQCOM>
+  {
+    using Type = SamplerBlockMatchWindowCreateInfoQCOM;
+  };
+
+  struct SamplerBorderColorComponentMappingCreateInfoEXT
+  {
+    using NativeType = VkSamplerBorderColorComponentMappingCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      SamplerBorderColorComponentMappingCreateInfoEXT( VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
+                                                       VULKAN_HPP_NAMESPACE::Bool32 srgb_ = {},
+                                                       const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , components( components_ )
+      , srgb( srgb_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      SamplerBorderColorComponentMappingCreateInfoEXT( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerBorderColorComponentMappingCreateInfoEXT( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerBorderColorComponentMappingCreateInfoEXT( *reinterpret_cast<SamplerBorderColorComponentMappingCreateInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    SamplerBorderColorComponentMappingCreateInfoEXT & operator=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerBorderColorComponentMappingCreateInfoEXT & operator=( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT &
+      setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
+    {
+      components = components_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setSrgb( VULKAN_HPP_NAMESPACE::Bool32 srgb_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srgb = srgb_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSamplerBorderColorComponentMappingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerBorderColorComponentMappingCreateInfoEXT *>( this );
+    }
+
+    operator VkSamplerBorderColorComponentMappingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerBorderColorComponentMappingCreateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, components, srgb );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerBorderColorComponentMappingCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( components == rhs.components ) && ( srgb == rhs.srgb );
+# endif
+    }
+
+    bool operator!=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+    VULKAN_HPP_NAMESPACE::Bool32 srgb = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT>
+  {
+    using Type = SamplerBorderColorComponentMappingCreateInfoEXT;
+  };
+
+  struct SamplerCaptureDescriptorDataInfoEXT
+  {
+    using NativeType = VkSamplerCaptureDescriptorDataInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCaptureDescriptorDataInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , sampler( sampler_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SamplerCaptureDescriptorDataInfoEXT( SamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerCaptureDescriptorDataInfoEXT( VkSamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerCaptureDescriptorDataInfoEXT( *reinterpret_cast<SamplerCaptureDescriptorDataInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    SamplerCaptureDescriptorDataInfoEXT & operator=( SamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerCaptureDescriptorDataInfoEXT & operator=( VkSamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCaptureDescriptorDataInfoEXT & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampler = sampler_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSamplerCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( this );
+    }
+
+    operator VkSamplerCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerCaptureDescriptorDataInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Sampler const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, sampler );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerCaptureDescriptorDataInfoEXT const & ) const = default;
+#else
+    bool operator==( SamplerCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampler == rhs.sampler );
+# endif
+    }
+
+    bool operator!=( SamplerCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCaptureDescriptorDataInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerCaptureDescriptorDataInfoEXT>
+  {
+    using Type = SamplerCaptureDescriptorDataInfoEXT;
+  };
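+  // --- editorial note (not part of the generated Vulkan-Hpp source) -------------------------
+  // Sketch: with VK_EXT_descriptor_buffer's capture/replay feature, this struct is the input
+  // to vkGetSamplerOpaqueCaptureDescriptorDataEXT. `device`, `sampler` and `dataSize` (taken
+  // from VkPhysicalDeviceDescriptorBufferPropertiesEXT) are assumed to exist:
+  //
+  //   vk::SamplerCaptureDescriptorDataInfoEXT info{};
+  //   info.setSampler( sampler );
+  //   std::vector<std::byte> data( dataSize );
+  //   VkSamplerCaptureDescriptorDataInfoEXT cInfo = info;  // uses the conversion operator above
+  //   vkGetSamplerOpaqueCaptureDescriptorDataEXT( device, &cInfo, data.data() );
+  // -------------------------------------------------------------------------------------------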
+  struct SamplerCreateInfo
+  {
+    using NativeType = VkSamplerCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerCreateInfo( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {},
+                                            VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+                                            VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+                                            VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest,
+                                            VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+                                            VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+                                            VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+                                            float mipLodBias_ = {},
+                                            VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {},
+                                            float maxAnisotropy_ = {},
+                                            VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {},
+                                            VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+                                            float minLod_ = {},
+                                            float maxLod_ = {},
+                                            VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack,
+                                            VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {},
+                                            const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , magFilter( magFilter_ )
+      , minFilter( minFilter_ )
+      , mipmapMode( mipmapMode_ )
+      , addressModeU( addressModeU_ )
+      , addressModeV( addressModeV_ )
+      , addressModeW( addressModeW_ )
+      , mipLodBias( mipLodBias_ )
+      , anisotropyEnable( anisotropyEnable_ )
+      , maxAnisotropy( maxAnisotropy_ )
+      , compareEnable( compareEnable_ )
+      , compareOp( compareOp_ )
+      , minLod( minLod_ )
+      , maxLod( maxLod_ )
+      , borderColor( borderColor_ )
+      , unnormalizedCoordinates( unnormalizedCoordinates_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SamplerCreateInfo( *reinterpret_cast<SamplerCreateInfo const *>( &rhs ) ) {}
+
+    SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      magFilter = magFilter_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minFilter = minFilter_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipmapMode = mipmapMode_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeU = addressModeU_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeV = addressModeV_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeW = addressModeW_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLodBias = mipLodBias_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      anisotropyEnable = anisotropyEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxAnisotropy = maxAnisotropy_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareEnable = compareEnable_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareOp = compareOp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minLod = minLod_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxLod = maxLod_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      borderColor = borderColor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      unnormalizedCoordinates = unnormalizedCoordinates_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerCreateInfo *>( this );
+    }
+
+    operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerCreateFlags const &,
+               VULKAN_HPP_NAMESPACE::Filter const &, VULKAN_HPP_NAMESPACE::Filter const &, VULKAN_HPP_NAMESPACE::SamplerMipmapMode const &,
+               VULKAN_HPP_NAMESPACE::SamplerAddressMode const &, VULKAN_HPP_NAMESPACE::SamplerAddressMode const &,
+               VULKAN_HPP_NAMESPACE::SamplerAddressMode const &, float const &, VULKAN_HPP_NAMESPACE::Bool32 const &, float const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::CompareOp const &, float const &, float const &,
+               VULKAN_HPP_NAMESPACE::BorderColor const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       flags,
+                       magFilter,
+                       minFilter,
+                       mipmapMode,
+                       addressModeU,
+                       addressModeV,
+                       addressModeW,
+                       mipLodBias,
+                       anisotropyEnable,
+                       maxAnisotropy,
+                       compareEnable,
+                       compareOp,
+                       minLod,
+                       maxLod,
+                       borderColor,
+                       unnormalizedCoordinates );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerCreateInfo const & ) const = default;
+#else
+    bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) &&
+             ( mipmapMode == rhs.mipmapMode ) && ( addressModeU == rhs.addressModeU ) && ( addressModeV == rhs.addressModeV ) &&
+             ( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) && ( anisotropyEnable == rhs.anisotropyEnable ) &&
+             ( maxAnisotropy == rhs.maxAnisotropy ) && ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) &&
+             ( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
+# endif
+    }
+
+    bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+    VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+    VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+    float mipLodBias = {};
+    VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {};
+    float maxAnisotropy = {};
+    VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {};
+    VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+    float minLod = {};
+    float maxLod = {};
+    VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack;
+    VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerCreateInfo>
+  {
+    using Type = SamplerCreateInfo;
+  };
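+  // --- editorial note (not part of the generated Vulkan-Hpp source) -------------------------
+  // Typical creation sketch; assumes `device` is a vk::Device with the samplerAnisotropy
+  // feature enabled (the anisotropy limit should really come from the device properties):
+  //
+  //   vk::SamplerCreateInfo samplerInfo{};
+  //   samplerInfo.setMagFilter( vk::Filter::eLinear )
+  //       .setMinFilter( vk::Filter::eLinear )
+  //       .setMipmapMode( vk::SamplerMipmapMode::eLinear )
+  //       .setAddressModeU( vk::SamplerAddressMode::eRepeat )
+  //       .setAddressModeV( vk::SamplerAddressMode::eRepeat )
+  //       .setAddressModeW( vk::SamplerAddressMode::eRepeat )
+  //       .setAnisotropyEnable( VK_TRUE )
+  //       .setMaxAnisotropy( 16.0f )
+  //       .setMaxLod( VK_LOD_CLAMP_NONE );
+  //   vk::Sampler sampler = device.createSampler( samplerInfo );
+  // -------------------------------------------------------------------------------------------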
+  struct SamplerCubicWeightsCreateInfoQCOM
+  {
+    using NativeType = VkSamplerCubicWeightsCreateInfoQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCubicWeightsCreateInfoQCOM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      SamplerCubicWeightsCreateInfoQCOM( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom,
+                                         const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , cubicWeights( cubicWeights_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SamplerCubicWeightsCreateInfoQCOM( SamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerCubicWeightsCreateInfoQCOM( VkSamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerCubicWeightsCreateInfoQCOM( *reinterpret_cast<SamplerCubicWeightsCreateInfoQCOM const *>( &rhs ) )
+    {
+    }
+
+    SamplerCubicWeightsCreateInfoQCOM & operator=( SamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerCubicWeightsCreateInfoQCOM & operator=( VkSamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerCubicWeightsCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCubicWeightsCreateInfoQCOM &
+      setCubicWeights( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cubicWeights = cubicWeights_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSamplerCubicWeightsCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerCubicWeightsCreateInfoQCOM *>( this );
+    }
+
+    operator VkSamplerCubicWeightsCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerCubicWeightsCreateInfoQCOM *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, cubicWeights );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerCubicWeightsCreateInfoQCOM const & ) const = default;
+#else
+    bool operator==( SamplerCubicWeightsCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicWeights == rhs.cubicWeights );
+# endif
+    }
+
+    bool operator!=( SamplerCubicWeightsCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCubicWeightsCreateInfoQCOM;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerCubicWeightsCreateInfoQCOM>
+  {
+    using Type = SamplerCubicWeightsCreateInfoQCOM;
+  };
+
+  struct SamplerCustomBorderColorCreateInfoEXT
+  {
+    using NativeType = VkSamplerCustomBorderColorCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                                   const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , customBorderColor( customBorderColor_ )
+      , format( format_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerCustomBorderColorCreateInfoEXT( *reinterpret_cast<SamplerCustomBorderColorCreateInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    SamplerCustomBorderColorCreateInfoEXT & operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerCustomBorderColorCreateInfoEXT & operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT &
+      setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      customBorderColor = customBorderColor_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT *>( this );
+    }
+
+    operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerCustomBorderColorCreateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ClearColorValue const &,
+               VULKAN_HPP_NAMESPACE::Format const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, customBorderColor, format );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerCustomBorderColorCreateInfoEXT>
+  {
+    using Type = SamplerCustomBorderColorCreateInfoEXT;
+  };
+
+  struct SamplerReductionModeCreateInfo
+  {
+    using NativeType = VkSamplerReductionModeCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      SamplerReductionModeCreateInfo( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage,
+                                      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , reductionMode( reductionMode_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerReductionModeCreateInfo( *reinterpret_cast<SamplerReductionModeCreateInfo const *>( &rhs ) )
+    {
+    }
+
+    SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      reductionMode = reductionMode_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerReductionModeCreateInfo *>( this );
+    }
+
+    operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerReductionModeCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerReductionMode const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, reductionMode );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerReductionModeCreateInfo const & ) const = default;
+#else
+    bool operator==( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reductionMode == rhs.reductionMode );
+# endif
+    }
+
+    bool operator!=( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerReductionModeCreateInfo>
+  {
+    using Type = SamplerReductionModeCreateInfo;
+  };
+
+  using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo;
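+  // --- editorial note (not part of the generated Vulkan-Hpp source) -------------------------
+  // Sketch of pNext chaining for a min/max-filtering sampler (core in Vulkan 1.2); the
+  // reduction-mode struct is hooked into the SamplerCreateInfo defined earlier in this file:
+  //
+  //   vk::SamplerReductionModeCreateInfo reduction{};
+  //   reduction.setReductionMode( vk::SamplerReductionMode::eMax );
+  //   vk::SamplerCreateInfo samplerInfo{};
+  //   samplerInfo.setPNext( &reduction );  // chained into the sampler create info
+  // -------------------------------------------------------------------------------------------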
+  struct SamplerYcbcrConversionCreateInfo
+  {
+    using NativeType = VkSamplerYcbcrConversionCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo(
+      VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+      VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
+      VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
+      VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
+      VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+      VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+      VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+      VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {},
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , format( format_ )
+      , ycbcrModel( ycbcrModel_ )
+      , ycbcrRange( ycbcrRange_ )
+      , components( components_ )
+      , xChromaOffset( xChromaOffset_ )
+      , yChromaOffset( yChromaOffset_ )
+      , chromaFilter( chromaFilter_ )
+      , forceExplicitReconstruction( forceExplicitReconstruction_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerYcbcrConversionCreateInfo( *reinterpret_cast<SamplerYcbcrConversionCreateInfo const *>( &rhs ) )
+    {
+    }
+
+    SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &
+      setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrModel = ycbcrModel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrRange = ycbcrRange_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
+    {
+      components = components_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      xChromaOffset = xChromaOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      yChromaOffset = yChromaOffset_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      chromaFilter = chromaFilter_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &
+      setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
+    {
+      forceExplicitReconstruction = forceExplicitReconstruction_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( this );
+    }
+
+    operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Format const &,
+               VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &,
+               VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &,
+               VULKAN_HPP_NAMESPACE::Filter const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, format, ycbcrModel, ycbcrRange, components, xChromaOffset, yChromaOffset, chromaFilter, forceExplicitReconstruction );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerYcbcrConversionCreateInfo const & ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( ycbcrModel == rhs.ycbcrModel ) &&
+             ( ycbcrRange == rhs.ycbcrRange ) && ( components == rhs.components ) && ( xChromaOffset == rhs.xChromaOffset ) &&
+             ( yChromaOffset == rhs.yChromaOffset ) && ( chromaFilter == rhs.chromaFilter ) &&
+             ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
+# endif
+    }
+
+    bool operator!=( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+    VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+    VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+    VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionCreateInfo>
+  {
+    using Type = SamplerYcbcrConversionCreateInfo;
+  };
+
+  using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
+
+  struct SamplerYcbcrConversionImageFormatProperties
+  {
+    using NativeType = VkSamplerYcbcrConversionImageFormatProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = {},
+                                                                      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerYcbcrConversionImageFormatProperties( *reinterpret_cast<SamplerYcbcrConversionImageFormatProperties const *>( &rhs ) )
+    {
+    }
+
+    SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerYcbcrConversionImageFormatProperties & operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties *>( this );
+    }
+
+    operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, combinedImageSamplerDescriptorCount );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerYcbcrConversionImageFormatProperties const & ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
+# endif
+    }
+
+    bool operator!=( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
+    void * pNext = {};
+    uint32_t combinedImageSamplerDescriptorCount = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionImageFormatProperties>
+  {
+    using Type = SamplerYcbcrConversionImageFormatProperties;
+  };
+
+  using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;
+
+  struct SamplerYcbcrConversionInfo
+  {
+    using NativeType = VkSamplerYcbcrConversionInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {},
+                                                     const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , conversion( conversion_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerYcbcrConversionInfo( *reinterpret_cast<SamplerYcbcrConversionInfo const *>( &rhs ) )
+    {
+    }
+
+    SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conversion = conversion_;
+      return *this;
+    }
+
+  struct SamplerYcbcrConversionInfo
+  {
+    using NativeType = VkSamplerYcbcrConversionInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , conversion( conversion_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerYcbcrConversionInfo( *reinterpret_cast<SamplerYcbcrConversionInfo const *>( &rhs ) )
+    {
+    }
+
+    SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT { conversion = conversion_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkSamplerYcbcrConversionInfo *>( this ); }
+
+    operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkSamplerYcbcrConversionInfo *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, conversion );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerYcbcrConversionInfo const & ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conversion == rhs.conversion );
+# endif
+    }
+
+    bool operator!=( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionInfo>
+  {
+    using Type = SamplerYcbcrConversionInfo;
+  };
+
+  using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
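+
+  // Minimal usage sketch (not part of the generated header): create a sampler YCbCr
+  // conversion and chain it into a sampler via SamplerYcbcrConversionInfo. `device` is
+  // an assumed, valid vk::Device with the samplerYcbcrConversion feature enabled.
+  //
+  //   vk::SamplerYcbcrConversionCreateInfo conversionInfo{};
+  //   conversionInfo.setFormat( vk::Format::eG8B8R83Plane420Unorm )
+  //     .setYcbcrModel( vk::SamplerYcbcrModelConversion::eYcbcr709 )
+  //     .setYcbcrRange( vk::SamplerYcbcrRange::eItuNarrow )
+  //     .setChromaFilter( vk::Filter::eLinear );
+  //   vk::SamplerYcbcrConversion conversion = device.createSamplerYcbcrConversion( conversionInfo );
+  //
+  //   vk::SamplerYcbcrConversionInfo conversionChain{ conversion };
+  //   vk::SamplerCreateInfo samplerInfo{};
+  //   samplerInfo.setPNext( &conversionChain );
+  //   vk::Sampler sampler = device.createSampler( samplerInfo );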
+
+  struct SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM
+  {
+    using NativeType = VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( VULKAN_HPP_NAMESPACE::Bool32 enableYDegamma_ = {},
+                                                                           VULKAN_HPP_NAMESPACE::Bool32 enableCbCrDegamma_ = {},
+                                                                           void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , enableYDegamma( enableYDegamma_ )
+      , enableCbCrDegamma( enableCbCrDegamma_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( *reinterpret_cast<SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const *>( &rhs ) )
+    {
+    }
+
+    SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & operator=( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & operator=( VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & setEnableYDegamma( VULKAN_HPP_NAMESPACE::Bool32 enableYDegamma_ ) VULKAN_HPP_NOEXCEPT { enableYDegamma = enableYDegamma_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & setEnableCbCrDegamma( VULKAN_HPP_NAMESPACE::Bool32 enableCbCrDegamma_ ) VULKAN_HPP_NOEXCEPT { enableCbCrDegamma = enableCbCrDegamma_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM *>( this ); }
+
+    operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, enableYDegamma, enableCbCrDegamma );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( enableYDegamma == rhs.enableYDegamma ) && ( enableCbCrDegamma == rhs.enableCbCrDegamma );
+# endif
+    }
+
+    bool operator!=( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 enableYDegamma = {};
+    VULKAN_HPP_NAMESPACE::Bool32 enableCbCrDegamma = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM>
+  {
+    using Type = SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM;
+  };
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  struct ScreenBufferFormatPropertiesQNX
+  {
+    using NativeType = VkScreenBufferFormatPropertiesQNX;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenBufferFormatPropertiesQNX;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ScreenBufferFormatPropertiesQNX(
+      VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+      uint64_t externalFormat_ = {},
+      uint64_t screenUsage_ = {},
+      VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {},
+      VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {},
+      VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
+      VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
+      VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+      VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , format( format_ )
+      , externalFormat( externalFormat_ )
+      , screenUsage( screenUsage_ )
+      , formatFeatures( formatFeatures_ )
+      , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ )
+      , suggestedYcbcrModel( suggestedYcbcrModel_ )
+      , suggestedYcbcrRange( suggestedYcbcrRange_ )
+      , suggestedXChromaOffset( suggestedXChromaOffset_ )
+      , suggestedYChromaOffset( suggestedYChromaOffset_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ScreenBufferFormatPropertiesQNX( ScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ScreenBufferFormatPropertiesQNX( VkScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ScreenBufferFormatPropertiesQNX( *reinterpret_cast<ScreenBufferFormatPropertiesQNX const *>( &rhs ) )
+    {
+    }
+
+    ScreenBufferFormatPropertiesQNX & operator=( ScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ScreenBufferFormatPropertiesQNX & operator=( VkScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX const *>( &rhs );
+      return *this;
+    }
+
+    operator VkScreenBufferFormatPropertiesQNX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkScreenBufferFormatPropertiesQNX *>( this ); }
+
+    operator VkScreenBufferFormatPropertiesQNX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkScreenBufferFormatPropertiesQNX *>( this ); }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Format const &, uint64_t const &, uint64_t const &, VULKAN_HPP_NAMESPACE::FormatFeatureFlags const &, VULKAN_HPP_NAMESPACE::ComponentMapping const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion const &, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &, VULKAN_HPP_NAMESPACE::ChromaLocation const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, format, externalFormat, screenUsage, formatFeatures, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ScreenBufferFormatPropertiesQNX const & ) const = default;
+# else
+    bool operator==( ScreenBufferFormatPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) &&
+             ( screenUsage == rhs.screenUsage ) && ( formatFeatures == rhs.formatFeatures ) &&
+             ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) &&
+             ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) &&
+             ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
+#  endif
+    }
+
+    bool operator!=( ScreenBufferFormatPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenBufferFormatPropertiesQNX;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    uint64_t externalFormat = {};
+    uint64_t screenUsage = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
+    VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eScreenBufferFormatPropertiesQNX>
+  {
+    using Type = ScreenBufferFormatPropertiesQNX;
+  };
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  struct ScreenBufferPropertiesQNX
+  {
+    using NativeType = VkScreenBufferPropertiesQNX;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenBufferPropertiesQNX;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ScreenBufferPropertiesQNX( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , allocationSize( allocationSize_ )
+      , memoryTypeBits( memoryTypeBits_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ScreenBufferPropertiesQNX( ScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ScreenBufferPropertiesQNX( VkScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ScreenBufferPropertiesQNX( *reinterpret_cast<ScreenBufferPropertiesQNX const *>( &rhs ) )
+    {
+    }
+
+    ScreenBufferPropertiesQNX & operator=( ScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ScreenBufferPropertiesQNX & operator=( VkScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX const *>( &rhs );
+      return *this;
+    }
+
+    operator VkScreenBufferPropertiesQNX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkScreenBufferPropertiesQNX *>( this ); }
+
+    operator VkScreenBufferPropertiesQNX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkScreenBufferPropertiesQNX *>( this ); }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, allocationSize, memoryTypeBits );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ScreenBufferPropertiesQNX const & ) const = default;
+# else
+    bool operator==( ScreenBufferPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeBits == rhs.memoryTypeBits );
+#  endif
+    }
+
+    bool operator!=( ScreenBufferPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenBufferPropertiesQNX;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
+    uint32_t memoryTypeBits = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eScreenBufferPropertiesQNX>
+  {
+    using Type = ScreenBufferPropertiesQNX;
+  };
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  struct ScreenSurfaceCreateInfoQNX
+  {
+    using NativeType = VkScreenSurfaceCreateInfoQNX;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenSurfaceCreateInfoQNX;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ = {},
+                                                     struct _screen_context * context_ = {},
+                                                     struct _screen_window * window_ = {},
+                                                     const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , context( context_ )
+      , window( window_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ScreenSurfaceCreateInfoQNX( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ScreenSurfaceCreateInfoQNX( *reinterpret_cast<ScreenSurfaceCreateInfoQNX const *>( &rhs ) )
+    {
+    }
+
+    ScreenSurfaceCreateInfoQNX & operator=( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ScreenSurfaceCreateInfoQNX & operator=( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setFlags( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setContext( struct _screen_context * context_ ) VULKAN_HPP_NOEXCEPT { context = context_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setWindow( struct _screen_window * window_ ) VULKAN_HPP_NOEXCEPT { window = window_; return *this; }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkScreenSurfaceCreateInfoQNX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( this ); }
+
+    operator VkScreenSurfaceCreateInfoQNX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkScreenSurfaceCreateInfoQNX *>( this ); }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX const &, struct _screen_context * const &, struct _screen_window * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, context, window );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ScreenSurfaceCreateInfoQNX const & ) const = default;
+# else
+    bool operator==( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( context == rhs.context ) && ( window == rhs.window );
+#  endif
+    }
+
+    bool operator!=( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenSurfaceCreateInfoQNX;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags = {};
+    struct _screen_context * context = {};
+    struct _screen_window * window = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eScreenSurfaceCreateInfoQNX>
+  {
+    using Type = ScreenSurfaceCreateInfoQNX;
+  };
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
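+
+  // Illustrative sketch only (assumes VK_USE_PLATFORM_SCREEN_QNX is defined, the
+  // VK_QNX_screen_surface extension is enabled on `instance`, and `screenContext` /
+  // `screenWindow` are valid QNX Screen handles):
+  //
+  //   vk::ScreenSurfaceCreateInfoQNX surfaceInfo{ {}, screenContext, screenWindow };
+  //   vk::SurfaceKHR surface = instance.createScreenSurfaceQNX( surfaceInfo );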
+
+  struct SemaphoreCreateInfo
+  {
+    using NativeType = VkSemaphoreCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreCreateInfo( *reinterpret_cast<SemaphoreCreateInfo const *>( &rhs ) ) {}
+
+    SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkSemaphoreCreateInfo *>( this ); }
+
+    operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkSemaphoreCreateInfo *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SemaphoreCreateInfo const & ) const = default;
+#else
+    bool operator==( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
+# endif
+    }
+
+    bool operator!=( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreCreateInfo>
+  {
+    using Type = SemaphoreCreateInfo;
+  };
+
+  struct SemaphoreGetFdInfoKHR
+  {
+    using NativeType = VkSemaphoreGetFdInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR(
+      VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , semaphore( semaphore_ )
+      , handleType( handleType_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreGetFdInfoKHR( *reinterpret_cast<SemaphoreGetFdInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( this ); }
+
+    operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkSemaphoreGetFdInfoKHR *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphore, handleType );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SemaphoreGetFdInfoKHR const & ) const = default;
+#else
+    bool operator==( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType );
+# endif
+    }
+
+    bool operator!=( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreGetFdInfoKHR>
+  {
+    using Type = SemaphoreGetFdInfoKHR;
+  };
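+
+  // Illustrative sketch only (assumes VK_KHR_external_semaphore_fd is enabled on `device`,
+  // the extension entry points are loaded, and `semaphore` was created with an exportable
+  // handle type):
+  //
+  //   vk::SemaphoreGetFdInfoKHR getFdInfo{ semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd };
+  //   int fd = device.getSemaphoreFdKHR( getFdInfo );  // ownership of the fd passes to the caller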
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct SemaphoreGetWin32HandleInfoKHR
+  {
+    using NativeType = VkSemaphoreGetWin32HandleInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR(
+      VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , semaphore( semaphore_ )
+      , handleType( handleType_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreGetWin32HandleInfoKHR( *reinterpret_cast<SemaphoreGetWin32HandleInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( this ); }
+
+    operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR *>( this ); }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphore, handleType );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SemaphoreGetWin32HandleInfoKHR const & ) const = default;
+# else
+    bool operator==( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType );
+#  endif
+    }
+
+    bool operator!=( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreGetWin32HandleInfoKHR>
+  {
+    using Type = SemaphoreGetWin32HandleInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  struct SemaphoreGetZirconHandleInfoFUCHSIA
+  {
+    using NativeType = VkSemaphoreGetZirconHandleInfoFUCHSIA;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA(
+      VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , semaphore( semaphore_ )
+      , handleType( handleType_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreGetZirconHandleInfoFUCHSIA( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreGetZirconHandleInfoFUCHSIA( *reinterpret_cast<SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs ) )
+    {
+    }
+
+    SemaphoreGetZirconHandleInfoFUCHSIA & operator=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SemaphoreGetZirconHandleInfoFUCHSIA & operator=( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSemaphoreGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( this ); }
+
+    operator VkSemaphoreGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkSemaphoreGetZirconHandleInfoFUCHSIA *>( this ); }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphore, handleType );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SemaphoreGetZirconHandleInfoFUCHSIA const & ) const = default;
+# else
+    bool operator==( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType );
+#  endif
+    }
+
+    bool operator!=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA>
+  {
+    using Type = SemaphoreGetZirconHandleInfoFUCHSIA;
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  struct SemaphoreSignalInfo
+  {
+    using NativeType = VkSemaphoreSignalInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , semaphore( semaphore_ )
+      , value( value_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreSignalInfo( *reinterpret_cast<SemaphoreSignalInfo const *>( &rhs ) ) {}
+
+    SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT { value = value_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkSemaphoreSignalInfo *>( this ); }
+
+    operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkSemaphoreSignalInfo *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Semaphore const &, uint64_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphore, value );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SemaphoreSignalInfo const & ) const = default;
+#else
+    bool operator==( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( value == rhs.value );
+# endif
+    }
+
+    bool operator!=( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    uint64_t value = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreSignalInfo>
+  {
+    using Type = SemaphoreSignalInfo;
+  };
+
+  using SemaphoreSignalInfoKHR = SemaphoreSignalInfo;
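+
+  // Illustrative sketch only (assumes `timeline` is a timeline semaphore created from a
+  // valid vk::Device `device` with the timelineSemaphore feature enabled):
+  //
+  //   device.signalSemaphore( vk::SemaphoreSignalInfo{ timeline, 42 } );  // host-side signal to value 42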
+
+  struct SemaphoreTypeCreateInfo
+  {
+    using NativeType = VkSemaphoreTypeCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary,
+                                                  uint64_t initialValue_ = {},
+                                                  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , semaphoreType( semaphoreType_ )
+      , initialValue( initialValue_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreTypeCreateInfo( *reinterpret_cast<SemaphoreTypeCreateInfo const *>( &rhs ) )
+    {
+    }
+
+    SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT { semaphoreType = semaphoreType_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT { initialValue = initialValue_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkSemaphoreTypeCreateInfo *>( this ); }
+
+    operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkSemaphoreTypeCreateInfo *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreType const &, uint64_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, semaphoreType, initialValue );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SemaphoreTypeCreateInfo const & ) const = default;
+#else
+    bool operator==( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphoreType == rhs.semaphoreType ) && ( initialValue == rhs.initialValue );
+# endif
+    }
+
+    bool operator!=( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary;
+    uint64_t initialValue = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreTypeCreateInfo>
+  {
+    using Type = SemaphoreTypeCreateInfo;
+  };
+
+  using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo;
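+
+  // Illustrative sketch only: chain SemaphoreTypeCreateInfo into SemaphoreCreateInfo::pNext
+  // to create a timeline semaphore (assumes a valid vk::Device `device` and Vulkan >= 1.2):
+  //
+  //   vk::SemaphoreTypeCreateInfo typeInfo{ vk::SemaphoreType::eTimeline, /*initialValue*/ 0 };
+  //   vk::SemaphoreCreateInfo createInfo{ {}, &typeInfo };
+  //   vk::Semaphore timeline = device.createSemaphore( createInfo );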
+
+  struct SemaphoreWaitInfo
+  {
+    using NativeType = VkSemaphoreWaitInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {},
+                                            uint32_t semaphoreCount_ = {},
+                                            const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ = {},
+                                            const uint64_t * pValues_ = {},
+                                            const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , semaphoreCount( semaphoreCount_ )
+      , pSemaphores( pSemaphores_ )
+      , pValues( pValues_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreWaitInfo( *reinterpret_cast<SemaphoreWaitInfo const *>( &rhs ) ) {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_,
+                       VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_,
+                       VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ = {},
+                       const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , semaphoreCount( static_cast<uint32_t>( semaphores_.size() ) )
+      , pSemaphores( semaphores_.data() )
+      , pValues( values_.data() )
+    {
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() );
+# else
+      if ( semaphores_.size() != values_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" );
+      }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    SemaphoreWaitInfo & operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT { semaphoreCount = semaphoreCount_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT { pSemaphores = pSemaphores_; return *this; }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SemaphoreWaitInfo & setSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphoreCount = static_cast<uint32_t>( semaphores_.size() );
+      pSemaphores = semaphores_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPValues( const uint64_t * pValues_ ) VULKAN_HPP_NOEXCEPT { pValues = pValues_; return *this; }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SemaphoreWaitInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphoreCount = static_cast<uint32_t>( values_.size() );
+      pValues = values_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkSemaphoreWaitInfo *>( this ); }
+
+    operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkSemaphoreWaitInfo *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::Semaphore * const &, const uint64_t * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, semaphoreCount, pSemaphores, pValues );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SemaphoreWaitInfo const & ) const = default;
+#else
+    bool operator==( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( semaphoreCount == rhs.semaphoreCount ) &&
+             ( pSemaphores == rhs.pSemaphores ) && ( pValues == rhs.pValues );
+# endif
+    }
+
+    bool operator!=( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {};
+    uint32_t semaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores = {};
+    const uint64_t * pValues = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreWaitInfo>
+  {
+    using Type = SemaphoreWaitInfo;
+  };
+
+  using SemaphoreWaitInfoKHR = SemaphoreWaitInfo;
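+
+  // Illustrative sketch only, continuing the timeline example above (the returned
+  // vk::Result should be checked for vk::Result::eTimeout):
+  //
+  //   uint64_t waitValue = 42;
+  //   vk::SemaphoreWaitInfo waitInfo{ {}, 1, &timeline, &waitValue };
+  //   vk::Result result = device.waitSemaphores( waitInfo, UINT64_MAX );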
+
+  struct SetDescriptorBufferOffsetsInfoEXT
+  {
+    using NativeType = VkSetDescriptorBufferOffsetsInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSetDescriptorBufferOffsetsInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SetDescriptorBufferOffsetsInfoEXT( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {},
+                                                            VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+                                                            uint32_t firstSet_ = {},
+                                                            uint32_t setCount_ = {},
+                                                            const uint32_t * pBufferIndices_ = {},
+                                                            const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets_ = {},
+                                                            const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , stageFlags( stageFlags_ )
+      , layout( layout_ )
+      , firstSet( firstSet_ )
+      , setCount( setCount_ )
+      , pBufferIndices( pBufferIndices_ )
+      , pOffsets( pOffsets_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SetDescriptorBufferOffsetsInfoEXT( SetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SetDescriptorBufferOffsetsInfoEXT( VkSetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SetDescriptorBufferOffsetsInfoEXT( *reinterpret_cast<SetDescriptorBufferOffsetsInfoEXT const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SetDescriptorBufferOffsetsInfoEXT( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_,
+                                       VULKAN_HPP_NAMESPACE::PipelineLayout layout_,
+                                       uint32_t firstSet_,
+                                       VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & bufferIndices_,
+                                       VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets_ = {},
+                                       const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , stageFlags( stageFlags_ )
+      , layout( layout_ )
+      , firstSet( firstSet_ )
+      , setCount( static_cast<uint32_t>( bufferIndices_.size() ) )
+      , pBufferIndices( bufferIndices_.data() )
+      , pOffsets( offsets_.data() )
+    {
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( bufferIndices_.size() == offsets_.size() );
+# else
+      if ( bufferIndices_.size() != offsets_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SetDescriptorBufferOffsetsInfoEXT::SetDescriptorBufferOffsetsInfoEXT: bufferIndices_.size() != offsets_.size()" );
+      }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    SetDescriptorBufferOffsetsInfoEXT & operator=( SetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SetDescriptorBufferOffsetsInfoEXT & operator=( VkSetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT { stageFlags = stageFlags_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setFirstSet( uint32_t firstSet_ ) VULKAN_HPP_NOEXCEPT { firstSet = firstSet_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setSetCount( uint32_t setCount_ ) VULKAN_HPP_NOEXCEPT { setCount = setCount_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setPBufferIndices( const uint32_t * pBufferIndices_ ) VULKAN_HPP_NOEXCEPT { pBufferIndices = pBufferIndices_; return *this; }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SetDescriptorBufferOffsetsInfoEXT & setBufferIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & bufferIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      setCount = static_cast<uint32_t>( bufferIndices_.size() );
+      pBufferIndices = bufferIndices_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setPOffsets( const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets_ ) VULKAN_HPP_NOEXCEPT { pOffsets = pOffsets_; return *this; }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    SetDescriptorBufferOffsetsInfoEXT & setOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      setCount = static_cast<uint32_t>( offsets_.size() );
+      pOffsets = offsets_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSetDescriptorBufferOffsetsInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkSetDescriptorBufferOffsetsInfoEXT *>( this ); }
+
+    operator VkSetDescriptorBufferOffsetsInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkSetDescriptorBufferOffsetsInfoEXT *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::PipelineLayout const &, uint32_t const &, uint32_t const &, const uint32_t * const &, const VULKAN_HPP_NAMESPACE::DeviceSize * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stageFlags, layout, firstSet, setCount, pBufferIndices, pOffsets );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SetDescriptorBufferOffsetsInfoEXT const & ) const = default;
+#else
+    bool operator==( SetDescriptorBufferOffsetsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( firstSet == rhs.firstSet ) &&
+             ( setCount == rhs.setCount ) && ( pBufferIndices == rhs.pBufferIndices ) && ( pOffsets == rhs.pOffsets );
+# endif
+    }
+
+    bool operator!=( SetDescriptorBufferOffsetsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSetDescriptorBufferOffsetsInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    uint32_t firstSet = {};
+    uint32_t setCount = {};
+    const uint32_t * pBufferIndices = {};
+    const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSetDescriptorBufferOffsetsInfoEXT>
+  {
+    using Type = SetDescriptorBufferOffsetsInfoEXT;
+  };
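+
+  // Illustrative sketch only (assumes VK_EXT_descriptor_buffer plus VK_KHR_maintenance6
+  // are enabled and their entry points loaded, a recording vk::CommandBuffer `cmd`, and a
+  // valid vk::PipelineLayout `layout`):
+  //
+  //   uint32_t bufferIndex = 0;
+  //   vk::DeviceSize offset = 0;
+  //   vk::SetDescriptorBufferOffsetsInfoEXT offsetsInfo{ vk::ShaderStageFlagBits::eCompute, layout, /*firstSet*/ 0, 1, &bufferIndex, &offset };
+  //   cmd.setDescriptorBufferOffsets2EXT( offsetsInfo );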
+
+  struct SetLatencyMarkerInfoNV
+  {
+    using NativeType = VkSetLatencyMarkerInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSetLatencyMarkerInfoNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SetLatencyMarkerInfoNV( uint64_t presentID_ = {},
+                                                 VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker_ = VULKAN_HPP_NAMESPACE::LatencyMarkerNV::eSimulationStart,
+                                                 const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , presentID( presentID_ )
+      , marker( marker_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SetLatencyMarkerInfoNV( SetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SetLatencyMarkerInfoNV( VkSetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SetLatencyMarkerInfoNV( *reinterpret_cast<SetLatencyMarkerInfoNV const *>( &rhs ) )
+    {
+    }
+
+    SetLatencyMarkerInfoNV & operator=( SetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SetLatencyMarkerInfoNV & operator=( VkSetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setPresentID( uint64_t presentID_ ) VULKAN_HPP_NOEXCEPT { presentID = presentID_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setMarker( VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker_ ) VULKAN_HPP_NOEXCEPT { marker = marker_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSetLatencyMarkerInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( this ); }
+
+    operator VkSetLatencyMarkerInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkSetLatencyMarkerInfoNV *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &, VULKAN_HPP_NAMESPACE::LatencyMarkerNV const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, presentID, marker );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SetLatencyMarkerInfoNV const & ) const = default;
+#else
+    bool operator==( SetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID ) && ( marker == rhs.marker );
+# endif
+    }
+
+    bool operator!=( SetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSetLatencyMarkerInfoNV;
+    const void * pNext = {};
+    uint64_t presentID = {};
+    VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker = VULKAN_HPP_NAMESPACE::LatencyMarkerNV::eSimulationStart;
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSetLatencyMarkerInfoNV>
+  {
+    using Type = SetLatencyMarkerInfoNV;
+  };
+
+  struct SetStateFlagsIndirectCommandNV
+  {
+    using NativeType = VkSetStateFlagsIndirectCommandNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT : data( data_ ) {}
+
+    VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SetStateFlagsIndirectCommandNV( *reinterpret_cast<SetStateFlagsIndirectCommandNV const *>( &rhs ) )
+    {
+    }
+
+    SetStateFlagsIndirectCommandNV & operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT { data = data_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkSetStateFlagsIndirectCommandNV *>( this ); }
+
+    operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkSetStateFlagsIndirectCommandNV *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( data );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SetStateFlagsIndirectCommandNV const & ) const = default;
+#else
+    bool operator==( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( data == rhs.data );
+# endif
+    }
+
+    bool operator!=( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
+#endif
+
+  public:
+    uint32_t data = {};
+  };
+
+  struct ShaderCreateInfoEXT
+  {
+    using NativeType = VkShaderCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_ = {},
+                                              VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex,
+                                              VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_ = {},
+                                              VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_ = VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eBinary,
+                                              size_t codeSize_ = {},
+                                              const void * pCode_ = {},
+                                              const char * pName_ = {},
+                                              uint32_t setLayoutCount_ = {},
+                                              const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {},
+                                              uint32_t pushConstantRangeCount_ = {},
+                                              const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {},
+                                              const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {},
+                                              const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , stage( stage_ )
+      , nextStage( nextStage_ )
+      , codeType( codeType_ )
+      , codeSize( codeSize_ )
+      , pCode( pCode_ )
+      , pName( pName_ )
+      , setLayoutCount( setLayoutCount_ )
+      , pSetLayouts( pSetLayouts_ )
+      , pushConstantRangeCount( pushConstantRangeCount_ )
+      , pPushConstantRanges( pPushConstantRanges_ )
+      , pSpecializationInfo( pSpecializationInfo_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ShaderCreateInfoEXT( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ShaderCreateInfoEXT( *reinterpret_cast<ShaderCreateInfoEXT const *>( &rhs ) ) {}
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    ShaderCreateInfoEXT( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_,
+                         VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_,
+                         VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_,
+                         VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_,
+                         VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & code_,
+                         const char * pName_ = {},
+                         VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ = {},
+                         VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ = {},
+                         const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {},
+                         const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , stage( stage_ )
+      , nextStage( nextStage_ )
+      , codeType( codeType_ )
+      , codeSize( code_.size() * sizeof( T ) )
+      , pCode( code_.data() )
+      , pName( pName_ )
+      , setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) )
+      , pSetLayouts( setLayouts_.data() )
+      , pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) )
+      , pPushConstantRanges( pushConstantRanges_.data() )
+      , pSpecializationInfo( pSpecializationInfo_ )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    ShaderCreateInfoEXT & operator=( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ShaderCreateInfoEXT & operator=( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT { stage = stage_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setNextStage( VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_ ) VULKAN_HPP_NOEXCEPT { nextStage = nextStage_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeType( VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_ ) VULKAN_HPP_NOEXCEPT { codeType = codeType_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT { codeSize = codeSize_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPCode( const void * pCode_ ) VULKAN_HPP_NOEXCEPT { pCode = pCode_; return *this; }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    ShaderCreateInfoEXT & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & code_ ) VULKAN_HPP_NOEXCEPT
+    {
+      codeSize = code_.size() * sizeof( T );
+      pCode = code_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT { pName = pName_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT { setLayoutCount = setLayoutCount_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT { pSetLayouts = pSetLayouts_; return *this; }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ShaderCreateInfoEXT & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
+      pSetLayouts = setLayouts_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT { pushConstantRangeCount = pushConstantRangeCount_; return *this; }
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT { pPushConstantRanges = pPushConstantRanges_; return *this; }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ShaderCreateInfoEXT & setPushConstantRanges( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
+      pPushConstantRanges = pushConstantRanges_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT { pSpecializationInfo = pSpecializationInfo_; return *this; }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkShaderCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkShaderCreateInfoEXT *>( this ); }
+
+    operator VkShaderCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkShaderCreateInfoEXT *>( this ); }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT const &, size_t const &, const void * const &, const char * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * const &, uint32_t const &, const VULKAN_HPP_NAMESPACE::PushConstantRange * const &, const VULKAN_HPP_NAMESPACE::SpecializationInfo * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, stage, nextStage, codeType, codeSize, pCode, pName, setLayoutCount, pSetLayouts, pushConstantRangeCount, pPushConstantRanges, pSpecializationInfo );
+    }
+#endif
sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = stage <=> rhs.stage; cmp != 0 ) + return cmp; + if ( auto cmp = nextStage <=> rhs.nextStage; cmp != 0 ) + return cmp; + if ( auto cmp = codeType <=> rhs.codeType; cmp != 0 ) + return cmp; + if ( auto cmp = codeSize <=> rhs.codeSize; cmp != 0 ) + return cmp; + if ( auto cmp = pCode <=> rhs.pCode; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = setLayoutCount <=> rhs.setLayoutCount; cmp != 0 ) + return cmp; + if ( auto cmp = pSetLayouts <=> rhs.pSetLayouts; cmp != 0 ) + return cmp; + if ( auto cmp = pushConstantRangeCount <=> rhs.pushConstantRangeCount; cmp != 0 ) + return cmp; + if ( auto cmp = pPushConstantRanges <=> rhs.pPushConstantRanges; cmp != 0 ) + return cmp; + if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( nextStage == rhs.nextStage ) && + ( codeType == rhs.codeType ) && ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode ) && + ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( setLayoutCount == rhs.setLayoutCount ) && + ( pSetLayouts == rhs.pSetLayouts ) && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && + ( pPushConstantRanges == rhs.pPushConstantRanges ) && ( pSpecializationInfo == rhs.pSpecializationInfo ); + } + + bool operator!=( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex; + VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage = {}; + VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType = VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eBinary; + size_t codeSize = {}; + const void * pCode = {}; + const char * pName = {}; + uint32_t setLayoutCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + uint32_t pushConstantRangeCount = {}; + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {}; + }; + + template <> + struct CppType + { + using Type = ShaderCreateInfoEXT; + }; + + struct ShaderModuleCreateInfo + { + using NativeType = VkShaderModuleCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, + size_t codeSize_ = {}, + const uint32_t * pCode_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , codeSize( codeSize_ ) + , pCode( pCode_ ) + { + } + + VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderModuleCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT + { + codeSize = codeSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPCode( const uint32_t * pCode_ ) VULKAN_HPP_NOEXCEPT + { + pCode = pCode_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderModuleCreateInfo & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_ ) VULKAN_HPP_NOEXCEPT + { + codeSize = code_.size() * 4; + pCode = code_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, codeSize, pCode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModuleCreateInfo const & ) const = default; +#else + bool operator==( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode ); +# endif + } + + bool operator!=( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {}; + size_t codeSize = {}; + const uint32_t * pCode = {}; + }; + + template <> + struct CppType + { + using Type = ShaderModuleCreateInfo; + }; + + struct ShaderModuleIdentifierEXT + { + using NativeType = VkShaderModuleIdentifierEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleIdentifierEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
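// A minimal usage sketch for the ShaderModuleCreateInfo struct above, assuming a
// ready vk::Device named `device` and SPIR-V words in `spirv` (both hypothetical,
// not part of this diff), with <vulkan/vulkan.hpp> and <vector> included and
// enhanced mode plus smart handles enabled. The ArrayProxy overload of setCode()
// takes uint32_t words and derives codeSize in bytes (code_.size() * 4) itself,
// which avoids the classic word-count/byte-count mix-up when filling the struct
// by hand.
vk::UniqueShaderModule makeShaderModule( vk::Device device, std::vector<uint32_t> const & spirv )
{
  vk::ShaderModuleCreateInfo info{};
  info.setCode( spirv );  // sets pCode = spirv.data() and codeSize = spirv.size() * 4
  return device.createShaderModuleUnique( info );
}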
VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT( uint32_t identifierSize_ = {}, + std::array const & identifier_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , identifierSize( identifierSize_ ) + , identifier( identifier_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleIdentifierEXT( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderModuleIdentifierEXT( *reinterpret_cast( &rhs ) ) + { + } + + ShaderModuleIdentifierEXT & operator=( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ShaderModuleIdentifierEXT & operator=( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkShaderModuleIdentifierEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderModuleIdentifierEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, identifierSize, identifier ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModuleIdentifierEXT const & ) const = default; +#else + bool operator==( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) && ( identifier == rhs.identifier ); +# endif + } + + bool operator!=( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleIdentifierEXT; + void * pNext = {}; + uint32_t identifierSize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D identifier = {}; + }; + + template <> + struct CppType + { + using Type = ShaderModuleIdentifierEXT; + }; + + struct ShaderModuleValidationCacheCreateInfoEXT + { + using NativeType = VkShaderModuleValidationCacheCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , validationCache( validationCache_ ) + { + } + + VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderModuleValidationCacheCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ShaderModuleValidationCacheCreateInfoEXT & operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( 
&rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & + setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT + { + validationCache = validationCache_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, validationCache ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const & ) const = default; +#else + bool operator==( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( validationCache == rhs.validationCache ); +# endif + } + + bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {}; + }; + + template <> + struct CppType + { + using Type = ShaderModuleValidationCacheCreateInfoEXT; + }; + + struct ShaderResourceUsageAMD + { + using NativeType = VkShaderResourceUsageAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = {}, + uint32_t numUsedSgprs_ = {}, + uint32_t ldsSizePerLocalWorkGroup_ = {}, + size_t ldsUsageSizeInBytes_ = {}, + size_t scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT + : numUsedVgprs( numUsedVgprs_ ) + , numUsedSgprs( numUsedSgprs_ ) + , ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ ) + , ldsUsageSizeInBytes( ldsUsageSizeInBytes_ ) + , scratchMemUsageInBytes( scratchMemUsageInBytes_ ) + { + } + + VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderResourceUsageAMD( *reinterpret_cast( &rhs ) ) + { + } + + ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( numUsedVgprs, numUsedSgprs, 
ldsSizePerLocalWorkGroup, ldsUsageSizeInBytes, scratchMemUsageInBytes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderResourceUsageAMD const & ) const = default; +#else + bool operator==( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( numUsedVgprs == rhs.numUsedVgprs ) && ( numUsedSgprs == rhs.numUsedSgprs ) && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) && + ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes ); +# endif + } + + bool operator!=( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t numUsedVgprs = {}; + uint32_t numUsedSgprs = {}; + uint32_t ldsSizePerLocalWorkGroup = {}; + size_t ldsUsageSizeInBytes = {}; + size_t scratchMemUsageInBytes = {}; + }; + + struct ShaderStatisticsInfoAMD + { + using NativeType = VkShaderStatisticsInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, + VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, + uint32_t numPhysicalVgprs_ = {}, + uint32_t numPhysicalSgprs_ = {}, + uint32_t numAvailableVgprs_ = {}, + uint32_t numAvailableSgprs_ = {}, + std::array const & computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderStageMask( shaderStageMask_ ) + , resourceUsage( resourceUsage_ ) + , numPhysicalVgprs( numPhysicalVgprs_ ) + , numPhysicalSgprs( numPhysicalSgprs_ ) + , numAvailableVgprs( numAvailableVgprs_ ) + , numAvailableSgprs( numAvailableSgprs_ ) + , computeWorkGroupSize( computeWorkGroupSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderStatisticsInfoAMD( *reinterpret_cast( &rhs ) ) + { + } + + ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( shaderStageMask, resourceUsage, numPhysicalVgprs, numPhysicalSgprs, numAvailableVgprs, numAvailableSgprs, computeWorkGroupSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderStatisticsInfoAMD const & ) const = default; +#else + bool operator==( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( shaderStageMask == rhs.shaderStageMask ) && ( resourceUsage == rhs.resourceUsage ) && ( numPhysicalVgprs == rhs.numPhysicalVgprs ) && + ( numPhysicalSgprs == rhs.numPhysicalSgprs ) && ( numAvailableVgprs == rhs.numAvailableVgprs ) && ( numAvailableSgprs == 
rhs.numAvailableSgprs ) && + ( computeWorkGroupSize == rhs.computeWorkGroupSize ); +# endif + } + + bool operator!=( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {}; + VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {}; + uint32_t numPhysicalVgprs = {}; + uint32_t numPhysicalSgprs = {}; + uint32_t numAvailableVgprs = {}; + uint32_t numAvailableSgprs = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D computeWorkGroupSize = {}; + }; + + struct SharedPresentSurfaceCapabilitiesKHR + { + using NativeType = VkSharedPresentSurfaceCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ ) + { + } + + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SharedPresentSurfaceCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SharedPresentSurfaceCapabilitiesKHR & operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, sharedPresentSupportedUsageFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const & ) const = default; +#else + bool operator==( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags ); +# endif + } + + bool operator!=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {}; + }; + + template <> + struct CppType + { + using Type = SharedPresentSurfaceCapabilitiesKHR; + }; + + struct SparseImageFormatProperties + { + using NativeType = VkSparseImageFormatProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( 
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , imageGranularity( imageGranularity_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageFormatProperties( *reinterpret_cast<SparseImageFormatProperties const *>( &rhs ) ) + { + } + + SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<SparseImageFormatProperties const *>( &rhs ); + return *this; + } + + operator VkSparseImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkSparseImageFormatProperties *>( this ); + } + + operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkSparseImageFormatProperties *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::ImageAspectFlags const &, VULKAN_HPP_NAMESPACE::Extent3D const &, VULKAN_HPP_NAMESPACE::SparseImageFormatFlags const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( aspectMask, imageGranularity, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageFormatProperties const & ) const = default; +#else + bool operator==( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( aspectMask == rhs.aspectMask ) && ( imageGranularity == rhs.imageGranularity ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {}; + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {}; + }; + + struct SparseImageFormatProperties2 + { + using NativeType = VkSparseImageFormatProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , properties( properties_ ) + { + } + + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageFormatProperties2( *reinterpret_cast<SparseImageFormatProperties2 const *>( &rhs ) ) + { + } + + SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<SparseImageFormatProperties2 const *>( &rhs ); + return *this; + } + + operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkSparseImageFormatProperties2 *>( this ); + } + + operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkSparseImageFormatProperties2 *>( this ); + }
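// Note: these C <-> C++ conversions are zero-cost by design. Every Vulkan-Hpp struct
// has exactly the same layout as its NativeType, so converting to and assigning from
// the C struct compiles down to a reinterpret_cast with no copying involved.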
#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, properties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageFormatProperties2 const & ) const = default; +#else + bool operator==( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); +# endif + } + + bool operator!=( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {}; + }; + + template <> + struct CppType<StructureType, StructureType::eSparseImageFormatProperties2> + { + using Type = SparseImageFormatProperties2; + }; + + using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; + + struct SparseImageMemoryRequirements + { + using NativeType = VkSparseImageMemoryRequirements; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, + uint32_t imageMipTailFirstLod_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT + : formatProperties( formatProperties_ ) + , imageMipTailFirstLod( imageMipTailFirstLod_ ) + , imageMipTailSize( imageMipTailSize_ ) + , imageMipTailOffset( imageMipTailOffset_ ) + , imageMipTailStride( imageMipTailStride_ ) + { + } + + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryRequirements( *reinterpret_cast<SparseImageMemoryRequirements const *>( &rhs ) ) + { + } + + SparseImageMemoryRequirements & operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<SparseImageMemoryRequirements const *>( &rhs ); + return *this; + } + + operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkSparseImageMemoryRequirements *>( this ); + } + + operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkSparseImageMemoryRequirements *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const &, uint32_t const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( formatProperties, imageMipTailFirstLod, imageMipTailSize, imageMipTailOffset, imageMipTailStride ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryRequirements const & ) const = default; +#else + bool operator==( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( formatProperties == rhs.formatProperties ) && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) && + ( imageMipTailSize == rhs.imageMipTailSize ) && (
imageMipTailOffset == rhs.imageMipTailOffset ) && + ( imageMipTailStride == rhs.imageMipTailStride ); +# endif + } + + bool operator!=( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {}; + uint32_t imageMipTailFirstLod = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {}; + }; + + struct SparseImageMemoryRequirements2 + { + using NativeType = VkSparseImageMemoryRequirements2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , memoryRequirements( memoryRequirements_ ) + { + } + + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryRequirements2( *reinterpret_cast( &rhs ) ) + { + } + + SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryRequirements ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryRequirements2 const & ) const = default; +#else + bool operator==( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements ); +# endif + } + + bool operator!=( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {}; + }; + + template <> + struct CppType + { + using Type = SparseImageMemoryRequirements2; + }; + + using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; + +#if defined( VK_USE_PLATFORM_GGP ) + struct StreamDescriptorSurfaceCreateInfoGGP + { + using NativeType = VkStreamDescriptorSurfaceCreateInfoGGP; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; + +# if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, + GgpStreamDescriptor streamDescriptor_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , streamDescriptor( streamDescriptor_ ) + { + } + + VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + : StreamDescriptorSurfaceCreateInfoGGP( *reinterpret_cast( &rhs ) ) + { + } + + StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + StreamDescriptorSurfaceCreateInfoGGP & operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & + setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + streamDescriptor = streamDescriptor_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, streamDescriptor ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 ); + } + + bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {}; + GgpStreamDescriptor streamDescriptor = {}; + }; + + template <> + struct CppType + { + using Type = StreamDescriptorSurfaceCreateInfoGGP; + }; +#endif /*VK_USE_PLATFORM_GGP*/ + + struct StridedDeviceAddressRegionKHR + { + using NativeType = VkStridedDeviceAddressRegionKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceAddress( deviceAddress_ ) + , stride( stride_ ) + , size( size_ ) + { + } + + VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : StridedDeviceAddressRegionKHR( *reinterpret_cast( &rhs ) ) + { + } + + StridedDeviceAddressRegionKHR & operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( deviceAddress, stride, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( StridedDeviceAddressRegionKHR const & ) const = default; +#else + bool operator==( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( deviceAddress == rhs.deviceAddress ) && ( stride == rhs.stride ) && ( size == 
rhs.size ); +# endif + } + + bool operator!=( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + struct SubmitInfo + { + using NativeType = VkSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ = {}, + uint32_t commandBufferCount_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , pWaitDstStageMask( pWaitDstStageMask_ ) + , commandBufferCount( commandBufferCount_ ) + , pCommandBuffers( pCommandBuffers_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphores( pSignalSemaphores_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubmitInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitDstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBuffers_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , pWaitDstStageMask( waitDstStageMask_.data() ) + , commandBufferCount( static_cast( commandBuffers_.size() ) ) + , pCommandBuffers( commandBuffers_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphores_.size() ) ) + , pSignalSemaphores( signalSemaphores_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() ); +# else + if ( waitSemaphores_.size() != waitDstStageMask_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitSemaphores( const 
VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & + setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + pWaitDstStageMask = pWaitDstStageMask_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & setWaitDstStageMask( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitDstStageMask_ ) + VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitDstStageMask_.size() ); + pWaitDstStageMask = waitDstStageMask_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = commandBufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBuffers = pCommandBuffers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & + setCommandBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBuffers_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = static_cast( commandBuffers_.size() ); + pCommandBuffers = commandBuffers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphores = pSignalSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & + setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphores_.size() ); + pSignalSemaphores = signalSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, waitSemaphoreCount, pWaitSemaphores, pWaitDstStageMask, commandBufferCount, pCommandBuffers, signalSemaphoreCount, pSignalSemaphores ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubmitInfo const & ) const = default; +#else + bool operator==( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == 
rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) && ( commandBufferCount == rhs.commandBufferCount ) && + ( pCommandBuffers == rhs.pCommandBuffers ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && + ( pSignalSemaphores == rhs.pSignalSemaphores ); +# endif + } + + bool operator!=( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask = {}; + uint32_t commandBufferCount = {}; + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers = {}; + uint32_t signalSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {}; + }; + + template <> + struct CppType + { + using Type = SubmitInfo; + }; + + struct SubmitInfo2 + { + using NativeType = VkSubmitInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ = {}, + uint32_t waitSemaphoreInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ = {}, + uint32_t commandBufferInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ = {}, + uint32_t signalSemaphoreInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , waitSemaphoreInfoCount( waitSemaphoreInfoCount_ ) + , pWaitSemaphoreInfos( pWaitSemaphoreInfos_ ) + , commandBufferInfoCount( commandBufferInfoCount_ ) + , pCommandBufferInfos( pCommandBufferInfos_ ) + , signalSemaphoreInfoCount( signalSemaphoreInfoCount_ ) + , pSignalSemaphoreInfos( pSignalSemaphoreInfos_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubmitInfo2( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubmitInfo2( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubmitInfo2( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreInfos_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferInfos_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreInfos_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , waitSemaphoreInfoCount( static_cast( waitSemaphoreInfos_.size() ) ) + , pWaitSemaphoreInfos( waitSemaphoreInfos_.data() ) + , commandBufferInfoCount( static_cast( commandBufferInfos_.size() ) ) + , pCommandBufferInfos( commandBufferInfos_.data() ) + , signalSemaphoreInfoCount( static_cast( signalSemaphoreInfos_.size() ) ) + , pSignalSemaphoreInfos( signalSemaphoreInfos_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubmitInfo2 & operator=( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubmitInfo2 & operator=( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
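// A minimal sketch of the enhanced-mode SubmitInfo constructor defined earlier,
// assuming hypothetical handles `queue`, `cmd`, `acquireSemaphore`, `renderSemaphore`
// and `fence` that are not part of this diff, and that <vulkan/vulkan.hpp> is
// included. The constructor pairs each wait semaphore with exactly one pipeline
// stage mask and throws vk::LogicError (or asserts, under VULKAN_HPP_NO_EXCEPTIONS)
// when the two spans differ in size.
void submitFrame( vk::Queue queue, vk::CommandBuffer cmd, vk::Semaphore acquireSemaphore, vk::Semaphore renderSemaphore, vk::Fence fence )
{
  vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eColorAttachmentOutput;
  vk::SubmitInfo submit( acquireSemaphore,  // one wait semaphore...
                         waitStage,         // ...paired with exactly one wait-stage mask
                         cmd,
                         renderSemaphore );
  queue.submit( submit, fence );
}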
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setFlags( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreInfoCount = waitSemaphoreInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPWaitSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreInfos = pWaitSemaphoreInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2 & setWaitSemaphoreInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreInfoCount = static_cast( waitSemaphoreInfos_.size() ); + pWaitSemaphoreInfos = waitSemaphoreInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferInfoCount = commandBufferInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & + setPCommandBufferInfos( const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBufferInfos = pCommandBufferInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2 & setCommandBufferInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferInfoCount = static_cast( commandBufferInfos_.size() ); + pCommandBufferInfos = commandBufferInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreInfoCount = signalSemaphoreInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & + setPSignalSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreInfos = pSignalSemaphoreInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2 & setSignalSemaphoreInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreInfoCount = static_cast( signalSemaphoreInfos_.size() ); + pSignalSemaphoreInfos = signalSemaphoreInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSubmitInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubmitInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + waitSemaphoreInfoCount, + pWaitSemaphoreInfos, + commandBufferInfoCount, + pCommandBufferInfos, + signalSemaphoreInfoCount, + pSignalSemaphoreInfos ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubmitInfo2 const & ) const = default; +#else + bool operator==( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT 
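// When VULKAN_HPP_USE_REFLECT is defined, this comparison goes through the std::tie
// of all members that reflect() returns; in both branches the pointer members
// (pWaitSemaphoreInfos, pCommandBufferInfos, pSignalSemaphoreInfos) are compared by
// address only, not by the array contents they point to.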
+ { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount ) && + ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos ) && ( commandBufferInfoCount == rhs.commandBufferInfoCount ) && + ( pCommandBufferInfos == rhs.pCommandBufferInfos ) && ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount ) && + ( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos ); +# endif + } + + bool operator!=( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubmitFlags flags = {}; + uint32_t waitSemaphoreInfoCount = {}; + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos = {}; + uint32_t commandBufferInfoCount = {}; + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos = {}; + uint32_t signalSemaphoreInfoCount = {}; + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos = {}; + }; + + template <> + struct CppType + { + using Type = SubmitInfo2; + }; + + using SubmitInfo2KHR = SubmitInfo2; + + struct SubpassBeginInfo + { + using NativeType = VkSubpassBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , contents( contents_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassBeginInfo( *reinterpret_cast( &rhs ) ) {} + + SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT + { + contents = contents_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, contents ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassBeginInfo const & ) const = default; +#else + bool operator==( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( contents == 
rhs.contents ); +# endif + } + + bool operator!=( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline; + }; + + template <> + struct CppType + { + using Type = SubpassBeginInfo; + }; + + using SubpassBeginInfoKHR = SubpassBeginInfo; + + struct SubpassDescriptionDepthStencilResolve + { + using NativeType = VkSubpassDescriptionDepthStencilResolve; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescriptionDepthStencilResolve; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SubpassDescriptionDepthStencilResolve( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , depthResolveMode( depthResolveMode_ ) + , stencilResolveMode( stencilResolveMode_ ) + , pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassDescriptionDepthStencilResolve( *reinterpret_cast( &rhs ) ) + { + } + + SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassDescriptionDepthStencilResolve & operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & + setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT + { + depthResolveMode = depthResolveMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & + setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT + { + stencilResolveMode = stencilResolveMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & + setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# 
else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthResolveMode, stencilResolveMode, pDepthStencilResolveAttachment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescriptionDepthStencilResolve const & ) const = default; +#else + bool operator==( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthResolveMode == rhs.depthResolveMode ) && + ( stencilResolveMode == rhs.stencilResolveMode ) && ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment ); +# endif + } + + bool operator!=( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment = {}; + }; + + template <> + struct CppType + { + using Type = SubpassDescriptionDepthStencilResolve; + }; + + using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve; + + struct SubpassEndInfo + { + using NativeType = VkSubpassEndInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassEndInfo( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) {} + + VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassEndInfo( *reinterpret_cast( &rhs ) ) {} + + SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassEndInfo const & ) const = default; +#else + bool operator==( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); +# endif + } + + bool operator!=( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs 
); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo; + const void * pNext = {}; + }; + + template <> + struct CppType + { + using Type = SubpassEndInfo; + }; + + using SubpassEndInfoKHR = SubpassEndInfo; + + struct SubpassFragmentDensityMapOffsetEndInfoQCOM + { + using NativeType = VkSubpassFragmentDensityMapOffsetEndInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM( uint32_t fragmentDensityOffsetCount_ = {}, + const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , fragmentDensityOffsetCount( fragmentDensityOffsetCount_ ) + , pFragmentDensityOffsets( pFragmentDensityOffsets_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassFragmentDensityMapOffsetEndInfoQCOM( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassFragmentDensityMapOffsetEndInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassFragmentDensityMapOffsetEndInfoQCOM( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fragmentDensityOffsets_, const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , fragmentDensityOffsetCount( static_cast( fragmentDensityOffsets_.size() ) ) + , pFragmentDensityOffsets( fragmentDensityOffsets_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SubpassFragmentDensityMapOffsetEndInfoQCOM & operator=( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassFragmentDensityMapOffsetEndInfoQCOM & operator=( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & + setFragmentDensityOffsetCount( uint32_t fragmentDensityOffsetCount_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityOffsetCount = fragmentDensityOffsetCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & + setPFragmentDensityOffsets( const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pFragmentDensityOffsets = pFragmentDensityOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassFragmentDensityMapOffsetEndInfoQCOM & setFragmentDensityOffsets( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityOffsetCount = static_cast( fragmentDensityOffsets_.size() ); + pFragmentDensityOffsets = fragmentDensityOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkSubpassFragmentDensityMapOffsetEndInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityOffsetCount, pFragmentDensityOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassFragmentDensityMapOffsetEndInfoQCOM const & ) const = default; +#else + bool operator==( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityOffsetCount == rhs.fragmentDensityOffsetCount ) && + ( pFragmentDensityOffsets == rhs.pFragmentDensityOffsets ); +# endif + } + + bool operator!=( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM; + const void * pNext = {}; + uint32_t fragmentDensityOffsetCount = {}; + const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets = {}; + }; + + template <> + struct CppType + { + using Type = SubpassFragmentDensityMapOffsetEndInfoQCOM; + }; + + struct SubpassResolvePerformanceQueryEXT + { + using NativeType = VkSubpassResolvePerformanceQueryEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassResolvePerformanceQueryEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT( VULKAN_HPP_NAMESPACE::Bool32 optimal_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , optimal( optimal_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassResolvePerformanceQueryEXT( VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassResolvePerformanceQueryEXT( *reinterpret_cast( &rhs ) ) + { + } + + SubpassResolvePerformanceQueryEXT & operator=( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassResolvePerformanceQueryEXT & operator=( VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSubpassResolvePerformanceQueryEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassResolvePerformanceQueryEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, optimal ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassResolvePerformanceQueryEXT const & ) const = default; +#else + bool operator==( SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( optimal == rhs.optimal ); +# endif + } + + bool operator!=( 
SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassResolvePerformanceQueryEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 optimal = {}; + }; + + template <> + struct CppType + { + using Type = SubpassResolvePerformanceQueryEXT; + }; + + struct SubpassShadingPipelineCreateInfoHUAWEI + { + using NativeType = VkSubpassShadingPipelineCreateInfoHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , renderPass( renderPass_ ) + , subpass( subpass_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassShadingPipelineCreateInfoHUAWEI( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassShadingPipelineCreateInfoHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + SubpassShadingPipelineCreateInfoHUAWEI & operator=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubpassShadingPipelineCreateInfoHUAWEI & operator=( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSubpassShadingPipelineCreateInfoHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassShadingPipelineCreateInfoHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, renderPass, subpass ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassShadingPipelineCreateInfoHUAWEI const & ) const = default; +#else + bool operator==( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ); +# endif + } + + bool operator!=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t subpass = {}; + }; + + template <> + struct CppType + { + using Type = SubpassShadingPipelineCreateInfoHUAWEI; + }; + + struct SubresourceHostMemcpySizeEXT + { + using NativeType = VkSubresourceHostMemcpySizeEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceHostMemcpySizeEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySizeEXT( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, 
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , size( size_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySizeEXT( SubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubresourceHostMemcpySizeEXT( VkSubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SubresourceHostMemcpySizeEXT( *reinterpret_cast( &rhs ) ) + { + } + + SubresourceHostMemcpySizeEXT & operator=( SubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubresourceHostMemcpySizeEXT & operator=( VkSubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSubresourceHostMemcpySizeEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubresourceHostMemcpySizeEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubresourceHostMemcpySizeEXT const & ) const = default; +#else + bool operator==( SubresourceHostMemcpySizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( size == rhs.size ); +# endif + } + + bool operator!=( SubresourceHostMemcpySizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceHostMemcpySizeEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = SubresourceHostMemcpySizeEXT; + }; + + struct SubresourceLayout2KHR + { + using NativeType = VkSubresourceLayout2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceLayout2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , subresourceLayout( subresourceLayout_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubresourceLayout2KHR( SubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubresourceLayout2KHR( VkSubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SubresourceLayout2KHR( *reinterpret_cast( &rhs ) ) + { + } + + SubresourceLayout2KHR & operator=( SubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubresourceLayout2KHR & operator=( VkSubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSubresourceLayout2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubresourceLayout2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subresourceLayout ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubresourceLayout2KHR const & ) const = default; +#else + bool operator==( SubresourceLayout2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subresourceLayout == rhs.subresourceLayout ); +# endif + } + + bool operator!=( SubresourceLayout2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceLayout2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout = {}; + }; + + template <> + struct CppType + { + using Type = SubresourceLayout2KHR; + }; + + using SubresourceLayout2EXT = SubresourceLayout2KHR; + + struct SurfaceCapabilities2EXT + { + using NativeType = VkSurfaceCapabilities2EXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( + uint32_t minImageCount_ = {}, + uint32_t maxImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, + uint32_t maxImageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , minImageCount( minImageCount_ ) + , maxImageCount( maxImageCount_ ) + , currentExtent( currentExtent_ ) + , minImageExtent( minImageExtent_ ) + , maxImageExtent( maxImageExtent_ ) + , maxImageArrayLayers( maxImageArrayLayers_ ) + , supportedTransforms( supportedTransforms_ ) + , currentTransform( currentTransform_ ) + , supportedCompositeAlpha( supportedCompositeAlpha_ ) + , supportedUsageFlags( supportedUsageFlags_ ) + , supportedSurfaceCounters( supportedSurfaceCounters_ ) + { + } + + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilities2EXT( *reinterpret_cast( &rhs ) ) + { + } + + SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + minImageCount, + maxImageCount, + currentExtent, + minImageExtent, + 
maxImageExtent, + maxImageArrayLayers, + supportedTransforms, + currentTransform, + supportedCompositeAlpha, + supportedUsageFlags, + supportedSurfaceCounters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilities2EXT const & ) const = default; +#else + bool operator==( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) && + ( currentExtent == rhs.currentExtent ) && ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) && + ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( supportedTransforms == rhs.supportedTransforms ) && + ( currentTransform == rhs.currentTransform ) && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && + ( supportedUsageFlags == rhs.supportedUsageFlags ) && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters ); +# endif + } + + bool operator!=( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT; + void * pNext = {}; + uint32_t minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {}; + }; + + template <> + struct CppType + { + using Type = SurfaceCapabilities2EXT; + }; + + struct SurfaceCapabilitiesKHR + { + using NativeType = VkSurfaceCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( + uint32_t minImageCount_ = {}, + uint32_t maxImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, + uint32_t maxImageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : minImageCount( minImageCount_ ) + , maxImageCount( maxImageCount_ ) + , currentExtent( currentExtent_ ) + , minImageExtent( minImageExtent_ ) + , maxImageExtent( maxImageExtent_ ) + , maxImageArrayLayers( maxImageArrayLayers_ ) + , supportedTransforms( supportedTransforms_ ) + , currentTransform( currentTransform_ ) + , supportedCompositeAlpha( supportedCompositeAlpha_ ) + , supportedUsageFlags( supportedUsageFlags_ ) + { + } + + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesKHR( 
VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( minImageCount, + maxImageCount, + currentExtent, + minImageExtent, + maxImageExtent, + maxImageArrayLayers, + supportedTransforms, + currentTransform, + supportedCompositeAlpha, + supportedUsageFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilitiesKHR const & ) const = default; +#else + bool operator==( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) && ( currentExtent == rhs.currentExtent ) && + ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && + ( supportedTransforms == rhs.supportedTransforms ) && ( currentTransform == rhs.currentTransform ) && + ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && ( supportedUsageFlags == rhs.supportedUsageFlags ); +# endif + } + + bool operator!=( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + }; + + struct SurfaceCapabilities2KHR + { + using NativeType = VkSurfaceCapabilities2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , surfaceCapabilities( surfaceCapabilities_ ) + { + } + + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilities2KHR( *reinterpret_cast( &rhs ) ) + { + } + + SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
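+    // Why this wrapper exists: SurfaceCapabilities2KHR repackages the plain
+    // SurfaceCapabilitiesKHR above behind an sType/pNext pair so that extension
+    // structs can be chained onto the capability query. A minimal sketch, assuming
+    // hypothetical handles `gpu` (vk::PhysicalDevice) and `surface` (vk::SurfaceKHR)
+    // and VK_KHR_get_surface_capabilities2 being enabled:
+    //
+    //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo{ surface };
+    //   auto chain = gpu.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
+    //                                               vk::SurfaceCapabilitiesPresentBarrierNV>( surfaceInfo );
+    //   vk::Bool32 barrier = chain.get<vk::SurfaceCapabilitiesPresentBarrierNV>().presentBarrierSupported;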
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, surfaceCapabilities ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilities2KHR const & ) const = default; +#else + bool operator==( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCapabilities == rhs.surfaceCapabilities ); +# endif + } + + bool operator!=( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {}; + }; + + template <> + struct CppType + { + using Type = SurfaceCapabilities2KHR; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct SurfaceCapabilitiesFullScreenExclusiveEXT + { + using NativeType = VkSurfaceCapabilitiesFullScreenExclusiveEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , fullScreenExclusiveSupported( fullScreenExclusiveSupported_ ) + { + } + + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilitiesFullScreenExclusiveEXT( *reinterpret_cast( &rhs ) ) + { + } + + SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT & + setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT + { + fullScreenExclusiveSupported = fullScreenExclusiveSupported_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fullScreenExclusiveSupported ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const & ) const = default; +# else + bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported ); +# endif + } + + bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {}; + }; + + template <> + struct CppType + { + using Type = SurfaceCapabilitiesFullScreenExclusiveEXT; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct SurfaceCapabilitiesPresentBarrierNV + { + using NativeType = VkSurfaceCapabilitiesPresentBarrierNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesPresentBarrierNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , presentBarrierSupported( presentBarrierSupported_ ) + { + } + + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesPresentBarrierNV( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilitiesPresentBarrierNV( *reinterpret_cast( &rhs ) ) + { + } + + SurfaceCapabilitiesPresentBarrierNV & operator=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfaceCapabilitiesPresentBarrierNV & operator=( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentBarrierNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentBarrierNV & + setPresentBarrierSupported( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported_ ) VULKAN_HPP_NOEXCEPT + { + presentBarrierSupported = presentBarrierSupported_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSurfaceCapabilitiesPresentBarrierNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceCapabilitiesPresentBarrierNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { 
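+      // reflect() ties sType, pNext and presentBarrierSupported into a std::tuple of
+      // references; with VULKAN_HPP_USE_REFLECT defined, operator== below compares
+      // these tuples element-wise instead of spelling out each member by hand.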
+      return std::tie( sType, pNext, presentBarrierSupported );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SurfaceCapabilitiesPresentBarrierNV const & ) const = default;
+#else
+    bool operator==( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrierSupported == rhs.presentBarrierSupported );
+# endif
+    }
+
+    bool operator!=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                   = StructureType::eSurfaceCapabilitiesPresentBarrierNV;
+    void *                              pNext                   = {};
+    VULKAN_HPP_NAMESPACE::Bool32        presentBarrierSupported = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceCapabilitiesPresentBarrierNV>
+  {
+    using Type = SurfaceCapabilitiesPresentBarrierNV;
+  };
+
+  struct SurfaceFormatKHR
+  {
+    using NativeType = VkSurfaceFormatKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format        format_     = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                        VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT
+      : format( format_ )
+      , colorSpace( colorSpace_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT : SurfaceFormatKHR( *reinterpret_cast<SurfaceFormatKHR const *>( &rhs ) ) {}
+
+    SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFormatKHR *>( this );
+    }
+
+    operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFormatKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::Format const &, VULKAN_HPP_NAMESPACE::ColorSpaceKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( format, colorSpace );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SurfaceFormatKHR const & ) const = default;
+#else
+    bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( format == rhs.format ) && ( colorSpace == rhs.colorSpace );
+# endif
+    }
+
+    bool operator!=( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Format        format     = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
+  };
+
+  struct SurfaceFormat2KHR
+  {
+    using NativeType = VkSurfaceFormat2KHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , surfaceFormat( surfaceFormat_ )
+    {
+    }
+
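+    // A typical consumer of the SurfaceFormatKHR records defined above is swapchain
+    // format selection. A minimal sketch, assuming hypothetical `gpu` and `surface`
+    // handles:
+    //
+    //   std::vector<vk::SurfaceFormatKHR> formats = gpu.getSurfaceFormatsKHR( surface );
+    //   vk::SurfaceFormatKHR chosen = formats.front();  // fallback: first reported pair
+    //   for( vk::SurfaceFormatKHR const & f : formats )
+    //     if( f.format == vk::Format::eB8G8R8A8Srgb && f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear )
+    //       chosen = f;  // prefer an sRGB format/color-space pair when the surface offers one
+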
+    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT : SurfaceFormat2KHR( *reinterpret_cast<SurfaceFormat2KHR const *>( &rhs ) ) {}
+
+    SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFormat2KHR *>( this );
+    }
+
+    operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFormat2KHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, surfaceFormat );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( SurfaceFormat2KHR const & ) const = default;
+#else
+    bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceFormat == rhs.surfaceFormat );
+# endif
+    }
+
+    bool operator!=( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType    sType         = StructureType::eSurfaceFormat2KHR;
+    void *                                 pNext         = {};
+    VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eSurfaceFormat2KHR>
+  {
+    using Type = SurfaceFormat2KHR;
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct SurfaceFullScreenExclusiveInfoEXT
+  {
+    using NativeType = VkSurfaceFullScreenExclusiveInfoEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT(
+      VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault,
+      void *                                       pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , fullScreenExclusive( fullScreenExclusive_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfaceFullScreenExclusiveInfoEXT( *reinterpret_cast<SurfaceFullScreenExclusiveInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT &
+      setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fullScreenExclusive = fullScreenExclusive_;
+      return *this;
+    }
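+    // This struct is not passed to a command directly; it is chained into
+    // SwapchainCreateInfoKHR::pNext (VK_EXT_full_screen_exclusive). A sketch, with
+    // `swapchainInfo` a hypothetical vk::SwapchainCreateInfoKHR local:
+    //
+    //   vk::SurfaceFullScreenExclusiveInfoEXT fse{ vk::FullScreenExclusiveEXT::eApplicationControlled };
+    //   swapchainInfo.pNext = &fse;  // consumed when the swapchain is created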
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fullScreenExclusive ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const & ) const = default; +# else + bool operator==( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusive == rhs.fullScreenExclusive ); +# endif + } + + bool operator!=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault; + }; + + template <> + struct CppType + { + using Type = SurfaceFullScreenExclusiveInfoEXT; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct SurfaceFullScreenExclusiveWin32InfoEXT + { + using NativeType = VkSurfaceFullScreenExclusiveWin32InfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , hmonitor( hmonitor_ ) + { + } + + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceFullScreenExclusiveWin32InfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT + { + hmonitor = hmonitor_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hmonitor ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const & ) const = default; +# else + bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hmonitor == rhs.hmonitor ); +# endif + } + + bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; + const void * pNext = {}; + HMONITOR hmonitor = {}; + }; + + template <> + struct CppType + { + using Type = SurfaceFullScreenExclusiveWin32InfoEXT; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct SurfacePresentModeCompatibilityEXT + { + using NativeType = VkSurfacePresentModeCompatibilityEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfacePresentModeCompatibilityEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfacePresentModeCompatibilityEXT( uint32_t presentModeCount_ = {}, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , presentModeCount( presentModeCount_ ) + , pPresentModes( pPresentModes_ ) + { + } + + VULKAN_HPP_CONSTEXPR SurfacePresentModeCompatibilityEXT( SurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfacePresentModeCompatibilityEXT( VkSurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfacePresentModeCompatibilityEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SurfacePresentModeCompatibilityEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_, + void * pNext_ = nullptr ) + : pNext( pNext_ ), presentModeCount( static_cast( presentModes_.size() ) ), pPresentModes( presentModes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SurfacePresentModeCompatibilityEXT & operator=( SurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfacePresentModeCompatibilityEXT & operator=( VkSurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityEXT & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT + { + presentModeCount = presentModeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityEXT & setPPresentModes( VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT + { + pPresentModes = pPresentModes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SurfacePresentModeCompatibilityEXT & + setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_ ) 
VULKAN_HPP_NOEXCEPT + { + presentModeCount = static_cast( presentModes_.size() ); + pPresentModes = presentModes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSurfacePresentModeCompatibilityEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfacePresentModeCompatibilityEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentModeCount, pPresentModes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfacePresentModeCompatibilityEXT const & ) const = default; +#else + bool operator==( SurfacePresentModeCompatibilityEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeCount == rhs.presentModeCount ) && ( pPresentModes == rhs.pPresentModes ); +# endif + } + + bool operator!=( SurfacePresentModeCompatibilityEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfacePresentModeCompatibilityEXT; + void * pNext = {}; + uint32_t presentModeCount = {}; + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {}; + }; + + template <> + struct CppType + { + using Type = SurfacePresentModeCompatibilityEXT; + }; + + struct SurfacePresentModeEXT + { + using NativeType = VkSurfacePresentModeEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfacePresentModeEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfacePresentModeEXT( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , presentMode( presentMode_ ) + { + } + + VULKAN_HPP_CONSTEXPR SurfacePresentModeEXT( SurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfacePresentModeEXT( VkSurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfacePresentModeEXT( *reinterpret_cast( &rhs ) ) + { + } + + SurfacePresentModeEXT & operator=( SurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfacePresentModeEXT & operator=( VkSurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeEXT & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT + { + presentMode = presentMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSurfacePresentModeEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfacePresentModeEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + 
reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfacePresentModeEXT const & ) const = default; +#else + bool operator==( SurfacePresentModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMode == rhs.presentMode ); +# endif + } + + bool operator!=( SurfacePresentModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfacePresentModeEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate; + }; + + template <> + struct CppType + { + using Type = SurfacePresentModeEXT; + }; + + struct SurfacePresentScalingCapabilitiesEXT + { + using NativeType = VkSurfacePresentScalingCapabilitiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfacePresentScalingCapabilitiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfacePresentScalingCapabilitiesEXT( VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT supportedPresentScaling_ = {}, + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityX_ = {}, + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityY_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minScaledImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxScaledImageExtent_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , supportedPresentScaling( supportedPresentScaling_ ) + , supportedPresentGravityX( supportedPresentGravityX_ ) + , supportedPresentGravityY( supportedPresentGravityY_ ) + , minScaledImageExtent( minScaledImageExtent_ ) + , maxScaledImageExtent( maxScaledImageExtent_ ) + { + } + + VULKAN_HPP_CONSTEXPR SurfacePresentScalingCapabilitiesEXT( SurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfacePresentScalingCapabilitiesEXT( VkSurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfacePresentScalingCapabilitiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + SurfacePresentScalingCapabilitiesEXT & operator=( SurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfacePresentScalingCapabilitiesEXT & operator=( VkSurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & + setSupportedPresentScaling( VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT supportedPresentScaling_ ) VULKAN_HPP_NOEXCEPT + { + supportedPresentScaling = supportedPresentScaling_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & + setSupportedPresentGravityX( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityX_ ) VULKAN_HPP_NOEXCEPT + { + supportedPresentGravityX = supportedPresentGravityX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
SurfacePresentScalingCapabilitiesEXT & + setSupportedPresentGravityY( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityY_ ) VULKAN_HPP_NOEXCEPT + { + supportedPresentGravityY = supportedPresentGravityY_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & + setMinScaledImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & minScaledImageExtent_ ) VULKAN_HPP_NOEXCEPT + { + minScaledImageExtent = minScaledImageExtent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & + setMaxScaledImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & maxScaledImageExtent_ ) VULKAN_HPP_NOEXCEPT + { + maxScaledImageExtent = maxScaledImageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSurfacePresentScalingCapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfacePresentScalingCapabilitiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportedPresentScaling, supportedPresentGravityX, supportedPresentGravityY, minScaledImageExtent, maxScaledImageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfacePresentScalingCapabilitiesEXT const & ) const = default; +#else + bool operator==( SurfacePresentScalingCapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedPresentScaling == rhs.supportedPresentScaling ) && + ( supportedPresentGravityX == rhs.supportedPresentGravityX ) && ( supportedPresentGravityY == rhs.supportedPresentGravityY ) && + ( minScaledImageExtent == rhs.minScaledImageExtent ) && ( maxScaledImageExtent == rhs.maxScaledImageExtent ); +# endif + } + + bool operator!=( SurfacePresentScalingCapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfacePresentScalingCapabilitiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT supportedPresentScaling = {}; + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityX = {}; + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityY = {}; + VULKAN_HPP_NAMESPACE::Extent2D minScaledImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxScaledImageExtent = {}; + }; + + template <> + struct CppType + { + using Type = SurfacePresentScalingCapabilitiesEXT; + }; + + struct SurfaceProtectedCapabilitiesKHR + { + using NativeType = VkSurfaceProtectedCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , supportsProtected( supportsProtected_ ) + { + } + + VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceProtectedCapabilitiesKHR( 
VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceProtectedCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT + { + supportsProtected = supportsProtected_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportsProtected ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceProtectedCapabilitiesKHR const & ) const = default; +#else + bool operator==( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsProtected == rhs.supportsProtected ); +# endif + } + + bool operator!=( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {}; + }; + + template <> + struct CppType + { + using Type = SurfaceProtectedCapabilitiesKHR; + }; + + struct SwapchainCounterCreateInfoEXT + { + using NativeType = VkSwapchainCounterCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , surfaceCounters( surfaceCounters_ ) + { + } + + VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainCounterCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
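+    // Usage sketch (illustrative, not part of the generated header): the setter blocks
+    // guarded by VULKAN_HPP_NO_STRUCT_SETTERS throughout this file can be compiled out by
+    // defining that macro before vulkan.hpp is included, in which case structs are
+    // configured through their constructors only, e.g.:
+    //
+    //   #define VULKAN_HPP_NO_STRUCT_SETTERS
+    //   #include <vulkan/vulkan.hpp>
+    //   vk::SwapchainCounterCreateInfoEXT counterInfo{ vk::SurfaceCounterFlagBitsEXT::eVblank };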
VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & + setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT + { + surfaceCounters = surfaceCounters_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, surfaceCounters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainCounterCreateInfoEXT const & ) const = default; +#else + bool operator==( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCounters == rhs.surfaceCounters ); +# endif + } + + bool operator!=( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainCounterCreateInfoEXT; + }; + + struct SwapchainCreateInfoKHR + { + using NativeType = VkSwapchainCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, + uint32_t minImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, + uint32_t imageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, + VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , surface( surface_ ) + , minImageCount( minImageCount_ ) + , imageFormat( imageFormat_ ) + , imageColorSpace( imageColorSpace_ ) + , imageExtent( imageExtent_ ) + , imageArrayLayers( imageArrayLayers_ ) + , imageUsage( imageUsage_ ) + 
, imageSharingMode( imageSharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + , preTransform( preTransform_ ) + , compositeAlpha( compositeAlpha_ ) + , presentMode( presentMode_ ) + , clipped( clipped_ ) + , oldSwapchain( oldSwapchain_ ) + { + } + + VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface_, + uint32_t minImageCount_, + VULKAN_HPP_NAMESPACE::Format imageFormat_, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_, + uint32_t imageArrayLayers_, + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_, + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, + VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , surface( surface_ ) + , minImageCount( minImageCount_ ) + , imageFormat( imageFormat_ ) + , imageColorSpace( imageColorSpace_ ) + , imageExtent( imageExtent_ ) + , imageArrayLayers( imageArrayLayers_ ) + , imageUsage( imageUsage_ ) + , imageSharingMode( imageSharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + , preTransform( preTransform_ ) + , compositeAlpha( compositeAlpha_ ) + , presentMode( presentMode_ ) + , clipped( clipped_ ) + , oldSwapchain( oldSwapchain_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT + { + minImageCount = minImageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT + { + imageFormat = imageFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT + { + imageColorSpace = imageColorSpace_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT + { + imageArrayLayers = imageArrayLayers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT + { + imageUsage = imageUsage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT + { + imageSharingMode = imageSharingMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainCreateInfoKHR & + setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT + { + preTransform = preTransform_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT + { + compositeAlpha = compositeAlpha_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT + { + presentMode = presentMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT + { + clipped = clipped_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT + { + oldSwapchain = oldSwapchain_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + surface, + minImageCount, + imageFormat, + imageColorSpace, + imageExtent, + imageArrayLayers, + imageUsage, + imageSharingMode, + queueFamilyIndexCount, + pQueueFamilyIndices, + preTransform, + compositeAlpha, + presentMode, + clipped, + oldSwapchain ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) 
+ auto operator<=>( SwapchainCreateInfoKHR const & ) const = default; +#else + bool operator==( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( surface == rhs.surface ) && + ( minImageCount == rhs.minImageCount ) && ( imageFormat == rhs.imageFormat ) && ( imageColorSpace == rhs.imageColorSpace ) && + ( imageExtent == rhs.imageExtent ) && ( imageArrayLayers == rhs.imageArrayLayers ) && ( imageUsage == rhs.imageUsage ) && + ( imageSharingMode == rhs.imageSharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( preTransform == rhs.preTransform ) && ( compositeAlpha == rhs.compositeAlpha ) && + ( presentMode == rhs.presentMode ) && ( clipped == rhs.clipped ) && ( oldSwapchain == rhs.oldSwapchain ); +# endif + } + + bool operator!=( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + uint32_t minImageCount = {}; + VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; + VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; + uint32_t imageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque; + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate; + VULKAN_HPP_NAMESPACE::Bool32 clipped = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainCreateInfoKHR; + }; + + struct SwapchainDisplayNativeHdrCreateInfoAMD + { + using NativeType = VkSwapchainDisplayNativeHdrCreateInfoAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , localDimmingEnable( localDimmingEnable_ ) + { + } + + VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainDisplayNativeHdrCreateInfoAMD( *reinterpret_cast( &rhs ) ) + { + } + + SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif 
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & + setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT + { + localDimmingEnable = localDimmingEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, localDimmingEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const & ) const = default; +#else + bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingEnable == rhs.localDimmingEnable ); +# endif + } + + bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainDisplayNativeHdrCreateInfoAMD; + }; + + struct SwapchainLatencyCreateInfoNV + { + using NativeType = VkSwapchainLatencyCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainLatencyCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainLatencyCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , latencyModeEnable( latencyModeEnable_ ) + { + } + + VULKAN_HPP_CONSTEXPR SwapchainLatencyCreateInfoNV( SwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainLatencyCreateInfoNV( VkSwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainLatencyCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + SwapchainLatencyCreateInfoNV & operator=( SwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SwapchainLatencyCreateInfoNV & operator=( VkSwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainLatencyCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainLatencyCreateInfoNV & setLatencyModeEnable( 
VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable_ ) VULKAN_HPP_NOEXCEPT + { + latencyModeEnable = latencyModeEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSwapchainLatencyCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainLatencyCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, latencyModeEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainLatencyCreateInfoNV const & ) const = default; +#else + bool operator==( SwapchainLatencyCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( latencyModeEnable == rhs.latencyModeEnable ); +# endif + } + + bool operator!=( SwapchainLatencyCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainLatencyCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainLatencyCreateInfoNV; + }; + + struct SwapchainPresentBarrierCreateInfoNV + { + using NativeType = VkSwapchainPresentBarrierCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentBarrierCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , presentBarrierEnable( presentBarrierEnable_ ) + { + } + + VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainPresentBarrierCreateInfoNV( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainPresentBarrierCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + SwapchainPresentBarrierCreateInfoNV & operator=( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SwapchainPresentBarrierCreateInfoNV & operator=( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV & + setPresentBarrierEnable( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable_ ) VULKAN_HPP_NOEXCEPT + { + presentBarrierEnable = presentBarrierEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSwapchainPresentBarrierCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentBarrierCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentBarrierEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainPresentBarrierCreateInfoNV const & ) const = default; +#else + bool operator==( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrierEnable == rhs.presentBarrierEnable ); +# endif + } + + bool operator!=( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentBarrierCreateInfoNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainPresentBarrierCreateInfoNV; + }; + + struct SwapchainPresentFenceInfoEXT + { + using NativeType = VkSwapchainPresentFenceInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentFenceInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainPresentFenceInfoEXT( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::Fence * pFences_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , swapchainCount( swapchainCount_ ) + , pFences( pFences_ ) + { + } + + VULKAN_HPP_CONSTEXPR SwapchainPresentFenceInfoEXT( SwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainPresentFenceInfoEXT( VkSwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainPresentFenceInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentFenceInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fences_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( fences_.size() ) ), pFences( fences_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SwapchainPresentFenceInfoEXT & operator=( SwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SwapchainPresentFenceInfoEXT & operator=( VkSwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoEXT & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoEXT & setPFences( const VULKAN_HPP_NAMESPACE::Fence * pFences_ ) VULKAN_HPP_NOEXCEPT + { + pFences = pFences_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentFenceInfoEXT & + setFences( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fences_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( fences_.size() ); + pFences = fences_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSwapchainPresentFenceInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentFenceInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainCount, pFences ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainPresentFenceInfoEXT const & ) const = default; +#else + bool operator==( SwapchainPresentFenceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pFences == rhs.pFences ); +# endif + } + + bool operator!=( SwapchainPresentFenceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentFenceInfoEXT; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::Fence * pFences = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainPresentFenceInfoEXT; + }; + + struct SwapchainPresentModeInfoEXT + { + using NativeType = VkSwapchainPresentModeInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentModeInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainPresentModeInfoEXT( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , swapchainCount( swapchainCount_ ) + , pPresentModes( pPresentModes_ ) + { + } + + VULKAN_HPP_CONSTEXPR SwapchainPresentModeInfoEXT( SwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainPresentModeInfoEXT( VkSwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainPresentModeInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentModeInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( presentModes_.size() ) ), pPresentModes( presentModes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SwapchainPresentModeInfoEXT & operator=( SwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SwapchainPresentModeInfoEXT & operator=( VkSwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoEXT & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = swapchainCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoEXT & setPPresentModes( const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT + { + 
pPresentModes = pPresentModes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentModeInfoEXT & + setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( presentModes_.size() ); + pPresentModes = presentModes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSwapchainPresentModeInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentModeInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, swapchainCount, pPresentModes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainPresentModeInfoEXT const & ) const = default; +#else + bool operator==( SwapchainPresentModeInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pPresentModes == rhs.pPresentModes ); +# endif + } + + bool operator!=( SwapchainPresentModeInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentModeInfoEXT; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainPresentModeInfoEXT; + }; + + struct SwapchainPresentModesCreateInfoEXT + { + using NativeType = VkSwapchainPresentModesCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentModesCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainPresentModesCreateInfoEXT( uint32_t presentModeCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , presentModeCount( presentModeCount_ ) + , pPresentModes( pPresentModes_ ) + { + } + + VULKAN_HPP_CONSTEXPR SwapchainPresentModesCreateInfoEXT( SwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainPresentModesCreateInfoEXT( VkSwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainPresentModesCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentModesCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), presentModeCount( static_cast( presentModes_.size() ) ), pPresentModes( presentModes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SwapchainPresentModesCreateInfoEXT & operator=( SwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SwapchainPresentModesCreateInfoEXT & operator=( VkSwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + 
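+    // Usage sketch (illustrative; assumes a device with VK_EXT_swapchain_maintenance1
+    // enabled and a valid vk::SurfaceKHR named `surface`): the ArrayProxy setter below,
+    // available unless VULKAN_HPP_DISABLE_ENHANCED_MODE is defined, fills both
+    // presentModeCount and pPresentModes in one call, and the struct is then chained
+    // into vk::SwapchainCreateInfoKHR through its pNext, e.g.:
+    //
+    //   std::array<vk::PresentModeKHR, 2> modes{ vk::PresentModeKHR::eFifo, vk::PresentModeKHR::eMailbox };
+    //   vk::SwapchainPresentModesCreateInfoEXT compatibleModes{};
+    //   compatibleModes.setPresentModes( modes );
+    //   vk::SwapchainCreateInfoKHR createInfo{};
+    //   createInfo.setSurface( surface ).setMinImageCount( 3 ).setPNext( &compatibleModes );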
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoEXT & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT + { + presentModeCount = presentModeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoEXT & + setPPresentModes( const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT + { + pPresentModes = pPresentModes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentModesCreateInfoEXT & + setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_ ) VULKAN_HPP_NOEXCEPT + { + presentModeCount = static_cast( presentModes_.size() ); + pPresentModes = presentModes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSwapchainPresentModesCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentModesCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentModeCount, pPresentModes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainPresentModesCreateInfoEXT const & ) const = default; +#else + bool operator==( SwapchainPresentModesCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeCount == rhs.presentModeCount ) && ( pPresentModes == rhs.pPresentModes ); +# endif + } + + bool operator!=( SwapchainPresentModesCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentModesCreateInfoEXT; + const void * pNext = {}; + uint32_t presentModeCount = {}; + const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainPresentModesCreateInfoEXT; + }; + + struct SwapchainPresentScalingCreateInfoEXT + { + using NativeType = VkSwapchainPresentScalingCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentScalingCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainPresentScalingCreateInfoEXT( VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT scalingBehavior_ = {}, + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX_ = {}, + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , scalingBehavior( scalingBehavior_ ) + , presentGravityX( presentGravityX_ ) + , presentGravityY( presentGravityY_ ) + { + } + + VULKAN_HPP_CONSTEXPR SwapchainPresentScalingCreateInfoEXT( SwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainPresentScalingCreateInfoEXT( VkSwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT 
+ : SwapchainPresentScalingCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + SwapchainPresentScalingCreateInfoEXT & operator=( SwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SwapchainPresentScalingCreateInfoEXT & operator=( VkSwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & + setScalingBehavior( VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT scalingBehavior_ ) VULKAN_HPP_NOEXCEPT + { + scalingBehavior = scalingBehavior_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & + setPresentGravityX( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX_ ) VULKAN_HPP_NOEXCEPT + { + presentGravityX = presentGravityX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & + setPresentGravityY( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY_ ) VULKAN_HPP_NOEXCEPT + { + presentGravityY = presentGravityY_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkSwapchainPresentScalingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentScalingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, scalingBehavior, presentGravityX, presentGravityY ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainPresentScalingCreateInfoEXT const & ) const = default; +#else + bool operator==( SwapchainPresentScalingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalingBehavior == rhs.scalingBehavior ) && ( presentGravityX == rhs.presentGravityX ) && + ( presentGravityY == rhs.presentGravityY ); +# endif + } + + bool operator!=( SwapchainPresentScalingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentScalingCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT scalingBehavior = {}; + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX = {}; + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY = {}; + }; + + template <> + struct CppType + { + using Type = SwapchainPresentScalingCreateInfoEXT; + }; + + struct TextureLODGatherFormatPropertiesAMD + { + using NativeType = VkTextureLODGatherFormatPropertiesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTextureLodGatherFormatPropertiesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : 
pNext( pNext_ ) + , supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ ) + { + } + + VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : TextureLODGatherFormatPropertiesAMD( *reinterpret_cast( &rhs ) ) + { + } + + TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + TextureLODGatherFormatPropertiesAMD & operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportsTextureGatherLODBiasAMD ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TextureLODGatherFormatPropertiesAMD const & ) const = default; +#else + bool operator==( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD ); +# endif + } + + bool operator!=( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {}; + }; + + template <> + struct CppType + { + using Type = TextureLODGatherFormatPropertiesAMD; + }; + + struct TilePropertiesQCOM + { + using NativeType = VkTilePropertiesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTilePropertiesQCOM; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Extent3D tileSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D apronSize_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D origin_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , tileSize( tileSize_ ) + , apronSize( apronSize_ ) + , origin( origin_ ) + { + } + + VULKAN_HPP_CONSTEXPR TilePropertiesQCOM( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TilePropertiesQCOM( VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : TilePropertiesQCOM( *reinterpret_cast( &rhs ) ) {} + + TilePropertiesQCOM & operator=( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + TilePropertiesQCOM & operator=( VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + 
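+    // Usage sketch (illustrative; assumes a VkDevice `device` created with
+    // VK_QCOM_tile_properties and a populated VkRenderingInfo named `renderingInfo`):
+    // each struct in this header is layout-compatible with its NativeType, so the
+    // conversion operators defined below allow it to be handed to the C entry points, e.g.:
+    //
+    //   vk::TilePropertiesQCOM props{};
+    //   vkGetDynamicRenderingTilePropertiesQCOM( device, &renderingInfo,
+    //                                            reinterpret_cast<VkTilePropertiesQCOM *>( &props ) );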
VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setTileSize( VULKAN_HPP_NAMESPACE::Extent3D const & tileSize_ ) VULKAN_HPP_NOEXCEPT + { + tileSize = tileSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setApronSize( VULKAN_HPP_NAMESPACE::Extent2D const & apronSize_ ) VULKAN_HPP_NOEXCEPT + { + apronSize = apronSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setOrigin( VULKAN_HPP_NAMESPACE::Offset2D const & origin_ ) VULKAN_HPP_NOEXCEPT + { + origin = origin_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkTilePropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTilePropertiesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, tileSize, apronSize, origin ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TilePropertiesQCOM const & ) const = default; +#else + bool operator==( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileSize == rhs.tileSize ) && ( apronSize == rhs.apronSize ) && ( origin == rhs.origin ); +# endif + } + + bool operator!=( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTilePropertiesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent3D tileSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D apronSize = {}; + VULKAN_HPP_NAMESPACE::Offset2D origin = {}; + }; + + template <> + struct CppType + { + using Type = TilePropertiesQCOM; + }; + + struct TimelineSemaphoreSubmitInfo + { + using NativeType = VkTimelineSemaphoreSubmitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( uint32_t waitSemaphoreValueCount_ = {}, + const uint64_t * pWaitSemaphoreValues_ = {}, + uint32_t signalSemaphoreValueCount_ = {}, + const uint64_t * pSignalSemaphoreValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , waitSemaphoreValueCount( waitSemaphoreValueCount_ ) + , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) + , signalSemaphoreValueCount( signalSemaphoreValueCount_ ) + , pSignalSemaphoreValues( pSignalSemaphoreValues_ ) + { + } + + VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : TimelineSemaphoreSubmitInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + TimelineSemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , waitSemaphoreValueCount( static_cast( waitSemaphoreValues_.size() ) ) + , pWaitSemaphoreValues( waitSemaphoreValues_.data() ) + , signalSemaphoreValueCount( 
static_cast( signalSemaphoreValues_.size() ) ) + , pSignalSemaphoreValues( signalSemaphoreValues_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValueCount = waitSemaphoreValueCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreValues = pWaitSemaphoreValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + TimelineSemaphoreSubmitInfo & + setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValueCount = static_cast( waitSemaphoreValues_.size() ); + pWaitSemaphoreValues = waitSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValueCount = signalSemaphoreValueCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreValues = pSignalSemaphoreValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + TimelineSemaphoreSubmitInfo & + setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValueCount = static_cast( signalSemaphoreValues_.size() ); + pSignalSemaphoreValues = signalSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, waitSemaphoreValueCount, pWaitSemaphoreValues, signalSemaphoreValueCount, pSignalSemaphoreValues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( TimelineSemaphoreSubmitInfo const & ) const = default; +#else + bool operator==( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount ) && + ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount ) && + ( 
pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); +# endif + } + + bool operator!=( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreValueCount = {}; + const uint64_t * pWaitSemaphoreValues = {}; + uint32_t signalSemaphoreValueCount = {}; + const uint64_t * pSignalSemaphoreValues = {}; + }; + + template <> + struct CppType + { + using Type = TimelineSemaphoreSubmitInfo; + }; + + using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo; + + struct TraceRaysIndirectCommand2KHR + { + using NativeType = VkTraceRaysIndirectCommand2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT + : raygenShaderRecordAddress( raygenShaderRecordAddress_ ) + , raygenShaderRecordSize( raygenShaderRecordSize_ ) + , missShaderBindingTableAddress( missShaderBindingTableAddress_ ) + , missShaderBindingTableSize( missShaderBindingTableSize_ ) + , missShaderBindingTableStride( missShaderBindingTableStride_ ) + , hitShaderBindingTableAddress( hitShaderBindingTableAddress_ ) + , hitShaderBindingTableSize( hitShaderBindingTableSize_ ) + , hitShaderBindingTableStride( hitShaderBindingTableStride_ ) + , callableShaderBindingTableAddress( callableShaderBindingTableAddress_ ) + , callableShaderBindingTableSize( callableShaderBindingTableSize_ ) + , callableShaderBindingTableStride( callableShaderBindingTableStride_ ) + , width( width_ ) + , height( height_ ) + , depth( depth_ ) + { + } + + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TraceRaysIndirectCommand2KHR( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : TraceRaysIndirectCommand2KHR( *reinterpret_cast( &rhs ) ) + { + } + + TraceRaysIndirectCommand2KHR & operator=( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + TraceRaysIndirectCommand2KHR & operator=( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setRaygenShaderRecordAddress( VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress_ ) VULKAN_HPP_NOEXCEPT + { + raygenShaderRecordAddress = raygenShaderRecordAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + 
setRaygenShaderRecordSize( VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize_ ) VULKAN_HPP_NOEXCEPT + { + raygenShaderRecordSize = raygenShaderRecordSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setMissShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT + { + missShaderBindingTableAddress = missShaderBindingTableAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setMissShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT + { + missShaderBindingTableSize = missShaderBindingTableSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setMissShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT + { + missShaderBindingTableStride = missShaderBindingTableStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setHitShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT + { + hitShaderBindingTableAddress = hitShaderBindingTableAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setHitShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT + { + hitShaderBindingTableSize = hitShaderBindingTableSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setHitShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT + { + hitShaderBindingTableStride = hitShaderBindingTableStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setCallableShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT + { + callableShaderBindingTableAddress = callableShaderBindingTableAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setCallableShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT + { + callableShaderBindingTableSize = callableShaderBindingTableSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setCallableShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT + { + callableShaderBindingTableStride = callableShaderBindingTableStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT + { + depth = depth_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkTraceRaysIndirectCommand2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTraceRaysIndirectCommand2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( raygenShaderRecordAddress, + 
+                       raygenShaderRecordSize,
+                       missShaderBindingTableAddress,
+                       missShaderBindingTableSize,
+                       missShaderBindingTableStride,
+                       hitShaderBindingTableAddress,
+                       hitShaderBindingTableSize,
+                       hitShaderBindingTableStride,
+                       callableShaderBindingTableAddress,
+                       callableShaderBindingTableSize,
+                       callableShaderBindingTableStride,
+                       width,
+                       height,
+                       depth );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( TraceRaysIndirectCommand2KHR const & ) const = default;
+#else
+    bool operator==( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( raygenShaderRecordAddress == rhs.raygenShaderRecordAddress ) && ( raygenShaderRecordSize == rhs.raygenShaderRecordSize ) &&
+             ( missShaderBindingTableAddress == rhs.missShaderBindingTableAddress ) && ( missShaderBindingTableSize == rhs.missShaderBindingTableSize ) &&
+             ( missShaderBindingTableStride == rhs.missShaderBindingTableStride ) && ( hitShaderBindingTableAddress == rhs.hitShaderBindingTableAddress ) &&
+             ( hitShaderBindingTableSize == rhs.hitShaderBindingTableSize ) && ( hitShaderBindingTableStride == rhs.hitShaderBindingTableStride ) &&
+             ( callableShaderBindingTableAddress == rhs.callableShaderBindingTableAddress ) &&
+             ( callableShaderBindingTableSize == rhs.callableShaderBindingTableSize ) &&
+             ( callableShaderBindingTableStride == rhs.callableShaderBindingTableStride ) && ( width == rhs.width ) && ( height == rhs.height ) &&
+             ( depth == rhs.depth );
+# endif
+    }
+
+    bool operator!=( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride = {};
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t depth = {};
+  };
+
+  struct TraceRaysIndirectCommandKHR
+  {
+    using NativeType = VkTraceRaysIndirectCommandKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
+      : width( width_ )
+      , height( height_ )
+      , depth( depth_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : TraceRaysIndirectCommandKHR( *reinterpret_cast<TraceRaysIndirectCommandKHR const *>( &rhs ) )
+    {
+    }
+
+    explicit TraceRaysIndirectCommandKHR( Extent2D const & extent2D, uint32_t depth_ = {} )
+      : width( extent2D.width ), height( extent2D.height ), depth( depth_ )
+    {
+    }
+
+    TraceRaysIndirectCommandKHR & operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depth = depth_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTraceRaysIndirectCommandKHR *>( this );
+    }
+
+    operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTraceRaysIndirectCommandKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+# endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( width, height, depth );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( TraceRaysIndirectCommandKHR const & ) const = default;
+#else
+    bool operator==( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth );
+# endif
+    }
+
+    bool operator!=( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t depth = {};
+  };
+
+  struct ValidationCacheCreateInfoEXT
+  {
+    using NativeType = VkValidationCacheCreateInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {},
+                                                       size_t initialDataSize_ = {},
+                                                       const void * pInitialData_ = {},
+                                                       const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , initialDataSize( initialDataSize_ )
+      , pInitialData( pInitialData_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ValidationCacheCreateInfoEXT( *reinterpret_cast<ValidationCacheCreateInfoEXT const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_,
+                                  VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & initialData_,
+                                  const void * pNext_ = nullptr )
+      : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialDataSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInitialData = pInitialData_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    ValidationCacheCreateInfoEXT & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialData_.size() * sizeof( T );
+      pInitialData = initialData_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( this );
+    }
+
+    operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkValidationCacheCreateInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT const &,
+               size_t const &,
+               const void * const &>
+# endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, initialDataSize, pInitialData );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ValidationCacheCreateInfoEXT const & ) const = default;
+#else
+    bool operator==( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( initialDataSize == rhs.initialDataSize ) &&
+             ( pInitialData == rhs.pInitialData );
+# endif
+    }
+
+    bool operator!=( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {};
+    size_t initialDataSize = {};
+    const void * pInitialData = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eValidationCacheCreateInfoEXT>
+  {
+    using Type = ValidationCacheCreateInfoEXT;
+  };
+
+  struct ValidationFeaturesEXT
+  {
+    using NativeType = VkValidationFeaturesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = {},
+                                                const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ = {},
+                                                uint32_t disabledValidationFeatureCount_ = {},
+                                                const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {},
+                                                const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , enabledValidationFeatureCount( enabledValidationFeatureCount_ )
+      , pEnabledValidationFeatures( pEnabledValidationFeatures_ )
+      , disabledValidationFeatureCount( disabledValidationFeatureCount_ )
+      , pDisabledValidationFeatures( pDisabledValidationFeatures_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ValidationFeaturesEXT( *reinterpret_cast<ValidationFeaturesEXT const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ValidationFeaturesEXT(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ = {},
+      const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , enabledValidationFeatureCount( static_cast<uint32_t>( enabledValidationFeatures_.size() ) )
+      , pEnabledValidationFeatures( enabledValidationFeatures_.data() )
+      , disabledValidationFeatureCount( static_cast<uint32_t>( disabledValidationFeatures_.size() ) )
+      , pDisabledValidationFeatures( disabledValidationFeatures_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledValidationFeatureCount = enabledValidationFeatureCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT &
+      setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pEnabledValidationFeatures = pEnabledValidationFeatures_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ValidationFeaturesEXT & setEnabledValidationFeatures(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      enabledValidationFeatureCount = static_cast<uint32_t>( enabledValidationFeatures_.size() );
+      pEnabledValidationFeatures = enabledValidationFeatures_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      disabledValidationFeatureCount = disabledValidationFeatureCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT &
+      setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDisabledValidationFeatures = pDisabledValidationFeatures_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ValidationFeaturesEXT & setDisabledValidationFeatures(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ )
+      VULKAN_HPP_NOEXCEPT
+    {
+      disabledValidationFeatureCount = static_cast<uint32_t>( disabledValidationFeatures_.size() );
+      pDisabledValidationFeatures = disabledValidationFeatures_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationFeaturesEXT *>( this );
+    }
+
+    operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkValidationFeaturesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * const &>
+# endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, enabledValidationFeatureCount, pEnabledValidationFeatures, disabledValidationFeatureCount, pDisabledValidationFeatures );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( ValidationFeaturesEXT const & ) const = default;
+#else
+    bool operator==( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) &&
+             ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) && ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount ) &&
+             ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures );
+# endif
+    }
+
+    bool operator!=( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT;
+    const void * pNext = {};
+    uint32_t enabledValidationFeatureCount = {};
+    const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures = {};
+    uint32_t disabledValidationFeatureCount = {};
+    const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eValidationFeaturesEXT>
+  {
+    using Type = ValidationFeaturesEXT;
+  };
+
+  struct ValidationFlagsEXT
+  {
+    using NativeType = VkValidationFlagsEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = {},
+                                             const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ = {},
+                                             const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , disabledValidationCheckCount( disabledValidationCheckCount_ )
+      , pDisabledValidationChecks( pDisabledValidationChecks_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ValidationFlagsEXT( *reinterpret_cast<ValidationFlagsEXT const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    ValidationFlagsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const & disabledValidationChecks_,
+                        const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , disabledValidationCheckCount( static_cast<uint32_t>( disabledValidationChecks_.size() ) )
+      , pDisabledValidationChecks( disabledValidationChecks_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    ValidationFlagsEXT & operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
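+    // NOTE: the raw count/pointer setters below have to be kept in sync by the
+    // caller: disabledValidationCheckCount must match the number of elements
+    // reachable through pDisabledValidationChecks. The ArrayProxy overload
+    // further down updates both fields in one call.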
VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT + { + disabledValidationCheckCount = disabledValidationCheckCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & + setPDisabledValidationChecks( const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT + { + pDisabledValidationChecks = pDisabledValidationChecks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFlagsEXT & setDisabledValidationChecks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT + { + disabledValidationCheckCount = static_cast( disabledValidationChecks_.size() ); + pDisabledValidationChecks = disabledValidationChecks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, disabledValidationCheckCount, pDisabledValidationChecks ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationFlagsEXT const & ) const = default; +#else + bool operator==( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) && + ( pDisabledValidationChecks == rhs.pDisabledValidationChecks ); +# endif + } + + bool operator!=( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT; + const void * pNext = {}; + uint32_t disabledValidationCheckCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks = {}; + }; + + template <> + struct CppType + { + using Type = ValidationFlagsEXT; + }; + + struct VertexInputAttributeDescription2EXT + { + using NativeType = VkVertexInputAttributeDescription2EXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputAttributeDescription2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( uint32_t location_ = {}, + uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t offset_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , location( location_ ) + , binding( binding_ ) + , format( format_ ) + , offset( offset_ ) + { + } + + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputAttributeDescription2EXT( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputAttributeDescription2EXT( *reinterpret_cast( &rhs ) ) + { + } + + VertexInputAttributeDescription2EXT & operator=( VertexInputAttributeDescription2EXT const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VertexInputAttributeDescription2EXT & operator=( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT + { + location = location_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, location, binding, format, offset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputAttributeDescription2EXT const & ) const = default; +#else + bool operator==( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) && + ( offset == rhs.offset ); +# endif + } + + bool operator!=( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputAttributeDescription2EXT; + void * pNext = {}; + uint32_t location = {}; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t offset = {}; + }; + + template <> + struct CppType + { + using Type = VertexInputAttributeDescription2EXT; + }; + + struct VertexInputBindingDescription2EXT + { + using NativeType = VkVertexInputBindingDescription2EXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputBindingDescription2EXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( uint32_t binding_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex, + uint32_t divisor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , binding( binding_ ) + , stride( stride_ ) + , inputRate( inputRate_ ) + , divisor( divisor_ ) + { + } + + VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( VertexInputBindingDescription2EXT const & 
rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputBindingDescription2EXT( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDescription2EXT( *reinterpret_cast( &rhs ) ) + { + } + + VertexInputBindingDescription2EXT & operator=( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VertexInputBindingDescription2EXT & operator=( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT + { + inputRate = inputRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT + { + divisor = divisor_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, binding, stride, inputRate, divisor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputBindingDescription2EXT const & ) const = default; +#else + bool operator==( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ) && + ( divisor == rhs.divisor ); +# endif + } + + bool operator!=( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputBindingDescription2EXT; + void * pNext = {}; + uint32_t binding = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; + uint32_t divisor = {}; + }; + + template <> + struct CppType + { + using Type = VertexInputBindingDescription2EXT; + }; + +#if defined( VK_USE_PLATFORM_VI_NN ) + struct ViSurfaceCreateInfoNN + { + using NativeType = VkViSurfaceCreateInfoNN; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, void * window_ = {}, const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , window( window_ ) + { + } + + VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT + : ViSurfaceCreateInfoNN( *reinterpret_cast( &rhs ) ) + { + } + + ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setWindow( void * window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, window ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ViSurfaceCreateInfoNN const & ) const = default; +# else + bool operator==( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window ); +# endif + } + + bool operator!=( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {}; + void * window = {}; + }; + + template <> + struct CppType + { + using Type = ViSurfaceCreateInfoNN; + }; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + struct VideoPictureResourceInfoKHR + { + using NativeType = VkVideoPictureResourceInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoPictureResourceInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR( VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, + uint32_t baseArrayLayer_ = {}, + VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , codedOffset( codedOffset_ ) + , codedExtent( codedExtent_ ) + , baseArrayLayer( baseArrayLayer_ ) + , imageViewBinding( imageViewBinding_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoPictureResourceInfoKHR( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : 
VideoPictureResourceInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoPictureResourceInfoKHR & operator=( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoPictureResourceInfoKHR & operator=( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT + { + codedOffset = codedOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT + { + codedExtent = codedExtent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setImageViewBinding( VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ ) VULKAN_HPP_NOEXCEPT + { + imageViewBinding = imageViewBinding_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoPictureResourceInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoPictureResourceInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, codedOffset, codedExtent, baseArrayLayer, imageViewBinding ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoPictureResourceInfoKHR const & ) const = default; +#else + bool operator==( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( codedOffset == rhs.codedOffset ) && ( codedExtent == rhs.codedExtent ) && + ( baseArrayLayer == rhs.baseArrayLayer ) && ( imageViewBinding == rhs.imageViewBinding ); +# endif + } + + bool operator!=( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoPictureResourceInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {}; + VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {}; + uint32_t baseArrayLayer = {}; + VULKAN_HPP_NAMESPACE::ImageView imageViewBinding = {}; + }; + + template <> + struct CppType + { + using Type = VideoPictureResourceInfoKHR; + }; + + struct VideoReferenceSlotInfoKHR + { + using NativeType = VkVideoReferenceSlotInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoReferenceSlotInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( int32_t slotIndex_ = {}, + const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ = {}, + const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , slotIndex( slotIndex_ ) + , pPictureResource( pPictureResource_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoReferenceSlotInfoKHR( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoReferenceSlotInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoReferenceSlotInfoKHR & operator=( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoReferenceSlotInfoKHR & operator=( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setSlotIndex( int32_t slotIndex_ ) VULKAN_HPP_NOEXCEPT + { + slotIndex = slotIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & + setPPictureResource( const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ ) VULKAN_HPP_NOEXCEPT + { + pPictureResource = pPictureResource_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoReferenceSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoReferenceSlotInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, slotIndex, pPictureResource ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoReferenceSlotInfoKHR const & ) const = default; +#else + bool operator==( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && ( pPictureResource == rhs.pPictureResource ); +# endif + } + + bool operator!=( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoReferenceSlotInfoKHR; + const void * pNext = {}; + int32_t slotIndex = {}; + const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource = {}; + }; + + template <> + struct CppType + { + using Type = VideoReferenceSlotInfoKHR; + }; + + struct VideoBeginCodingInfoKHR + { + using NativeType = VkVideoBeginCodingInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBeginCodingInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , videoSession( videoSession_ ) + , 
videoSessionParameters( videoSessionParameters_ ) + , referenceSlotCount( referenceSlotCount_ ) + , pReferenceSlots( pReferenceSlots_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoBeginCodingInfoKHR( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoBeginCodingInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoBeginCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , videoSession( videoSession_ ) + , videoSessionParameters( videoSessionParameters_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoBeginCodingInfoKHR & operator=( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoBeginCodingInfoKHR & operator=( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT + { + videoSession = videoSession_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & + setVideoSessionParameters( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT + { + videoSessionParameters = videoSessionParameters_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots = pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoBeginCodingInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoBeginCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoBeginCodingInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, videoSession, videoSessionParameters, referenceSlotCount, 
pReferenceSlots ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoBeginCodingInfoKHR const & ) const = default; +#else + bool operator==( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( videoSession == rhs.videoSession ) && + ( videoSessionParameters == rhs.videoSessionParameters ) && ( referenceSlotCount == rhs.referenceSlotCount ) && + ( pReferenceSlots == rhs.pReferenceSlots ); +# endif + } + + bool operator!=( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBeginCodingInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {}; + }; + + template <> + struct CppType + { + using Type = VideoBeginCodingInfoKHR; + }; + + struct VideoCapabilitiesKHR + { + using NativeType = VkVideoCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D pictureAccessGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minCodedExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {}, + uint32_t maxDpbSlots_ = {}, + uint32_t maxActiveReferencePictures_ = {}, + VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , minBitstreamBufferOffsetAlignment( minBitstreamBufferOffsetAlignment_ ) + , minBitstreamBufferSizeAlignment( minBitstreamBufferSizeAlignment_ ) + , pictureAccessGranularity( pictureAccessGranularity_ ) + , minCodedExtent( minCodedExtent_ ) + , maxCodedExtent( maxCodedExtent_ ) + , maxDpbSlots( maxDpbSlots_ ) + , maxActiveReferencePictures( maxActiveReferencePictures_ ) + , stdHeaderVersion( stdHeaderVersion_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoCapabilitiesKHR( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoCapabilitiesKHR & operator=( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoCapabilitiesKHR & operator=( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + 
std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + minBitstreamBufferOffsetAlignment, + minBitstreamBufferSizeAlignment, + pictureAccessGranularity, + minCodedExtent, + maxCodedExtent, + maxDpbSlots, + maxActiveReferencePictures, + stdHeaderVersion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( minBitstreamBufferOffsetAlignment == rhs.minBitstreamBufferOffsetAlignment ) && + ( minBitstreamBufferSizeAlignment == rhs.minBitstreamBufferSizeAlignment ) && ( pictureAccessGranularity == rhs.pictureAccessGranularity ) && + ( minCodedExtent == rhs.minCodedExtent ) && ( maxCodedExtent == rhs.maxCodedExtent ) && ( maxDpbSlots == rhs.maxDpbSlots ) && + ( maxActiveReferencePictures == rhs.maxActiveReferencePictures ) && ( stdHeaderVersion == rhs.stdHeaderVersion ); +# endif + } + + bool operator!=( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment = {}; + VULKAN_HPP_NAMESPACE::Extent2D pictureAccessGranularity = {}; + VULKAN_HPP_NAMESPACE::Extent2D minCodedExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {}; + uint32_t maxDpbSlots = {}; + uint32_t maxActiveReferencePictures = {}; + VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion = {}; + }; + + template <> + struct CppType + { + using Type = VideoCapabilitiesKHR; + }; + + struct VideoCodingControlInfoKHR + { + using NativeType = VkVideoCodingControlInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCodingControlInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoCodingControlInfoKHR( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoCodingControlInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoCodingControlInfoKHR & operator=( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoCodingControlInfoKHR & operator=( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } 
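+    // VideoCodingControlFlagsKHR selects the control operation performed by
+    // vkCmdControlVideoCodingKHR; e.g. the reset bit is used to (re)initialize
+    // the bound video session before its first use (see the VK_KHR_video_queue
+    // spec for the full semantics).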
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoCodingControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoCodingControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoCodingControlInfoKHR const & ) const = default; +#else + bool operator==( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCodingControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags = {}; + }; + + template <> + struct CppType + { + using Type = VideoCodingControlInfoKHR; + }; + + struct VideoDecodeCapabilitiesKHR + { + using NativeType = VkVideoDecodeCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeCapabilitiesKHR( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeCapabilitiesKHR & operator=( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeCapabilitiesKHR & operator=( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoDecodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeCapabilitiesKHR; + void * pNext = {}; + 
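+    // pNext is non-const here: VideoDecodeCapabilitiesKHR is an output
+    // structure, chained into VideoCapabilitiesKHR::pNext and filled in by
+    // vkGetPhysicalDeviceVideoCapabilitiesKHR.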
VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeCapabilitiesKHR; + }; + + struct VideoDecodeH264CapabilitiesKHR + { + using NativeType = VkVideoDecodeH264CapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264CapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesKHR( StdVideoH264LevelIdc maxLevelIdc_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxLevelIdc( maxLevelIdc_ ) + , fieldOffsetGranularity( fieldOffsetGranularity_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesKHR( VideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264CapabilitiesKHR( VkVideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264CapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeH264CapabilitiesKHR & operator=( VideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH264CapabilitiesKHR & operator=( VkVideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoDecodeH264CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxLevelIdc, fieldOffsetGranularity ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = fieldOffsetGranularity <=> rhs.fieldOffsetGranularity; cmp != 0 )
+        return cmp;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 ) &&
+             ( fieldOffsetGranularity == rhs.fieldOffsetGranularity );
+    }
+
+    bool operator!=( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264CapabilitiesKHR;
+    void * pNext = {};
+    StdVideoH264LevelIdc maxLevelIdc = {};
+    VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeH264CapabilitiesKHR>
+  {
+    using Type = VideoDecodeH264CapabilitiesKHR;
+  };
+
+  struct VideoDecodeH264DpbSlotInfoKHR
+  {
+    using NativeType = VkVideoDecodeH264DpbSlotInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264DpbSlotInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoKHR( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ = {},
+                                                        const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , pStdReferenceInfo( pStdReferenceInfo_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoKHR( VideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoDecodeH264DpbSlotInfoKHR( VkVideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoDecodeH264DpbSlotInfoKHR( *reinterpret_cast<VideoDecodeH264DpbSlotInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoDecodeH264DpbSlotInfoKHR & operator=( VideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoDecodeH264DpbSlotInfoKHR & operator=( VkVideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoKHR &
+      setPStdReferenceInfo( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdReferenceInfo = pStdReferenceInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkVideoDecodeH264DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH264DpbSlotInfoKHR *>( this );
+    }
+
+    operator VkVideoDecodeH264DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH264DpbSlotInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoDecodeH264ReferenceInfo * const &>
+# endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pStdReferenceInfo );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoDecodeH264DpbSlotInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoDecodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
+# endif
+    }
+
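+    // Note that the comparison above is shallow: pStdReferenceInfo is compared
+    // by pointer value, not by the contents of the
+    // StdVideoDecodeH264ReferenceInfo it points to.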
bool operator!=( VideoDecodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264DpbSlotInfoKHR; + const void * pNext = {}; + const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH264DpbSlotInfoKHR; + }; + + struct VideoDecodeH264PictureInfoKHR + { + using NativeType = VkVideoDecodeH264PictureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264PictureInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoKHR( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ = {}, + uint32_t sliceCount_ = {}, + const uint32_t * pSliceOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pStdPictureInfo( pStdPictureInfo_ ) + , sliceCount( sliceCount_ ) + , pSliceOffsets( pSliceOffsets_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoKHR( VideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264PictureInfoKHR( VkVideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264PictureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264PictureInfoKHR( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sliceOffsets_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), pStdPictureInfo( pStdPictureInfo_ ), sliceCount( static_cast( sliceOffsets_.size() ) ), pSliceOffsets( sliceOffsets_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoDecodeH264PictureInfoKHR & operator=( VideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH264PictureInfoKHR & operator=( VkVideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setSliceCount( uint32_t sliceCount_ ) VULKAN_HPP_NOEXCEPT + { + sliceCount = sliceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPSliceOffsets( const uint32_t * pSliceOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pSliceOffsets = pSliceOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264PictureInfoKHR & setSliceOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sliceOffsets_ ) VULKAN_HPP_NOEXCEPT + { + sliceCount = static_cast( sliceOffsets_.size() ); + pSliceOffsets = sliceOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoDecodeH264PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264PictureInfoKHR &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdPictureInfo, sliceCount, pSliceOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264PictureInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( sliceCount == rhs.sliceCount ) && + ( pSliceOffsets == rhs.pSliceOffsets ); +# endif + } + + bool operator!=( VideoDecodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264PictureInfoKHR; + const void * pNext = {}; + const StdVideoDecodeH264PictureInfo * pStdPictureInfo = {}; + uint32_t sliceCount = {}; + const uint32_t * pSliceOffsets = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH264PictureInfoKHR; + }; + + struct VideoDecodeH264ProfileInfoKHR + { + using NativeType = VkVideoDecodeH264ProfileInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264ProfileInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileInfoKHR( StdVideoH264ProfileIdc stdProfileIdc_ = {}, + VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout_ = + VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stdProfileIdc( stdProfileIdc_ ) + , pictureLayout( pictureLayout_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileInfoKHR( VideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264ProfileInfoKHR( VkVideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264ProfileInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeH264ProfileInfoKHR & operator=( VideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH264ProfileInfoKHR & operator=( VkVideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & + setPictureLayout( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout_ ) VULKAN_HPP_NOEXCEPT + { + pictureLayout = pictureLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoDecodeH264ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264ProfileInfoKHR &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdProfileIdc, pictureLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = pictureLayout <=> rhs.pictureLayout; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 ) && + ( pictureLayout == rhs.pictureLayout ); + } + + bool operator!=( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264ProfileInfoKHR; + const void * pNext = {}; + StdVideoH264ProfileIdc stdProfileIdc = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout = VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH264ProfileInfoKHR; + }; + + struct VideoDecodeH264SessionParametersAddInfoKHR + { + using NativeType = VkVideoDecodeH264SessionParametersAddInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersAddInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoKHR( uint32_t stdSPSCount_ = {}, + const StdVideoH264SequenceParameterSet * pStdSPSs_ = {}, + uint32_t stdPPSCount_ = {}, + const StdVideoH264PictureParameterSet * pStdPPSs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stdSPSCount( stdSPSCount_ ) + , pStdSPSs( pStdSPSs_ ) + , stdPPSCount( stdPPSCount_ ) + , pStdPPSs( pStdPPSs_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoKHR( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersAddInfoKHR( VkVideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264SessionParametersAddInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stdSPSCount( static_cast( stdSPSs_.size() ) ) + , pStdSPSs( stdSPSs_.data() ) + , stdPPSCount( static_cast( stdPPSs_.size() ) ) + , pStdPPSs( stdPPSs_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoDecodeH264SessionParametersAddInfoKHR & operator=( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoDecodeH264SessionParametersAddInfoKHR & operator=( VkVideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdSPSCount = stdSPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH264SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdSPSs = pStdSPSs_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoDecodeH264SessionParametersAddInfoKHR &
+      setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdSPSCount = static_cast<uint32_t>( stdSPSs_.size() );
+      pStdSPSs    = stdSPSs_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdPPSCount = stdPPSCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH264PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdPPSs = pStdPPSs_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoDecodeH264SessionParametersAddInfoKHR &
+      setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stdPPSCount = static_cast<uint32_t>( stdPPSs_.size() );
+      pStdPPSs    = stdPPSs_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkVideoDecodeH264SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoDecodeH264SessionParametersAddInfoKHR *>( this );
+    }
+
+    operator VkVideoDecodeH264SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoDecodeH264SessionParametersAddInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               uint32_t const &,
+               const StdVideoH264SequenceParameterSet * const &,
+               uint32_t const &,
+               const StdVideoH264PictureParameterSet * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoDecodeH264SessionParametersAddInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) &&
+             ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs );
+# endif
+    }
+
+    bool operator!=( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType      sType       = StructureType::eVideoDecodeH264SessionParametersAddInfoKHR;
+    const void *                             pNext       = {};
+    uint32_t                                 stdSPSCount = {};
+    const StdVideoH264SequenceParameterSet * pStdSPSs    = {};
+    uint32_t                                 stdPPSCount = {};
+    const StdVideoH264PictureParameterSet *  pStdPPSs    = {};
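+    // Illustrative sketch (not part of the generated header; assumes the default
+    // "vk" namespace): in enhanced mode the ArrayProxyNoTemporaries constructor and
+    // the setStdSPSs / setStdPPSs overloads above keep each count/pointer pair in
+    // sync from a single container:
+    //
+    //   std::vector<StdVideoH264SequenceParameterSet> spss = /* ... */;
+    //   std::vector<StdVideoH264PictureParameterSet>  ppss = /* ... */;
+    //   vk::VideoDecodeH264SessionParametersAddInfoKHR addInfo( spss, ppss );
+    //   // stdSPSCount/pStdSPSs and stdPPSCount/pStdPPSs are now filled together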
}; + + template <> + struct CppType + { + using Type = VideoDecodeH264SessionParametersAddInfoKHR; + }; + + struct VideoDecodeH264SessionParametersCreateInfoKHR + { + using NativeType = VkVideoDecodeH264SessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoDecodeH264SessionParametersCreateInfoKHR( uint32_t maxStdSPSCount_ = {}, + uint32_t maxStdPPSCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxStdSPSCount( maxStdSPSCount_ ) + , maxStdPPSCount( maxStdPPSCount_ ) + , pParametersAddInfo( pParametersAddInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR + VideoDecodeH264SessionParametersCreateInfoKHR( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersCreateInfoKHR( VkVideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeH264SessionParametersCreateInfoKHR & operator=( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH264SessionParametersCreateInfoKHR & operator=( VkVideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdSPSCount = maxStdSPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdPPSCount = maxStdPPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & + setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + { + pParametersAddInfo = pParametersAddInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoDecodeH264SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264SessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && 
( pNext == rhs.pNext ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && ( maxStdPPSCount == rhs.maxStdPPSCount ) && + ( pParametersAddInfo == rhs.pParametersAddInfo ); +# endif + } + + bool operator!=( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR; + const void * pNext = {}; + uint32_t maxStdSPSCount = {}; + uint32_t maxStdPPSCount = {}; + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH264SessionParametersCreateInfoKHR; + }; + + struct VideoDecodeH265CapabilitiesKHR + { + using NativeType = VkVideoDecodeH265CapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265CapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesKHR( StdVideoH265LevelIdc maxLevelIdc_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxLevelIdc( maxLevelIdc_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesKHR( VideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265CapabilitiesKHR( VkVideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265CapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeH265CapabilitiesKHR & operator=( VideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH265CapabilitiesKHR & operator=( VkVideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoDecodeH265CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxLevelIdc ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 ); + } + + bool operator!=( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265CapabilitiesKHR; + void * pNext = {}; + StdVideoH265LevelIdc maxLevelIdc = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH265CapabilitiesKHR; + }; + + struct VideoDecodeH265DpbSlotInfoKHR + { + using NativeType = VkVideoDecodeH265DpbSlotInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265DpbSlotInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoKHR( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pStdReferenceInfo( pStdReferenceInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoKHR( VideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265DpbSlotInfoKHR( VkVideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265DpbSlotInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeH265DpbSlotInfoKHR & operator=( VideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH265DpbSlotInfoKHR & operator=( VkVideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoKHR & + setPStdReferenceInfo( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdReferenceInfo = pStdReferenceInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoDecodeH265DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdReferenceInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265DpbSlotInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); +# endif + } + + bool operator!=( VideoDecodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eVideoDecodeH265DpbSlotInfoKHR; + const void * pNext = {}; + const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH265DpbSlotInfoKHR; + }; + + struct VideoDecodeH265PictureInfoKHR + { + using NativeType = VkVideoDecodeH265PictureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265PictureInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoKHR( const StdVideoDecodeH265PictureInfo * pStdPictureInfo_ = {}, + uint32_t sliceSegmentCount_ = {}, + const uint32_t * pSliceSegmentOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pStdPictureInfo( pStdPictureInfo_ ) + , sliceSegmentCount( sliceSegmentCount_ ) + , pSliceSegmentOffsets( pSliceSegmentOffsets_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoKHR( VideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265PictureInfoKHR( VkVideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265PictureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265PictureInfoKHR( const StdVideoDecodeH265PictureInfo * pStdPictureInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sliceSegmentOffsets_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , pStdPictureInfo( pStdPictureInfo_ ) + , sliceSegmentCount( static_cast( sliceSegmentOffsets_.size() ) ) + , pSliceSegmentOffsets( sliceSegmentOffsets_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoDecodeH265PictureInfoKHR & operator=( VideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH265PictureInfoKHR & operator=( VkVideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setSliceSegmentCount( uint32_t sliceSegmentCount_ ) VULKAN_HPP_NOEXCEPT + { + sliceSegmentCount = sliceSegmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPSliceSegmentOffsets( const uint32_t * pSliceSegmentOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pSliceSegmentOffsets = pSliceSegmentOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265PictureInfoKHR & + setSliceSegmentOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sliceSegmentOffsets_ ) VULKAN_HPP_NOEXCEPT + { + sliceSegmentCount = static_cast( sliceSegmentOffsets_.size() ); + pSliceSegmentOffsets = sliceSegmentOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoDecodeH265PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265PictureInfoKHR &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdPictureInfo, sliceSegmentCount, pSliceSegmentOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265PictureInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( sliceSegmentCount == rhs.sliceSegmentCount ) && + ( pSliceSegmentOffsets == rhs.pSliceSegmentOffsets ); +# endif + } + + bool operator!=( VideoDecodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265PictureInfoKHR; + const void * pNext = {}; + const StdVideoDecodeH265PictureInfo * pStdPictureInfo = {}; + uint32_t sliceSegmentCount = {}; + const uint32_t * pSliceSegmentOffsets = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH265PictureInfoKHR; + }; + + struct VideoDecodeH265ProfileInfoKHR + { + using NativeType = VkVideoDecodeH265ProfileInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265ProfileInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoKHR( StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stdProfileIdc( stdProfileIdc_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoKHR( VideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265ProfileInfoKHR( VkVideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265ProfileInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeH265ProfileInfoKHR & operator=( VideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH265ProfileInfoKHR & operator=( VkVideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoKHR & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoDecodeH265ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdProfileIdc ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( 
VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 ); + } + + bool operator!=( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265ProfileInfoKHR; + const void * pNext = {}; + StdVideoH265ProfileIdc stdProfileIdc = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH265ProfileInfoKHR; + }; + + struct VideoDecodeH265SessionParametersAddInfoKHR + { + using NativeType = VkVideoDecodeH265SessionParametersAddInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersAddInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoKHR( uint32_t stdVPSCount_ = {}, + const StdVideoH265VideoParameterSet * pStdVPSs_ = {}, + uint32_t stdSPSCount_ = {}, + const StdVideoH265SequenceParameterSet * pStdSPSs_ = {}, + uint32_t stdPPSCount_ = {}, + const StdVideoH265PictureParameterSet * pStdPPSs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stdVPSCount( stdVPSCount_ ) + , pStdVPSs( pStdVPSs_ ) + , stdSPSCount( stdSPSCount_ ) + , pStdSPSs( pStdSPSs_ ) + , stdPPSCount( stdPPSCount_ ) + , pStdPPSs( pStdPPSs_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoKHR( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionParametersAddInfoKHR( VkVideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265SessionParametersAddInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdVPSs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stdVPSCount( static_cast( stdVPSs_.size() ) ) + , pStdVPSs( stdVPSs_.data() ) + , stdSPSCount( static_cast( stdSPSs_.size() ) ) + , pStdSPSs( stdSPSs_.data() ) + , stdPPSCount( static_cast( stdPPSs_.size() ) ) + , pStdPPSs( stdPPSs_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoDecodeH265SessionParametersAddInfoKHR & operator=( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH265SessionParametersAddInfoKHR & operator=( VkVideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
VideoDecodeH265SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdVPSCount( uint32_t stdVPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdVPSCount = stdVPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdVPSs( const StdVideoH265VideoParameterSet * pStdVPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdVPSs = pStdVPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265SessionParametersAddInfoKHR & + setStdVPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdVPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdVPSCount = static_cast( stdVPSs_.size() ); + pStdVPSs = stdVPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSCount = stdSPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH265SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdSPSs = pStdSPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265SessionParametersAddInfoKHR & + setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSCount = static_cast( stdSPSs_.size() ); + pStdSPSs = stdSPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSCount = stdPPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH265PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdPPSs = pStdPPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265SessionParametersAddInfoKHR & + setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSCount = static_cast( stdPPSs_.size() ); + pStdPPSs = stdPPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoDecodeH265SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdVPSCount, pStdVPSs, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265SessionParametersAddInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdVPSCount == rhs.stdVPSCount ) && ( pStdVPSs == rhs.pStdVPSs ) && + ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) && ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs ); +# 
endif + } + + bool operator!=( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersAddInfoKHR; + const void * pNext = {}; + uint32_t stdVPSCount = {}; + const StdVideoH265VideoParameterSet * pStdVPSs = {}; + uint32_t stdSPSCount = {}; + const StdVideoH265SequenceParameterSet * pStdSPSs = {}; + uint32_t stdPPSCount = {}; + const StdVideoH265PictureParameterSet * pStdPPSs = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH265SessionParametersAddInfoKHR; + }; + + struct VideoDecodeH265SessionParametersCreateInfoKHR + { + using NativeType = VkVideoDecodeH265SessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoDecodeH265SessionParametersCreateInfoKHR( uint32_t maxStdVPSCount_ = {}, + uint32_t maxStdSPSCount_ = {}, + uint32_t maxStdPPSCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxStdVPSCount( maxStdVPSCount_ ) + , maxStdSPSCount( maxStdSPSCount_ ) + , maxStdPPSCount( maxStdPPSCount_ ) + , pParametersAddInfo( pParametersAddInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR + VideoDecodeH265SessionParametersCreateInfoKHR( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionParametersCreateInfoKHR( VkVideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeH265SessionParametersCreateInfoKHR & operator=( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeH265SessionParametersCreateInfoKHR & operator=( VkVideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdVPSCount( uint32_t maxStdVPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdVPSCount = maxStdVPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdSPSCount = maxStdSPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdPPSCount = maxStdPPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & + setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + { + pParametersAddInfo = pParametersAddInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoDecodeH265SessionParametersCreateInfoKHR const &() 
const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH265SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxStdVPSCount, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH265SessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdVPSCount == rhs.maxStdVPSCount ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && + ( maxStdPPSCount == rhs.maxStdPPSCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo ); +# endif + } + + bool operator!=( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR; + const void * pNext = {}; + uint32_t maxStdVPSCount = {}; + uint32_t maxStdSPSCount = {}; + uint32_t maxStdPPSCount = {}; + const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH265SessionParametersCreateInfoKHR; + }; + + struct VideoDecodeInfoKHR + { + using NativeType = VkVideoDecodeInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ = {}, + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , srcBuffer( srcBuffer_ ) + , srcBufferOffset( srcBufferOffset_ ) + , srcBufferRange( srcBufferRange_ ) + , dstPictureResource( dstPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( referenceSlotCount_ ) + , pReferenceSlots( pReferenceSlots_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeInfoKHR( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoDecodeInfoKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_, + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource_, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * 
pSetupReferenceSlot_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , srcBuffer( srcBuffer_ ) + , srcBufferOffset( srcBufferOffset_ ) + , srcBufferRange( srcBufferRange_ ) + , dstPictureResource( dstPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoDecodeInfoKHR & operator=( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeInfoKHR & operator=( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + { + srcBuffer = srcBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + srcBufferOffset = srcBufferOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ ) VULKAN_HPP_NOEXCEPT + { + srcBufferRange = srcBufferRange_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setDstPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & dstPictureResource_ ) VULKAN_HPP_NOEXCEPT + { + dstPictureResource = dstPictureResource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT + { + pSetupReferenceSlot = pSetupReferenceSlot_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots = pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoDecodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, flags, srcBuffer, srcBufferOffset, srcBufferRange, 
dstPictureResource, pSetupReferenceSlot, referenceSlotCount, pReferenceSlots ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcBuffer == rhs.srcBuffer ) && + ( srcBufferOffset == rhs.srcBufferOffset ) && ( srcBufferRange == rhs.srcBufferRange ) && ( dstPictureResource == rhs.dstPictureResource ) && + ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) && + ( pReferenceSlots == rhs.pReferenceSlots ); +# endif + } + + bool operator!=( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange = {}; + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeInfoKHR; + }; + + struct VideoDecodeUsageInfoKHR + { + using NativeType = VkVideoDecodeUsageInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeUsageInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , videoUsageHints( videoUsageHints_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR( VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeUsageInfoKHR( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeUsageInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoDecodeUsageInfoKHR & operator=( VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoDecodeUsageInfoKHR & operator=( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setVideoUsageHints( VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints_ ) VULKAN_HPP_NOEXCEPT + { + videoUsageHints = videoUsageHints_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoDecodeUsageInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeUsageInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + 
std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, videoUsageHints );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoDecodeUsageInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoUsageHints == rhs.videoUsageHints );
+# endif
+    }
+
+    bool operator!=( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType            sType           = StructureType::eVideoDecodeUsageInfoKHR;
+    const void *                                   pNext           = {};
+    VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoDecodeUsageInfoKHR>
+  {
+    using Type = VideoDecodeUsageInfoKHR;
+  };
+
+  struct VideoEncodeCapabilitiesKHR
+  {
+    using NativeType = VkVideoEncodeCapabilitiesKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeCapabilitiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR       flags_                         = {},
+                                                     VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes_              = {},
+                                                     uint32_t                                                  maxRateControlLayers_          = {},
+                                                     uint64_t                                                  maxBitrate_                    = {},
+                                                     uint32_t                                                  maxQualityLevels_              = {},
+                                                     VULKAN_HPP_NAMESPACE::Extent2D                            encodeInputPictureGranularity_ = {},
+                                                     VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR         supportedEncodeFeedbackFlags_  = {},
+                                                     void *                                                    pNext_                         = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , rateControlModes( rateControlModes_ )
+      , maxRateControlLayers( maxRateControlLayers_ )
+      , maxBitrate( maxBitrate_ )
+      , maxQualityLevels( maxQualityLevels_ )
+      , encodeInputPictureGranularity( encodeInputPictureGranularity_ )
+      , supportedEncodeFeedbackFlags( supportedEncodeFeedbackFlags_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeCapabilitiesKHR( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeCapabilitiesKHR( *reinterpret_cast<VideoEncodeCapabilitiesKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoEncodeCapabilitiesKHR & operator=( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeCapabilitiesKHR & operator=( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkVideoEncodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeCapabilitiesKHR *>( this );
+    }
+
+    operator VkVideoEncodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeCapabilitiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR const &,
+               VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR const &,
+               uint32_t const &,
+               uint64_t const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::Extent2D const &,
+               VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       flags,
+                       rateControlModes,
+                       maxRateControlLayers,
+                       maxBitrate,
+                       maxQualityLevels,
+                       encodeInputPictureGranularity,
+                       supportedEncodeFeedbackFlags );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeCapabilitiesKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
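+      // Descriptive note (not part of the generated header): when
+      // VULKAN_HPP_USE_REFLECT is defined, equality is evaluated by comparing the
+      // std::tie'd member tuples returned by reflect(); otherwise it falls back to
+      // the explicit member-by-member comparison below.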
+# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rateControlModes == rhs.rateControlModes ) && + ( maxRateControlLayers == rhs.maxRateControlLayers ) && ( maxBitrate == rhs.maxBitrate ) && ( maxQualityLevels == rhs.maxQualityLevels ) && + ( encodeInputPictureGranularity == rhs.encodeInputPictureGranularity ) && ( supportedEncodeFeedbackFlags == rhs.supportedEncodeFeedbackFlags ); +# endif + } + + bool operator!=( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes = {}; + uint32_t maxRateControlLayers = {}; + uint64_t maxBitrate = {}; + uint32_t maxQualityLevels = {}; + VULKAN_HPP_NAMESPACE::Extent2D encodeInputPictureGranularity = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR supportedEncodeFeedbackFlags = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeCapabilitiesKHR; + }; + + struct VideoEncodeH264CapabilitiesKHR + { + using NativeType = VkVideoEncodeH264CapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264CapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsKHR flags_ = {}, + StdVideoH264LevelIdc maxLevelIdc_ = {}, + uint32_t maxSliceCount_ = {}, + uint32_t maxPPictureL0ReferenceCount_ = {}, + uint32_t maxBPictureL0ReferenceCount_ = {}, + uint32_t maxL1ReferenceCount_ = {}, + uint32_t maxTemporalLayerCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 expectDyadicTemporalLayerPattern_ = {}, + int32_t minQp_ = {}, + int32_t maxQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagsKHR stdSyntaxFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , maxLevelIdc( maxLevelIdc_ ) + , maxSliceCount( maxSliceCount_ ) + , maxPPictureL0ReferenceCount( maxPPictureL0ReferenceCount_ ) + , maxBPictureL0ReferenceCount( maxBPictureL0ReferenceCount_ ) + , maxL1ReferenceCount( maxL1ReferenceCount_ ) + , maxTemporalLayerCount( maxTemporalLayerCount_ ) + , expectDyadicTemporalLayerPattern( expectDyadicTemporalLayerPattern_ ) + , minQp( minQp_ ) + , maxQp( maxQp_ ) + , prefersGopRemainingFrames( prefersGopRemainingFrames_ ) + , requiresGopRemainingFrames( requiresGopRemainingFrames_ ) + , stdSyntaxFlags( stdSyntaxFlags_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesKHR( VideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264CapabilitiesKHR( VkVideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264CapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264CapabilitiesKHR & operator=( VideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264CapabilitiesKHR & operator=( VkVideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeH264CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + maxLevelIdc, + maxSliceCount, + maxPPictureL0ReferenceCount, + maxBPictureL0ReferenceCount, + maxL1ReferenceCount, + maxTemporalLayerCount, + expectDyadicTemporalLayerPattern, + minQp, + maxQp, + prefersGopRemainingFrames, + requiresGopRemainingFrames, + stdSyntaxFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = maxSliceCount <=> rhs.maxSliceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxPPictureL0ReferenceCount <=> rhs.maxPPictureL0ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxBPictureL0ReferenceCount <=> rhs.maxBPictureL0ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxL1ReferenceCount <=> rhs.maxL1ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxTemporalLayerCount <=> rhs.maxTemporalLayerCount; cmp != 0 ) + return cmp; + if ( auto cmp = expectDyadicTemporalLayerPattern <=> rhs.expectDyadicTemporalLayerPattern; cmp != 0 ) + return cmp; + if ( auto cmp = minQp <=> rhs.minQp; cmp != 0 ) + return cmp; + if ( auto cmp = maxQp <=> rhs.maxQp; cmp != 0 ) + return cmp; + if ( auto cmp = prefersGopRemainingFrames <=> rhs.prefersGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = requiresGopRemainingFrames <=> rhs.requiresGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = stdSyntaxFlags <=> rhs.stdSyntaxFlags; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 ) && ( maxSliceCount == rhs.maxSliceCount ) && + ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount ) && ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount ) && + ( maxL1ReferenceCount == rhs.maxL1ReferenceCount ) && ( maxTemporalLayerCount == rhs.maxTemporalLayerCount ) && + ( expectDyadicTemporalLayerPattern == rhs.expectDyadicTemporalLayerPattern ) && ( minQp == rhs.minQp ) && ( maxQp == rhs.maxQp ) && + ( prefersGopRemainingFrames == rhs.prefersGopRemainingFrames ) && ( requiresGopRemainingFrames == rhs.requiresGopRemainingFrames ) && + ( stdSyntaxFlags == rhs.stdSyntaxFlags ); + } + + bool operator!=( VideoEncodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264CapabilitiesKHR; + void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsKHR flags = {}; + StdVideoH264LevelIdc maxLevelIdc = {}; + uint32_t maxSliceCount = {}; + uint32_t maxPPictureL0ReferenceCount = {}; + uint32_t maxBPictureL0ReferenceCount = {}; + uint32_t maxL1ReferenceCount = {}; + uint32_t maxTemporalLayerCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 expectDyadicTemporalLayerPattern = {}; + int32_t minQp = {}; + int32_t maxQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagsKHR stdSyntaxFlags = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264CapabilitiesKHR; + }; + + struct VideoEncodeH264DpbSlotInfoKHR + { + using NativeType = VkVideoEncodeH264DpbSlotInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264DpbSlotInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoKHR( const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pStdReferenceInfo( pStdReferenceInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoKHR( VideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264DpbSlotInfoKHR( VkVideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264DpbSlotInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264DpbSlotInfoKHR & operator=( VideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264DpbSlotInfoKHR & operator=( VkVideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoKHR & + setPStdReferenceInfo( const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdReferenceInfo = pStdReferenceInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH264DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdReferenceInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264DpbSlotInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); +# endif + } + + bool operator!=( VideoEncodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264DpbSlotInfoKHR; + const void * pNext = 
{};
+    const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264DpbSlotInfoKHR>
+  {
+    using Type = VideoEncodeH264DpbSlotInfoKHR;
+  };
+
+  struct VideoEncodeH264FrameSizeKHR
+  {
+    using NativeType = VkVideoEncodeH264FrameSizeKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeKHR( uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT
+      : frameISize( frameISize_ )
+      , framePSize( framePSize_ )
+      , frameBSize( frameBSize_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeKHR( VideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264FrameSizeKHR( VkVideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264FrameSizeKHR( *reinterpret_cast<VideoEncodeH264FrameSizeKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoEncodeH264FrameSizeKHR & operator=( VideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264FrameSizeKHR & operator=( VkVideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeKHR & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frameISize = frameISize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeKHR & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      framePSize = framePSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeKHR & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frameBSize = frameBSize_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkVideoEncodeH264FrameSizeKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264FrameSizeKHR *>( this );
+    }
+
+    operator VkVideoEncodeH264FrameSizeKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264FrameSizeKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( frameISize, framePSize, frameBSize );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH264FrameSizeKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeH264FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( frameISize == rhs.frameISize ) && ( framePSize == rhs.framePSize ) && ( frameBSize == rhs.frameBSize );
+#  endif
+    }
+
+    bool operator!=( VideoEncodeH264FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t frameISize = {};
+    uint32_t framePSize = {};
+    uint32_t frameBSize = {};
+  };
+
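+  // NOTE (editorial, illustrative only -- not part of the generated header):
+  // a minimal sketch of how VideoEncodeH264FrameSizeKHR is consumed. It is the
+  // per-frame-type byte budget that VideoEncodeH264RateControlLayerInfoKHR
+  // (defined further below) takes via setMaxFrameSize(); the three uint32_t
+  // values here are hypothetical I/P/B budgets:
+  //
+  //   VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoKHR layer{};
+  //   layer.setUseMaxFrameSize( VK_TRUE )
+  //        .setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR{ 40000, 20000, 10000 } );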
+  struct VideoEncodeH264GopRemainingFrameInfoKHR
+  {
+    using NativeType = VkVideoEncodeH264GopRemainingFrameInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264GopRemainingFrameInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ = {},
+                                                                  uint32_t gopRemainingI_ = {},
+                                                                  uint32_t gopRemainingP_ = {},
+                                                                  uint32_t gopRemainingB_ = {},
+                                                                  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , useGopRemainingFrames( useGopRemainingFrames_ )
+      , gopRemainingI( gopRemainingI_ )
+      , gopRemainingP( gopRemainingP_ )
+      , gopRemainingB( gopRemainingB_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264GopRemainingFrameInfoKHR( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264GopRemainingFrameInfoKHR( VkVideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264GopRemainingFrameInfoKHR( *reinterpret_cast<VideoEncodeH264GopRemainingFrameInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoEncodeH264GopRemainingFrameInfoKHR & operator=( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264GopRemainingFrameInfoKHR & operator=( VkVideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR &
+      setUseGopRemainingFrames( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      useGopRemainingFrames = useGopRemainingFrames_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setGopRemainingI( uint32_t gopRemainingI_ ) VULKAN_HPP_NOEXCEPT
+    {
+      gopRemainingI = gopRemainingI_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setGopRemainingP( uint32_t gopRemainingP_ ) VULKAN_HPP_NOEXCEPT
+    {
+      gopRemainingP = gopRemainingP_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setGopRemainingB( uint32_t gopRemainingB_ ) VULKAN_HPP_NOEXCEPT
+    {
+      gopRemainingB = gopRemainingB_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkVideoEncodeH264GopRemainingFrameInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264GopRemainingFrameInfoKHR *>( this );
+    }
+
+    operator VkVideoEncodeH264GopRemainingFrameInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264GopRemainingFrameInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               uint32_t const &,
+               uint32_t const &,
+               uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, useGopRemainingFrames, gopRemainingI, gopRemainingP, gopRemainingB );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH264GopRemainingFrameInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useGopRemainingFrames == rhs.useGopRemainingFrames ) &&
+             ( gopRemainingI == rhs.gopRemainingI ) && ( gopRemainingP == rhs.gopRemainingP ) && ( gopRemainingB == rhs.gopRemainingB );
+#  endif
+    }
+
+    bool operator!=( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames = {};
+    uint32_t gopRemainingI = {};
+    uint32_t gopRemainingP = {};
+    uint32_t gopRemainingB = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR>
+  {
+    using Type = VideoEncodeH264GopRemainingFrameInfoKHR;
+  };
+
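+  // NOTE (editorial, illustrative only -- not part of the generated header):
+  // a hedged sketch of feeding the remaining-GOP hint to the encoder. Per my
+  // reading of VK_KHR_video_encode_h264 this struct is chained into the pNext
+  // of VideoBeginCodingInfoKHR (verify against the spec); `beginInfo` and the
+  // frame counts are hypothetical:
+  //
+  //   VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoKHR gopInfo{};
+  //   gopInfo.setUseGopRemainingFrames( VK_TRUE ).setGopRemainingI( 1 ).setGopRemainingP( 14 ).setGopRemainingB( 15 );
+  //   VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR beginInfo{};  // filled elsewhere
+  //   beginInfo.setPNext( &gopInfo );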
+  struct VideoEncodeH264NaluSliceInfoKHR
+  {
+    using NativeType = VkVideoEncodeH264NaluSliceInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264NaluSliceInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoKHR( int32_t constantQp_ = {},
+                                                          const StdVideoEncodeH264SliceHeader * pStdSliceHeader_ = {},
+                                                          const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , constantQp( constantQp_ )
+      , pStdSliceHeader( pStdSliceHeader_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoKHR( VideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264NaluSliceInfoKHR( VkVideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264NaluSliceInfoKHR( *reinterpret_cast<VideoEncodeH264NaluSliceInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoEncodeH264NaluSliceInfoKHR & operator=( VideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264NaluSliceInfoKHR & operator=( VkVideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoKHR & setConstantQp( int32_t constantQp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      constantQp = constantQp_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoKHR & setPStdSliceHeader( const StdVideoEncodeH264SliceHeader * pStdSliceHeader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdSliceHeader = pStdSliceHeader_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkVideoEncodeH264NaluSliceInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264NaluSliceInfoKHR *>( this );
+    }
+
+    operator VkVideoEncodeH264NaluSliceInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264NaluSliceInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, int32_t const &, const StdVideoEncodeH264SliceHeader * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, constantQp, pStdSliceHeader );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH264NaluSliceInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeH264NaluSliceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( constantQp == rhs.constantQp ) && ( pStdSliceHeader == rhs.pStdSliceHeader );
+#  endif
+    }
+
+    bool operator!=( VideoEncodeH264NaluSliceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264NaluSliceInfoKHR;
+    const void * pNext = {};
+    int32_t constantQp = {};
+    const StdVideoEncodeH264SliceHeader * pStdSliceHeader = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264NaluSliceInfoKHR>
+  {
+    using Type = VideoEncodeH264NaluSliceInfoKHR;
+  };
+
+  struct VideoEncodeH264PictureInfoKHR
+  {
+    using NativeType = VkVideoEncodeH264PictureInfoKHR;
+
+    static const bool allowDuplicate = 
false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264PictureInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264PictureInfoKHR( uint32_t naluSliceEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR * pNaluSliceEntries_ = {}, + const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , naluSliceEntryCount( naluSliceEntryCount_ ) + , pNaluSliceEntries( pNaluSliceEntries_ ) + , pStdPictureInfo( pStdPictureInfo_ ) + , generatePrefixNalu( generatePrefixNalu_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264PictureInfoKHR( VideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264PictureInfoKHR( VkVideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264PictureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264PictureInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceEntries_, + const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , naluSliceEntryCount( static_cast( naluSliceEntries_.size() ) ) + , pNaluSliceEntries( naluSliceEntries_.data() ) + , pStdPictureInfo( pStdPictureInfo_ ) + , generatePrefixNalu( generatePrefixNalu_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeH264PictureInfoKHR & operator=( VideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264PictureInfoKHR & operator=( VkVideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setNaluSliceEntryCount( uint32_t naluSliceEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + naluSliceEntryCount = naluSliceEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & + setPNaluSliceEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR * pNaluSliceEntries_ ) VULKAN_HPP_NOEXCEPT + { + pNaluSliceEntries = pNaluSliceEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264PictureInfoKHR & setNaluSliceEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceEntries_ ) VULKAN_HPP_NOEXCEPT + { + naluSliceEntryCount = static_cast( naluSliceEntries_.size() ); + pNaluSliceEntries = naluSliceEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setPStdPictureInfo( const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setGeneratePrefixNalu( VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu_ ) VULKAN_HPP_NOEXCEPT + { + generatePrefixNalu = generatePrefixNalu_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH264PictureInfoKHR const &() 
const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264PictureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, naluSliceEntryCount, pNaluSliceEntries, pStdPictureInfo, generatePrefixNalu ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264PictureInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( naluSliceEntryCount == rhs.naluSliceEntryCount ) && + ( pNaluSliceEntries == rhs.pNaluSliceEntries ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( generatePrefixNalu == rhs.generatePrefixNalu ); +# endif + } + + bool operator!=( VideoEncodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264PictureInfoKHR; + const void * pNext = {}; + uint32_t naluSliceEntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR * pNaluSliceEntries = {}; + const StdVideoEncodeH264PictureInfo * pStdPictureInfo = {}; + VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264PictureInfoKHR; + }; + + struct VideoEncodeH264ProfileInfoKHR + { + using NativeType = VkVideoEncodeH264ProfileInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ProfileInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoKHR( StdVideoH264ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stdProfileIdc( stdProfileIdc_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoKHR( VideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264ProfileInfoKHR( VkVideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264ProfileInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264ProfileInfoKHR & operator=( VideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264ProfileInfoKHR & operator=( VkVideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoKHR & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH264ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# 
if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, StdVideoH264ProfileIdc const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stdProfileIdc );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    std::strong_ordering operator<=>( VideoEncodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+        return cmp;
+      if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( VideoEncodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 );
+    }
+
+    bool operator!=( VideoEncodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ProfileInfoKHR;
+    const void * pNext = {};
+    StdVideoH264ProfileIdc stdProfileIdc = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH264ProfileInfoKHR>
+  {
+    using Type = VideoEncodeH264ProfileInfoKHR;
+  };
+
+  struct VideoEncodeH264QpKHR
+  {
+    using NativeType = VkVideoEncodeH264QpKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264QpKHR( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT
+      : qpI( qpI_ )
+      , qpP( qpP_ )
+      , qpB( qpB_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264QpKHR( VideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH264QpKHR( VkVideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264QpKHR( *reinterpret_cast<VideoEncodeH264QpKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoEncodeH264QpKHR & operator=( VideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH264QpKHR & operator=( VkVideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpKHR & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qpI = qpI_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpKHR & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qpP = qpP_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpKHR & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qpB = qpB_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkVideoEncodeH264QpKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH264QpKHR *>( this );
+    }
+
+    operator VkVideoEncodeH264QpKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264QpKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<int32_t const &, int32_t const &, int32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( qpI, qpP, qpB );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH264QpKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeH264QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( qpI == rhs.qpI ) && ( qpP == rhs.qpP ) && ( qpB == rhs.qpB );
+#  endif
+    }
+
+    bool operator!=( 
VideoEncodeH264QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + int32_t qpI = {}; + int32_t qpP = {}; + int32_t qpB = {}; + }; + + struct VideoEncodeH264QualityLevelPropertiesKHR + { + using NativeType = VkVideoEncodeH264QualityLevelPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264QualityLevelPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264QualityLevelPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR preferredRateControlFlags_ = {}, + uint32_t preferredGopFrameCount_ = {}, + uint32_t preferredIdrPeriod_ = {}, + uint32_t preferredConsecutiveBFrameCount_ = {}, + uint32_t preferredTemporalLayerCount_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR preferredConstantQp_ = {}, + uint32_t preferredMaxL0ReferenceCount_ = {}, + uint32_t preferredMaxL1ReferenceCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 preferredStdEntropyCodingModeFlag_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , preferredRateControlFlags( preferredRateControlFlags_ ) + , preferredGopFrameCount( preferredGopFrameCount_ ) + , preferredIdrPeriod( preferredIdrPeriod_ ) + , preferredConsecutiveBFrameCount( preferredConsecutiveBFrameCount_ ) + , preferredTemporalLayerCount( preferredTemporalLayerCount_ ) + , preferredConstantQp( preferredConstantQp_ ) + , preferredMaxL0ReferenceCount( preferredMaxL0ReferenceCount_ ) + , preferredMaxL1ReferenceCount( preferredMaxL1ReferenceCount_ ) + , preferredStdEntropyCodingModeFlag( preferredStdEntropyCodingModeFlag_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264QualityLevelPropertiesKHR( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264QualityLevelPropertiesKHR( VkVideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264QualityLevelPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264QualityLevelPropertiesKHR & operator=( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264QualityLevelPropertiesKHR & operator=( VkVideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeH264QualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264QualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + preferredRateControlFlags, + preferredGopFrameCount, + preferredIdrPeriod, + preferredConsecutiveBFrameCount, + preferredTemporalLayerCount, + preferredConstantQp, + preferredMaxL0ReferenceCount, + preferredMaxL1ReferenceCount, + preferredStdEntropyCodingModeFlag ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264QualityLevelPropertiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( 
sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlFlags == rhs.preferredRateControlFlags ) && + ( preferredGopFrameCount == rhs.preferredGopFrameCount ) && ( preferredIdrPeriod == rhs.preferredIdrPeriod ) && + ( preferredConsecutiveBFrameCount == rhs.preferredConsecutiveBFrameCount ) && ( preferredTemporalLayerCount == rhs.preferredTemporalLayerCount ) && + ( preferredConstantQp == rhs.preferredConstantQp ) && ( preferredMaxL0ReferenceCount == rhs.preferredMaxL0ReferenceCount ) && + ( preferredMaxL1ReferenceCount == rhs.preferredMaxL1ReferenceCount ) && + ( preferredStdEntropyCodingModeFlag == rhs.preferredStdEntropyCodingModeFlag ); +# endif + } + + bool operator!=( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264QualityLevelPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR preferredRateControlFlags = {}; + uint32_t preferredGopFrameCount = {}; + uint32_t preferredIdrPeriod = {}; + uint32_t preferredConsecutiveBFrameCount = {}; + uint32_t preferredTemporalLayerCount = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR preferredConstantQp = {}; + uint32_t preferredMaxL0ReferenceCount = {}; + uint32_t preferredMaxL1ReferenceCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 preferredStdEntropyCodingModeFlag = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264QualityLevelPropertiesKHR; + }; + + struct VideoEncodeH264RateControlInfoKHR + { + using NativeType = VkVideoEncodeH264RateControlInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR flags_ = {}, + uint32_t gopFrameCount_ = {}, + uint32_t idrPeriod_ = {}, + uint32_t consecutiveBFrameCount_ = {}, + uint32_t temporalLayerCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , gopFrameCount( gopFrameCount_ ) + , idrPeriod( idrPeriod_ ) + , consecutiveBFrameCount( consecutiveBFrameCount_ ) + , temporalLayerCount( temporalLayerCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoKHR( VideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264RateControlInfoKHR( VkVideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264RateControlInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264RateControlInfoKHR & operator=( VideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264RateControlInfoKHR & operator=( VkVideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
VideoEncodeH264RateControlInfoKHR & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + gopFrameCount = gopFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT + { + idrPeriod = idrPeriod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + consecutiveBFrameCount = consecutiveBFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setTemporalLayerCount( uint32_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + temporalLayerCount = temporalLayerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH264RateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264RateControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, gopFrameCount, idrPeriod, consecutiveBFrameCount, temporalLayerCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264RateControlInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( gopFrameCount == rhs.gopFrameCount ) && + ( idrPeriod == rhs.idrPeriod ) && ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) && ( temporalLayerCount == rhs.temporalLayerCount ); +# endif + } + + bool operator!=( VideoEncodeH264RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR flags = {}; + uint32_t gopFrameCount = {}; + uint32_t idrPeriod = {}; + uint32_t consecutiveBFrameCount = {}; + uint32_t temporalLayerCount = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264RateControlInfoKHR; + }; + + struct VideoEncodeH264RateControlLayerInfoKHR + { + using NativeType = VkVideoEncodeH264RateControlLayerInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlLayerInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR minQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR maxQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR maxFrameSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , useMinQp( useMinQp_ ) + , minQp( minQp_ ) + , useMaxQp( useMaxQp_ ) + , maxQp( maxQp_ ) + , useMaxFrameSize( useMaxFrameSize_ ) + , maxFrameSize( maxFrameSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR 
VideoEncodeH264RateControlLayerInfoKHR( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264RateControlLayerInfoKHR( VkVideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264RateControlLayerInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264RateControlLayerInfoKHR & operator=( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264RateControlLayerInfoKHR & operator=( VkVideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT + { + useMinQp = useMinQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const & minQp_ ) VULKAN_HPP_NOEXCEPT + { + minQp = minQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT + { + useMaxQp = useMaxQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const & maxQp_ ) VULKAN_HPP_NOEXCEPT + { + maxQp = maxQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + useMaxFrameSize = useMaxFrameSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & + setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + maxFrameSize = maxFrameSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH264RateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264RateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264RateControlLayerInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) && + ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && ( maxFrameSize == rhs.maxFrameSize ); +# endif + } + + bool operator!=( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eVideoEncodeH264RateControlLayerInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR minQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR maxQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR maxFrameSize = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264RateControlLayerInfoKHR; + }; + + struct VideoEncodeH264SessionCreateInfoKHR + { + using NativeType = VkVideoEncodeH264SessionCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionCreateInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ = {}, + StdVideoH264LevelIdc maxLevelIdc_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , useMaxLevelIdc( useMaxLevelIdc_ ) + , maxLevelIdc( maxLevelIdc_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionCreateInfoKHR( VideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionCreateInfoKHR( VkVideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264SessionCreateInfoKHR & operator=( VideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264SessionCreateInfoKHR & operator=( VkVideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoKHR & setUseMaxLevelIdc( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ ) VULKAN_HPP_NOEXCEPT + { + useMaxLevelIdc = useMaxLevelIdc_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoKHR & setMaxLevelIdc( StdVideoH264LevelIdc maxLevelIdc_ ) VULKAN_HPP_NOEXCEPT + { + maxLevelIdc = maxLevelIdc_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH264SessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useMaxLevelIdc, maxLevelIdc ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH264SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = useMaxLevelIdc <=> rhs.useMaxLevelIdc; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeH264SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMaxLevelIdc == rhs.useMaxLevelIdc ) && + ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 ); + } + + bool operator!=( VideoEncodeH264SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc = {}; + StdVideoH264LevelIdc maxLevelIdc = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264SessionCreateInfoKHR; + }; + + struct VideoEncodeH264SessionParametersAddInfoKHR + { + using NativeType = VkVideoEncodeH264SessionParametersAddInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersAddInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoKHR( uint32_t stdSPSCount_ = {}, + const StdVideoH264SequenceParameterSet * pStdSPSs_ = {}, + uint32_t stdPPSCount_ = {}, + const StdVideoH264PictureParameterSet * pStdPPSs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stdSPSCount( stdSPSCount_ ) + , pStdSPSs( pStdSPSs_ ) + , stdPPSCount( stdPPSCount_ ) + , pStdPPSs( pStdPPSs_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoKHR( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersAddInfoKHR( VkVideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersAddInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stdSPSCount( static_cast( stdSPSs_.size() ) ) + , pStdSPSs( stdSPSs_.data() ) + , stdPPSCount( static_cast( stdPPSs_.size() ) ) + , pStdPPSs( stdPPSs_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeH264SessionParametersAddInfoKHR & operator=( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264SessionParametersAddInfoKHR & operator=( VkVideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSCount = stdSPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH264SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdSPSs = pStdSPSs_; + return *this; + } + +# if 
!defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264SessionParametersAddInfoKHR & + setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSCount = static_cast( stdSPSs_.size() ); + pStdSPSs = stdSPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSCount = stdPPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH264PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdPPSs = pStdPPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264SessionParametersAddInfoKHR & + setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSCount = static_cast( stdPPSs_.size() ); + pStdPPSs = stdPPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH264SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersAddInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) && + ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs ); +# endif + } + + bool operator!=( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersAddInfoKHR; + const void * pNext = {}; + uint32_t stdSPSCount = {}; + const StdVideoH264SequenceParameterSet * pStdSPSs = {}; + uint32_t stdPPSCount = {}; + const StdVideoH264PictureParameterSet * pStdPPSs = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264SessionParametersAddInfoKHR; + }; + + struct VideoEncodeH264SessionParametersCreateInfoKHR + { + using NativeType = VkVideoEncodeH264SessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH264SessionParametersCreateInfoKHR( uint32_t maxStdSPSCount_ = {}, + uint32_t maxStdPPSCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxStdSPSCount( maxStdSPSCount_ ) + , maxStdPPSCount( maxStdPPSCount_ ) + , pParametersAddInfo( 
pParametersAddInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264SessionParametersCreateInfoKHR( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersCreateInfoKHR( VkVideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264SessionParametersCreateInfoKHR & operator=( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264SessionParametersCreateInfoKHR & operator=( VkVideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdSPSCount = maxStdSPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdPPSCount = maxStdPPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & + setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + { + pParametersAddInfo = pParametersAddInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH264SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && ( maxStdPPSCount == rhs.maxStdPPSCount ) && + ( pParametersAddInfo == rhs.pParametersAddInfo ); +# endif + } + + bool operator!=( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR; + const void * pNext = {}; + uint32_t maxStdSPSCount = {}; + uint32_t maxStdPPSCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264SessionParametersCreateInfoKHR; + }; + + struct VideoEncodeH264SessionParametersFeedbackInfoKHR + { + using NativeType = 
VkVideoEncodeH264SessionParametersFeedbackInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersFeedbackInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , hasStdSPSOverrides( hasStdSPSOverrides_ ) + , hasStdPPSOverrides( hasStdPPSOverrides_ ) + { + } + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264SessionParametersFeedbackInfoKHR( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersFeedbackInfoKHR( VkVideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersFeedbackInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264SessionParametersFeedbackInfoKHR & operator=( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264SessionParametersFeedbackInfoKHR & operator=( VkVideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hasStdSPSOverrides, hasStdPPSOverrides ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersFeedbackInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasStdSPSOverrides == rhs.hasStdSPSOverrides ) && + ( hasStdPPSOverrides == rhs.hasStdPPSOverrides ); +# endif + } + + bool operator!=( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264SessionParametersFeedbackInfoKHR; + }; + + struct VideoEncodeH264SessionParametersGetInfoKHR + { + using NativeType = VkVideoEncodeH264SessionParametersGetInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersGetInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersGetInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ = {}, + uint32_t stdSPSId_ 
= {}, + uint32_t stdPPSId_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , writeStdSPS( writeStdSPS_ ) + , writeStdPPS( writeStdPPS_ ) + , stdSPSId( stdSPSId_ ) + , stdPPSId( stdPPSId_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersGetInfoKHR( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264SessionParametersGetInfoKHR( VkVideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersGetInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264SessionParametersGetInfoKHR & operator=( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264SessionParametersGetInfoKHR & operator=( VkVideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setWriteStdSPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ ) VULKAN_HPP_NOEXCEPT + { + writeStdSPS = writeStdSPS_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setWriteStdPPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ ) VULKAN_HPP_NOEXCEPT + { + writeStdPPS = writeStdPPS_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setStdSPSId( uint32_t stdSPSId_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSId = stdSPSId_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setStdPPSId( uint32_t stdPPSId_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSId = stdPPSId_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH264SessionParametersGetInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersGetInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, writeStdSPS, writeStdPPS, stdSPSId, stdPPSId ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersGetInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( writeStdSPS == rhs.writeStdSPS ) && ( writeStdPPS == rhs.writeStdPPS ) && + ( stdSPSId == rhs.stdSPSId ) && ( stdPPSId == rhs.stdPPSId ); +# endif + } + + bool operator!=( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersGetInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS = {}; + VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS = {}; + uint32_t stdSPSId = {}; + uint32_t stdPPSId = {}; + }; + + template <> + 
struct CppType + { + using Type = VideoEncodeH264SessionParametersGetInfoKHR; + }; + + struct VideoEncodeH265CapabilitiesKHR + { + using NativeType = VkVideoEncodeH265CapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265CapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsKHR flags_ = {}, + StdVideoH265LevelIdc maxLevelIdc_ = {}, + uint32_t maxSliceSegmentCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxTiles_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR ctbSizes_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsKHR transformBlockSizes_ = {}, + uint32_t maxPPictureL0ReferenceCount_ = {}, + uint32_t maxBPictureL0ReferenceCount_ = {}, + uint32_t maxL1ReferenceCount_ = {}, + uint32_t maxSubLayerCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 expectDyadicTemporalSubLayerPattern_ = {}, + int32_t minQp_ = {}, + int32_t maxQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagsKHR stdSyntaxFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , maxLevelIdc( maxLevelIdc_ ) + , maxSliceSegmentCount( maxSliceSegmentCount_ ) + , maxTiles( maxTiles_ ) + , ctbSizes( ctbSizes_ ) + , transformBlockSizes( transformBlockSizes_ ) + , maxPPictureL0ReferenceCount( maxPPictureL0ReferenceCount_ ) + , maxBPictureL0ReferenceCount( maxBPictureL0ReferenceCount_ ) + , maxL1ReferenceCount( maxL1ReferenceCount_ ) + , maxSubLayerCount( maxSubLayerCount_ ) + , expectDyadicTemporalSubLayerPattern( expectDyadicTemporalSubLayerPattern_ ) + , minQp( minQp_ ) + , maxQp( maxQp_ ) + , prefersGopRemainingFrames( prefersGopRemainingFrames_ ) + , requiresGopRemainingFrames( requiresGopRemainingFrames_ ) + , stdSyntaxFlags( stdSyntaxFlags_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesKHR( VideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265CapabilitiesKHR( VkVideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265CapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265CapabilitiesKHR & operator=( VideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265CapabilitiesKHR & operator=( VkVideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeH265CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + maxLevelIdc, + maxSliceSegmentCount, + maxTiles, + ctbSizes, + transformBlockSizes, + maxPPictureL0ReferenceCount, + maxBPictureL0ReferenceCount, + maxL1ReferenceCount, + maxSubLayerCount, + expectDyadicTemporalSubLayerPattern, + minQp, + maxQp, + prefersGopRemainingFrames, + requiresGopRemainingFrames, + stdSyntaxFlags ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = maxSliceSegmentCount <=> rhs.maxSliceSegmentCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxTiles <=> rhs.maxTiles; cmp != 0 ) + return cmp; + if ( auto cmp = ctbSizes <=> rhs.ctbSizes; cmp != 0 ) + return cmp; + if ( auto cmp = transformBlockSizes <=> rhs.transformBlockSizes; cmp != 0 ) + return cmp; + if ( auto cmp = maxPPictureL0ReferenceCount <=> rhs.maxPPictureL0ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxBPictureL0ReferenceCount <=> rhs.maxBPictureL0ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxL1ReferenceCount <=> rhs.maxL1ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxSubLayerCount <=> rhs.maxSubLayerCount; cmp != 0 ) + return cmp; + if ( auto cmp = expectDyadicTemporalSubLayerPattern <=> rhs.expectDyadicTemporalSubLayerPattern; cmp != 0 ) + return cmp; + if ( auto cmp = minQp <=> rhs.minQp; cmp != 0 ) + return cmp; + if ( auto cmp = maxQp <=> rhs.maxQp; cmp != 0 ) + return cmp; + if ( auto cmp = prefersGopRemainingFrames <=> rhs.prefersGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = requiresGopRemainingFrames <=> rhs.requiresGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = stdSyntaxFlags <=> rhs.stdSyntaxFlags; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 ) && ( maxSliceSegmentCount == rhs.maxSliceSegmentCount ) && + ( maxTiles == rhs.maxTiles ) && ( ctbSizes == rhs.ctbSizes ) && ( transformBlockSizes == rhs.transformBlockSizes ) && + ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount ) && ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount ) && + ( maxL1ReferenceCount == rhs.maxL1ReferenceCount ) && ( maxSubLayerCount == rhs.maxSubLayerCount ) && + ( expectDyadicTemporalSubLayerPattern == rhs.expectDyadicTemporalSubLayerPattern ) && ( minQp == rhs.minQp ) && ( maxQp == rhs.maxQp ) && + ( prefersGopRemainingFrames == rhs.prefersGopRemainingFrames ) && ( requiresGopRemainingFrames == rhs.requiresGopRemainingFrames ) && + ( stdSyntaxFlags == rhs.stdSyntaxFlags ); + } + + bool operator!=( VideoEncodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265CapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsKHR flags = {}; + StdVideoH265LevelIdc maxLevelIdc = {}; + uint32_t maxSliceSegmentCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxTiles = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR ctbSizes = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsKHR transformBlockSizes = {}; + uint32_t maxPPictureL0ReferenceCount = {}; + uint32_t 
maxBPictureL0ReferenceCount = {};
+    uint32_t                                         maxL1ReferenceCount                 = {};
+    uint32_t                                         maxSubLayerCount                    = {};
+    VULKAN_HPP_NAMESPACE::Bool32                     expectDyadicTemporalSubLayerPattern = {};
+    int32_t                                          minQp                               = {};
+    int32_t                                          maxQp                               = {};
+    VULKAN_HPP_NAMESPACE::Bool32                     prefersGopRemainingFrames           = {};
+    VULKAN_HPP_NAMESPACE::Bool32                     requiresGopRemainingFrames          = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagsKHR stdSyntaxFlags                      = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH265CapabilitiesKHR>
+  {
+    using Type = VideoEncodeH265CapabilitiesKHR;
+  };
+
+  struct VideoEncodeH265DpbSlotInfoKHR
+  {
+    using NativeType = VkVideoEncodeH265DpbSlotInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265DpbSlotInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoKHR( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ = {},
+                                                        const void *                            pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , pStdReferenceInfo( pStdReferenceInfo_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoKHR( VideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH265DpbSlotInfoKHR( VkVideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH265DpbSlotInfoKHR( *reinterpret_cast<VideoEncodeH265DpbSlotInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoEncodeH265DpbSlotInfoKHR & operator=( VideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH265DpbSlotInfoKHR & operator=( VkVideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoKHR &
+      setPStdReferenceInfo( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdReferenceInfo = pStdReferenceInfo_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkVideoEncodeH265DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265DpbSlotInfoKHR *>( this );
+    }
+
+    operator VkVideoEncodeH265DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265DpbSlotInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const StdVideoEncodeH265ReferenceInfo * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pStdReferenceInfo );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH265DpbSlotInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
+#  endif
+    }
+
+    bool operator!=( VideoEncodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType     sType             = StructureType::eVideoEncodeH265DpbSlotInfoKHR;
+    const void *                            pNext             = {};
+    const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH265DpbSlotInfoKHR>
+  {
+    using Type = VideoEncodeH265DpbSlotInfoKHR;
+  };
+
+  struct
VideoEncodeH265FrameSizeKHR + { + using NativeType = VkVideoEncodeH265FrameSizeKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeKHR( uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT + : frameISize( frameISize_ ) + , framePSize( framePSize_ ) + , frameBSize( frameBSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeKHR( VideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265FrameSizeKHR( VkVideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265FrameSizeKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265FrameSizeKHR & operator=( VideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265FrameSizeKHR & operator=( VkVideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeKHR & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT + { + frameISize = frameISize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeKHR & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT + { + framePSize = framePSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeKHR & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT + { + frameBSize = frameBSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH265FrameSizeKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265FrameSizeKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( frameISize, framePSize, frameBSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265FrameSizeKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( frameISize == rhs.frameISize ) && ( framePSize == rhs.framePSize ) && ( frameBSize == rhs.frameBSize ); +# endif + } + + bool operator!=( VideoEncodeH265FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t frameISize = {}; + uint32_t framePSize = {}; + uint32_t frameBSize = {}; + }; + + struct VideoEncodeH265GopRemainingFrameInfoKHR + { + using NativeType = VkVideoEncodeH265GopRemainingFrameInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265GopRemainingFrameInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ = {}, + uint32_t gopRemainingI_ = {}, + uint32_t gopRemainingP_ = {}, + uint32_t gopRemainingB_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , useGopRemainingFrames( useGopRemainingFrames_ ) + , gopRemainingI( gopRemainingI_ ) + , gopRemainingP( gopRemainingP_ ) + , gopRemainingB( gopRemainingB_ ) + { + } + + 
VULKAN_HPP_CONSTEXPR VideoEncodeH265GopRemainingFrameInfoKHR( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265GopRemainingFrameInfoKHR( VkVideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265GopRemainingFrameInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265GopRemainingFrameInfoKHR & operator=( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265GopRemainingFrameInfoKHR & operator=( VkVideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & + setUseGopRemainingFrames( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ ) VULKAN_HPP_NOEXCEPT + { + useGopRemainingFrames = useGopRemainingFrames_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setGopRemainingI( uint32_t gopRemainingI_ ) VULKAN_HPP_NOEXCEPT + { + gopRemainingI = gopRemainingI_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setGopRemainingP( uint32_t gopRemainingP_ ) VULKAN_HPP_NOEXCEPT + { + gopRemainingP = gopRemainingP_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setGopRemainingB( uint32_t gopRemainingB_ ) VULKAN_HPP_NOEXCEPT + { + gopRemainingB = gopRemainingB_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH265GopRemainingFrameInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265GopRemainingFrameInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useGopRemainingFrames, gopRemainingI, gopRemainingP, gopRemainingB ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265GopRemainingFrameInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useGopRemainingFrames == rhs.useGopRemainingFrames ) && + ( gopRemainingI == rhs.gopRemainingI ) && ( gopRemainingP == rhs.gopRemainingP ) && ( gopRemainingB == rhs.gopRemainingB ); +# endif + } + + bool operator!=( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames = {}; + uint32_t gopRemainingI = {}; + uint32_t gopRemainingP = {}; + uint32_t gopRemainingB = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265GopRemainingFrameInfoKHR; + }; + + struct VideoEncodeH265NaluSliceSegmentInfoKHR + { + using NativeType = 
VkVideoEncodeH265NaluSliceSegmentInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoKHR( int32_t constantQp_ = {}, + const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , constantQp( constantQp_ ) + , pStdSliceSegmentHeader( pStdSliceSegmentHeader_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoKHR( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265NaluSliceSegmentInfoKHR( VkVideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265NaluSliceSegmentInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265NaluSliceSegmentInfoKHR & operator=( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265NaluSliceSegmentInfoKHR & operator=( VkVideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoKHR & setConstantQp( int32_t constantQp_ ) VULKAN_HPP_NOEXCEPT + { + constantQp = constantQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoKHR & + setPStdSliceSegmentHeader( const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader_ ) VULKAN_HPP_NOEXCEPT + { + pStdSliceSegmentHeader = pStdSliceSegmentHeader_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH265NaluSliceSegmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265NaluSliceSegmentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, constantQp, pStdSliceSegmentHeader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265NaluSliceSegmentInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( constantQp == rhs.constantQp ) && ( pStdSliceSegmentHeader == rhs.pStdSliceSegmentHeader ); +# endif + } + + bool operator!=( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR; + const void * pNext = {}; + int32_t constantQp = {}; + const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265NaluSliceSegmentInfoKHR; + }; + + struct VideoEncodeH265PictureInfoKHR + { + 
using NativeType = VkVideoEncodeH265PictureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265PictureInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265PictureInfoKHR( uint32_t naluSliceSegmentEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries_ = {}, + const StdVideoEncodeH265PictureInfo * pStdPictureInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , naluSliceSegmentEntryCount( naluSliceSegmentEntryCount_ ) + , pNaluSliceSegmentEntries( pNaluSliceSegmentEntries_ ) + , pStdPictureInfo( pStdPictureInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265PictureInfoKHR( VideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265PictureInfoKHR( VkVideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265PictureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265PictureInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceSegmentEntries_, + const StdVideoEncodeH265PictureInfo * pStdPictureInfo_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , naluSliceSegmentEntryCount( static_cast( naluSliceSegmentEntries_.size() ) ) + , pNaluSliceSegmentEntries( naluSliceSegmentEntries_.data() ) + , pStdPictureInfo( pStdPictureInfo_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeH265PictureInfoKHR & operator=( VideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265PictureInfoKHR & operator=( VkVideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setNaluSliceSegmentEntryCount( uint32_t naluSliceSegmentEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + naluSliceSegmentEntryCount = naluSliceSegmentEntryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & + setPNaluSliceSegmentEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries_ ) VULKAN_HPP_NOEXCEPT + { + pNaluSliceSegmentEntries = pNaluSliceSegmentEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265PictureInfoKHR & setNaluSliceSegmentEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceSegmentEntries_ ) + VULKAN_HPP_NOEXCEPT + { + naluSliceSegmentEntryCount = static_cast( naluSliceSegmentEntries_.size() ); + pNaluSliceSegmentEntries = naluSliceSegmentEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setPStdPictureInfo( const StdVideoEncodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH265PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265PictureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + 
return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, naluSliceSegmentEntryCount, pNaluSliceSegmentEntries, pStdPictureInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265PictureInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( naluSliceSegmentEntryCount == rhs.naluSliceSegmentEntryCount ) && + ( pNaluSliceSegmentEntries == rhs.pNaluSliceSegmentEntries ) && ( pStdPictureInfo == rhs.pStdPictureInfo ); +# endif + } + + bool operator!=( VideoEncodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265PictureInfoKHR; + const void * pNext = {}; + uint32_t naluSliceSegmentEntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries = {}; + const StdVideoEncodeH265PictureInfo * pStdPictureInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265PictureInfoKHR; + }; + + struct VideoEncodeH265ProfileInfoKHR + { + using NativeType = VkVideoEncodeH265ProfileInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ProfileInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoKHR( StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stdProfileIdc( stdProfileIdc_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoKHR( VideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265ProfileInfoKHR( VkVideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265ProfileInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265ProfileInfoKHR & operator=( VideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265ProfileInfoKHR & operator=( VkVideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoKHR & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + { + stdProfileIdc = stdProfileIdc_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH265ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdProfileIdc ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    std::strong_ordering operator<=>( VideoEncodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+        return cmp;
+      if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( VideoEncodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 );
+    }
+
+    bool operator!=( VideoEncodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::eVideoEncodeH265ProfileInfoKHR;
+    const void *                        pNext         = {};
+    StdVideoH265ProfileIdc              stdProfileIdc = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeH265ProfileInfoKHR>
+  {
+    using Type = VideoEncodeH265ProfileInfoKHR;
+  };
+
+  struct VideoEncodeH265QpKHR
+  {
+    using NativeType = VkVideoEncodeH265QpKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265QpKHR( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT
+      : qpI( qpI_ )
+      , qpP( qpP_ )
+      , qpB( qpB_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeH265QpKHR( VideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeH265QpKHR( VkVideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH265QpKHR( *reinterpret_cast<VideoEncodeH265QpKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoEncodeH265QpKHR & operator=( VideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeH265QpKHR & operator=( VkVideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpKHR & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qpI = qpI_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpKHR & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qpP = qpP_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpKHR & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT
+    {
+      qpB = qpB_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkVideoEncodeH265QpKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeH265QpKHR *>( this );
+    }
+
+    operator VkVideoEncodeH265QpKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH265QpKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<int32_t const &, int32_t const &, int32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( qpI, qpP, qpB );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeH265QpKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeH265QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( qpI == rhs.qpI ) && ( qpP == rhs.qpP ) && ( qpB == rhs.qpB );
+#  endif
+    }
+
+    bool operator!=( VideoEncodeH265QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    int32_t qpI = {};
+    int32_t qpP = {};
+    int32_t qpB = {};
+  };
+
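// ------------------------------------------------------------------------
// Editor's illustrative sketch, not part of the vendored vulkan.hpp hunk
// above. It shows how the fluent setters these generated structs expose
// (each returns *this) chain into a single rate-control configuration,
// assuming a Vulkan SDK recent enough to ship the promoted
// VK_KHR_video_encode_h265 types. makeExampleH265RateControlLayer and all
// QP values are hypothetical placeholders.
#include <vulkan/vulkan.hpp>

static vk::VideoEncodeH265RateControlLayerInfoKHR makeExampleH265RateControlLayer()
{
  // Per-picture-type QP bounds for I/P/B frames; illustrative values only.
  vk::VideoEncodeH265QpKHR minQp{ 18, 20, 22 };
  vk::VideoEncodeH265QpKHR maxQp{ 34, 36, 38 };

  // sType is pre-initialised by the wrapper struct, so only the payload
  // fields need to be set before the struct is handed to the driver.
  return vk::VideoEncodeH265RateControlLayerInfoKHR{}
    .setUseMinQp( VK_TRUE )
    .setMinQp( minQp )
    .setUseMaxQp( VK_TRUE )
    .setMaxQp( maxQp );
}
// ------------------------------------------------------------------------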
struct VideoEncodeH265QualityLevelPropertiesKHR + { + using NativeType = VkVideoEncodeH265QualityLevelPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265QualityLevelPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265QualityLevelPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR preferredRateControlFlags_ = {}, + uint32_t preferredGopFrameCount_ = {}, + uint32_t preferredIdrPeriod_ = {}, + uint32_t preferredConsecutiveBFrameCount_ = {}, + uint32_t preferredSubLayerCount_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR preferredConstantQp_ = {}, + uint32_t preferredMaxL0ReferenceCount_ = {}, + uint32_t preferredMaxL1ReferenceCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , preferredRateControlFlags( preferredRateControlFlags_ ) + , preferredGopFrameCount( preferredGopFrameCount_ ) + , preferredIdrPeriod( preferredIdrPeriod_ ) + , preferredConsecutiveBFrameCount( preferredConsecutiveBFrameCount_ ) + , preferredSubLayerCount( preferredSubLayerCount_ ) + , preferredConstantQp( preferredConstantQp_ ) + , preferredMaxL0ReferenceCount( preferredMaxL0ReferenceCount_ ) + , preferredMaxL1ReferenceCount( preferredMaxL1ReferenceCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265QualityLevelPropertiesKHR( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265QualityLevelPropertiesKHR( VkVideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265QualityLevelPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265QualityLevelPropertiesKHR & operator=( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265QualityLevelPropertiesKHR & operator=( VkVideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeH265QualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265QualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + preferredRateControlFlags, + preferredGopFrameCount, + preferredIdrPeriod, + preferredConsecutiveBFrameCount, + preferredSubLayerCount, + preferredConstantQp, + preferredMaxL0ReferenceCount, + preferredMaxL1ReferenceCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265QualityLevelPropertiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlFlags == rhs.preferredRateControlFlags ) && + ( preferredGopFrameCount == rhs.preferredGopFrameCount ) && ( preferredIdrPeriod == rhs.preferredIdrPeriod ) && + ( preferredConsecutiveBFrameCount == rhs.preferredConsecutiveBFrameCount ) && ( preferredSubLayerCount == rhs.preferredSubLayerCount ) && + ( preferredConstantQp 
== rhs.preferredConstantQp ) && ( preferredMaxL0ReferenceCount == rhs.preferredMaxL0ReferenceCount ) && + ( preferredMaxL1ReferenceCount == rhs.preferredMaxL1ReferenceCount ); +# endif + } + + bool operator!=( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265QualityLevelPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR preferredRateControlFlags = {}; + uint32_t preferredGopFrameCount = {}; + uint32_t preferredIdrPeriod = {}; + uint32_t preferredConsecutiveBFrameCount = {}; + uint32_t preferredSubLayerCount = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR preferredConstantQp = {}; + uint32_t preferredMaxL0ReferenceCount = {}; + uint32_t preferredMaxL1ReferenceCount = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265QualityLevelPropertiesKHR; + }; + + struct VideoEncodeH265RateControlInfoKHR + { + using NativeType = VkVideoEncodeH265RateControlInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR flags_ = {}, + uint32_t gopFrameCount_ = {}, + uint32_t idrPeriod_ = {}, + uint32_t consecutiveBFrameCount_ = {}, + uint32_t subLayerCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , gopFrameCount( gopFrameCount_ ) + , idrPeriod( idrPeriod_ ) + , consecutiveBFrameCount( consecutiveBFrameCount_ ) + , subLayerCount( subLayerCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoKHR( VideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265RateControlInfoKHR( VkVideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265RateControlInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265RateControlInfoKHR & operator=( VideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265RateControlInfoKHR & operator=( VkVideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + gopFrameCount = gopFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT + { + idrPeriod = idrPeriod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + consecutiveBFrameCount = consecutiveBFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
VideoEncodeH265RateControlInfoKHR & setSubLayerCount( uint32_t subLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + subLayerCount = subLayerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH265RateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265RateControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, gopFrameCount, idrPeriod, consecutiveBFrameCount, subLayerCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265RateControlInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( gopFrameCount == rhs.gopFrameCount ) && + ( idrPeriod == rhs.idrPeriod ) && ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) && ( subLayerCount == rhs.subLayerCount ); +# endif + } + + bool operator!=( VideoEncodeH265RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR flags = {}; + uint32_t gopFrameCount = {}; + uint32_t idrPeriod = {}; + uint32_t consecutiveBFrameCount = {}; + uint32_t subLayerCount = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265RateControlInfoKHR; + }; + + struct VideoEncodeH265RateControlLayerInfoKHR + { + using NativeType = VkVideoEncodeH265RateControlLayerInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlLayerInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR minQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR maxQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR maxFrameSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , useMinQp( useMinQp_ ) + , minQp( minQp_ ) + , useMaxQp( useMaxQp_ ) + , maxQp( maxQp_ ) + , useMaxFrameSize( useMaxFrameSize_ ) + , maxFrameSize( maxFrameSize_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoKHR( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265RateControlLayerInfoKHR( VkVideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265RateControlLayerInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265RateControlLayerInfoKHR & operator=( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265RateControlLayerInfoKHR & operator=( VkVideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + 
*this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT + { + useMinQp = useMinQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const & minQp_ ) VULKAN_HPP_NOEXCEPT + { + minQp = minQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT + { + useMaxQp = useMaxQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const & maxQp_ ) VULKAN_HPP_NOEXCEPT + { + maxQp = maxQp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + useMaxFrameSize = useMaxFrameSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & + setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + maxFrameSize = maxFrameSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH265RateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265RateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265RateControlLayerInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) && + ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && ( maxFrameSize == rhs.maxFrameSize ); +# endif + } + + bool operator!=( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlLayerInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR minQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR maxQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR maxFrameSize = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265RateControlLayerInfoKHR; + }; + + struct VideoEncodeH265SessionCreateInfoKHR + { + using NativeType = VkVideoEncodeH265SessionCreateInfoKHR; + + static const bool allowDuplicate = 
false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionCreateInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ = {}, + StdVideoH265LevelIdc maxLevelIdc_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , useMaxLevelIdc( useMaxLevelIdc_ ) + , maxLevelIdc( maxLevelIdc_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionCreateInfoKHR( VideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionCreateInfoKHR( VkVideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265SessionCreateInfoKHR & operator=( VideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265SessionCreateInfoKHR & operator=( VkVideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoKHR & setUseMaxLevelIdc( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ ) VULKAN_HPP_NOEXCEPT + { + useMaxLevelIdc = useMaxLevelIdc_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoKHR & setMaxLevelIdc( StdVideoH265LevelIdc maxLevelIdc_ ) VULKAN_HPP_NOEXCEPT + { + maxLevelIdc = maxLevelIdc_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH265SessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useMaxLevelIdc, maxLevelIdc ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH265SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = useMaxLevelIdc <=> rhs.useMaxLevelIdc; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeH265SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMaxLevelIdc == rhs.useMaxLevelIdc ) && + ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 ); + } + + bool operator!=( VideoEncodeH265SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc = {}; + StdVideoH265LevelIdc maxLevelIdc = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265SessionCreateInfoKHR; + }; + + struct VideoEncodeH265SessionParametersAddInfoKHR + { + using NativeType = VkVideoEncodeH265SessionParametersAddInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersAddInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoKHR( uint32_t stdVPSCount_ = {}, + const StdVideoH265VideoParameterSet * pStdVPSs_ = {}, + uint32_t stdSPSCount_ = {}, + const StdVideoH265SequenceParameterSet * pStdSPSs_ = {}, + uint32_t stdPPSCount_ = {}, + const StdVideoH265PictureParameterSet * pStdPPSs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , stdVPSCount( stdVPSCount_ ) + , pStdVPSs( pStdVPSs_ ) + , stdSPSCount( stdSPSCount_ ) + , pStdSPSs( pStdSPSs_ ) + , stdPPSCount( stdPPSCount_ ) + , pStdPPSs( pStdPPSs_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoKHR( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionParametersAddInfoKHR( VkVideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionParametersAddInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdVPSs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stdVPSCount( static_cast( stdVPSs_.size() ) ) + , pStdVPSs( stdVPSs_.data() ) + , stdSPSCount( static_cast( stdSPSs_.size() ) ) + , pStdSPSs( stdSPSs_.data() ) + , stdPPSCount( static_cast( stdPPSs_.size() ) ) + , pStdPPSs( stdPPSs_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeH265SessionParametersAddInfoKHR & operator=( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265SessionParametersAddInfoKHR & operator=( VkVideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setStdVPSCount( uint32_t 
stdVPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdVPSCount = stdVPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPStdVPSs( const StdVideoH265VideoParameterSet * pStdVPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdVPSs = pStdVPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoKHR & + setStdVPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdVPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdVPSCount = static_cast( stdVPSs_.size() ); + pStdVPSs = stdVPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSCount = stdSPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH265SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdSPSs = pStdSPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoKHR & + setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSCount = static_cast( stdSPSs_.size() ); + pStdSPSs = stdSPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSCount = stdPPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH265PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdPPSs = pStdPPSs_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoKHR & + setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSCount = static_cast( stdPPSs_.size() ); + pStdPPSs = stdPPSs_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH265SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdVPSCount, pStdVPSs, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265SessionParametersAddInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdVPSCount == rhs.stdVPSCount ) && ( pStdVPSs == rhs.pStdVPSs ) && + ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) && ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs ); +# endif + } + + bool operator!=( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eVideoEncodeH265SessionParametersAddInfoKHR; + const void * pNext = {}; + uint32_t stdVPSCount = {}; + const StdVideoH265VideoParameterSet * pStdVPSs = {}; + uint32_t stdSPSCount = {}; + const StdVideoH265SequenceParameterSet * pStdSPSs = {}; + uint32_t stdPPSCount = {}; + const StdVideoH265PictureParameterSet * pStdPPSs = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265SessionParametersAddInfoKHR; + }; + + struct VideoEncodeH265SessionParametersCreateInfoKHR + { + using NativeType = VkVideoEncodeH265SessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH265SessionParametersCreateInfoKHR( uint32_t maxStdVPSCount_ = {}, + uint32_t maxStdSPSCount_ = {}, + uint32_t maxStdPPSCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxStdVPSCount( maxStdVPSCount_ ) + , maxStdSPSCount( maxStdSPSCount_ ) + , maxStdPPSCount( maxStdPPSCount_ ) + , pParametersAddInfo( pParametersAddInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR + VideoEncodeH265SessionParametersCreateInfoKHR( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionParametersCreateInfoKHR( VkVideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265SessionParametersCreateInfoKHR & operator=( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265SessionParametersCreateInfoKHR & operator=( VkVideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setMaxStdVPSCount( uint32_t maxStdVPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdVPSCount = maxStdVPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdSPSCount = maxStdSPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT + { + maxStdPPSCount = maxStdPPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & + setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + { + pParametersAddInfo = pParametersAddInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH265SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxStdVPSCount, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265SessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdVPSCount == rhs.maxStdVPSCount ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && + ( maxStdPPSCount == rhs.maxStdPPSCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo ); +# endif + } + + bool operator!=( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersCreateInfoKHR; + const void * pNext = {}; + uint32_t maxStdVPSCount = {}; + uint32_t maxStdSPSCount = {}; + uint32_t maxStdPPSCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265SessionParametersCreateInfoKHR; + }; + + struct VideoEncodeH265SessionParametersFeedbackInfoKHR + { + using NativeType = VkVideoEncodeH265SessionParametersFeedbackInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersFeedbackInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersFeedbackInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 hasStdVPSOverrides_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , hasStdVPSOverrides( hasStdVPSOverrides_ ) + , hasStdSPSOverrides( hasStdSPSOverrides_ ) + , hasStdPPSOverrides( hasStdPPSOverrides_ ) + { + } + + VULKAN_HPP_CONSTEXPR + VideoEncodeH265SessionParametersFeedbackInfoKHR( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionParametersFeedbackInfoKHR( VkVideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionParametersFeedbackInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265SessionParametersFeedbackInfoKHR & operator=( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265SessionParametersFeedbackInfoKHR & operator=( VkVideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( 
sType, pNext, hasStdVPSOverrides, hasStdSPSOverrides, hasStdPPSOverrides ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265SessionParametersFeedbackInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasStdVPSOverrides == rhs.hasStdVPSOverrides ) && + ( hasStdSPSOverrides == rhs.hasStdSPSOverrides ) && ( hasStdPPSOverrides == rhs.hasStdPPSOverrides ); +# endif + } + + bool operator!=( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersFeedbackInfoKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasStdVPSOverrides = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265SessionParametersFeedbackInfoKHR; + }; + + struct VideoEncodeH265SessionParametersGetInfoKHR + { + using NativeType = VkVideoEncodeH265SessionParametersGetInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersGetInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersGetInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 writeStdVPS_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ = {}, + uint32_t stdVPSId_ = {}, + uint32_t stdSPSId_ = {}, + uint32_t stdPPSId_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , writeStdVPS( writeStdVPS_ ) + , writeStdSPS( writeStdSPS_ ) + , writeStdPPS( writeStdPPS_ ) + , stdVPSId( stdVPSId_ ) + , stdSPSId( stdSPSId_ ) + , stdPPSId( stdPPSId_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersGetInfoKHR( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionParametersGetInfoKHR( VkVideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionParametersGetInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265SessionParametersGetInfoKHR & operator=( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265SessionParametersGetInfoKHR & operator=( VkVideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setWriteStdVPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdVPS_ ) VULKAN_HPP_NOEXCEPT + { + writeStdVPS = writeStdVPS_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setWriteStdSPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ ) VULKAN_HPP_NOEXCEPT + { + writeStdSPS = writeStdSPS_; + return *this; + } + + 
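+    // Illustrative sketch (hypothetical values): the write flags and ids select which
+    // H.265 parameter sets to serialize; the filled struct is typically chained into the
+    // pNext of a VideoEncodeSessionParametersGetInfoKHR before retrieving the encoded blob.
+    //
+    //   auto h265GetInfo = VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoKHR{}
+    //                        .setWriteStdVPS( VK_TRUE )
+    //                        .setWriteStdSPS( VK_TRUE )
+    //                        .setStdVPSId( 0 )
+    //                        .setStdSPSId( 0 );
+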
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setWriteStdPPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ ) VULKAN_HPP_NOEXCEPT + { + writeStdPPS = writeStdPPS_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setStdVPSId( uint32_t stdVPSId_ ) VULKAN_HPP_NOEXCEPT + { + stdVPSId = stdVPSId_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setStdSPSId( uint32_t stdSPSId_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSId = stdSPSId_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setStdPPSId( uint32_t stdPPSId_ ) VULKAN_HPP_NOEXCEPT + { + stdPPSId = stdPPSId_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeH265SessionParametersGetInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265SessionParametersGetInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, writeStdVPS, writeStdSPS, writeStdPPS, stdVPSId, stdSPSId, stdPPSId ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265SessionParametersGetInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( writeStdVPS == rhs.writeStdVPS ) && ( writeStdSPS == rhs.writeStdSPS ) && + ( writeStdPPS == rhs.writeStdPPS ) && ( stdVPSId == rhs.stdVPSId ) && ( stdSPSId == rhs.stdSPSId ) && ( stdPPSId == rhs.stdPPSId ); +# endif + } + + bool operator!=( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersGetInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 writeStdVPS = {}; + VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS = {}; + VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS = {}; + uint32_t stdVPSId = {}; + uint32_t stdSPSId = {}; + uint32_t stdPPSId = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265SessionParametersGetInfoKHR; + }; + + struct VideoEncodeInfoKHR + { + using NativeType = VkVideoEncodeInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange_ = {}, + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, + uint32_t precedingExternallyEncodedBytes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , dstBuffer( dstBuffer_ ) 
+ , dstBufferOffset( dstBufferOffset_ ) + , dstBufferRange( dstBufferRange_ ) + , srcPictureResource( srcPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( referenceSlotCount_ ) + , pReferenceSlots( pReferenceSlots_ ) + , precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeInfoKHR( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoEncodeInfoKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset_, + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange_, + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource_, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_, + uint32_t precedingExternallyEncodedBytes_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , dstBuffer( dstBuffer_ ) + , dstBufferOffset( dstBufferOffset_ ) + , dstBufferRange( dstBufferRange_ ) + , srcPictureResource( srcPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + , precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeInfoKHR & operator=( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeInfoKHR & operator=( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + { + dstBuffer = dstBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset_ ) VULKAN_HPP_NOEXCEPT + { + dstBufferOffset = dstBufferOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange_ ) VULKAN_HPP_NOEXCEPT + { + dstBufferRange = dstBufferRange_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setSrcPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & srcPictureResource_ ) VULKAN_HPP_NOEXCEPT + { + srcPictureResource = srcPictureResource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT + { + pSetupReferenceSlot = pSetupReferenceSlot_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } + + 
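+    // Illustrative sketch (bitstreamBuffer, bitstreamSize and srcPicture are hypothetical):
+    // a minimal encode submission points dstBuffer at a bitstream buffer, names the source
+    // picture, and is recorded with CommandBuffer::encodeVideoKHR:
+    //
+    //   auto encodeInfo = VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR{}
+    //                       .setDstBuffer( bitstreamBuffer )
+    //                       .setDstBufferRange( bitstreamSize )
+    //                       .setSrcPictureResource( srcPicture );
+    //   commandBuffer.encodeVideoKHR( encodeInfo );
+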
VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots = pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPrecedingExternallyEncodedBytes( uint32_t precedingExternallyEncodedBytes_ ) VULKAN_HPP_NOEXCEPT + { + precedingExternallyEncodedBytes = precedingExternallyEncodedBytes_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + dstBuffer, + dstBufferOffset, + dstBufferRange, + srcPictureResource, + pSetupReferenceSlot, + referenceSlotCount, + pReferenceSlots, + precedingExternallyEncodedBytes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dstBuffer == rhs.dstBuffer ) && + ( dstBufferOffset == rhs.dstBufferOffset ) && ( dstBufferRange == rhs.dstBufferRange ) && ( srcPictureResource == rhs.srcPictureResource ) && + ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) && + ( pReferenceSlots == rhs.pReferenceSlots ) && ( precedingExternallyEncodedBytes == rhs.precedingExternallyEncodedBytes ); +# endif + } + + bool operator!=( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange = {}; + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {}; + uint32_t precedingExternallyEncodedBytes = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeInfoKHR; + }; + + struct VideoEncodeQualityLevelInfoKHR + { + using NativeType = VkVideoEncodeQualityLevelInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQualityLevelInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelInfoKHR( uint32_t qualityLevel_ = {}, 
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , qualityLevel( qualityLevel_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelInfoKHR( VideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeQualityLevelInfoKHR( VkVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeQualityLevelInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeQualityLevelInfoKHR & operator=( VideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeQualityLevelInfoKHR & operator=( VkVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQualityLevelInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQualityLevelInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT + { + qualityLevel = qualityLevel_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeQualityLevelInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeQualityLevelInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, qualityLevel ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeQualityLevelInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( qualityLevel == rhs.qualityLevel ); +# endif + } + + bool operator!=( VideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQualityLevelInfoKHR; + const void * pNext = {}; + uint32_t qualityLevel = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeQualityLevelInfoKHR; + }; + + struct VideoEncodeQualityLevelPropertiesKHR + { + using NativeType = VkVideoEncodeQualityLevelPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQualityLevelPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR preferredRateControlMode_ = + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault, + uint32_t preferredRateControlLayerCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , preferredRateControlMode( preferredRateControlMode_ ) + , preferredRateControlLayerCount( preferredRateControlLayerCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelPropertiesKHR( VideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeQualityLevelPropertiesKHR( VkVideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : 
VideoEncodeQualityLevelPropertiesKHR( *reinterpret_cast<VideoEncodeQualityLevelPropertiesKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoEncodeQualityLevelPropertiesKHR & operator=( VideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeQualityLevelPropertiesKHR & operator=( VkVideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkVideoEncodeQualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeQualityLevelPropertiesKHR *>( this );
+    }
+
+    operator VkVideoEncodeQualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::
+      tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, preferredRateControlMode, preferredRateControlLayerCount );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeQualityLevelPropertiesKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeQualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlMode == rhs.preferredRateControlMode ) &&
+             ( preferredRateControlLayerCount == rhs.preferredRateControlLayerCount );
+# endif
+    }
+
+    bool operator!=( VideoEncodeQualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                         sType                    = StructureType::eVideoEncodeQualityLevelPropertiesKHR;
+    void *                                                      pNext                    = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR preferredRateControlMode =
+      VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault;
+    uint32_t preferredRateControlLayerCount = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeQualityLevelPropertiesKHR>
+  {
+    using Type = VideoEncodeQualityLevelPropertiesKHR;
+  };
+
+  struct VideoEncodeRateControlLayerInfoKHR
+  {
+    using NativeType = VkVideoEncodeRateControlLayerInfoKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlLayerInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR( uint64_t     averageBitrate_       = {},
+                                                             uint64_t     maxBitrate_           = {},
+                                                             uint32_t     frameRateNumerator_   = {},
+                                                             uint32_t     frameRateDenominator_ = {},
+                                                             const void * pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , averageBitrate( averageBitrate_ )
+      , maxBitrate( maxBitrate_ )
+      , frameRateNumerator( frameRateNumerator_ )
+      , frameRateDenominator( frameRateDenominator_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeRateControlLayerInfoKHR( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeRateControlLayerInfoKHR( *reinterpret_cast<VideoEncodeRateControlLayerInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoEncodeRateControlLayerInfoKHR & operator=( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoEncodeRateControlLayerInfoKHR & operator=( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR const *>( &rhs );
+      return *this;
+    }
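+    // Illustrative sketch (bitrates are hypothetical): one instance describes the
+    // rate-control budget of a single coding layer and is referenced from
+    // VideoEncodeRateControlInfoKHR::pLayers:
+    //
+    //   auto layer = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR{}
+    //                  .setAverageBitrate( 5000000 )
+    //                  .setMaxBitrate( 10000000 )
+    //                  .setFrameRateNumerator( 30 )
+    //                  .setFrameRateDenominator( 1 );
+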
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setAverageBitrate( uint64_t averageBitrate_ ) VULKAN_HPP_NOEXCEPT + { + averageBitrate = averageBitrate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setMaxBitrate( uint64_t maxBitrate_ ) VULKAN_HPP_NOEXCEPT + { + maxBitrate = maxBitrate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setFrameRateNumerator( uint32_t frameRateNumerator_ ) VULKAN_HPP_NOEXCEPT + { + frameRateNumerator = frameRateNumerator_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setFrameRateDenominator( uint32_t frameRateDenominator_ ) VULKAN_HPP_NOEXCEPT + { + frameRateDenominator = frameRateDenominator_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeRateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeRateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, averageBitrate, maxBitrate, frameRateNumerator, frameRateDenominator ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeRateControlLayerInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( averageBitrate == rhs.averageBitrate ) && ( maxBitrate == rhs.maxBitrate ) && + ( frameRateNumerator == rhs.frameRateNumerator ) && ( frameRateDenominator == rhs.frameRateDenominator ); +# endif + } + + bool operator!=( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlLayerInfoKHR; + const void * pNext = {}; + uint64_t averageBitrate = {}; + uint64_t maxBitrate = {}; + uint32_t frameRateNumerator = {}; + uint32_t frameRateDenominator = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeRateControlLayerInfoKHR; + }; + + struct VideoEncodeRateControlInfoKHR + { + using NativeType = VkVideoEncodeRateControlInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR( + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault, + uint32_t layerCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayers_ = {}, + uint32_t virtualBufferSizeInMs_ = {}, + uint32_t initialVirtualBufferSizeInMs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , rateControlMode( 
rateControlMode_ ) + , layerCount( layerCount_ ) + , pLayers( pLayers_ ) + , virtualBufferSizeInMs( virtualBufferSizeInMs_ ) + , initialVirtualBufferSizeInMs( initialVirtualBufferSizeInMs_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeRateControlInfoKHR( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeRateControlInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeRateControlInfoKHR( + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & layers_, + uint32_t virtualBufferSizeInMs_ = {}, + uint32_t initialVirtualBufferSizeInMs_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , rateControlMode( rateControlMode_ ) + , layerCount( static_cast( layers_.size() ) ) + , pLayers( layers_.data() ) + , virtualBufferSizeInMs( virtualBufferSizeInMs_ ) + , initialVirtualBufferSizeInMs( initialVirtualBufferSizeInMs_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeRateControlInfoKHR & operator=( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeRateControlInfoKHR & operator=( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & + setRateControlMode( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ ) VULKAN_HPP_NOEXCEPT + { + rateControlMode = rateControlMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & + setPLayers( const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayers_ ) VULKAN_HPP_NOEXCEPT + { + pLayers = pLayers_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeRateControlInfoKHR & setLayers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & layers_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = static_cast( layers_.size() ); + pLayers = layers_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setVirtualBufferSizeInMs( uint32_t virtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT + { + virtualBufferSizeInMs = virtualBufferSizeInMs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setInitialVirtualBufferSizeInMs( uint32_t initialVirtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT + { + initialVirtualBufferSizeInMs = initialVirtualBufferSizeInMs_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeRateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this 
); + } + + operator VkVideoEncodeRateControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, rateControlMode, layerCount, pLayers, virtualBufferSizeInMs, initialVirtualBufferSizeInMs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeRateControlInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rateControlMode == rhs.rateControlMode ) && + ( layerCount == rhs.layerCount ) && ( pLayers == rhs.pLayers ) && ( virtualBufferSizeInMs == rhs.virtualBufferSizeInMs ) && + ( initialVirtualBufferSizeInMs == rhs.initialVirtualBufferSizeInMs ); +# endif + } + + bool operator!=( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault; + uint32_t layerCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayers = {}; + uint32_t virtualBufferSizeInMs = {}; + uint32_t initialVirtualBufferSizeInMs = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeRateControlInfoKHR; + }; + + struct VideoEncodeSessionParametersFeedbackInfoKHR + { + using NativeType = VkVideoEncodeSessionParametersFeedbackInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersFeedbackInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 hasOverrides_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , hasOverrides( hasOverrides_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersFeedbackInfoKHR( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeSessionParametersFeedbackInfoKHR( VkVideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeSessionParametersFeedbackInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeSessionParametersFeedbackInfoKHR & operator=( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeSessionParametersFeedbackInfoKHR & operator=( VkVideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeSessionParametersFeedbackInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeSessionParametersFeedbackInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + 
auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hasOverrides ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeSessionParametersFeedbackInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasOverrides == rhs.hasOverrides ); +# endif + } + + bool operator!=( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasOverrides = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeSessionParametersFeedbackInfoKHR; + }; + + struct VideoEncodeSessionParametersGetInfoKHR + { + using NativeType = VkVideoEncodeSessionParametersGetInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeSessionParametersGetInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersGetInfoKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , videoSessionParameters( videoSessionParameters_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersGetInfoKHR( VideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeSessionParametersGetInfoKHR( VkVideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeSessionParametersGetInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeSessionParametersGetInfoKHR & operator=( VideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeSessionParametersGetInfoKHR & operator=( VkVideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionParametersGetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionParametersGetInfoKHR & + setVideoSessionParameters( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT + { + videoSessionParameters = videoSessionParameters_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeSessionParametersGetInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeSessionParametersGetInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoSessionParameters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeSessionParametersGetInfoKHR const & ) const = default; +#else + bool operator==( 
VideoEncodeSessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoSessionParameters == rhs.videoSessionParameters ); +# endif + } + + bool operator!=( VideoEncodeSessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeSessionParametersGetInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeSessionParametersGetInfoKHR; + }; + + struct VideoEncodeUsageInfoKHR + { + using NativeType = VkVideoEncodeUsageInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeUsageInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeUsageInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR::eDefault, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , videoUsageHints( videoUsageHints_ ) + , videoContentHints( videoContentHints_ ) + , tuningMode( tuningMode_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeUsageInfoKHR( VideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeUsageInfoKHR( VkVideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeUsageInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeUsageInfoKHR & operator=( VideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeUsageInfoKHR & operator=( VkVideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setVideoUsageHints( VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints_ ) VULKAN_HPP_NOEXCEPT + { + videoUsageHints = videoUsageHints_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & + setVideoContentHints( VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints_ ) VULKAN_HPP_NOEXCEPT + { + videoContentHints = videoContentHints_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setTuningMode( VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode_ ) VULKAN_HPP_NOEXCEPT + { + tuningMode = tuningMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeUsageInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeUsageInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoUsageHints, videoContentHints, tuningMode ); + } +#endif + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeUsageInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoUsageHints == rhs.videoUsageHints ) && ( videoContentHints == rhs.videoContentHints ) && + ( tuningMode == rhs.tuningMode ); +# endif + } + + bool operator!=( VideoEncodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeUsageInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode = VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR::eDefault; + }; + + template <> + struct CppType + { + using Type = VideoEncodeUsageInfoKHR; + }; + + struct VideoEndCodingInfoKHR + { + using NativeType = VkVideoEndCodingInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEndCodingInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEndCodingInfoKHR( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEndCodingInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEndCodingInfoKHR & operator=( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEndCodingInfoKHR & operator=( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEndCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEndCodingInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEndCodingInfoKHR const & ) const = default; +#else + bool operator==( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif 
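+    // Illustrative sketch: this struct closes a video coding scope opened with a
+    // VideoBeginCodingInfoKHR; commandBuffer is a hypothetical vk::CommandBuffer
+    // recording against a video session:
+    //
+    //   commandBuffer.endVideoCodingKHR( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR{} );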
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType          sType = StructureType::eVideoEndCodingInfoKHR;
+    const void *                                 pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEndCodingInfoKHR>
+  {
+    using Type = VideoEndCodingInfoKHR;
+  };
+
+  struct VideoFormatPropertiesKHR
+  {
+    using NativeType = VkVideoFormatPropertiesKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VULKAN_HPP_NAMESPACE::Format           format_           = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                   VULKAN_HPP_NAMESPACE::ComponentMapping componentMapping_ = {},
+                                                   VULKAN_HPP_NAMESPACE::ImageCreateFlags imageCreateFlags_ = {},
+                                                   VULKAN_HPP_NAMESPACE::ImageType        imageType_        = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+                                                   VULKAN_HPP_NAMESPACE::ImageTiling      imageTiling_      = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
+                                                   VULKAN_HPP_NAMESPACE::ImageUsageFlags  imageUsageFlags_  = {},
+                                                   void *                                 pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , format( format_ )
+      , componentMapping( componentMapping_ )
+      , imageCreateFlags( imageCreateFlags_ )
+      , imageType( imageType_ )
+      , imageTiling( imageTiling_ )
+      , imageUsageFlags( imageUsageFlags_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoFormatPropertiesKHR( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoFormatPropertiesKHR( *reinterpret_cast<VideoFormatPropertiesKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoFormatPropertiesKHR & operator=( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoFormatPropertiesKHR & operator=( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkVideoFormatPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoFormatPropertiesKHR *>( this );
+    }
+
+    operator VkVideoFormatPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoFormatPropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Format const &,
+               VULKAN_HPP_NAMESPACE::ComponentMapping const &,
+               VULKAN_HPP_NAMESPACE::ImageCreateFlags const &,
+               VULKAN_HPP_NAMESPACE::ImageType const &,
+               VULKAN_HPP_NAMESPACE::ImageTiling const &,
+               VULKAN_HPP_NAMESPACE::ImageUsageFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, format, componentMapping, imageCreateFlags, imageType, imageTiling, imageUsageFlags );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoFormatPropertiesKHR const & ) const = default;
+#else
+    bool operator==( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( componentMapping == rhs.componentMapping ) &&
+             ( imageCreateFlags == rhs.imageCreateFlags ) && ( imageType == rhs.imageType ) && ( imageTiling == rhs.imageTiling ) &&
+             ( imageUsageFlags == rhs.imageUsageFlags );
+# endif
+    }
+
+    bool operator!=( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType    sType            = StructureType::eVideoFormatPropertiesKHR;
+    void *                                 pNext            = {};
+    VULKAN_HPP_NAMESPACE::Format           format           = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ComponentMapping componentMapping = {};
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags imageCreateFlags = 
{}; + VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::ImageTiling imageTiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsageFlags = {}; + }; + + template <> + struct CppType + { + using Type = VideoFormatPropertiesKHR; + }; + + struct VideoInlineQueryInfoKHR + { + using NativeType = VkVideoInlineQueryInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoInlineQueryInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoInlineQueryInfoKHR( VULKAN_HPP_NAMESPACE::QueryPool queryPool_ = {}, + uint32_t firstQuery_ = {}, + uint32_t queryCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , queryPool( queryPool_ ) + , firstQuery( firstQuery_ ) + , queryCount( queryCount_ ) + { + } + + VULKAN_HPP_CONSTEXPR VideoInlineQueryInfoKHR( VideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoInlineQueryInfoKHR( VkVideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoInlineQueryInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoInlineQueryInfoKHR & operator=( VideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoInlineQueryInfoKHR & operator=( VkVideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool_ ) VULKAN_HPP_NOEXCEPT + { + queryPool = queryPool_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setFirstQuery( uint32_t firstQuery_ ) VULKAN_HPP_NOEXCEPT + { + firstQuery = firstQuery_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT + { + queryCount = queryCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoInlineQueryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoInlineQueryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, queryPool, firstQuery, queryCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoInlineQueryInfoKHR const & ) const = default; +#else + bool operator==( VideoInlineQueryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queryPool == rhs.queryPool ) && ( firstQuery == rhs.firstQuery ) && + ( queryCount == rhs.queryCount ); +# endif + } + + bool operator!=( VideoInlineQueryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoInlineQueryInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPool queryPool = {}; + 
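+    // Illustrative note (assumes VK_KHR_video_maintenance1 with inline queries enabled at
+    // session creation): chained into the pNext of an encode or decode info struct, the
+    // queries [firstQuery, firstQuery + queryCount) execute inline with the coding
+    // operation instead of via separate vkCmdBeginQuery / vkCmdEndQuery calls:
+    //
+    //   auto inlineQuery = VULKAN_HPP_NAMESPACE::VideoInlineQueryInfoKHR{}
+    //                        .setQueryPool( resultQueryPool )  // hypothetical query pool
+    //                        .setQueryCount( 1 );
+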
uint32_t                            firstQuery = {};
+    uint32_t                            queryCount = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoInlineQueryInfoKHR>
+  {
+    using Type = VideoInlineQueryInfoKHR;
+  };
+
+  struct VideoProfileListInfoKHR
+  {
+    using NativeType = VkVideoProfileListInfoKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileListInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoProfileListInfoKHR( uint32_t                                          profileCount_ = {},
+                                                  const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles_    = {},
+                                                  const void *                                      pNext_        = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , profileCount( profileCount_ )
+      , pProfiles( pProfiles_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoProfileListInfoKHR( VideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoProfileListInfoKHR( VkVideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoProfileListInfoKHR( *reinterpret_cast<VideoProfileListInfoKHR const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoProfileListInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR> const & profiles_,
+                             const void *                                                                                          pNext_ = nullptr )
+      : pNext( pNext_ ), profileCount( static_cast<uint32_t>( profiles_.size() ) ), pProfiles( profiles_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VideoProfileListInfoKHR & operator=( VideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoProfileListInfoKHR & operator=( VkVideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setProfileCount( uint32_t profileCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      profileCount = profileCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setPProfiles( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pProfiles = pProfiles_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    VideoProfileListInfoKHR &
+      setProfiles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR> const & profiles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      profileCount = static_cast<uint32_t>( profiles_.size() );
+      pProfiles    = profiles_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkVideoProfileListInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoProfileListInfoKHR *>( this );
+    }
+
+    operator VkVideoProfileListInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoProfileListInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, profileCount, pProfiles );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoProfileListInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoProfileListInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( profileCount == rhs.profileCount ) && ( pProfiles == rhs.pProfiles );
+# endif
+    }
+
+    bool operator!=( VideoProfileListInfoKHR const & 
+
+  struct VideoSessionCreateInfoKHR
+  {
+    using NativeType = VkVideoSessionCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR( uint32_t queueFamilyIndex_ = {},
+                                                       VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ = {},
+                                                       const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ = {},
+                                                       VULKAN_HPP_NAMESPACE::Format pictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                       VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {},
+                                                       VULKAN_HPP_NAMESPACE::Format referencePictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                       uint32_t maxDpbSlots_ = {},
+                                                       uint32_t maxActiveReferencePictures_ = {},
+                                                       const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion_ = {},
+                                                       const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , queueFamilyIndex( queueFamilyIndex_ )
+      , flags( flags_ )
+      , pVideoProfile( pVideoProfile_ )
+      , pictureFormat( pictureFormat_ )
+      , maxCodedExtent( maxCodedExtent_ )
+      , referencePictureFormat( referencePictureFormat_ )
+      , maxDpbSlots( maxDpbSlots_ )
+      , maxActiveReferencePictures( maxActiveReferencePictures_ )
+      , pStdHeaderVersion( pStdHeaderVersion_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoSessionCreateInfoKHR( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoSessionCreateInfoKHR( *reinterpret_cast<VideoSessionCreateInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoSessionCreateInfoKHR & operator=( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoSessionCreateInfoKHR & operator=( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPVideoProfile( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVideoProfile = pVideoProfile_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPictureFormat( VULKAN_HPP_NAMESPACE::Format pictureFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pictureFormat = pictureFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & maxCodedExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxCodedExtent = maxCodedExtent_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setReferencePictureFormat( VULKAN_HPP_NAMESPACE::Format referencePictureFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      referencePictureFormat = referencePictureFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxDpbSlots( uint32_t maxDpbSlots_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxDpbSlots = maxDpbSlots_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxActiveReferencePictures( uint32_t maxActiveReferencePictures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxActiveReferencePictures = maxActiveReferencePictures_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &
+      setPStdHeaderVersion( const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStdHeaderVersion = pStdHeaderVersion_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkVideoSessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( this );
+    }
+
+    operator VkVideoSessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoSessionCreateInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR const &,
+               const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * const &,
+               VULKAN_HPP_NAMESPACE::Format const &,
+               VULKAN_HPP_NAMESPACE::Extent2D const &,
+               VULKAN_HPP_NAMESPACE::Format const &,
+               uint32_t const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::ExtensionProperties * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       queueFamilyIndex,
+                       flags,
+                       pVideoProfile,
+                       pictureFormat,
+                       maxCodedExtent,
+                       referencePictureFormat,
+                       maxDpbSlots,
+                       maxActiveReferencePictures,
+                       pStdHeaderVersion );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoSessionCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( flags == rhs.flags ) &&
+             ( pVideoProfile == rhs.pVideoProfile ) && ( pictureFormat == rhs.pictureFormat ) && ( maxCodedExtent == rhs.maxCodedExtent ) &&
+             ( referencePictureFormat == rhs.referencePictureFormat ) && ( maxDpbSlots == rhs.maxDpbSlots ) &&
+             ( maxActiveReferencePictures == rhs.maxActiveReferencePictures ) && ( pStdHeaderVersion == rhs.pStdHeaderVersion );
+# endif
+    }
+
+    bool operator!=( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionCreateInfoKHR;
+    const void * pNext = {};
+    uint32_t queueFamilyIndex = {};
+    VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags = {};
+    const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile = {};
+    VULKAN_HPP_NAMESPACE::Format pictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {};
+    VULKAN_HPP_NAMESPACE::Format referencePictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    uint32_t maxDpbSlots = {};
+    uint32_t maxActiveReferencePictures = {};
+    const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoSessionCreateInfoKHR>
+  {
+    using Type = VideoSessionCreateInfoKHR;
+  };
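+
+  // Editor's note: illustrative sketch, not part of the generated header.
+  // Because the struct converts implicitly to its C counterpart, a filled-in
+  // create-info can be handed straight to the C entry point (the handles,
+  // profile, and extent values here are hypothetical):
+  //
+  //   auto createInfo = vk::VideoSessionCreateInfoKHR{}
+  //                       .setQueueFamilyIndex( decodeQueueFamily )
+  //                       .setPVideoProfile( &profile )
+  //                       .setPictureFormat( vk::Format::eG8B8R82Plane420Unorm )
+  //                       .setMaxCodedExtent( { 1920, 1088 } )
+  //                       .setMaxDpbSlots( 17 )
+  //                       .setMaxActiveReferencePictures( 16 )
+  //                       .setPStdHeaderVersion( &stdHeaderVersion );
+  //   VkVideoSessionCreateInfoKHR const & cCreateInfo = createInfo;  // implicit conversion
+  //   VkVideoSessionKHR session;
+  //   vkCreateVideoSessionKHR( device, &cCreateInfo, nullptr, &session );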
+
+  struct VideoSessionMemoryRequirementsKHR
+  {
+    using NativeType = VkVideoSessionMemoryRequirementsKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionMemoryRequirementsKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoSessionMemoryRequirementsKHR( uint32_t memoryBindIndex_ = {},
+                                                            VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {},
+                                                            void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , memoryBindIndex( memoryBindIndex_ )
+      , memoryRequirements( memoryRequirements_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoSessionMemoryRequirementsKHR( VideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoSessionMemoryRequirementsKHR( VkVideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoSessionMemoryRequirementsKHR( *reinterpret_cast<VideoSessionMemoryRequirementsKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoSessionMemoryRequirementsKHR & operator=( VideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoSessionMemoryRequirementsKHR & operator=( VkVideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkVideoSessionMemoryRequirementsKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoSessionMemoryRequirementsKHR *>( this );
+    }
+
+    operator VkVideoSessionMemoryRequirementsKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &, VULKAN_HPP_NAMESPACE::MemoryRequirements const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memoryBindIndex, memoryRequirements );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoSessionMemoryRequirementsKHR const & ) const = default;
+#else
+    bool operator==( VideoSessionMemoryRequirementsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && ( memoryRequirements == rhs.memoryRequirements );
+# endif
+    }
+
+    bool operator!=( VideoSessionMemoryRequirementsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionMemoryRequirementsKHR;
+    void * pNext = {};
+    uint32_t memoryBindIndex = {};
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoSessionMemoryRequirementsKHR>
+  {
+    using Type = VideoSessionMemoryRequirementsKHR;
+  };
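+
+  // Editor's note: illustrative sketch, not part of the generated header.
+  // Unlike the create-info structs, VideoSessionMemoryRequirementsKHR is an
+  // output structure (note the non-const void * pNext and the absence of
+  // setters): the application supplies zero-initialized elements and the
+  // implementation fills them in, e.g. via the C API (handles hypothetical):
+  //
+  //   uint32_t count = 0;
+  //   vkGetVideoSessionMemoryRequirementsKHR( device, session, &count, nullptr );
+  //   std::vector<VkVideoSessionMemoryRequirementsKHR> reqs(
+  //     count, VkVideoSessionMemoryRequirementsKHR{ VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR } );
+  //   vkGetVideoSessionMemoryRequirementsKHR( device, session, &count, reqs.data() );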
+
+  struct VideoSessionParametersCreateInfoKHR
+  {
+    using NativeType = VkVideoSessionParametersCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersCreateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags_ = {},
+                                                              VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ = {},
+                                                              VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {},
+                                                              const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , videoSessionParametersTemplate( videoSessionParametersTemplate_ )
+      , videoSession( videoSession_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoSessionParametersCreateInfoKHR( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoSessionParametersCreateInfoKHR( *reinterpret_cast<VideoSessionParametersCreateInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoSessionParametersCreateInfoKHR & operator=( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoSessionParametersCreateInfoKHR & operator=( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR &
+      setFlags( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR &
+      setVideoSessionParametersTemplate( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      videoSessionParametersTemplate = videoSessionParametersTemplate_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT
+    {
+      videoSession = videoSession_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkVideoSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( this );
+    }
+
+    operator VkVideoSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoSessionParametersCreateInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR const &,
+               VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const &,
+               VULKAN_HPP_NAMESPACE::VideoSessionKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, videoSessionParametersTemplate, videoSession );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoSessionParametersCreateInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
+             ( videoSessionParametersTemplate == rhs.videoSessionParametersTemplate ) && ( videoSession == rhs.videoSession );
+# endif
+    }
+
+    bool operator!=( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate = {};
+    VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoSessionParametersCreateInfoKHR>
+  {
+    using Type = VideoSessionParametersCreateInfoKHR;
+  };
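+
+  // Editor's note: illustrative sketch, not part of the generated header.
+  // A parameters object is always bound to one session; the optional
+  // "template" handle seeds the new object with an existing one's contents
+  // (handles hypothetical; the codec-specific pNext chain is omitted here):
+  //
+  //   auto paramsInfo = vk::VideoSessionParametersCreateInfoKHR{}
+  //                       .setVideoSession( session )
+  //                       .setVideoSessionParametersTemplate( oldParams );  // optional, may stay {}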
+
+  struct VideoSessionParametersUpdateInfoKHR
+  {
+    using NativeType = VkVideoSessionParametersUpdateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersUpdateInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( uint32_t updateSequenceCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , updateSequenceCount( updateSequenceCount_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoSessionParametersUpdateInfoKHR( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoSessionParametersUpdateInfoKHR( *reinterpret_cast<VideoSessionParametersUpdateInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoSessionParametersUpdateInfoKHR & operator=( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    VideoSessionParametersUpdateInfoKHR & operator=( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & setUpdateSequenceCount( uint32_t updateSequenceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      updateSequenceCount = updateSequenceCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkVideoSessionParametersUpdateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( this );
+    }
+
+    operator VkVideoSessionParametersUpdateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoSessionParametersUpdateInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, updateSequenceCount );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoSessionParametersUpdateInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( updateSequenceCount == rhs.updateSequenceCount );
+# endif
+    }
+
+    bool operator!=( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersUpdateInfoKHR;
+    const void * pNext = {};
+    uint32_t updateSequenceCount = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoSessionParametersUpdateInfoKHR>
+  {
+    using Type = VideoSessionParametersUpdateInfoKHR;
+  };
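+
+  // Editor's note: not part of the generated header. Per the video-queue
+  // extension, updateSequenceCount must advance by exactly one with each
+  // vkUpdateVideoSessionParametersKHR call on a given parameters object
+  // (so the first update passes 1). A hedged sketch:
+  //
+  //   auto updateInfo = vk::VideoSessionParametersUpdateInfoKHR{}
+  //                       .setUpdateSequenceCount( previousCount + 1 );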
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  struct WaylandSurfaceCreateInfoKHR
+  {
+    using NativeType = VkWaylandSurfaceCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {},
+                                                      struct wl_display * display_ = {},
+                                                      struct wl_surface * surface_ = {},
+                                                      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , display( display_ )
+      , surface( surface_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : WaylandSurfaceCreateInfoKHR( *reinterpret_cast<WaylandSurfaceCreateInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display * display_ ) VULKAN_HPP_NOEXCEPT
+    {
+      display = display_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface * surface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surface = surface_;
+      return *this;
+    }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkWaylandSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( this );
+    }
+
+    operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR const &,
+               struct wl_display * const &,
+               struct wl_surface * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, display, surface );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( WaylandSurfaceCreateInfoKHR const & ) const = default;
+# else
+    bool operator==( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( display == rhs.display ) && ( surface == rhs.surface );
+#  endif
+    }
+
+    bool operator!=( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {};
+    struct wl_display * display = {};
+    struct wl_surface * surface = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eWaylandSurfaceCreateInfoKHR>
+  {
+    using Type = WaylandSurfaceCreateInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
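+
+  // Editor's note: illustrative sketch, not part of the generated header.
+  // The WSI structs only exist when the matching platform macro is defined
+  // before including vulkan.hpp, so application code mirrors the guard:
+  //
+  //   #if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //     auto surfaceInfo = vk::WaylandSurfaceCreateInfoKHR{}
+  //                          .setDisplay( wlDisplay )   // e.g. from wl_display_connect()
+  //                          .setSurface( wlSurface );  // e.g. from wl_compositor_create_surface()
+  //   #endif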
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct Win32KeyedMutexAcquireReleaseInfoKHR
+  {
+    using NativeType = VkWin32KeyedMutexAcquireReleaseInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = {},
+                                                               const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {},
+                                                               const uint64_t * pAcquireKeys_ = {},
+                                                               const uint32_t * pAcquireTimeouts_ = {},
+                                                               uint32_t releaseCount_ = {},
+                                                               const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {},
+                                                               const uint64_t * pReleaseKeys_ = {},
+                                                               const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , acquireCount( acquireCount_ )
+      , pAcquireSyncs( pAcquireSyncs_ )
+      , pAcquireKeys( pAcquireKeys_ )
+      , pAcquireTimeouts( pAcquireTimeouts_ )
+      , releaseCount( releaseCount_ )
+      , pReleaseSyncs( pReleaseSyncs_ )
+      , pReleaseKeys( pReleaseKeys_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : Win32KeyedMutexAcquireReleaseInfoKHR( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs ) )
+    {
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_,
+                                          VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {},
+                                          VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ = {},
+                                          VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {},
+                                          VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {},
+                                          const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) )
+      , pAcquireSyncs( acquireSyncs_.data() )
+      , pAcquireKeys( acquireKeys_.data() )
+      , pAcquireTimeouts( acquireTimeouts_.data() )
+      , releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) )
+      , pReleaseSyncs( releaseSyncs_.data() )
+      , pReleaseKeys( releaseKeys_.data() )
+    {
+#   ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeouts_.size() );
+      VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeouts_.size() );
+#   else
+      if ( acquireSyncs_.size() != acquireKeys_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+                          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" );
+      }
+      if ( acquireSyncs_.size() != acquireTimeouts_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+                          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" );
+      }
+      if ( acquireKeys_.size() != acquireTimeouts_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+                          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" );
+      }
+#   endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+#   ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
+#   else
+      if ( releaseSyncs_.size() != releaseKeys_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+                          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" );
+      }
+#   endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = acquireCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &
+      setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireSyncs = pAcquireSyncs_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR &
+      setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
+      pAcquireSyncs = acquireSyncs_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireKeys = pAcquireKeys_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR &
+      setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
+      pAcquireKeys = acquireKeys_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t * pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireTimeouts = pAcquireTimeouts_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR &
+      setAcquireTimeouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireTimeouts_.size() );
+      pAcquireTimeouts = acquireTimeouts_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = releaseCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &
+      setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseSyncs = pReleaseSyncs_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR &
+      setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
+      pReleaseSyncs = releaseSyncs_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseKeys = pReleaseKeys_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoKHR &
+      setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
+      pReleaseKeys = releaseKeys_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR *>( this );
+    }
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::DeviceMemory * const &,
+               const uint64_t * const &,
+               const uint32_t * const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::DeviceMemory * const &,
+               const uint64_t * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeouts, releaseCount, pReleaseSyncs, pReleaseKeys );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const & ) const = default;
+# else
+    bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) && ( pAcquireSyncs == rhs.pAcquireSyncs ) &&
+             ( pAcquireKeys == rhs.pAcquireKeys ) && ( pAcquireTimeouts == rhs.pAcquireTimeouts ) && ( releaseCount == rhs.releaseCount ) &&
+             ( pReleaseSyncs == rhs.pReleaseSyncs ) && ( pReleaseKeys == rhs.pReleaseKeys );
+#  endif
+    }
+
+    bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
+    const void * pNext = {};
+    uint32_t acquireCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {};
+    const uint64_t * pAcquireKeys = {};
+    const uint32_t * pAcquireTimeouts = {};
+    uint32_t releaseCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {};
+    const uint64_t * pReleaseKeys = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR>
+  {
+    using Type = Win32KeyedMutexAcquireReleaseInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
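+
+  // Editor's note: illustrative sketch, not part of the generated header.
+  // The enhanced-mode constructor above validates that the acquire arrays
+  // (syncs, keys, timeouts) all have the same length, and likewise the
+  // release pair; with exceptions enabled a mismatch throws vk::LogicError,
+  // while under VULKAN_HPP_NO_EXCEPTIONS it is only an assert. Hypothetical
+  // usage with matched one-element spans:
+  //
+  //   std::array<vk::DeviceMemory, 1> syncs = { sharedMemory };
+  //   std::array<uint64_t, 1> keys = { 0 };
+  //   std::array<uint32_t, 1> timeouts = { 0xFFFFFFFFu };  // wait "forever"
+  //   vk::Win32KeyedMutexAcquireReleaseInfoKHR keyedMutex( syncs, keys, timeouts );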
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct Win32KeyedMutexAcquireReleaseInfoNV
+  {
+    using NativeType = VkWin32KeyedMutexAcquireReleaseInfoNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = {},
+                                                              const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {},
+                                                              const uint64_t * pAcquireKeys_ = {},
+                                                              const uint32_t * pAcquireTimeoutMilliseconds_ = {},
+                                                              uint32_t releaseCount_ = {},
+                                                              const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {},
+                                                              const uint64_t * pReleaseKeys_ = {},
+                                                              const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , acquireCount( acquireCount_ )
+      , pAcquireSyncs( pAcquireSyncs_ )
+      , pAcquireKeys( pAcquireKeys_ )
+      , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ )
+      , releaseCount( releaseCount_ )
+      , pReleaseSyncs( pReleaseSyncs_ )
+      , pReleaseKeys( pReleaseKeys_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : Win32KeyedMutexAcquireReleaseInfoNV( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs ) )
+    {
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_,
+                                         VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {},
+                                         VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ = {},
+                                         VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {},
+                                         VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {},
+                                         const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) )
+      , pAcquireSyncs( acquireSyncs_.data() )
+      , pAcquireKeys( acquireKeys_.data() )
+      , pAcquireTimeoutMilliseconds( acquireTimeoutMilliseconds_.data() )
+      , releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) )
+      , pReleaseSyncs( releaseSyncs_.data() )
+      , pReleaseKeys( releaseKeys_.data() )
+    {
+#   ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
+      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeoutMilliseconds_.size() );
+      VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeoutMilliseconds_.size() );
+#   else
+      if ( acquireSyncs_.size() != acquireKeys_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+                          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" );
+      }
+      if ( acquireSyncs_.size() != acquireTimeoutMilliseconds_.size() )
+      {
+        throw LogicError(
+          VULKAN_HPP_NAMESPACE_STRING
+          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()" );
+      }
+      if ( acquireKeys_.size() != acquireTimeoutMilliseconds_.size() )
+      {
+        throw LogicError(
+          VULKAN_HPP_NAMESPACE_STRING
+          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireKeys_.size() != acquireTimeoutMilliseconds_.size()" );
+      }
+#   endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+#   ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
+#   else
+      if ( releaseSyncs_.size() != releaseKeys_.size() )
+      {
+        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
+                          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" );
+      }
+#   endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    Win32KeyedMutexAcquireReleaseInfoNV & operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    Win32KeyedMutexAcquireReleaseInfoNV & operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = acquireCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &
+      setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireSyncs = pAcquireSyncs_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV &
+      setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
+      pAcquireSyncs = acquireSyncs_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireKeys = pAcquireKeys_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV &
+      setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
+      pAcquireKeys = acquireKeys_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &
+      setPAcquireTimeoutMilliseconds( const uint32_t * pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireTimeoutMilliseconds(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = static_cast<uint32_t>( acquireTimeoutMilliseconds_.size() );
+      pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = releaseCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &
+      setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseSyncs = pReleaseSyncs_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV &
+      setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
+      pReleaseSyncs = releaseSyncs_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseKeys = pReleaseKeys_;
+      return *this;
+    }
+
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    Win32KeyedMutexAcquireReleaseInfoNV &
+      setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
+      pReleaseKeys = releaseKeys_.data();
+      return *this;
+    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV *>( this );
+    }
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV *>( this );
+    }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::DeviceMemory * const &,
+               const uint64_t * const &,
+               const uint32_t * const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::DeviceMemory * const &,
+               const uint64_t * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, acquireCount, pAcquireSyncs, pAcquireKeys, pAcquireTimeoutMilliseconds, releaseCount, pReleaseSyncs, pReleaseKeys );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const & ) const = default;
+# else
+    bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) && ( pAcquireSyncs == rhs.pAcquireSyncs ) &&
+             ( pAcquireKeys == rhs.pAcquireKeys ) && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds ) &&
+             ( releaseCount == rhs.releaseCount ) && ( pReleaseSyncs == rhs.pReleaseSyncs ) && ( pReleaseKeys == rhs.pReleaseKeys );
+#  endif
+    }
+
+    bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
+    const void * pNext = {};
+    uint32_t acquireCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {};
+    const uint64_t * pAcquireKeys = {};
+    const uint32_t * pAcquireTimeoutMilliseconds = {};
+    uint32_t releaseCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs = {};
+    const uint64_t * pReleaseKeys = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoNV>
+  {
+    using Type = Win32KeyedMutexAcquireReleaseInfoNV;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  struct Win32SurfaceCreateInfoKHR
+  {
+    using NativeType = VkWin32SurfaceCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32SurfaceCreateInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {},
+                                                    HINSTANCE hinstance_ = {},
+                                                    HWND hwnd_ = {},
+                                                    const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , hinstance( hinstance_ )
+      , hwnd( hwnd_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : Win32SurfaceCreateInfoKHR( *reinterpret_cast<Win32SurfaceCreateInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    Win32SurfaceCreateInfoKHR & operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setHinstance( HINSTANCE hinstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hinstance = hinstance_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setHwnd( HWND hwnd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hwnd = hwnd_;
+      return *this;
+    }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkWin32SurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( this );
+    }
+
+    operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWin32SurfaceCreateInfoKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR const &,
+               HINSTANCE const &,
+               HWND const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, hinstance, hwnd );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Win32SurfaceCreateInfoKHR const & ) const = default;
+# else
+    bool operator==( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( hinstance == rhs.hinstance ) && ( hwnd == rhs.hwnd );
+#  endif
+    }
+
+    bool operator!=( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {};
+    HINSTANCE hinstance = {};
+    HWND hwnd = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eWin32SurfaceCreateInfoKHR>
+  {
+    using Type = Win32SurfaceCreateInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct WriteDescriptorSetAccelerationStructureKHR
+  {
+    using NativeType = VkWriteDescriptorSetAccelerationStructureKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( uint32_t accelerationStructureCount_ = {},
+                                                                     const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ = {},
+                                                                     const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , accelerationStructureCount( accelerationStructureCount_ )
+      , pAccelerationStructures( pAccelerationStructures_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : WriteDescriptorSetAccelerationStructureKHR( *reinterpret_cast<WriteDescriptorSetAccelerationStructureKHR const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    WriteDescriptorSetAccelerationStructureKHR(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_,
+      const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) )
+      , pAccelerationStructures( accelerationStructures_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    WriteDescriptorSetAccelerationStructureKHR & operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    WriteDescriptorSetAccelerationStructureKHR & operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR &
+      setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureCount = accelerationStructureCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR &
+      setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAccelerationStructures = pAccelerationStructures_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructures(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
+      pAccelerationStructures = accelerationStructures_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkWriteDescriptorSetAccelerationStructureKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureKHR *>( this );
+    }
+
+    operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const & ) const = default;
+#else
+    bool operator==( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureCount == rhs.accelerationStructureCount ) &&
+             ( pAccelerationStructures == rhs.pAccelerationStructures );
+# endif
+    }
+
+    bool operator!=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
+    const void * pNext = {};
+    uint32_t accelerationStructureCount = {};
+    const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureKHR>
+  {
+    using Type = WriteDescriptorSetAccelerationStructureKHR;
+  };
+
+  struct WriteDescriptorSetAccelerationStructureNV
+  {
+    using NativeType = VkWriteDescriptorSetAccelerationStructureNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = {},
+                                                                    const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ = {},
+                                                                    const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , accelerationStructureCount( accelerationStructureCount_ )
+      , pAccelerationStructures( pAccelerationStructures_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : WriteDescriptorSetAccelerationStructureNV( *reinterpret_cast<WriteDescriptorSetAccelerationStructureNV const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    WriteDescriptorSetAccelerationStructureNV(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures_,
+      const void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) )
+      , pAccelerationStructures( accelerationStructures_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    WriteDescriptorSetAccelerationStructureNV & operator=( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    WriteDescriptorSetAccelerationStructureNV & operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV &
+      setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureCount = accelerationStructureCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV &
+      setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAccelerationStructures = pAccelerationStructures_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    WriteDescriptorSetAccelerationStructureNV & setAccelerationStructures(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
+      pAccelerationStructures = accelerationStructures_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkWriteDescriptorSetAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV *>( this );
+    }
+
+    operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( WriteDescriptorSetAccelerationStructureNV const & ) const = default;
+#else
+    bool operator==( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureCount == rhs.accelerationStructureCount ) &&
+             ( pAccelerationStructures == rhs.pAccelerationStructures );
+# endif
+    }
+
+    bool operator!=( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
+    const void * pNext = {};
+    uint32_t accelerationStructureCount = {};
+    const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureNV>
+  {
+    using Type = WriteDescriptorSetAccelerationStructureNV;
+  };
+
+  struct WriteDescriptorSetInlineUniformBlock
+  {
+    using NativeType = VkWriteDescriptorSetInlineUniformBlock;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetInlineUniformBlock;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR
+      WriteDescriptorSetInlineUniformBlock( uint32_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , dataSize( dataSize_ )
+      , pData( pData_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    WriteDescriptorSetInlineUniformBlock( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT
+      : WriteDescriptorSetInlineUniformBlock( *reinterpret_cast<WriteDescriptorSetInlineUniformBlock const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    WriteDescriptorSetInlineUniformBlock( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_, const void * pNext_ = nullptr )
+      : pNext( pNext_ ), dataSize( static_cast<uint32_t>( data_.size() * sizeof( T ) ) ), pData( data_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    WriteDescriptorSetInlineUniformBlock & operator=( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    WriteDescriptorSetInlineUniformBlock & operator=( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setDataSize( uint32_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = dataSize_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pData = pData_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    template <typename T>
+    WriteDescriptorSetInlineUniformBlock & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = static_cast<uint32_t>( data_.size() * sizeof( T ) );
+      pData = data_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkWriteDescriptorSetInlineUniformBlock const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlock *>( this );
+    }
+
+    operator VkWriteDescriptorSetInlineUniformBlock &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlock *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &, const void * const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, dataSize, pData );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( WriteDescriptorSetInlineUniformBlock const & ) const = default;
+#else
+    bool operator==( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
+# endif
+    }
+
+    bool operator!=( WriteDescriptorSetInlineUniformBlock const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlock;
+    const void * pNext = {};
+    uint32_t dataSize = {};
+    const void * pData = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eWriteDescriptorSetInlineUniformBlock>
+  {
+    using Type = WriteDescriptorSetInlineUniformBlock;
+  };
+
+  using WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock;
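+
+  // Editor's note: illustrative sketch, not part of the generated header.
+  // The templated setData() computes dataSize in bytes from the element type,
+  // which avoids the classic element-count/byte-count mix-up (the uniform
+  // block type here is hypothetical):
+  //
+  //   struct Params { float scale[4]; };
+  //   std::array<Params, 1> params = {};
+  //   auto inlineWrite = vk::WriteDescriptorSetInlineUniformBlock{}.setData( params );
+  //   // inlineWrite.dataSize == sizeof( Params ), i.e. 16 bytes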
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  struct XcbSurfaceCreateInfoKHR
+  {
+    using NativeType = VkXcbSurfaceCreateInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXcbSurfaceCreateInfoKHR;
+
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {},
+                                                  xcb_connection_t * connection_ = {},
+                                                  xcb_window_t window_ = {},
+                                                  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext( pNext_ )
+      , flags( flags_ )
+      , connection( connection_ )
+      , window( window_ )
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : XcbSurfaceCreateInfoKHR( *reinterpret_cast<XcbSurfaceCreateInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    XcbSurfaceCreateInfoKHR & operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t * connection_ ) VULKAN_HPP_NOEXCEPT
+    {
+      connection = connection_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setWindow( xcb_window_t window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkXcbSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( this );
+    }
+
+    operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXcbSurfaceCreateInfoKHR *>( this );
+    }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR const &,
+               xcb_connection_t * const &,
+               xcb_window_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, connection, window );
+    }
+# endif
+
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    std::strong_ordering operator<=>( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+        return cmp;
+      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
+        return cmp;
+      if ( auto cmp = connection <=> rhs.connection; cmp != 0 )
+        return cmp;
+      if ( auto cmp = memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+# endif
+
+    bool operator==( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( connection == rhs.connection ) &&
+             ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 );
+    }
+
+    bool operator!=( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {};
+    xcb_connection_t * connection = {};
+    xcb_window_t window = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eXcbSurfaceCreateInfoKHR>
+  {
+    using Type = XcbSurfaceCreateInfoKHR;
+  };
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = dpy <=> rhs.dpy; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &window, &rhs.window, sizeof( Window ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dpy == rhs.dpy ) && + ( memcmp( &window, &rhs.window, sizeof( Window ) ) == 0 ); + } + + bool operator!=( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {}; + Display * dpy = {}; + Window window = {}; + }; + + template <> + struct CppType + { + using Type = XlibSurfaceCreateInfoKHR; + }; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_to_string.hpp b/MacroLibX/third_party/vulkan/vulkan_to_string.hpp new file mode 100644 index 0000000..ad4e5f7 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_to_string.hpp @@ -0,0 +1,9026 @@ +// Copyright 2015-2023 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#ifndef VULKAN_TO_STRING_HPP +#define VULKAN_TO_STRING_HPP + +#include + +#if __cpp_lib_format +# include // std::format +#else +# include // std::stringstream +#endif + +namespace VULKAN_HPP_NAMESPACE +{ + + //========================== + //=== BITMASKs to_string === + //========================== + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & FormatFeatureFlagBits::eSampledImage ) + result += "SampledImage | "; + if ( value & FormatFeatureFlagBits::eStorageImage ) + result += "StorageImage | "; + if ( value & FormatFeatureFlagBits::eStorageImageAtomic ) + result += "StorageImageAtomic | "; + if ( value & FormatFeatureFlagBits::eUniformTexelBuffer ) + result += "UniformTexelBuffer | "; + if ( value & FormatFeatureFlagBits::eStorageTexelBuffer ) + result += "StorageTexelBuffer | "; + if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic ) + result += "StorageTexelBufferAtomic | "; + if ( value & FormatFeatureFlagBits::eVertexBuffer ) + result += "VertexBuffer | "; + if ( value & FormatFeatureFlagBits::eColorAttachment ) + result += "ColorAttachment | "; + if ( value & FormatFeatureFlagBits::eColorAttachmentBlend ) + result += "ColorAttachmentBlend | "; + if ( value & FormatFeatureFlagBits::eDepthStencilAttachment ) + result += "DepthStencilAttachment | "; + if ( value & FormatFeatureFlagBits::eBlitSrc ) + result += "BlitSrc | "; + if ( value & FormatFeatureFlagBits::eBlitDst ) + result += "BlitDst | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear ) + result += "SampledImageFilterLinear | "; + if ( value & FormatFeatureFlagBits::eTransferSrc ) + result += "TransferSrc | "; + if ( value & FormatFeatureFlagBits::eTransferDst ) + result += "TransferDst | "; + if ( value & FormatFeatureFlagBits::eMidpointChromaSamples ) + result += "MidpointChromaSamples | "; + if ( value & 
FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) + result += "SampledImageYcbcrConversionLinearFilter | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) + result += "SampledImageYcbcrConversionSeparateReconstructionFilter | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) + result += "SampledImageYcbcrConversionChromaReconstructionExplicit | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) + result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | "; + if ( value & FormatFeatureFlagBits::eDisjoint ) + result += "Disjoint | "; + if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) + result += "CositedChromaSamples | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) + result += "SampledImageFilterMinmax | "; + if ( value & FormatFeatureFlagBits::eVideoDecodeOutputKHR ) + result += "VideoDecodeOutputKHR | "; + if ( value & FormatFeatureFlagBits::eVideoDecodeDpbKHR ) + result += "VideoDecodeDpbKHR | "; + if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) + result += "AccelerationStructureVertexBufferKHR | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicEXT ) + result += "SampledImageFilterCubicEXT | "; + if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) + result += "FragmentDensityMapEXT | "; + if ( value & FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR ) + result += "FragmentShadingRateAttachmentKHR | "; + if ( value & FormatFeatureFlagBits::eVideoEncodeInputKHR ) + result += "VideoEncodeInputKHR | "; + if ( value & FormatFeatureFlagBits::eVideoEncodeDpbKHR ) + result += "VideoEncodeDpbKHR | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ImageCreateFlagBits::eSparseBinding ) + result += "SparseBinding | "; + if ( value & ImageCreateFlagBits::eSparseResidency ) + result += "SparseResidency | "; + if ( value & ImageCreateFlagBits::eSparseAliased ) + result += "SparseAliased | "; + if ( value & ImageCreateFlagBits::eMutableFormat ) + result += "MutableFormat | "; + if ( value & ImageCreateFlagBits::eCubeCompatible ) + result += "CubeCompatible | "; + if ( value & ImageCreateFlagBits::eAlias ) + result += "Alias | "; + if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions ) + result += "SplitInstanceBindRegions | "; + if ( value & ImageCreateFlagBits::e2DArrayCompatible ) + result += "2DArrayCompatible | "; + if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible ) + result += "BlockTexelViewCompatible | "; + if ( value & ImageCreateFlagBits::eExtendedUsage ) + result += "ExtendedUsage | "; + if ( value & ImageCreateFlagBits::eProtected ) + result += "Protected | "; + if ( value & ImageCreateFlagBits::eDisjoint ) + result += "Disjoint | "; + if ( value & ImageCreateFlagBits::eCornerSampledNV ) + result += "CornerSampledNV | "; + if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) + result += "SampleLocationsCompatibleDepthEXT | "; + if ( value & ImageCreateFlagBits::eSubsampledEXT ) + result += "SubsampledEXT | "; + if ( value & ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) + result += "DescriptorBufferCaptureReplayEXT | "; + if ( value & ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT ) + result 
+= "MultisampledRenderToSingleSampledEXT | "; + if ( value & ImageCreateFlagBits::e2DViewCompatibleEXT ) + result += "2DViewCompatibleEXT | "; + if ( value & ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM ) + result += "FragmentDensityMapOffsetQCOM | "; + if ( value & ImageCreateFlagBits::eVideoProfileIndependentKHR ) + result += "VideoProfileIndependentKHR | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ImageUsageFlagBits::eTransferSrc ) + result += "TransferSrc | "; + if ( value & ImageUsageFlagBits::eTransferDst ) + result += "TransferDst | "; + if ( value & ImageUsageFlagBits::eSampled ) + result += "Sampled | "; + if ( value & ImageUsageFlagBits::eStorage ) + result += "Storage | "; + if ( value & ImageUsageFlagBits::eColorAttachment ) + result += "ColorAttachment | "; + if ( value & ImageUsageFlagBits::eDepthStencilAttachment ) + result += "DepthStencilAttachment | "; + if ( value & ImageUsageFlagBits::eTransientAttachment ) + result += "TransientAttachment | "; + if ( value & ImageUsageFlagBits::eInputAttachment ) + result += "InputAttachment | "; + if ( value & ImageUsageFlagBits::eVideoDecodeDstKHR ) + result += "VideoDecodeDstKHR | "; + if ( value & ImageUsageFlagBits::eVideoDecodeSrcKHR ) + result += "VideoDecodeSrcKHR | "; + if ( value & ImageUsageFlagBits::eVideoDecodeDpbKHR ) + result += "VideoDecodeDpbKHR | "; + if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT ) + result += "FragmentDensityMapEXT | "; + if ( value & ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR ) + result += "FragmentShadingRateAttachmentKHR | "; + if ( value & ImageUsageFlagBits::eHostTransferEXT ) + result += "HostTransferEXT | "; + if ( value & ImageUsageFlagBits::eVideoEncodeDstKHR ) + result += "VideoEncodeDstKHR | "; + if ( value & ImageUsageFlagBits::eVideoEncodeSrcKHR ) + result += "VideoEncodeSrcKHR | "; + if ( value & ImageUsageFlagBits::eVideoEncodeDpbKHR ) + result += "VideoEncodeDpbKHR | "; + if ( value & ImageUsageFlagBits::eAttachmentFeedbackLoopEXT ) + result += "AttachmentFeedbackLoopEXT | "; + if ( value & ImageUsageFlagBits::eInvocationMaskHUAWEI ) + result += "InvocationMaskHUAWEI | "; + if ( value & ImageUsageFlagBits::eSampleWeightQCOM ) + result += "SampleWeightQCOM | "; + if ( value & ImageUsageFlagBits::eSampleBlockMatchQCOM ) + result += "SampleBlockMatchQCOM | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & InstanceCreateFlagBits::eEnumeratePortabilityKHR ) + result += "EnumeratePortabilityKHR | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & MemoryHeapFlagBits::eDeviceLocal ) + result += "DeviceLocal | "; + if ( value & MemoryHeapFlagBits::eMultiInstance ) + result += "MultiInstance | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & MemoryPropertyFlagBits::eDeviceLocal ) + result += "DeviceLocal | "; + if ( value & MemoryPropertyFlagBits::eHostVisible ) + result += "HostVisible | "; + if ( value & 
MemoryPropertyFlagBits::eHostCoherent ) + result += "HostCoherent | "; + if ( value & MemoryPropertyFlagBits::eHostCached ) + result += "HostCached | "; + if ( value & MemoryPropertyFlagBits::eLazilyAllocated ) + result += "LazilyAllocated | "; + if ( value & MemoryPropertyFlagBits::eProtected ) + result += "Protected | "; + if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD ) + result += "DeviceCoherentAMD | "; + if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD ) + result += "DeviceUncachedAMD | "; + if ( value & MemoryPropertyFlagBits::eRdmaCapableNV ) + result += "RdmaCapableNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( QueueFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & QueueFlagBits::eGraphics ) + result += "Graphics | "; + if ( value & QueueFlagBits::eCompute ) + result += "Compute | "; + if ( value & QueueFlagBits::eTransfer ) + result += "Transfer | "; + if ( value & QueueFlagBits::eSparseBinding ) + result += "SparseBinding | "; + if ( value & QueueFlagBits::eProtected ) + result += "Protected | "; + if ( value & QueueFlagBits::eVideoDecodeKHR ) + result += "VideoDecodeKHR | "; + if ( value & QueueFlagBits::eVideoEncodeKHR ) + result += "VideoEncodeKHR | "; + if ( value & QueueFlagBits::eOpticalFlowNV ) + result += "OpticalFlowNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SampleCountFlagBits::e1 ) + result += "1 | "; + if ( value & SampleCountFlagBits::e2 ) + result += "2 | "; + if ( value & SampleCountFlagBits::e4 ) + result += "4 | "; + if ( value & SampleCountFlagBits::e8 ) + result += "8 | "; + if ( value & SampleCountFlagBits::e16 ) + result += "16 | "; + if ( value & SampleCountFlagBits::e32 ) + result += "32 | "; + if ( value & SampleCountFlagBits::e64 ) + result += "64 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DeviceQueueCreateFlagBits::eProtected ) + result += "Protected | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value ) + { + if ( !value ) + return "None"; + + std::string result; + if ( value & PipelineStageFlagBits::eTopOfPipe ) + result += "TopOfPipe | "; + if ( value & PipelineStageFlagBits::eDrawIndirect ) + result += "DrawIndirect | "; + if ( value & PipelineStageFlagBits::eVertexInput ) + result += "VertexInput | "; + if ( value & PipelineStageFlagBits::eVertexShader ) + result += "VertexShader | "; + if ( value & PipelineStageFlagBits::eTessellationControlShader ) + result += "TessellationControlShader | "; + if ( value & PipelineStageFlagBits::eTessellationEvaluationShader ) + result += "TessellationEvaluationShader | "; + if ( value & PipelineStageFlagBits::eGeometryShader ) + result += "GeometryShader | "; + if ( value & PipelineStageFlagBits::eFragmentShader ) + result += "FragmentShader | "; + if ( value & PipelineStageFlagBits::eEarlyFragmentTests ) + result += "EarlyFragmentTests | "; + if ( value & PipelineStageFlagBits::eLateFragmentTests ) + result += "LateFragmentTests | "; + if ( value & 
PipelineStageFlagBits::eColorAttachmentOutput ) + result += "ColorAttachmentOutput | "; + if ( value & PipelineStageFlagBits::eComputeShader ) + result += "ComputeShader | "; + if ( value & PipelineStageFlagBits::eTransfer ) + result += "Transfer | "; + if ( value & PipelineStageFlagBits::eBottomOfPipe ) + result += "BottomOfPipe | "; + if ( value & PipelineStageFlagBits::eHost ) + result += "Host | "; + if ( value & PipelineStageFlagBits::eAllGraphics ) + result += "AllGraphics | "; + if ( value & PipelineStageFlagBits::eAllCommands ) + result += "AllCommands | "; + if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) + result += "TransformFeedbackEXT | "; + if ( value & PipelineStageFlagBits::eConditionalRenderingEXT ) + result += "ConditionalRenderingEXT | "; + if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR ) + result += "AccelerationStructureBuildKHR | "; + if ( value & PipelineStageFlagBits::eRayTracingShaderKHR ) + result += "RayTracingShaderKHR | "; + if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) + result += "FragmentDensityProcessEXT | "; + if ( value & PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR ) + result += "FragmentShadingRateAttachmentKHR | "; + if ( value & PipelineStageFlagBits::eCommandPreprocessNV ) + result += "CommandPreprocessNV | "; + if ( value & PipelineStageFlagBits::eTaskShaderEXT ) + result += "TaskShaderEXT | "; + if ( value & PipelineStageFlagBits::eMeshShaderEXT ) + result += "MeshShaderEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value ) + { + if ( !value ) + return "None"; + + std::string result; + if ( value & ImageAspectFlagBits::eColor ) + result += "Color | "; + if ( value & ImageAspectFlagBits::eDepth ) + result += "Depth | "; + if ( value & ImageAspectFlagBits::eStencil ) + result += "Stencil | "; + if ( value & ImageAspectFlagBits::eMetadata ) + result += "Metadata | "; + if ( value & ImageAspectFlagBits::ePlane0 ) + result += "Plane0 | "; + if ( value & ImageAspectFlagBits::ePlane1 ) + result += "Plane1 | "; + if ( value & ImageAspectFlagBits::ePlane2 ) + result += "Plane2 | "; + if ( value & ImageAspectFlagBits::eMemoryPlane0EXT ) + result += "MemoryPlane0EXT | "; + if ( value & ImageAspectFlagBits::eMemoryPlane1EXT ) + result += "MemoryPlane1EXT | "; + if ( value & ImageAspectFlagBits::eMemoryPlane2EXT ) + result += "MemoryPlane2EXT | "; + if ( value & ImageAspectFlagBits::eMemoryPlane3EXT ) + result += "MemoryPlane3EXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SparseImageFormatFlagBits::eSingleMiptail ) + result += "SingleMiptail | "; + if ( value & SparseImageFormatFlagBits::eAlignedMipSize ) + result += "AlignedMipSize | "; + if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize ) + result += "NonstandardBlockSize | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SparseMemoryBindFlagBits::eMetadata ) + result += "Metadata | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags 
value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & FenceCreateFlagBits::eSignaled ) + result += "Signaled | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( EventCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & EventCreateFlagBits::eDeviceOnly ) + result += "DeviceOnly | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) + result += "InputAssemblyVertices | "; + if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) + result += "InputAssemblyPrimitives | "; + if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) + result += "VertexShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) + result += "GeometryShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) + result += "GeometryShaderPrimitives | "; + if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations ) + result += "ClippingInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives ) + result += "ClippingPrimitives | "; + if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) + result += "FragmentShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) + result += "TessellationControlShaderPatches | "; + if ( value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) + result += "TessellationEvaluationShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) + result += "ComputeShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT ) + result += "TaskShaderInvocationsEXT | "; + if ( value & QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT ) + result += "MeshShaderInvocationsEXT | "; + if ( value & QueryPipelineStatisticFlagBits::eClusterCullingShaderInvocationsHUAWEI ) + result += "ClusterCullingShaderInvocationsHUAWEI | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & QueryResultFlagBits::e64 ) + result += "64 | "; + if ( value & QueryResultFlagBits::eWait ) + result += "Wait | "; + if ( value & QueryResultFlagBits::eWithAvailability ) + result += "WithAvailability | "; + if ( value & QueryResultFlagBits::ePartial ) + result += "Partial | "; + if ( value & QueryResultFlagBits::eWithStatusKHR ) + result += "WithStatusKHR | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & BufferCreateFlagBits::eSparseBinding ) + result += "SparseBinding | "; + if ( value & BufferCreateFlagBits::eSparseResidency ) + result += "SparseResidency | "; + if ( value & BufferCreateFlagBits::eSparseAliased ) + result += "SparseAliased | "; + if ( value & 
BufferCreateFlagBits::eProtected ) + result += "Protected | "; + if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay ) + result += "DeviceAddressCaptureReplay | "; + if ( value & BufferCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) + result += "DescriptorBufferCaptureReplayEXT | "; + if ( value & BufferCreateFlagBits::eVideoProfileIndependentKHR ) + result += "VideoProfileIndependentKHR | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & BufferUsageFlagBits::eTransferSrc ) + result += "TransferSrc | "; + if ( value & BufferUsageFlagBits::eTransferDst ) + result += "TransferDst | "; + if ( value & BufferUsageFlagBits::eUniformTexelBuffer ) + result += "UniformTexelBuffer | "; + if ( value & BufferUsageFlagBits::eStorageTexelBuffer ) + result += "StorageTexelBuffer | "; + if ( value & BufferUsageFlagBits::eUniformBuffer ) + result += "UniformBuffer | "; + if ( value & BufferUsageFlagBits::eStorageBuffer ) + result += "StorageBuffer | "; + if ( value & BufferUsageFlagBits::eIndexBuffer ) + result += "IndexBuffer | "; + if ( value & BufferUsageFlagBits::eVertexBuffer ) + result += "VertexBuffer | "; + if ( value & BufferUsageFlagBits::eIndirectBuffer ) + result += "IndirectBuffer | "; + if ( value & BufferUsageFlagBits::eShaderDeviceAddress ) + result += "ShaderDeviceAddress | "; + if ( value & BufferUsageFlagBits::eVideoDecodeSrcKHR ) + result += "VideoDecodeSrcKHR | "; + if ( value & BufferUsageFlagBits::eVideoDecodeDstKHR ) + result += "VideoDecodeDstKHR | "; + if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) + result += "TransformFeedbackBufferEXT | "; + if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) + result += "TransformFeedbackCounterBufferEXT | "; + if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) + result += "ConditionalRenderingEXT | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits::eExecutionGraphScratchAMDX ) + result += "ExecutionGraphScratchAMDX | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) + result += "AccelerationStructureBuildInputReadOnlyKHR | "; + if ( value & BufferUsageFlagBits::eAccelerationStructureStorageKHR ) + result += "AccelerationStructureStorageKHR | "; + if ( value & BufferUsageFlagBits::eShaderBindingTableKHR ) + result += "ShaderBindingTableKHR | "; + if ( value & BufferUsageFlagBits::eVideoEncodeDstKHR ) + result += "VideoEncodeDstKHR | "; + if ( value & BufferUsageFlagBits::eVideoEncodeSrcKHR ) + result += "VideoEncodeSrcKHR | "; + if ( value & BufferUsageFlagBits::eSamplerDescriptorBufferEXT ) + result += "SamplerDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits::eResourceDescriptorBufferEXT ) + result += "ResourceDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits::ePushDescriptorsDescriptorBufferEXT ) + result += "PushDescriptorsDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT ) + result += "MicromapBuildInputReadOnlyEXT | "; + if ( value & BufferUsageFlagBits::eMicromapStorageEXT ) + result += "MicromapStorageEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value ) + { + if ( 
!value ) + return "{}"; + + std::string result; + if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) + result += "FragmentDensityMapDynamicEXT | "; + if ( value & ImageViewCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) + result += "DescriptorBufferCaptureReplayEXT | "; + if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT ) + result += "FragmentDensityMapDeferredEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineCacheCreateFlagBits::eExternallySynchronized ) + result += "ExternallySynchronized | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ColorComponentFlagBits::eR ) + result += "R | "; + if ( value & ColorComponentFlagBits::eG ) + result += "G | "; + if ( value & ColorComponentFlagBits::eB ) + result += "B | "; + if ( value & ColorComponentFlagBits::eA ) + result += "A | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( CullModeFlags value ) + { + if ( !value ) + return "None"; + + std::string result; + if ( value & CullModeFlagBits::eFront ) + result += "Front | "; + if ( value & CullModeFlagBits::eBack ) + result += "Back | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT ) + result += "RasterizationOrderAttachmentAccessEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineCreateFlagBits::eDisableOptimization ) + result += "DisableOptimization | "; + if ( value & PipelineCreateFlagBits::eAllowDerivatives ) + result += "AllowDerivatives | "; + if ( value & PipelineCreateFlagBits::eDerivative ) + result += "Derivative | "; + if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) + result += "ViewIndexFromDeviceIndex | "; + if ( value & PipelineCreateFlagBits::eDispatchBase ) + result += "DispatchBase | "; + if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequired ) + result += "FailOnPipelineCompileRequired | "; + if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailure ) + result += "EarlyReturnOnFailure | "; + if ( value & PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR ) + result += "RenderingFragmentShadingRateAttachmentKHR | "; + if ( value & PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT ) + result += "RenderingFragmentDensityMapAttachmentEXT | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) + result += "RayTracingNoNullAnyHitShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) + result += "RayTracingNoNullClosestHitShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) + result += "RayTracingNoNullMissShadersKHR | "; + if ( value & 
PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) + result += "RayTracingNoNullIntersectionShadersKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) + result += "RayTracingSkipTrianglesKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) + result += "RayTracingSkipAabbsKHR | "; + if ( value & PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR ) + result += "RayTracingShaderGroupHandleCaptureReplayKHR | "; + if ( value & PipelineCreateFlagBits::eDeferCompileNV ) + result += "DeferCompileNV | "; + if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) + result += "CaptureStatisticsKHR | "; + if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) + result += "CaptureInternalRepresentationsKHR | "; + if ( value & PipelineCreateFlagBits::eIndirectBindableNV ) + result += "IndirectBindableNV | "; + if ( value & PipelineCreateFlagBits::eLibraryKHR ) + result += "LibraryKHR | "; + if ( value & PipelineCreateFlagBits::eDescriptorBufferEXT ) + result += "DescriptorBufferEXT | "; + if ( value & PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT ) + result += "RetainLinkTimeOptimizationInfoEXT | "; + if ( value & PipelineCreateFlagBits::eLinkTimeOptimizationEXT ) + result += "LinkTimeOptimizationEXT | "; + if ( value & PipelineCreateFlagBits::eRayTracingAllowMotionNV ) + result += "RayTracingAllowMotionNV | "; + if ( value & PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT ) + result += "ColorAttachmentFeedbackLoopEXT | "; + if ( value & PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT ) + result += "DepthStencilAttachmentFeedbackLoopEXT | "; + if ( value & PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT ) + result += "RayTracingOpacityMicromapEXT | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & PipelineCreateFlagBits::eRayTracingDisplacementMicromapNV ) + result += "RayTracingDisplacementMicromapNV | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & PipelineCreateFlagBits::eNoProtectedAccessEXT ) + result += "NoProtectedAccessEXT | "; + if ( value & PipelineCreateFlagBits::eProtectedAccessOnlyEXT ) + result += "ProtectedAccessOnlyEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT ) + result += "RasterizationOrderAttachmentDepthAccessEXT | "; + if ( value & PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT ) + result += "RasterizationOrderAttachmentStencilAccessEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineLayoutCreateFlagBits::eIndependentSetsEXT ) + result += "IndependentSetsEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags ) + { 
+ return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize ) + result += "AllowVaryingSubgroupSize | "; + if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroups ) + result += "RequireFullSubgroups | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ShaderStageFlagBits::eVertex ) + result += "Vertex | "; + if ( value & ShaderStageFlagBits::eTessellationControl ) + result += "TessellationControl | "; + if ( value & ShaderStageFlagBits::eTessellationEvaluation ) + result += "TessellationEvaluation | "; + if ( value & ShaderStageFlagBits::eGeometry ) + result += "Geometry | "; + if ( value & ShaderStageFlagBits::eFragment ) + result += "Fragment | "; + if ( value & ShaderStageFlagBits::eCompute ) + result += "Compute | "; + if ( value & ShaderStageFlagBits::eRaygenKHR ) + result += "RaygenKHR | "; + if ( value & ShaderStageFlagBits::eAnyHitKHR ) + result += "AnyHitKHR | "; + if ( value & ShaderStageFlagBits::eClosestHitKHR ) + result += "ClosestHitKHR | "; + if ( value & ShaderStageFlagBits::eMissKHR ) + result += "MissKHR | "; + if ( value & ShaderStageFlagBits::eIntersectionKHR ) + result += "IntersectionKHR | "; + if ( value & ShaderStageFlagBits::eCallableKHR ) + result += "CallableKHR | "; + if ( value & ShaderStageFlagBits::eTaskEXT ) + result += "TaskEXT | "; + if ( value & ShaderStageFlagBits::eMeshEXT ) + result += "MeshEXT | "; + if ( value & ShaderStageFlagBits::eSubpassShadingHUAWEI ) + result += "SubpassShadingHUAWEI | "; + if ( value & ShaderStageFlagBits::eClusterCullingHUAWEI ) + result += "ClusterCullingHUAWEI | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SamplerCreateFlagBits::eSubsampledEXT ) + result += "SubsampledEXT | "; + if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) + result += "SubsampledCoarseReconstructionEXT | "; + if ( value & SamplerCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) + result += "DescriptorBufferCaptureReplayEXT | "; + if ( value & SamplerCreateFlagBits::eNonSeamlessCubeMapEXT ) + result += "NonSeamlessCubeMapEXT | "; + if ( value & SamplerCreateFlagBits::eImageProcessingQCOM ) + result += "ImageProcessingQCOM | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) + result += "FreeDescriptorSet | "; + if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind ) + result += "UpdateAfterBind | "; + if ( value & DescriptorPoolCreateFlagBits::eHostOnlyEXT ) + result += "HostOnlyEXT | "; + if ( value & DescriptorPoolCreateFlagBits::eAllowOverallocationSetsNV ) + result += 
"AllowOverallocationSetsNV | "; + if ( value & DescriptorPoolCreateFlagBits::eAllowOverallocationPoolsNV ) + result += "AllowOverallocationPoolsNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) + result += "UpdateAfterBindPool | "; + if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) + result += "PushDescriptorKHR | "; + if ( value & DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT ) + result += "DescriptorBufferEXT | "; + if ( value & DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT ) + result += "EmbeddedImmutableSamplersEXT | "; + if ( value & DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV ) + result += "IndirectBindableNV | "; + if ( value & DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT ) + result += "HostOnlyPoolEXT | "; + if ( value & DescriptorSetLayoutCreateFlagBits::ePerStageNV ) + result += "PerStageNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( AccessFlags value ) + { + if ( !value ) + return "None"; + + std::string result; + if ( value & AccessFlagBits::eIndirectCommandRead ) + result += "IndirectCommandRead | "; + if ( value & AccessFlagBits::eIndexRead ) + result += "IndexRead | "; + if ( value & AccessFlagBits::eVertexAttributeRead ) + result += "VertexAttributeRead | "; + if ( value & AccessFlagBits::eUniformRead ) + result += "UniformRead | "; + if ( value & AccessFlagBits::eInputAttachmentRead ) + result += "InputAttachmentRead | "; + if ( value & AccessFlagBits::eShaderRead ) + result += "ShaderRead | "; + if ( value & AccessFlagBits::eShaderWrite ) + result += "ShaderWrite | "; + if ( value & AccessFlagBits::eColorAttachmentRead ) + result += "ColorAttachmentRead | "; + if ( value & AccessFlagBits::eColorAttachmentWrite ) + result += "ColorAttachmentWrite | "; + if ( value & AccessFlagBits::eDepthStencilAttachmentRead ) + result += "DepthStencilAttachmentRead | "; + if ( value & AccessFlagBits::eDepthStencilAttachmentWrite ) + result += "DepthStencilAttachmentWrite | "; + if ( value & AccessFlagBits::eTransferRead ) + result += "TransferRead | "; + if ( value & AccessFlagBits::eTransferWrite ) + result += "TransferWrite | "; + if ( value & AccessFlagBits::eHostRead ) + result += "HostRead | "; + if ( value & AccessFlagBits::eHostWrite ) + result += "HostWrite | "; + if ( value & AccessFlagBits::eMemoryRead ) + result += "MemoryRead | "; + if ( value & AccessFlagBits::eMemoryWrite ) + result += "MemoryWrite | "; + if ( value & AccessFlagBits::eTransformFeedbackWriteEXT ) + result += "TransformFeedbackWriteEXT | "; + if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) + result += "TransformFeedbackCounterReadEXT | "; + if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) + result += "TransformFeedbackCounterWriteEXT | "; + if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) + result += "ConditionalRenderingReadEXT | "; + if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) + result += "ColorAttachmentReadNoncoherentEXT | "; + if ( value & AccessFlagBits::eAccelerationStructureReadKHR ) + result += "AccelerationStructureReadKHR | "; + if ( value & 
AccessFlagBits::eAccelerationStructureWriteKHR ) + result += "AccelerationStructureWriteKHR | "; + if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) + result += "FragmentDensityMapReadEXT | "; + if ( value & AccessFlagBits::eFragmentShadingRateAttachmentReadKHR ) + result += "FragmentShadingRateAttachmentReadKHR | "; + if ( value & AccessFlagBits::eCommandPreprocessReadNV ) + result += "CommandPreprocessReadNV | "; + if ( value & AccessFlagBits::eCommandPreprocessWriteNV ) + result += "CommandPreprocessWriteNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & AttachmentDescriptionFlagBits::eMayAlias ) + result += "MayAlias | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( DependencyFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DependencyFlagBits::eByRegion ) + result += "ByRegion | "; + if ( value & DependencyFlagBits::eDeviceGroup ) + result += "DeviceGroup | "; + if ( value & DependencyFlagBits::eViewLocal ) + result += "ViewLocal | "; + if ( value & DependencyFlagBits::eFeedbackLoopEXT ) + result += "FeedbackLoopEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & FramebufferCreateFlagBits::eImageless ) + result += "Imageless | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & RenderPassCreateFlagBits::eTransformQCOM ) + result += "TransformQCOM | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) + result += "PerViewAttributesNVX | "; + if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) + result += "PerViewPositionXOnlyNVX | "; + if ( value & SubpassDescriptionFlagBits::eFragmentRegionQCOM ) + result += "FragmentRegionQCOM | "; + if ( value & SubpassDescriptionFlagBits::eShaderResolveQCOM ) + result += "ShaderResolveQCOM | "; + if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessEXT ) + result += "RasterizationOrderAttachmentColorAccessEXT | "; + if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessEXT ) + result += "RasterizationOrderAttachmentDepthAccessEXT | "; + if ( value & SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessEXT ) + result += "RasterizationOrderAttachmentStencilAccessEXT | "; + if ( value & SubpassDescriptionFlagBits::eEnableLegacyDitheringEXT ) + result += "EnableLegacyDitheringEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & CommandPoolCreateFlagBits::eTransient ) + result += "Transient | "; + if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer ) + result += "ResetCommandBuffer | "; + if ( value & CommandPoolCreateFlagBits::eProtected ) + 
result += "Protected | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & CommandPoolResetFlagBits::eReleaseResources ) + result += "ReleaseResources | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & CommandBufferResetFlagBits::eReleaseResources ) + result += "ReleaseResources | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit ) + result += "OneTimeSubmit | "; + if ( value & CommandBufferUsageFlagBits::eRenderPassContinue ) + result += "RenderPassContinue | "; + if ( value & CommandBufferUsageFlagBits::eSimultaneousUse ) + result += "SimultaneousUse | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & QueryControlFlagBits::ePrecise ) + result += "Precise | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & StencilFaceFlagBits::eFront ) + result += "Front | "; + if ( value & StencilFaceFlagBits::eBack ) + result += "Back | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_VERSION_1_1 === + + VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SubgroupFeatureFlagBits::eBasic ) + result += "Basic | "; + if ( value & SubgroupFeatureFlagBits::eVote ) + result += "Vote | "; + if ( value & SubgroupFeatureFlagBits::eArithmetic ) + result += "Arithmetic | "; + if ( value & SubgroupFeatureFlagBits::eBallot ) + result += "Ballot | "; + if ( value & SubgroupFeatureFlagBits::eShuffle ) + result += "Shuffle | "; + if ( value & SubgroupFeatureFlagBits::eShuffleRelative ) + result += "ShuffleRelative | "; + if ( value & SubgroupFeatureFlagBits::eClustered ) + result += "Clustered | "; + if ( value & SubgroupFeatureFlagBits::eQuad ) + result += "Quad | "; + if ( value & SubgroupFeatureFlagBits::ePartitionedNV ) + result += "PartitionedNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PeerMemoryFeatureFlagBits::eCopySrc ) + result += "CopySrc | "; + if ( value & PeerMemoryFeatureFlagBits::eCopyDst ) + result += "CopyDst | "; + if ( value & PeerMemoryFeatureFlagBits::eGenericSrc ) + result += "GenericSrc | "; + if ( value & PeerMemoryFeatureFlagBits::eGenericDst ) + result += "GenericDst | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & MemoryAllocateFlagBits::eDeviceMask ) + result += "DeviceMask | "; + if ( value & MemoryAllocateFlagBits::eDeviceAddress ) + result += 
"DeviceAddress | "; + if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) + result += "DeviceAddressCaptureReplay | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) + result += "OpaqueFd | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) + result += "D3D11Texture | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) + result += "D3D11TextureKmt | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) + result += "D3D12Heap | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) + result += "D3D12Resource | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) + result += "DmaBufEXT | "; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) + result += "AndroidHardwareBufferANDROID | "; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) + result += "HostAllocationEXT | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) + result += "HostMappedForeignMemoryEXT | "; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + if ( value & ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA ) + result += "ZirconVmoFUCHSIA | "; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + if ( value & ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV ) + result += "RdmaAddressNV | "; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + if ( value & ExternalMemoryHandleTypeFlagBits::eScreenBufferQNX ) + result += "ScreenBufferQNX | "; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly ) + result += "DedicatedOnly | "; + if ( value & ExternalMemoryFeatureFlagBits::eExportable ) + result += "Exportable | "; + if ( value & ExternalMemoryFeatureFlagBits::eImportable ) + result += "Importable | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd ) + result += "OpaqueFd | "; + if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd ) + result += "SyncFd | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalFenceFeatureFlagBits::eExportable ) + result += "Exportable | "; + if ( 
value & ExternalFenceFeatureFlagBits::eImportable ) + result += "Importable | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & FenceImportFlagBits::eTemporary ) + result += "Temporary | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SemaphoreImportFlagBits::eTemporary ) + result += "Temporary | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) + result += "OpaqueFd | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) + result += "D3D12Fence | "; + if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) + result += "SyncFd | "; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + if ( value & ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA ) + result += "ZirconEventFUCHSIA | "; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalSemaphoreFeatureFlagBits::eExportable ) + result += "Exportable | "; + if ( value & ExternalSemaphoreFeatureFlagBits::eImportable ) + result += "Importable | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_VERSION_1_2 === + + VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DescriptorBindingFlagBits::eUpdateAfterBind ) + result += "UpdateAfterBind | "; + if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) + result += "UpdateUnusedWhilePending | "; + if ( value & DescriptorBindingFlagBits::ePartiallyBound ) + result += "PartiallyBound | "; + if ( value & DescriptorBindingFlagBits::eVariableDescriptorCount ) + result += "VariableDescriptorCount | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value ) + { + if ( !value ) + return "None"; + + std::string result; + if ( value & ResolveModeFlagBits::eSampleZero ) + result += "SampleZero | "; + if ( value & ResolveModeFlagBits::eAverage ) + result += "Average | "; + if ( value & ResolveModeFlagBits::eMin ) + result += "Min | "; + if ( value & ResolveModeFlagBits::eMax ) + result += "Max | "; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + if ( value & ResolveModeFlagBits::eExternalFormatDownsampleANDROID ) + result += "ExternalFormatDownsampleANDROID | "; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SemaphoreWaitFlagBits::eAny ) + result += "Any | "; + + return "{ " + result.substr( 0, result.size() - 3 
) + " }"; + } + + //=== VK_VERSION_1_3 === + + VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineCreationFeedbackFlagBits::eValid ) + result += "Valid | "; + if ( value & PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit ) + result += "ApplicationPipelineCacheHit | "; + if ( value & PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration ) + result += "BasePipelineAcceleration | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ToolPurposeFlagBits::eValidation ) + result += "Validation | "; + if ( value & ToolPurposeFlagBits::eProfiling ) + result += "Profiling | "; + if ( value & ToolPurposeFlagBits::eTracing ) + result += "Tracing | "; + if ( value & ToolPurposeFlagBits::eAdditionalFeatures ) + result += "AdditionalFeatures | "; + if ( value & ToolPurposeFlagBits::eModifyingFeatures ) + result += "ModifyingFeatures | "; + if ( value & ToolPurposeFlagBits::eDebugReportingEXT ) + result += "DebugReportingEXT | "; + if ( value & ToolPurposeFlagBits::eDebugMarkersEXT ) + result += "DebugMarkersEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlags ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags2 value ) + { + if ( !value ) + return "None"; + + std::string result; + if ( value & PipelineStageFlagBits2::eTopOfPipe ) + result += "TopOfPipe | "; + if ( value & PipelineStageFlagBits2::eDrawIndirect ) + result += "DrawIndirect | "; + if ( value & PipelineStageFlagBits2::eVertexInput ) + result += "VertexInput | "; + if ( value & PipelineStageFlagBits2::eVertexShader ) + result += "VertexShader | "; + if ( value & PipelineStageFlagBits2::eTessellationControlShader ) + result += "TessellationControlShader | "; + if ( value & PipelineStageFlagBits2::eTessellationEvaluationShader ) + result += "TessellationEvaluationShader | "; + if ( value & PipelineStageFlagBits2::eGeometryShader ) + result += "GeometryShader | "; + if ( value & PipelineStageFlagBits2::eFragmentShader ) + result += "FragmentShader | "; + if ( value & PipelineStageFlagBits2::eEarlyFragmentTests ) + result += "EarlyFragmentTests | "; + if ( value & PipelineStageFlagBits2::eLateFragmentTests ) + result += "LateFragmentTests | "; + if ( value & PipelineStageFlagBits2::eColorAttachmentOutput ) + result += "ColorAttachmentOutput | "; + if ( value & PipelineStageFlagBits2::eComputeShader ) + result += "ComputeShader | "; + if ( value & PipelineStageFlagBits2::eAllTransfer ) + result += "AllTransfer | "; + if ( value & PipelineStageFlagBits2::eBottomOfPipe ) + result += "BottomOfPipe | "; + if ( value & PipelineStageFlagBits2::eHost ) + result += "Host | "; + if ( value & PipelineStageFlagBits2::eAllGraphics ) + result += "AllGraphics | "; + if ( value & PipelineStageFlagBits2::eAllCommands ) + result += "AllCommands | "; + if ( value & PipelineStageFlagBits2::eCopy ) + result += "Copy | "; + if ( value & PipelineStageFlagBits2::eResolve ) + result += "Resolve | "; + if ( value & PipelineStageFlagBits2::eBlit ) + result += "Blit | "; + if ( value & PipelineStageFlagBits2::eClear ) + result += "Clear | "; + if ( value & PipelineStageFlagBits2::eIndexInput ) + result += "IndexInput | "; + if ( value & 
PipelineStageFlagBits2::eVertexAttributeInput ) + result += "VertexAttributeInput | "; + if ( value & PipelineStageFlagBits2::ePreRasterizationShaders ) + result += "PreRasterizationShaders | "; + if ( value & PipelineStageFlagBits2::eVideoDecodeKHR ) + result += "VideoDecodeKHR | "; + if ( value & PipelineStageFlagBits2::eVideoEncodeKHR ) + result += "VideoEncodeKHR | "; + if ( value & PipelineStageFlagBits2::eTransformFeedbackEXT ) + result += "TransformFeedbackEXT | "; + if ( value & PipelineStageFlagBits2::eConditionalRenderingEXT ) + result += "ConditionalRenderingEXT | "; + if ( value & PipelineStageFlagBits2::eCommandPreprocessNV ) + result += "CommandPreprocessNV | "; + if ( value & PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR ) + result += "FragmentShadingRateAttachmentKHR | "; + if ( value & PipelineStageFlagBits2::eAccelerationStructureBuildKHR ) + result += "AccelerationStructureBuildKHR | "; + if ( value & PipelineStageFlagBits2::eRayTracingShaderKHR ) + result += "RayTracingShaderKHR | "; + if ( value & PipelineStageFlagBits2::eFragmentDensityProcessEXT ) + result += "FragmentDensityProcessEXT | "; + if ( value & PipelineStageFlagBits2::eTaskShaderEXT ) + result += "TaskShaderEXT | "; + if ( value & PipelineStageFlagBits2::eMeshShaderEXT ) + result += "MeshShaderEXT | "; + if ( value & PipelineStageFlagBits2::eSubpassShaderHUAWEI ) + result += "SubpassShaderHUAWEI | "; + if ( value & PipelineStageFlagBits2::eInvocationMaskHUAWEI ) + result += "InvocationMaskHUAWEI | "; + if ( value & PipelineStageFlagBits2::eAccelerationStructureCopyKHR ) + result += "AccelerationStructureCopyKHR | "; + if ( value & PipelineStageFlagBits2::eMicromapBuildEXT ) + result += "MicromapBuildEXT | "; + if ( value & PipelineStageFlagBits2::eClusterCullingShaderHUAWEI ) + result += "ClusterCullingShaderHUAWEI | "; + if ( value & PipelineStageFlagBits2::eOpticalFlowNV ) + result += "OpticalFlowNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( AccessFlags2 value ) + { + if ( !value ) + return "None"; + + std::string result; + if ( value & AccessFlagBits2::eIndirectCommandRead ) + result += "IndirectCommandRead | "; + if ( value & AccessFlagBits2::eIndexRead ) + result += "IndexRead | "; + if ( value & AccessFlagBits2::eVertexAttributeRead ) + result += "VertexAttributeRead | "; + if ( value & AccessFlagBits2::eUniformRead ) + result += "UniformRead | "; + if ( value & AccessFlagBits2::eInputAttachmentRead ) + result += "InputAttachmentRead | "; + if ( value & AccessFlagBits2::eShaderRead ) + result += "ShaderRead | "; + if ( value & AccessFlagBits2::eShaderWrite ) + result += "ShaderWrite | "; + if ( value & AccessFlagBits2::eColorAttachmentRead ) + result += "ColorAttachmentRead | "; + if ( value & AccessFlagBits2::eColorAttachmentWrite ) + result += "ColorAttachmentWrite | "; + if ( value & AccessFlagBits2::eDepthStencilAttachmentRead ) + result += "DepthStencilAttachmentRead | "; + if ( value & AccessFlagBits2::eDepthStencilAttachmentWrite ) + result += "DepthStencilAttachmentWrite | "; + if ( value & AccessFlagBits2::eTransferRead ) + result += "TransferRead | "; + if ( value & AccessFlagBits2::eTransferWrite ) + result += "TransferWrite | "; + if ( value & AccessFlagBits2::eHostRead ) + result += "HostRead | "; + if ( value & AccessFlagBits2::eHostWrite ) + result += "HostWrite | "; + if ( value & AccessFlagBits2::eMemoryRead ) + result += "MemoryRead | "; + if ( value & AccessFlagBits2::eMemoryWrite ) + result += 
"MemoryWrite | "; + if ( value & AccessFlagBits2::eShaderSampledRead ) + result += "ShaderSampledRead | "; + if ( value & AccessFlagBits2::eShaderStorageRead ) + result += "ShaderStorageRead | "; + if ( value & AccessFlagBits2::eShaderStorageWrite ) + result += "ShaderStorageWrite | "; + if ( value & AccessFlagBits2::eVideoDecodeReadKHR ) + result += "VideoDecodeReadKHR | "; + if ( value & AccessFlagBits2::eVideoDecodeWriteKHR ) + result += "VideoDecodeWriteKHR | "; + if ( value & AccessFlagBits2::eVideoEncodeReadKHR ) + result += "VideoEncodeReadKHR | "; + if ( value & AccessFlagBits2::eVideoEncodeWriteKHR ) + result += "VideoEncodeWriteKHR | "; + if ( value & AccessFlagBits2::eTransformFeedbackWriteEXT ) + result += "TransformFeedbackWriteEXT | "; + if ( value & AccessFlagBits2::eTransformFeedbackCounterReadEXT ) + result += "TransformFeedbackCounterReadEXT | "; + if ( value & AccessFlagBits2::eTransformFeedbackCounterWriteEXT ) + result += "TransformFeedbackCounterWriteEXT | "; + if ( value & AccessFlagBits2::eConditionalRenderingReadEXT ) + result += "ConditionalRenderingReadEXT | "; + if ( value & AccessFlagBits2::eCommandPreprocessReadNV ) + result += "CommandPreprocessReadNV | "; + if ( value & AccessFlagBits2::eCommandPreprocessWriteNV ) + result += "CommandPreprocessWriteNV | "; + if ( value & AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR ) + result += "FragmentShadingRateAttachmentReadKHR | "; + if ( value & AccessFlagBits2::eAccelerationStructureReadKHR ) + result += "AccelerationStructureReadKHR | "; + if ( value & AccessFlagBits2::eAccelerationStructureWriteKHR ) + result += "AccelerationStructureWriteKHR | "; + if ( value & AccessFlagBits2::eFragmentDensityMapReadEXT ) + result += "FragmentDensityMapReadEXT | "; + if ( value & AccessFlagBits2::eColorAttachmentReadNoncoherentEXT ) + result += "ColorAttachmentReadNoncoherentEXT | "; + if ( value & AccessFlagBits2::eDescriptorBufferReadEXT ) + result += "DescriptorBufferReadEXT | "; + if ( value & AccessFlagBits2::eInvocationMaskReadHUAWEI ) + result += "InvocationMaskReadHUAWEI | "; + if ( value & AccessFlagBits2::eShaderBindingTableReadKHR ) + result += "ShaderBindingTableReadKHR | "; + if ( value & AccessFlagBits2::eMicromapReadEXT ) + result += "MicromapReadEXT | "; + if ( value & AccessFlagBits2::eMicromapWriteEXT ) + result += "MicromapWriteEXT | "; + if ( value & AccessFlagBits2::eOpticalFlowReadNV ) + result += "OpticalFlowReadNV | "; + if ( value & AccessFlagBits2::eOpticalFlowWriteNV ) + result += "OpticalFlowWriteNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( SubmitFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SubmitFlagBits::eProtected ) + result += "Protected | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( RenderingFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & RenderingFlagBits::eContentsSecondaryCommandBuffers ) + result += "ContentsSecondaryCommandBuffers | "; + if ( value & RenderingFlagBits::eSuspending ) + result += "Suspending | "; + if ( value & RenderingFlagBits::eResuming ) + result += "Resuming | "; + if ( value & RenderingFlagBits::eContentsInlineEXT ) + result += "ContentsInlineEXT | "; + if ( value & RenderingFlagBits::eEnableLegacyDitheringEXT ) + result += "EnableLegacyDitheringEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + 
VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags2 value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & FormatFeatureFlagBits2::eSampledImage ) + result += "SampledImage | "; + if ( value & FormatFeatureFlagBits2::eStorageImage ) + result += "StorageImage | "; + if ( value & FormatFeatureFlagBits2::eStorageImageAtomic ) + result += "StorageImageAtomic | "; + if ( value & FormatFeatureFlagBits2::eUniformTexelBuffer ) + result += "UniformTexelBuffer | "; + if ( value & FormatFeatureFlagBits2::eStorageTexelBuffer ) + result += "StorageTexelBuffer | "; + if ( value & FormatFeatureFlagBits2::eStorageTexelBufferAtomic ) + result += "StorageTexelBufferAtomic | "; + if ( value & FormatFeatureFlagBits2::eVertexBuffer ) + result += "VertexBuffer | "; + if ( value & FormatFeatureFlagBits2::eColorAttachment ) + result += "ColorAttachment | "; + if ( value & FormatFeatureFlagBits2::eColorAttachmentBlend ) + result += "ColorAttachmentBlend | "; + if ( value & FormatFeatureFlagBits2::eDepthStencilAttachment ) + result += "DepthStencilAttachment | "; + if ( value & FormatFeatureFlagBits2::eBlitSrc ) + result += "BlitSrc | "; + if ( value & FormatFeatureFlagBits2::eBlitDst ) + result += "BlitDst | "; + if ( value & FormatFeatureFlagBits2::eSampledImageFilterLinear ) + result += "SampledImageFilterLinear | "; + if ( value & FormatFeatureFlagBits2::eSampledImageFilterCubic ) + result += "SampledImageFilterCubic | "; + if ( value & FormatFeatureFlagBits2::eTransferSrc ) + result += "TransferSrc | "; + if ( value & FormatFeatureFlagBits2::eTransferDst ) + result += "TransferDst | "; + if ( value & FormatFeatureFlagBits2::eSampledImageFilterMinmax ) + result += "SampledImageFilterMinmax | "; + if ( value & FormatFeatureFlagBits2::eMidpointChromaSamples ) + result += "MidpointChromaSamples | "; + if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter ) + result += "SampledImageYcbcrConversionLinearFilter | "; + if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter ) + result += "SampledImageYcbcrConversionSeparateReconstructionFilter | "; + if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit ) + result += "SampledImageYcbcrConversionChromaReconstructionExplicit | "; + if ( value & FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) + result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | "; + if ( value & FormatFeatureFlagBits2::eDisjoint ) + result += "Disjoint | "; + if ( value & FormatFeatureFlagBits2::eCositedChromaSamples ) + result += "CositedChromaSamples | "; + if ( value & FormatFeatureFlagBits2::eStorageReadWithoutFormat ) + result += "StorageReadWithoutFormat | "; + if ( value & FormatFeatureFlagBits2::eStorageWriteWithoutFormat ) + result += "StorageWriteWithoutFormat | "; + if ( value & FormatFeatureFlagBits2::eSampledImageDepthComparison ) + result += "SampledImageDepthComparison | "; + if ( value & FormatFeatureFlagBits2::eVideoDecodeOutputKHR ) + result += "VideoDecodeOutputKHR | "; + if ( value & FormatFeatureFlagBits2::eVideoDecodeDpbKHR ) + result += "VideoDecodeDpbKHR | "; + if ( value & FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR ) + result += "AccelerationStructureVertexBufferKHR | "; + if ( value & FormatFeatureFlagBits2::eFragmentDensityMapEXT ) + result += "FragmentDensityMapEXT | "; + if ( value & FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR ) + result 
+= "FragmentShadingRateAttachmentKHR | "; + if ( value & FormatFeatureFlagBits2::eHostImageTransferEXT ) + result += "HostImageTransferEXT | "; + if ( value & FormatFeatureFlagBits2::eVideoEncodeInputKHR ) + result += "VideoEncodeInputKHR | "; + if ( value & FormatFeatureFlagBits2::eVideoEncodeDpbKHR ) + result += "VideoEncodeDpbKHR | "; + if ( value & FormatFeatureFlagBits2::eLinearColorAttachmentNV ) + result += "LinearColorAttachmentNV | "; + if ( value & FormatFeatureFlagBits2::eWeightImageQCOM ) + result += "WeightImageQCOM | "; + if ( value & FormatFeatureFlagBits2::eWeightSampledImageQCOM ) + result += "WeightSampledImageQCOM | "; + if ( value & FormatFeatureFlagBits2::eBlockMatchingQCOM ) + result += "BlockMatchingQCOM | "; + if ( value & FormatFeatureFlagBits2::eBoxFilterSampledQCOM ) + result += "BoxFilterSampledQCOM | "; + if ( value & FormatFeatureFlagBits2::eOpticalFlowImageNV ) + result += "OpticalFlowImageNV | "; + if ( value & FormatFeatureFlagBits2::eOpticalFlowVectorNV ) + result += "OpticalFlowVectorNV | "; + if ( value & FormatFeatureFlagBits2::eOpticalFlowCostNV ) + result += "OpticalFlowCostNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_surface === + + VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & CompositeAlphaFlagBitsKHR::eOpaque ) + result += "Opaque | "; + if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied ) + result += "PreMultiplied | "; + if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied ) + result += "PostMultiplied | "; + if ( value & CompositeAlphaFlagBitsKHR::eInherit ) + result += "Inherit | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_swapchain === + + VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) + result += "SplitInstanceBindRegions | "; + if ( value & SwapchainCreateFlagBitsKHR::eProtected ) + result += "Protected | "; + if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) + result += "MutableFormat | "; + if ( value & SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT ) + result += "DeferredMemoryAllocationEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal ) + result += "Local | "; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote ) + result += "Remote | "; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum ) + result += "Sum | "; + if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) + result += "LocalMultiDevice | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_display === + + VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque ) + result += "Opaque | "; + if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal ) + result += "Global | "; + if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) + result += "PerPixel | "; + if ( value & 
DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) + result += "PerPixelPremultiplied | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SurfaceTransformFlagBitsKHR::eIdentity ) + result += "Identity | "; + if ( value & SurfaceTransformFlagBitsKHR::eRotate90 ) + result += "Rotate90 | "; + if ( value & SurfaceTransformFlagBitsKHR::eRotate180 ) + result += "Rotate180 | "; + if ( value & SurfaceTransformFlagBitsKHR::eRotate270 ) + result += "Rotate270 | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror ) + result += "HorizontalMirror | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) + result += "HorizontalMirrorRotate90 | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) + result += "HorizontalMirrorRotate180 | "; + if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) + result += "HorizontalMirrorRotate270 | "; + if ( value & SurfaceTransformFlagBitsKHR::eInherit ) + result += "Inherit | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DebugReportFlagBitsEXT::eInformation ) + result += "Information | "; + if ( value & DebugReportFlagBitsEXT::eWarning ) + result += "Warning | "; + if ( value & DebugReportFlagBitsEXT::ePerformanceWarning ) + result += "PerformanceWarning | "; + if ( value & DebugReportFlagBitsEXT::eError ) + result += "Error | "; + if ( value & DebugReportFlagBitsEXT::eDebug ) + result += "Debug | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_video_queue === + + VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagsKHR value ) + { + if ( !value ) + return "None"; + + std::string result; + if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH264 ) + result += "EncodeH264 | "; + if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH265 ) + result += "EncodeH265 | "; + if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH264 ) + result += "DecodeH264 | "; + if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH265 ) + result += 
"DecodeH265 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagsKHR value ) + { + if ( !value ) + return "Invalid"; + + std::string result; + if ( value & VideoChromaSubsamplingFlagBitsKHR::eMonochrome ) + result += "Monochrome | "; + if ( value & VideoChromaSubsamplingFlagBitsKHR::e420 ) + result += "420 | "; + if ( value & VideoChromaSubsamplingFlagBitsKHR::e422 ) + result += "422 | "; + if ( value & VideoChromaSubsamplingFlagBitsKHR::e444 ) + result += "444 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagsKHR value ) + { + if ( !value ) + return "Invalid"; + + std::string result; + if ( value & VideoComponentBitDepthFlagBitsKHR::e8 ) + result += "8 | "; + if ( value & VideoComponentBitDepthFlagBitsKHR::e10 ) + result += "10 | "; + if ( value & VideoComponentBitDepthFlagBitsKHR::e12 ) + result += "12 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoCapabilityFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoCapabilityFlagBitsKHR::eProtectedContent ) + result += "ProtectedContent | "; + if ( value & VideoCapabilityFlagBitsKHR::eSeparateReferenceImages ) + result += "SeparateReferenceImages | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoSessionCreateFlagBitsKHR::eProtectedContent ) + result += "ProtectedContent | "; + if ( value & VideoSessionCreateFlagBitsKHR::eAllowEncodeParameterOptimizations ) + result += "AllowEncodeParameterOptimizations | "; + if ( value & VideoSessionCreateFlagBitsKHR::eInlineQueries ) + result += "InlineQueries | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoSessionParametersCreateFlagsKHR ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagsKHR ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagsKHR ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoCodingControlFlagBitsKHR::eReset ) + result += "Reset | "; + if ( value & VideoCodingControlFlagBitsKHR::eEncodeRateControl ) + result += "EncodeRateControl | "; + if ( value & VideoCodingControlFlagBitsKHR::eEncodeQualityLevel ) + result += "EncodeQualityLevel | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_video_decode_queue === + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeCapabilityFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide ) + result += "DpbAndOutputCoincide | "; + if ( value & VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct ) + result += "DpbAndOutputDistinct | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeUsageFlagsKHR value ) + { + if ( !value ) + return "Default"; + + std::string result; + if ( value & VideoDecodeUsageFlagBitsKHR::eTranscoding ) + result += "Transcoding | "; + if ( value & 
VideoDecodeUsageFlagBitsKHR::eOffline ) + result += "Offline | "; + if ( value & VideoDecodeUsageFlagBitsKHR::eStreaming ) + result += "Streaming | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeFlagsKHR ) + { + return "{}"; + } + + //=== VK_EXT_transform_feedback === + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_KHR_video_encode_h264 === + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilityFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eHrdCompliance ) + result += "HrdCompliance | "; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::ePredictionWeightTableGenerated ) + result += "PredictionWeightTableGenerated | "; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eRowUnalignedSlice ) + result += "RowUnalignedSlice | "; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eDifferentSliceType ) + result += "DifferentSliceType | "; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL0List ) + result += "BFrameInL0List | "; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL1List ) + result += "BFrameInL1List | "; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp ) + result += "PerPictureTypeMinMaxQp | "; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::ePerSliceConstantQp ) + result += "PerSliceConstantQp | "; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eGeneratePrefixNalu ) + result += "GeneratePrefixNalu | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264StdFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH264StdFlagBitsKHR::eSeparateColorPlaneFlagSet ) + result += "SeparateColorPlaneFlagSet | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eQpprimeYZeroTransformBypassFlagSet ) + result += "QpprimeYZeroTransformBypassFlagSet | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eScalingMatrixPresentFlagSet ) + result += "ScalingMatrixPresentFlagSet | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eChromaQpIndexOffset ) + result += "ChromaQpIndexOffset | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eSecondChromaQpIndexOffset ) + result += "SecondChromaQpIndexOffset | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::ePicInitQpMinus26 ) + result += "PicInitQpMinus26 | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eWeightedPredFlagSet ) + result += "WeightedPredFlagSet | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcExplicit ) + result += "WeightedBipredIdcExplicit | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcImplicit ) + result += "WeightedBipredIdcImplicit | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eTransform8X8ModeFlagSet ) + result += "Transform8X8ModeFlagSet | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDirectSpatialMvPredFlagUnset ) + result += "DirectSpatialMvPredFlagUnset | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagUnset ) + result += "EntropyCodingModeFlagUnset | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagSet ) + result += "EntropyCodingModeFlagSet | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDirect8X8InferenceFlagUnset ) + result += "Direct8X8InferenceFlagUnset | "; + if ( value & 
VideoEncodeH264StdFlagBitsKHR::eConstrainedIntraPredFlagSet ) + result += "ConstrainedIntraPredFlagSet | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterDisabled ) + result += "DeblockingFilterDisabled | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterEnabled ) + result += "DeblockingFilterEnabled | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterPartial ) + result += "DeblockingFilterPartial | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eSliceQpDelta ) + result += "SliceQpDelta | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDifferentSliceQpDelta ) + result += "DifferentSliceQpDelta | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264RateControlFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH264RateControlFlagBitsKHR::eAttemptHrdCompliance ) + result += "AttemptHrdCompliance | "; + if ( value & VideoEncodeH264RateControlFlagBitsKHR::eRegularGop ) + result += "RegularGop | "; + if ( value & VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternFlat ) + result += "ReferencePatternFlat | "; + if ( value & VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternDyadic ) + result += "ReferencePatternDyadic | "; + if ( value & VideoEncodeH264RateControlFlagBitsKHR::eTemporalLayerPatternDyadic ) + result += "TemporalLayerPatternDyadic | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_video_encode_h265 === + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CapabilityFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eHrdCompliance ) + result += "HrdCompliance | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::ePredictionWeightTableGenerated ) + result += "PredictionWeightTableGenerated | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eRowUnalignedSliceSegment ) + result += "RowUnalignedSliceSegment | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eDifferentSliceSegmentType ) + result += "DifferentSliceSegmentType | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL0List ) + result += "BFrameInL0List | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL1List ) + result += "BFrameInL1List | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp ) + result += "PerPictureTypeMinMaxQp | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::ePerSliceSegmentConstantQp ) + result += "PerSliceSegmentConstantQp | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eMultipleTilesPerSliceSegment ) + result += "MultipleTilesPerSliceSegment | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eMultipleSliceSegmentsPerTile ) + result += "MultipleSliceSegmentsPerTile | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265StdFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH265StdFlagBitsKHR::eSeparateColorPlaneFlagSet ) + result += "SeparateColorPlaneFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eSampleAdaptiveOffsetEnabledFlagSet ) + result += "SampleAdaptiveOffsetEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eScalingListDataPresentFlagSet ) + result += "ScalingListDataPresentFlagSet | "; + if ( value & 
VideoEncodeH265StdFlagBitsKHR::ePcmEnabledFlagSet ) + result += "PcmEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eSpsTemporalMvpEnabledFlagSet ) + result += "SpsTemporalMvpEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eInitQpMinus26 ) + result += "InitQpMinus26 | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eWeightedPredFlagSet ) + result += "WeightedPredFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eWeightedBipredFlagSet ) + result += "WeightedBipredFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eLog2ParallelMergeLevelMinus2 ) + result += "Log2ParallelMergeLevelMinus2 | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eSignDataHidingEnabledFlagSet ) + result += "SignDataHidingEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagSet ) + result += "TransformSkipEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagUnset ) + result += "TransformSkipEnabledFlagUnset | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::ePpsSliceChromaQpOffsetsPresentFlagSet ) + result += "PpsSliceChromaQpOffsetsPresentFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eTransquantBypassEnabledFlagSet ) + result += "TransquantBypassEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eConstrainedIntraPredFlagSet ) + result += "ConstrainedIntraPredFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eEntropyCodingSyncEnabledFlagSet ) + result += "EntropyCodingSyncEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eDeblockingFilterOverrideEnabledFlagSet ) + result += "DeblockingFilterOverrideEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentsEnabledFlagSet ) + result += "DependentSliceSegmentsEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentFlagSet ) + result += "DependentSliceSegmentFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eSliceQpDelta ) + result += "SliceQpDelta | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eDifferentSliceQpDelta ) + result += "DifferentSliceQpDelta | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CtbSizeFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH265CtbSizeFlagBitsKHR::e16 ) + result += "16 | "; + if ( value & VideoEncodeH265CtbSizeFlagBitsKHR::e32 ) + result += "32 | "; + if ( value & VideoEncodeH265CtbSizeFlagBitsKHR::e64 ) + result += "64 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265TransformBlockSizeFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH265TransformBlockSizeFlagBitsKHR::e4 ) + result += "4 | "; + if ( value & VideoEncodeH265TransformBlockSizeFlagBitsKHR::e8 ) + result += "8 | "; + if ( value & VideoEncodeH265TransformBlockSizeFlagBitsKHR::e16 ) + result += "16 | "; + if ( value & VideoEncodeH265TransformBlockSizeFlagBitsKHR::e32 ) + result += "32 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265RateControlFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeH265RateControlFlagBitsKHR::eAttemptHrdCompliance ) + result += "AttemptHrdCompliance | "; + if ( value & 
VideoEncodeH265RateControlFlagBitsKHR::eRegularGop ) + result += "RegularGop | "; + if ( value & VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternFlat ) + result += "ReferencePatternFlat | "; + if ( value & VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternDyadic ) + result += "ReferencePatternDyadic | "; + if ( value & VideoEncodeH265RateControlFlagBitsKHR::eTemporalSubLayerPatternDyadic ) + result += "TemporalSubLayerPatternDyadic | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_video_decode_h264 === + + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264PictureLayoutFlagsKHR value ) + { + if ( !value ) + return "Progressive"; + + std::string result; + if ( value & VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedInterleavedLines ) + result += "InterlacedInterleavedLines | "; + if ( value & VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedSeparatePlanes ) + result += "InterlacedSeparatePlanes | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) + result += "OpaqueWin32 | "; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) + result += "OpaqueWin32Kmt | "; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) + result += "D3D11Image | "; + if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) + result += "D3D11ImageKmt | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) + result += "DedicatedOnly | "; + if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable ) + result += "Exportable | "; + if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable ) + result += "Importable | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_EXT_conditional_rendering === + + VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ConditionalRenderingFlagBitsEXT::eInverted ) + result += "Inverted | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_EXT_display_surface_counter === + + VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & SurfaceCounterFlagBitsEXT::eVblank ) + result += "Vblank | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_viewport_swizzle === + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_discard_rectangles === + + VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT ) + { 
+ return "{}"; + } + + //=== VK_EXT_conservative_rasterization === + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_EXT_depth_clip_enable === + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_KHR_performance_query === + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) + result += "PerformanceImpacting | "; + if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) + result += "ConcurrentlyImpacted | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR ) + { + return "{}"; + } + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) + result += "Verbose | "; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) + result += "Info | "; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) + result += "Warning | "; + if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError ) + result += "Error | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) + result += "General | "; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation ) + result += "Validation | "; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) + result += "Performance | "; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding ) + result += "DeviceAddressBinding | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_NV_fragment_coverage_to_color === + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_KHR_acceleration_structure === + + VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & GeometryFlagBitsKHR::eOpaque ) + result += "Opaque | "; + if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) + result += "NoDuplicateAnyHitInvocation | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & 
GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) + result += "TriangleFacingCullDisable | "; + if ( value & GeometryInstanceFlagBitsKHR::eTriangleFlipFacing ) + result += "TriangleFlipFacing | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque ) + result += "ForceOpaque | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque ) + result += "ForceNoOpaque | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT ) + result += "ForceOpacityMicromap2StateEXT | "; + if ( value & GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT ) + result += "DisableOpacityMicromapsEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) + result += "AllowUpdate | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) + result += "AllowCompaction | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) + result += "PreferFastTrace | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) + result += "PreferFastBuild | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory ) + result += "LowMemory | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eMotionNV ) + result += "MotionNV | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT ) + result += "AllowOpacityMicromapUpdateEXT | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT ) + result += "AllowDisableOpacityMicromapsEXT | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT ) + result += "AllowOpacityMicromapDataUpdateEXT | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowDisplacementMicromapUpdateNV ) + result += "AllowDisplacementMicromapUpdateNV | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowDataAccess ) + result += "AllowDataAccess | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay ) + result += "DeviceAddressCaptureReplay | "; + if ( value & AccelerationStructureCreateFlagBitsKHR::eDescriptorBufferCaptureReplayEXT ) + result += "DescriptorBufferCaptureReplayEXT | "; + if ( value & AccelerationStructureCreateFlagBitsKHR::eMotionNV ) + result += "MotionNV | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_framebuffer_mixed_samples === + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_validation_cache === + + VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_AMD_pipeline_compiler_control === + + VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD ) + { + return "{}"; + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( 
VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_AMD_shader_core_properties2 === + + VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD ) + { + return "{}"; + } + + //=== VK_NV_coverage_reduction_mode === + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_headless_surface === + + VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT ) + { + return "{}"; + } + + //=== VK_EXT_host_image_copy === + + VULKAN_HPP_INLINE std::string to_string( HostImageCopyFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & HostImageCopyFlagBitsEXT::eMemcpy ) + result += "Memcpy | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_map_memory2 === + + VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagsKHR ) + { + return "{}"; + } + + //=== VK_EXT_surface_maintenance1 === + + VULKAN_HPP_INLINE std::string to_string( PresentScalingFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PresentScalingFlagBitsEXT::eOneToOne ) + result += "OneToOne | "; + if ( value & PresentScalingFlagBitsEXT::eAspectRatioStretch ) + result += "AspectRatioStretch | "; + if ( value & PresentScalingFlagBitsEXT::eStretch ) + result += "Stretch | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( PresentGravityFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PresentGravityFlagBitsEXT::eMin ) + result += "Min | "; + if ( value & PresentGravityFlagBitsEXT::eMax ) + result += "Max | "; + if ( value & PresentGravityFlagBitsEXT::eCentered ) + result += "Centered | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_device_generated_commands === + + VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & IndirectStateFlagBitsNV::eFlagFrontface ) + result += "FlagFrontface | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) + result += "ExplicitPreprocess | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) + result += "IndexedSequences | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) + result += "UnorderedSequences | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_EXT_device_memory_report === + + VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagsEXT ) + { + return "{}"; + } + + //=== VK_KHR_video_encode_queue === + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagsKHR ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeCapabilityFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes ) + result += "PrecedingExternallyEncodedBytes | "; + if ( value & VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection ) + result += 
"InsufficientstreamBufferRangeDetectionBit | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFeedbackFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeFeedbackFlagBitsKHR::estreamBufferOffsetBit ) + result += "streamBufferOffsetBit | "; + if ( value & VideoEncodeFeedbackFlagBitsKHR::estreamBytesWrittenBit ) + result += "streamBytesWrittenBit | "; + if ( value & VideoEncodeFeedbackFlagBitsKHR::estreamHasOverridesBit ) + result += "streamHasOverridesBit | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeUsageFlagsKHR value ) + { + if ( !value ) + return "Default"; + + std::string result; + if ( value & VideoEncodeUsageFlagBitsKHR::eTranscoding ) + result += "Transcoding | "; + if ( value & VideoEncodeUsageFlagBitsKHR::eStreaming ) + result += "Streaming | "; + if ( value & VideoEncodeUsageFlagBitsKHR::eRecording ) + result += "Recording | "; + if ( value & VideoEncodeUsageFlagBitsKHR::eConferencing ) + result += "Conferencing | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeContentFlagsKHR value ) + { + if ( !value ) + return "Default"; + + std::string result; + if ( value & VideoEncodeContentFlagBitsKHR::eCamera ) + result += "Camera | "; + if ( value & VideoEncodeContentFlagBitsKHR::eDesktop ) + result += "Desktop | "; + if ( value & VideoEncodeContentFlagBitsKHR::eRendered ) + result += "Rendered | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagsKHR ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagsKHR value ) + { + if ( !value ) + return "Default"; + + std::string result; + if ( value & VideoEncodeRateControlModeFlagBitsKHR::eDisabled ) + result += "Disabled | "; + if ( value & VideoEncodeRateControlModeFlagBitsKHR::eCbr ) + result += "Cbr | "; + if ( value & VideoEncodeRateControlModeFlagBitsKHR::eVbr ) + result += "Vbr | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_device_diagnostics_config === + + VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) + result += "EnableShaderDebugInfo | "; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) + result += "EnableResourceTracking | "; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) + result += "EnableAutomaticCheckpoints | "; + if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderErrorReporting ) + result += "EnableShaderErrorReporting | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + VULKAN_HPP_INLINE std::string to_string( ExportMetalObjectTypeFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalDevice ) + result += "MetalDevice | "; + if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalCommandQueue ) + result += "MetalCommandQueue | "; + if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalBuffer ) + result += "MetalBuffer | "; + if ( value & 
ExportMetalObjectTypeFlagBitsEXT::eMetalTexture ) + result += "MetalTexture | "; + if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalIosurface ) + result += "MetalIosurface | "; + if ( value & ExportMetalObjectTypeFlagBitsEXT::eMetalSharedEvent ) + result += "MetalSharedEvent | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_graphics_pipeline_library === + + VULKAN_HPP_INLINE std::string to_string( GraphicsPipelineLibraryFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface ) + result += "VertexInputInterface | "; + if ( value & GraphicsPipelineLibraryFlagBitsEXT::ePreRasterizationShaders ) + result += "PreRasterizationShaders | "; + if ( value & GraphicsPipelineLibraryFlagBitsEXT::eFragmentShader ) + result += "FragmentShader | "; + if ( value & GraphicsPipelineLibraryFlagBitsEXT::eFragmentOutputInterface ) + result += "FragmentOutputInterface | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_ray_tracing_motion_blur === + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInfoFlagsNV ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInstanceFlagsNV ) + { + return "{}"; + } + + //=== VK_EXT_image_compression_control === + + VULKAN_HPP_INLINE std::string to_string( ImageCompressionFlagsEXT value ) + { + if ( !value ) + return "Default"; + + std::string result; + if ( value & ImageCompressionFlagBitsEXT::eFixedRateDefault ) + result += "FixedRateDefault | "; + if ( value & ImageCompressionFlagBitsEXT::eFixedRateExplicit ) + result += "FixedRateExplicit | "; + if ( value & ImageCompressionFlagBitsEXT::eDisabled ) + result += "Disabled | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ImageCompressionFixedRateFlagsEXT value ) + { + if ( !value ) + return "None"; + + std::string result; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e1Bpc ) + result += "1Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e2Bpc ) + result += "2Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e3Bpc ) + result += "3Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e4Bpc ) + result += "4Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e5Bpc ) + result += "5Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e6Bpc ) + result += "6Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e7Bpc ) + result += "7Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e8Bpc ) + result += "8Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e9Bpc ) + result += "9Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e10Bpc ) + result += "10Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e11Bpc ) + result += "11Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e12Bpc ) + result += "12Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e13Bpc ) + result += "13Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e14Bpc ) + result += "14Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e15Bpc ) + result += "15Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e16Bpc ) + result += "16Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e17Bpc ) + result += "17Bpc | "; + if ( value & 
ImageCompressionFixedRateFlagBitsEXT::e18Bpc ) + result += "18Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e19Bpc ) + result += "19Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e20Bpc ) + result += "20Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e21Bpc ) + result += "21Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e22Bpc ) + result += "22Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e23Bpc ) + result += "23Bpc | "; + if ( value & ImageCompressionFixedRateFlagBitsEXT::e24Bpc ) + result += "24Bpc | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_device_address_binding_report === + + VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DeviceAddressBindingFlagBitsEXT::eInternalObject ) + result += "InternalObject | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + VULKAN_HPP_INLINE std::string to_string( ImageFormatConstraintsFlagsFUCHSIA ) + { + return "{}"; + } + + VULKAN_HPP_INLINE std::string to_string( ImageConstraintsInfoFlagsFUCHSIA value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely ) + result += "CpuReadRarely | "; + if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften ) + result += "CpuReadOften | "; + if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely ) + result += "CpuWriteRarely | "; + if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften ) + result += "CpuWriteOften | "; + if ( value & ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional ) + result += "ProtectedOptional | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_frame_boundary === + + VULKAN_HPP_INLINE std::string to_string( FrameBoundaryFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & FrameBoundaryFlagBitsEXT::eFrameEnd ) + result += "FrameEnd | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagsQNX ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_opacity_micromap === + + VULKAN_HPP_INLINE std::string to_string( BuildMicromapFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & BuildMicromapFlagBitsEXT::ePreferFastTrace ) + result += "PreferFastTrace | "; + if ( value & BuildMicromapFlagBitsEXT::ePreferFastBuild ) + result += "PreferFastBuild | "; + if ( value & BuildMicromapFlagBitsEXT::eAllowCompaction ) + result += "AllowCompaction | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( MicromapCreateFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay ) + result += "DeviceAddressCaptureReplay | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + 
" }"; + } + + //=== VK_ARM_scheduling_controls === + + VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceSchedulingControlsFlagsARM value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount ) + result += "ShaderCoreCount | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_memory_decompression === + + VULKAN_HPP_INLINE std::string to_string( MemoryDecompressionMethodFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & MemoryDecompressionMethodFlagBitsNV::eGdeflate10 ) + result += "Gdeflate10 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_LUNARG_direct_driver_loading === + + VULKAN_HPP_INLINE std::string to_string( DirectDriverLoadingFlagsLUNARG ) + { + return "{}"; + } + + //=== VK_NV_optical_flow === + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowUsageFlagsNV value ) + { + if ( !value ) + return "Unknown"; + + std::string result; + if ( value & OpticalFlowUsageFlagBitsNV::eInput ) + result += "Input | "; + if ( value & OpticalFlowUsageFlagBitsNV::eOutput ) + result += "Output | "; + if ( value & OpticalFlowUsageFlagBitsNV::eHint ) + result += "Hint | "; + if ( value & OpticalFlowUsageFlagBitsNV::eCost ) + result += "Cost | "; + if ( value & OpticalFlowUsageFlagBitsNV::eGlobalFlow ) + result += "GlobalFlow | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowGridSizeFlagsNV value ) + { + if ( !value ) + return "Unknown"; + + std::string result; + if ( value & OpticalFlowGridSizeFlagBitsNV::e1X1 ) + result += "1X1 | "; + if ( value & OpticalFlowGridSizeFlagBitsNV::e2X2 ) + result += "2X2 | "; + if ( value & OpticalFlowGridSizeFlagBitsNV::e4X4 ) + result += "4X4 | "; + if ( value & OpticalFlowGridSizeFlagBitsNV::e8X8 ) + result += "8X8 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionCreateFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableHint ) + result += "EnableHint | "; + if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableCost ) + result += "EnableCost | "; + if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow ) + result += "EnableGlobalFlow | "; + if ( value & OpticalFlowSessionCreateFlagBitsNV::eAllowRegions ) + result += "AllowRegions | "; + if ( value & OpticalFlowSessionCreateFlagBitsNV::eBothDirections ) + result += "BothDirections | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowExecuteFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints ) + result += "DisableTemporalHints | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_maintenance5 === + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags2KHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineCreateFlagBits2KHR::eDisableOptimization ) + result += "DisableOptimization | "; + if ( value & PipelineCreateFlagBits2KHR::eAllowDerivatives ) + result += "AllowDerivatives | "; + if ( value & PipelineCreateFlagBits2KHR::eDerivative ) + result += "Derivative | "; + if ( value & 
PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex ) + result += "ViewIndexFromDeviceIndex | "; + if ( value & PipelineCreateFlagBits2KHR::eDispatchBase ) + result += "DispatchBase | "; + if ( value & PipelineCreateFlagBits2KHR::eDeferCompileNV ) + result += "DeferCompileNV | "; + if ( value & PipelineCreateFlagBits2KHR::eCaptureStatistics ) + result += "CaptureStatistics | "; + if ( value & PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations ) + result += "CaptureInternalRepresentations | "; + if ( value & PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired ) + result += "FailOnPipelineCompileRequired | "; + if ( value & PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure ) + result += "EarlyReturnOnFailure | "; + if ( value & PipelineCreateFlagBits2KHR::eLinkTimeOptimizationEXT ) + result += "LinkTimeOptimizationEXT | "; + if ( value & PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfoEXT ) + result += "RetainLinkTimeOptimizationInfoEXT | "; + if ( value & PipelineCreateFlagBits2KHR::eLibrary ) + result += "Library | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles ) + result += "RayTracingSkipTriangles | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs ) + result += "RayTracingSkipAabbs | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders ) + result += "RayTracingNoNullAnyHitShaders | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders ) + result += "RayTracingNoNullClosestHitShaders | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders ) + result += "RayTracingNoNullMissShaders | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders ) + result += "RayTracingNoNullIntersectionShaders | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay ) + result += "RayTracingShaderGroupHandleCaptureReplay | "; + if ( value & PipelineCreateFlagBits2KHR::eIndirectBindableNV ) + result += "IndirectBindableNV | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingAllowMotionNV ) + result += "RayTracingAllowMotionNV | "; + if ( value & PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment ) + result += "RenderingFragmentShadingRateAttachment | "; + if ( value & PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachmentEXT ) + result += "RenderingFragmentDensityMapAttachmentEXT | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromapEXT ) + result += "RayTracingOpacityMicromapEXT | "; + if ( value & PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoopEXT ) + result += "ColorAttachmentFeedbackLoopEXT | "; + if ( value & PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoopEXT ) + result += "DepthStencilAttachmentFeedbackLoopEXT | "; + if ( value & PipelineCreateFlagBits2KHR::eNoProtectedAccessEXT ) + result += "NoProtectedAccessEXT | "; + if ( value & PipelineCreateFlagBits2KHR::eProtectedAccessOnlyEXT ) + result += "ProtectedAccessOnlyEXT | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingDisplacementMicromapNV ) + result += "RayTracingDisplacementMicromapNV | "; + if ( value & PipelineCreateFlagBits2KHR::eDescriptorBufferEXT ) + result += "DescriptorBufferEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags2KHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & BufferUsageFlagBits2KHR::eTransferSrc ) + result += 
"TransferSrc | "; + if ( value & BufferUsageFlagBits2KHR::eTransferDst ) + result += "TransferDst | "; + if ( value & BufferUsageFlagBits2KHR::eUniformTexelBuffer ) + result += "UniformTexelBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eStorageTexelBuffer ) + result += "StorageTexelBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eUniformBuffer ) + result += "UniformBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eStorageBuffer ) + result += "StorageBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eIndexBuffer ) + result += "IndexBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eVertexBuffer ) + result += "VertexBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eIndirectBuffer ) + result += "IndirectBuffer | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX ) + result += "ExecutionGraphScratchAMDX | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BufferUsageFlagBits2KHR::eConditionalRenderingEXT ) + result += "ConditionalRenderingEXT | "; + if ( value & BufferUsageFlagBits2KHR::eShaderBindingTable ) + result += "ShaderBindingTable | "; + if ( value & BufferUsageFlagBits2KHR::eTransformFeedbackBufferEXT ) + result += "TransformFeedbackBufferEXT | "; + if ( value & BufferUsageFlagBits2KHR::eTransformFeedbackCounterBufferEXT ) + result += "TransformFeedbackCounterBufferEXT | "; + if ( value & BufferUsageFlagBits2KHR::eVideoDecodeSrc ) + result += "VideoDecodeSrc | "; + if ( value & BufferUsageFlagBits2KHR::eVideoDecodeDst ) + result += "VideoDecodeDst | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits2KHR::eVideoEncodeDst ) + result += "VideoEncodeDst | "; + if ( value & BufferUsageFlagBits2KHR::eVideoEncodeSrc ) + result += "VideoEncodeSrc | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BufferUsageFlagBits2KHR::eShaderDeviceAddress ) + result += "ShaderDeviceAddress | "; + if ( value & BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly ) + result += "AccelerationStructureBuildInputReadOnly | "; + if ( value & BufferUsageFlagBits2KHR::eAccelerationStructureStorage ) + result += "AccelerationStructureStorage | "; + if ( value & BufferUsageFlagBits2KHR::eSamplerDescriptorBufferEXT ) + result += "SamplerDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits2KHR::eResourceDescriptorBufferEXT ) + result += "ResourceDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBufferEXT ) + result += "PushDescriptorsDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnlyEXT ) + result += "MicromapBuildInputReadOnlyEXT | "; + if ( value & BufferUsageFlagBits2KHR::eMicromapStorageEXT ) + result += "MicromapStorageEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_EXT_shader_object === + + VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ShaderCreateFlagBitsEXT::eLinkStage ) + result += "LinkStage | "; + if ( value & ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize ) + result += "AllowVaryingSubgroupSize | "; + if ( value & ShaderCreateFlagBitsEXT::eRequireFullSubgroups ) + result += "RequireFullSubgroups | "; + if ( value & ShaderCreateFlagBitsEXT::eNoTaskShader ) + result += "NoTaskShader | "; + if ( value & ShaderCreateFlagBitsEXT::eDispatchBase ) + result += "DispatchBase | "; + if ( value & ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment ) + 
result += "FragmentShadingRateAttachment | "; + if ( value & ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment ) + result += "FragmentDensityMapAttachment | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //======================= + //=== ENUMs to_string === + //======================= + + VULKAN_HPP_INLINE std::string toHexString( uint32_t value ) + { +#if __cpp_lib_format + return std::format( "{:x}", value ); +#else + std::stringstream stream; + stream << std::hex << value; + return stream.str(); +#endif + } + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_INLINE std::string to_string( Result value ) + { + switch ( value ) + { + case Result::eSuccess: return "Success"; + case Result::eNotReady: return "NotReady"; + case Result::eTimeout: return "Timeout"; + case Result::eEventSet: return "EventSet"; + case Result::eEventReset: return "EventReset"; + case Result::eIncomplete: return "Incomplete"; + case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory"; + case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory"; + case Result::eErrorInitializationFailed: return "ErrorInitializationFailed"; + case Result::eErrorDeviceLost: return "ErrorDeviceLost"; + case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed"; + case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent"; + case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent"; + case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent"; + case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver"; + case Result::eErrorTooManyObjects: return "ErrorTooManyObjects"; + case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported"; + case Result::eErrorFragmentedPool: return "ErrorFragmentedPool"; + case Result::eErrorUnknown: return "ErrorUnknown"; + case Result::eErrorOutOfPoolMemory: return "ErrorOutOfPoolMemory"; + case Result::eErrorInvalidExternalHandle: return "ErrorInvalidExternalHandle"; + case Result::eErrorFragmentation: return "ErrorFragmentation"; + case Result::eErrorInvalidOpaqueCaptureAddress: return "ErrorInvalidOpaqueCaptureAddress"; + case Result::ePipelineCompileRequired: return "PipelineCompileRequired"; + case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR"; + case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR"; + case Result::eSuboptimalKHR: return "SuboptimalKHR"; + case Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR"; + case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR"; + case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT"; + case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV"; + case Result::eErrorImageUsageNotSupportedKHR: return "ErrorImageUsageNotSupportedKHR"; + case Result::eErrorVideoPictureLayoutNotSupportedKHR: return "ErrorVideoPictureLayoutNotSupportedKHR"; + case Result::eErrorVideoProfileOperationNotSupportedKHR: return "ErrorVideoProfileOperationNotSupportedKHR"; + case Result::eErrorVideoProfileFormatNotSupportedKHR: return "ErrorVideoProfileFormatNotSupportedKHR"; + case Result::eErrorVideoProfileCodecNotSupportedKHR: return "ErrorVideoProfileCodecNotSupportedKHR"; + case Result::eErrorVideoStdVersionNotSupportedKHR: return "ErrorVideoStdVersionNotSupportedKHR"; + case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT"; + case Result::eErrorNotPermittedKHR: return "ErrorNotPermittedKHR"; +#if defined( 
VK_USE_PLATFORM_WIN32_KHR ) + case Result::eErrorFullScreenExclusiveModeLostEXT: return "ErrorFullScreenExclusiveModeLostEXT"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case Result::eThreadIdleKHR: return "ThreadIdleKHR"; + case Result::eThreadDoneKHR: return "ThreadDoneKHR"; + case Result::eOperationDeferredKHR: return "OperationDeferredKHR"; + case Result::eOperationNotDeferredKHR: return "OperationNotDeferredKHR"; + case Result::eErrorInvalidVideoStdParametersKHR: return "ErrorInvalidVideoStdParametersKHR"; + case Result::eErrorCompressionExhaustedEXT: return "ErrorCompressionExhaustedEXT"; + case Result::eErrorIncompatibleShaderBinaryEXT: return "ErrorIncompatibleShaderBinaryEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( StructureType value ) + { + switch ( value ) + { + case StructureType::eApplicationInfo: return "ApplicationInfo"; + case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo"; + case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo"; + case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo"; + case StructureType::eSubmitInfo: return "SubmitInfo"; + case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo"; + case StructureType::eMappedMemoryRange: return "MappedMemoryRange"; + case StructureType::eBindSparseInfo: return "BindSparseInfo"; + case StructureType::eFenceCreateInfo: return "FenceCreateInfo"; + case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo"; + case StructureType::eEventCreateInfo: return "EventCreateInfo"; + case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo"; + case StructureType::eBufferCreateInfo: return "BufferCreateInfo"; + case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo"; + case StructureType::eImageCreateInfo: return "ImageCreateInfo"; + case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo"; + case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo"; + case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo"; + case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo"; + case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo"; + case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo"; + case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo"; + case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo"; + case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo"; + case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo"; + case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo"; + case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo"; + case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo"; + case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo"; + case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo"; + case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo"; + case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo"; + case 
StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo"; + case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo"; + case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo"; + case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet"; + case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet"; + case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo"; + case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo"; + case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo"; + case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo"; + case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo"; + case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo"; + case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo"; + case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier"; + case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier"; + case StructureType::eMemoryBarrier: return "MemoryBarrier"; + case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo"; + case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo"; + case StructureType::ePhysicalDeviceSubgroupProperties: return "PhysicalDeviceSubgroupProperties"; + case StructureType::eBindBufferMemoryInfo: return "BindBufferMemoryInfo"; + case StructureType::eBindImageMemoryInfo: return "BindImageMemoryInfo"; + case StructureType::ePhysicalDevice16BitStorageFeatures: return "PhysicalDevice16BitStorageFeatures"; + case StructureType::eMemoryDedicatedRequirements: return "MemoryDedicatedRequirements"; + case StructureType::eMemoryDedicatedAllocateInfo: return "MemoryDedicatedAllocateInfo"; + case StructureType::eMemoryAllocateFlagsInfo: return "MemoryAllocateFlagsInfo"; + case StructureType::eDeviceGroupRenderPassBeginInfo: return "DeviceGroupRenderPassBeginInfo"; + case StructureType::eDeviceGroupCommandBufferBeginInfo: return "DeviceGroupCommandBufferBeginInfo"; + case StructureType::eDeviceGroupSubmitInfo: return "DeviceGroupSubmitInfo"; + case StructureType::eDeviceGroupBindSparseInfo: return "DeviceGroupBindSparseInfo"; + case StructureType::eBindBufferMemoryDeviceGroupInfo: return "BindBufferMemoryDeviceGroupInfo"; + case StructureType::eBindImageMemoryDeviceGroupInfo: return "BindImageMemoryDeviceGroupInfo"; + case StructureType::ePhysicalDeviceGroupProperties: return "PhysicalDeviceGroupProperties"; + case StructureType::eDeviceGroupDeviceCreateInfo: return "DeviceGroupDeviceCreateInfo"; + case StructureType::eBufferMemoryRequirementsInfo2: return "BufferMemoryRequirementsInfo2"; + case StructureType::eImageMemoryRequirementsInfo2: return "ImageMemoryRequirementsInfo2"; + case StructureType::eImageSparseMemoryRequirementsInfo2: return "ImageSparseMemoryRequirementsInfo2"; + case StructureType::eMemoryRequirements2: return "MemoryRequirements2"; + case StructureType::eSparseImageMemoryRequirements2: return "SparseImageMemoryRequirements2"; + case StructureType::ePhysicalDeviceFeatures2: return "PhysicalDeviceFeatures2"; + case StructureType::ePhysicalDeviceProperties2: return "PhysicalDeviceProperties2"; + case StructureType::eFormatProperties2: return "FormatProperties2"; + case StructureType::eImageFormatProperties2: return "ImageFormatProperties2"; + case StructureType::ePhysicalDeviceImageFormatInfo2: return 
"PhysicalDeviceImageFormatInfo2"; + case StructureType::eQueueFamilyProperties2: return "QueueFamilyProperties2"; + case StructureType::ePhysicalDeviceMemoryProperties2: return "PhysicalDeviceMemoryProperties2"; + case StructureType::eSparseImageFormatProperties2: return "SparseImageFormatProperties2"; + case StructureType::ePhysicalDeviceSparseImageFormatInfo2: return "PhysicalDeviceSparseImageFormatInfo2"; + case StructureType::ePhysicalDevicePointClippingProperties: return "PhysicalDevicePointClippingProperties"; + case StructureType::eRenderPassInputAttachmentAspectCreateInfo: return "RenderPassInputAttachmentAspectCreateInfo"; + case StructureType::eImageViewUsageCreateInfo: return "ImageViewUsageCreateInfo"; + case StructureType::ePipelineTessellationDomainOriginStateCreateInfo: return "PipelineTessellationDomainOriginStateCreateInfo"; + case StructureType::eRenderPassMultiviewCreateInfo: return "RenderPassMultiviewCreateInfo"; + case StructureType::ePhysicalDeviceMultiviewFeatures: return "PhysicalDeviceMultiviewFeatures"; + case StructureType::ePhysicalDeviceMultiviewProperties: return "PhysicalDeviceMultiviewProperties"; + case StructureType::ePhysicalDeviceVariablePointersFeatures: return "PhysicalDeviceVariablePointersFeatures"; + case StructureType::eProtectedSubmitInfo: return "ProtectedSubmitInfo"; + case StructureType::ePhysicalDeviceProtectedMemoryFeatures: return "PhysicalDeviceProtectedMemoryFeatures"; + case StructureType::ePhysicalDeviceProtectedMemoryProperties: return "PhysicalDeviceProtectedMemoryProperties"; + case StructureType::eDeviceQueueInfo2: return "DeviceQueueInfo2"; + case StructureType::eSamplerYcbcrConversionCreateInfo: return "SamplerYcbcrConversionCreateInfo"; + case StructureType::eSamplerYcbcrConversionInfo: return "SamplerYcbcrConversionInfo"; + case StructureType::eBindImagePlaneMemoryInfo: return "BindImagePlaneMemoryInfo"; + case StructureType::eImagePlaneMemoryRequirementsInfo: return "ImagePlaneMemoryRequirementsInfo"; + case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures: return "PhysicalDeviceSamplerYcbcrConversionFeatures"; + case StructureType::eSamplerYcbcrConversionImageFormatProperties: return "SamplerYcbcrConversionImageFormatProperties"; + case StructureType::eDescriptorUpdateTemplateCreateInfo: return "DescriptorUpdateTemplateCreateInfo"; + case StructureType::ePhysicalDeviceExternalImageFormatInfo: return "PhysicalDeviceExternalImageFormatInfo"; + case StructureType::eExternalImageFormatProperties: return "ExternalImageFormatProperties"; + case StructureType::ePhysicalDeviceExternalBufferInfo: return "PhysicalDeviceExternalBufferInfo"; + case StructureType::eExternalBufferProperties: return "ExternalBufferProperties"; + case StructureType::ePhysicalDeviceIdProperties: return "PhysicalDeviceIdProperties"; + case StructureType::eExternalMemoryBufferCreateInfo: return "ExternalMemoryBufferCreateInfo"; + case StructureType::eExternalMemoryImageCreateInfo: return "ExternalMemoryImageCreateInfo"; + case StructureType::eExportMemoryAllocateInfo: return "ExportMemoryAllocateInfo"; + case StructureType::ePhysicalDeviceExternalFenceInfo: return "PhysicalDeviceExternalFenceInfo"; + case StructureType::eExternalFenceProperties: return "ExternalFenceProperties"; + case StructureType::eExportFenceCreateInfo: return "ExportFenceCreateInfo"; + case StructureType::eExportSemaphoreCreateInfo: return "ExportSemaphoreCreateInfo"; + case StructureType::ePhysicalDeviceExternalSemaphoreInfo: return "PhysicalDeviceExternalSemaphoreInfo"; + case 
StructureType::eExternalSemaphoreProperties: return "ExternalSemaphoreProperties"; + case StructureType::ePhysicalDeviceMaintenance3Properties: return "PhysicalDeviceMaintenance3Properties"; + case StructureType::eDescriptorSetLayoutSupport: return "DescriptorSetLayoutSupport"; + case StructureType::ePhysicalDeviceShaderDrawParametersFeatures: return "PhysicalDeviceShaderDrawParametersFeatures"; + case StructureType::ePhysicalDeviceVulkan11Features: return "PhysicalDeviceVulkan11Features"; + case StructureType::ePhysicalDeviceVulkan11Properties: return "PhysicalDeviceVulkan11Properties"; + case StructureType::ePhysicalDeviceVulkan12Features: return "PhysicalDeviceVulkan12Features"; + case StructureType::ePhysicalDeviceVulkan12Properties: return "PhysicalDeviceVulkan12Properties"; + case StructureType::eImageFormatListCreateInfo: return "ImageFormatListCreateInfo"; + case StructureType::eAttachmentDescription2: return "AttachmentDescription2"; + case StructureType::eAttachmentReference2: return "AttachmentReference2"; + case StructureType::eSubpassDescription2: return "SubpassDescription2"; + case StructureType::eSubpassDependency2: return "SubpassDependency2"; + case StructureType::eRenderPassCreateInfo2: return "RenderPassCreateInfo2"; + case StructureType::eSubpassBeginInfo: return "SubpassBeginInfo"; + case StructureType::eSubpassEndInfo: return "SubpassEndInfo"; + case StructureType::ePhysicalDevice8BitStorageFeatures: return "PhysicalDevice8BitStorageFeatures"; + case StructureType::ePhysicalDeviceDriverProperties: return "PhysicalDeviceDriverProperties"; + case StructureType::ePhysicalDeviceShaderAtomicInt64Features: return "PhysicalDeviceShaderAtomicInt64Features"; + case StructureType::ePhysicalDeviceShaderFloat16Int8Features: return "PhysicalDeviceShaderFloat16Int8Features"; + case StructureType::ePhysicalDeviceFloatControlsProperties: return "PhysicalDeviceFloatControlsProperties"; + case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo: return "DescriptorSetLayoutBindingFlagsCreateInfo"; + case StructureType::ePhysicalDeviceDescriptorIndexingFeatures: return "PhysicalDeviceDescriptorIndexingFeatures"; + case StructureType::ePhysicalDeviceDescriptorIndexingProperties: return "PhysicalDeviceDescriptorIndexingProperties"; + case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo: return "DescriptorSetVariableDescriptorCountAllocateInfo"; + case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport: return "DescriptorSetVariableDescriptorCountLayoutSupport"; + case StructureType::ePhysicalDeviceDepthStencilResolveProperties: return "PhysicalDeviceDepthStencilResolveProperties"; + case StructureType::eSubpassDescriptionDepthStencilResolve: return "SubpassDescriptionDepthStencilResolve"; + case StructureType::ePhysicalDeviceScalarBlockLayoutFeatures: return "PhysicalDeviceScalarBlockLayoutFeatures"; + case StructureType::eImageStencilUsageCreateInfo: return "ImageStencilUsageCreateInfo"; + case StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties: return "PhysicalDeviceSamplerFilterMinmaxProperties"; + case StructureType::eSamplerReductionModeCreateInfo: return "SamplerReductionModeCreateInfo"; + case StructureType::ePhysicalDeviceVulkanMemoryModelFeatures: return "PhysicalDeviceVulkanMemoryModelFeatures"; + case StructureType::ePhysicalDeviceImagelessFramebufferFeatures: return "PhysicalDeviceImagelessFramebufferFeatures"; + case StructureType::eFramebufferAttachmentsCreateInfo: return "FramebufferAttachmentsCreateInfo"; + case 
StructureType::eFramebufferAttachmentImageInfo: return "FramebufferAttachmentImageInfo"; + case StructureType::eRenderPassAttachmentBeginInfo: return "RenderPassAttachmentBeginInfo"; + case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures: return "PhysicalDeviceUniformBufferStandardLayoutFeatures"; + case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures: return "PhysicalDeviceShaderSubgroupExtendedTypesFeatures"; + case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures: return "PhysicalDeviceSeparateDepthStencilLayoutsFeatures"; + case StructureType::eAttachmentReferenceStencilLayout: return "AttachmentReferenceStencilLayout"; + case StructureType::eAttachmentDescriptionStencilLayout: return "AttachmentDescriptionStencilLayout"; + case StructureType::ePhysicalDeviceHostQueryResetFeatures: return "PhysicalDeviceHostQueryResetFeatures"; + case StructureType::ePhysicalDeviceTimelineSemaphoreFeatures: return "PhysicalDeviceTimelineSemaphoreFeatures"; + case StructureType::ePhysicalDeviceTimelineSemaphoreProperties: return "PhysicalDeviceTimelineSemaphoreProperties"; + case StructureType::eSemaphoreTypeCreateInfo: return "SemaphoreTypeCreateInfo"; + case StructureType::eTimelineSemaphoreSubmitInfo: return "TimelineSemaphoreSubmitInfo"; + case StructureType::eSemaphoreWaitInfo: return "SemaphoreWaitInfo"; + case StructureType::eSemaphoreSignalInfo: return "SemaphoreSignalInfo"; + case StructureType::ePhysicalDeviceBufferDeviceAddressFeatures: return "PhysicalDeviceBufferDeviceAddressFeatures"; + case StructureType::eBufferDeviceAddressInfo: return "BufferDeviceAddressInfo"; + case StructureType::eBufferOpaqueCaptureAddressCreateInfo: return "BufferOpaqueCaptureAddressCreateInfo"; + case StructureType::eMemoryOpaqueCaptureAddressAllocateInfo: return "MemoryOpaqueCaptureAddressAllocateInfo"; + case StructureType::eDeviceMemoryOpaqueCaptureAddressInfo: return "DeviceMemoryOpaqueCaptureAddressInfo"; + case StructureType::ePhysicalDeviceVulkan13Features: return "PhysicalDeviceVulkan13Features"; + case StructureType::ePhysicalDeviceVulkan13Properties: return "PhysicalDeviceVulkan13Properties"; + case StructureType::ePipelineCreationFeedbackCreateInfo: return "PipelineCreationFeedbackCreateInfo"; + case StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures: return "PhysicalDeviceShaderTerminateInvocationFeatures"; + case StructureType::ePhysicalDeviceToolProperties: return "PhysicalDeviceToolProperties"; + case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures: return "PhysicalDeviceShaderDemoteToHelperInvocationFeatures"; + case StructureType::ePhysicalDevicePrivateDataFeatures: return "PhysicalDevicePrivateDataFeatures"; + case StructureType::eDevicePrivateDataCreateInfo: return "DevicePrivateDataCreateInfo"; + case StructureType::ePrivateDataSlotCreateInfo: return "PrivateDataSlotCreateInfo"; + case StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures: return "PhysicalDevicePipelineCreationCacheControlFeatures"; + case StructureType::eMemoryBarrier2: return "MemoryBarrier2"; + case StructureType::eBufferMemoryBarrier2: return "BufferMemoryBarrier2"; + case StructureType::eImageMemoryBarrier2: return "ImageMemoryBarrier2"; + case StructureType::eDependencyInfo: return "DependencyInfo"; + case StructureType::eSubmitInfo2: return "SubmitInfo2"; + case StructureType::eSemaphoreSubmitInfo: return "SemaphoreSubmitInfo"; + case StructureType::eCommandBufferSubmitInfo: return "CommandBufferSubmitInfo"; + case 
StructureType::ePhysicalDeviceSynchronization2Features: return "PhysicalDeviceSynchronization2Features"; + case StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures: return "PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures"; + case StructureType::ePhysicalDeviceImageRobustnessFeatures: return "PhysicalDeviceImageRobustnessFeatures"; + case StructureType::eCopyBufferInfo2: return "CopyBufferInfo2"; + case StructureType::eCopyImageInfo2: return "CopyImageInfo2"; + case StructureType::eCopyBufferToImageInfo2: return "CopyBufferToImageInfo2"; + case StructureType::eCopyImageToBufferInfo2: return "CopyImageToBufferInfo2"; + case StructureType::eBlitImageInfo2: return "BlitImageInfo2"; + case StructureType::eResolveImageInfo2: return "ResolveImageInfo2"; + case StructureType::eBufferCopy2: return "BufferCopy2"; + case StructureType::eImageCopy2: return "ImageCopy2"; + case StructureType::eImageBlit2: return "ImageBlit2"; + case StructureType::eBufferImageCopy2: return "BufferImageCopy2"; + case StructureType::eImageResolve2: return "ImageResolve2"; + case StructureType::ePhysicalDeviceSubgroupSizeControlProperties: return "PhysicalDeviceSubgroupSizeControlProperties"; + case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo: return "PipelineShaderStageRequiredSubgroupSizeCreateInfo"; + case StructureType::ePhysicalDeviceSubgroupSizeControlFeatures: return "PhysicalDeviceSubgroupSizeControlFeatures"; + case StructureType::ePhysicalDeviceInlineUniformBlockFeatures: return "PhysicalDeviceInlineUniformBlockFeatures"; + case StructureType::ePhysicalDeviceInlineUniformBlockProperties: return "PhysicalDeviceInlineUniformBlockProperties"; + case StructureType::eWriteDescriptorSetInlineUniformBlock: return "WriteDescriptorSetInlineUniformBlock"; + case StructureType::eDescriptorPoolInlineUniformBlockCreateInfo: return "DescriptorPoolInlineUniformBlockCreateInfo"; + case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures: return "PhysicalDeviceTextureCompressionAstcHdrFeatures"; + case StructureType::eRenderingInfo: return "RenderingInfo"; + case StructureType::eRenderingAttachmentInfo: return "RenderingAttachmentInfo"; + case StructureType::ePipelineRenderingCreateInfo: return "PipelineRenderingCreateInfo"; + case StructureType::ePhysicalDeviceDynamicRenderingFeatures: return "PhysicalDeviceDynamicRenderingFeatures"; + case StructureType::eCommandBufferInheritanceRenderingInfo: return "CommandBufferInheritanceRenderingInfo"; + case StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures: return "PhysicalDeviceShaderIntegerDotProductFeatures"; + case StructureType::ePhysicalDeviceShaderIntegerDotProductProperties: return "PhysicalDeviceShaderIntegerDotProductProperties"; + case StructureType::ePhysicalDeviceTexelBufferAlignmentProperties: return "PhysicalDeviceTexelBufferAlignmentProperties"; + case StructureType::eFormatProperties3: return "FormatProperties3"; + case StructureType::ePhysicalDeviceMaintenance4Features: return "PhysicalDeviceMaintenance4Features"; + case StructureType::ePhysicalDeviceMaintenance4Properties: return "PhysicalDeviceMaintenance4Properties"; + case StructureType::eDeviceBufferMemoryRequirements: return "DeviceBufferMemoryRequirements"; + case StructureType::eDeviceImageMemoryRequirements: return "DeviceImageMemoryRequirements"; + case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR"; + case StructureType::ePresentInfoKHR: return "PresentInfoKHR"; + case StructureType::eDeviceGroupPresentCapabilitiesKHR: 
return "DeviceGroupPresentCapabilitiesKHR"; + case StructureType::eImageSwapchainCreateInfoKHR: return "ImageSwapchainCreateInfoKHR"; + case StructureType::eBindImageMemorySwapchainInfoKHR: return "BindImageMemorySwapchainInfoKHR"; + case StructureType::eAcquireNextImageInfoKHR: return "AcquireNextImageInfoKHR"; + case StructureType::eDeviceGroupPresentInfoKHR: return "DeviceGroupPresentInfoKHR"; + case StructureType::eDeviceGroupSwapchainCreateInfoKHR: return "DeviceGroupSwapchainCreateInfoKHR"; + case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR"; + case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR"; + case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR"; +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT"; + case StructureType::ePipelineRasterizationStateRasterizationOrderAMD: return "PipelineRasterizationStateRasterizationOrderAMD"; + case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT"; + case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT"; + case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT"; + case StructureType::eVideoProfileInfoKHR: return "VideoProfileInfoKHR"; + case StructureType::eVideoCapabilitiesKHR: return "VideoCapabilitiesKHR"; + case StructureType::eVideoPictureResourceInfoKHR: return "VideoPictureResourceInfoKHR"; + case StructureType::eVideoSessionMemoryRequirementsKHR: return "VideoSessionMemoryRequirementsKHR"; + case StructureType::eBindVideoSessionMemoryInfoKHR: return "BindVideoSessionMemoryInfoKHR"; + case StructureType::eVideoSessionCreateInfoKHR: return "VideoSessionCreateInfoKHR"; + case StructureType::eVideoSessionParametersCreateInfoKHR: return "VideoSessionParametersCreateInfoKHR"; + case StructureType::eVideoSessionParametersUpdateInfoKHR: return "VideoSessionParametersUpdateInfoKHR"; + case StructureType::eVideoBeginCodingInfoKHR: return "VideoBeginCodingInfoKHR"; + case StructureType::eVideoEndCodingInfoKHR: return "VideoEndCodingInfoKHR"; + case StructureType::eVideoCodingControlInfoKHR: return "VideoCodingControlInfoKHR"; + case StructureType::eVideoReferenceSlotInfoKHR: return "VideoReferenceSlotInfoKHR"; + case StructureType::eQueueFamilyVideoPropertiesKHR: return "QueueFamilyVideoPropertiesKHR"; + case StructureType::eVideoProfileListInfoKHR: return "VideoProfileListInfoKHR"; + case StructureType::ePhysicalDeviceVideoFormatInfoKHR: return "PhysicalDeviceVideoFormatInfoKHR"; + case StructureType::eVideoFormatPropertiesKHR: return "VideoFormatPropertiesKHR"; + case 
StructureType::eQueueFamilyQueryResultStatusPropertiesKHR: return "QueueFamilyQueryResultStatusPropertiesKHR"; + case StructureType::eVideoDecodeInfoKHR: return "VideoDecodeInfoKHR"; + case StructureType::eVideoDecodeCapabilitiesKHR: return "VideoDecodeCapabilitiesKHR"; + case StructureType::eVideoDecodeUsageInfoKHR: return "VideoDecodeUsageInfoKHR"; + case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV"; + case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV"; + case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV"; + case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT: return "PhysicalDeviceTransformFeedbackFeaturesEXT"; + case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT: return "PhysicalDeviceTransformFeedbackPropertiesEXT"; + case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT: return "PipelineRasterizationStateStreamCreateInfoEXT"; + case StructureType::eCuModuleCreateInfoNVX: return "CuModuleCreateInfoNVX"; + case StructureType::eCuFunctionCreateInfoNVX: return "CuFunctionCreateInfoNVX"; + case StructureType::eCuLaunchInfoNVX: return "CuLaunchInfoNVX"; + case StructureType::eImageViewHandleInfoNVX: return "ImageViewHandleInfoNVX"; + case StructureType::eImageViewAddressPropertiesNVX: return "ImageViewAddressPropertiesNVX"; + case StructureType::eVideoEncodeH264CapabilitiesKHR: return "VideoEncodeH264CapabilitiesKHR"; + case StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR: return "VideoEncodeH264SessionParametersCreateInfoKHR"; + case StructureType::eVideoEncodeH264SessionParametersAddInfoKHR: return "VideoEncodeH264SessionParametersAddInfoKHR"; + case StructureType::eVideoEncodeH264PictureInfoKHR: return "VideoEncodeH264PictureInfoKHR"; + case StructureType::eVideoEncodeH264DpbSlotInfoKHR: return "VideoEncodeH264DpbSlotInfoKHR"; + case StructureType::eVideoEncodeH264NaluSliceInfoKHR: return "VideoEncodeH264NaluSliceInfoKHR"; + case StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR: return "VideoEncodeH264GopRemainingFrameInfoKHR"; + case StructureType::eVideoEncodeH264ProfileInfoKHR: return "VideoEncodeH264ProfileInfoKHR"; + case StructureType::eVideoEncodeH264RateControlInfoKHR: return "VideoEncodeH264RateControlInfoKHR"; + case StructureType::eVideoEncodeH264RateControlLayerInfoKHR: return "VideoEncodeH264RateControlLayerInfoKHR"; + case StructureType::eVideoEncodeH264SessionCreateInfoKHR: return "VideoEncodeH264SessionCreateInfoKHR"; + case StructureType::eVideoEncodeH264QualityLevelPropertiesKHR: return "VideoEncodeH264QualityLevelPropertiesKHR"; + case StructureType::eVideoEncodeH264SessionParametersGetInfoKHR: return "VideoEncodeH264SessionParametersGetInfoKHR"; + case StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR: return "VideoEncodeH264SessionParametersFeedbackInfoKHR"; + case StructureType::eVideoEncodeH265CapabilitiesKHR: return "VideoEncodeH265CapabilitiesKHR"; + case StructureType::eVideoEncodeH265SessionParametersCreateInfoKHR: return "VideoEncodeH265SessionParametersCreateInfoKHR"; + case StructureType::eVideoEncodeH265SessionParametersAddInfoKHR: return "VideoEncodeH265SessionParametersAddInfoKHR"; + case StructureType::eVideoEncodeH265PictureInfoKHR: return "VideoEncodeH265PictureInfoKHR"; + case StructureType::eVideoEncodeH265DpbSlotInfoKHR: return "VideoEncodeH265DpbSlotInfoKHR"; + case 
StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR: return "VideoEncodeH265NaluSliceSegmentInfoKHR"; + case StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR: return "VideoEncodeH265GopRemainingFrameInfoKHR"; + case StructureType::eVideoEncodeH265ProfileInfoKHR: return "VideoEncodeH265ProfileInfoKHR"; + case StructureType::eVideoEncodeH265RateControlInfoKHR: return "VideoEncodeH265RateControlInfoKHR"; + case StructureType::eVideoEncodeH265RateControlLayerInfoKHR: return "VideoEncodeH265RateControlLayerInfoKHR"; + case StructureType::eVideoEncodeH265SessionCreateInfoKHR: return "VideoEncodeH265SessionCreateInfoKHR"; + case StructureType::eVideoEncodeH265QualityLevelPropertiesKHR: return "VideoEncodeH265QualityLevelPropertiesKHR"; + case StructureType::eVideoEncodeH265SessionParametersGetInfoKHR: return "VideoEncodeH265SessionParametersGetInfoKHR"; + case StructureType::eVideoEncodeH265SessionParametersFeedbackInfoKHR: return "VideoEncodeH265SessionParametersFeedbackInfoKHR"; + case StructureType::eVideoDecodeH264CapabilitiesKHR: return "VideoDecodeH264CapabilitiesKHR"; + case StructureType::eVideoDecodeH264PictureInfoKHR: return "VideoDecodeH264PictureInfoKHR"; + case StructureType::eVideoDecodeH264ProfileInfoKHR: return "VideoDecodeH264ProfileInfoKHR"; + case StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR: return "VideoDecodeH264SessionParametersCreateInfoKHR"; + case StructureType::eVideoDecodeH264SessionParametersAddInfoKHR: return "VideoDecodeH264SessionParametersAddInfoKHR"; + case StructureType::eVideoDecodeH264DpbSlotInfoKHR: return "VideoDecodeH264DpbSlotInfoKHR"; + case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD"; + case StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR: return "RenderingFragmentShadingRateAttachmentInfoKHR"; + case StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT: return "RenderingFragmentDensityMapAttachmentInfoEXT"; + case StructureType::eAttachmentSampleCountInfoAMD: return "AttachmentSampleCountInfoAMD"; + case StructureType::eMultiviewPerViewAttributesInfoNVX: return "MultiviewPerViewAttributesInfoNVX"; +#if defined( VK_USE_PLATFORM_GGP ) + case StructureType::eStreamDescriptorSurfaceCreateInfoGGP: return "StreamDescriptorSurfaceCreateInfoGGP"; +#endif /*VK_USE_PLATFORM_GGP*/ + case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV: return "PhysicalDeviceCornerSampledImageFeaturesNV"; + case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV"; + case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV"; + case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV"; + case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT"; +#if defined( VK_USE_PLATFORM_VI_NN ) + case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN"; +#endif /*VK_USE_PLATFORM_VI_NN*/ + case StructureType::eImageViewAstcDecodeModeEXT: return "ImageViewAstcDecodeModeEXT"; + case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT: return "PhysicalDeviceAstcDecodeFeaturesEXT"; + case StructureType::ePipelineRobustnessCreateInfoEXT: return "PipelineRobustnessCreateInfoEXT"; + case 
StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT: return "PhysicalDevicePipelineRobustnessFeaturesEXT"; + case StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT: return "PhysicalDevicePipelineRobustnessPropertiesEXT"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR"; + case StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR"; + case StructureType::eMemoryWin32HandlePropertiesKHR: return "MemoryWin32HandlePropertiesKHR"; + case StructureType::eMemoryGetWin32HandleInfoKHR: return "MemoryGetWin32HandleInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eImportMemoryFdInfoKHR: return "ImportMemoryFdInfoKHR"; + case StructureType::eMemoryFdPropertiesKHR: return "MemoryFdPropertiesKHR"; + case StructureType::eMemoryGetFdInfoKHR: return "MemoryGetFdInfoKHR"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR: return "Win32KeyedMutexAcquireReleaseInfoKHR"; + case StructureType::eImportSemaphoreWin32HandleInfoKHR: return "ImportSemaphoreWin32HandleInfoKHR"; + case StructureType::eExportSemaphoreWin32HandleInfoKHR: return "ExportSemaphoreWin32HandleInfoKHR"; + case StructureType::eD3D12FenceSubmitInfoKHR: return "D3D12FenceSubmitInfoKHR"; + case StructureType::eSemaphoreGetWin32HandleInfoKHR: return "SemaphoreGetWin32HandleInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR"; + case StructureType::eSemaphoreGetFdInfoKHR: return "SemaphoreGetFdInfoKHR"; + case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR: return "PhysicalDevicePushDescriptorPropertiesKHR"; + case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT: return "CommandBufferInheritanceConditionalRenderingInfoEXT"; + case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT: return "PhysicalDeviceConditionalRenderingFeaturesEXT"; + case StructureType::eConditionalRenderingBeginInfoEXT: return "ConditionalRenderingBeginInfoEXT"; + case StructureType::ePresentRegionsKHR: return "PresentRegionsKHR"; + case StructureType::ePipelineViewportWScalingStateCreateInfoNV: return "PipelineViewportWScalingStateCreateInfoNV"; + case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT"; + case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT"; + case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT"; + case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT"; + case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT"; + case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE"; + case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX: return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX"; + case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV"; + case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT: return "PhysicalDeviceDiscardRectanglePropertiesEXT"; + case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT: return "PipelineDiscardRectangleStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT: return "PhysicalDeviceConservativeRasterizationPropertiesEXT"; + case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT: return 
"PipelineRasterizationConservativeStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT: return "PhysicalDeviceDepthClipEnableFeaturesEXT"; + case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT: return "PipelineRasterizationDepthClipStateCreateInfoEXT"; + case StructureType::eHdrMetadataEXT: return "HdrMetadataEXT"; + case StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG: return "PhysicalDeviceRelaxedLineRasterizationFeaturesIMG"; + case StructureType::eSharedPresentSurfaceCapabilitiesKHR: return "SharedPresentSurfaceCapabilitiesKHR"; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + case StructureType::eImportFenceWin32HandleInfoKHR: return "ImportFenceWin32HandleInfoKHR"; + case StructureType::eExportFenceWin32HandleInfoKHR: return "ExportFenceWin32HandleInfoKHR"; + case StructureType::eFenceGetWin32HandleInfoKHR: return "FenceGetWin32HandleInfoKHR"; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case StructureType::eImportFenceFdInfoKHR: return "ImportFenceFdInfoKHR"; + case StructureType::eFenceGetFdInfoKHR: return "FenceGetFdInfoKHR"; + case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR: return "PhysicalDevicePerformanceQueryFeaturesKHR"; + case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR: return "PhysicalDevicePerformanceQueryPropertiesKHR"; + case StructureType::eQueryPoolPerformanceCreateInfoKHR: return "QueryPoolPerformanceCreateInfoKHR"; + case StructureType::ePerformanceQuerySubmitInfoKHR: return "PerformanceQuerySubmitInfoKHR"; + case StructureType::eAcquireProfilingLockInfoKHR: return "AcquireProfilingLockInfoKHR"; + case StructureType::ePerformanceCounterKHR: return "PerformanceCounterKHR"; + case StructureType::ePerformanceCounterDescriptionKHR: return "PerformanceCounterDescriptionKHR"; + case StructureType::ePhysicalDeviceSurfaceInfo2KHR: return "PhysicalDeviceSurfaceInfo2KHR"; + case StructureType::eSurfaceCapabilities2KHR: return "SurfaceCapabilities2KHR"; + case StructureType::eSurfaceFormat2KHR: return "SurfaceFormat2KHR"; + case StructureType::eDisplayProperties2KHR: return "DisplayProperties2KHR"; + case StructureType::eDisplayPlaneProperties2KHR: return "DisplayPlaneProperties2KHR"; + case StructureType::eDisplayModeProperties2KHR: return "DisplayModeProperties2KHR"; + case StructureType::eDisplayPlaneInfo2KHR: return "DisplayPlaneInfo2KHR"; + case StructureType::eDisplayPlaneCapabilities2KHR: return "DisplayPlaneCapabilities2KHR"; +#if defined( VK_USE_PLATFORM_IOS_MVK ) + case StructureType::eIosSurfaceCreateInfoMVK: return "IosSurfaceCreateInfoMVK"; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + case StructureType::eMacosSurfaceCreateInfoMVK: return "MacosSurfaceCreateInfoMVK"; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + case StructureType::eDebugUtilsObjectNameInfoEXT: return "DebugUtilsObjectNameInfoEXT"; + case StructureType::eDebugUtilsObjectTagInfoEXT: return "DebugUtilsObjectTagInfoEXT"; + case StructureType::eDebugUtilsLabelEXT: return "DebugUtilsLabelEXT"; + case StructureType::eDebugUtilsMessengerCallbackDataEXT: return "DebugUtilsMessengerCallbackDataEXT"; + case StructureType::eDebugUtilsMessengerCreateInfoEXT: return "DebugUtilsMessengerCreateInfoEXT"; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case StructureType::eAndroidHardwareBufferUsageANDROID: return "AndroidHardwareBufferUsageANDROID"; + case StructureType::eAndroidHardwareBufferPropertiesANDROID: return "AndroidHardwareBufferPropertiesANDROID"; + case 
StructureType::eAndroidHardwareBufferFormatPropertiesANDROID: return "AndroidHardwareBufferFormatPropertiesANDROID"; + case StructureType::eImportAndroidHardwareBufferInfoANDROID: return "ImportAndroidHardwareBufferInfoANDROID"; + case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID: return "MemoryGetAndroidHardwareBufferInfoANDROID"; + case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID"; + case StructureType::eAndroidHardwareBufferFormatProperties2ANDROID: return "AndroidHardwareBufferFormatProperties2ANDROID"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX: return "PhysicalDeviceShaderEnqueueFeaturesAMDX"; + case StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX: return "PhysicalDeviceShaderEnqueuePropertiesAMDX"; + case StructureType::eExecutionGraphPipelineScratchSizeAMDX: return "ExecutionGraphPipelineScratchSizeAMDX"; + case StructureType::eExecutionGraphPipelineCreateInfoAMDX: return "ExecutionGraphPipelineCreateInfoAMDX"; + case StructureType::ePipelineShaderStageNodeCreateInfoAMDX: return "PipelineShaderStageNodeCreateInfoAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT"; + case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT"; + case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: return "PipelineSampleLocationsStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT: return "PhysicalDeviceSampleLocationsPropertiesEXT"; + case StructureType::eMultisamplePropertiesEXT: return "MultisamplePropertiesEXT"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT: return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT: return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT"; + case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT: return "PipelineColorBlendAdvancedStateCreateInfoEXT"; + case StructureType::ePipelineCoverageToColorStateCreateInfoNV: return "PipelineCoverageToColorStateCreateInfoNV"; + case StructureType::eWriteDescriptorSetAccelerationStructureKHR: return "WriteDescriptorSetAccelerationStructureKHR"; + case StructureType::eAccelerationStructureBuildGeometryInfoKHR: return "AccelerationStructureBuildGeometryInfoKHR"; + case StructureType::eAccelerationStructureDeviceAddressInfoKHR: return "AccelerationStructureDeviceAddressInfoKHR"; + case StructureType::eAccelerationStructureGeometryAabbsDataKHR: return "AccelerationStructureGeometryAabbsDataKHR"; + case StructureType::eAccelerationStructureGeometryInstancesDataKHR: return "AccelerationStructureGeometryInstancesDataKHR"; + case StructureType::eAccelerationStructureGeometryTrianglesDataKHR: return "AccelerationStructureGeometryTrianglesDataKHR"; + case StructureType::eAccelerationStructureGeometryKHR: return "AccelerationStructureGeometryKHR"; + case StructureType::eAccelerationStructureVersionInfoKHR: return "AccelerationStructureVersionInfoKHR"; + case StructureType::eCopyAccelerationStructureInfoKHR: return "CopyAccelerationStructureInfoKHR"; + case StructureType::eCopyAccelerationStructureToMemoryInfoKHR: return "CopyAccelerationStructureToMemoryInfoKHR"; + case StructureType::eCopyMemoryToAccelerationStructureInfoKHR: return "CopyMemoryToAccelerationStructureInfoKHR"; + case 
+ case StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR: return "PhysicalDeviceAccelerationStructureFeaturesKHR";
+ case StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR: return "PhysicalDeviceAccelerationStructurePropertiesKHR";
+ case StructureType::eAccelerationStructureCreateInfoKHR: return "AccelerationStructureCreateInfoKHR";
+ case StructureType::eAccelerationStructureBuildSizesInfoKHR: return "AccelerationStructureBuildSizesInfoKHR";
+ case StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR: return "PhysicalDeviceRayTracingPipelineFeaturesKHR";
+ case StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR: return "PhysicalDeviceRayTracingPipelinePropertiesKHR";
+ case StructureType::eRayTracingPipelineCreateInfoKHR: return "RayTracingPipelineCreateInfoKHR";
+ case StructureType::eRayTracingShaderGroupCreateInfoKHR: return "RayTracingShaderGroupCreateInfoKHR";
+ case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR: return "RayTracingPipelineInterfaceCreateInfoKHR";
+ case StructureType::ePhysicalDeviceRayQueryFeaturesKHR: return "PhysicalDeviceRayQueryFeaturesKHR";
+ case StructureType::ePipelineCoverageModulationStateCreateInfoNV: return "PipelineCoverageModulationStateCreateInfoNV";
+ case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV: return "PhysicalDeviceShaderSmBuiltinsFeaturesNV";
+ case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV: return "PhysicalDeviceShaderSmBuiltinsPropertiesNV";
+ case StructureType::eDrmFormatModifierPropertiesListEXT: return "DrmFormatModifierPropertiesListEXT";
+ case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT: return "PhysicalDeviceImageDrmFormatModifierInfoEXT";
+ case StructureType::eImageDrmFormatModifierListCreateInfoEXT: return "ImageDrmFormatModifierListCreateInfoEXT";
+ case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT: return "ImageDrmFormatModifierExplicitCreateInfoEXT";
+ case StructureType::eImageDrmFormatModifierPropertiesEXT: return "ImageDrmFormatModifierPropertiesEXT";
+ case StructureType::eDrmFormatModifierPropertiesList2EXT: return "DrmFormatModifierPropertiesList2EXT";
+ case StructureType::eValidationCacheCreateInfoEXT: return "ValidationCacheCreateInfoEXT";
+ case StructureType::eShaderModuleValidationCacheCreateInfoEXT: return "ShaderModuleValidationCacheCreateInfoEXT";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR: return "PhysicalDevicePortabilitySubsetFeaturesKHR";
+ case StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR: return "PhysicalDevicePortabilitySubsetPropertiesKHR";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV: return "PipelineViewportShadingRateImageStateCreateInfoNV";
+ case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV: return "PhysicalDeviceShadingRateImageFeaturesNV";
+ case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV: return "PhysicalDeviceShadingRateImagePropertiesNV";
+ case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV: return "PipelineViewportCoarseSampleOrderStateCreateInfoNV";
+ case StructureType::eRayTracingPipelineCreateInfoNV: return "RayTracingPipelineCreateInfoNV";
+ case StructureType::eAccelerationStructureCreateInfoNV: return "AccelerationStructureCreateInfoNV";
+ case StructureType::eGeometryNV: return "GeometryNV";
+ case StructureType::eGeometryTrianglesNV: return "GeometryTrianglesNV";
+ case StructureType::eGeometryAabbNV: return "GeometryAabbNV";
+ case StructureType::eBindAccelerationStructureMemoryInfoNV: return "BindAccelerationStructureMemoryInfoNV";
+ case StructureType::eWriteDescriptorSetAccelerationStructureNV: return "WriteDescriptorSetAccelerationStructureNV";
+ case StructureType::eAccelerationStructureMemoryRequirementsInfoNV: return "AccelerationStructureMemoryRequirementsInfoNV";
+ case StructureType::ePhysicalDeviceRayTracingPropertiesNV: return "PhysicalDeviceRayTracingPropertiesNV";
+ case StructureType::eRayTracingShaderGroupCreateInfoNV: return "RayTracingShaderGroupCreateInfoNV";
+ case StructureType::eAccelerationStructureInfoNV: return "AccelerationStructureInfoNV";
+ case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV: return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV";
+ case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV: return "PipelineRepresentativeFragmentTestStateCreateInfoNV";
+ case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT: return "PhysicalDeviceImageViewImageFormatInfoEXT";
+ case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT: return "FilterCubicImageViewImageFormatPropertiesEXT";
+ case StructureType::eImportMemoryHostPointerInfoEXT: return "ImportMemoryHostPointerInfoEXT";
+ case StructureType::eMemoryHostPointerPropertiesEXT: return "MemoryHostPointerPropertiesEXT";
+ case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT: return "PhysicalDeviceExternalMemoryHostPropertiesEXT";
+ case StructureType::ePhysicalDeviceShaderClockFeaturesKHR: return "PhysicalDeviceShaderClockFeaturesKHR";
+ case StructureType::ePipelineCompilerControlCreateInfoAMD: return "PipelineCompilerControlCreateInfoAMD";
+ case StructureType::ePhysicalDeviceShaderCorePropertiesAMD: return "PhysicalDeviceShaderCorePropertiesAMD";
+ case StructureType::eVideoDecodeH265CapabilitiesKHR: return "VideoDecodeH265CapabilitiesKHR";
+ case StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR: return "VideoDecodeH265SessionParametersCreateInfoKHR";
+ case StructureType::eVideoDecodeH265SessionParametersAddInfoKHR: return "VideoDecodeH265SessionParametersAddInfoKHR";
+ case StructureType::eVideoDecodeH265ProfileInfoKHR: return "VideoDecodeH265ProfileInfoKHR";
+ case StructureType::eVideoDecodeH265PictureInfoKHR: return "VideoDecodeH265PictureInfoKHR";
+ case StructureType::eVideoDecodeH265DpbSlotInfoKHR: return "VideoDecodeH265DpbSlotInfoKHR";
+ case StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR: return "DeviceQueueGlobalPriorityCreateInfoKHR";
+ case StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR: return "PhysicalDeviceGlobalPriorityQueryFeaturesKHR";
+ case StructureType::eQueueFamilyGlobalPriorityPropertiesKHR: return "QueueFamilyGlobalPriorityPropertiesKHR";
+ case StructureType::eDeviceMemoryOverallocationCreateInfoAMD: return "DeviceMemoryOverallocationCreateInfoAMD";
+ case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT: return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT";
+#if defined( VK_USE_PLATFORM_GGP )
+ case StructureType::ePresentFrameTokenGGP: return "PresentFrameTokenGGP";
+#endif /*VK_USE_PLATFORM_GGP*/
+ case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV: return "PhysicalDeviceComputeShaderDerivativesFeaturesNV";
+ case StructureType::ePhysicalDeviceMeshShaderFeaturesNV: return "PhysicalDeviceMeshShaderFeaturesNV";
+ case StructureType::ePhysicalDeviceMeshShaderPropertiesNV: return "PhysicalDeviceMeshShaderPropertiesNV";
+ case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV: return "PhysicalDeviceShaderImageFootprintFeaturesNV";
+ case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV: return "PipelineViewportExclusiveScissorStateCreateInfoNV";
+ case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV: return "PhysicalDeviceExclusiveScissorFeaturesNV";
+ case StructureType::eCheckpointDataNV: return "CheckpointDataNV";
+ case StructureType::eQueueFamilyCheckpointPropertiesNV: return "QueueFamilyCheckpointPropertiesNV";
+ case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL: return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL";
+ case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL: return "QueryPoolPerformanceQueryCreateInfoINTEL";
+ case StructureType::eInitializePerformanceApiInfoINTEL: return "InitializePerformanceApiInfoINTEL";
+ case StructureType::ePerformanceMarkerInfoINTEL: return "PerformanceMarkerInfoINTEL";
+ case StructureType::ePerformanceStreamMarkerInfoINTEL: return "PerformanceStreamMarkerInfoINTEL";
+ case StructureType::ePerformanceOverrideInfoINTEL: return "PerformanceOverrideInfoINTEL";
+ case StructureType::ePerformanceConfigurationAcquireInfoINTEL: return "PerformanceConfigurationAcquireInfoINTEL";
+ case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT: return "PhysicalDevicePciBusInfoPropertiesEXT";
+ case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD: return "DisplayNativeHdrSurfaceCapabilitiesAMD";
+ case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD: return "SwapchainDisplayNativeHdrCreateInfoAMD";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA: return "ImagepipeSurfaceCreateInfoFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ case StructureType::eMetalSurfaceCreateInfoEXT: return "MetalSurfaceCreateInfoEXT";
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+ case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT: return "PhysicalDeviceFragmentDensityMapFeaturesEXT";
+ case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT: return "PhysicalDeviceFragmentDensityMapPropertiesEXT";
+ case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT: return "RenderPassFragmentDensityMapCreateInfoEXT";
+ case StructureType::eFragmentShadingRateAttachmentInfoKHR: return "FragmentShadingRateAttachmentInfoKHR";
+ case StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR: return "PipelineFragmentShadingRateStateCreateInfoKHR";
+ case StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR: return "PhysicalDeviceFragmentShadingRatePropertiesKHR";
+ case StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR: return "PhysicalDeviceFragmentShadingRateFeaturesKHR";
+ case StructureType::ePhysicalDeviceFragmentShadingRateKHR: return "PhysicalDeviceFragmentShadingRateKHR";
+ case StructureType::ePhysicalDeviceShaderCoreProperties2AMD: return "PhysicalDeviceShaderCoreProperties2AMD";
+ case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD: return "PhysicalDeviceCoherentMemoryFeaturesAMD";
+ case StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT: return "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT";
+ case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT: return "PhysicalDeviceMemoryBudgetPropertiesEXT";
+ case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT: return "PhysicalDeviceMemoryPriorityFeaturesEXT";
+ case StructureType::eMemoryPriorityAllocateInfoEXT: return "MemoryPriorityAllocateInfoEXT";
+ case StructureType::eSurfaceProtectedCapabilitiesKHR: return "SurfaceProtectedCapabilitiesKHR";
+ case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV: return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV";
+ case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT: return "PhysicalDeviceBufferDeviceAddressFeaturesEXT";
+ case StructureType::eBufferDeviceAddressCreateInfoEXT: return "BufferDeviceAddressCreateInfoEXT";
+ case StructureType::eValidationFeaturesEXT: return "ValidationFeaturesEXT";
+ case StructureType::ePhysicalDevicePresentWaitFeaturesKHR: return "PhysicalDevicePresentWaitFeaturesKHR";
+ case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV: return "PhysicalDeviceCooperativeMatrixFeaturesNV";
+ case StructureType::eCooperativeMatrixPropertiesNV: return "CooperativeMatrixPropertiesNV";
+ case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV: return "PhysicalDeviceCooperativeMatrixPropertiesNV";
+ case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV: return "PhysicalDeviceCoverageReductionModeFeaturesNV";
+ case StructureType::ePipelineCoverageReductionStateCreateInfoNV: return "PipelineCoverageReductionStateCreateInfoNV";
+ case StructureType::eFramebufferMixedSamplesCombinationNV: return "FramebufferMixedSamplesCombinationNV";
+ case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT: return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT";
+ case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT: return "PhysicalDeviceYcbcrImageArraysFeaturesEXT";
+ case StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT: return "PhysicalDeviceProvokingVertexFeaturesEXT";
+ case StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT: return "PipelineRasterizationProvokingVertexStateCreateInfoEXT";
+ case StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT: return "PhysicalDeviceProvokingVertexPropertiesEXT";
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+ case StructureType::eSurfaceFullScreenExclusiveInfoEXT: return "SurfaceFullScreenExclusiveInfoEXT";
+ case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT: return "SurfaceCapabilitiesFullScreenExclusiveEXT";
+ case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT: return "SurfaceFullScreenExclusiveWin32InfoEXT";
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ case StructureType::eHeadlessSurfaceCreateInfoEXT: return "HeadlessSurfaceCreateInfoEXT";
+ case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT: return "PhysicalDeviceLineRasterizationFeaturesEXT";
+ case StructureType::ePipelineRasterizationLineStateCreateInfoEXT: return "PipelineRasterizationLineStateCreateInfoEXT";
+ case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT: return "PhysicalDeviceLineRasterizationPropertiesEXT";
+ case StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT: return "PhysicalDeviceShaderAtomicFloatFeaturesEXT";
+ case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT: return "PhysicalDeviceIndexTypeUint8FeaturesEXT";
+ case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT: return "PhysicalDeviceExtendedDynamicStateFeaturesEXT";
+ case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR: return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR";
+ case StructureType::ePipelineInfoKHR: return "PipelineInfoKHR";
+ case StructureType::ePipelineExecutablePropertiesKHR: return "PipelineExecutablePropertiesKHR";
+ case StructureType::ePipelineExecutableInfoKHR: return "PipelineExecutableInfoKHR";
+ case StructureType::ePipelineExecutableStatisticKHR: return "PipelineExecutableStatisticKHR";
+ case StructureType::ePipelineExecutableInternalRepresentationKHR: return "PipelineExecutableInternalRepresentationKHR";
+ case StructureType::ePhysicalDeviceHostImageCopyFeaturesEXT: return "PhysicalDeviceHostImageCopyFeaturesEXT";
+ case StructureType::ePhysicalDeviceHostImageCopyPropertiesEXT: return "PhysicalDeviceHostImageCopyPropertiesEXT";
+ case StructureType::eMemoryToImageCopyEXT: return "MemoryToImageCopyEXT";
+ case StructureType::eImageToMemoryCopyEXT: return "ImageToMemoryCopyEXT";
+ case StructureType::eCopyImageToMemoryInfoEXT: return "CopyImageToMemoryInfoEXT";
+ case StructureType::eCopyMemoryToImageInfoEXT: return "CopyMemoryToImageInfoEXT";
+ case StructureType::eHostImageLayoutTransitionInfoEXT: return "HostImageLayoutTransitionInfoEXT";
+ case StructureType::eCopyImageToImageInfoEXT: return "CopyImageToImageInfoEXT";
+ case StructureType::eSubresourceHostMemcpySizeEXT: return "SubresourceHostMemcpySizeEXT";
+ case StructureType::eHostImageCopyDevicePerformanceQueryEXT: return "HostImageCopyDevicePerformanceQueryEXT";
+ case StructureType::eMemoryMapInfoKHR: return "MemoryMapInfoKHR";
+ case StructureType::eMemoryUnmapInfoKHR: return "MemoryUnmapInfoKHR";
+ case StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT: return "PhysicalDeviceShaderAtomicFloat2FeaturesEXT";
+ case StructureType::eSurfacePresentModeEXT: return "SurfacePresentModeEXT";
+ case StructureType::eSurfacePresentScalingCapabilitiesEXT: return "SurfacePresentScalingCapabilitiesEXT";
+ case StructureType::eSurfacePresentModeCompatibilityEXT: return "SurfacePresentModeCompatibilityEXT";
+ case StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT: return "PhysicalDeviceSwapchainMaintenance1FeaturesEXT";
+ case StructureType::eSwapchainPresentFenceInfoEXT: return "SwapchainPresentFenceInfoEXT";
+ case StructureType::eSwapchainPresentModesCreateInfoEXT: return "SwapchainPresentModesCreateInfoEXT";
+ case StructureType::eSwapchainPresentModeInfoEXT: return "SwapchainPresentModeInfoEXT";
+ case StructureType::eSwapchainPresentScalingCreateInfoEXT: return "SwapchainPresentScalingCreateInfoEXT";
+ case StructureType::eReleaseSwapchainImagesInfoEXT: return "ReleaseSwapchainImagesInfoEXT";
+ case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV: return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV";
+ case StructureType::eGraphicsShaderGroupCreateInfoNV: return "GraphicsShaderGroupCreateInfoNV";
+ case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV: return "GraphicsPipelineShaderGroupsCreateInfoNV";
+ case StructureType::eIndirectCommandsLayoutTokenNV: return "IndirectCommandsLayoutTokenNV";
+ case StructureType::eIndirectCommandsLayoutCreateInfoNV: return "IndirectCommandsLayoutCreateInfoNV";
+ case StructureType::eGeneratedCommandsInfoNV: return "GeneratedCommandsInfoNV";
+ case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV: return "GeneratedCommandsMemoryRequirementsInfoNV";
+ case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV: return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV";
+ case StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV: return "PhysicalDeviceInheritedViewportScissorFeaturesNV";
+ case StructureType::eCommandBufferInheritanceViewportScissorInfoNV: return "CommandBufferInheritanceViewportScissorInfoNV";
+ case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT: return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT";
+ case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM: return "CommandBufferInheritanceRenderPassTransformInfoQCOM";
+ case StructureType::eRenderPassTransformBeginInfoQCOM: return "RenderPassTransformBeginInfoQCOM";
+ case StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT: return "PhysicalDeviceDepthBiasControlFeaturesEXT";
+ case StructureType::eDepthBiasInfoEXT: return "DepthBiasInfoEXT";
+ case StructureType::eDepthBiasRepresentationInfoEXT: return "DepthBiasRepresentationInfoEXT";
+ case StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT: return "PhysicalDeviceDeviceMemoryReportFeaturesEXT";
+ case StructureType::eDeviceDeviceMemoryReportCreateInfoEXT: return "DeviceDeviceMemoryReportCreateInfoEXT";
+ case StructureType::eDeviceMemoryReportCallbackDataEXT: return "DeviceMemoryReportCallbackDataEXT";
+ case StructureType::ePhysicalDeviceRobustness2FeaturesEXT: return "PhysicalDeviceRobustness2FeaturesEXT";
+ case StructureType::ePhysicalDeviceRobustness2PropertiesEXT: return "PhysicalDeviceRobustness2PropertiesEXT";
+ case StructureType::eSamplerCustomBorderColorCreateInfoEXT: return "SamplerCustomBorderColorCreateInfoEXT";
+ case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT: return "PhysicalDeviceCustomBorderColorPropertiesEXT";
+ case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT: return "PhysicalDeviceCustomBorderColorFeaturesEXT";
+ case StructureType::ePipelineLibraryCreateInfoKHR: return "PipelineLibraryCreateInfoKHR";
+ case StructureType::ePhysicalDevicePresentBarrierFeaturesNV: return "PhysicalDevicePresentBarrierFeaturesNV";
+ case StructureType::eSurfaceCapabilitiesPresentBarrierNV: return "SurfaceCapabilitiesPresentBarrierNV";
+ case StructureType::eSwapchainPresentBarrierCreateInfoNV: return "SwapchainPresentBarrierCreateInfoNV";
+ case StructureType::ePresentIdKHR: return "PresentIdKHR";
+ case StructureType::ePhysicalDevicePresentIdFeaturesKHR: return "PhysicalDevicePresentIdFeaturesKHR";
+ case StructureType::eVideoEncodeInfoKHR: return "VideoEncodeInfoKHR";
+ case StructureType::eVideoEncodeRateControlInfoKHR: return "VideoEncodeRateControlInfoKHR";
+ case StructureType::eVideoEncodeRateControlLayerInfoKHR: return "VideoEncodeRateControlLayerInfoKHR";
+ case StructureType::eVideoEncodeCapabilitiesKHR: return "VideoEncodeCapabilitiesKHR";
+ case StructureType::eVideoEncodeUsageInfoKHR: return "VideoEncodeUsageInfoKHR";
+ case StructureType::eQueryPoolVideoEncodeFeedbackCreateInfoKHR: return "QueryPoolVideoEncodeFeedbackCreateInfoKHR";
+ case StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR: return "PhysicalDeviceVideoEncodeQualityLevelInfoKHR";
+ case StructureType::eVideoEncodeQualityLevelPropertiesKHR: return "VideoEncodeQualityLevelPropertiesKHR";
+ case StructureType::eVideoEncodeQualityLevelInfoKHR: return "VideoEncodeQualityLevelInfoKHR";
+ case StructureType::eVideoEncodeSessionParametersGetInfoKHR: return "VideoEncodeSessionParametersGetInfoKHR";
+ case StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR: return "VideoEncodeSessionParametersFeedbackInfoKHR";
+ case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV: return "PhysicalDeviceDiagnosticsConfigFeaturesNV";
+ case StructureType::eDeviceDiagnosticsConfigCreateInfoNV: return "DeviceDiagnosticsConfigCreateInfoNV";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case StructureType::eCudaModuleCreateInfoNV: return "CudaModuleCreateInfoNV";
+ case StructureType::eCudaFunctionCreateInfoNV: return "CudaFunctionCreateInfoNV";
+ case StructureType::eCudaLaunchInfoNV: return "CudaLaunchInfoNV";
+ case StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV: return "PhysicalDeviceCudaKernelLaunchFeaturesNV";
+ case StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV: return "PhysicalDeviceCudaKernelLaunchPropertiesNV";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case StructureType::eQueryLowLatencySupportNV: return "QueryLowLatencySupportNV";
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+ case StructureType::eExportMetalObjectCreateInfoEXT: return "ExportMetalObjectCreateInfoEXT";
+ case StructureType::eExportMetalObjectsInfoEXT: return "ExportMetalObjectsInfoEXT";
+ case StructureType::eExportMetalDeviceInfoEXT: return "ExportMetalDeviceInfoEXT";
+ case StructureType::eExportMetalCommandQueueInfoEXT: return "ExportMetalCommandQueueInfoEXT";
+ case StructureType::eExportMetalBufferInfoEXT: return "ExportMetalBufferInfoEXT";
+ case StructureType::eImportMetalBufferInfoEXT: return "ImportMetalBufferInfoEXT";
+ case StructureType::eExportMetalTextureInfoEXT: return "ExportMetalTextureInfoEXT";
+ case StructureType::eImportMetalTextureInfoEXT: return "ImportMetalTextureInfoEXT";
+ case StructureType::eExportMetalIoSurfaceInfoEXT: return "ExportMetalIoSurfaceInfoEXT";
+ case StructureType::eImportMetalIoSurfaceInfoEXT: return "ImportMetalIoSurfaceInfoEXT";
+ case StructureType::eExportMetalSharedEventInfoEXT: return "ExportMetalSharedEventInfoEXT";
+ case StructureType::eImportMetalSharedEventInfoEXT: return "ImportMetalSharedEventInfoEXT";
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+ case StructureType::eQueueFamilyCheckpointProperties2NV: return "QueueFamilyCheckpointProperties2NV";
+ case StructureType::eCheckpointData2NV: return "CheckpointData2NV";
+ case StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT: return "PhysicalDeviceDescriptorBufferPropertiesEXT";
+ case StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT: return "PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT";
+ case StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT: return "PhysicalDeviceDescriptorBufferFeaturesEXT";
+ case StructureType::eDescriptorAddressInfoEXT: return "DescriptorAddressInfoEXT";
+ case StructureType::eDescriptorGetInfoEXT: return "DescriptorGetInfoEXT";
+ case StructureType::eBufferCaptureDescriptorDataInfoEXT: return "BufferCaptureDescriptorDataInfoEXT";
+ case StructureType::eImageCaptureDescriptorDataInfoEXT: return "ImageCaptureDescriptorDataInfoEXT";
+ case StructureType::eImageViewCaptureDescriptorDataInfoEXT: return "ImageViewCaptureDescriptorDataInfoEXT";
+ case StructureType::eSamplerCaptureDescriptorDataInfoEXT: return "SamplerCaptureDescriptorDataInfoEXT";
+ case StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT: return "OpaqueCaptureDescriptorDataCreateInfoEXT";
+ case StructureType::eDescriptorBufferBindingInfoEXT: return "DescriptorBufferBindingInfoEXT";
+ case StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT: return "DescriptorBufferBindingPushDescriptorBufferHandleEXT";
+ case StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT: return "AccelerationStructureCaptureDescriptorDataInfoEXT";
+ case StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT: return "PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT";
+ case StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT: return "PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT";
+ case StructureType::eGraphicsPipelineLibraryCreateInfoEXT: return "GraphicsPipelineLibraryCreateInfoEXT";
+ case StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD: return "PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD";
+ case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR: return "PhysicalDeviceFragmentShaderBarycentricFeaturesKHR";
+ case StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR: return "PhysicalDeviceFragmentShaderBarycentricPropertiesKHR";
+ case StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR: return "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR";
+ case StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV: return "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV";
+ case StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV: return "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV";
+ case StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV: return "PipelineFragmentShadingRateEnumStateCreateInfoNV";
+ case StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV: return "AccelerationStructureGeometryMotionTrianglesDataNV";
+ case StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV: return "PhysicalDeviceRayTracingMotionBlurFeaturesNV";
+ case StructureType::eAccelerationStructureMotionInfoNV: return "AccelerationStructureMotionInfoNV";
+ case StructureType::ePhysicalDeviceMeshShaderFeaturesEXT: return "PhysicalDeviceMeshShaderFeaturesEXT";
+ case StructureType::ePhysicalDeviceMeshShaderPropertiesEXT: return "PhysicalDeviceMeshShaderPropertiesEXT";
+ case StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT: return "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT";
+ case StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT: return "PhysicalDeviceFragmentDensityMap2FeaturesEXT";
+ case StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT: return "PhysicalDeviceFragmentDensityMap2PropertiesEXT";
+ case StructureType::eCopyCommandTransformInfoQCOM: return "CopyCommandTransformInfoQCOM";
+ case StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR: return "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR";
+ case StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT: return "PhysicalDeviceImageCompressionControlFeaturesEXT";
+ case StructureType::eImageCompressionControlEXT: return "ImageCompressionControlEXT";
+ case StructureType::eImageCompressionPropertiesEXT: return "ImageCompressionPropertiesEXT";
+ case StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT: return "PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT";
+ case StructureType::ePhysicalDevice4444FormatsFeaturesEXT: return "PhysicalDevice4444FormatsFeaturesEXT";
+ case StructureType::ePhysicalDeviceFaultFeaturesEXT: return "PhysicalDeviceFaultFeaturesEXT";
+ case StructureType::eDeviceFaultCountsEXT: return "DeviceFaultCountsEXT";
+ case StructureType::eDeviceFaultInfoEXT: return "DeviceFaultInfoEXT";
+ case StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT: return "PhysicalDeviceRgba10X6FormatsFeaturesEXT";
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+ case StructureType::eDirectfbSurfaceCreateInfoEXT: return "DirectfbSurfaceCreateInfoEXT";
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+ case StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT: return "PhysicalDeviceVertexInputDynamicStateFeaturesEXT";
+ case StructureType::eVertexInputBindingDescription2EXT: return "VertexInputBindingDescription2EXT";
+ case StructureType::eVertexInputAttributeDescription2EXT: return "VertexInputAttributeDescription2EXT";
+ case StructureType::ePhysicalDeviceDrmPropertiesEXT: return "PhysicalDeviceDrmPropertiesEXT";
+ case StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT: return "PhysicalDeviceAddressBindingReportFeaturesEXT";
+ case StructureType::eDeviceAddressBindingCallbackDataEXT: return "DeviceAddressBindingCallbackDataEXT";
+ case StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT: return "PhysicalDeviceDepthClipControlFeaturesEXT";
+ case StructureType::ePipelineViewportDepthClipControlCreateInfoEXT: return "PipelineViewportDepthClipControlCreateInfoEXT";
+ case StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT: return "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT";
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+ case StructureType::eImportMemoryZirconHandleInfoFUCHSIA: return "ImportMemoryZirconHandleInfoFUCHSIA";
+ case StructureType::eMemoryZirconHandlePropertiesFUCHSIA: return "MemoryZirconHandlePropertiesFUCHSIA";
+ case StructureType::eMemoryGetZirconHandleInfoFUCHSIA: return "MemoryGetZirconHandleInfoFUCHSIA";
+ case StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA: return "ImportSemaphoreZirconHandleInfoFUCHSIA";
+ case StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA: return "SemaphoreGetZirconHandleInfoFUCHSIA";
+ case StructureType::eBufferCollectionCreateInfoFUCHSIA: return "BufferCollectionCreateInfoFUCHSIA";
+ case StructureType::eImportMemoryBufferCollectionFUCHSIA: return "ImportMemoryBufferCollectionFUCHSIA";
+ case StructureType::eBufferCollectionImageCreateInfoFUCHSIA: return "BufferCollectionImageCreateInfoFUCHSIA";
+ case StructureType::eBufferCollectionPropertiesFUCHSIA: return "BufferCollectionPropertiesFUCHSIA";
+ case StructureType::eBufferConstraintsInfoFUCHSIA: return "BufferConstraintsInfoFUCHSIA";
+ case StructureType::eBufferCollectionBufferCreateInfoFUCHSIA: return "BufferCollectionBufferCreateInfoFUCHSIA";
+ case StructureType::eImageConstraintsInfoFUCHSIA: return "ImageConstraintsInfoFUCHSIA";
+ case StructureType::eImageFormatConstraintsInfoFUCHSIA: return "ImageFormatConstraintsInfoFUCHSIA";
+ case StructureType::eSysmemColorSpaceFUCHSIA: return "SysmemColorSpaceFUCHSIA";
+ case StructureType::eBufferCollectionConstraintsInfoFUCHSIA: return "BufferCollectionConstraintsInfoFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+ case StructureType::eSubpassShadingPipelineCreateInfoHUAWEI: return "SubpassShadingPipelineCreateInfoHUAWEI";
+ case StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI: return "PhysicalDeviceSubpassShadingFeaturesHUAWEI";
+ case StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI: return "PhysicalDeviceSubpassShadingPropertiesHUAWEI";
+ case StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI: return "PhysicalDeviceInvocationMaskFeaturesHUAWEI";
+ case StructureType::eMemoryGetRemoteAddressInfoNV: return "MemoryGetRemoteAddressInfoNV";
+ case StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV: return "PhysicalDeviceExternalMemoryRdmaFeaturesNV";
+ case StructureType::ePipelinePropertiesIdentifierEXT: return "PipelinePropertiesIdentifierEXT";
+ case StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT: return "PhysicalDevicePipelinePropertiesFeaturesEXT";
+ case StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT: return "PhysicalDeviceFrameBoundaryFeaturesEXT";
+ case StructureType::eFrameBoundaryEXT: return "FrameBoundaryEXT";
+ case StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT: return "PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT";
+ case StructureType::eSubpassResolvePerformanceQueryEXT: return "SubpassResolvePerformanceQueryEXT";
+ case StructureType::eMultisampledRenderToSingleSampledInfoEXT: return "MultisampledRenderToSingleSampledInfoEXT";
+ case StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT: return "PhysicalDeviceExtendedDynamicState2FeaturesEXT";
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+ case StructureType::eScreenSurfaceCreateInfoQNX: return "ScreenSurfaceCreateInfoQNX";
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+ case StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT: return "PhysicalDeviceColorWriteEnableFeaturesEXT";
+ case StructureType::ePipelineColorWriteCreateInfoEXT: return "PipelineColorWriteCreateInfoEXT";
+ case StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT: return "PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT";
+ case StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR: return "PhysicalDeviceRayTracingMaintenance1FeaturesKHR";
+ case StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT: return "PhysicalDeviceImageViewMinLodFeaturesEXT";
+ case StructureType::eImageViewMinLodCreateInfoEXT: return "ImageViewMinLodCreateInfoEXT";
+ case StructureType::ePhysicalDeviceMultiDrawFeaturesEXT: return "PhysicalDeviceMultiDrawFeaturesEXT";
+ case StructureType::ePhysicalDeviceMultiDrawPropertiesEXT: return "PhysicalDeviceMultiDrawPropertiesEXT";
+ case StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT: return "PhysicalDeviceImage2DViewOf3DFeaturesEXT";
+ case StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT: return "PhysicalDeviceShaderTileImageFeaturesEXT";
+ case StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT: return "PhysicalDeviceShaderTileImagePropertiesEXT";
+ case StructureType::eMicromapBuildInfoEXT: return "MicromapBuildInfoEXT";
+ case StructureType::eMicromapVersionInfoEXT: return "MicromapVersionInfoEXT";
+ case StructureType::eCopyMicromapInfoEXT: return "CopyMicromapInfoEXT";
+ case StructureType::eCopyMicromapToMemoryInfoEXT: return "CopyMicromapToMemoryInfoEXT";
+ case StructureType::eCopyMemoryToMicromapInfoEXT: return "CopyMemoryToMicromapInfoEXT";
+ case StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT: return "PhysicalDeviceOpacityMicromapFeaturesEXT";
+ case StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT: return "PhysicalDeviceOpacityMicromapPropertiesEXT";
+ case StructureType::eMicromapCreateInfoEXT: return "MicromapCreateInfoEXT";
+ case StructureType::eMicromapBuildSizesInfoEXT: return "MicromapBuildSizesInfoEXT";
+ case StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT: return "AccelerationStructureTrianglesOpacityMicromapEXT";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+ case StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV: return "PhysicalDeviceDisplacementMicromapFeaturesNV";
+ case StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV: return "PhysicalDeviceDisplacementMicromapPropertiesNV";
+ case StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV: return "AccelerationStructureTrianglesDisplacementMicromapNV";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+ case StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI: return "PhysicalDeviceClusterCullingShaderFeaturesHUAWEI";
+ case StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI: return "PhysicalDeviceClusterCullingShaderPropertiesHUAWEI";
+ case StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI: return "PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI";
+ case StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT: return "PhysicalDeviceBorderColorSwizzleFeaturesEXT";
+ case StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT: return "SamplerBorderColorComponentMappingCreateInfoEXT";
+ case StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT: return "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT";
+ case StructureType::ePhysicalDeviceShaderCorePropertiesARM: return "PhysicalDeviceShaderCorePropertiesARM";
+ case StructureType::eDeviceQueueShaderCoreControlCreateInfoARM: return "DeviceQueueShaderCoreControlCreateInfoARM";
+ case StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM: return "PhysicalDeviceSchedulingControlsFeaturesARM";
+ case StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM: return "PhysicalDeviceSchedulingControlsPropertiesARM";
+ case StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT: return "PhysicalDeviceImageSlicedViewOf3DFeaturesEXT";
+ case StructureType::eImageViewSlicedCreateInfoEXT: return "ImageViewSlicedCreateInfoEXT";
+ case StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE: return "PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE";
+ case StructureType::eDescriptorSetBindingReferenceVALVE: return "DescriptorSetBindingReferenceVALVE";
+ case StructureType::eDescriptorSetLayoutHostMappingInfoVALVE: return "DescriptorSetLayoutHostMappingInfoVALVE";
+ case StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT: return "PhysicalDeviceDepthClampZeroOneFeaturesEXT";
+ case StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT: return "PhysicalDeviceNonSeamlessCubeMapFeaturesEXT";
+ case StructureType::ePhysicalDeviceRenderPassStripedFeaturesARM: return "PhysicalDeviceRenderPassStripedFeaturesARM";
+ case StructureType::ePhysicalDeviceRenderPassStripedPropertiesARM: return "PhysicalDeviceRenderPassStripedPropertiesARM";
+ case StructureType::eRenderPassStripeBeginInfoARM: return "RenderPassStripeBeginInfoARM";
+ case StructureType::eRenderPassStripeInfoARM: return "RenderPassStripeInfoARM";
+ case StructureType::eRenderPassStripeSubmitInfoARM: return "RenderPassStripeSubmitInfoARM";
+ case StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM: return "PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM";
+ case StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM: return "PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM";
+ case StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM: return "SubpassFragmentDensityMapOffsetEndInfoQCOM";
+ case StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV: return "PhysicalDeviceCopyMemoryIndirectFeaturesNV";
+ case StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV: return "PhysicalDeviceCopyMemoryIndirectPropertiesNV";
+ case StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV: return "PhysicalDeviceMemoryDecompressionFeaturesNV";
+ case StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV: return "PhysicalDeviceMemoryDecompressionPropertiesNV";
+ case StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV: return "PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV";
+ case StructureType::eComputePipelineIndirectBufferInfoNV: return "ComputePipelineIndirectBufferInfoNV";
+ case StructureType::ePipelineIndirectDeviceAddressInfoNV: return "PipelineIndirectDeviceAddressInfoNV";
+ case StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV: return "PhysicalDeviceLinearColorAttachmentFeaturesNV";
+ case StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT: return "PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT";
+ case StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM: return "PhysicalDeviceImageProcessingFeaturesQCOM";
+ case StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM: return "PhysicalDeviceImageProcessingPropertiesQCOM";
+ case StructureType::eImageViewSampleWeightCreateInfoQCOM: return "ImageViewSampleWeightCreateInfoQCOM";
+ case StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT: return "PhysicalDeviceNestedCommandBufferFeaturesEXT";
+ case StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT: return "PhysicalDeviceNestedCommandBufferPropertiesEXT";
+ case StructureType::eExternalMemoryAcquireUnmodifiedEXT: return "ExternalMemoryAcquireUnmodifiedEXT";
+ case StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT: return "PhysicalDeviceExtendedDynamicState3FeaturesEXT";
+ case StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT: return "PhysicalDeviceExtendedDynamicState3PropertiesEXT";
+ case StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT: return "PhysicalDeviceSubpassMergeFeedbackFeaturesEXT";
+ case StructureType::eRenderPassCreationControlEXT: return "RenderPassCreationControlEXT";
+ case StructureType::eRenderPassCreationFeedbackCreateInfoEXT: return "RenderPassCreationFeedbackCreateInfoEXT";
+ case StructureType::eRenderPassSubpassFeedbackCreateInfoEXT: return "RenderPassSubpassFeedbackCreateInfoEXT";
+ case StructureType::eDirectDriverLoadingInfoLUNARG: return "DirectDriverLoadingInfoLUNARG";
+ case StructureType::eDirectDriverLoadingListLUNARG: return "DirectDriverLoadingListLUNARG";
+ case StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT: return "PhysicalDeviceShaderModuleIdentifierFeaturesEXT";
+ case StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT: return "PhysicalDeviceShaderModuleIdentifierPropertiesEXT";
+ case StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT: return "PipelineShaderStageModuleIdentifierCreateInfoEXT";
+ case StructureType::eShaderModuleIdentifierEXT: return "ShaderModuleIdentifierEXT";
+ case StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT: return "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT";
+ case StructureType::ePhysicalDeviceOpticalFlowFeaturesNV: return "PhysicalDeviceOpticalFlowFeaturesNV";
+ case StructureType::ePhysicalDeviceOpticalFlowPropertiesNV: return "PhysicalDeviceOpticalFlowPropertiesNV";
+ case StructureType::eOpticalFlowImageFormatInfoNV: return "OpticalFlowImageFormatInfoNV";
+ case StructureType::eOpticalFlowImageFormatPropertiesNV: return "OpticalFlowImageFormatPropertiesNV";
+ case StructureType::eOpticalFlowSessionCreateInfoNV: return "OpticalFlowSessionCreateInfoNV";
+ case StructureType::eOpticalFlowExecuteInfoNV: return "OpticalFlowExecuteInfoNV";
+ case StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV: return "OpticalFlowSessionCreatePrivateDataInfoNV";
+ case StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT: return "PhysicalDeviceLegacyDitheringFeaturesEXT";
+ case StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT: return "PhysicalDevicePipelineProtectedAccessFeaturesEXT";
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+ case StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID: return "PhysicalDeviceExternalFormatResolveFeaturesANDROID";
+ case StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID: return "PhysicalDeviceExternalFormatResolvePropertiesANDROID";
+ case StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID: return "AndroidHardwareBufferFormatResolvePropertiesANDROID";
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ case StructureType::ePhysicalDeviceMaintenance5FeaturesKHR: return "PhysicalDeviceMaintenance5FeaturesKHR";
+ case StructureType::ePhysicalDeviceMaintenance5PropertiesKHR: return "PhysicalDeviceMaintenance5PropertiesKHR";
+ case StructureType::eRenderingAreaInfoKHR: return "RenderingAreaInfoKHR";
+ case StructureType::eDeviceImageSubresourceInfoKHR: return "DeviceImageSubresourceInfoKHR";
+ case StructureType::eSubresourceLayout2KHR: return "SubresourceLayout2KHR";
+ case StructureType::eImageSubresource2KHR: return "ImageSubresource2KHR";
+ case StructureType::ePipelineCreateFlags2CreateInfoKHR: return "PipelineCreateFlags2CreateInfoKHR";
+ case StructureType::eBufferUsageFlags2CreateInfoKHR: return "BufferUsageFlags2CreateInfoKHR";
+ case StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR: return "PhysicalDeviceRayTracingPositionFetchFeaturesKHR";
+ case StructureType::ePhysicalDeviceShaderObjectFeaturesEXT: return "PhysicalDeviceShaderObjectFeaturesEXT";
+ case StructureType::ePhysicalDeviceShaderObjectPropertiesEXT: return "PhysicalDeviceShaderObjectPropertiesEXT";
+ case StructureType::eShaderCreateInfoEXT: return "ShaderCreateInfoEXT";
+ case StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM: return "PhysicalDeviceTilePropertiesFeaturesQCOM";
+ case StructureType::eTilePropertiesQCOM: return "TilePropertiesQCOM";
+ case StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC: return "PhysicalDeviceAmigoProfilingFeaturesSEC";
+ case StructureType::eAmigoProfilingSubmitInfoSEC: return "AmigoProfilingSubmitInfoSEC";
+ case StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM: return "PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM";
+ case StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV: return "PhysicalDeviceRayTracingInvocationReorderFeaturesNV";
+ case StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV: return "PhysicalDeviceRayTracingInvocationReorderPropertiesNV";
+ case StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV: return "PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV";
+ case StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV: return "PhysicalDeviceExtendedSparseAddressSpacePropertiesNV";
+ case StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT: return "PhysicalDeviceMutableDescriptorTypeFeaturesEXT";
+ case StructureType::eMutableDescriptorTypeCreateInfoEXT: return "MutableDescriptorTypeCreateInfoEXT";
+ case StructureType::eLayerSettingsCreateInfoEXT: return "LayerSettingsCreateInfoEXT";
+ case StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM: return "PhysicalDeviceShaderCoreBuiltinsFeaturesARM";
+ case StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM: return "PhysicalDeviceShaderCoreBuiltinsPropertiesARM";
+ case StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT: return "PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT";
+ case StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT: return "PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT";
+ case StructureType::eLatencySleepModeInfoNV: return "LatencySleepModeInfoNV";
+ case StructureType::eLatencySleepInfoNV: return "LatencySleepInfoNV";
+ case StructureType::eSetLatencyMarkerInfoNV: return "SetLatencyMarkerInfoNV";
+ case StructureType::eGetLatencyMarkerInfoNV: return "GetLatencyMarkerInfoNV";
+ case StructureType::eLatencyTimingsFrameReportNV: return "LatencyTimingsFrameReportNV";
+ case StructureType::eLatencySubmissionPresentIdNV: return "LatencySubmissionPresentIdNV";
+ case StructureType::eOutOfBandQueueTypeInfoNV: return "OutOfBandQueueTypeInfoNV";
+ case StructureType::eSwapchainLatencyCreateInfoNV: return "SwapchainLatencyCreateInfoNV";
+ case StructureType::eLatencySurfaceCapabilitiesNV: return "LatencySurfaceCapabilitiesNV";
+ case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR: return "PhysicalDeviceCooperativeMatrixFeaturesKHR";
+ case StructureType::eCooperativeMatrixPropertiesKHR: return "CooperativeMatrixPropertiesKHR";
+ case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR: return "PhysicalDeviceCooperativeMatrixPropertiesKHR";
+ case StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM: return "PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM";
+ case StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM: return "MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM";
+ case StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR: return "PhysicalDeviceVideoMaintenance1FeaturesKHR";
+ case StructureType::eVideoInlineQueryInfoKHR: return "VideoInlineQueryInfoKHR";
+ case StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV: return "PhysicalDevicePerStageDescriptorSetFeaturesNV";
+ case StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM: return "PhysicalDeviceImageProcessing2FeaturesQCOM";
+ case StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM: return "PhysicalDeviceImageProcessing2PropertiesQCOM";
+ case StructureType::eSamplerBlockMatchWindowCreateInfoQCOM: return "SamplerBlockMatchWindowCreateInfoQCOM";
+ case StructureType::eSamplerCubicWeightsCreateInfoQCOM: return "SamplerCubicWeightsCreateInfoQCOM";
+ case StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM: return "PhysicalDeviceCubicWeightsFeaturesQCOM";
+ case StructureType::eBlitImageCubicWeightsInfoQCOM: return "BlitImageCubicWeightsInfoQCOM";
+ case StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM: return "PhysicalDeviceYcbcrDegammaFeaturesQCOM";
+ case StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM: return "SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM";
+ case StructureType::ePhysicalDeviceCubicClampFeaturesQCOM: return "PhysicalDeviceCubicClampFeaturesQCOM";
+ case StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT: return "PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT";
+ case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesKHR: return "PhysicalDeviceVertexAttributeDivisorPropertiesKHR";
+ case StructureType::ePipelineVertexInputDivisorStateCreateInfoKHR: return "PipelineVertexInputDivisorStateCreateInfoKHR";
"PipelineVertexInputDivisorStateCreateInfoKHR"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesKHR: return "PhysicalDeviceVertexAttributeDivisorFeaturesKHR"; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + case StructureType::eScreenBufferPropertiesQNX: return "ScreenBufferPropertiesQNX"; + case StructureType::eScreenBufferFormatPropertiesQNX: return "ScreenBufferFormatPropertiesQNX"; + case StructureType::eImportScreenBufferInfoQNX: return "ImportScreenBufferInfoQNX"; + case StructureType::eExternalFormatQNX: return "ExternalFormatQNX"; + case StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX: return "PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX"; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + case StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT: return "PhysicalDeviceLayeredDriverPropertiesMSFT"; + case StructureType::eCalibratedTimestampInfoKHR: return "CalibratedTimestampInfoKHR"; + case StructureType::ePhysicalDeviceMaintenance6FeaturesKHR: return "PhysicalDeviceMaintenance6FeaturesKHR"; + case StructureType::ePhysicalDeviceMaintenance6PropertiesKHR: return "PhysicalDeviceMaintenance6PropertiesKHR"; + case StructureType::eBindMemoryStatusKHR: return "BindMemoryStatusKHR"; + case StructureType::eBindDescriptorSetsInfoKHR: return "BindDescriptorSetsInfoKHR"; + case StructureType::ePushConstantsInfoKHR: return "PushConstantsInfoKHR"; + case StructureType::ePushDescriptorSetInfoKHR: return "PushDescriptorSetInfoKHR"; + case StructureType::ePushDescriptorSetWithTemplateInfoKHR: return "PushDescriptorSetWithTemplateInfoKHR"; + case StructureType::eSetDescriptorBufferOffsetsInfoEXT: return "SetDescriptorBufferOffsetsInfoEXT"; + case StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT: return "BindDescriptorBufferEmbeddedSamplersInfoEXT"; + case StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV: return "PhysicalDeviceDescriptorPoolOverallocationFeaturesNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheHeaderVersion value ) + { + switch ( value ) + { + case PipelineCacheHeaderVersion::eOne: return "One"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ObjectType value ) + { + switch ( value ) + { + case ObjectType::eUnknown: return "Unknown"; + case ObjectType::eInstance: return "Instance"; + case ObjectType::ePhysicalDevice: return "PhysicalDevice"; + case ObjectType::eDevice: return "Device"; + case ObjectType::eQueue: return "Queue"; + case ObjectType::eSemaphore: return "Semaphore"; + case ObjectType::eCommandBuffer: return "CommandBuffer"; + case ObjectType::eFence: return "Fence"; + case ObjectType::eDeviceMemory: return "DeviceMemory"; + case ObjectType::eBuffer: return "Buffer"; + case ObjectType::eImage: return "Image"; + case ObjectType::eEvent: return "Event"; + case ObjectType::eQueryPool: return "QueryPool"; + case ObjectType::eBufferView: return "BufferView"; + case ObjectType::eImageView: return "ImageView"; + case ObjectType::eShaderModule: return "ShaderModule"; + case ObjectType::ePipelineCache: return "PipelineCache"; + case ObjectType::ePipelineLayout: return "PipelineLayout"; + case ObjectType::eRenderPass: return "RenderPass"; + case ObjectType::ePipeline: return "Pipeline"; + case ObjectType::eDescriptorSetLayout: return "DescriptorSetLayout"; + case ObjectType::eSampler: return 
"Sampler"; + case ObjectType::eDescriptorPool: return "DescriptorPool"; + case ObjectType::eDescriptorSet: return "DescriptorSet"; + case ObjectType::eFramebuffer: return "Framebuffer"; + case ObjectType::eCommandPool: return "CommandPool"; + case ObjectType::eSamplerYcbcrConversion: return "SamplerYcbcrConversion"; + case ObjectType::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate"; + case ObjectType::ePrivateDataSlot: return "PrivateDataSlot"; + case ObjectType::eSurfaceKHR: return "SurfaceKHR"; + case ObjectType::eSwapchainKHR: return "SwapchainKHR"; + case ObjectType::eDisplayKHR: return "DisplayKHR"; + case ObjectType::eDisplayModeKHR: return "DisplayModeKHR"; + case ObjectType::eDebugReportCallbackEXT: return "DebugReportCallbackEXT"; + case ObjectType::eVideoSessionKHR: return "VideoSessionKHR"; + case ObjectType::eVideoSessionParametersKHR: return "VideoSessionParametersKHR"; + case ObjectType::eCuModuleNVX: return "CuModuleNVX"; + case ObjectType::eCuFunctionNVX: return "CuFunctionNVX"; + case ObjectType::eDebugUtilsMessengerEXT: return "DebugUtilsMessengerEXT"; + case ObjectType::eAccelerationStructureKHR: return "AccelerationStructureKHR"; + case ObjectType::eValidationCacheEXT: return "ValidationCacheEXT"; + case ObjectType::eAccelerationStructureNV: return "AccelerationStructureNV"; + case ObjectType::ePerformanceConfigurationINTEL: return "PerformanceConfigurationINTEL"; + case ObjectType::eDeferredOperationKHR: return "DeferredOperationKHR"; + case ObjectType::eIndirectCommandsLayoutNV: return "IndirectCommandsLayoutNV"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ObjectType::eCudaModuleNV: return "CudaModuleNV"; + case ObjectType::eCudaFunctionNV: return "CudaFunctionNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case ObjectType::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + case ObjectType::eMicromapEXT: return "MicromapEXT"; + case ObjectType::eOpticalFlowSessionNV: return "OpticalFlowSessionNV"; + case ObjectType::eShaderEXT: return "ShaderEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VendorId value ) + { + switch ( value ) + { + case VendorId::eVIV: return "VIV"; + case VendorId::eVSI: return "VSI"; + case VendorId::eKazan: return "Kazan"; + case VendorId::eCodeplay: return "Codeplay"; + case VendorId::eMESA: return "MESA"; + case VendorId::ePocl: return "Pocl"; + case VendorId::eMobileye: return "Mobileye"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( Format value ) + { + switch ( value ) + { + case Format::eUndefined: return "Undefined"; + case Format::eR4G4UnormPack8: return "R4G4UnormPack8"; + case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16"; + case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16"; + case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16"; + case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16"; + case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16"; + case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16"; + case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16"; + case Format::eR8Unorm: return "R8Unorm"; + case Format::eR8Snorm: return "R8Snorm"; + case Format::eR8Uscaled: return "R8Uscaled"; + case Format::eR8Sscaled: return "R8Sscaled"; + case Format::eR8Uint: 
return "R8Uint"; + case Format::eR8Sint: return "R8Sint"; + case Format::eR8Srgb: return "R8Srgb"; + case Format::eR8G8Unorm: return "R8G8Unorm"; + case Format::eR8G8Snorm: return "R8G8Snorm"; + case Format::eR8G8Uscaled: return "R8G8Uscaled"; + case Format::eR8G8Sscaled: return "R8G8Sscaled"; + case Format::eR8G8Uint: return "R8G8Uint"; + case Format::eR8G8Sint: return "R8G8Sint"; + case Format::eR8G8Srgb: return "R8G8Srgb"; + case Format::eR8G8B8Unorm: return "R8G8B8Unorm"; + case Format::eR8G8B8Snorm: return "R8G8B8Snorm"; + case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled"; + case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled"; + case Format::eR8G8B8Uint: return "R8G8B8Uint"; + case Format::eR8G8B8Sint: return "R8G8B8Sint"; + case Format::eR8G8B8Srgb: return "R8G8B8Srgb"; + case Format::eB8G8R8Unorm: return "B8G8R8Unorm"; + case Format::eB8G8R8Snorm: return "B8G8R8Snorm"; + case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled"; + case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled"; + case Format::eB8G8R8Uint: return "B8G8R8Uint"; + case Format::eB8G8R8Sint: return "B8G8R8Sint"; + case Format::eB8G8R8Srgb: return "B8G8R8Srgb"; + case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm"; + case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm"; + case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled"; + case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled"; + case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint"; + case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint"; + case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb"; + case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm"; + case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm"; + case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled"; + case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled"; + case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint"; + case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint"; + case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb"; + case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32"; + case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32"; + case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32"; + case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32"; + case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32"; + case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32"; + case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32"; + case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32"; + case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32"; + case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32"; + case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32"; + case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32"; + case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32"; + case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32"; + case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32"; + case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32"; + case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32"; + case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32"; + case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32"; + case Format::eR16Unorm: return "R16Unorm"; + case Format::eR16Snorm: return "R16Snorm"; + case Format::eR16Uscaled: return "R16Uscaled"; + case Format::eR16Sscaled: return "R16Sscaled"; + case Format::eR16Uint: return "R16Uint"; + case Format::eR16Sint: return "R16Sint"; + 
case Format::eR16Sfloat: return "R16Sfloat"; + case Format::eR16G16Unorm: return "R16G16Unorm"; + case Format::eR16G16Snorm: return "R16G16Snorm"; + case Format::eR16G16Uscaled: return "R16G16Uscaled"; + case Format::eR16G16Sscaled: return "R16G16Sscaled"; + case Format::eR16G16Uint: return "R16G16Uint"; + case Format::eR16G16Sint: return "R16G16Sint"; + case Format::eR16G16Sfloat: return "R16G16Sfloat"; + case Format::eR16G16B16Unorm: return "R16G16B16Unorm"; + case Format::eR16G16B16Snorm: return "R16G16B16Snorm"; + case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled"; + case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled"; + case Format::eR16G16B16Uint: return "R16G16B16Uint"; + case Format::eR16G16B16Sint: return "R16G16B16Sint"; + case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat"; + case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm"; + case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm"; + case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled"; + case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled"; + case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint"; + case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint"; + case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat"; + case Format::eR32Uint: return "R32Uint"; + case Format::eR32Sint: return "R32Sint"; + case Format::eR32Sfloat: return "R32Sfloat"; + case Format::eR32G32Uint: return "R32G32Uint"; + case Format::eR32G32Sint: return "R32G32Sint"; + case Format::eR32G32Sfloat: return "R32G32Sfloat"; + case Format::eR32G32B32Uint: return "R32G32B32Uint"; + case Format::eR32G32B32Sint: return "R32G32B32Sint"; + case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat"; + case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint"; + case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint"; + case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat"; + case Format::eR64Uint: return "R64Uint"; + case Format::eR64Sint: return "R64Sint"; + case Format::eR64Sfloat: return "R64Sfloat"; + case Format::eR64G64Uint: return "R64G64Uint"; + case Format::eR64G64Sint: return "R64G64Sint"; + case Format::eR64G64Sfloat: return "R64G64Sfloat"; + case Format::eR64G64B64Uint: return "R64G64B64Uint"; + case Format::eR64G64B64Sint: return "R64G64B64Sint"; + case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat"; + case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint"; + case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint"; + case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat"; + case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32"; + case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32"; + case Format::eD16Unorm: return "D16Unorm"; + case Format::eX8D24UnormPack32: return "X8D24UnormPack32"; + case Format::eD32Sfloat: return "D32Sfloat"; + case Format::eS8Uint: return "S8Uint"; + case Format::eD16UnormS8Uint: return "D16UnormS8Uint"; + case Format::eD24UnormS8Uint: return "D24UnormS8Uint"; + case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint"; + case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock"; + case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock"; + case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock"; + case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock"; + case Format::eBc2UnormBlock: return "Bc2UnormBlock"; + case Format::eBc2SrgbBlock: return "Bc2SrgbBlock"; + case Format::eBc3UnormBlock: return "Bc3UnormBlock"; + case Format::eBc3SrgbBlock: return "Bc3SrgbBlock"; + case Format::eBc4UnormBlock: return 
"Bc4UnormBlock"; + case Format::eBc4SnormBlock: return "Bc4SnormBlock"; + case Format::eBc5UnormBlock: return "Bc5UnormBlock"; + case Format::eBc5SnormBlock: return "Bc5SnormBlock"; + case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock"; + case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock"; + case Format::eBc7UnormBlock: return "Bc7UnormBlock"; + case Format::eBc7SrgbBlock: return "Bc7SrgbBlock"; + case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock"; + case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock"; + case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock"; + case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock"; + case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock"; + case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock"; + case Format::eEacR11UnormBlock: return "EacR11UnormBlock"; + case Format::eEacR11SnormBlock: return "EacR11SnormBlock"; + case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock"; + case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock"; + case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock"; + case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock"; + case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock"; + case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock"; + case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock"; + case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock"; + case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock"; + case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock"; + case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock"; + case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock"; + case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock"; + case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock"; + case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock"; + case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock"; + case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock"; + case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock"; + case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock"; + case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock"; + case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock"; + case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock"; + case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock"; + case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock"; + case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock"; + case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock"; + case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock"; + case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock"; + case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock"; + case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock"; + case Format::eG8B8G8R8422Unorm: return "G8B8G8R8422Unorm"; + case Format::eB8G8R8G8422Unorm: return "B8G8R8G8422Unorm"; + case Format::eG8B8R83Plane420Unorm: return "G8B8R83Plane420Unorm"; + case Format::eG8B8R82Plane420Unorm: return "G8B8R82Plane420Unorm"; + case Format::eG8B8R83Plane422Unorm: return "G8B8R83Plane422Unorm"; + case Format::eG8B8R82Plane422Unorm: return "G8B8R82Plane422Unorm"; + case Format::eG8B8R83Plane444Unorm: return "G8B8R83Plane444Unorm"; + case Format::eR10X6UnormPack16: return "R10X6UnormPack16"; + case Format::eR10X6G10X6Unorm2Pack16: return "R10X6G10X6Unorm2Pack16"; + case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "R10X6G10X6B10X6A10X6Unorm4Pack16"; 
+ case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "G10X6B10X6G10X6R10X6422Unorm4Pack16"; + case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "B10X6G10X6R10X6G10X6422Unorm4Pack16"; + case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "G10X6B10X6R10X63Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "G10X6B10X6R10X62Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "G10X6B10X6R10X63Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "G10X6B10X6R10X62Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "G10X6B10X6R10X63Plane444Unorm3Pack16"; + case Format::eR12X4UnormPack16: return "R12X4UnormPack16"; + case Format::eR12X4G12X4Unorm2Pack16: return "R12X4G12X4Unorm2Pack16"; + case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "R12X4G12X4B12X4A12X4Unorm4Pack16"; + case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "G12X4B12X4G12X4R12X4422Unorm4Pack16"; + case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "B12X4G12X4R12X4G12X4422Unorm4Pack16"; + case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "G12X4B12X4R12X43Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "G12X4B12X4R12X42Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "G12X4B12X4R12X43Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "G12X4B12X4R12X42Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "G12X4B12X4R12X43Plane444Unorm3Pack16"; + case Format::eG16B16G16R16422Unorm: return "G16B16G16R16422Unorm"; + case Format::eB16G16R16G16422Unorm: return "B16G16R16G16422Unorm"; + case Format::eG16B16R163Plane420Unorm: return "G16B16R163Plane420Unorm"; + case Format::eG16B16R162Plane420Unorm: return "G16B16R162Plane420Unorm"; + case Format::eG16B16R163Plane422Unorm: return "G16B16R163Plane422Unorm"; + case Format::eG16B16R162Plane422Unorm: return "G16B16R162Plane422Unorm"; + case Format::eG16B16R163Plane444Unorm: return "G16B16R163Plane444Unorm"; + case Format::eG8B8R82Plane444Unorm: return "G8B8R82Plane444Unorm"; + case Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return "G10X6B10X6R10X62Plane444Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return "G12X4B12X4R12X42Plane444Unorm3Pack16"; + case Format::eG16B16R162Plane444Unorm: return "G16B16R162Plane444Unorm"; + case Format::eA4R4G4B4UnormPack16: return "A4R4G4B4UnormPack16"; + case Format::eA4B4G4R4UnormPack16: return "A4B4G4R4UnormPack16"; + case Format::eAstc4x4SfloatBlock: return "Astc4x4SfloatBlock"; + case Format::eAstc5x4SfloatBlock: return "Astc5x4SfloatBlock"; + case Format::eAstc5x5SfloatBlock: return "Astc5x5SfloatBlock"; + case Format::eAstc6x5SfloatBlock: return "Astc6x5SfloatBlock"; + case Format::eAstc6x6SfloatBlock: return "Astc6x6SfloatBlock"; + case Format::eAstc8x5SfloatBlock: return "Astc8x5SfloatBlock"; + case Format::eAstc8x6SfloatBlock: return "Astc8x6SfloatBlock"; + case Format::eAstc8x8SfloatBlock: return "Astc8x8SfloatBlock"; + case Format::eAstc10x5SfloatBlock: return "Astc10x5SfloatBlock"; + case Format::eAstc10x6SfloatBlock: return "Astc10x6SfloatBlock"; + case Format::eAstc10x8SfloatBlock: return "Astc10x8SfloatBlock"; + case Format::eAstc10x10SfloatBlock: return "Astc10x10SfloatBlock"; + case Format::eAstc12x10SfloatBlock: return "Astc12x10SfloatBlock"; + case Format::eAstc12x12SfloatBlock: return "Astc12x12SfloatBlock"; 
+      case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG";
+      case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG";
+      case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG";
+      case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG";
+      case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG";
+      case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG";
+      case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG";
+      case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG";
+      case Format::eR16G16S105NV: return "R16G16S105NV";
+      case Format::eA1B5G5R5UnormPack16KHR: return "A1B5G5R5UnormPack16KHR";
+      case Format::eA8UnormKHR: return "A8UnormKHR";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value )
+  {
+    switch ( value )
+    {
+      case FormatFeatureFlagBits::eSampledImage: return "SampledImage";
+      case FormatFeatureFlagBits::eStorageImage: return "StorageImage";
+      case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic";
+      case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
+      case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
+      case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic";
+      case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer";
+      case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment";
+      case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend";
+      case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
+      case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc";
+      case FormatFeatureFlagBits::eBlitDst: return "BlitDst";
+      case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear";
+      case FormatFeatureFlagBits::eTransferSrc: return "TransferSrc";
+      case FormatFeatureFlagBits::eTransferDst: return "TransferDst";
+      case FormatFeatureFlagBits::eMidpointChromaSamples: return "MidpointChromaSamples";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter: return "SampledImageYcbcrConversionLinearFilter";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter: return "SampledImageYcbcrConversionSeparateReconstructionFilter";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit: return "SampledImageYcbcrConversionChromaReconstructionExplicit";
+      case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable:
+        return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
+      case FormatFeatureFlagBits::eDisjoint: return "Disjoint";
+      case FormatFeatureFlagBits::eCositedChromaSamples: return "CositedChromaSamples";
+      case FormatFeatureFlagBits::eSampledImageFilterMinmax: return "SampledImageFilterMinmax";
+      case FormatFeatureFlagBits::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR";
+      case FormatFeatureFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
+      case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR";
+      case FormatFeatureFlagBits::eSampledImageFilterCubicEXT: return "SampledImageFilterCubicEXT";
+      case FormatFeatureFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT";
+      case FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
+      case FormatFeatureFlagBits::eVideoEncodeInputKHR: return "VideoEncodeInputKHR";
+      case FormatFeatureFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageCreateFlagBits::eSparseBinding: return "SparseBinding";
+      case ImageCreateFlagBits::eSparseResidency: return "SparseResidency";
+      case ImageCreateFlagBits::eSparseAliased: return "SparseAliased";
+      case ImageCreateFlagBits::eMutableFormat: return "MutableFormat";
+      case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible";
+      case ImageCreateFlagBits::eAlias: return "Alias";
+      case ImageCreateFlagBits::eSplitInstanceBindRegions: return "SplitInstanceBindRegions";
+      case ImageCreateFlagBits::e2DArrayCompatible: return "2DArrayCompatible";
+      case ImageCreateFlagBits::eBlockTexelViewCompatible: return "BlockTexelViewCompatible";
+      case ImageCreateFlagBits::eExtendedUsage: return "ExtendedUsage";
+      case ImageCreateFlagBits::eProtected: return "Protected";
+      case ImageCreateFlagBits::eDisjoint: return "Disjoint";
+      case ImageCreateFlagBits::eCornerSampledNV: return "CornerSampledNV";
+      case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT: return "SampleLocationsCompatibleDepthEXT";
+      case ImageCreateFlagBits::eSubsampledEXT: return "SubsampledEXT";
+      case ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT: return "DescriptorBufferCaptureReplayEXT";
+      case ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT: return "MultisampledRenderToSingleSampledEXT";
+      case ImageCreateFlagBits::e2DViewCompatibleEXT: return "2DViewCompatibleEXT";
+      case ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM: return "FragmentDensityMapOffsetQCOM";
+      case ImageCreateFlagBits::eVideoProfileIndependentKHR: return "VideoProfileIndependentKHR";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageTiling value )
+  {
+    switch ( value )
+    {
+      case ImageTiling::eOptimal: return "Optimal";
+      case ImageTiling::eLinear: return "Linear";
+      case ImageTiling::eDrmFormatModifierEXT: return "DrmFormatModifierEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageType value )
+  {
+    switch ( value )
+    {
+      case ImageType::e1D: return "1D";
+      case ImageType::e2D: return "2D";
+      case ImageType::e3D: return "3D";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageUsageFlagBits::eTransferSrc: return "TransferSrc";
+      case ImageUsageFlagBits::eTransferDst: return "TransferDst";
+      case ImageUsageFlagBits::eSampled: return "Sampled";
+      case ImageUsageFlagBits::eStorage: return "Storage";
+      case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment";
+      case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
+      case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment";
+      case ImageUsageFlagBits::eInputAttachment: return "InputAttachment";
+      case ImageUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR";
+      case ImageUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR";
+      case ImageUsageFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
+      case ImageUsageFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT";
+      case ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
+      case ImageUsageFlagBits::eHostTransferEXT: return "HostTransferEXT";
+      case ImageUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR";
+      case ImageUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR";
+      case ImageUsageFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
+      case ImageUsageFlagBits::eAttachmentFeedbackLoopEXT: return "AttachmentFeedbackLoopEXT";
+      case ImageUsageFlagBits::eInvocationMaskHUAWEI: return "InvocationMaskHUAWEI";
+      case ImageUsageFlagBits::eSampleWeightQCOM: return "SampleWeightQCOM";
+      case ImageUsageFlagBits::eSampleBlockMatchQCOM: return "SampleBlockMatchQCOM";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case InstanceCreateFlagBits::eEnumeratePortabilityKHR: return "EnumeratePortabilityKHR";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( InternalAllocationType value )
+  {
+    switch ( value )
+    {
+      case InternalAllocationType::eExecutable: return "Executable";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlagBits value )
+  {
+    switch ( value )
+    {
+      case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal";
+      case MemoryHeapFlagBits::eMultiInstance: return "MultiInstance";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlagBits value )
+  {
+    switch ( value )
+    {
+      case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal";
+      case MemoryPropertyFlagBits::eHostVisible: return "HostVisible";
+      case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent";
+      case MemoryPropertyFlagBits::eHostCached: return "HostCached";
+      case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated";
+      case MemoryPropertyFlagBits::eProtected: return "Protected";
+      case MemoryPropertyFlagBits::eDeviceCoherentAMD: return "DeviceCoherentAMD";
+      case MemoryPropertyFlagBits::eDeviceUncachedAMD: return "DeviceUncachedAMD";
+      case MemoryPropertyFlagBits::eRdmaCapableNV: return "RdmaCapableNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceType value )
+  {
+    switch ( value )
+    {
+      case PhysicalDeviceType::eOther: return "Other";
+      case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu";
+      case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu";
+      case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu";
+      case PhysicalDeviceType::eCpu: return "Cpu";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueueFlagBits::eGraphics: return "Graphics";
+      case QueueFlagBits::eCompute: return "Compute";
+      case QueueFlagBits::eTransfer: return "Transfer";
+      case QueueFlagBits::eSparseBinding: return "SparseBinding";
+      case QueueFlagBits::eProtected: return "Protected";
+      case QueueFlagBits::eVideoDecodeKHR: return "VideoDecodeKHR";
+      case QueueFlagBits::eVideoEncodeKHR: return "VideoEncodeKHR";
+      case QueueFlagBits::eOpticalFlowNV: return "OpticalFlowNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SampleCountFlagBits value )
+  {
+    switch ( value )
+    {
+      case SampleCountFlagBits::e1: return "1";
+      case SampleCountFlagBits::e2: return "2";
+      case SampleCountFlagBits::e4: return "4";
+      case SampleCountFlagBits::e8: return "8";
+      case SampleCountFlagBits::e16: return "16";
+      case SampleCountFlagBits::e32: return "32";
+      case SampleCountFlagBits::e64: return "64";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SystemAllocationScope value )
+  {
+    switch ( value )
+    {
+      case SystemAllocationScope::eCommand: return "Command";
+      case SystemAllocationScope::eObject: return "Object";
+      case SystemAllocationScope::eCache: return "Cache";
+      case SystemAllocationScope::eDevice: return "Device";
+      case SystemAllocationScope::eInstance: return "Instance";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case DeviceQueueCreateFlagBits::eProtected: return "Protected";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe";
+      case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect";
+      case PipelineStageFlagBits::eVertexInput: return "VertexInput";
+      case PipelineStageFlagBits::eVertexShader: return "VertexShader";
+      case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader";
+      case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader";
+      case PipelineStageFlagBits::eGeometryShader: return "GeometryShader";
+      case PipelineStageFlagBits::eFragmentShader: return "FragmentShader";
+      case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests";
+      case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests";
+      case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput";
+      case PipelineStageFlagBits::eComputeShader: return "ComputeShader";
+      case PipelineStageFlagBits::eTransfer: return "Transfer";
+      case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe";
+      case PipelineStageFlagBits::eHost: return "Host";
+      case PipelineStageFlagBits::eAllGraphics: return "AllGraphics";
+      case PipelineStageFlagBits::eAllCommands: return "AllCommands";
+      case PipelineStageFlagBits::eNone: return "None";
+      case PipelineStageFlagBits::eTransformFeedbackEXT: return "TransformFeedbackEXT";
+      case PipelineStageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
+      case PipelineStageFlagBits::eAccelerationStructureBuildKHR: return "AccelerationStructureBuildKHR";
+      case PipelineStageFlagBits::eRayTracingShaderKHR: return "RayTracingShaderKHR";
+      case PipelineStageFlagBits::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT";
+      case PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
+      case PipelineStageFlagBits::eCommandPreprocessNV: return "CommandPreprocessNV";
+      case PipelineStageFlagBits::eTaskShaderEXT: return "TaskShaderEXT";
+      case PipelineStageFlagBits::eMeshShaderEXT: return "MeshShaderEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits )
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageAspectFlagBits::eColor: return "Color";
+      case ImageAspectFlagBits::eDepth: return "Depth";
+      case ImageAspectFlagBits::eStencil: return "Stencil";
+      case ImageAspectFlagBits::eMetadata: return "Metadata";
+      case ImageAspectFlagBits::ePlane0: return "Plane0";
+      case ImageAspectFlagBits::ePlane1: return "Plane1";
+      case ImageAspectFlagBits::ePlane2: return "Plane2";
+      case ImageAspectFlagBits::eNone: return "None";
+      case ImageAspectFlagBits::eMemoryPlane0EXT: return "MemoryPlane0EXT";
+      case ImageAspectFlagBits::eMemoryPlane1EXT: return "MemoryPlane1EXT";
+      case ImageAspectFlagBits::eMemoryPlane2EXT: return "MemoryPlane2EXT";
+      case ImageAspectFlagBits::eMemoryPlane3EXT: return "MemoryPlane3EXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlagBits value )
+  {
+    switch ( value )
+    {
+      case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail";
+      case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize";
+      case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlagBits value )
+  {
+    switch ( value )
+    {
+      case SparseMemoryBindFlagBits::eMetadata: return "Metadata";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case FenceCreateFlagBits::eSignaled: return "Signaled";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case EventCreateFlagBits::eDeviceOnly: return "DeviceOnly";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices";
+      case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives";
+      case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives";
+      case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations";
+      case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives";
+      case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches";
+      case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations: return "TessellationEvaluationShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT: return "TaskShaderInvocationsEXT";
+      case QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT: return "MeshShaderInvocationsEXT";
+      case QueryPipelineStatisticFlagBits::eClusterCullingShaderInvocationsHUAWEI: return "ClusterCullingShaderInvocationsHUAWEI";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueryResultFlagBits::e64: return "64";
+      case QueryResultFlagBits::eWait: return "Wait";
+      case QueryResultFlagBits::eWithAvailability: return "WithAvailability";
+      case QueryResultFlagBits::ePartial: return "Partial";
+      case QueryResultFlagBits::eWithStatusKHR: return "WithStatusKHR";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryType value )
+  {
+    switch ( value )
+    {
+      case QueryType::eOcclusion: return "Occlusion";
+      case QueryType::ePipelineStatistics: return "PipelineStatistics";
+      case QueryType::eTimestamp: return "Timestamp";
+      case QueryType::eResultStatusOnlyKHR: return "ResultStatusOnlyKHR";
+      case QueryType::eTransformFeedbackStreamEXT: return "TransformFeedbackStreamEXT";
+      case QueryType::ePerformanceQueryKHR: return "PerformanceQueryKHR";
+      case QueryType::eAccelerationStructureCompactedSizeKHR: return "AccelerationStructureCompactedSizeKHR";
+      case QueryType::eAccelerationStructureSerializationSizeKHR: return "AccelerationStructureSerializationSizeKHR";
+      case QueryType::eAccelerationStructureCompactedSizeNV: return "AccelerationStructureCompactedSizeNV";
+      case QueryType::ePerformanceQueryINTEL: return "PerformanceQueryINTEL";
+      case QueryType::eVideoEncodeFeedbackKHR: return "VideoEncodeFeedbackKHR";
+      case QueryType::eMeshPrimitivesGeneratedEXT: return "MeshPrimitivesGeneratedEXT";
+      case QueryType::ePrimitivesGeneratedEXT: return "PrimitivesGeneratedEXT";
+      case QueryType::eAccelerationStructureSerializationBottomLevelPointersKHR: return "AccelerationStructureSerializationBottomLevelPointersKHR";
+      case QueryType::eAccelerationStructureSizeKHR: return "AccelerationStructureSizeKHR";
+      case QueryType::eMicromapSerializationSizeEXT: return "MicromapSerializationSizeEXT";
+      case QueryType::eMicromapCompactedSizeEXT: return "MicromapCompactedSizeEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case BufferCreateFlagBits::eSparseBinding: return "SparseBinding";
+      case BufferCreateFlagBits::eSparseResidency: return "SparseResidency";
+      case BufferCreateFlagBits::eSparseAliased: return "SparseAliased";
+      case BufferCreateFlagBits::eProtected: return "Protected";
+      case BufferCreateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay";
+      case BufferCreateFlagBits::eDescriptorBufferCaptureReplayEXT: return "DescriptorBufferCaptureReplayEXT";
+      case BufferCreateFlagBits::eVideoProfileIndependentKHR: return "VideoProfileIndependentKHR";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value )
+  {
+    switch ( value )
+    {
+      case BufferUsageFlagBits::eTransferSrc: return "TransferSrc";
+      case BufferUsageFlagBits::eTransferDst: return "TransferDst";
+      case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
+      case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
+      case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer";
+      case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer";
+      case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer";
+      case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer";
+      case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer";
+      case BufferUsageFlagBits::eShaderDeviceAddress: return "ShaderDeviceAddress";
+      case BufferUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR";
+      case BufferUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR";
+      case BufferUsageFlagBits::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT";
+      case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT";
+      case BufferUsageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case BufferUsageFlagBits::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR: return "AccelerationStructureBuildInputReadOnlyKHR";
+      case BufferUsageFlagBits::eAccelerationStructureStorageKHR: return "AccelerationStructureStorageKHR";
+      case BufferUsageFlagBits::eShaderBindingTableKHR: return "ShaderBindingTableKHR";
+      case BufferUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR";
+      case BufferUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR";
+      case BufferUsageFlagBits::eSamplerDescriptorBufferEXT: return "SamplerDescriptorBufferEXT";
+      case BufferUsageFlagBits::eResourceDescriptorBufferEXT: return "ResourceDescriptorBufferEXT";
+      case BufferUsageFlagBits::ePushDescriptorsDescriptorBufferEXT: return "PushDescriptorsDescriptorBufferEXT";
+      case BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT: return "MicromapBuildInputReadOnlyEXT";
+      case BufferUsageFlagBits::eMicromapStorageEXT: return "MicromapStorageEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SharingMode value )
+  {
+    switch ( value )
+    {
+      case SharingMode::eExclusive: return "Exclusive";
+      case SharingMode::eConcurrent: return "Concurrent";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageLayout value )
+  {
+    switch ( value )
+    {
+      case ImageLayout::eUndefined: return "Undefined";
+      case ImageLayout::eGeneral: return "General";
+      case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal";
+      case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal";
+      case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal";
+      case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal";
+      case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal";
+      case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal";
+      case ImageLayout::ePreinitialized: return "Preinitialized";
+      case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal: return "DepthReadOnlyStencilAttachmentOptimal";
+      case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal: return "DepthAttachmentStencilReadOnlyOptimal";
+      case ImageLayout::eDepthAttachmentOptimal: return "DepthAttachmentOptimal";
+      case ImageLayout::eDepthReadOnlyOptimal: return "DepthReadOnlyOptimal";
+      case ImageLayout::eStencilAttachmentOptimal: return "StencilAttachmentOptimal";
+      case ImageLayout::eStencilReadOnlyOptimal: return "StencilReadOnlyOptimal";
+      case ImageLayout::eReadOnlyOptimal: return "ReadOnlyOptimal";
+      case ImageLayout::eAttachmentOptimal: return "AttachmentOptimal";
+      case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR";
+      case ImageLayout::eVideoDecodeDstKHR: return "VideoDecodeDstKHR";
+      case ImageLayout::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR";
+      case ImageLayout::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
+      case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR";
+      case ImageLayout::eFragmentDensityMapOptimalEXT: return "FragmentDensityMapOptimalEXT";
+      case ImageLayout::eFragmentShadingRateAttachmentOptimalKHR: return "FragmentShadingRateAttachmentOptimalKHR";
+      case ImageLayout::eVideoEncodeDstKHR: return "VideoEncodeDstKHR";
+      case ImageLayout::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR";
+      case ImageLayout::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
+      case ImageLayout::eAttachmentFeedbackLoopOptimalEXT: return "AttachmentFeedbackLoopOptimalEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ComponentSwizzle value )
+  {
+    switch ( value )
+    {
+      case ComponentSwizzle::eIdentity: return "Identity";
+      case ComponentSwizzle::eZero: return "Zero";
+      case ComponentSwizzle::eOne: return "One";
+      case ComponentSwizzle::eR: return "R";
+      case ComponentSwizzle::eG: return "G";
+      case ComponentSwizzle::eB: return "B";
+      case ComponentSwizzle::eA: return "A";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT: return "FragmentDensityMapDynamicEXT";
+      case ImageViewCreateFlagBits::eDescriptorBufferCaptureReplayEXT: return "DescriptorBufferCaptureReplayEXT";
+      case ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT: return "FragmentDensityMapDeferredEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageViewType value )
+  {
+    switch ( value )
+    {
+      case ImageViewType::e1D: return "1D";
+      case ImageViewType::e2D: return "2D";
+      case ImageViewType::e3D: return "3D";
+      case ImageViewType::eCube: return "Cube";
+      case ImageViewType::e1DArray: return "1DArray";
+      case ImageViewType::e2DArray: return "2DArray";
+      case ImageViewType::eCubeArray: return "CubeArray";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineCacheCreateFlagBits::eExternallySynchronized: return "ExternallySynchronized";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( BlendFactor value )
+  {
+    switch ( value )
+    {
+      case BlendFactor::eZero: return "Zero";
+      case BlendFactor::eOne: return "One";
+      case BlendFactor::eSrcColor: return "SrcColor";
+      case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor";
+      case BlendFactor::eDstColor: return "DstColor";
+      case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor";
+      case BlendFactor::eSrcAlpha: return "SrcAlpha";
+      case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha";
+      case BlendFactor::eDstAlpha: return "DstAlpha";
+      case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha";
+      case BlendFactor::eConstantColor: return "ConstantColor";
+      case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor";
+      case BlendFactor::eConstantAlpha: return "ConstantAlpha";
+      case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha";
+      case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate";
+      case BlendFactor::eSrc1Color: return "Src1Color";
+      case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color";
+      case BlendFactor::eSrc1Alpha: return "Src1Alpha";
+      case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( BlendOp value )
+  {
+    switch ( value )
+    {
+      case BlendOp::eAdd: return "Add";
+      case BlendOp::eSubtract: return "Subtract";
+      case BlendOp::eReverseSubtract: return "ReverseSubtract";
+      case BlendOp::eMin: return "Min";
+      case BlendOp::eMax: return "Max";
+      case BlendOp::eZeroEXT: return "ZeroEXT";
+      case BlendOp::eSrcEXT: return "SrcEXT";
+      case BlendOp::eDstEXT: return "DstEXT";
+      case BlendOp::eSrcOverEXT: return "SrcOverEXT";
+      case BlendOp::eDstOverEXT: return "DstOverEXT";
+      case BlendOp::eSrcInEXT: return "SrcInEXT";
+      case BlendOp::eDstInEXT: return "DstInEXT";
+      case BlendOp::eSrcOutEXT: return "SrcOutEXT";
+      case BlendOp::eDstOutEXT: return "DstOutEXT";
+      case BlendOp::eSrcAtopEXT: return "SrcAtopEXT";
+      case BlendOp::eDstAtopEXT: return "DstAtopEXT";
+      case BlendOp::eXorEXT: return "XorEXT";
+      case BlendOp::eMultiplyEXT: return "MultiplyEXT";
+      case BlendOp::eScreenEXT: return "ScreenEXT";
+      case BlendOp::eOverlayEXT: return "OverlayEXT";
+      case BlendOp::eDarkenEXT: return "DarkenEXT";
+      case BlendOp::eLightenEXT: return "LightenEXT";
+      case BlendOp::eColordodgeEXT: return "ColordodgeEXT";
+      case BlendOp::eColorburnEXT: return "ColorburnEXT";
+      case BlendOp::eHardlightEXT: return "HardlightEXT";
+      case BlendOp::eSoftlightEXT: return "SoftlightEXT";
+      case BlendOp::eDifferenceEXT: return "DifferenceEXT";
+      case BlendOp::eExclusionEXT: return "ExclusionEXT";
+      case BlendOp::eInvertEXT: return "InvertEXT";
+      case BlendOp::eInvertRgbEXT: return "InvertRgbEXT";
+      case BlendOp::eLineardodgeEXT: return "LineardodgeEXT";
+      case BlendOp::eLinearburnEXT: return "LinearburnEXT";
+      case BlendOp::eVividlightEXT: return "VividlightEXT";
+      case BlendOp::eLinearlightEXT: return "LinearlightEXT";
+      case BlendOp::ePinlightEXT: return "PinlightEXT";
+      case BlendOp::eHardmixEXT: return "HardmixEXT";
+      case BlendOp::eHslHueEXT: return "HslHueEXT";
+      case BlendOp::eHslSaturationEXT: return "HslSaturationEXT";
+      case BlendOp::eHslColorEXT: return "HslColorEXT";
+      case BlendOp::eHslLuminosityEXT: return "HslLuminosityEXT";
+      case BlendOp::ePlusEXT: return "PlusEXT";
+      case BlendOp::ePlusClampedEXT: return "PlusClampedEXT";
+      case BlendOp::ePlusClampedAlphaEXT: return "PlusClampedAlphaEXT";
+      case BlendOp::ePlusDarkerEXT: return "PlusDarkerEXT";
+      case BlendOp::eMinusEXT: return "MinusEXT";
+      case BlendOp::eMinusClampedEXT: return "MinusClampedEXT";
+      case BlendOp::eContrastEXT: return "ContrastEXT";
+      case BlendOp::eInvertOvgEXT: return "InvertOvgEXT";
+      case BlendOp::eRedEXT: return "RedEXT";
+      case BlendOp::eGreenEXT: return "GreenEXT";
+      case BlendOp::eBlueEXT: return "BlueEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value )
+  {
+    switch ( value )
+    {
+      case ColorComponentFlagBits::eR: return "R";
+      case ColorComponentFlagBits::eG: return "G";
+      case ColorComponentFlagBits::eB: return "B";
+      case ColorComponentFlagBits::eA: return "A";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CompareOp value )
+  {
+    switch ( value )
+    {
+      case CompareOp::eNever: return "Never";
+      case CompareOp::eLess: return "Less";
+      case CompareOp::eEqual: return "Equal";
+      case CompareOp::eLessOrEqual: return "LessOrEqual";
+      case CompareOp::eGreater: return "Greater";
+      case CompareOp::eNotEqual: return "NotEqual";
+      case CompareOp::eGreaterOrEqual: return "GreaterOrEqual";
+      case CompareOp::eAlways: return "Always";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value )
+  {
+    switch ( value )
+    {
+      case CullModeFlagBits::eNone: return "None";
+      case CullModeFlagBits::eFront: return "Front";
+      case CullModeFlagBits::eBack: return "Back";
+      case CullModeFlagBits::eFrontAndBack: return "FrontAndBack";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DynamicState value )
+  {
+    switch ( value )
+    {
+      case DynamicState::eViewport: return "Viewport";
+      case DynamicState::eScissor: return "Scissor";
+      case DynamicState::eLineWidth: return "LineWidth";
+      case DynamicState::eDepthBias: return "DepthBias";
+      case DynamicState::eBlendConstants: return "BlendConstants";
+      case DynamicState::eDepthBounds: return "DepthBounds";
+      case DynamicState::eStencilCompareMask: return "StencilCompareMask";
+      case DynamicState::eStencilWriteMask: return "StencilWriteMask";
+      case DynamicState::eStencilReference: return "StencilReference";
+      case DynamicState::eCullMode: return "CullMode";
+      case DynamicState::eFrontFace: return "FrontFace";
+      case DynamicState::ePrimitiveTopology: return "PrimitiveTopology";
+      case DynamicState::eViewportWithCount: return "ViewportWithCount";
+      case DynamicState::eScissorWithCount: return "ScissorWithCount";
+      case DynamicState::eVertexInputBindingStride: return "VertexInputBindingStride";
+      case DynamicState::eDepthTestEnable: return "DepthTestEnable";
+      case DynamicState::eDepthWriteEnable: return "DepthWriteEnable";
+      case DynamicState::eDepthCompareOp: return "DepthCompareOp";
+      case DynamicState::eDepthBoundsTestEnable: return "DepthBoundsTestEnable";
+      case DynamicState::eStencilTestEnable: return "StencilTestEnable";
+      case DynamicState::eStencilOp: return "StencilOp";
+      case DynamicState::eRasterizerDiscardEnable: return "RasterizerDiscardEnable";
+      case DynamicState::eDepthBiasEnable: return "DepthBiasEnable";
+      case DynamicState::ePrimitiveRestartEnable: return "PrimitiveRestartEnable";
+      case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV";
+      case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT";
+      case DynamicState::eDiscardRectangleEnableEXT: return "DiscardRectangleEnableEXT";
+      case DynamicState::eDiscardRectangleModeEXT: return "DiscardRectangleModeEXT";
+      case DynamicState::eSampleLocationsEXT: return "SampleLocationsEXT";
+      case DynamicState::eRayTracingPipelineStackSizeKHR: return "RayTracingPipelineStackSizeKHR";
+      case DynamicState::eViewportShadingRatePaletteNV: return "ViewportShadingRatePaletteNV";
+      case DynamicState::eViewportCoarseSampleOrderNV: return "ViewportCoarseSampleOrderNV";
+      case DynamicState::eExclusiveScissorEnableNV: return "ExclusiveScissorEnableNV";
+      case DynamicState::eExclusiveScissorNV: return "ExclusiveScissorNV";
+      case DynamicState::eFragmentShadingRateKHR: return "FragmentShadingRateKHR";
+      case DynamicState::eLineStippleEXT: return "LineStippleEXT";
+      case DynamicState::eVertexInputEXT: return "VertexInputEXT";
+      case DynamicState::ePatchControlPointsEXT: return "PatchControlPointsEXT";
+      case DynamicState::eLogicOpEXT: return "LogicOpEXT";
+      case DynamicState::eColorWriteEnableEXT: return "ColorWriteEnableEXT";
+      case DynamicState::eTessellationDomainOriginEXT: return "TessellationDomainOriginEXT";
+      case DynamicState::eDepthClampEnableEXT: return "DepthClampEnableEXT";
+      case DynamicState::ePolygonModeEXT: return "PolygonModeEXT";
+      case DynamicState::eRasterizationSamplesEXT: return "RasterizationSamplesEXT";
+      case DynamicState::eSampleMaskEXT: return "SampleMaskEXT";
+      case DynamicState::eAlphaToCoverageEnableEXT: return "AlphaToCoverageEnableEXT";
+      case DynamicState::eAlphaToOneEnableEXT: return "AlphaToOneEnableEXT";
+      case DynamicState::eLogicOpEnableEXT: return "LogicOpEnableEXT";
+      case DynamicState::eColorBlendEnableEXT: return "ColorBlendEnableEXT";
+      case DynamicState::eColorBlendEquationEXT: return "ColorBlendEquationEXT";
+      case DynamicState::eColorWriteMaskEXT: return "ColorWriteMaskEXT";
+      case DynamicState::eRasterizationStreamEXT: return "RasterizationStreamEXT";
+      case DynamicState::eConservativeRasterizationModeEXT: return "ConservativeRasterizationModeEXT";
+      case DynamicState::eExtraPrimitiveOverestimationSizeEXT: return "ExtraPrimitiveOverestimationSizeEXT";
+      case DynamicState::eDepthClipEnableEXT: return "DepthClipEnableEXT";
+      case DynamicState::eSampleLocationsEnableEXT: return "SampleLocationsEnableEXT";
+      case DynamicState::eColorBlendAdvancedEXT: return "ColorBlendAdvancedEXT";
+      case DynamicState::eProvokingVertexModeEXT: return "ProvokingVertexModeEXT";
+      case DynamicState::eLineRasterizationModeEXT: return "LineRasterizationModeEXT";
+      case DynamicState::eLineStippleEnableEXT: return "LineStippleEnableEXT";
+      case DynamicState::eDepthClipNegativeOneToOneEXT: return "DepthClipNegativeOneToOneEXT";
+      case DynamicState::eViewportWScalingEnableNV: return "ViewportWScalingEnableNV";
DynamicState::eViewportWScalingEnableNV: return "ViewportWScalingEnableNV"; + case DynamicState::eViewportSwizzleNV: return "ViewportSwizzleNV"; + case DynamicState::eCoverageToColorEnableNV: return "CoverageToColorEnableNV"; + case DynamicState::eCoverageToColorLocationNV: return "CoverageToColorLocationNV"; + case DynamicState::eCoverageModulationModeNV: return "CoverageModulationModeNV"; + case DynamicState::eCoverageModulationTableEnableNV: return "CoverageModulationTableEnableNV"; + case DynamicState::eCoverageModulationTableNV: return "CoverageModulationTableNV"; + case DynamicState::eShadingRateImageEnableNV: return "ShadingRateImageEnableNV"; + case DynamicState::eRepresentativeFragmentTestEnableNV: return "RepresentativeFragmentTestEnableNV"; + case DynamicState::eCoverageReductionModeNV: return "CoverageReductionModeNV"; + case DynamicState::eAttachmentFeedbackLoopEnableEXT: return "AttachmentFeedbackLoopEnableEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( FrontFace value ) + { + switch ( value ) + { + case FrontFace::eCounterClockwise: return "CounterClockwise"; + case FrontFace::eClockwise: return "Clockwise"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( LogicOp value ) + { + switch ( value ) + { + case LogicOp::eClear: return "Clear"; + case LogicOp::eAnd: return "And"; + case LogicOp::eAndReverse: return "AndReverse"; + case LogicOp::eCopy: return "Copy"; + case LogicOp::eAndInverted: return "AndInverted"; + case LogicOp::eNoOp: return "NoOp"; + case LogicOp::eXor: return "Xor"; + case LogicOp::eOr: return "Or"; + case LogicOp::eNor: return "Nor"; + case LogicOp::eEquivalent: return "Equivalent"; + case LogicOp::eInvert: return "Invert"; + case LogicOp::eOrReverse: return "OrReverse"; + case LogicOp::eCopyInverted: return "CopyInverted"; + case LogicOp::eOrInverted: return "OrInverted"; + case LogicOp::eNand: return "Nand"; + case LogicOp::eSet: return "Set"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value ) + { + switch ( value ) + { + case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization"; + case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives"; + case PipelineCreateFlagBits::eDerivative: return "Derivative"; + case PipelineCreateFlagBits::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; + case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase"; + case PipelineCreateFlagBits::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired"; + case PipelineCreateFlagBits::eEarlyReturnOnFailure: return "EarlyReturnOnFailure"; + case PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR: return "RenderingFragmentShadingRateAttachmentKHR"; + case PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT: return "RenderingFragmentDensityMapAttachmentEXT"; + case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR: return "RayTracingNoNullClosestHitShadersKHR"; + case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR"; + case 
PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR: return "RayTracingNoNullIntersectionShadersKHR"; + case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR: return "RayTracingSkipTrianglesKHR"; + case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR: return "RayTracingSkipAabbsKHR"; + case PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR: return "RayTracingShaderGroupHandleCaptureReplayKHR"; + case PipelineCreateFlagBits::eDeferCompileNV: return "DeferCompileNV"; + case PipelineCreateFlagBits::eCaptureStatisticsKHR: return "CaptureStatisticsKHR"; + case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR: return "CaptureInternalRepresentationsKHR"; + case PipelineCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV"; + case PipelineCreateFlagBits::eLibraryKHR: return "LibraryKHR"; + case PipelineCreateFlagBits::eDescriptorBufferEXT: return "DescriptorBufferEXT"; + case PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT: return "RetainLinkTimeOptimizationInfoEXT"; + case PipelineCreateFlagBits::eLinkTimeOptimizationEXT: return "LinkTimeOptimizationEXT"; + case PipelineCreateFlagBits::eRayTracingAllowMotionNV: return "RayTracingAllowMotionNV"; + case PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT: return "ColorAttachmentFeedbackLoopEXT"; + case PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT: return "DepthStencilAttachmentFeedbackLoopEXT"; + case PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT: return "RayTracingOpacityMicromapEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineCreateFlagBits::eRayTracingDisplacementMicromapNV: return "RayTracingDisplacementMicromapNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case PipelineCreateFlagBits::eNoProtectedAccessEXT: return "NoProtectedAccessEXT"; + case PipelineCreateFlagBits::eProtectedAccessOnlyEXT: return "ProtectedAccessOnlyEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value ) + { + switch ( value ) + { + case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize: return "AllowVaryingSubgroupSize"; + case PipelineShaderStageCreateFlagBits::eRequireFullSubgroups: return "RequireFullSubgroups"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PolygonMode value ) + { + switch ( value ) + { + case PolygonMode::eFill: return "Fill"; + case PolygonMode::eLine: return "Line"; + case PolygonMode::ePoint: return "Point"; + case PolygonMode::eFillRectangleNV: return "FillRectangleNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PrimitiveTopology value ) + { + switch ( value ) + { + case PrimitiveTopology::ePointList: return "PointList"; + case PrimitiveTopology::eLineList: return "LineList"; + case PrimitiveTopology::eLineStrip: return "LineStrip"; + case PrimitiveTopology::eTriangleList: return "TriangleList"; + case PrimitiveTopology::eTriangleStrip: return "TriangleStrip"; + case PrimitiveTopology::eTriangleFan: return "TriangleFan"; + case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency"; + case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency"; + case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency"; + case 
PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency"; + case PrimitiveTopology::ePatchList: return "PatchList"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value ) + { + switch ( value ) + { + case ShaderStageFlagBits::eVertex: return "Vertex"; + case ShaderStageFlagBits::eTessellationControl: return "TessellationControl"; + case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation"; + case ShaderStageFlagBits::eGeometry: return "Geometry"; + case ShaderStageFlagBits::eFragment: return "Fragment"; + case ShaderStageFlagBits::eCompute: return "Compute"; + case ShaderStageFlagBits::eAllGraphics: return "AllGraphics"; + case ShaderStageFlagBits::eAll: return "All"; + case ShaderStageFlagBits::eRaygenKHR: return "RaygenKHR"; + case ShaderStageFlagBits::eAnyHitKHR: return "AnyHitKHR"; + case ShaderStageFlagBits::eClosestHitKHR: return "ClosestHitKHR"; + case ShaderStageFlagBits::eMissKHR: return "MissKHR"; + case ShaderStageFlagBits::eIntersectionKHR: return "IntersectionKHR"; + case ShaderStageFlagBits::eCallableKHR: return "CallableKHR"; + case ShaderStageFlagBits::eTaskEXT: return "TaskEXT"; + case ShaderStageFlagBits::eMeshEXT: return "MeshEXT"; + case ShaderStageFlagBits::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI"; + case ShaderStageFlagBits::eClusterCullingHUAWEI: return "ClusterCullingHUAWEI"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( StencilOp value ) + { + switch ( value ) + { + case StencilOp::eKeep: return "Keep"; + case StencilOp::eZero: return "Zero"; + case StencilOp::eReplace: return "Replace"; + case StencilOp::eIncrementAndClamp: return "IncrementAndClamp"; + case StencilOp::eDecrementAndClamp: return "DecrementAndClamp"; + case StencilOp::eInvert: return "Invert"; + case StencilOp::eIncrementAndWrap: return "IncrementAndWrap"; + case StencilOp::eDecrementAndWrap: return "DecrementAndWrap"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VertexInputRate value ) + { + switch ( value ) + { + case VertexInputRate::eVertex: return "Vertex"; + case VertexInputRate::eInstance: return "Instance"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits value ) + { + switch ( value ) + { + case PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT: return "RasterizationOrderAttachmentAccessEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits value ) + { + switch ( value ) + { + case PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT: return "RasterizationOrderAttachmentDepthAccessEXT"; + case PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT: return "RasterizationOrderAttachmentStencilAccessEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits ) + { + return "(void)"; + } + + 
VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits value ) + { + switch ( value ) + { + case PipelineLayoutCreateFlagBits::eIndependentSetsEXT: return "IndependentSetsEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string( BorderColor value ) + { + switch ( value ) + { + case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack"; + case BorderColor::eIntTransparentBlack: return "IntTransparentBlack"; + case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack"; + case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack"; + case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite"; + case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite"; + case BorderColor::eFloatCustomEXT: return "FloatCustomEXT"; + case BorderColor::eIntCustomEXT: return "IntCustomEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( Filter value ) + { + switch ( value ) + { + case Filter::eNearest: return "Nearest"; + case Filter::eLinear: return "Linear"; + case Filter::eCubicEXT: return "CubicEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( SamplerAddressMode value ) + { + switch ( value ) + { + case SamplerAddressMode::eRepeat: return "Repeat"; + case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat"; + case SamplerAddressMode::eClampToEdge: return "ClampToEdge"; + case SamplerAddressMode::eClampToBorder: return "ClampToBorder"; + case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value ) + { + switch ( value ) + { + case SamplerCreateFlagBits::eSubsampledEXT: return "SubsampledEXT"; + case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT: return "SubsampledCoarseReconstructionEXT"; + case SamplerCreateFlagBits::eDescriptorBufferCaptureReplayEXT: return "DescriptorBufferCaptureReplayEXT"; + case SamplerCreateFlagBits::eNonSeamlessCubeMapEXT: return "NonSeamlessCubeMapEXT"; + case SamplerCreateFlagBits::eImageProcessingQCOM: return "ImageProcessingQCOM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( SamplerMipmapMode value ) + { + switch ( value ) + { + case SamplerMipmapMode::eNearest: return "Nearest"; + case SamplerMipmapMode::eLinear: return "Linear"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + 
VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value ) + { + switch ( value ) + { + case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet"; + case DescriptorPoolCreateFlagBits::eUpdateAfterBind: return "UpdateAfterBind"; + case DescriptorPoolCreateFlagBits::eHostOnlyEXT: return "HostOnlyEXT"; + case DescriptorPoolCreateFlagBits::eAllowOverallocationSetsNV: return "AllowOverallocationSetsNV"; + case DescriptorPoolCreateFlagBits::eAllowOverallocationPoolsNV: return "AllowOverallocationPoolsNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value ) + { + switch ( value ) + { + case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool: return "UpdateAfterBindPool"; + case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR"; + case DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT: return "DescriptorBufferEXT"; + case DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT: return "EmbeddedImmutableSamplersEXT"; + case DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV"; + case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT: return "HostOnlyPoolEXT"; + case DescriptorSetLayoutCreateFlagBits::ePerStageNV: return "PerStageNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorType value ) + { + switch ( value ) + { + case DescriptorType::eSampler: return "Sampler"; + case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler"; + case DescriptorType::eSampledImage: return "SampledImage"; + case DescriptorType::eStorageImage: return "StorageImage"; + case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer"; + case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer"; + case DescriptorType::eUniformBuffer: return "UniformBuffer"; + case DescriptorType::eStorageBuffer: return "StorageBuffer"; + case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic"; + case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic"; + case DescriptorType::eInputAttachment: return "InputAttachment"; + case DescriptorType::eInlineUniformBlock: return "InlineUniformBlock"; + case DescriptorType::eAccelerationStructureKHR: return "AccelerationStructureKHR"; + case DescriptorType::eAccelerationStructureNV: return "AccelerationStructureNV"; + case DescriptorType::eSampleWeightImageQCOM: return "SampleWeightImageQCOM"; + case DescriptorType::eBlockMatchImageQCOM: return "BlockMatchImageQCOM"; + case DescriptorType::eMutableEXT: return "MutableEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value ) + { + switch ( value ) + { + case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead"; + case AccessFlagBits::eIndexRead: return "IndexRead"; + case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead"; + case AccessFlagBits::eUniformRead: return "UniformRead"; + case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead"; + case AccessFlagBits::eShaderRead: return "ShaderRead"; + case AccessFlagBits::eShaderWrite: return 
"ShaderWrite"; + case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead"; + case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite"; + case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead"; + case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite"; + case AccessFlagBits::eTransferRead: return "TransferRead"; + case AccessFlagBits::eTransferWrite: return "TransferWrite"; + case AccessFlagBits::eHostRead: return "HostRead"; + case AccessFlagBits::eHostWrite: return "HostWrite"; + case AccessFlagBits::eMemoryRead: return "MemoryRead"; + case AccessFlagBits::eMemoryWrite: return "MemoryWrite"; + case AccessFlagBits::eNone: return "None"; + case AccessFlagBits::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT"; + case AccessFlagBits::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT"; + case AccessFlagBits::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT"; + case AccessFlagBits::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT"; + case AccessFlagBits::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT"; + case AccessFlagBits::eAccelerationStructureReadKHR: return "AccelerationStructureReadKHR"; + case AccessFlagBits::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR"; + case AccessFlagBits::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT"; + case AccessFlagBits::eFragmentShadingRateAttachmentReadKHR: return "FragmentShadingRateAttachmentReadKHR"; + case AccessFlagBits::eCommandPreprocessReadNV: return "CommandPreprocessReadNV"; + case AccessFlagBits::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value ) + { + switch ( value ) + { + case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( AttachmentLoadOp value ) + { + switch ( value ) + { + case AttachmentLoadOp::eLoad: return "Load"; + case AttachmentLoadOp::eClear: return "Clear"; + case AttachmentLoadOp::eDontCare: return "DontCare"; + case AttachmentLoadOp::eNoneEXT: return "NoneEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value ) + { + switch ( value ) + { + case AttachmentStoreOp::eStore: return "Store"; + case AttachmentStoreOp::eDontCare: return "DontCare"; + case AttachmentStoreOp::eNone: return "None"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value ) + { + switch ( value ) + { + case DependencyFlagBits::eByRegion: return "ByRegion"; + case DependencyFlagBits::eDeviceGroup: return "DeviceGroup"; + case DependencyFlagBits::eViewLocal: return "ViewLocal"; + case DependencyFlagBits::eFeedbackLoopEXT: return "FeedbackLoopEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value ) + { + switch ( value ) + { + case FramebufferCreateFlagBits::eImageless: return 
"Imageless"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineBindPoint value ) + { + switch ( value ) + { + case PipelineBindPoint::eGraphics: return "Graphics"; + case PipelineBindPoint::eCompute: return "Compute"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineBindPoint::eExecutionGraphAMDX: return "ExecutionGraphAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case PipelineBindPoint::eRayTracingKHR: return "RayTracingKHR"; + case PipelineBindPoint::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits value ) + { + switch ( value ) + { + case RenderPassCreateFlagBits::eTransformQCOM: return "TransformQCOM"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value ) + { + switch ( value ) + { + case SubpassDescriptionFlagBits::ePerViewAttributesNVX: return "PerViewAttributesNVX"; + case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX: return "PerViewPositionXOnlyNVX"; + case SubpassDescriptionFlagBits::eFragmentRegionQCOM: return "FragmentRegionQCOM"; + case SubpassDescriptionFlagBits::eShaderResolveQCOM: return "ShaderResolveQCOM"; + case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessEXT: return "RasterizationOrderAttachmentColorAccessEXT"; + case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessEXT: return "RasterizationOrderAttachmentDepthAccessEXT"; + case SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessEXT: return "RasterizationOrderAttachmentStencilAccessEXT"; + case SubpassDescriptionFlagBits::eEnableLegacyDitheringEXT: return "EnableLegacyDitheringEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value ) + { + switch ( value ) + { + case CommandPoolCreateFlagBits::eTransient: return "Transient"; + case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer"; + case CommandPoolCreateFlagBits::eProtected: return "Protected"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value ) + { + switch ( value ) + { + case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( CommandBufferLevel value ) + { + switch ( value ) + { + case CommandBufferLevel::ePrimary: return "Primary"; + case CommandBufferLevel::eSecondary: return "Secondary"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlagBits value ) + { + switch ( value ) + { + case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value ) + { + switch ( value ) + { + case 
CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit"; + case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue"; + case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value ) + { + switch ( value ) + { + case QueryControlFlagBits::ePrecise: return "Precise"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( IndexType value ) + { + switch ( value ) + { + case IndexType::eUint16: return "Uint16"; + case IndexType::eUint32: return "Uint32"; + case IndexType::eNoneKHR: return "NoneKHR"; + case IndexType::eUint8EXT: return "Uint8EXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( StencilFaceFlagBits value ) + { + switch ( value ) + { + case StencilFaceFlagBits::eFront: return "Front"; + case StencilFaceFlagBits::eBack: return "Back"; + case StencilFaceFlagBits::eFrontAndBack: return "FrontAndBack"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( SubpassContents value ) + { + switch ( value ) + { + case SubpassContents::eInline: return "Inline"; + case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers"; + case SubpassContents::eInlineAndSecondaryCommandBuffersEXT: return "InlineAndSecondaryCommandBuffersEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_VERSION_1_1 === + + VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value ) + { + switch ( value ) + { + case SubgroupFeatureFlagBits::eBasic: return "Basic"; + case SubgroupFeatureFlagBits::eVote: return "Vote"; + case SubgroupFeatureFlagBits::eArithmetic: return "Arithmetic"; + case SubgroupFeatureFlagBits::eBallot: return "Ballot"; + case SubgroupFeatureFlagBits::eShuffle: return "Shuffle"; + case SubgroupFeatureFlagBits::eShuffleRelative: return "ShuffleRelative"; + case SubgroupFeatureFlagBits::eClustered: return "Clustered"; + case SubgroupFeatureFlagBits::eQuad: return "Quad"; + case SubgroupFeatureFlagBits::ePartitionedNV: return "PartitionedNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value ) + { + switch ( value ) + { + case PeerMemoryFeatureFlagBits::eCopySrc: return "CopySrc"; + case PeerMemoryFeatureFlagBits::eCopyDst: return "CopyDst"; + case PeerMemoryFeatureFlagBits::eGenericSrc: return "GenericSrc"; + case PeerMemoryFeatureFlagBits::eGenericDst: return "GenericDst"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value ) + { + switch ( value ) + { + case MemoryAllocateFlagBits::eDeviceMask: return "DeviceMask"; + case MemoryAllocateFlagBits::eDeviceAddress: return "DeviceAddress"; + case MemoryAllocateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string 
to_string( CommandPoolTrimFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value ) + { + switch ( value ) + { + case PointClippingBehavior::eAllClipPlanes: return "AllClipPlanes"; + case PointClippingBehavior::eUserClipPlanesOnly: return "UserClipPlanesOnly"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( TessellationDomainOrigin value ) + { + switch ( value ) + { + case TessellationDomainOrigin::eUpperLeft: return "UpperLeft"; + case TessellationDomainOrigin::eLowerLeft: return "LowerLeft"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value ) + { + switch ( value ) + { + case SamplerYcbcrModelConversion::eRgbIdentity: return "RgbIdentity"; + case SamplerYcbcrModelConversion::eYcbcrIdentity: return "YcbcrIdentity"; + case SamplerYcbcrModelConversion::eYcbcr709: return "Ycbcr709"; + case SamplerYcbcrModelConversion::eYcbcr601: return "Ycbcr601"; + case SamplerYcbcrModelConversion::eYcbcr2020: return "Ycbcr2020"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrRange value ) + { + switch ( value ) + { + case SamplerYcbcrRange::eItuFull: return "ItuFull"; + case SamplerYcbcrRange::eItuNarrow: return "ItuNarrow"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ChromaLocation value ) + { + switch ( value ) + { + case ChromaLocation::eCositedEven: return "CositedEven"; + case ChromaLocation::eMidpoint: return "Midpoint"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateType value ) + { + switch ( value ) + { + case DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet"; + case DescriptorUpdateTemplateType::ePushDescriptorsKHR: return "PushDescriptorsKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value ) + { + switch ( value ) + { + case ExternalMemoryHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D11Texture: return "D3D11Texture"; + case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt: return "D3D11TextureKmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Heap: return "D3D12Heap"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Resource: return "D3D12Resource"; + case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT: return "DmaBufEXT"; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID: return "AndroidHardwareBufferANDROID"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT: return "HostAllocationEXT"; + case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT: return 
"HostMappedForeignMemoryEXT"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA: return "ZirconVmoFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + case ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV: return "RdmaAddressNV"; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + case ExternalMemoryHandleTypeFlagBits::eScreenBufferQNX: return "ScreenBufferQNX"; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value ) + { + switch ( value ) + { + case ExternalMemoryFeatureFlagBits::eDedicatedOnly: return "DedicatedOnly"; + case ExternalMemoryFeatureFlagBits::eExportable: return "Exportable"; + case ExternalMemoryFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value ) + { + switch ( value ) + { + case ExternalFenceHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalFenceHandleTypeFlagBits::eSyncFd: return "SyncFd"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value ) + { + switch ( value ) + { + case ExternalFenceFeatureFlagBits::eExportable: return "Exportable"; + case ExternalFenceFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value ) + { + switch ( value ) + { + case FenceImportFlagBits::eTemporary: return "Temporary"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value ) + { + switch ( value ) + { + case SemaphoreImportFlagBits::eTemporary: return "Temporary"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value ) + { + switch ( value ) + { + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt"; + case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence: return "D3D12Fence"; + case ExternalSemaphoreHandleTypeFlagBits::eSyncFd: return "SyncFd"; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + case ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA: return "ZirconEventFUCHSIA"; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value ) + { + switch ( value ) + { + case ExternalSemaphoreFeatureFlagBits::eExportable: return "Exportable"; + case ExternalSemaphoreFeatureFlagBits::eImportable: return "Importable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + 
+
+  //=== VK_VERSION_1_2 ===
+
+  VULKAN_HPP_INLINE std::string to_string( DriverId value )
+  {
+    switch ( value )
+    {
+      case DriverId::eAmdProprietary: return "AmdProprietary";
+      case DriverId::eAmdOpenSource: return "AmdOpenSource";
+      case DriverId::eMesaRadv: return "MesaRadv";
+      case DriverId::eNvidiaProprietary: return "NvidiaProprietary";
+      case DriverId::eIntelProprietaryWindows: return "IntelProprietaryWindows";
+      case DriverId::eIntelOpenSourceMESA: return "IntelOpenSourceMESA";
+      case DriverId::eImaginationProprietary: return "ImaginationProprietary";
+      case DriverId::eQualcommProprietary: return "QualcommProprietary";
+      case DriverId::eArmProprietary: return "ArmProprietary";
+      case DriverId::eGoogleSwiftshader: return "GoogleSwiftshader";
+      case DriverId::eGgpProprietary: return "GgpProprietary";
+      case DriverId::eBroadcomProprietary: return "BroadcomProprietary";
+      case DriverId::eMesaLlvmpipe: return "MesaLlvmpipe";
+      case DriverId::eMoltenvk: return "Moltenvk";
+      case DriverId::eCoreaviProprietary: return "CoreaviProprietary";
+      case DriverId::eJuiceProprietary: return "JuiceProprietary";
+      case DriverId::eVerisiliconProprietary: return "VerisiliconProprietary";
+      case DriverId::eMesaTurnip: return "MesaTurnip";
+      case DriverId::eMesaV3Dv: return "MesaV3Dv";
+      case DriverId::eMesaPanvk: return "MesaPanvk";
+      case DriverId::eSamsungProprietary: return "SamsungProprietary";
+      case DriverId::eMesaVenus: return "MesaVenus";
+      case DriverId::eMesaDozen: return "MesaDozen";
+      case DriverId::eMesaNvk: return "MesaNvk";
+      case DriverId::eImaginationOpenSourceMESA: return "ImaginationOpenSourceMESA";
+      case DriverId::eMesaAgxv: return "MesaAgxv";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependence value )
+  {
+    switch ( value )
+    {
+      case ShaderFloatControlsIndependence::e32BitOnly: return "32BitOnly";
+      case ShaderFloatControlsIndependence::eAll: return "All";
+      case ShaderFloatControlsIndependence::eNone: return "None";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBits value )
+  {
+    switch ( value )
+    {
+      case DescriptorBindingFlagBits::eUpdateAfterBind: return "UpdateAfterBind";
+      case DescriptorBindingFlagBits::eUpdateUnusedWhilePending: return "UpdateUnusedWhilePending";
+      case DescriptorBindingFlagBits::ePartiallyBound: return "PartiallyBound";
+      case DescriptorBindingFlagBits::eVariableDescriptorCount: return "VariableDescriptorCount";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ResolveModeFlagBits::eNone: return "None";
+      case ResolveModeFlagBits::eSampleZero: return "SampleZero";
+      case ResolveModeFlagBits::eAverage: return "Average";
+      case ResolveModeFlagBits::eMin: return "Min";
+      case ResolveModeFlagBits::eMax: return "Max";
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+      case ResolveModeFlagBits::eExternalFormatDownsampleANDROID: return "ExternalFormatDownsampleANDROID";
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerReductionMode value )
+  {
+    switch ( value )
+    {
+      case SamplerReductionMode::eWeightedAverage: return "WeightedAverage";
+      case SamplerReductionMode::eMin: return "Min";
+      case SamplerReductionMode::eMax: return "Max";
+      case SamplerReductionMode::eWeightedAverageRangeclampQCOM: return "WeightedAverageRangeclampQCOM";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreType value )
+  {
+    switch ( value )
+    {
+      case SemaphoreType::eBinary: return "Binary";
+      case SemaphoreType::eTimeline: return "Timeline";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlagBits value )
+  {
+    switch ( value )
+    {
+      case SemaphoreWaitFlagBits::eAny: return "Any";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_VERSION_1_3 ===
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBits value )
+  {
+    switch ( value )
+    {
+      case PipelineCreationFeedbackFlagBits::eValid: return "Valid";
+      case PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit: return "ApplicationPipelineCacheHit";
+      case PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration: return "BasePipelineAcceleration";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagBits value )
+  {
+    switch ( value )
+    {
+      case ToolPurposeFlagBits::eValidation: return "Validation";
+      case ToolPurposeFlagBits::eProfiling: return "Profiling";
+      case ToolPurposeFlagBits::eTracing: return "Tracing";
+      case ToolPurposeFlagBits::eAdditionalFeatures: return "AdditionalFeatures";
+      case ToolPurposeFlagBits::eModifyingFeatures: return "ModifyingFeatures";
+      case ToolPurposeFlagBits::eDebugReportingEXT: return "DebugReportingEXT";
+      case ToolPurposeFlagBits::eDebugMarkersEXT: return "DebugMarkersEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits2 value )
+  {
+    switch ( value )
+    {
+      case PipelineStageFlagBits2::eNone: return "None";
+      case PipelineStageFlagBits2::eTopOfPipe: return "TopOfPipe";
+      case PipelineStageFlagBits2::eDrawIndirect: return "DrawIndirect";
+      case PipelineStageFlagBits2::eVertexInput: return "VertexInput";
+      case PipelineStageFlagBits2::eVertexShader: return "VertexShader";
+      case PipelineStageFlagBits2::eTessellationControlShader: return "TessellationControlShader";
+      case PipelineStageFlagBits2::eTessellationEvaluationShader: return "TessellationEvaluationShader";
+      case PipelineStageFlagBits2::eGeometryShader: return "GeometryShader";
+      case PipelineStageFlagBits2::eFragmentShader: return "FragmentShader";
+      case PipelineStageFlagBits2::eEarlyFragmentTests: return "EarlyFragmentTests";
+      case PipelineStageFlagBits2::eLateFragmentTests: return "LateFragmentTests";
+      case PipelineStageFlagBits2::eColorAttachmentOutput: return "ColorAttachmentOutput";
+      case PipelineStageFlagBits2::eComputeShader: return "ComputeShader";
+      case PipelineStageFlagBits2::eAllTransfer: return "AllTransfer";
+      case PipelineStageFlagBits2::eBottomOfPipe: return "BottomOfPipe";
+      case PipelineStageFlagBits2::eHost: return "Host";
+      case PipelineStageFlagBits2::eAllGraphics: return "AllGraphics";
+      case PipelineStageFlagBits2::eAllCommands: return "AllCommands";
+      case PipelineStageFlagBits2::eCopy: return "Copy";
+      case PipelineStageFlagBits2::eResolve: return "Resolve";
+      case PipelineStageFlagBits2::eBlit: return "Blit";
+      case PipelineStageFlagBits2::eClear: return "Clear";
+      case PipelineStageFlagBits2::eIndexInput: return "IndexInput";
+      case PipelineStageFlagBits2::eVertexAttributeInput: return "VertexAttributeInput";
+      case PipelineStageFlagBits2::ePreRasterizationShaders: return "PreRasterizationShaders";
+      case PipelineStageFlagBits2::eVideoDecodeKHR: return "VideoDecodeKHR";
+      case PipelineStageFlagBits2::eVideoEncodeKHR: return "VideoEncodeKHR";
+      case PipelineStageFlagBits2::eTransformFeedbackEXT: return "TransformFeedbackEXT";
+      case PipelineStageFlagBits2::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
+      case PipelineStageFlagBits2::eCommandPreprocessNV: return "CommandPreprocessNV";
+      case PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
+      case PipelineStageFlagBits2::eAccelerationStructureBuildKHR: return "AccelerationStructureBuildKHR";
+      case PipelineStageFlagBits2::eRayTracingShaderKHR: return "RayTracingShaderKHR";
+      case PipelineStageFlagBits2::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT";
+      case PipelineStageFlagBits2::eTaskShaderEXT: return "TaskShaderEXT";
+      case PipelineStageFlagBits2::eMeshShaderEXT: return "MeshShaderEXT";
+      case PipelineStageFlagBits2::eSubpassShaderHUAWEI: return "SubpassShaderHUAWEI";
+      case PipelineStageFlagBits2::eInvocationMaskHUAWEI: return "InvocationMaskHUAWEI";
+      case PipelineStageFlagBits2::eAccelerationStructureCopyKHR: return "AccelerationStructureCopyKHR";
+      case PipelineStageFlagBits2::eMicromapBuildEXT: return "MicromapBuildEXT";
+      case PipelineStageFlagBits2::eClusterCullingShaderHUAWEI: return "ClusterCullingShaderHUAWEI";
+      case PipelineStageFlagBits2::eOpticalFlowNV: return "OpticalFlowNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint64_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( AccessFlagBits2 value )
+  {
+    switch ( value )
+    {
+      case AccessFlagBits2::eNone: return "None";
+      case AccessFlagBits2::eIndirectCommandRead: return "IndirectCommandRead";
+      case AccessFlagBits2::eIndexRead: return "IndexRead";
+      case AccessFlagBits2::eVertexAttributeRead: return "VertexAttributeRead";
+      case AccessFlagBits2::eUniformRead: return "UniformRead";
+      case AccessFlagBits2::eInputAttachmentRead: return "InputAttachmentRead";
+      case AccessFlagBits2::eShaderRead: return "ShaderRead";
+      case AccessFlagBits2::eShaderWrite: return "ShaderWrite";
+      case AccessFlagBits2::eColorAttachmentRead: return "ColorAttachmentRead";
+      case AccessFlagBits2::eColorAttachmentWrite: return "ColorAttachmentWrite";
+      case AccessFlagBits2::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead";
+      case AccessFlagBits2::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite";
+      case AccessFlagBits2::eTransferRead: return "TransferRead";
+      case AccessFlagBits2::eTransferWrite: return "TransferWrite";
+      case AccessFlagBits2::eHostRead: return "HostRead";
+      case AccessFlagBits2::eHostWrite: return "HostWrite";
+      case AccessFlagBits2::eMemoryRead: return "MemoryRead";
+      case AccessFlagBits2::eMemoryWrite: return "MemoryWrite";
+      case AccessFlagBits2::eShaderSampledRead: return "ShaderSampledRead";
+      case AccessFlagBits2::eShaderStorageRead: return "ShaderStorageRead";
+      case AccessFlagBits2::eShaderStorageWrite: return "ShaderStorageWrite";
+      case AccessFlagBits2::eVideoDecodeReadKHR: return "VideoDecodeReadKHR";
+      case AccessFlagBits2::eVideoDecodeWriteKHR: return "VideoDecodeWriteKHR";
+      case AccessFlagBits2::eVideoEncodeReadKHR: return "VideoEncodeReadKHR";
+      case AccessFlagBits2::eVideoEncodeWriteKHR: return "VideoEncodeWriteKHR";
+      case AccessFlagBits2::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT";
+      case AccessFlagBits2::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT";
+      case AccessFlagBits2::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT";
+      case AccessFlagBits2::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT";
+      case AccessFlagBits2::eCommandPreprocessReadNV: return "CommandPreprocessReadNV";
+      case AccessFlagBits2::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV";
+      case AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR: return "FragmentShadingRateAttachmentReadKHR";
+      case AccessFlagBits2::eAccelerationStructureReadKHR: return "AccelerationStructureReadKHR";
+      case AccessFlagBits2::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR";
+      case AccessFlagBits2::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT";
+      case AccessFlagBits2::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT";
+      case AccessFlagBits2::eDescriptorBufferReadEXT: return "DescriptorBufferReadEXT";
+      case AccessFlagBits2::eInvocationMaskReadHUAWEI: return "InvocationMaskReadHUAWEI";
+      case AccessFlagBits2::eShaderBindingTableReadKHR: return "ShaderBindingTableReadKHR";
+      case AccessFlagBits2::eMicromapReadEXT: return "MicromapReadEXT";
+      case AccessFlagBits2::eMicromapWriteEXT: return "MicromapWriteEXT";
+      case AccessFlagBits2::eOpticalFlowReadNV: return "OpticalFlowReadNV";
+      case AccessFlagBits2::eOpticalFlowWriteNV: return "OpticalFlowWriteNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint64_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SubmitFlagBits value )
+  {
+    switch ( value )
+    {
+      case SubmitFlagBits::eProtected: return "Protected";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( RenderingFlagBits value )
+  {
+    switch ( value )
+    {
+      case RenderingFlagBits::eContentsSecondaryCommandBuffers: return "ContentsSecondaryCommandBuffers";
+      case RenderingFlagBits::eSuspending: return "Suspending";
+      case RenderingFlagBits::eResuming: return "Resuming";
+      case RenderingFlagBits::eContentsInlineEXT: return "ContentsInlineEXT";
+      case RenderingFlagBits::eEnableLegacyDitheringEXT: return "EnableLegacyDitheringEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits2 value )
+  {
+    switch ( value )
+    {
+      case FormatFeatureFlagBits2::eSampledImage: return "SampledImage";
+      case FormatFeatureFlagBits2::eStorageImage: return "StorageImage";
+      case FormatFeatureFlagBits2::eStorageImageAtomic: return "StorageImageAtomic";
+      case FormatFeatureFlagBits2::eUniformTexelBuffer: return "UniformTexelBuffer";
+      case FormatFeatureFlagBits2::eStorageTexelBuffer: return "StorageTexelBuffer";
+      case FormatFeatureFlagBits2::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic";
+      case FormatFeatureFlagBits2::eVertexBuffer: return "VertexBuffer";
+      case FormatFeatureFlagBits2::eColorAttachment: return "ColorAttachment";
+      case FormatFeatureFlagBits2::eColorAttachmentBlend: return "ColorAttachmentBlend";
+      case FormatFeatureFlagBits2::eDepthStencilAttachment: return "DepthStencilAttachment";
+      case FormatFeatureFlagBits2::eBlitSrc: return "BlitSrc";
+      case FormatFeatureFlagBits2::eBlitDst: return "BlitDst";
+      case FormatFeatureFlagBits2::eSampledImageFilterLinear: return "SampledImageFilterLinear";
+      case FormatFeatureFlagBits2::eSampledImageFilterCubic: return "SampledImageFilterCubic";
+      case FormatFeatureFlagBits2::eTransferSrc: return "TransferSrc";
+      case FormatFeatureFlagBits2::eTransferDst: return "TransferDst";
+      case FormatFeatureFlagBits2::eSampledImageFilterMinmax: return "SampledImageFilterMinmax";
+      case FormatFeatureFlagBits2::eMidpointChromaSamples: return "MidpointChromaSamples";
+      case FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter: return "SampledImageYcbcrConversionLinearFilter";
+      case FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter: return "SampledImageYcbcrConversionSeparateReconstructionFilter";
+      case FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit: return "SampledImageYcbcrConversionChromaReconstructionExplicit";
+      case FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable:
+        return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
+      case FormatFeatureFlagBits2::eDisjoint: return "Disjoint";
+      case FormatFeatureFlagBits2::eCositedChromaSamples: return "CositedChromaSamples";
+      case FormatFeatureFlagBits2::eStorageReadWithoutFormat: return "StorageReadWithoutFormat";
+      case FormatFeatureFlagBits2::eStorageWriteWithoutFormat: return "StorageWriteWithoutFormat";
+      case FormatFeatureFlagBits2::eSampledImageDepthComparison: return "SampledImageDepthComparison";
+      case FormatFeatureFlagBits2::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR";
+      case FormatFeatureFlagBits2::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
+      case FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR";
+      case FormatFeatureFlagBits2::eFragmentDensityMapEXT: return "FragmentDensityMapEXT";
+      case FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
+      case FormatFeatureFlagBits2::eHostImageTransferEXT: return "HostImageTransferEXT";
+      case FormatFeatureFlagBits2::eVideoEncodeInputKHR: return "VideoEncodeInputKHR";
+      case FormatFeatureFlagBits2::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
+      case FormatFeatureFlagBits2::eLinearColorAttachmentNV: return "LinearColorAttachmentNV";
+      case FormatFeatureFlagBits2::eWeightImageQCOM: return "WeightImageQCOM";
+      case FormatFeatureFlagBits2::eWeightSampledImageQCOM: return "WeightSampledImageQCOM";
+      case FormatFeatureFlagBits2::eBlockMatchingQCOM: return "BlockMatchingQCOM";
+      case FormatFeatureFlagBits2::eBoxFilterSampledQCOM: return "BoxFilterSampledQCOM";
+      case FormatFeatureFlagBits2::eOpticalFlowImageNV: return "OpticalFlowImageNV";
+      case FormatFeatureFlagBits2::eOpticalFlowVectorNV: return "OpticalFlowVectorNV";
+      case FormatFeatureFlagBits2::eOpticalFlowCostNV: return "OpticalFlowCostNV";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint64_t>( value ) ) + " )";
+    }
+  }
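+
+  // Editorial note: PipelineStageFlagBits2, AccessFlagBits2 and FormatFeatureFlagBits2 are
+  // 64-bit enums, which is why their fallback above casts to uint64_t rather than uint32_t --
+  // several of their bits (e.g. eInvocationMaskHUAWEI) live above bit 31 and would be lost in
+  // a 32-bit cast. Illustrative only, assuming the `vk` alias and <iostream>:
+  //
+  //   vk::PipelineStageFlagBits2 stage = vk::PipelineStageFlagBits2::eCopy;
+  //   std::cout << vk::to_string( stage ) << '\n'; // "Copy"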
+
+  //=== VK_KHR_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity";
+      case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90";
+      case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180";
+      case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180";
+      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270";
+      case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PresentModeKHR value )
+  {
+    switch ( value )
+    {
+      case PresentModeKHR::eImmediate: return "Immediate";
+      case PresentModeKHR::eMailbox: return "Mailbox";
+      case PresentModeKHR::eFifo: return "Fifo";
+      case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed";
+      case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh";
+      case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ColorSpaceKHR value )
+  {
+    switch ( value )
+    {
+      case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear";
+      case ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT";
+      case ColorSpaceKHR::eExtendedSrgbLinearEXT: return "ExtendedSrgbLinearEXT";
+      case ColorSpaceKHR::eDisplayP3LinearEXT: return "DisplayP3LinearEXT";
+      case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT";
+      case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT";
+      case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT";
+      case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT";
+      case ColorSpaceKHR::eHdr10St2084EXT: return "Hdr10St2084EXT";
+      case ColorSpaceKHR::eDolbyvisionEXT: return "DolbyvisionEXT";
+      case ColorSpaceKHR::eHdr10HlgEXT: return "Hdr10HlgEXT";
+      case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT";
+      case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT";
+      case ColorSpaceKHR::ePassThroughEXT: return "PassThroughEXT";
+      case ColorSpaceKHR::eExtendedSrgbNonlinearEXT: return "ExtendedSrgbNonlinearEXT";
+      case ColorSpaceKHR::eDisplayNativeAMD: return "DisplayNativeAMD";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque";
+      case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied";
+      case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied";
+      case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
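+
+  // Editorial sketch (not part of the upstream generated header): the surface-related
+  // overloads above are handy when choosing a swapchain configuration. A typical debug
+  // loop, assuming the `vk` alias, <iostream>, and a valid physicalDevice/surface pair:
+  //
+  //   for ( vk::PresentModeKHR mode : physicalDevice.getSurfacePresentModesKHR( surface ) )
+  //     std::cout << "supported present mode: " << vk::to_string( mode ) << '\n';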
+
+  //=== VK_KHR_swapchain ===
+
+  VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions: return "SplitInstanceBindRegions";
+      case SwapchainCreateFlagBitsKHR::eProtected: return "Protected";
+      case SwapchainCreateFlagBitsKHR::eMutableFormat: return "MutableFormat";
+      case SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT: return "DeferredMemoryAllocationEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case DeviceGroupPresentModeFlagBitsKHR::eLocal: return "Local";
+      case DeviceGroupPresentModeFlagBitsKHR::eRemote: return "Remote";
+      case DeviceGroupPresentModeFlagBitsKHR::eSum: return "Sum";
+      case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice: return "LocalMultiDevice";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_KHR_display ===
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque";
+      case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global";
+      case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel";
+      case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+
+  VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case DebugReportFlagBitsEXT::eInformation: return "Information";
+      case DebugReportFlagBitsEXT::eWarning: return "Warning";
+      case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning";
+      case DebugReportFlagBitsEXT::eError: return "Error";
+      case DebugReportFlagBitsEXT::eDebug: return "Debug";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DebugReportObjectTypeEXT::eUnknown: return "Unknown";
+      case DebugReportObjectTypeEXT::eInstance: return "Instance";
+      case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice";
+      case DebugReportObjectTypeEXT::eDevice: return "Device";
+      case DebugReportObjectTypeEXT::eQueue: return "Queue";
+      case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore";
+      case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer";
+      case DebugReportObjectTypeEXT::eFence: return "Fence";
+      case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory";
+      case DebugReportObjectTypeEXT::eBuffer: return "Buffer";
+      case DebugReportObjectTypeEXT::eImage: return "Image";
+      case DebugReportObjectTypeEXT::eEvent: return "Event";
+      case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool";
+      case DebugReportObjectTypeEXT::eBufferView: return "BufferView";
+      case DebugReportObjectTypeEXT::eImageView: return "ImageView";
+      case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule";
+      case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache";
+      case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout";
+      case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass";
+      case DebugReportObjectTypeEXT::ePipeline: return "Pipeline";
+      case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout";
+      case DebugReportObjectTypeEXT::eSampler: return "Sampler";
+      case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool";
+      case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet";
+      case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer";
+      case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool";
+      case DebugReportObjectTypeEXT::eSurfaceKHR: return "SurfaceKHR";
+      case DebugReportObjectTypeEXT::eSwapchainKHR: return "SwapchainKHR";
+      case DebugReportObjectTypeEXT::eDebugReportCallbackEXT: return "DebugReportCallbackEXT";
+      case DebugReportObjectTypeEXT::eDisplayKHR: return "DisplayKHR";
+      case DebugReportObjectTypeEXT::eDisplayModeKHR: return "DisplayModeKHR";
+      case DebugReportObjectTypeEXT::eValidationCacheEXT: return "ValidationCacheEXT";
+      case DebugReportObjectTypeEXT::eSamplerYcbcrConversion: return "SamplerYcbcrConversion";
+      case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate";
+      case DebugReportObjectTypeEXT::eCuModuleNVX: return "CuModuleNVX";
+      case DebugReportObjectTypeEXT::eCuFunctionNVX: return "CuFunctionNVX";
+      case DebugReportObjectTypeEXT::eAccelerationStructureKHR: return "AccelerationStructureKHR";
+      case DebugReportObjectTypeEXT::eAccelerationStructureNV: return "AccelerationStructureNV";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case DebugReportObjectTypeEXT::eCudaModuleNV: return "CudaModuleNV";
+      case DebugReportObjectTypeEXT::eCudaFunctionNV: return "CudaFunctionNV";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+      case DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA";
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_AMD_rasterization_order ===
+
+  VULKAN_HPP_INLINE std::string to_string( RasterizationOrderAMD value )
+  {
+    switch ( value )
+    {
+      case RasterizationOrderAMD::eStrict: return "Strict";
+      case RasterizationOrderAMD::eRelaxed: return "Relaxed";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
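+
+  // Editorial sketch (not part of the upstream generated header): the debug-report
+  // overloads above are typically used inside a VK_EXT_debug_report callback to turn the
+  // raw flag and object-type values into readable log lines. Hypothetical callback body,
+  // assuming the `vk` alias and <iostream>:
+  //
+  //   VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback( VkDebugReportFlagsEXT flags,
+  //                                                 VkDebugReportObjectTypeEXT objectType,
+  //                                                 uint64_t, size_t, int32_t, const char *,
+  //                                                 const char * pMessage, void * )
+  //   {
+  //     std::cerr << vk::to_string( static_cast<vk::DebugReportFlagBitsEXT>( flags ) ) << " ["
+  //               << vk::to_string( static_cast<vk::DebugReportObjectTypeEXT>( objectType ) )
+  //               << "] " << pMessage << '\n';
+  //     return VK_FALSE;
+  //   }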
+
+  //=== VK_KHR_video_queue ===
+
+  VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoCodecOperationFlagBitsKHR::eNone: return "None";
+      case VideoCodecOperationFlagBitsKHR::eEncodeH264: return "EncodeH264";
+      case VideoCodecOperationFlagBitsKHR::eEncodeH265: return "EncodeH265";
+      case VideoCodecOperationFlagBitsKHR::eDecodeH264: return "DecodeH264";
+      case VideoCodecOperationFlagBitsKHR::eDecodeH265: return "DecodeH265";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoChromaSubsamplingFlagBitsKHR::eInvalid: return "Invalid";
+      case VideoChromaSubsamplingFlagBitsKHR::eMonochrome: return "Monochrome";
+      case VideoChromaSubsamplingFlagBitsKHR::e420: return "420";
+      case VideoChromaSubsamplingFlagBitsKHR::e422: return "422";
+      case VideoChromaSubsamplingFlagBitsKHR::e444: return "444";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoComponentBitDepthFlagBitsKHR::eInvalid: return "Invalid";
+      case VideoComponentBitDepthFlagBitsKHR::e8: return "8";
+      case VideoComponentBitDepthFlagBitsKHR::e10: return "10";
+      case VideoComponentBitDepthFlagBitsKHR::e12: return "12";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoCapabilityFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoCapabilityFlagBitsKHR::eProtectedContent: return "ProtectedContent";
+      case VideoCapabilityFlagBitsKHR::eSeparateReferenceImages: return "SeparateReferenceImages";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoSessionCreateFlagBitsKHR::eProtectedContent: return "ProtectedContent";
+      case VideoSessionCreateFlagBitsKHR::eAllowEncodeParameterOptimizations: return "AllowEncodeParameterOptimizations";
+      case VideoSessionCreateFlagBitsKHR::eInlineQueries: return "InlineQueries";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoCodingControlFlagBitsKHR::eReset: return "Reset";
+      case VideoCodingControlFlagBitsKHR::eEncodeRateControl: return "EncodeRateControl";
+      case VideoCodingControlFlagBitsKHR::eEncodeQualityLevel: return "EncodeQualityLevel";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( QueryResultStatusKHR value )
+  {
+    switch ( value )
+    {
+      case QueryResultStatusKHR::eError: return "Error";
+      case QueryResultStatusKHR::eNotReady: return "NotReady";
+      case QueryResultStatusKHR::eComplete: return "Complete";
+      case QueryResultStatusKHR::eInsufficientBitstreamBufferRange: return "InsufficientBitstreamBufferRange";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoSessionParametersCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  //=== VK_KHR_video_decode_queue ===
+
+  VULKAN_HPP_INLINE std::string to_string( VideoDecodeCapabilityFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide: return "DpbAndOutputCoincide";
+      case VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct: return "DpbAndOutputDistinct";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoDecodeUsageFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoDecodeUsageFlagBitsKHR::eDefault: return "Default";
+      case VideoDecodeUsageFlagBitsKHR::eTranscoding: return "Transcoding";
+      case VideoDecodeUsageFlagBitsKHR::eOffline: return "Offline";
+      case VideoDecodeUsageFlagBitsKHR::eStreaming: return "Streaming";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoDecodeFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_transform_feedback ===
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  //=== VK_KHR_video_encode_h264 ===
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilityFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH264CapabilityFlagBitsKHR::eHrdCompliance: return "HrdCompliance";
+      case VideoEncodeH264CapabilityFlagBitsKHR::ePredictionWeightTableGenerated: return "PredictionWeightTableGenerated";
+      case VideoEncodeH264CapabilityFlagBitsKHR::eRowUnalignedSlice: return "RowUnalignedSlice";
+      case VideoEncodeH264CapabilityFlagBitsKHR::eDifferentSliceType: return "DifferentSliceType";
+      case VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL0List: return "BFrameInL0List";
+      case VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL1List: return "BFrameInL1List";
+      case VideoEncodeH264CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp: return "PerPictureTypeMinMaxQp";
+      case VideoEncodeH264CapabilityFlagBitsKHR::ePerSliceConstantQp: return "PerSliceConstantQp";
+      case VideoEncodeH264CapabilityFlagBitsKHR::eGeneratePrefixNalu: return "GeneratePrefixNalu";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264StdFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH264StdFlagBitsKHR::eSeparateColorPlaneFlagSet: return "SeparateColorPlaneFlagSet";
+      case VideoEncodeH264StdFlagBitsKHR::eQpprimeYZeroTransformBypassFlagSet: return "QpprimeYZeroTransformBypassFlagSet";
+      case VideoEncodeH264StdFlagBitsKHR::eScalingMatrixPresentFlagSet: return "ScalingMatrixPresentFlagSet";
+      case VideoEncodeH264StdFlagBitsKHR::eChromaQpIndexOffset: return "ChromaQpIndexOffset";
+      case VideoEncodeH264StdFlagBitsKHR::eSecondChromaQpIndexOffset: return "SecondChromaQpIndexOffset";
+      case VideoEncodeH264StdFlagBitsKHR::ePicInitQpMinus26: return "PicInitQpMinus26";
+      case VideoEncodeH264StdFlagBitsKHR::eWeightedPredFlagSet: return "WeightedPredFlagSet";
+      case VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcExplicit: return "WeightedBipredIdcExplicit";
+      case VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcImplicit: return "WeightedBipredIdcImplicit";
+      case VideoEncodeH264StdFlagBitsKHR::eTransform8X8ModeFlagSet: return "Transform8X8ModeFlagSet";
+      case VideoEncodeH264StdFlagBitsKHR::eDirectSpatialMvPredFlagUnset: return "DirectSpatialMvPredFlagUnset";
+      case VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagUnset: return "EntropyCodingModeFlagUnset";
+      case VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagSet: return "EntropyCodingModeFlagSet";
+      case VideoEncodeH264StdFlagBitsKHR::eDirect8X8InferenceFlagUnset: return "Direct8X8InferenceFlagUnset";
+      case VideoEncodeH264StdFlagBitsKHR::eConstrainedIntraPredFlagSet: return "ConstrainedIntraPredFlagSet";
+      case VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterDisabled: return "DeblockingFilterDisabled";
+      case VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterEnabled: return "DeblockingFilterEnabled";
+      case VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterPartial: return "DeblockingFilterPartial";
+      case VideoEncodeH264StdFlagBitsKHR::eSliceQpDelta: return "SliceQpDelta";
+      case VideoEncodeH264StdFlagBitsKHR::eDifferentSliceQpDelta: return "DifferentSliceQpDelta";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264RateControlFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH264RateControlFlagBitsKHR::eAttemptHrdCompliance: return "AttemptHrdCompliance";
+      case VideoEncodeH264RateControlFlagBitsKHR::eRegularGop: return "RegularGop";
+      case VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternFlat: return "ReferencePatternFlat";
+      case VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternDyadic: return "ReferencePatternDyadic";
+      case VideoEncodeH264RateControlFlagBitsKHR::eTemporalLayerPatternDyadic: return "TemporalLayerPatternDyadic";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_KHR_video_encode_h265 ===
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CapabilityFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH265CapabilityFlagBitsKHR::eHrdCompliance: return "HrdCompliance";
+      case VideoEncodeH265CapabilityFlagBitsKHR::ePredictionWeightTableGenerated: return "PredictionWeightTableGenerated";
+      case VideoEncodeH265CapabilityFlagBitsKHR::eRowUnalignedSliceSegment: return "RowUnalignedSliceSegment";
+      case VideoEncodeH265CapabilityFlagBitsKHR::eDifferentSliceSegmentType: return "DifferentSliceSegmentType";
+      case VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL0List: return "BFrameInL0List";
+      case VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL1List: return "BFrameInL1List";
+      case VideoEncodeH265CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp: return "PerPictureTypeMinMaxQp";
+      case VideoEncodeH265CapabilityFlagBitsKHR::ePerSliceSegmentConstantQp: return "PerSliceSegmentConstantQp";
+      case VideoEncodeH265CapabilityFlagBitsKHR::eMultipleTilesPerSliceSegment: return "MultipleTilesPerSliceSegment";
+      case VideoEncodeH265CapabilityFlagBitsKHR::eMultipleSliceSegmentsPerTile: return "MultipleSliceSegmentsPerTile";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265StdFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH265StdFlagBitsKHR::eSeparateColorPlaneFlagSet: return "SeparateColorPlaneFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::eSampleAdaptiveOffsetEnabledFlagSet: return "SampleAdaptiveOffsetEnabledFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::eScalingListDataPresentFlagSet: return "ScalingListDataPresentFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::ePcmEnabledFlagSet: return "PcmEnabledFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::eSpsTemporalMvpEnabledFlagSet: return "SpsTemporalMvpEnabledFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::eInitQpMinus26: return "InitQpMinus26";
+      case VideoEncodeH265StdFlagBitsKHR::eWeightedPredFlagSet: return "WeightedPredFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::eWeightedBipredFlagSet: return "WeightedBipredFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::eLog2ParallelMergeLevelMinus2: return "Log2ParallelMergeLevelMinus2";
+      case VideoEncodeH265StdFlagBitsKHR::eSignDataHidingEnabledFlagSet: return "SignDataHidingEnabledFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagSet: return "TransformSkipEnabledFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagUnset: return "TransformSkipEnabledFlagUnset";
+      case VideoEncodeH265StdFlagBitsKHR::ePpsSliceChromaQpOffsetsPresentFlagSet: return "PpsSliceChromaQpOffsetsPresentFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::eTransquantBypassEnabledFlagSet: return "TransquantBypassEnabledFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::eConstrainedIntraPredFlagSet: return "ConstrainedIntraPredFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::eEntropyCodingSyncEnabledFlagSet: return "EntropyCodingSyncEnabledFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::eDeblockingFilterOverrideEnabledFlagSet: return "DeblockingFilterOverrideEnabledFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentsEnabledFlagSet: return "DependentSliceSegmentsEnabledFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentFlagSet: return "DependentSliceSegmentFlagSet";
+      case VideoEncodeH265StdFlagBitsKHR::eSliceQpDelta: return "SliceQpDelta";
+      case VideoEncodeH265StdFlagBitsKHR::eDifferentSliceQpDelta: return "DifferentSliceQpDelta";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CtbSizeFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH265CtbSizeFlagBitsKHR::e16: return "16";
+      case VideoEncodeH265CtbSizeFlagBitsKHR::e32: return "32";
+      case VideoEncodeH265CtbSizeFlagBitsKHR::e64: return "64";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265TransformBlockSizeFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH265TransformBlockSizeFlagBitsKHR::e4: return "4";
+      case VideoEncodeH265TransformBlockSizeFlagBitsKHR::e8: return "8";
+      case VideoEncodeH265TransformBlockSizeFlagBitsKHR::e16: return "16";
+      case VideoEncodeH265TransformBlockSizeFlagBitsKHR::e32: return "32";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265RateControlFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoEncodeH265RateControlFlagBitsKHR::eAttemptHrdCompliance: return "AttemptHrdCompliance";
+      case VideoEncodeH265RateControlFlagBitsKHR::eRegularGop: return "RegularGop";
+      case VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternFlat: return "ReferencePatternFlat";
+      case VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternDyadic: return "ReferencePatternDyadic";
+      case VideoEncodeH265RateControlFlagBitsKHR::eTemporalSubLayerPatternDyadic: return "TemporalSubLayerPatternDyadic";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_KHR_video_decode_h264 ===
+
+  VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264PictureLayoutFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive: return "Progressive";
+      case VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedInterleavedLines: return "InterlacedInterleavedLines";
+      case VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedSeparatePlanes: return "InterlacedSeparatePlanes";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_AMD_shader_info ===
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderInfoTypeAMD value )
+  {
+    switch ( value )
+    {
+      case ShaderInfoTypeAMD::eStatistics: return "Statistics";
+      case ShaderInfoTypeAMD::eBinary: return "Binary";
+      case ShaderInfoTypeAMD::eDisassembly: return "Disassembly";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_external_memory_capabilities ===
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32";
+      case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
+      case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image";
+      case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value )
+  {
+    switch ( value )
+    {
+      case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly";
+      case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable";
+      case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_validation_flags ===
+
+  VULKAN_HPP_INLINE std::string to_string( ValidationCheckEXT value )
+  {
+    switch ( value )
+    {
+      case ValidationCheckEXT::eAll: return "All";
+      case ValidationCheckEXT::eShaders: return "Shaders";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+
+  VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN )
+  {
+    return "(void)";
+  }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_EXT_pipeline_robustness ===
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRobustnessBufferBehaviorEXT value )
+  {
+    switch ( value )
+    {
+      case PipelineRobustnessBufferBehaviorEXT::eDeviceDefault: return "DeviceDefault";
+      case PipelineRobustnessBufferBehaviorEXT::eDisabled: return "Disabled";
+      case PipelineRobustnessBufferBehaviorEXT::eRobustBufferAccess: return "RobustBufferAccess";
+      case PipelineRobustnessBufferBehaviorEXT::eRobustBufferAccess2: return "RobustBufferAccess2";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineRobustnessImageBehaviorEXT value )
+  {
+    switch ( value )
+    {
+      case PipelineRobustnessImageBehaviorEXT::eDeviceDefault: return "DeviceDefault";
+      case PipelineRobustnessImageBehaviorEXT::eDisabled: return "Disabled";
+      case PipelineRobustnessImageBehaviorEXT::eRobustImageAccess: return "RobustImageAccess";
+      case PipelineRobustnessImageBehaviorEXT::eRobustImageAccess2: return "RobustImageAccess2";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_conditional_rendering ===
+
+  VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case ConditionalRenderingFlagBitsEXT::eInverted: return "Inverted";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_display_surface_counter ===
+
+  VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case SurfaceCounterFlagBitsEXT::eVblank: return "Vblank";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_display_control ===
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayPowerStateEXT value )
+  {
+    switch ( value )
+    {
+      case DisplayPowerStateEXT::eOff: return "Off";
+      case DisplayPowerStateEXT::eSuspend: return "Suspend";
+      case DisplayPowerStateEXT::eOn: return "On";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceEventTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayEventTypeEXT value )
+  {
+    switch ( value )
+    {
+      case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_NV_viewport_swizzle ===
+
+  VULKAN_HPP_INLINE std::string to_string( ViewportCoordinateSwizzleNV value )
+  {
+    switch ( value )
+    {
+      case ViewportCoordinateSwizzleNV::ePositiveX: return "PositiveX";
+      case ViewportCoordinateSwizzleNV::eNegativeX: return "NegativeX";
+      case ViewportCoordinateSwizzleNV::ePositiveY: return "PositiveY";
+      case ViewportCoordinateSwizzleNV::eNegativeY: return "NegativeY";
+      case ViewportCoordinateSwizzleNV::ePositiveZ: return "PositiveZ";
+      case ViewportCoordinateSwizzleNV::eNegativeZ: return "NegativeZ";
+      case ViewportCoordinateSwizzleNV::ePositiveW: return "PositiveW";
+      case ViewportCoordinateSwizzleNV::eNegativeW: return "NegativeW";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_discard_rectangles ===
+
+  VULKAN_HPP_INLINE std::string to_string( DiscardRectangleModeEXT value )
+  {
+    switch ( value )
+    {
+      case DiscardRectangleModeEXT::eInclusive: return "Inclusive";
+      case DiscardRectangleModeEXT::eExclusive: return "Exclusive";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  //=== VK_EXT_conservative_rasterization ===
+
+  VULKAN_HPP_INLINE std::string to_string(
ConservativeRasterizationModeEXT value ) + { + switch ( value ) + { + case ConservativeRasterizationModeEXT::eDisabled: return "Disabled"; + case ConservativeRasterizationModeEXT::eOverestimate: return "Overestimate"; + case ConservativeRasterizationModeEXT::eUnderestimate: return "Underestimate"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_EXT_depth_clip_enable === + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_KHR_performance_query === + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagBitsKHR value ) + { + switch ( value ) + { + case PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting: return "PerformanceImpacting"; + case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted: return "ConcurrentlyImpacted"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value ) + { + switch ( value ) + { + case PerformanceCounterScopeKHR::eCommandBuffer: return "CommandBuffer"; + case PerformanceCounterScopeKHR::eRenderPass: return "RenderPass"; + case PerformanceCounterScopeKHR::eCommand: return "Command"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterStorageKHR value ) + { + switch ( value ) + { + case PerformanceCounterStorageKHR::eInt32: return "Int32"; + case PerformanceCounterStorageKHR::eInt64: return "Int64"; + case PerformanceCounterStorageKHR::eUint32: return "Uint32"; + case PerformanceCounterStorageKHR::eUint64: return "Uint64"; + case PerformanceCounterStorageKHR::eFloat32: return "Float32"; + case PerformanceCounterStorageKHR::eFloat64: return "Float64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PerformanceCounterUnitKHR value ) + { + switch ( value ) + { + case PerformanceCounterUnitKHR::eGeneric: return "Generic"; + case PerformanceCounterUnitKHR::ePercentage: return "Percentage"; + case PerformanceCounterUnitKHR::eNanoseconds: return "Nanoseconds"; + case PerformanceCounterUnitKHR::eBytes: return "Bytes"; + case PerformanceCounterUnitKHR::eBytesPerSecond: return "BytesPerSecond"; + case PerformanceCounterUnitKHR::eKelvin: return "Kelvin"; + case PerformanceCounterUnitKHR::eWatts: return "Watts"; + case PerformanceCounterUnitKHR::eVolts: return "Volts"; + case PerformanceCounterUnitKHR::eAmps: return "Amps"; + case PerformanceCounterUnitKHR::eHertz: return "Hertz"; + case PerformanceCounterUnitKHR::eCycles: return "Cycles"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR ) + { + return "(void)"; + } + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + VULKAN_HPP_INLINE std::string to_string( 
MacOSSurfaceCreateFlagBitsMVK ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value ) + { + switch ( value ) + { + case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose: return "Verbose"; + case DebugUtilsMessageSeverityFlagBitsEXT::eInfo: return "Info"; + case DebugUtilsMessageSeverityFlagBitsEXT::eWarning: return "Warning"; + case DebugUtilsMessageSeverityFlagBitsEXT::eError: return "Error"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value ) + { + switch ( value ) + { + case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General"; + case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation"; + case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance"; + case DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding: return "DeviceAddressBinding"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_EXT_blend_operation_advanced === + + VULKAN_HPP_INLINE std::string to_string( BlendOverlapEXT value ) + { + switch ( value ) + { + case BlendOverlapEXT::eUncorrelated: return "Uncorrelated"; + case BlendOverlapEXT::eDisjoint: return "Disjoint"; + case BlendOverlapEXT::eConjoint: return "Conjoint"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_fragment_coverage_to_color === + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV ) + { + return "(void)"; + } + + //=== VK_KHR_acceleration_structure === + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeKHR value ) + { + switch ( value ) + { + case AccelerationStructureTypeKHR::eTopLevel: return "TopLevel"; + case AccelerationStructureTypeKHR::eBottomLevel: return "BottomLevel"; + case AccelerationStructureTypeKHR::eGeneric: return "Generic"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureBuildTypeKHR value ) + { + switch ( value ) + { + case AccelerationStructureBuildTypeKHR::eHost: return "Host"; + case AccelerationStructureBuildTypeKHR::eDevice: return "Device"; + case AccelerationStructureBuildTypeKHR::eHostOrDevice: return "HostOrDevice"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsKHR value ) + { + switch ( value ) + { + case GeometryFlagBitsKHR::eOpaque: return "Opaque"; + case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation: return "NoDuplicateAnyHitInvocation"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsKHR value ) + { + switch ( value ) + { + case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable: return "TriangleFacingCullDisable"; + case GeometryInstanceFlagBitsKHR::eTriangleFlipFacing: return "TriangleFlipFacing"; + case 
GeometryInstanceFlagBitsKHR::eForceOpaque: return "ForceOpaque"; + case GeometryInstanceFlagBitsKHR::eForceNoOpaque: return "ForceNoOpaque"; + case GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT: return "ForceOpacityMicromap2StateEXT"; + case GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT: return "DisableOpacityMicromapsEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsKHR value ) + { + switch ( value ) + { + case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate: return "AllowUpdate"; + case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction: return "AllowCompaction"; + case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace: return "PreferFastTrace"; + case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild: return "PreferFastBuild"; + case BuildAccelerationStructureFlagBitsKHR::eLowMemory: return "LowMemory"; + case BuildAccelerationStructureFlagBitsKHR::eMotionNV: return "MotionNV"; + case BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT: return "AllowOpacityMicromapUpdateEXT"; + case BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT: return "AllowDisableOpacityMicromapsEXT"; + case BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT: return "AllowOpacityMicromapDataUpdateEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BuildAccelerationStructureFlagBitsKHR::eAllowDisplacementMicromapUpdateNV: return "AllowDisplacementMicromapUpdateNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case BuildAccelerationStructureFlagBitsKHR::eAllowDataAccess: return "AllowDataAccess"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeKHR value ) + { + switch ( value ) + { + case CopyAccelerationStructureModeKHR::eClone: return "Clone"; + case CopyAccelerationStructureModeKHR::eCompact: return "Compact"; + case CopyAccelerationStructureModeKHR::eSerialize: return "Serialize"; + case CopyAccelerationStructureModeKHR::eDeserialize: return "Deserialize"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( GeometryTypeKHR value ) + { + switch ( value ) + { + case GeometryTypeKHR::eTriangles: return "Triangles"; + case GeometryTypeKHR::eAabbs: return "Aabbs"; + case GeometryTypeKHR::eInstances: return "Instances"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCompatibilityKHR value ) + { + switch ( value ) + { + case AccelerationStructureCompatibilityKHR::eCompatible: return "Compatible"; + case AccelerationStructureCompatibilityKHR::eIncompatible: return "Incompatible"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagBitsKHR value ) + { + switch ( value ) + { + case AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + case AccelerationStructureCreateFlagBitsKHR::eDescriptorBufferCaptureReplayEXT: return "DescriptorBufferCaptureReplayEXT"; + case AccelerationStructureCreateFlagBitsKHR::eMotionNV: return "MotionNV"; + default: 
return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureModeKHR value ) + { + switch ( value ) + { + case BuildAccelerationStructureModeKHR::eBuild: return "Build"; + case BuildAccelerationStructureModeKHR::eUpdate: return "Update"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_ray_tracing_pipeline === + + VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value ) + { + switch ( value ) + { + case RayTracingShaderGroupTypeKHR::eGeneral: return "General"; + case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup: return "TrianglesHitGroup"; + case RayTracingShaderGroupTypeKHR::eProceduralHitGroup: return "ProceduralHitGroup"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ShaderGroupShaderKHR value ) + { + switch ( value ) + { + case ShaderGroupShaderKHR::eGeneral: return "General"; + case ShaderGroupShaderKHR::eClosestHit: return "ClosestHit"; + case ShaderGroupShaderKHR::eAnyHit: return "AnyHit"; + case ShaderGroupShaderKHR::eIntersection: return "Intersection"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_framebuffer_mixed_samples === + + VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value ) + { + switch ( value ) + { + case CoverageModulationModeNV::eNone: return "None"; + case CoverageModulationModeNV::eRgb: return "Rgb"; + case CoverageModulationModeNV::eAlpha: return "Alpha"; + case CoverageModulationModeNV::eRgba: return "Rgba"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV ) + { + return "(void)"; + } + + //=== VK_EXT_validation_cache === + + VULKAN_HPP_INLINE std::string to_string( ValidationCacheHeaderVersionEXT value ) + { + switch ( value ) + { + case ValidationCacheHeaderVersionEXT::eOne: return "One"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_NV_shading_rate_image === + + VULKAN_HPP_INLINE std::string to_string( ShadingRatePaletteEntryNV value ) + { + switch ( value ) + { + case ShadingRatePaletteEntryNV::eNoInvocations: return "NoInvocations"; + case ShadingRatePaletteEntryNV::e16InvocationsPerPixel: return "16InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e8InvocationsPerPixel: return "8InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e4InvocationsPerPixel: return "4InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e2InvocationsPerPixel: return "2InvocationsPerPixel"; + case ShadingRatePaletteEntryNV::e1InvocationPerPixel: return "1InvocationPerPixel"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels"; + case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels"; + 
case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( CoarseSampleOrderTypeNV value ) + { + switch ( value ) + { + case CoarseSampleOrderTypeNV::eDefault: return "Default"; + case CoarseSampleOrderTypeNV::eCustom: return "Custom"; + case CoarseSampleOrderTypeNV::ePixelMajor: return "PixelMajor"; + case CoarseSampleOrderTypeNV::eSampleMajor: return "SampleMajor"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_ray_tracing === + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value ) + { + switch ( value ) + { + case AccelerationStructureMemoryRequirementsTypeNV::eObject: return "Object"; + case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch: return "BuildScratch"; + case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch: return "UpdateScratch"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_pipeline_compiler_control === + + VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD ) + { + return "(void)"; + } + + //=== VK_KHR_global_priority === + + VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityKHR value ) + { + switch ( value ) + { + case QueueGlobalPriorityKHR::eLow: return "Low"; + case QueueGlobalPriorityKHR::eMedium: return "Medium"; + case QueueGlobalPriorityKHR::eHigh: return "High"; + case QueueGlobalPriorityKHR::eRealtime: return "Realtime"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_memory_overallocation_behavior === + + VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value ) + { + switch ( value ) + { + case MemoryOverallocationBehaviorAMD::eDefault: return "Default"; + case MemoryOverallocationBehaviorAMD::eAllowed: return "Allowed"; + case MemoryOverallocationBehaviorAMD::eDisallowed: return "Disallowed"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_INTEL_performance_query === + + VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value ) + { + switch ( value ) + { + case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated: return "CommandQueueMetricsDiscoveryActivated"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( QueryPoolSamplingModeINTEL value ) + { + switch ( value ) + { + case QueryPoolSamplingModeINTEL::eManual: return "Manual"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PerformanceOverrideTypeINTEL value ) + { + switch ( value ) + { + case PerformanceOverrideTypeINTEL::eNullHardware: return "NullHardware"; + case PerformanceOverrideTypeINTEL::eFlushGpuCaches: return "FlushGpuCaches"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PerformanceParameterTypeINTEL value ) + { + switch ( value ) + { + case PerformanceParameterTypeINTEL::eHwCountersSupported: return "HwCountersSupported"; + case 
PerformanceParameterTypeINTEL::eStreamMarkerValidBits: return "StreamMarkerValidBits"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PerformanceValueTypeINTEL value ) + { + switch ( value ) + { + case PerformanceValueTypeINTEL::eUint32: return "Uint32"; + case PerformanceValueTypeINTEL::eUint64: return "Uint64"; + case PerformanceValueTypeINTEL::eFloat: return "Float"; + case PerformanceValueTypeINTEL::eBool: return "Bool"; + case PerformanceValueTypeINTEL::eString: return "String"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateCombinerOpKHR value ) + { + switch ( value ) + { + case FragmentShadingRateCombinerOpKHR::eKeep: return "Keep"; + case FragmentShadingRateCombinerOpKHR::eReplace: return "Replace"; + case FragmentShadingRateCombinerOpKHR::eMin: return "Min"; + case FragmentShadingRateCombinerOpKHR::eMax: return "Max"; + case FragmentShadingRateCombinerOpKHR::eMul: return "Mul"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_shader_core_properties2 === + + VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD ) + { + return "(void)"; + } + + //=== VK_EXT_validation_features === + + VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value ) + { + switch ( value ) + { + case ValidationFeatureEnableEXT::eGpuAssisted: return "GpuAssisted"; + case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot: return "GpuAssistedReserveBindingSlot"; + case ValidationFeatureEnableEXT::eBestPractices: return "BestPractices"; + case ValidationFeatureEnableEXT::eDebugPrintf: return "DebugPrintf"; + case ValidationFeatureEnableEXT::eSynchronizationValidation: return "SynchronizationValidation"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value ) + { + switch ( value ) + { + case ValidationFeatureDisableEXT::eAll: return "All"; + case ValidationFeatureDisableEXT::eShaders: return "Shaders"; + case ValidationFeatureDisableEXT::eThreadSafety: return "ThreadSafety"; + case ValidationFeatureDisableEXT::eApiParameters: return "ApiParameters"; + case ValidationFeatureDisableEXT::eObjectLifetimes: return "ObjectLifetimes"; + case ValidationFeatureDisableEXT::eCoreChecks: return "CoreChecks"; + case ValidationFeatureDisableEXT::eUniqueHandles: return "UniqueHandles"; + case ValidationFeatureDisableEXT::eShaderValidationCache: return "ShaderValidationCache"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_coverage_reduction_mode === + + VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value ) + { + switch ( value ) + { + case CoverageReductionModeNV::eMerge: 
return "Merge"; + case CoverageReductionModeNV::eTruncate: return "Truncate"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV ) + { + return "(void)"; + } + + //=== VK_EXT_provoking_vertex === + + VULKAN_HPP_INLINE std::string to_string( ProvokingVertexModeEXT value ) + { + switch ( value ) + { + case ProvokingVertexModeEXT::eFirstVertex: return "FirstVertex"; + case ProvokingVertexModeEXT::eLastVertex: return "LastVertex"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + VULKAN_HPP_INLINE std::string to_string( FullScreenExclusiveEXT value ) + { + switch ( value ) + { + case FullScreenExclusiveEXT::eDefault: return "Default"; + case FullScreenExclusiveEXT::eAllowed: return "Allowed"; + case FullScreenExclusiveEXT::eDisallowed: return "Disallowed"; + case FullScreenExclusiveEXT::eApplicationControlled: return "ApplicationControlled"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_EXT_line_rasterization === + + VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeEXT value ) + { + switch ( value ) + { + case LineRasterizationModeEXT::eDefault: return "Default"; + case LineRasterizationModeEXT::eRectangular: return "Rectangular"; + case LineRasterizationModeEXT::eBresenham: return "Bresenham"; + case LineRasterizationModeEXT::eRectangularSmooth: return "RectangularSmooth"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_pipeline_executable_properties === + + VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value ) + { + switch ( value ) + { + case PipelineExecutableStatisticFormatKHR::eBool32: return "Bool32"; + case PipelineExecutableStatisticFormatKHR::eInt64: return "Int64"; + case PipelineExecutableStatisticFormatKHR::eUint64: return "Uint64"; + case PipelineExecutableStatisticFormatKHR::eFloat64: return "Float64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_host_image_copy === + + VULKAN_HPP_INLINE std::string to_string( HostImageCopyFlagBitsEXT value ) + { + switch ( value ) + { + case HostImageCopyFlagBitsEXT::eMemcpy: return "Memcpy"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_map_memory2 === + + VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagBitsKHR ) + { + return "(void)"; + } + + //=== VK_EXT_surface_maintenance1 === + + VULKAN_HPP_INLINE std::string to_string( PresentScalingFlagBitsEXT value ) + { + switch ( value ) + { + case PresentScalingFlagBitsEXT::eOneToOne: return "OneToOne"; + case PresentScalingFlagBitsEXT::eAspectRatioStretch: return "AspectRatioStretch"; + case PresentScalingFlagBitsEXT::eStretch: return "Stretch"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PresentGravityFlagBitsEXT value ) + { + switch ( value ) + { + case 
PresentGravityFlagBitsEXT::eMin: return "Min"; + case PresentGravityFlagBitsEXT::eMax: return "Max"; + case PresentGravityFlagBitsEXT::eCentered: return "Centered"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_device_generated_commands === + + VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagBitsNV value ) + { + switch ( value ) + { + case IndirectStateFlagBitsNV::eFlagFrontface: return "FlagFrontface"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value ) + { + switch ( value ) + { + case IndirectCommandsTokenTypeNV::eShaderGroup: return "ShaderGroup"; + case IndirectCommandsTokenTypeNV::eStateFlags: return "StateFlags"; + case IndirectCommandsTokenTypeNV::eIndexBuffer: return "IndexBuffer"; + case IndirectCommandsTokenTypeNV::eVertexBuffer: return "VertexBuffer"; + case IndirectCommandsTokenTypeNV::ePushConstant: return "PushConstant"; + case IndirectCommandsTokenTypeNV::eDrawIndexed: return "DrawIndexed"; + case IndirectCommandsTokenTypeNV::eDraw: return "Draw"; + case IndirectCommandsTokenTypeNV::eDrawTasks: return "DrawTasks"; + case IndirectCommandsTokenTypeNV::eDrawMeshTasks: return "DrawMeshTasks"; + case IndirectCommandsTokenTypeNV::ePipeline: return "Pipeline"; + case IndirectCommandsTokenTypeNV::eDispatch: return "Dispatch"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNV value ) + { + switch ( value ) + { + case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess: return "ExplicitPreprocess"; + case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences: return "IndexedSequences"; + case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences: return "UnorderedSequences"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_depth_bias_control === + + VULKAN_HPP_INLINE std::string to_string( DepthBiasRepresentationEXT value ) + { + switch ( value ) + { + case DepthBiasRepresentationEXT::eLeastRepresentableValueFormat: return "LeastRepresentableValueFormat"; + case DepthBiasRepresentationEXT::eLeastRepresentableValueForceUnorm: return "LeastRepresentableValueForceUnorm"; + case DepthBiasRepresentationEXT::eFloat: return "Float"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_device_memory_report === + + VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportEventTypeEXT value ) + { + switch ( value ) + { + case DeviceMemoryReportEventTypeEXT::eAllocate: return "Allocate"; + case DeviceMemoryReportEventTypeEXT::eFree: return "Free"; + case DeviceMemoryReportEventTypeEXT::eImport: return "Import"; + case DeviceMemoryReportEventTypeEXT::eUnimport: return "Unimport"; + case DeviceMemoryReportEventTypeEXT::eAllocationFailed: return "AllocationFailed"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagBitsEXT ) + { + return "(void)"; + } + + //=== VK_KHR_video_encode_queue === + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeCapabilityFlagBitsKHR value ) + { + switch ( value ) + { + case 
VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes: return "PrecedingExternallyEncodedBytes"; + case VideoEncodeCapabilityFlagBitsKHR::eInsufficientstreamBufferRangeDetectionBit: return "InsufficientstreamBufferRangeDetectionBit"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFeedbackFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeFeedbackFlagBitsKHR::estreamBufferOffsetBit: return "streamBufferOffsetBit"; + case VideoEncodeFeedbackFlagBitsKHR::estreamBytesWrittenBit: return "streamBytesWrittenBit"; + case VideoEncodeFeedbackFlagBitsKHR::estreamHasOverridesBit: return "streamHasOverridesBit"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeUsageFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeUsageFlagBitsKHR::eDefault: return "Default"; + case VideoEncodeUsageFlagBitsKHR::eTranscoding: return "Transcoding"; + case VideoEncodeUsageFlagBitsKHR::eStreaming: return "Streaming"; + case VideoEncodeUsageFlagBitsKHR::eRecording: return "Recording"; + case VideoEncodeUsageFlagBitsKHR::eConferencing: return "Conferencing"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeContentFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeContentFlagBitsKHR::eDefault: return "Default"; + case VideoEncodeContentFlagBitsKHR::eCamera: return "Camera"; + case VideoEncodeContentFlagBitsKHR::eDesktop: return "Desktop"; + case VideoEncodeContentFlagBitsKHR::eRendered: return "Rendered"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeTuningModeKHR value ) + { + switch ( value ) + { + case VideoEncodeTuningModeKHR::eDefault: return "Default"; + case VideoEncodeTuningModeKHR::eHighQuality: return "HighQuality"; + case VideoEncodeTuningModeKHR::eLowLatency: return "LowLatency"; + case VideoEncodeTuningModeKHR::eUltraLowLatency: return "UltraLowLatency"; + case VideoEncodeTuningModeKHR::eLossless: return "Lossless"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeRateControlModeFlagBitsKHR::eDefault: return "Default"; + case VideoEncodeRateControlModeFlagBitsKHR::eDisabled: return "Disabled"; + case VideoEncodeRateControlModeFlagBitsKHR::eCbr: return "Cbr"; + case VideoEncodeRateControlModeFlagBitsKHR::eVbr: return "Vbr"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagBitsKHR ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagBitsKHR ) + { + return "(void)"; + } + + //=== VK_NV_device_diagnostics_config === + + VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagBitsNV value ) + { + switch ( value ) + { + case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo: return "EnableShaderDebugInfo"; + case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking: return "EnableResourceTracking"; + case 
DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints: return "EnableAutomaticCheckpoints"; + case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderErrorReporting: return "EnableShaderErrorReporting"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + VULKAN_HPP_INLINE std::string to_string( ExportMetalObjectTypeFlagBitsEXT value ) + { + switch ( value ) + { + case ExportMetalObjectTypeFlagBitsEXT::eMetalDevice: return "MetalDevice"; + case ExportMetalObjectTypeFlagBitsEXT::eMetalCommandQueue: return "MetalCommandQueue"; + case ExportMetalObjectTypeFlagBitsEXT::eMetalBuffer: return "MetalBuffer"; + case ExportMetalObjectTypeFlagBitsEXT::eMetalTexture: return "MetalTexture"; + case ExportMetalObjectTypeFlagBitsEXT::eMetalIosurface: return "MetalIosurface"; + case ExportMetalObjectTypeFlagBitsEXT::eMetalSharedEvent: return "MetalSharedEvent"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_graphics_pipeline_library === + + VULKAN_HPP_INLINE std::string to_string( GraphicsPipelineLibraryFlagBitsEXT value ) + { + switch ( value ) + { + case GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface: return "VertexInputInterface"; + case GraphicsPipelineLibraryFlagBitsEXT::ePreRasterizationShaders: return "PreRasterizationShaders"; + case GraphicsPipelineLibraryFlagBitsEXT::eFragmentShader: return "FragmentShader"; + case GraphicsPipelineLibraryFlagBitsEXT::eFragmentOutputInterface: return "FragmentOutputInterface"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_fragment_shading_rate_enums === + + VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateNV value ) + { + switch ( value ) + { + case FragmentShadingRateNV::e1InvocationPerPixel: return "1InvocationPerPixel"; + case FragmentShadingRateNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels"; + case FragmentShadingRateNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels"; + case FragmentShadingRateNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels"; + case FragmentShadingRateNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels"; + case FragmentShadingRateNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels"; + case FragmentShadingRateNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels"; + case FragmentShadingRateNV::e2InvocationsPerPixel: return "2InvocationsPerPixel"; + case FragmentShadingRateNV::e4InvocationsPerPixel: return "4InvocationsPerPixel"; + case FragmentShadingRateNV::e8InvocationsPerPixel: return "8InvocationsPerPixel"; + case FragmentShadingRateNV::e16InvocationsPerPixel: return "16InvocationsPerPixel"; + case FragmentShadingRateNV::eNoInvocations: return "NoInvocations"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateTypeNV value ) + { + switch ( value ) + { + case FragmentShadingRateTypeNV::eFragmentSize: return "FragmentSize"; + case FragmentShadingRateTypeNV::eEnums: return "Enums"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_ray_tracing_motion_blur === + + VULKAN_HPP_INLINE std::string to_string( 
AccelerationStructureMotionInstanceTypeNV value ) + { + switch ( value ) + { + case AccelerationStructureMotionInstanceTypeNV::eStatic: return "Static"; + case AccelerationStructureMotionInstanceTypeNV::eMatrixMotion: return "MatrixMotion"; + case AccelerationStructureMotionInstanceTypeNV::eSrtMotion: return "SrtMotion"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInfoFlagBitsNV ) + { + return "(void)"; + } + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMotionInstanceFlagBitsNV ) + { + return "(void)"; + } + + //=== VK_EXT_image_compression_control === + + VULKAN_HPP_INLINE std::string to_string( ImageCompressionFlagBitsEXT value ) + { + switch ( value ) + { + case ImageCompressionFlagBitsEXT::eDefault: return "Default"; + case ImageCompressionFlagBitsEXT::eFixedRateDefault: return "FixedRateDefault"; + case ImageCompressionFlagBitsEXT::eFixedRateExplicit: return "FixedRateExplicit"; + case ImageCompressionFlagBitsEXT::eDisabled: return "Disabled"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ImageCompressionFixedRateFlagBitsEXT value ) + { + switch ( value ) + { + case ImageCompressionFixedRateFlagBitsEXT::eNone: return "None"; + case ImageCompressionFixedRateFlagBitsEXT::e1Bpc: return "1Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e2Bpc: return "2Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e3Bpc: return "3Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e4Bpc: return "4Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e5Bpc: return "5Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e6Bpc: return "6Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e7Bpc: return "7Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e8Bpc: return "8Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e9Bpc: return "9Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e10Bpc: return "10Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e11Bpc: return "11Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e12Bpc: return "12Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e13Bpc: return "13Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e14Bpc: return "14Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e15Bpc: return "15Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e16Bpc: return "16Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e17Bpc: return "17Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e18Bpc: return "18Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e19Bpc: return "19Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e20Bpc: return "20Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e21Bpc: return "21Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e22Bpc: return "22Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e23Bpc: return "23Bpc"; + case ImageCompressionFixedRateFlagBitsEXT::e24Bpc: return "24Bpc"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_device_fault === + + VULKAN_HPP_INLINE std::string to_string( DeviceFaultAddressTypeEXT value ) + { + switch ( value ) + { + case DeviceFaultAddressTypeEXT::eNone: return "None"; + case DeviceFaultAddressTypeEXT::eReadInvalid: return "ReadInvalid"; + case DeviceFaultAddressTypeEXT::eWriteInvalid: return "WriteInvalid"; + case DeviceFaultAddressTypeEXT::eExecuteInvalid: return 
"ExecuteInvalid"; + case DeviceFaultAddressTypeEXT::eInstructionPointerUnknown: return "InstructionPointerUnknown"; + case DeviceFaultAddressTypeEXT::eInstructionPointerInvalid: return "InstructionPointerInvalid"; + case DeviceFaultAddressTypeEXT::eInstructionPointerFault: return "InstructionPointerFault"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( DeviceFaultVendorBinaryHeaderVersionEXT value ) + { + switch ( value ) + { + case DeviceFaultVendorBinaryHeaderVersionEXT::eOne: return "One"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + + VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_device_address_binding_report === + + VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingFlagBitsEXT value ) + { + switch ( value ) + { + case DeviceAddressBindingFlagBitsEXT::eInternalObject: return "InternalObject"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingTypeEXT value ) + { + switch ( value ) + { + case DeviceAddressBindingTypeEXT::eBind: return "Bind"; + case DeviceAddressBindingTypeEXT::eUnbind: return "Unbind"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + VULKAN_HPP_INLINE std::string to_string( ImageConstraintsInfoFlagBitsFUCHSIA value ) + { + switch ( value ) + { + case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely: return "CpuReadRarely"; + case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften: return "CpuReadOften"; + case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely: return "CpuWriteRarely"; + case ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften: return "CpuWriteOften"; + case ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional: return "ProtectedOptional"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ImageFormatConstraintsFlagBitsFUCHSIA ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_frame_boundary === + + VULKAN_HPP_INLINE std::string to_string( FrameBoundaryFlagBitsEXT value ) + { + switch ( value ) + { + case FrameBoundaryFlagBitsEXT::eFrameEnd: return "FrameEnd"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagBitsQNX ) + { + return "(void)"; + } +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_opacity_micromap === + + VULKAN_HPP_INLINE std::string to_string( MicromapTypeEXT value ) + { + switch ( value ) + { + case MicromapTypeEXT::eOpacityMicromap: return "OpacityMicromap"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case MicromapTypeEXT::eDisplacementMicromapNV: return "DisplacementMicromapNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string 
to_string( BuildMicromapFlagBitsEXT value ) + { + switch ( value ) + { + case BuildMicromapFlagBitsEXT::ePreferFastTrace: return "PreferFastTrace"; + case BuildMicromapFlagBitsEXT::ePreferFastBuild: return "PreferFastBuild"; + case BuildMicromapFlagBitsEXT::eAllowCompaction: return "AllowCompaction"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( CopyMicromapModeEXT value ) + { + switch ( value ) + { + case CopyMicromapModeEXT::eClone: return "Clone"; + case CopyMicromapModeEXT::eSerialize: return "Serialize"; + case CopyMicromapModeEXT::eDeserialize: return "Deserialize"; + case CopyMicromapModeEXT::eCompact: return "Compact"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( MicromapCreateFlagBitsEXT value ) + { + switch ( value ) + { + case MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( BuildMicromapModeEXT value ) + { + switch ( value ) + { + case BuildMicromapModeEXT::eBuild: return "Build"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( OpacityMicromapFormatEXT value ) + { + switch ( value ) + { + case OpacityMicromapFormatEXT::e2State: return "2State"; + case OpacityMicromapFormatEXT::e4State: return "4State"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( OpacityMicromapSpecialIndexEXT value ) + { + switch ( value ) + { + case OpacityMicromapSpecialIndexEXT::eFullyTransparent: return "FullyTransparent"; + case OpacityMicromapSpecialIndexEXT::eFullyOpaque: return "FullyOpaque"; + case OpacityMicromapSpecialIndexEXT::eFullyUnknownTransparent: return "FullyUnknownTransparent"; + case OpacityMicromapSpecialIndexEXT::eFullyUnknownOpaque: return "FullyUnknownOpaque"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + + VULKAN_HPP_INLINE std::string to_string( DisplacementMicromapFormatNV value ) + { + switch ( value ) + { + case DisplacementMicromapFormatNV::e64Triangles64Bytes: return "64Triangles64Bytes"; + case DisplacementMicromapFormatNV::e256Triangles128Bytes: return "256Triangles128Bytes"; + case DisplacementMicromapFormatNV::e1024Triangles128Bytes: return "1024Triangles128Bytes"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_ARM_scheduling_controls === + + VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceSchedulingControlsFlagBitsARM value ) + { + switch ( value ) + { + case PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount: return "ShaderCoreCount"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_memory_decompression === + + VULKAN_HPP_INLINE std::string to_string( MemoryDecompressionMethodFlagBitsNV value ) + { + switch ( value ) + { + case MemoryDecompressionMethodFlagBitsNV::eGdeflate10: return "Gdeflate10"; + default: return 
"invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_subpass_merge_feedback === + + VULKAN_HPP_INLINE std::string to_string( SubpassMergeStatusEXT value ) + { + switch ( value ) + { + case SubpassMergeStatusEXT::eMerged: return "Merged"; + case SubpassMergeStatusEXT::eDisallowed: return "Disallowed"; + case SubpassMergeStatusEXT::eNotMergedSideEffects: return "NotMergedSideEffects"; + case SubpassMergeStatusEXT::eNotMergedSamplesMismatch: return "NotMergedSamplesMismatch"; + case SubpassMergeStatusEXT::eNotMergedViewsMismatch: return "NotMergedViewsMismatch"; + case SubpassMergeStatusEXT::eNotMergedAliasing: return "NotMergedAliasing"; + case SubpassMergeStatusEXT::eNotMergedDependencies: return "NotMergedDependencies"; + case SubpassMergeStatusEXT::eNotMergedIncompatibleInputAttachment: return "NotMergedIncompatibleInputAttachment"; + case SubpassMergeStatusEXT::eNotMergedTooManyAttachments: return "NotMergedTooManyAttachments"; + case SubpassMergeStatusEXT::eNotMergedInsufficientStorage: return "NotMergedInsufficientStorage"; + case SubpassMergeStatusEXT::eNotMergedDepthStencilCount: return "NotMergedDepthStencilCount"; + case SubpassMergeStatusEXT::eNotMergedResolveAttachmentReuse: return "NotMergedResolveAttachmentReuse"; + case SubpassMergeStatusEXT::eNotMergedSingleSubpass: return "NotMergedSingleSubpass"; + case SubpassMergeStatusEXT::eNotMergedUnspecified: return "NotMergedUnspecified"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_LUNARG_direct_driver_loading === + + VULKAN_HPP_INLINE std::string to_string( DirectDriverLoadingModeLUNARG value ) + { + switch ( value ) + { + case DirectDriverLoadingModeLUNARG::eExclusive: return "Exclusive"; + case DirectDriverLoadingModeLUNARG::eInclusive: return "Inclusive"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( DirectDriverLoadingFlagBitsLUNARG ) + { + return "(void)"; + } + + //=== VK_NV_optical_flow === + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowUsageFlagBitsNV value ) + { + switch ( value ) + { + case OpticalFlowUsageFlagBitsNV::eUnknown: return "Unknown"; + case OpticalFlowUsageFlagBitsNV::eInput: return "Input"; + case OpticalFlowUsageFlagBitsNV::eOutput: return "Output"; + case OpticalFlowUsageFlagBitsNV::eHint: return "Hint"; + case OpticalFlowUsageFlagBitsNV::eCost: return "Cost"; + case OpticalFlowUsageFlagBitsNV::eGlobalFlow: return "GlobalFlow"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowGridSizeFlagBitsNV value ) + { + switch ( value ) + { + case OpticalFlowGridSizeFlagBitsNV::eUnknown: return "Unknown"; + case OpticalFlowGridSizeFlagBitsNV::e1X1: return "1X1"; + case OpticalFlowGridSizeFlagBitsNV::e2X2: return "2X2"; + case OpticalFlowGridSizeFlagBitsNV::e4X4: return "4X4"; + case OpticalFlowGridSizeFlagBitsNV::e8X8: return "8X8"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowPerformanceLevelNV value ) + { + switch ( value ) + { + case OpticalFlowPerformanceLevelNV::eUnknown: return "Unknown"; + case OpticalFlowPerformanceLevelNV::eSlow: return "Slow"; + case OpticalFlowPerformanceLevelNV::eMedium: return "Medium"; + case 
OpticalFlowPerformanceLevelNV::eFast: return "Fast"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionBindingPointNV value ) + { + switch ( value ) + { + case OpticalFlowSessionBindingPointNV::eUnknown: return "Unknown"; + case OpticalFlowSessionBindingPointNV::eInput: return "Input"; + case OpticalFlowSessionBindingPointNV::eReference: return "Reference"; + case OpticalFlowSessionBindingPointNV::eHint: return "Hint"; + case OpticalFlowSessionBindingPointNV::eFlowVector: return "FlowVector"; + case OpticalFlowSessionBindingPointNV::eBackwardFlowVector: return "BackwardFlowVector"; + case OpticalFlowSessionBindingPointNV::eCost: return "Cost"; + case OpticalFlowSessionBindingPointNV::eBackwardCost: return "BackwardCost"; + case OpticalFlowSessionBindingPointNV::eGlobalFlow: return "GlobalFlow"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionCreateFlagBitsNV value ) + { + switch ( value ) + { + case OpticalFlowSessionCreateFlagBitsNV::eEnableHint: return "EnableHint"; + case OpticalFlowSessionCreateFlagBitsNV::eEnableCost: return "EnableCost"; + case OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow: return "EnableGlobalFlow"; + case OpticalFlowSessionCreateFlagBitsNV::eAllowRegions: return "AllowRegions"; + case OpticalFlowSessionCreateFlagBitsNV::eBothDirections: return "BothDirections"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowExecuteFlagBitsNV value ) + { + switch ( value ) + { + case OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints: return "DisableTemporalHints"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_maintenance5 === + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits2KHR value ) + { + switch ( value ) + { + case PipelineCreateFlagBits2KHR::eDisableOptimization: return "DisableOptimization"; + case PipelineCreateFlagBits2KHR::eAllowDerivatives: return "AllowDerivatives"; + case PipelineCreateFlagBits2KHR::eDerivative: return "Derivative"; + case PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; + case PipelineCreateFlagBits2KHR::eDispatchBase: return "DispatchBase"; + case PipelineCreateFlagBits2KHR::eDeferCompileNV: return "DeferCompileNV"; + case PipelineCreateFlagBits2KHR::eCaptureStatistics: return "CaptureStatistics"; + case PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations: return "CaptureInternalRepresentations"; + case PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired"; + case PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure: return "EarlyReturnOnFailure"; + case PipelineCreateFlagBits2KHR::eLinkTimeOptimizationEXT: return "LinkTimeOptimizationEXT"; + case PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfoEXT: return "RetainLinkTimeOptimizationInfoEXT"; + case PipelineCreateFlagBits2KHR::eLibrary: return "Library"; + case PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles: return "RayTracingSkipTriangles"; + case PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs: return "RayTracingSkipAabbs"; + case PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders: return "RayTracingNoNullAnyHitShaders"; + case 
+      case PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders: return "RayTracingNoNullClosestHitShaders";
+      case PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders: return "RayTracingNoNullMissShaders";
+      case PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders: return "RayTracingNoNullIntersectionShaders";
+      case PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay: return "RayTracingShaderGroupHandleCaptureReplay";
+      case PipelineCreateFlagBits2KHR::eIndirectBindableNV: return "IndirectBindableNV";
+      case PipelineCreateFlagBits2KHR::eRayTracingAllowMotionNV: return "RayTracingAllowMotionNV";
+      case PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment: return "RenderingFragmentShadingRateAttachment";
+      case PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachmentEXT: return "RenderingFragmentDensityMapAttachmentEXT";
+      case PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromapEXT: return "RayTracingOpacityMicromapEXT";
+      case PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoopEXT: return "ColorAttachmentFeedbackLoopEXT";
+      case PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoopEXT: return "DepthStencilAttachmentFeedbackLoopEXT";
+      case PipelineCreateFlagBits2KHR::eNoProtectedAccessEXT: return "NoProtectedAccessEXT";
+      case PipelineCreateFlagBits2KHR::eProtectedAccessOnlyEXT: return "ProtectedAccessOnlyEXT";
+      case PipelineCreateFlagBits2KHR::eRayTracingDisplacementMicromapNV: return "RayTracingDisplacementMicromapNV";
+      case PipelineCreateFlagBits2KHR::eDescriptorBufferEXT: return "DescriptorBufferEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint64_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits2KHR value )
+  {
+    switch ( value )
+    {
+      case BufferUsageFlagBits2KHR::eTransferSrc: return "TransferSrc";
+      case BufferUsageFlagBits2KHR::eTransferDst: return "TransferDst";
+      case BufferUsageFlagBits2KHR::eUniformTexelBuffer: return "UniformTexelBuffer";
+      case BufferUsageFlagBits2KHR::eStorageTexelBuffer: return "StorageTexelBuffer";
+      case BufferUsageFlagBits2KHR::eUniformBuffer: return "UniformBuffer";
+      case BufferUsageFlagBits2KHR::eStorageBuffer: return "StorageBuffer";
+      case BufferUsageFlagBits2KHR::eIndexBuffer: return "IndexBuffer";
+      case BufferUsageFlagBits2KHR::eVertexBuffer: return "VertexBuffer";
+      case BufferUsageFlagBits2KHR::eIndirectBuffer: return "IndirectBuffer";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case BufferUsageFlagBits2KHR::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
+      case BufferUsageFlagBits2KHR::eShaderBindingTable: return "ShaderBindingTable";
+      case BufferUsageFlagBits2KHR::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT";
+      case BufferUsageFlagBits2KHR::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT";
+      case BufferUsageFlagBits2KHR::eVideoDecodeSrc: return "VideoDecodeSrc";
+      case BufferUsageFlagBits2KHR::eVideoDecodeDst: return "VideoDecodeDst";
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      case BufferUsageFlagBits2KHR::eVideoEncodeDst: return "VideoEncodeDst";
+      case BufferUsageFlagBits2KHR::eVideoEncodeSrc: return "VideoEncodeSrc";
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      case BufferUsageFlagBits2KHR::eShaderDeviceAddress: return "ShaderDeviceAddress";
+      case BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly: return "AccelerationStructureBuildInputReadOnly";
+      case BufferUsageFlagBits2KHR::eAccelerationStructureStorage: return "AccelerationStructureStorage";
+      case BufferUsageFlagBits2KHR::eSamplerDescriptorBufferEXT: return "SamplerDescriptorBufferEXT";
+      case BufferUsageFlagBits2KHR::eResourceDescriptorBufferEXT: return "ResourceDescriptorBufferEXT";
+      case BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBufferEXT: return "PushDescriptorsDescriptorBufferEXT";
+      case BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnlyEXT: return "MicromapBuildInputReadOnlyEXT";
+      case BufferUsageFlagBits2KHR::eMicromapStorageEXT: return "MicromapStorageEXT";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint64_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_shader_object ===
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagBitsEXT value )
+  {
+    switch ( value )
+    {
+      case ShaderCreateFlagBitsEXT::eLinkStage: return "LinkStage";
+      case ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize: return "AllowVaryingSubgroupSize";
+      case ShaderCreateFlagBitsEXT::eRequireFullSubgroups: return "RequireFullSubgroups";
+      case ShaderCreateFlagBitsEXT::eNoTaskShader: return "NoTaskShader";
+      case ShaderCreateFlagBitsEXT::eDispatchBase: return "DispatchBase";
+      case ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment: return "FragmentShadingRateAttachment";
+      case ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment: return "FragmentDensityMapAttachment";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderCodeTypeEXT value )
+  {
+    switch ( value )
+    {
+      case ShaderCodeTypeEXT::eBinary: return "Binary";
+      case ShaderCodeTypeEXT::eSpirv: return "Spirv";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_NV_ray_tracing_invocation_reorder ===
+
+  VULKAN_HPP_INLINE std::string to_string( RayTracingInvocationReorderModeNV value )
+  {
+    switch ( value )
+    {
+      case RayTracingInvocationReorderModeNV::eNone: return "None";
+      case RayTracingInvocationReorderModeNV::eReorder: return "Reorder";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_EXT_layer_settings ===
+
+  VULKAN_HPP_INLINE std::string to_string( LayerSettingTypeEXT value )
+  {
+    switch ( value )
+    {
+      case LayerSettingTypeEXT::eBool32: return "Bool32";
+      case LayerSettingTypeEXT::eInt32: return "Int32";
+      case LayerSettingTypeEXT::eInt64: return "Int64";
+      case LayerSettingTypeEXT::eUint32: return "Uint32";
+      case LayerSettingTypeEXT::eUint64: return "Uint64";
+      case LayerSettingTypeEXT::eFloat32: return "Float32";
+      case LayerSettingTypeEXT::eFloat64: return "Float64";
+      case LayerSettingTypeEXT::eString: return "String";
+      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
+    }
+  }
+
+  //=== VK_NV_low_latency2 ===
+
+  VULKAN_HPP_INLINE std::string to_string( LatencyMarkerNV value )
+  {
+    switch ( value )
+    {
+      case LatencyMarkerNV::eSimulationStart: return "SimulationStart";
+      case LatencyMarkerNV::eSimulationEnd: return "SimulationEnd";
+      case LatencyMarkerNV::eRendersubmitStart: return "RendersubmitStart";
+      case LatencyMarkerNV::eRendersubmitEnd: return "RendersubmitEnd";
+      case LatencyMarkerNV::ePresentStart: return "PresentStart";
+      case LatencyMarkerNV::ePresentEnd: return "PresentEnd";
"PresentEnd"; + case LatencyMarkerNV::eInputSample: return "InputSample"; + case LatencyMarkerNV::eTriggerFlash: return "TriggerFlash"; + case LatencyMarkerNV::eOutOfBandRendersubmitStart: return "OutOfBandRendersubmitStart"; + case LatencyMarkerNV::eOutOfBandRendersubmitEnd: return "OutOfBandRendersubmitEnd"; + case LatencyMarkerNV::eOutOfBandPresentStart: return "OutOfBandPresentStart"; + case LatencyMarkerNV::eOutOfBandPresentEnd: return "OutOfBandPresentEnd"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( OutOfBandQueueTypeNV value ) + { + switch ( value ) + { + case OutOfBandQueueTypeNV::eRender: return "Render"; + case OutOfBandQueueTypeNV::ePresent: return "Present"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_cooperative_matrix === + + VULKAN_HPP_INLINE std::string to_string( ScopeKHR value ) + { + switch ( value ) + { + case ScopeKHR::eDevice: return "Device"; + case ScopeKHR::eWorkgroup: return "Workgroup"; + case ScopeKHR::eSubgroup: return "Subgroup"; + case ScopeKHR::eQueueFamily: return "QueueFamily"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ComponentTypeKHR value ) + { + switch ( value ) + { + case ComponentTypeKHR::eFloat16: return "Float16"; + case ComponentTypeKHR::eFloat32: return "Float32"; + case ComponentTypeKHR::eFloat64: return "Float64"; + case ComponentTypeKHR::eSint8: return "Sint8"; + case ComponentTypeKHR::eSint16: return "Sint16"; + case ComponentTypeKHR::eSint32: return "Sint32"; + case ComponentTypeKHR::eSint64: return "Sint64"; + case ComponentTypeKHR::eUint8: return "Uint8"; + case ComponentTypeKHR::eUint16: return "Uint16"; + case ComponentTypeKHR::eUint32: return "Uint32"; + case ComponentTypeKHR::eUint64: return "Uint64"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_QCOM_image_processing2 === + + VULKAN_HPP_INLINE std::string to_string( BlockMatchWindowCompareModeQCOM value ) + { + switch ( value ) + { + case BlockMatchWindowCompareModeQCOM::eMin: return "Min"; + case BlockMatchWindowCompareModeQCOM::eMax: return "Max"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_QCOM_filter_cubic_weights === + + VULKAN_HPP_INLINE std::string to_string( CubicFilterWeightsQCOM value ) + { + switch ( value ) + { + case CubicFilterWeightsQCOM::eCatmullRom: return "CatmullRom"; + case CubicFilterWeightsQCOM::eZeroTangentCardinal: return "ZeroTangentCardinal"; + case CubicFilterWeightsQCOM::eBSpline: return "BSpline"; + case CubicFilterWeightsQCOM::eMitchellNetravali: return "MitchellNetravali"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_MSFT_layered_driver === + + VULKAN_HPP_INLINE std::string to_string( LayeredDriverUnderlyingApiMSFT value ) + { + switch ( value ) + { + case LayeredDriverUnderlyingApiMSFT::eNone: return "None"; + case LayeredDriverUnderlyingApiMSFT::eD3D12: return "D3D12"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_calibrated_timestamps === + + VULKAN_HPP_INLINE std::string to_string( TimeDomainKHR value ) + { + switch ( value ) + { + case TimeDomainKHR::eDevice: 
return "Device"; + case TimeDomainKHR::eClockMonotonic: return "ClockMonotonic"; + case TimeDomainKHR::eClockMonotonicRaw: return "ClockMonotonicRaw"; + case TimeDomainKHR::eQueryPerformanceCounter: return "QueryPerformanceCounter"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_vi.h b/MacroLibX/third_party/vulkan/vulkan_vi.h new file mode 100644 index 0000000..c9227e8 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_vi.h @@ -0,0 +1,48 @@ +#ifndef VULKAN_VI_H_ +#define VULKAN_VI_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_NN_vi_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_NN_vi_surface 1 +#define VK_NN_VI_SURFACE_SPEC_VERSION 1 +#define VK_NN_VI_SURFACE_EXTENSION_NAME "VK_NN_vi_surface" +typedef VkFlags VkViSurfaceCreateFlagsNN; +typedef struct VkViSurfaceCreateInfoNN { + VkStructureType sType; + const void* pNext; + VkViSurfaceCreateFlagsNN flags; + void* window; +} VkViSurfaceCreateInfoNN; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateViSurfaceNN)(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN( + VkInstance instance, + const VkViSurfaceCreateInfoNN* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_video.hpp b/MacroLibX/third_party/vulkan/vulkan_video.hpp new file mode 100644 index 0000000..570dfa4 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_video.hpp @@ -0,0 +1,2702 @@ +// Copyright 2021-2023 The Khronos Group Inc. +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+
+#ifndef VULKAN_VIDEO_HPP
+#define VULKAN_VIDEO_HPP
+
+#include <vk_video/vulkan_video_codec_h264std.h>
+#include <vk_video/vulkan_video_codec_h264std_decode.h>
+#include <vk_video/vulkan_video_codec_h264std_encode.h>
+#include <vk_video/vulkan_video_codec_h265std.h>
+#include <vk_video/vulkan_video_codec_h265std_decode.h>
+#include <vk_video/vulkan_video_codec_h265std_encode.h>
+#include <vk_video/vulkan_video_codecs_common.h>
+#include <vulkan/vulkan.hpp>
+
+#if !defined( VULKAN_HPP_VIDEO_NAMESPACE )
+#  define VULKAN_HPP_VIDEO_NAMESPACE video
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  namespace VULKAN_HPP_VIDEO_NAMESPACE
+  {
+
+    //=============
+    //=== ENUMs ===
+    //=============
+
+    //=== vulkan_video_codec_h264std ===
+
+    enum class H264ChromaFormatIdc
+    {
+      eMonochrome = STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME,
+      e420 = STD_VIDEO_H264_CHROMA_FORMAT_IDC_420,
+      e422 = STD_VIDEO_H264_CHROMA_FORMAT_IDC_422,
+      e444 = STD_VIDEO_H264_CHROMA_FORMAT_IDC_444,
+      eInvalid = STD_VIDEO_H264_CHROMA_FORMAT_IDC_INVALID
+    };
+
+    enum class H264ProfileIdc
+    {
+      eBaseline = STD_VIDEO_H264_PROFILE_IDC_BASELINE,
+      eMain = STD_VIDEO_H264_PROFILE_IDC_MAIN,
+      eHigh = STD_VIDEO_H264_PROFILE_IDC_HIGH,
+      eHigh444Predictive = STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE,
+      eInvalid = STD_VIDEO_H264_PROFILE_IDC_INVALID
+    };
+
+    enum class H264LevelIdc
+    {
+      e1_0 = STD_VIDEO_H264_LEVEL_IDC_1_0,
+      e1_1 = STD_VIDEO_H264_LEVEL_IDC_1_1,
+      e1_2 = STD_VIDEO_H264_LEVEL_IDC_1_2,
+      e1_3 = STD_VIDEO_H264_LEVEL_IDC_1_3,
+      e2_0 = STD_VIDEO_H264_LEVEL_IDC_2_0,
+      e2_1 = STD_VIDEO_H264_LEVEL_IDC_2_1,
+      e2_2 = STD_VIDEO_H264_LEVEL_IDC_2_2,
+      e3_0 = STD_VIDEO_H264_LEVEL_IDC_3_0,
+      e3_1 = STD_VIDEO_H264_LEVEL_IDC_3_1,
+      e3_2 = STD_VIDEO_H264_LEVEL_IDC_3_2,
+      e4_0 = STD_VIDEO_H264_LEVEL_IDC_4_0,
+      e4_1 = STD_VIDEO_H264_LEVEL_IDC_4_1,
+      e4_2 = STD_VIDEO_H264_LEVEL_IDC_4_2,
+      e5_0 = STD_VIDEO_H264_LEVEL_IDC_5_0,
+      e5_1 = STD_VIDEO_H264_LEVEL_IDC_5_1,
+      e5_2 = STD_VIDEO_H264_LEVEL_IDC_5_2,
+      e6_0 = STD_VIDEO_H264_LEVEL_IDC_6_0,
+      e6_1 = STD_VIDEO_H264_LEVEL_IDC_6_1,
+      e6_2 = STD_VIDEO_H264_LEVEL_IDC_6_2,
+      eInvalid = STD_VIDEO_H264_LEVEL_IDC_INVALID
+    };
+
+    enum class H264PocType
+    {
+      e0 = STD_VIDEO_H264_POC_TYPE_0,
+      e1 = STD_VIDEO_H264_POC_TYPE_1,
+      e2 = STD_VIDEO_H264_POC_TYPE_2,
+      eInvalid = STD_VIDEO_H264_POC_TYPE_INVALID
+    };
+
+    enum class H264AspectRatioIdc
+    {
+      eUnspecified = STD_VIDEO_H264_ASPECT_RATIO_IDC_UNSPECIFIED,
+      eSquare = STD_VIDEO_H264_ASPECT_RATIO_IDC_SQUARE,
+      e12_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_12_11,
+      e10_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_10_11,
+      e16_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_16_11,
+      e40_33 = STD_VIDEO_H264_ASPECT_RATIO_IDC_40_33,
+      e24_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_24_11,
+      e20_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_20_11,
+      e32_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_32_11,
+      e80_33 = STD_VIDEO_H264_ASPECT_RATIO_IDC_80_33,
+      e18_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_18_11,
+      e15_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_15_11,
+      e64_33 = STD_VIDEO_H264_ASPECT_RATIO_IDC_64_33,
+      e160_99 = STD_VIDEO_H264_ASPECT_RATIO_IDC_160_99,
+      e4_3 = STD_VIDEO_H264_ASPECT_RATIO_IDC_4_3,
+      e3_2 = STD_VIDEO_H264_ASPECT_RATIO_IDC_3_2,
+      e2_1 = STD_VIDEO_H264_ASPECT_RATIO_IDC_2_1,
+      eExtendedSar = STD_VIDEO_H264_ASPECT_RATIO_IDC_EXTENDED_SAR,
+      eInvalid = STD_VIDEO_H264_ASPECT_RATIO_IDC_INVALID
+    };
+
+    enum class H264WeightedBipredIdc
+    {
+      eDefault = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_DEFAULT,
+      eExplicit = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_EXPLICIT,
+      eImplicit = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_IMPLICIT,
+      eInvalid = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_INVALID
+    };
+
+    enum class H264ModificationOfPicNumsIdc
+    {
+      eShortTermSubtract = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_SUBTRACT,
+      eShortTermAdd = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_ADD,
+      eLongTerm = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_LONG_TERM,
+      eEnd = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_END,
+      eInvalid = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_INVALID
+    };
+
+    enum class H264MemMgmtControlOp
+    {
+      eEnd = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_END,
+      eUnmarkShortTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_SHORT_TERM,
+      eUnmarkLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_LONG_TERM,
+      eMarkLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_LONG_TERM,
+      eSetMaxLongTermIndex = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_SET_MAX_LONG_TERM_INDEX,
+      eUnmarkAll = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_ALL,
+      eMarkCurrentAsLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_CURRENT_AS_LONG_TERM,
+      eInvalid = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_INVALID
+    };
+
+    enum class H264CabacInitIdc
+    {
+      e0 = STD_VIDEO_H264_CABAC_INIT_IDC_0,
+      e1 = STD_VIDEO_H264_CABAC_INIT_IDC_1,
+      e2 = STD_VIDEO_H264_CABAC_INIT_IDC_2,
+      eInvalid = STD_VIDEO_H264_CABAC_INIT_IDC_INVALID
+    };
+
+    enum class H264DisableDeblockingFilterIdc
+    {
+      eDisabled = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_DISABLED,
+      eEnabled = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_ENABLED,
+      ePartial = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_PARTIAL,
+      eInvalid = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_INVALID
+    };
+
+    enum class H264SliceType
+    {
+      eP = STD_VIDEO_H264_SLICE_TYPE_P,
+      eB = STD_VIDEO_H264_SLICE_TYPE_B,
+      eI = STD_VIDEO_H264_SLICE_TYPE_I,
+      eInvalid = STD_VIDEO_H264_SLICE_TYPE_INVALID
+    };
+
+    enum class H264PictureType
+    {
+      eP = STD_VIDEO_H264_PICTURE_TYPE_P,
+      eB = STD_VIDEO_H264_PICTURE_TYPE_B,
+      eI = STD_VIDEO_H264_PICTURE_TYPE_I,
+      eIdr = STD_VIDEO_H264_PICTURE_TYPE_IDR,
+      eInvalid = STD_VIDEO_H264_PICTURE_TYPE_INVALID
+    };
+
+    enum class H264NonVclNaluType
+    {
+      eSps = STD_VIDEO_H264_NON_VCL_NALU_TYPE_SPS,
+      ePps = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PPS,
+      eAud = STD_VIDEO_H264_NON_VCL_NALU_TYPE_AUD,
+      ePrefix = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PREFIX,
+      eEndOfSequence = STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_SEQUENCE,
+      eEndOfStream = STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_STREAM,
+      ePrecoded = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PRECODED,
+      eInvalid = STD_VIDEO_H264_NON_VCL_NALU_TYPE_INVALID
+    };
+
+    //=== vulkan_video_codec_h264std_decode ===
+
+    enum class DecodeH264FieldOrderCount
+    {
+      eTop = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP,
+      eBottom = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_BOTTOM,
+      eInvalid = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_INVALID
+    };
+
+    //=== vulkan_video_codec_h265std ===
+
+    enum class H265ChromaFormatIdc
+    {
+      eMonochrome = STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME,
+      e420 = STD_VIDEO_H265_CHROMA_FORMAT_IDC_420,
+      e422 = STD_VIDEO_H265_CHROMA_FORMAT_IDC_422,
+      e444 = STD_VIDEO_H265_CHROMA_FORMAT_IDC_444,
+      eInvalid = STD_VIDEO_H265_CHROMA_FORMAT_IDC_INVALID
+    };
+
+    enum class H265ProfileIdc
+    {
+      eMain = STD_VIDEO_H265_PROFILE_IDC_MAIN,
+      eMain10 = STD_VIDEO_H265_PROFILE_IDC_MAIN_10,
+      eMainStillPicture = STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE,
+      eFormatRangeExtensions = STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS,
+      eSccExtensions = STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS,
+      eInvalid = STD_VIDEO_H265_PROFILE_IDC_INVALID
+    };
+
+    enum class H265LevelIdc
+    {
+      e1_0 = STD_VIDEO_H265_LEVEL_IDC_1_0,
+      e2_0 = STD_VIDEO_H265_LEVEL_IDC_2_0,
+      e2_1 = STD_VIDEO_H265_LEVEL_IDC_2_1,
+      e3_0 = STD_VIDEO_H265_LEVEL_IDC_3_0,
+      e3_1 = STD_VIDEO_H265_LEVEL_IDC_3_1,
+      e4_0 = STD_VIDEO_H265_LEVEL_IDC_4_0,
+      e4_1 = STD_VIDEO_H265_LEVEL_IDC_4_1,
+      e5_0 = STD_VIDEO_H265_LEVEL_IDC_5_0,
+      e5_1 = STD_VIDEO_H265_LEVEL_IDC_5_1,
+      e5_2 = STD_VIDEO_H265_LEVEL_IDC_5_2,
+      e6_0 = STD_VIDEO_H265_LEVEL_IDC_6_0,
+      e6_1 = STD_VIDEO_H265_LEVEL_IDC_6_1,
+      e6_2 = STD_VIDEO_H265_LEVEL_IDC_6_2,
+      eInvalid = STD_VIDEO_H265_LEVEL_IDC_INVALID
+    };
+
+    enum class H265SliceType
+    {
+      eB = STD_VIDEO_H265_SLICE_TYPE_B,
+      eP = STD_VIDEO_H265_SLICE_TYPE_P,
+      eI = STD_VIDEO_H265_SLICE_TYPE_I,
+      eInvalid = STD_VIDEO_H265_SLICE_TYPE_INVALID
+    };
+
+    enum class H265PictureType
+    {
+      eP = STD_VIDEO_H265_PICTURE_TYPE_P,
+      eB = STD_VIDEO_H265_PICTURE_TYPE_B,
+      eI = STD_VIDEO_H265_PICTURE_TYPE_I,
+      eIdr = STD_VIDEO_H265_PICTURE_TYPE_IDR,
+      eInvalid = STD_VIDEO_H265_PICTURE_TYPE_INVALID
+    };
+
+    enum class H265AspectRatioIdc
+    {
+      eUnspecified = STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED,
+      eSquare = STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE,
+      e12_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11,
+      e10_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11,
+      e16_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11,
+      e40_33 = STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33,
+      e24_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11,
+      e20_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11,
+      e32_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11,
+      e80_33 = STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33,
+      e18_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11,
+      e15_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11,
+      e64_33 = STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33,
+      e160_99 = STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99,
+      e4_3 = STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3,
+      e3_2 = STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2,
+      e2_1 = STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1,
+      eExtendedSar = STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR,
+      eInvalid = STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID
+    };
+
+    //===============
+    //=== STRUCTS ===
+    //===============
+
+    //=== vulkan_video_codec_h264std ===
+
+    struct H264SpsVuiFlags
+    {
+      using NativeType = StdVideoH264SpsVuiFlags;
+
+      operator StdVideoH264SpsVuiFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264SpsVuiFlags *>( this );
+      }
+
+      operator StdVideoH264SpsVuiFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264SpsVuiFlags *>( this );
+      }
+
+      bool operator==( H264SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && ( overscan_info_present_flag == rhs.overscan_info_present_flag ) &&
+               ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) &&
+               ( video_full_range_flag == rhs.video_full_range_flag ) && ( color_description_present_flag == rhs.color_description_present_flag ) &&
+               ( chroma_loc_info_present_flag == rhs.chroma_loc_info_present_flag ) && ( timing_info_present_flag == rhs.timing_info_present_flag ) &&
+               ( fixed_frame_rate_flag == rhs.fixed_frame_rate_flag ) && ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) &&
+               ( nal_hrd_parameters_present_flag == rhs.nal_hrd_parameters_present_flag ) &&
+               ( vcl_hrd_parameters_present_flag == rhs.vcl_hrd_parameters_present_flag );
+      }
+
+      bool operator!=( H264SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t aspect_ratio_info_present_flag : 1;
+      uint32_t overscan_info_present_flag : 1;
+      uint32_t overscan_appropriate_flag : 1;
+      uint32_t video_signal_type_present_flag : 1;
+      uint32_t video_full_range_flag : 1;
+      uint32_t color_description_present_flag : 1;
+      uint32_t chroma_loc_info_present_flag : 1;
+      uint32_t timing_info_present_flag : 1;
+      uint32_t fixed_frame_rate_flag : 1;
+      uint32_t bitstream_restriction_flag : 1;
+      uint32_t nal_hrd_parameters_present_flag : 1;
+      uint32_t vcl_hrd_parameters_present_flag : 1;
+    };
+
+    struct H264HrdParameters
+    {
+      using NativeType = StdVideoH264HrdParameters;
+
+      operator StdVideoH264HrdParameters const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264HrdParameters *>( this );
+      }
+
+      operator StdVideoH264HrdParameters &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264HrdParameters *>( this );
+      }
+
+      bool operator==( H264HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) && ( bit_rate_scale == rhs.bit_rate_scale ) && ( cpb_size_scale == rhs.cpb_size_scale ) &&
+               ( reserved1 == rhs.reserved1 ) && ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) &&
+               ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) && ( cbr_flag == rhs.cbr_flag ) &&
+               ( initial_cpb_removal_delay_length_minus1 == rhs.initial_cpb_removal_delay_length_minus1 ) &&
+               ( cpb_removal_delay_length_minus1 == rhs.cpb_removal_delay_length_minus1 ) &&
+               ( dpb_output_delay_length_minus1 == rhs.dpb_output_delay_length_minus1 ) && ( time_offset_length == rhs.time_offset_length );
+      }
+
+      bool operator!=( H264HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint8_t cpb_cnt_minus1 = {};
+      uint8_t bit_rate_scale = {};
+      uint8_t cpb_size_scale = {};
+      uint8_t reserved1 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H264_CPB_CNT_LIST_SIZE> bit_rate_value_minus1 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H264_CPB_CNT_LIST_SIZE> cpb_size_value_minus1 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H264_CPB_CNT_LIST_SIZE> cbr_flag = {};
+      uint32_t initial_cpb_removal_delay_length_minus1 = {};
+      uint32_t cpb_removal_delay_length_minus1 = {};
+      uint32_t dpb_output_delay_length_minus1 = {};
+      uint32_t time_offset_length = {};
+    };
+
+    struct H264SequenceParameterSetVui
+    {
+      using NativeType = StdVideoH264SequenceParameterSetVui;
+
+      operator StdVideoH264SequenceParameterSetVui const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264SequenceParameterSetVui *>( this );
+      }
+
+      operator StdVideoH264SequenceParameterSetVui &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264SequenceParameterSetVui *>( this );
+      }
+
+      bool operator==( H264SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && ( sar_width == rhs.sar_width ) && ( sar_height == rhs.sar_height ) &&
+               ( video_format == rhs.video_format ) && ( colour_primaries == rhs.colour_primaries ) &&
+               ( transfer_characteristics == rhs.transfer_characteristics ) && ( matrix_coefficients == rhs.matrix_coefficients ) &&
+               ( num_units_in_tick == rhs.num_units_in_tick ) && ( time_scale == rhs.time_scale ) && ( max_num_reorder_frames == rhs.max_num_reorder_frames ) &&
+               ( max_dec_frame_buffering == rhs.max_dec_frame_buffering ) && ( chroma_sample_loc_type_top_field == rhs.chroma_sample_loc_type_top_field ) &&
+               ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && ( reserved1 == rhs.reserved1 ) &&
+               ( pHrdParameters == rhs.pHrdParameters );
+      }
+
+      bool operator!=( H264SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SpsVuiFlags flags = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264AspectRatioIdc aspect_ratio_idc =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264AspectRatioIdc::eUnspecified;
+      uint16_t sar_width = {};
+      uint16_t sar_height = {};
+      uint8_t video_format = {};
+      uint8_t colour_primaries = {};
+      uint8_t transfer_characteristics = {};
+      uint8_t matrix_coefficients = {};
+      uint32_t num_units_in_tick = {};
+      uint32_t time_scale = {};
+      uint8_t max_num_reorder_frames = {};
+      uint8_t max_dec_frame_buffering = {};
+      uint8_t chroma_sample_loc_type_top_field = {};
+      uint8_t chroma_sample_loc_type_bottom_field = {};
+      uint32_t reserved1 = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264HrdParameters * pHrdParameters = {};
+    };
+
+    struct H264SpsFlags
+    {
+      using NativeType = StdVideoH264SpsFlags;
+
+      operator StdVideoH264SpsFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264SpsFlags *>( this );
+      }
+
+      operator StdVideoH264SpsFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264SpsFlags *>( this );
+      }
+
+      bool operator==( H264SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( constraint_set0_flag == rhs.constraint_set0_flag ) && ( constraint_set1_flag == rhs.constraint_set1_flag ) &&
+               ( constraint_set2_flag == rhs.constraint_set2_flag ) && ( constraint_set3_flag == rhs.constraint_set3_flag ) &&
+               ( constraint_set4_flag == rhs.constraint_set4_flag ) && ( constraint_set5_flag == rhs.constraint_set5_flag ) &&
+               ( direct_8x8_inference_flag == rhs.direct_8x8_inference_flag ) && ( mb_adaptive_frame_field_flag == rhs.mb_adaptive_frame_field_flag ) &&
+               ( frame_mbs_only_flag == rhs.frame_mbs_only_flag ) && ( delta_pic_order_always_zero_flag == rhs.delta_pic_order_always_zero_flag ) &&
+               ( separate_colour_plane_flag == rhs.separate_colour_plane_flag ) &&
+               ( gaps_in_frame_num_value_allowed_flag == rhs.gaps_in_frame_num_value_allowed_flag ) &&
+               ( qpprime_y_zero_transform_bypass_flag == rhs.qpprime_y_zero_transform_bypass_flag ) && ( frame_cropping_flag == rhs.frame_cropping_flag ) &&
+               ( seq_scaling_matrix_present_flag == rhs.seq_scaling_matrix_present_flag ) && ( vui_parameters_present_flag == rhs.vui_parameters_present_flag );
+      }
+
+      bool operator!=( H264SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t constraint_set0_flag : 1;
+      uint32_t constraint_set1_flag : 1;
+      uint32_t constraint_set2_flag : 1;
+      uint32_t constraint_set3_flag : 1;
+      uint32_t constraint_set4_flag : 1;
+      uint32_t constraint_set5_flag : 1;
+      uint32_t direct_8x8_inference_flag : 1;
+      uint32_t mb_adaptive_frame_field_flag : 1;
+      uint32_t frame_mbs_only_flag : 1;
+      uint32_t delta_pic_order_always_zero_flag : 1;
+      uint32_t separate_colour_plane_flag : 1;
+      uint32_t gaps_in_frame_num_value_allowed_flag : 1;
+      uint32_t qpprime_y_zero_transform_bypass_flag : 1;
+      uint32_t frame_cropping_flag : 1;
+      uint32_t seq_scaling_matrix_present_flag : 1;
+      uint32_t vui_parameters_present_flag : 1;
+    };
+
+    struct H264ScalingLists
+    {
+      using NativeType = StdVideoH264ScalingLists;
+
+      operator StdVideoH264ScalingLists const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264ScalingLists *>( this );
+      }
+
+      operator StdVideoH264ScalingLists &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264ScalingLists *>( this );
+      }
+
+      bool operator==( H264ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( scaling_list_present_mask == rhs.scaling_list_present_mask ) && ( use_default_scaling_matrix_mask == rhs.use_default_scaling_matrix_mask ) &&
+               ( ScalingList4x4 == rhs.ScalingList4x4 ) && ( ScalingList8x8 == rhs.ScalingList8x8 );
+      }
+
+      bool operator!=( H264ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint16_t scaling_list_present_mask = {};
+      uint16_t use_default_scaling_matrix_mask = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<uint8_t, STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS, STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS>
+        ScalingList4x4 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<uint8_t, STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS, STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS>
+        ScalingList8x8 = {};
+    };
+
+    struct H264SequenceParameterSet
+    {
+      using NativeType = StdVideoH264SequenceParameterSet;
+
+      operator StdVideoH264SequenceParameterSet const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264SequenceParameterSet *>( this );
+      }
+
+      operator StdVideoH264SequenceParameterSet &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264SequenceParameterSet *>( this );
+      }
+
+      bool operator==( H264SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( profile_idc == rhs.profile_idc ) && ( level_idc == rhs.level_idc ) &&
+               ( chroma_format_idc == rhs.chroma_format_idc ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) &&
+               ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && ( bit_depth_chroma_minus8 == rhs.bit_depth_chroma_minus8 ) &&
+               ( log2_max_frame_num_minus4 == rhs.log2_max_frame_num_minus4 ) && ( pic_order_cnt_type == rhs.pic_order_cnt_type ) &&
+               ( offset_for_non_ref_pic == rhs.offset_for_non_ref_pic ) && ( offset_for_top_to_bottom_field == rhs.offset_for_top_to_bottom_field ) &&
+               ( log2_max_pic_order_cnt_lsb_minus4 == rhs.log2_max_pic_order_cnt_lsb_minus4 ) &&
+               ( num_ref_frames_in_pic_order_cnt_cycle == rhs.num_ref_frames_in_pic_order_cnt_cycle ) && ( max_num_ref_frames == rhs.max_num_ref_frames ) &&
+               ( reserved1 == rhs.reserved1 ) && ( pic_width_in_mbs_minus1 == rhs.pic_width_in_mbs_minus1 ) &&
+               ( pic_height_in_map_units_minus1 == rhs.pic_height_in_map_units_minus1 ) && ( frame_crop_left_offset == rhs.frame_crop_left_offset ) &&
+               ( frame_crop_right_offset == rhs.frame_crop_right_offset ) && ( frame_crop_top_offset == rhs.frame_crop_top_offset ) &&
+               ( frame_crop_bottom_offset == rhs.frame_crop_bottom_offset ) && ( reserved2 == rhs.reserved2 ) &&
+               ( pOffsetForRefFrame == rhs.pOffsetForRefFrame ) && ( pScalingLists == rhs.pScalingLists ) &&
+               ( pSequenceParameterSetVui == rhs.pSequenceParameterSetVui );
+      }
+
+      bool operator!=( H264SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SpsFlags flags = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ProfileIdc profile_idc =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ProfileIdc::eBaseline;
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264LevelIdc level_idc = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264LevelIdc::e1_0;
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ChromaFormatIdc chroma_format_idc =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ChromaFormatIdc::eMonochrome;
+      uint8_t seq_parameter_set_id = {};
+      uint8_t bit_depth_luma_minus8 = {};
+      uint8_t bit_depth_chroma_minus8 = {};
+      uint8_t log2_max_frame_num_minus4 = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PocType pic_order_cnt_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PocType::e0;
+      int32_t offset_for_non_ref_pic = {};
+      int32_t offset_for_top_to_bottom_field = {};
+      uint8_t log2_max_pic_order_cnt_lsb_minus4 = {};
+      uint8_t num_ref_frames_in_pic_order_cnt_cycle = {};
+      uint8_t max_num_ref_frames = {};
+      uint8_t reserved1 = {};
+      uint32_t pic_width_in_mbs_minus1 = {};
+      uint32_t pic_height_in_map_units_minus1 = {};
+      uint32_t frame_crop_left_offset = {};
+      uint32_t frame_crop_right_offset = {};
+      uint32_t frame_crop_top_offset = {};
+      uint32_t frame_crop_bottom_offset = {};
+      uint32_t reserved2 = {};
+      const int32_t * pOffsetForRefFrame = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ScalingLists * pScalingLists = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SequenceParameterSetVui * pSequenceParameterSetVui = {};
+    };
+
+    struct H264PpsFlags
+    {
+      using NativeType = StdVideoH264PpsFlags;
+
+      operator StdVideoH264PpsFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264PpsFlags *>( this );
+      }
+
+      operator StdVideoH264PpsFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264PpsFlags *>( this );
+      }
+
+      bool operator==( H264PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( transform_8x8_mode_flag == rhs.transform_8x8_mode_flag ) && ( redundant_pic_cnt_present_flag == rhs.redundant_pic_cnt_present_flag ) &&
+               ( constrained_intra_pred_flag == rhs.constrained_intra_pred_flag ) &&
+               ( deblocking_filter_control_present_flag == rhs.deblocking_filter_control_present_flag ) && ( weighted_pred_flag == rhs.weighted_pred_flag ) &&
+               ( bottom_field_pic_order_in_frame_present_flag == rhs.bottom_field_pic_order_in_frame_present_flag ) &&
+               ( entropy_coding_mode_flag == rhs.entropy_coding_mode_flag ) && ( pic_scaling_matrix_present_flag == rhs.pic_scaling_matrix_present_flag );
+      }
+
+      bool operator!=( H264PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t transform_8x8_mode_flag : 1;
+      uint32_t redundant_pic_cnt_present_flag : 1;
+      uint32_t constrained_intra_pred_flag : 1;
+      uint32_t deblocking_filter_control_present_flag : 1;
+      uint32_t weighted_pred_flag : 1;
+      uint32_t bottom_field_pic_order_in_frame_present_flag : 1;
+      uint32_t entropy_coding_mode_flag : 1;
+      uint32_t pic_scaling_matrix_present_flag : 1;
+    };
+
+    struct H264PictureParameterSet
+    {
+      using NativeType = StdVideoH264PictureParameterSet;
+
+      operator StdVideoH264PictureParameterSet const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264PictureParameterSet *>( this );
+      }
+
+      operator StdVideoH264PictureParameterSet &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264PictureParameterSet *>( this );
+      }
+
+      bool operator==( H264PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) &&
+               ( num_ref_idx_l0_default_active_minus1 == rhs.num_ref_idx_l0_default_active_minus1 ) &&
+               ( num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && ( weighted_bipred_idc == rhs.weighted_bipred_idc ) &&
+               ( pic_init_qp_minus26 == rhs.pic_init_qp_minus26 ) && ( pic_init_qs_minus26 == rhs.pic_init_qs_minus26 ) &&
+               ( chroma_qp_index_offset == rhs.chroma_qp_index_offset ) && ( second_chroma_qp_index_offset == rhs.second_chroma_qp_index_offset ) &&
+               ( pScalingLists == rhs.pScalingLists );
+      }
+
+      bool operator!=( H264PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PpsFlags flags = {};
+      uint8_t seq_parameter_set_id = {};
+      uint8_t pic_parameter_set_id = {};
+      uint8_t num_ref_idx_l0_default_active_minus1 = {};
+      uint8_t num_ref_idx_l1_default_active_minus1 = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264WeightedBipredIdc weighted_bipred_idc =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264WeightedBipredIdc::eDefault;
+      int8_t pic_init_qp_minus26 = {};
+      int8_t pic_init_qs_minus26 = {};
+      int8_t chroma_qp_index_offset = {};
+      int8_t second_chroma_qp_index_offset = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ScalingLists * pScalingLists = {};
+    };
+
+    //=== vulkan_video_codec_h264std_decode ===
+
+    struct DecodeH264PictureInfoFlags
+    {
+      using NativeType = StdVideoDecodeH264PictureInfoFlags;
+
+      operator StdVideoDecodeH264PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoDecodeH264PictureInfoFlags *>( this );
+      }
+
+      operator StdVideoDecodeH264PictureInfoFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoDecodeH264PictureInfoFlags *>( this );
+      }
+
+      bool operator==( DecodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( field_pic_flag == rhs.field_pic_flag ) && ( is_intra == rhs.is_intra ) && ( IdrPicFlag == rhs.IdrPicFlag ) &&
+               ( bottom_field_flag == rhs.bottom_field_flag ) && ( is_reference == rhs.is_reference ) &&
+               ( complementary_field_pair == rhs.complementary_field_pair );
+      }
+
+      bool operator!=( DecodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t field_pic_flag : 1;
+      uint32_t is_intra : 1;
+      uint32_t IdrPicFlag : 1;
+      uint32_t bottom_field_flag : 1;
+      uint32_t is_reference : 1;
+      uint32_t complementary_field_pair : 1;
+    };
+
+    struct DecodeH264PictureInfo
+    {
+      using NativeType = StdVideoDecodeH264PictureInfo;
+
+      operator StdVideoDecodeH264PictureInfo const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoDecodeH264PictureInfo *>( this );
+      }
+
+      operator StdVideoDecodeH264PictureInfo &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoDecodeH264PictureInfo *>( this );
+      }
+
+      bool operator==( DecodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) &&
+               ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( frame_num == rhs.frame_num ) && ( idr_pic_id == rhs.idr_pic_id ) &&
+               ( PicOrderCnt == rhs.PicOrderCnt );
+      }
+
+      bool operator!=( DecodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH264PictureInfoFlags flags = {};
+      uint8_t seq_parameter_set_id = {};
+      uint8_t pic_parameter_set_id = {};
+      uint8_t reserved1 = {};
+      uint8_t reserved2 = {};
+      uint16_t frame_num = {};
+      uint16_t idr_pic_id = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE> PicOrderCnt = {};
+    };
+
+    struct DecodeH264ReferenceInfoFlags
+    {
+      using NativeType = StdVideoDecodeH264ReferenceInfoFlags;
+
+      operator StdVideoDecodeH264ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoDecodeH264ReferenceInfoFlags *>( this );
+      }
+
+      operator StdVideoDecodeH264ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoDecodeH264ReferenceInfoFlags *>( this );
+      }
+
+      bool operator==( DecodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( top_field_flag == rhs.top_field_flag ) && ( bottom_field_flag == rhs.bottom_field_flag ) &&
+               ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( is_non_existing == rhs.is_non_existing );
+      }
+
+      bool operator!=( DecodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t top_field_flag : 1;
+      uint32_t bottom_field_flag : 1;
+      uint32_t used_for_long_term_reference : 1;
+      uint32_t is_non_existing : 1;
+    };
+
+    struct DecodeH264ReferenceInfo
+    {
+      using NativeType = StdVideoDecodeH264ReferenceInfo;
+
+      operator StdVideoDecodeH264ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoDecodeH264ReferenceInfo *>( this );
+      }
+
+      operator StdVideoDecodeH264ReferenceInfo &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoDecodeH264ReferenceInfo *>( this );
+      }
+
+      bool operator==( DecodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( FrameNum == rhs.FrameNum ) && ( reserved == rhs.reserved ) && ( PicOrderCnt == rhs.PicOrderCnt );
+      }
+
+      bool operator!=( DecodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH264ReferenceInfoFlags flags = {};
+      uint16_t FrameNum = {};
+      uint16_t reserved = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE> PicOrderCnt = {};
+    };
+
+    //=== vulkan_video_codec_h264std_encode ===
+
+    struct EncodeH264WeightTableFlags
+    {
+      using NativeType = StdVideoEncodeH264WeightTableFlags;
+
+      operator StdVideoEncodeH264WeightTableFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264WeightTableFlags *>( this );
+      }
+
+      operator StdVideoEncodeH264WeightTableFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264WeightTableFlags *>( this );
+      }
+
+      bool operator==( EncodeH264WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) &&
+               ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag );
+      }
+
+      bool operator!=( EncodeH264WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t luma_weight_l0_flag = {};
+      uint32_t chroma_weight_l0_flag = {};
+      uint32_t luma_weight_l1_flag = {};
+      uint32_t chroma_weight_l1_flag = {};
+    };
+
+    struct EncodeH264WeightTable
+    {
+      using NativeType = StdVideoEncodeH264WeightTable;
+
+      operator StdVideoEncodeH264WeightTable const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264WeightTable *>( this );
+      }
+
+      operator StdVideoEncodeH264WeightTable &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264WeightTable *>( this );
+      }
+
+      bool operator==( EncodeH264WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) &&
+               ( chroma_log2_weight_denom == rhs.chroma_log2_weight_denom ) && ( luma_weight_l0 == rhs.luma_weight_l0 ) &&
+               ( luma_offset_l0 == rhs.luma_offset_l0 ) && ( chroma_weight_l0 == rhs.chroma_weight_l0 ) && ( chroma_offset_l0 == rhs.chroma_offset_l0 ) &&
+               ( luma_weight_l1 == rhs.luma_weight_l1 ) && ( luma_offset_l1 == rhs.luma_offset_l1 ) && ( chroma_weight_l1 == rhs.chroma_weight_l1 ) &&
+               ( chroma_offset_l1 == rhs.chroma_offset_l1 );
+      }
+
+      bool operator!=( EncodeH264WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264WeightTableFlags flags = {};
+      uint8_t luma_log2_weight_denom = {};
+      uint8_t chroma_log2_weight_denom = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF> luma_weight_l0 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF> luma_offset_l0 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF, STD_VIDEO_H264_MAX_CHROMA_PLANES> chroma_weight_l0 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF, STD_VIDEO_H264_MAX_CHROMA_PLANES> chroma_offset_l0 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF> luma_weight_l1 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF> luma_offset_l1 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF, STD_VIDEO_H264_MAX_CHROMA_PLANES> chroma_weight_l1 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF, STD_VIDEO_H264_MAX_CHROMA_PLANES> chroma_offset_l1 = {};
+    };
+
+    struct EncodeH264SliceHeaderFlags
+    {
+      using NativeType = StdVideoEncodeH264SliceHeaderFlags;
+
+      operator StdVideoEncodeH264SliceHeaderFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264SliceHeaderFlags *>( this );
+      }
+
+      operator StdVideoEncodeH264SliceHeaderFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264SliceHeaderFlags *>( this );
+      }
+
+      bool operator==( EncodeH264SliceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( direct_spatial_mv_pred_flag == rhs.direct_spatial_mv_pred_flag ) &&
+               ( num_ref_idx_active_override_flag == rhs.num_ref_idx_active_override_flag ) && ( reserved == rhs.reserved );
+      }
+
+      bool operator!=( EncodeH264SliceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t direct_spatial_mv_pred_flag : 1;
+      uint32_t num_ref_idx_active_override_flag : 1;
+      uint32_t reserved : 30;
+    };
+
+    struct EncodeH264PictureInfoFlags
+    {
+      using NativeType = StdVideoEncodeH264PictureInfoFlags;
+
+      operator StdVideoEncodeH264PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264PictureInfoFlags *>( this );
+      }
+
+      operator StdVideoEncodeH264PictureInfoFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264PictureInfoFlags *>( this );
+      }
+
+      bool operator==( EncodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( IdrPicFlag == rhs.IdrPicFlag ) && ( is_reference == rhs.is_reference ) &&
+               ( no_output_of_prior_pics_flag == rhs.no_output_of_prior_pics_flag ) && ( long_term_reference_flag == rhs.long_term_reference_flag ) &&
+               ( adaptive_ref_pic_marking_mode_flag == rhs.adaptive_ref_pic_marking_mode_flag ) && ( reserved == rhs.reserved );
+      }
+
+      bool operator!=( EncodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t IdrPicFlag : 1;
+      uint32_t is_reference : 1;
+      uint32_t no_output_of_prior_pics_flag : 1;
+      uint32_t long_term_reference_flag : 1;
+      uint32_t adaptive_ref_pic_marking_mode_flag : 1;
+      uint32_t reserved : 27;
+    };
+
+    struct EncodeH264ReferenceInfoFlags
+    {
+      using NativeType = StdVideoEncodeH264ReferenceInfoFlags;
+
+      operator StdVideoEncodeH264ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264ReferenceInfoFlags *>( this );
+      }
+
+      operator StdVideoEncodeH264ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264ReferenceInfoFlags *>( this );
+      }
+
+      bool operator==( EncodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( reserved == rhs.reserved );
+      }
+
+      bool operator!=( EncodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t used_for_long_term_reference : 1;
+      uint32_t reserved : 31;
+    };
+
+    struct EncodeH264ReferenceListsInfoFlags
+    {
+      using NativeType = StdVideoEncodeH264ReferenceListsInfoFlags;
+
+      operator StdVideoEncodeH264ReferenceListsInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264ReferenceListsInfoFlags *>( this );
+      }
+
+      operator StdVideoEncodeH264ReferenceListsInfoFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264ReferenceListsInfoFlags *>( this );
+      }
+
+      bool operator==( EncodeH264ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( ref_pic_list_modification_flag_l0 == rhs.ref_pic_list_modification_flag_l0 ) &&
+               ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && ( reserved == rhs.reserved );
+      }
+
+      bool operator!=( EncodeH264ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t ref_pic_list_modification_flag_l0 : 1;
+      uint32_t ref_pic_list_modification_flag_l1 : 1;
+      uint32_t reserved : 30;
+    };
+
+    struct EncodeH264RefListModEntry
+    {
+      using NativeType = StdVideoEncodeH264RefListModEntry;
+
+      operator StdVideoEncodeH264RefListModEntry const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264RefListModEntry *>( this );
+      }
+
+      operator StdVideoEncodeH264RefListModEntry &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264RefListModEntry *>( this );
+      }
+
+      bool operator==( EncodeH264RefListModEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( modification_of_pic_nums_idc == rhs.modification_of_pic_nums_idc ) && ( abs_diff_pic_num_minus1 == rhs.abs_diff_pic_num_minus1 ) &&
+               ( long_term_pic_num == rhs.long_term_pic_num );
+      }
+
+      bool operator!=( EncodeH264RefListModEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ModificationOfPicNumsIdc modification_of_pic_nums_idc =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ModificationOfPicNumsIdc::eShortTermSubtract;
+      uint16_t abs_diff_pic_num_minus1 = {};
+      uint16_t long_term_pic_num = {};
+    };
+
+    struct EncodeH264RefPicMarkingEntry
+    {
+      using NativeType = StdVideoEncodeH264RefPicMarkingEntry;
+
+      operator StdVideoEncodeH264RefPicMarkingEntry const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264RefPicMarkingEntry *>( this );
+      }
+
+      operator StdVideoEncodeH264RefPicMarkingEntry &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264RefPicMarkingEntry *>( this );
+      }
+
+      bool operator==( EncodeH264RefPicMarkingEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( memory_management_control_operation == rhs.memory_management_control_operation ) &&
+               ( difference_of_pic_nums_minus1 == rhs.difference_of_pic_nums_minus1 ) && ( long_term_pic_num == rhs.long_term_pic_num ) &&
+               ( long_term_frame_idx == rhs.long_term_frame_idx ) && ( max_long_term_frame_idx_plus1 == rhs.max_long_term_frame_idx_plus1 );
+      }
+
+      bool operator!=( EncodeH264RefPicMarkingEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264MemMgmtControlOp memory_management_control_operation =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264MemMgmtControlOp::eEnd;
+      uint16_t difference_of_pic_nums_minus1 = {};
+      uint16_t long_term_pic_num = {};
+      uint16_t long_term_frame_idx = {};
+      uint16_t max_long_term_frame_idx_plus1 = {};
+    };
+
+    struct EncodeH264ReferenceListsInfo
+    {
+      using NativeType = StdVideoEncodeH264ReferenceListsInfo;
+
+      operator StdVideoEncodeH264ReferenceListsInfo const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264ReferenceListsInfo *>( this );
+      }
+
+      operator StdVideoEncodeH264ReferenceListsInfo &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264ReferenceListsInfo *>( this );
+      }
+
+      bool operator==( EncodeH264ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( num_ref_idx_l0_active_minus1 == rhs.num_ref_idx_l0_active_minus1 ) &&
+               ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && ( RefPicList0 == rhs.RefPicList0 ) &&
+               ( RefPicList1 == rhs.RefPicList1 ) && ( refList0ModOpCount == rhs.refList0ModOpCount ) && ( refList1ModOpCount == rhs.refList1ModOpCount ) &&
+               ( refPicMarkingOpCount == rhs.refPicMarkingOpCount ) && ( reserved1 == rhs.reserved1 ) &&
+               ( pRefList0ModOperations == rhs.pRefList0ModOperations ) && ( pRefList1ModOperations == rhs.pRefList1ModOperations ) &&
pRefPicMarkingOperations == rhs.pRefPicMarkingOperations ); + } + + bool operator!=( EncodeH264ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceListsInfoFlags flags = {}; + uint8_t num_ref_idx_l0_active_minus1 = {}; + uint8_t num_ref_idx_l1_active_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList1 = {}; + uint8_t refList0ModOpCount = {}; + uint8_t refList1ModOpCount = {}; + uint8_t refPicMarkingOpCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefListModEntry * pRefList0ModOperations = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefListModEntry * pRefList1ModOperations = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefPicMarkingEntry * pRefPicMarkingOperations = {}; + }; + + struct EncodeH264PictureInfo + { + using NativeType = StdVideoEncodeH264PictureInfo; + + operator StdVideoEncodeH264PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) && + ( idr_pic_id == rhs.idr_pic_id ) && ( primary_pic_type == rhs.primary_pic_type ) && ( frame_num == rhs.frame_num ) && + ( PicOrderCnt == rhs.PicOrderCnt ) && ( temporal_id == rhs.temporal_id ) && ( reserved1 == rhs.reserved1 ) && ( pRefLists == rhs.pRefLists ); + } + + bool operator!=( EncodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264PictureInfoFlags flags = {}; + uint8_t seq_parameter_set_id = {}; + uint8_t pic_parameter_set_id = {}; + uint16_t idr_pic_id = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType primary_pic_type = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType::eP; + uint32_t frame_num = {}; + int32_t PicOrderCnt = {}; + uint8_t temporal_id = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceListsInfo * pRefLists = {}; + }; + + struct EncodeH264ReferenceInfo + { + using NativeType = StdVideoEncodeH264ReferenceInfo; + + operator StdVideoEncodeH264ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( primary_pic_type == rhs.primary_pic_type ) && ( FrameNum == rhs.FrameNum ) && ( PicOrderCnt == rhs.PicOrderCnt ) && + ( long_term_pic_num == rhs.long_term_pic_num ) && ( long_term_frame_idx == rhs.long_term_frame_idx ) && ( temporal_id == rhs.temporal_id ); + } + + bool operator!=( EncodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceInfoFlags flags = {}; + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType primary_pic_type = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType::eP; + uint32_t FrameNum = {}; + int32_t PicOrderCnt = {}; + uint16_t long_term_pic_num = {}; + uint16_t long_term_frame_idx = {}; + uint8_t temporal_id = {}; + }; + + struct EncodeH264SliceHeader + { + using NativeType = StdVideoEncodeH264SliceHeader; + + operator StdVideoEncodeH264SliceHeader const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264SliceHeader &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264SliceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( first_mb_in_slice == rhs.first_mb_in_slice ) && ( slice_type == rhs.slice_type ) && + ( slice_alpha_c0_offset_div2 == rhs.slice_alpha_c0_offset_div2 ) && ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) && + ( slice_qp_delta == rhs.slice_qp_delta ) && ( reserved1 == rhs.reserved1 ) && ( cabac_init_idc == rhs.cabac_init_idc ) && + ( disable_deblocking_filter_idc == rhs.disable_deblocking_filter_idc ) && ( pWeightTable == rhs.pWeightTable ); + } + + bool operator!=( EncodeH264SliceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264SliceHeaderFlags flags = {}; + uint32_t first_mb_in_slice = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SliceType slice_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SliceType::eP; + int8_t slice_alpha_c0_offset_div2 = {}; + int8_t slice_beta_offset_div2 = {}; + int8_t slice_qp_delta = {}; + uint8_t reserved1 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264CabacInitIdc cabac_init_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264CabacInitIdc::e0; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264DisableDeblockingFilterIdc disable_deblocking_filter_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264DisableDeblockingFilterIdc::eDisabled; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264WeightTable * pWeightTable = {}; + }; + + //=== vulkan_video_codec_h265std === + + struct H265DecPicBufMgr + { + using NativeType = StdVideoH265DecPicBufMgr; + + operator StdVideoH265DecPicBufMgr const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265DecPicBufMgr &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265DecPicBufMgr const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( max_latency_increase_plus1 == rhs.max_latency_increase_plus1 ) && ( max_dec_pic_buffering_minus1 == rhs.max_dec_pic_buffering_minus1 ) && + ( max_num_reorder_pics == rhs.max_num_reorder_pics ); + } + + bool operator!=( H265DecPicBufMgr const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D max_latency_increase_plus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D max_dec_pic_buffering_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D max_num_reorder_pics = {}; + }; + + struct H265SubLayerHrdParameters + { + using NativeType = StdVideoH265SubLayerHrdParameters; + + operator StdVideoH265SubLayerHrdParameters const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SubLayerHrdParameters &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + 
+      }
+
+      bool operator==( H265SubLayerHrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) && ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) &&
+               ( cpb_size_du_value_minus1 == rhs.cpb_size_du_value_minus1 ) && ( bit_rate_du_value_minus1 == rhs.bit_rate_du_value_minus1 ) &&
+               ( cbr_flag == rhs.cbr_flag );
+      }
+
+      bool operator!=( H265SubLayerHrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_CPB_CNT_LIST_SIZE> bit_rate_value_minus1    = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_CPB_CNT_LIST_SIZE> cpb_size_value_minus1    = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_CPB_CNT_LIST_SIZE> cpb_size_du_value_minus1 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_CPB_CNT_LIST_SIZE> bit_rate_du_value_minus1 = {};
+      uint32_t                                                                         cbr_flag                 = {};
+    };
+
+    struct H265HrdFlags
+    {
+      using NativeType = StdVideoH265HrdFlags;
+
+      operator StdVideoH265HrdFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265HrdFlags *>( this );
+      }
+
+      operator StdVideoH265HrdFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265HrdFlags *>( this );
+      }
+
+      bool operator==( H265HrdFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( nal_hrd_parameters_present_flag == rhs.nal_hrd_parameters_present_flag ) &&
+               ( vcl_hrd_parameters_present_flag == rhs.vcl_hrd_parameters_present_flag ) &&
+               ( sub_pic_hrd_params_present_flag == rhs.sub_pic_hrd_params_present_flag ) &&
+               ( sub_pic_cpb_params_in_pic_timing_sei_flag == rhs.sub_pic_cpb_params_in_pic_timing_sei_flag ) &&
+               ( fixed_pic_rate_general_flag == rhs.fixed_pic_rate_general_flag ) && ( fixed_pic_rate_within_cvs_flag == rhs.fixed_pic_rate_within_cvs_flag ) &&
+               ( low_delay_hrd_flag == rhs.low_delay_hrd_flag );
+      }
+
+      bool operator!=( H265HrdFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t nal_hrd_parameters_present_flag           : 1;
+      uint32_t vcl_hrd_parameters_present_flag           : 1;
+      uint32_t sub_pic_hrd_params_present_flag           : 1;
+      uint32_t sub_pic_cpb_params_in_pic_timing_sei_flag : 1;
+      uint32_t fixed_pic_rate_general_flag               : 8;
+      uint32_t fixed_pic_rate_within_cvs_flag            : 8;
+      uint32_t low_delay_hrd_flag                        : 8;
+    };
+
+    struct H265HrdParameters
+    {
+      using NativeType = StdVideoH265HrdParameters;
+
+      operator StdVideoH265HrdParameters const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265HrdParameters *>( this );
+      }
+
+      operator StdVideoH265HrdParameters &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265HrdParameters *>( this );
+      }
+
+      bool operator==( H265HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( tick_divisor_minus2 == rhs.tick_divisor_minus2 ) &&
+               ( du_cpb_removal_delay_increment_length_minus1 == rhs.du_cpb_removal_delay_increment_length_minus1 ) &&
+               ( dpb_output_delay_du_length_minus1 == rhs.dpb_output_delay_du_length_minus1 ) && ( bit_rate_scale == rhs.bit_rate_scale ) &&
+               ( cpb_size_scale == rhs.cpb_size_scale ) && ( cpb_size_du_scale == rhs.cpb_size_du_scale ) &&
+               ( initial_cpb_removal_delay_length_minus1 == rhs.initial_cpb_removal_delay_length_minus1 ) &&
+               ( au_cpb_removal_delay_length_minus1 == rhs.au_cpb_removal_delay_length_minus1 ) &&
+               ( dpb_output_delay_length_minus1 == rhs.dpb_output_delay_length_minus1 ) && ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) &&
+               ( elemental_duration_in_tc_minus1 == rhs.elemental_duration_in_tc_minus1 ) && ( reserved == rhs.reserved ) &&
+               ( pSubLayerHrdParametersNal == rhs.pSubLayerHrdParametersNal ) && ( pSubLayerHrdParametersVcl == rhs.pSubLayerHrdParametersVcl );
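+        // Note that members held by pointer (pSubLayerHrdParametersNal/Vcl
+        // above) are compared by address, not by pointee contents, so two
+        // logically identical HRD parameter sets built from separately
+        // allocated sub-layer arrays compare unequal. Sketch:
+        //
+        //   H265SubLayerHrdParameters nal{}, nalCopy{};  // equal contents
+        //   H265HrdParameters a{}, b{};
+        //   a.pSubLayerHrdParametersNal = &nal;
+        //   b.pSubLayerHrdParametersNal = &nalCopy;
+        //   assert( !( a == b ) );                       // shallow pointer comparison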
+ } + + bool operator!=( H265HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdFlags flags = {}; + uint8_t tick_divisor_minus2 = {}; + uint8_t du_cpb_removal_delay_increment_length_minus1 = {}; + uint8_t dpb_output_delay_du_length_minus1 = {}; + uint8_t bit_rate_scale = {}; + uint8_t cpb_size_scale = {}; + uint8_t cpb_size_du_scale = {}; + uint8_t initial_cpb_removal_delay_length_minus1 = {}; + uint8_t au_cpb_removal_delay_length_minus1 = {}; + uint8_t dpb_output_delay_length_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cpb_cnt_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D elemental_duration_in_tc_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SubLayerHrdParameters * pSubLayerHrdParametersNal = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SubLayerHrdParameters * pSubLayerHrdParametersVcl = {}; + }; + + struct H265VpsFlags + { + using NativeType = StdVideoH265VpsFlags; + + operator StdVideoH265VpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265VpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265VpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( vps_temporal_id_nesting_flag == rhs.vps_temporal_id_nesting_flag ) && + ( vps_sub_layer_ordering_info_present_flag == rhs.vps_sub_layer_ordering_info_present_flag ) && + ( vps_timing_info_present_flag == rhs.vps_timing_info_present_flag ) && + ( vps_poc_proportional_to_timing_flag == rhs.vps_poc_proportional_to_timing_flag ); + } + + bool operator!=( H265VpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t vps_temporal_id_nesting_flag : 1; + uint32_t vps_sub_layer_ordering_info_present_flag : 1; + uint32_t vps_timing_info_present_flag : 1; + uint32_t vps_poc_proportional_to_timing_flag : 1; + }; + + struct H265ProfileTierLevelFlags + { + using NativeType = StdVideoH265ProfileTierLevelFlags; + + operator StdVideoH265ProfileTierLevelFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ProfileTierLevelFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265ProfileTierLevelFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( general_tier_flag == rhs.general_tier_flag ) && ( general_progressive_source_flag == rhs.general_progressive_source_flag ) && + ( general_interlaced_source_flag == rhs.general_interlaced_source_flag ) && + ( general_non_packed_constraint_flag == rhs.general_non_packed_constraint_flag ) && + ( general_frame_only_constraint_flag == rhs.general_frame_only_constraint_flag ); + } + + bool operator!=( H265ProfileTierLevelFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t general_tier_flag : 1; + uint32_t general_progressive_source_flag : 1; + uint32_t general_interlaced_source_flag : 1; + uint32_t general_non_packed_constraint_flag : 1; + uint32_t general_frame_only_constraint_flag : 1; + }; + + struct H265ProfileTierLevel + { + using NativeType = StdVideoH265ProfileTierLevel; + + operator StdVideoH265ProfileTierLevel const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ProfileTierLevel &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + bool operator==( H265ProfileTierLevel const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( general_profile_idc == rhs.general_profile_idc ) && ( general_level_idc == rhs.general_level_idc ); + } + + bool operator!=( H265ProfileTierLevel const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevelFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileIdc general_profile_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileIdc::eMain; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LevelIdc general_level_idc = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LevelIdc::e1_0; + }; + + struct H265VideoParameterSet + { + using NativeType = StdVideoH265VideoParameterSet; + + operator StdVideoH265VideoParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265VideoParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265VideoParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( vps_video_parameter_set_id == rhs.vps_video_parameter_set_id ) && + ( vps_max_sub_layers_minus1 == rhs.vps_max_sub_layers_minus1 ) && ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && + ( vps_num_units_in_tick == rhs.vps_num_units_in_tick ) && ( vps_time_scale == rhs.vps_time_scale ) && + ( vps_num_ticks_poc_diff_one_minus1 == rhs.vps_num_ticks_poc_diff_one_minus1 ) && ( reserved3 == rhs.reserved3 ) && + ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && ( pHrdParameters == rhs.pHrdParameters ) && ( pProfileTierLevel == rhs.pProfileTierLevel ); + } + + bool operator!=( H265VideoParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265VpsFlags flags = {}; + uint8_t vps_video_parameter_set_id = {}; + uint8_t vps_max_sub_layers_minus1 = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + uint32_t vps_num_units_in_tick = {}; + uint32_t vps_time_scale = {}; + uint32_t vps_num_ticks_poc_diff_one_minus1 = {}; + uint32_t reserved3 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265DecPicBufMgr * pDecPicBufMgr = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdParameters * pHrdParameters = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevel * pProfileTierLevel = {}; + }; + + struct H265ScalingLists + { + using NativeType = StdVideoH265ScalingLists; + + operator StdVideoH265ScalingLists const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ScalingLists &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( ScalingList4x4 == rhs.ScalingList4x4 ) && ( ScalingList8x8 == rhs.ScalingList8x8 ) && ( ScalingList16x16 == rhs.ScalingList16x16 ) && + ( ScalingList32x32 == rhs.ScalingList32x32 ) && ( ScalingListDCCoef16x16 == rhs.ScalingListDCCoef16x16 ) && + ( ScalingListDCCoef32x32 == rhs.ScalingListDCCoef32x32 ); + } + + bool operator!=( H265ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList4x4 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + 
ScalingList8x8 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList16x16 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList32x32 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ScalingListDCCoef16x16 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ScalingListDCCoef32x32 = {}; + }; + + struct H265SpsVuiFlags + { + using NativeType = StdVideoH265SpsVuiFlags; + + operator StdVideoH265SpsVuiFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SpsVuiFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && ( overscan_info_present_flag == rhs.overscan_info_present_flag ) && + ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) && + ( video_full_range_flag == rhs.video_full_range_flag ) && ( colour_description_present_flag == rhs.colour_description_present_flag ) && + ( chroma_loc_info_present_flag == rhs.chroma_loc_info_present_flag ) && + ( neutral_chroma_indication_flag == rhs.neutral_chroma_indication_flag ) && ( field_seq_flag == rhs.field_seq_flag ) && + ( frame_field_info_present_flag == rhs.frame_field_info_present_flag ) && ( default_display_window_flag == rhs.default_display_window_flag ) && + ( vui_timing_info_present_flag == rhs.vui_timing_info_present_flag ) && + ( vui_poc_proportional_to_timing_flag == rhs.vui_poc_proportional_to_timing_flag ) && + ( vui_hrd_parameters_present_flag == rhs.vui_hrd_parameters_present_flag ) && ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) && + ( tiles_fixed_structure_flag == rhs.tiles_fixed_structure_flag ) && + ( motion_vectors_over_pic_boundaries_flag == rhs.motion_vectors_over_pic_boundaries_flag ) && + ( restricted_ref_pic_lists_flag == rhs.restricted_ref_pic_lists_flag ); + } + + bool operator!=( H265SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t colour_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t neutral_chroma_indication_flag : 1; + uint32_t field_seq_flag : 1; + uint32_t frame_field_info_present_flag : 1; + uint32_t default_display_window_flag : 1; + uint32_t vui_timing_info_present_flag : 1; + uint32_t vui_poc_proportional_to_timing_flag : 1; + uint32_t vui_hrd_parameters_present_flag : 1; + uint32_t bitstream_restriction_flag : 1; + uint32_t tiles_fixed_structure_flag : 1; + uint32_t motion_vectors_over_pic_boundaries_flag : 1; + uint32_t restricted_ref_pic_lists_flag : 1; + }; + + struct H265SequenceParameterSetVui + { + using NativeType = StdVideoH265SequenceParameterSetVui; + + operator StdVideoH265SequenceParameterSetVui const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SequenceParameterSetVui &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && ( sar_width == rhs.sar_width ) && ( sar_height == rhs.sar_height ) && + ( video_format 
== rhs.video_format ) && ( colour_primaries == rhs.colour_primaries ) && + ( transfer_characteristics == rhs.transfer_characteristics ) && ( matrix_coeffs == rhs.matrix_coeffs ) && + ( chroma_sample_loc_type_top_field == rhs.chroma_sample_loc_type_top_field ) && + ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && ( reserved1 == rhs.reserved1 ) && + ( reserved2 == rhs.reserved2 ) && ( def_disp_win_left_offset == rhs.def_disp_win_left_offset ) && + ( def_disp_win_right_offset == rhs.def_disp_win_right_offset ) && ( def_disp_win_top_offset == rhs.def_disp_win_top_offset ) && + ( def_disp_win_bottom_offset == rhs.def_disp_win_bottom_offset ) && ( vui_num_units_in_tick == rhs.vui_num_units_in_tick ) && + ( vui_time_scale == rhs.vui_time_scale ) && ( vui_num_ticks_poc_diff_one_minus1 == rhs.vui_num_ticks_poc_diff_one_minus1 ) && + ( min_spatial_segmentation_idc == rhs.min_spatial_segmentation_idc ) && ( reserved3 == rhs.reserved3 ) && + ( max_bytes_per_pic_denom == rhs.max_bytes_per_pic_denom ) && ( max_bits_per_min_cu_denom == rhs.max_bits_per_min_cu_denom ) && + ( log2_max_mv_length_horizontal == rhs.log2_max_mv_length_horizontal ) && ( log2_max_mv_length_vertical == rhs.log2_max_mv_length_vertical ) && + ( pHrdParameters == rhs.pHrdParameters ); + } + + bool operator!=( H265SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SpsVuiFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265AspectRatioIdc aspect_ratio_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265AspectRatioIdc::eUnspecified; + uint16_t sar_width = {}; + uint16_t sar_height = {}; + uint8_t video_format = {}; + uint8_t colour_primaries = {}; + uint8_t transfer_characteristics = {}; + uint8_t matrix_coeffs = {}; + uint8_t chroma_sample_loc_type_top_field = {}; + uint8_t chroma_sample_loc_type_bottom_field = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + uint16_t def_disp_win_left_offset = {}; + uint16_t def_disp_win_right_offset = {}; + uint16_t def_disp_win_top_offset = {}; + uint16_t def_disp_win_bottom_offset = {}; + uint32_t vui_num_units_in_tick = {}; + uint32_t vui_time_scale = {}; + uint32_t vui_num_ticks_poc_diff_one_minus1 = {}; + uint16_t min_spatial_segmentation_idc = {}; + uint16_t reserved3 = {}; + uint8_t max_bytes_per_pic_denom = {}; + uint8_t max_bits_per_min_cu_denom = {}; + uint8_t log2_max_mv_length_horizontal = {}; + uint8_t log2_max_mv_length_vertical = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdParameters * pHrdParameters = {}; + }; + + struct H265PredictorPaletteEntries + { + using NativeType = StdVideoH265PredictorPaletteEntries; + + operator StdVideoH265PredictorPaletteEntries const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265PredictorPaletteEntries &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265PredictorPaletteEntries const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( PredictorPaletteEntries == rhs.PredictorPaletteEntries ); + } + + bool operator!=( H265PredictorPaletteEntries const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE:: + ArrayWrapper2D + PredictorPaletteEntries = {}; + }; + + struct H265SpsFlags + { + using NativeType = StdVideoH265SpsFlags; + + operator StdVideoH265SpsFlags const &() const VULKAN_HPP_NOEXCEPT + { 
+ return *reinterpret_cast( this ); + } + + operator StdVideoH265SpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sps_temporal_id_nesting_flag == rhs.sps_temporal_id_nesting_flag ) && ( separate_colour_plane_flag == rhs.separate_colour_plane_flag ) && + ( conformance_window_flag == rhs.conformance_window_flag ) && + ( sps_sub_layer_ordering_info_present_flag == rhs.sps_sub_layer_ordering_info_present_flag ) && + ( scaling_list_enabled_flag == rhs.scaling_list_enabled_flag ) && + ( sps_scaling_list_data_present_flag == rhs.sps_scaling_list_data_present_flag ) && ( amp_enabled_flag == rhs.amp_enabled_flag ) && + ( sample_adaptive_offset_enabled_flag == rhs.sample_adaptive_offset_enabled_flag ) && ( pcm_enabled_flag == rhs.pcm_enabled_flag ) && + ( pcm_loop_filter_disabled_flag == rhs.pcm_loop_filter_disabled_flag ) && + ( long_term_ref_pics_present_flag == rhs.long_term_ref_pics_present_flag ) && + ( sps_temporal_mvp_enabled_flag == rhs.sps_temporal_mvp_enabled_flag ) && + ( strong_intra_smoothing_enabled_flag == rhs.strong_intra_smoothing_enabled_flag ) && + ( vui_parameters_present_flag == rhs.vui_parameters_present_flag ) && ( sps_extension_present_flag == rhs.sps_extension_present_flag ) && + ( sps_range_extension_flag == rhs.sps_range_extension_flag ) && + ( transform_skip_rotation_enabled_flag == rhs.transform_skip_rotation_enabled_flag ) && + ( transform_skip_context_enabled_flag == rhs.transform_skip_context_enabled_flag ) && + ( implicit_rdpcm_enabled_flag == rhs.implicit_rdpcm_enabled_flag ) && ( explicit_rdpcm_enabled_flag == rhs.explicit_rdpcm_enabled_flag ) && + ( extended_precision_processing_flag == rhs.extended_precision_processing_flag ) && + ( intra_smoothing_disabled_flag == rhs.intra_smoothing_disabled_flag ) && + ( high_precision_offsets_enabled_flag == rhs.high_precision_offsets_enabled_flag ) && + ( persistent_rice_adaptation_enabled_flag == rhs.persistent_rice_adaptation_enabled_flag ) && + ( cabac_bypass_alignment_enabled_flag == rhs.cabac_bypass_alignment_enabled_flag ) && ( sps_scc_extension_flag == rhs.sps_scc_extension_flag ) && + ( sps_curr_pic_ref_enabled_flag == rhs.sps_curr_pic_ref_enabled_flag ) && ( palette_mode_enabled_flag == rhs.palette_mode_enabled_flag ) && + ( sps_palette_predictor_initializers_present_flag == rhs.sps_palette_predictor_initializers_present_flag ) && + ( intra_boundary_filtering_disabled_flag == rhs.intra_boundary_filtering_disabled_flag ); + } + + bool operator!=( H265SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t sps_temporal_id_nesting_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t conformance_window_flag : 1; + uint32_t sps_sub_layer_ordering_info_present_flag : 1; + uint32_t scaling_list_enabled_flag : 1; + uint32_t sps_scaling_list_data_present_flag : 1; + uint32_t amp_enabled_flag : 1; + uint32_t sample_adaptive_offset_enabled_flag : 1; + uint32_t pcm_enabled_flag : 1; + uint32_t pcm_loop_filter_disabled_flag : 1; + uint32_t long_term_ref_pics_present_flag : 1; + uint32_t sps_temporal_mvp_enabled_flag : 1; + uint32_t strong_intra_smoothing_enabled_flag : 1; + uint32_t vui_parameters_present_flag : 1; + uint32_t sps_extension_present_flag : 1; + uint32_t sps_range_extension_flag : 1; + uint32_t transform_skip_rotation_enabled_flag : 1; + uint32_t transform_skip_context_enabled_flag : 1; + uint32_t implicit_rdpcm_enabled_flag : 1; + 
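+      // Each one-bit field in this flags struct mirrors the identically named
+      // SPS syntax element from the H.265 specification. A hypothetical
+      // configuration sketch (the field choices are illustrative, not
+      // normative):
+      //
+      //   H265SpsFlags spsFlags                        = {};
+      //   spsFlags.sps_temporal_mvp_enabled_flag       = 1;
+      //   spsFlags.sample_adaptive_offset_enabled_flag = 1;
+      //   spsFlags.vui_parameters_present_flag         = 1;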
uint32_t explicit_rdpcm_enabled_flag : 1; + uint32_t extended_precision_processing_flag : 1; + uint32_t intra_smoothing_disabled_flag : 1; + uint32_t high_precision_offsets_enabled_flag : 1; + uint32_t persistent_rice_adaptation_enabled_flag : 1; + uint32_t cabac_bypass_alignment_enabled_flag : 1; + uint32_t sps_scc_extension_flag : 1; + uint32_t sps_curr_pic_ref_enabled_flag : 1; + uint32_t palette_mode_enabled_flag : 1; + uint32_t sps_palette_predictor_initializers_present_flag : 1; + uint32_t intra_boundary_filtering_disabled_flag : 1; + }; + + struct H265ShortTermRefPicSetFlags + { + using NativeType = StdVideoH265ShortTermRefPicSetFlags; + + operator StdVideoH265ShortTermRefPicSetFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ShortTermRefPicSetFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265ShortTermRefPicSetFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( inter_ref_pic_set_prediction_flag == rhs.inter_ref_pic_set_prediction_flag ) && ( delta_rps_sign == rhs.delta_rps_sign ); + } + + bool operator!=( H265ShortTermRefPicSetFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t inter_ref_pic_set_prediction_flag : 1; + uint32_t delta_rps_sign : 1; + }; + + struct H265ShortTermRefPicSet + { + using NativeType = StdVideoH265ShortTermRefPicSet; + + operator StdVideoH265ShortTermRefPicSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ShortTermRefPicSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265ShortTermRefPicSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( delta_idx_minus1 == rhs.delta_idx_minus1 ) && ( use_delta_flag == rhs.use_delta_flag ) && + ( abs_delta_rps_minus1 == rhs.abs_delta_rps_minus1 ) && ( used_by_curr_pic_flag == rhs.used_by_curr_pic_flag ) && + ( used_by_curr_pic_s0_flag == rhs.used_by_curr_pic_s0_flag ) && ( used_by_curr_pic_s1_flag == rhs.used_by_curr_pic_s1_flag ) && + ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( reserved3 == rhs.reserved3 ) && + ( num_negative_pics == rhs.num_negative_pics ) && ( num_positive_pics == rhs.num_positive_pics ) && + ( delta_poc_s0_minus1 == rhs.delta_poc_s0_minus1 ) && ( delta_poc_s1_minus1 == rhs.delta_poc_s1_minus1 ); + } + + bool operator!=( H265ShortTermRefPicSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSetFlags flags = {}; + uint32_t delta_idx_minus1 = {}; + uint16_t use_delta_flag = {}; + uint16_t abs_delta_rps_minus1 = {}; + uint16_t used_by_curr_pic_flag = {}; + uint16_t used_by_curr_pic_s0_flag = {}; + uint16_t used_by_curr_pic_s1_flag = {}; + uint16_t reserved1 = {}; + uint8_t reserved2 = {}; + uint8_t reserved3 = {}; + uint8_t num_negative_pics = {}; + uint8_t num_positive_pics = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_s0_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_s1_minus1 = {}; + }; + + struct H265LongTermRefPicsSps + { + using NativeType = StdVideoH265LongTermRefPicsSps; + + operator StdVideoH265LongTermRefPicsSps const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265LongTermRefPicsSps &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( 
H265LongTermRefPicsSps const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( used_by_curr_pic_lt_sps_flag == rhs.used_by_curr_pic_lt_sps_flag ) && ( lt_ref_pic_poc_lsb_sps == rhs.lt_ref_pic_poc_lsb_sps ); + } + + bool operator!=( H265LongTermRefPicsSps const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t used_by_curr_pic_lt_sps_flag = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D lt_ref_pic_poc_lsb_sps = {}; + }; + + struct H265SequenceParameterSet + { + using NativeType = StdVideoH265SequenceParameterSet; + + operator StdVideoH265SequenceParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SequenceParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( chroma_format_idc == rhs.chroma_format_idc ) && ( pic_width_in_luma_samples == rhs.pic_width_in_luma_samples ) && + ( pic_height_in_luma_samples == rhs.pic_height_in_luma_samples ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( sps_max_sub_layers_minus1 == rhs.sps_max_sub_layers_minus1 ) && ( sps_seq_parameter_set_id == rhs.sps_seq_parameter_set_id ) && + ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && ( bit_depth_chroma_minus8 == rhs.bit_depth_chroma_minus8 ) && + ( log2_max_pic_order_cnt_lsb_minus4 == rhs.log2_max_pic_order_cnt_lsb_minus4 ) && + ( log2_min_luma_coding_block_size_minus3 == rhs.log2_min_luma_coding_block_size_minus3 ) && + ( log2_diff_max_min_luma_coding_block_size == rhs.log2_diff_max_min_luma_coding_block_size ) && + ( log2_min_luma_transform_block_size_minus2 == rhs.log2_min_luma_transform_block_size_minus2 ) && + ( log2_diff_max_min_luma_transform_block_size == rhs.log2_diff_max_min_luma_transform_block_size ) && + ( max_transform_hierarchy_depth_inter == rhs.max_transform_hierarchy_depth_inter ) && + ( max_transform_hierarchy_depth_intra == rhs.max_transform_hierarchy_depth_intra ) && + ( num_short_term_ref_pic_sets == rhs.num_short_term_ref_pic_sets ) && ( num_long_term_ref_pics_sps == rhs.num_long_term_ref_pics_sps ) && + ( pcm_sample_bit_depth_luma_minus1 == rhs.pcm_sample_bit_depth_luma_minus1 ) && + ( pcm_sample_bit_depth_chroma_minus1 == rhs.pcm_sample_bit_depth_chroma_minus1 ) && + ( log2_min_pcm_luma_coding_block_size_minus3 == rhs.log2_min_pcm_luma_coding_block_size_minus3 ) && + ( log2_diff_max_min_pcm_luma_coding_block_size == rhs.log2_diff_max_min_pcm_luma_coding_block_size ) && ( reserved1 == rhs.reserved1 ) && + ( reserved2 == rhs.reserved2 ) && ( palette_max_size == rhs.palette_max_size ) && + ( delta_palette_max_predictor_size == rhs.delta_palette_max_predictor_size ) && + ( motion_vector_resolution_control_idc == rhs.motion_vector_resolution_control_idc ) && + ( sps_num_palette_predictor_initializers_minus1 == rhs.sps_num_palette_predictor_initializers_minus1 ) && + ( conf_win_left_offset == rhs.conf_win_left_offset ) && ( conf_win_right_offset == rhs.conf_win_right_offset ) && + ( conf_win_top_offset == rhs.conf_win_top_offset ) && ( conf_win_bottom_offset == rhs.conf_win_bottom_offset ) && + ( pProfileTierLevel == rhs.pProfileTierLevel ) && ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && ( pScalingLists == rhs.pScalingLists ) && + ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && ( pLongTermRefPicsSps == rhs.pLongTermRefPicsSps ) && + ( pSequenceParameterSetVui == rhs.pSequenceParameterSetVui ) && ( 
pPredictorPaletteEntries == rhs.pPredictorPaletteEntries ); + } + + bool operator!=( H265SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SpsFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ChromaFormatIdc chroma_format_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ChromaFormatIdc::eMonochrome; + uint32_t pic_width_in_luma_samples = {}; + uint32_t pic_height_in_luma_samples = {}; + uint8_t sps_video_parameter_set_id = {}; + uint8_t sps_max_sub_layers_minus1 = {}; + uint8_t sps_seq_parameter_set_id = {}; + uint8_t bit_depth_luma_minus8 = {}; + uint8_t bit_depth_chroma_minus8 = {}; + uint8_t log2_max_pic_order_cnt_lsb_minus4 = {}; + uint8_t log2_min_luma_coding_block_size_minus3 = {}; + uint8_t log2_diff_max_min_luma_coding_block_size = {}; + uint8_t log2_min_luma_transform_block_size_minus2 = {}; + uint8_t log2_diff_max_min_luma_transform_block_size = {}; + uint8_t max_transform_hierarchy_depth_inter = {}; + uint8_t max_transform_hierarchy_depth_intra = {}; + uint8_t num_short_term_ref_pic_sets = {}; + uint8_t num_long_term_ref_pics_sps = {}; + uint8_t pcm_sample_bit_depth_luma_minus1 = {}; + uint8_t pcm_sample_bit_depth_chroma_minus1 = {}; + uint8_t log2_min_pcm_luma_coding_block_size_minus3 = {}; + uint8_t log2_diff_max_min_pcm_luma_coding_block_size = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + uint8_t palette_max_size = {}; + uint8_t delta_palette_max_predictor_size = {}; + uint8_t motion_vector_resolution_control_idc = {}; + uint8_t sps_num_palette_predictor_initializers_minus1 = {}; + uint32_t conf_win_left_offset = {}; + uint32_t conf_win_right_offset = {}; + uint32_t conf_win_top_offset = {}; + uint32_t conf_win_bottom_offset = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevel * pProfileTierLevel = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265DecPicBufMgr * pDecPicBufMgr = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ScalingLists * pScalingLists = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSet * pShortTermRefPicSet = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LongTermRefPicsSps * pLongTermRefPicsSps = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SequenceParameterSetVui * pSequenceParameterSetVui = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PredictorPaletteEntries * pPredictorPaletteEntries = {}; + }; + + struct H265PpsFlags + { + using NativeType = StdVideoH265PpsFlags; + + operator StdVideoH265PpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265PpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( dependent_slice_segments_enabled_flag == rhs.dependent_slice_segments_enabled_flag ) && + ( output_flag_present_flag == rhs.output_flag_present_flag ) && ( sign_data_hiding_enabled_flag == rhs.sign_data_hiding_enabled_flag ) && + ( cabac_init_present_flag == rhs.cabac_init_present_flag ) && ( constrained_intra_pred_flag == rhs.constrained_intra_pred_flag ) && + ( transform_skip_enabled_flag == rhs.transform_skip_enabled_flag ) && ( cu_qp_delta_enabled_flag == rhs.cu_qp_delta_enabled_flag ) && + ( pps_slice_chroma_qp_offsets_present_flag == 
rhs.pps_slice_chroma_qp_offsets_present_flag ) && + ( weighted_pred_flag == rhs.weighted_pred_flag ) && ( weighted_bipred_flag == rhs.weighted_bipred_flag ) && + ( transquant_bypass_enabled_flag == rhs.transquant_bypass_enabled_flag ) && ( tiles_enabled_flag == rhs.tiles_enabled_flag ) && + ( entropy_coding_sync_enabled_flag == rhs.entropy_coding_sync_enabled_flag ) && ( uniform_spacing_flag == rhs.uniform_spacing_flag ) && + ( loop_filter_across_tiles_enabled_flag == rhs.loop_filter_across_tiles_enabled_flag ) && + ( pps_loop_filter_across_slices_enabled_flag == rhs.pps_loop_filter_across_slices_enabled_flag ) && + ( deblocking_filter_control_present_flag == rhs.deblocking_filter_control_present_flag ) && + ( deblocking_filter_override_enabled_flag == rhs.deblocking_filter_override_enabled_flag ) && + ( pps_deblocking_filter_disabled_flag == rhs.pps_deblocking_filter_disabled_flag ) && + ( pps_scaling_list_data_present_flag == rhs.pps_scaling_list_data_present_flag ) && + ( lists_modification_present_flag == rhs.lists_modification_present_flag ) && + ( slice_segment_header_extension_present_flag == rhs.slice_segment_header_extension_present_flag ) && + ( pps_extension_present_flag == rhs.pps_extension_present_flag ) && + ( cross_component_prediction_enabled_flag == rhs.cross_component_prediction_enabled_flag ) && + ( chroma_qp_offset_list_enabled_flag == rhs.chroma_qp_offset_list_enabled_flag ) && + ( pps_curr_pic_ref_enabled_flag == rhs.pps_curr_pic_ref_enabled_flag ) && + ( residual_adaptive_colour_transform_enabled_flag == rhs.residual_adaptive_colour_transform_enabled_flag ) && + ( pps_slice_act_qp_offsets_present_flag == rhs.pps_slice_act_qp_offsets_present_flag ) && + ( pps_palette_predictor_initializers_present_flag == rhs.pps_palette_predictor_initializers_present_flag ) && + ( monochrome_palette_flag == rhs.monochrome_palette_flag ) && ( pps_range_extension_flag == rhs.pps_range_extension_flag ); + } + + bool operator!=( H265PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t dependent_slice_segments_enabled_flag : 1; + uint32_t output_flag_present_flag : 1; + uint32_t sign_data_hiding_enabled_flag : 1; + uint32_t cabac_init_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t transform_skip_enabled_flag : 1; + uint32_t cu_qp_delta_enabled_flag : 1; + uint32_t pps_slice_chroma_qp_offsets_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t weighted_bipred_flag : 1; + uint32_t transquant_bypass_enabled_flag : 1; + uint32_t tiles_enabled_flag : 1; + uint32_t entropy_coding_sync_enabled_flag : 1; + uint32_t uniform_spacing_flag : 1; + uint32_t loop_filter_across_tiles_enabled_flag : 1; + uint32_t pps_loop_filter_across_slices_enabled_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t deblocking_filter_override_enabled_flag : 1; + uint32_t pps_deblocking_filter_disabled_flag : 1; + uint32_t pps_scaling_list_data_present_flag : 1; + uint32_t lists_modification_present_flag : 1; + uint32_t slice_segment_header_extension_present_flag : 1; + uint32_t pps_extension_present_flag : 1; + uint32_t cross_component_prediction_enabled_flag : 1; + uint32_t chroma_qp_offset_list_enabled_flag : 1; + uint32_t pps_curr_pic_ref_enabled_flag : 1; + uint32_t residual_adaptive_colour_transform_enabled_flag : 1; + uint32_t pps_slice_act_qp_offsets_present_flag : 1; + uint32_t pps_palette_predictor_initializers_present_flag : 1; + uint32_t monochrome_palette_flag : 1; + uint32_t 
pps_range_extension_flag : 1; + }; + + struct H265PictureParameterSet + { + using NativeType = StdVideoH265PictureParameterSet; + + operator StdVideoH265PictureParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265PictureParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && + ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( num_extra_slice_header_bits == rhs.num_extra_slice_header_bits ) && + ( num_ref_idx_l0_default_active_minus1 == rhs.num_ref_idx_l0_default_active_minus1 ) && + ( num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && ( init_qp_minus26 == rhs.init_qp_minus26 ) && + ( diff_cu_qp_delta_depth == rhs.diff_cu_qp_delta_depth ) && ( pps_cb_qp_offset == rhs.pps_cb_qp_offset ) && + ( pps_cr_qp_offset == rhs.pps_cr_qp_offset ) && ( pps_beta_offset_div2 == rhs.pps_beta_offset_div2 ) && + ( pps_tc_offset_div2 == rhs.pps_tc_offset_div2 ) && ( log2_parallel_merge_level_minus2 == rhs.log2_parallel_merge_level_minus2 ) && + ( log2_max_transform_skip_block_size_minus2 == rhs.log2_max_transform_skip_block_size_minus2 ) && + ( diff_cu_chroma_qp_offset_depth == rhs.diff_cu_chroma_qp_offset_depth ) && + ( chroma_qp_offset_list_len_minus1 == rhs.chroma_qp_offset_list_len_minus1 ) && ( cb_qp_offset_list == rhs.cb_qp_offset_list ) && + ( cr_qp_offset_list == rhs.cr_qp_offset_list ) && ( log2_sao_offset_scale_luma == rhs.log2_sao_offset_scale_luma ) && + ( log2_sao_offset_scale_chroma == rhs.log2_sao_offset_scale_chroma ) && ( pps_act_y_qp_offset_plus5 == rhs.pps_act_y_qp_offset_plus5 ) && + ( pps_act_cb_qp_offset_plus5 == rhs.pps_act_cb_qp_offset_plus5 ) && ( pps_act_cr_qp_offset_plus3 == rhs.pps_act_cr_qp_offset_plus3 ) && + ( pps_num_palette_predictor_initializers == rhs.pps_num_palette_predictor_initializers ) && + ( luma_bit_depth_entry_minus8 == rhs.luma_bit_depth_entry_minus8 ) && ( chroma_bit_depth_entry_minus8 == rhs.chroma_bit_depth_entry_minus8 ) && + ( num_tile_columns_minus1 == rhs.num_tile_columns_minus1 ) && ( num_tile_rows_minus1 == rhs.num_tile_rows_minus1 ) && + ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( column_width_minus1 == rhs.column_width_minus1 ) && + ( row_height_minus1 == rhs.row_height_minus1 ) && ( reserved3 == rhs.reserved3 ) && ( pScalingLists == rhs.pScalingLists ) && + ( pPredictorPaletteEntries == rhs.pPredictorPaletteEntries ); + } + + bool operator!=( H265PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PpsFlags flags = {}; + uint8_t pps_pic_parameter_set_id = {}; + uint8_t pps_seq_parameter_set_id = {}; + uint8_t sps_video_parameter_set_id = {}; + uint8_t num_extra_slice_header_bits = {}; + uint8_t num_ref_idx_l0_default_active_minus1 = {}; + uint8_t num_ref_idx_l1_default_active_minus1 = {}; + int8_t init_qp_minus26 = {}; + uint8_t diff_cu_qp_delta_depth = {}; + int8_t pps_cb_qp_offset = {}; + int8_t pps_cr_qp_offset = {}; + int8_t pps_beta_offset_div2 = {}; + int8_t pps_tc_offset_div2 = {}; + uint8_t log2_parallel_merge_level_minus2 = {}; + uint8_t log2_max_transform_skip_block_size_minus2 = {}; + uint8_t 
diff_cu_chroma_qp_offset_depth = {}; + uint8_t chroma_qp_offset_list_len_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cb_qp_offset_list = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cr_qp_offset_list = {}; + uint8_t log2_sao_offset_scale_luma = {}; + uint8_t log2_sao_offset_scale_chroma = {}; + int8_t pps_act_y_qp_offset_plus5 = {}; + int8_t pps_act_cb_qp_offset_plus5 = {}; + int8_t pps_act_cr_qp_offset_plus3 = {}; + uint8_t pps_num_palette_predictor_initializers = {}; + uint8_t luma_bit_depth_entry_minus8 = {}; + uint8_t chroma_bit_depth_entry_minus8 = {}; + uint8_t num_tile_columns_minus1 = {}; + uint8_t num_tile_rows_minus1 = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D column_width_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D row_height_minus1 = {}; + uint32_t reserved3 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ScalingLists * pScalingLists = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PredictorPaletteEntries * pPredictorPaletteEntries = {}; + }; + + //=== vulkan_video_codec_h265std_decode === + + struct DecodeH265PictureInfoFlags + { + using NativeType = StdVideoDecodeH265PictureInfoFlags; + + operator StdVideoDecodeH265PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( IrapPicFlag == rhs.IrapPicFlag ) && ( IdrPicFlag == rhs.IdrPicFlag ) && ( IsReference == rhs.IsReference ) && + ( short_term_ref_pic_set_sps_flag == rhs.short_term_ref_pic_set_sps_flag ); + } + + bool operator!=( DecodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t IrapPicFlag : 1; + uint32_t IdrPicFlag : 1; + uint32_t IsReference : 1; + uint32_t short_term_ref_pic_set_sps_flag : 1; + }; + + struct DecodeH265PictureInfo + { + using NativeType = StdVideoDecodeH265PictureInfo; + + operator StdVideoDecodeH265PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && + ( NumDeltaPocsOfRefRpsIdx == rhs.NumDeltaPocsOfRefRpsIdx ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) && + ( NumBitsForSTRefPicSetInSlice == rhs.NumBitsForSTRefPicSetInSlice ) && ( reserved == rhs.reserved ) && + ( RefPicSetStCurrBefore == rhs.RefPicSetStCurrBefore ) && ( RefPicSetStCurrAfter == rhs.RefPicSetStCurrAfter ) && + ( RefPicSetLtCurr == rhs.RefPicSetLtCurr ); + } + + bool operator!=( DecodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH265PictureInfoFlags flags = {}; + uint8_t sps_video_parameter_set_id = {}; + uint8_t pps_seq_parameter_set_id = {}; + uint8_t pps_pic_parameter_set_id = {}; + uint8_t NumDeltaPocsOfRefRpsIdx = {}; + int32_t PicOrderCntVal = {}; + uint16_t NumBitsForSTRefPicSetInSlice = {}; + uint16_t reserved = {}; + 
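+      // These decode std structs are not handed to commands directly; in the
+      // C API a pointer to the filled StdVideoDecodeH265PictureInfo is chained
+      // in through VkVideoDecodeH265PictureInfoKHR (sketch, assuming the
+      // fields were parsed from the bitstream):
+      //
+      //   StdVideoDecodeH265PictureInfo stdInfo = {};   // filled per picture
+      //   VkVideoDecodeH265PictureInfoKHR h265Info = {};
+      //   h265Info.sType           = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR;
+      //   h265Info.pStdPictureInfo = &stdInfo;
+      //   // h265Info then goes on the pNext chain of the VkVideoDecodeInfoKHR
+      //   // passed to vkCmdDecodeVideoKHR.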
VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicSetStCurrBefore = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicSetStCurrAfter = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicSetLtCurr = {}; + }; + + struct DecodeH265ReferenceInfoFlags + { + using NativeType = StdVideoDecodeH265ReferenceInfoFlags; + + operator StdVideoDecodeH265ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( unused_for_reference == rhs.unused_for_reference ); + } + + bool operator!=( DecodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t used_for_long_term_reference : 1; + uint32_t unused_for_reference : 1; + }; + + struct DecodeH265ReferenceInfo + { + using NativeType = StdVideoDecodeH265ReferenceInfo; + + operator StdVideoDecodeH265ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( PicOrderCntVal == rhs.PicOrderCntVal ); + } + + bool operator!=( DecodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH265ReferenceInfoFlags flags = {}; + int32_t PicOrderCntVal = {}; + }; + + //=== vulkan_video_codec_h265std_encode === + + struct EncodeH265WeightTableFlags + { + using NativeType = StdVideoEncodeH265WeightTableFlags; + + operator StdVideoEncodeH265WeightTableFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265WeightTableFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) && + ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag ); + } + + bool operator!=( EncodeH265WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint16_t luma_weight_l0_flag = {}; + uint16_t chroma_weight_l0_flag = {}; + uint16_t luma_weight_l1_flag = {}; + uint16_t chroma_weight_l1_flag = {}; + }; + + struct EncodeH265WeightTable + { + using NativeType = StdVideoEncodeH265WeightTable; + + operator StdVideoEncodeH265WeightTable const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265WeightTable &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) && + ( delta_chroma_log2_weight_denom == rhs.delta_chroma_log2_weight_denom ) && ( delta_luma_weight_l0 == rhs.delta_luma_weight_l0 ) && + ( luma_offset_l0 == rhs.luma_offset_l0 ) && ( delta_chroma_weight_l0 == rhs.delta_chroma_weight_l0 ) && + ( 
delta_chroma_offset_l0 == rhs.delta_chroma_offset_l0 ) && ( delta_luma_weight_l1 == rhs.delta_luma_weight_l1 ) && + ( luma_offset_l1 == rhs.luma_offset_l1 ) && ( delta_chroma_weight_l1 == rhs.delta_chroma_weight_l1 ) && + ( delta_chroma_offset_l1 == rhs.delta_chroma_offset_l1 ); + } + + bool operator!=( EncodeH265WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265WeightTableFlags flags = {}; + uint8_t luma_log2_weight_denom = {}; + int8_t delta_chroma_log2_weight_denom = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_luma_weight_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_offset_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_weight_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_offset_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_luma_weight_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_offset_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_weight_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_offset_l1 = {}; + }; + + struct EncodeH265SliceSegmentHeaderFlags + { + using NativeType = StdVideoEncodeH265SliceSegmentHeaderFlags; + + operator StdVideoEncodeH265SliceSegmentHeaderFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265SliceSegmentHeaderFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265SliceSegmentHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( first_slice_segment_in_pic_flag == rhs.first_slice_segment_in_pic_flag ) && + ( dependent_slice_segment_flag == rhs.dependent_slice_segment_flag ) && ( slice_sao_luma_flag == rhs.slice_sao_luma_flag ) && + ( slice_sao_chroma_flag == rhs.slice_sao_chroma_flag ) && ( num_ref_idx_active_override_flag == rhs.num_ref_idx_active_override_flag ) && + ( mvd_l1_zero_flag == rhs.mvd_l1_zero_flag ) && ( cabac_init_flag == rhs.cabac_init_flag ) && + ( cu_chroma_qp_offset_enabled_flag == rhs.cu_chroma_qp_offset_enabled_flag ) && + ( deblocking_filter_override_flag == rhs.deblocking_filter_override_flag ) && + ( slice_deblocking_filter_disabled_flag == rhs.slice_deblocking_filter_disabled_flag ) && + ( collocated_from_l0_flag == rhs.collocated_from_l0_flag ) && + ( slice_loop_filter_across_slices_enabled_flag == rhs.slice_loop_filter_across_slices_enabled_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH265SliceSegmentHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t first_slice_segment_in_pic_flag : 1; + uint32_t dependent_slice_segment_flag : 1; + uint32_t slice_sao_luma_flag : 1; + uint32_t slice_sao_chroma_flag : 1; + uint32_t num_ref_idx_active_override_flag : 1; + uint32_t mvd_l1_zero_flag : 1; + uint32_t cabac_init_flag : 1; + uint32_t cu_chroma_qp_offset_enabled_flag : 1; + uint32_t deblocking_filter_override_flag : 1; + uint32_t slice_deblocking_filter_disabled_flag : 1; + uint32_t collocated_from_l0_flag : 1; + uint32_t slice_loop_filter_across_slices_enabled_flag : 1; + uint32_t reserved : 20; + }; + + struct EncodeH265SliceSegmentHeader + { + using NativeType = StdVideoEncodeH265SliceSegmentHeader; + + operator StdVideoEncodeH265SliceSegmentHeader const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265SliceSegmentHeader &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); 
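+      // On the encode side this header is likewise consumed by pointer: each
+      // NALU slice segment references a StdVideoEncodeH265SliceSegmentHeader
+      // through a pStdSliceSegmentHeader member (in recent C headers the
+      // containing struct is VkVideoEncodeH265NaluSliceSegmentInfoKHR; older
+      // releases ship the EXT spelling). Sketch:
+      //
+      //   StdVideoEncodeH265SliceSegmentHeader stdHeader = {};  // slice_type, QP deltas, ...
+      //   VkVideoEncodeH265NaluSliceSegmentInfoKHR slice = {};
+      //   slice.sType                  = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR;
+      //   slice.pStdSliceSegmentHeader = &stdHeader;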
+ } + + bool operator==( EncodeH265SliceSegmentHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( slice_type == rhs.slice_type ) && ( slice_segment_address == rhs.slice_segment_address ) && + ( collocated_ref_idx == rhs.collocated_ref_idx ) && ( MaxNumMergeCand == rhs.MaxNumMergeCand ) && + ( slice_cb_qp_offset == rhs.slice_cb_qp_offset ) && ( slice_cr_qp_offset == rhs.slice_cr_qp_offset ) && + ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) && ( slice_tc_offset_div2 == rhs.slice_tc_offset_div2 ) && + ( slice_act_y_qp_offset == rhs.slice_act_y_qp_offset ) && ( slice_act_cb_qp_offset == rhs.slice_act_cb_qp_offset ) && + ( slice_act_cr_qp_offset == rhs.slice_act_cr_qp_offset ) && ( slice_qp_delta == rhs.slice_qp_delta ) && ( reserved1 == rhs.reserved1 ) && + ( pWeightTable == rhs.pWeightTable ); + } + + bool operator!=( EncodeH265SliceSegmentHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265SliceSegmentHeaderFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SliceType slice_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SliceType::eB; + uint32_t slice_segment_address = {}; + uint8_t collocated_ref_idx = {}; + uint8_t MaxNumMergeCand = {}; + int8_t slice_cb_qp_offset = {}; + int8_t slice_cr_qp_offset = {}; + int8_t slice_beta_offset_div2 = {}; + int8_t slice_tc_offset_div2 = {}; + int8_t slice_act_y_qp_offset = {}; + int8_t slice_act_cb_qp_offset = {}; + int8_t slice_act_cr_qp_offset = {}; + int8_t slice_qp_delta = {}; + uint16_t reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265WeightTable * pWeightTable = {}; + }; + + struct EncodeH265ReferenceListsInfoFlags + { + using NativeType = StdVideoEncodeH265ReferenceListsInfoFlags; + + operator StdVideoEncodeH265ReferenceListsInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceListsInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( ref_pic_list_modification_flag_l0 == rhs.ref_pic_list_modification_flag_l0 ) && + ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH265ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t ref_pic_list_modification_flag_l0 : 1; + uint32_t ref_pic_list_modification_flag_l1 : 1; + uint32_t reserved : 30; + }; + + struct EncodeH265ReferenceListsInfo + { + using NativeType = StdVideoEncodeH265ReferenceListsInfo; + + operator StdVideoEncodeH265ReferenceListsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceListsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( num_ref_idx_l0_active_minus1 == rhs.num_ref_idx_l0_active_minus1 ) && + ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && ( RefPicList0 == rhs.RefPicList0 ) && + ( RefPicList1 == rhs.RefPicList1 ) && ( list_entry_l0 == rhs.list_entry_l0 ) && ( list_entry_l1 == rhs.list_entry_l1 ); + } + + bool operator!=( 
EncodeH265ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceListsInfoFlags flags = {}; + uint8_t num_ref_idx_l0_active_minus1 = {}; + uint8_t num_ref_idx_l1_active_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D list_entry_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D list_entry_l1 = {}; + }; + + struct EncodeH265PictureInfoFlags + { + using NativeType = StdVideoEncodeH265PictureInfoFlags; + + operator StdVideoEncodeH265PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( is_reference == rhs.is_reference ) && ( IrapPicFlag == rhs.IrapPicFlag ) && + ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( discardable_flag == rhs.discardable_flag ) && + ( cross_layer_bla_flag == rhs.cross_layer_bla_flag ) && ( pic_output_flag == rhs.pic_output_flag ) && + ( no_output_of_prior_pics_flag == rhs.no_output_of_prior_pics_flag ) && + ( short_term_ref_pic_set_sps_flag == rhs.short_term_ref_pic_set_sps_flag ) && + ( slice_temporal_mvp_enabled_flag == rhs.slice_temporal_mvp_enabled_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t is_reference : 1; + uint32_t IrapPicFlag : 1; + uint32_t used_for_long_term_reference : 1; + uint32_t discardable_flag : 1; + uint32_t cross_layer_bla_flag : 1; + uint32_t pic_output_flag : 1; + uint32_t no_output_of_prior_pics_flag : 1; + uint32_t short_term_ref_pic_set_sps_flag : 1; + uint32_t slice_temporal_mvp_enabled_flag : 1; + uint32_t reserved : 23; + }; + + struct EncodeH265LongTermRefPics + { + using NativeType = StdVideoEncodeH265LongTermRefPics; + + operator StdVideoEncodeH265LongTermRefPics const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265LongTermRefPics &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265LongTermRefPics const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( num_long_term_sps == rhs.num_long_term_sps ) && ( num_long_term_pics == rhs.num_long_term_pics ) && ( lt_idx_sps == rhs.lt_idx_sps ) && + ( poc_lsb_lt == rhs.poc_lsb_lt ) && ( used_by_curr_pic_lt_flag == rhs.used_by_curr_pic_lt_flag ) && + ( delta_poc_msb_present_flag == rhs.delta_poc_msb_present_flag ) && ( delta_poc_msb_cycle_lt == rhs.delta_poc_msb_cycle_lt ); + } + + bool operator!=( EncodeH265LongTermRefPics const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint8_t num_long_term_sps = {}; + uint8_t num_long_term_pics = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D lt_idx_sps = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D poc_lsb_lt = {}; + uint16_t used_by_curr_pic_lt_flag = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_msb_present_flag = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_msb_cycle_lt = {}; + }; + + struct EncodeH265PictureInfo + { + using NativeType = StdVideoEncodeH265PictureInfo; + + operator StdVideoEncodeH265PictureInfo const &() const VULKAN_HPP_NOEXCEPT 
+ { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( pic_type == rhs.pic_type ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && + ( short_term_ref_pic_set_idx == rhs.short_term_ref_pic_set_idx ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) && + ( TemporalId == rhs.TemporalId ) && ( reserved1 == rhs.reserved1 ) && ( pRefLists == rhs.pRefLists ) && + ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && ( pLongTermRefPics == rhs.pLongTermRefPics ); + } + + bool operator!=( EncodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265PictureInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType pic_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType::eP; + uint8_t sps_video_parameter_set_id = {}; + uint8_t pps_seq_parameter_set_id = {}; + uint8_t pps_pic_parameter_set_id = {}; + uint8_t short_term_ref_pic_set_idx = {}; + int32_t PicOrderCntVal = {}; + uint8_t TemporalId = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceListsInfo * pRefLists = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSet * pShortTermRefPicSet = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265LongTermRefPics * pLongTermRefPics = {}; + }; + + struct EncodeH265ReferenceInfoFlags + { + using NativeType = StdVideoEncodeH265ReferenceInfoFlags; + + operator StdVideoEncodeH265ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( unused_for_reference == rhs.unused_for_reference ) && + ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t used_for_long_term_reference : 1; + uint32_t unused_for_reference : 1; + uint32_t reserved : 30; + }; + + struct EncodeH265ReferenceInfo + { + using NativeType = StdVideoEncodeH265ReferenceInfo; + + operator StdVideoEncodeH265ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( pic_type == rhs.pic_type ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) && ( TemporalId == rhs.TemporalId ); + } + + bool operator!=( EncodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType 
pic_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType::eP; + int32_t PicOrderCntVal = {}; + uint8_t TemporalId = {}; + }; + + } // namespace VULKAN_HPP_VIDEO_NAMESPACE +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_wayland.h b/MacroLibX/third_party/vulkan/vulkan_wayland.h new file mode 100644 index 0000000..c93b217 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_wayland.h @@ -0,0 +1,55 @@ +#ifndef VULKAN_WAYLAND_H_ +#define VULKAN_WAYLAND_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_wayland_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_wayland_surface 1 +#define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 6 +#define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface" +typedef VkFlags VkWaylandSurfaceCreateFlagsKHR; +typedef struct VkWaylandSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkWaylandSurfaceCreateFlagsKHR flags; + struct wl_display* display; + struct wl_surface* surface; +} VkWaylandSurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateWaylandSurfaceKHR)(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR( + VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct wl_display* display); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_win32.h b/MacroLibX/third_party/vulkan/vulkan_win32.h new file mode 100644 index 0000000..fae3b85 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_win32.h @@ -0,0 +1,342 @@ +#ifndef VULKAN_WIN32_H_ +#define VULKAN_WIN32_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_win32_surface is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_win32_surface 1 +#define VK_KHR_WIN32_SURFACE_SPEC_VERSION 6 +#define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface" +typedef VkFlags VkWin32SurfaceCreateFlagsKHR; +typedef struct VkWin32SurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkWin32SurfaceCreateFlagsKHR flags; + HINSTANCE hinstance; + HWND hwnd; +} VkWin32SurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateWin32SurfaceKHR)(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR( + VkInstance instance, + const VkWin32SurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex); +#endif + + +// VK_KHR_external_memory_win32 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_memory_win32 1 +#define VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_KHR_external_memory_win32" +typedef struct VkImportMemoryWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportMemoryWin32HandleInfoKHR; + +typedef struct VkExportMemoryWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportMemoryWin32HandleInfoKHR; + +typedef struct VkMemoryWin32HandlePropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryWin32HandlePropertiesKHR; + +typedef struct VkMemoryGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetWin32HandleInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleKHR)(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandlePropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleKHR( + VkDevice device, + const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandlePropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); +#endif + + +// VK_KHR_win32_keyed_mutex is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_win32_keyed_mutex 1 +#define VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION 1 +#define VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_KHR_win32_keyed_mutex" +typedef struct VkWin32KeyedMutexAcquireReleaseInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t acquireCount; + const VkDeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeouts; + uint32_t releaseCount; + const VkDeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; +} VkWin32KeyedMutexAcquireReleaseInfoKHR; + + + +// VK_KHR_external_semaphore_win32 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_semaphore_win32 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME "VK_KHR_external_semaphore_win32" +typedef struct VkImportSemaphoreWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportSemaphoreWin32HandleInfoKHR; + +typedef struct VkExportSemaphoreWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportSemaphoreWin32HandleInfoKHR; + +typedef struct VkD3D12FenceSubmitInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreValuesCount; + const uint64_t* pWaitSemaphoreValues; + uint32_t signalSemaphoreValuesCount; + const uint64_t* pSignalSemaphoreValues; +} VkD3D12FenceSubmitInfoKHR; + +typedef struct VkSemaphoreGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetWin32HandleInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreWin32HandleKHR)(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreWin32HandleKHR)(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreWin32HandleKHR( + VkDevice device, + const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreWin32HandleKHR( + VkDevice device, + const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); +#endif + + +// VK_KHR_external_fence_win32 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_external_fence_win32 1 +#define VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME "VK_KHR_external_fence_win32" +typedef struct VkImportFenceWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkFenceImportFlags flags; + VkExternalFenceHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportFenceWin32HandleInfoKHR; + +typedef struct VkExportFenceWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportFenceWin32HandleInfoKHR; + +typedef struct VkFenceGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkExternalFenceHandleTypeFlagBits handleType; +} VkFenceGetWin32HandleInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportFenceWin32HandleKHR)(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceWin32HandleKHR)(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceWin32HandleKHR( + VkDevice device, + const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceWin32HandleKHR( + VkDevice device, + const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); +#endif + + +// VK_NV_external_memory_win32 is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_external_memory_win32 1 +#define VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_NV_external_memory_win32" +typedef struct VkImportMemoryWin32HandleInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleType; + HANDLE handle; +} VkImportMemoryWin32HandleInfoNV; + +typedef struct VkExportMemoryWin32HandleInfoNV { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; +} VkExportMemoryWin32HandleInfoNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleNV)(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleNV( + VkDevice device, + VkDeviceMemory memory, + VkExternalMemoryHandleTypeFlagsNV handleType, + HANDLE* pHandle); +#endif + + +// VK_NV_win32_keyed_mutex is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_win32_keyed_mutex 1 +#define VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION 2 +#define VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_NV_win32_keyed_mutex" +typedef struct VkWin32KeyedMutexAcquireReleaseInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t acquireCount; + const VkDeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeoutMilliseconds; + uint32_t releaseCount; + const VkDeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; +} VkWin32KeyedMutexAcquireReleaseInfoNV; + + + +// VK_EXT_full_screen_exclusive is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_full_screen_exclusive 1 +#define VK_EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION 4 +#define VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME "VK_EXT_full_screen_exclusive" + +typedef enum VkFullScreenExclusiveEXT { + VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT = 0, + VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT = 1, + VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT = 2, + VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT = 3, + VK_FULL_SCREEN_EXCLUSIVE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkFullScreenExclusiveEXT; +typedef struct VkSurfaceFullScreenExclusiveInfoEXT { + VkStructureType sType; + void* pNext; + VkFullScreenExclusiveEXT fullScreenExclusive; +} VkSurfaceFullScreenExclusiveInfoEXT; + +typedef struct VkSurfaceCapabilitiesFullScreenExclusiveEXT { + VkStructureType sType; + void* pNext; + VkBool32 fullScreenExclusiveSupported; +} VkSurfaceCapabilitiesFullScreenExclusiveEXT; + +typedef struct VkSurfaceFullScreenExclusiveWin32InfoEXT { + VkStructureType sType; + const void* pNext; + HMONITOR hmonitor; +} VkSurfaceFullScreenExclusiveWin32InfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireFullScreenExclusiveModeEXT)(VkDevice device, VkSwapchainKHR swapchain); +typedef VkResult (VKAPI_PTR *PFN_vkReleaseFullScreenExclusiveModeEXT)(VkDevice device, VkSwapchainKHR swapchain); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModes2EXT)(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModes2EXT( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + uint32_t* pPresentModeCount, + VkPresentModeKHR* pPresentModes); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireFullScreenExclusiveModeEXT( + VkDevice device, + VkSwapchainKHR swapchain); + +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseFullScreenExclusiveModeEXT( + VkDevice device, + VkSwapchainKHR swapchain); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModes2EXT( + VkDevice device, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkDeviceGroupPresentModeFlagsKHR* pModes); +#endif + + +// VK_NV_acquire_winrt_display is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_acquire_winrt_display 1 +#define VK_NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION 1 +#define VK_NV_ACQUIRE_WINRT_DISPLAY_EXTENSION_NAME "VK_NV_acquire_winrt_display" +typedef VkResult (VKAPI_PTR *PFN_vkAcquireWinrtDisplayNV)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetWinrtDisplayNV)(VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR* pDisplay); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireWinrtDisplayNV( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetWinrtDisplayNV( + VkPhysicalDevice physicalDevice, + uint32_t deviceRelativeId, + VkDisplayKHR* pDisplay); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_xcb.h b/MacroLibX/third_party/vulkan/vulkan_xcb.h new file mode 100644 index 0000000..de74055 --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_xcb.h @@ -0,0 +1,56 @@ +#ifndef VULKAN_XCB_H_ +#define VULKAN_XCB_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_xcb_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_xcb_surface 1 +#define VK_KHR_XCB_SURFACE_SPEC_VERSION 6 +#define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface" +typedef VkFlags VkXcbSurfaceCreateFlagsKHR; +typedef struct VkXcbSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkXcbSurfaceCreateFlagsKHR flags; + xcb_connection_t* connection; + xcb_window_t window; +} VkXcbSurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateXcbSurfaceKHR)(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR( + VkInstance instance, + const VkXcbSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t* connection, + xcb_visualid_t visual_id); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/MacroLibX/third_party/vulkan/vulkan_xlib.h b/MacroLibX/third_party/vulkan/vulkan_xlib.h new file mode 100644 index 0000000..1aa632f --- /dev/null +++ b/MacroLibX/third_party/vulkan/vulkan_xlib.h @@ -0,0 +1,56 @@ +#ifndef VULKAN_XLIB_H_ +#define VULKAN_XLIB_H_ 1 + +/* +** Copyright 2015-2023 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_xlib_surface is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_xlib_surface 1
+#define VK_KHR_XLIB_SURFACE_SPEC_VERSION 6
+#define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface"
+typedef VkFlags VkXlibSurfaceCreateFlagsKHR;
+typedef struct VkXlibSurfaceCreateInfoKHR {
+    VkStructureType sType;
+    const void* pNext;
+    VkXlibSurfaceCreateFlagsKHR flags;
+    Display* dpy;
+    Window window;
+} VkXlibSurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateXlibSurfaceKHR)(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(
+    VkInstance instance,
+    const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
+    const VkAllocationCallbacks* pAllocator,
+    VkSurfaceKHR* pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR(
+    VkPhysicalDevice physicalDevice,
+    uint32_t queueFamilyIndex,
+    Display* dpy,
+    VisualID visualID);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/MacroLibX/third_party/vulkan/vulkan_xlib_xrandr.h b/MacroLibX/third_party/vulkan/vulkan_xlib_xrandr.h
new file mode 100644
index 0000000..e164ffc
--- /dev/null
+++ b/MacroLibX/third_party/vulkan/vulkan_xlib_xrandr.h
@@ -0,0 +1,46 @@
+#ifndef VULKAN_XLIB_XRANDR_H_
+#define VULKAN_XLIB_XRANDR_H_ 1
+
+/*
+** Copyright 2015-2023 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+// VK_EXT_acquire_xlib_display is a preprocessor guard. Do not pass it to API calls.
+#define VK_EXT_acquire_xlib_display 1
+#define VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION 1
+#define VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_xlib_display"
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireXlibDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display);
+typedef VkResult (VKAPI_PTR *PFN_vkGetRandROutputDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireXlibDisplayEXT(
+    VkPhysicalDevice physicalDevice,
+    Display* dpy,
+    VkDisplayKHR display);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRandROutputDisplayEXT(
+    VkPhysicalDevice physicalDevice,
+    Display* dpy,
+    RROutput rrOutput,
+    VkDisplayKHR* pDisplay);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/MacroLibX/valgrind.supp b/MacroLibX/valgrind.supp
new file mode 100644
index 0000000..d81f800
--- /dev/null
+++ b/MacroLibX/valgrind.supp
@@ -0,0 +1,97 @@
+{
+ name
+ Memcheck:Leak
+ fun:*alloc
+ ...
+ obj:*libmlx*
+ ...
+}
+{
+ name
+ Memcheck:Leak
+ fun:*alloc
+ ...
+ obj:*SDL*
+ ...
+}
+{
+ name
+ Memcheck:Leak
+ fun:*alloc
+ ...
+ obj:*X11*
+ ...
+}
+{
+ name
+ Memcheck:Leak
+ fun:*alloc
+ ...
+ obj:*nvidia.so*
+ ...
+}
+{
+ name
+ Memcheck:Leak
+ fun:*alloc
+ obj:*
+ ...
+ fun:X11*
+ ...
+}
+{
+ name
+ Memcheck:Leak
+ fun:*alloc
+ obj:*
+ ...
+ fun:SDL*
+ ...
+}
+{
+ name
+ Memcheck:Leak
+ fun:*alloc
+ obj:*
+ ...
+ fun:dl_*
+ ...
+}
+{
+ name
+ Memcheck:Addr8
+ fun:*
+ obj:*
+ ...
+ fun:dl_*
+ ...
+}
+{
+ name
+ Memcheck:Leak
+ fun:calloc
+ obj:*
+}
+{
+ name
+ Memcheck:Leak
+ fun:realloc
+ obj:*
+}
+{
+ name
+ Memcheck:Leak
+ fun:malloc
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+}
+
diff --git a/MacroLibX/xmake.lua b/MacroLibX/xmake.lua
new file mode 100644
index 0000000..0a71c0c
--- /dev/null
+++ b/MacroLibX/xmake.lua
@@ -0,0 +1,78 @@
+--------------------------------------------------------------------------------
+--                                                                            --
+--                                                         :::      ::::::::  --
+--    xmake.lua                                          :+:      :+:    :+:  --
+--                                                     +:+ +:+         +:+    --
+--    By: maldavid                                   +#+  +:+       +#+       --
+--                                                 +#+#+#+#+#+   +#+          --
+--    Created: 2023/12/07 15:21:38 by kbz_8             #+#    #+#            --
+--    Updated: 2024/01/02 23:40:20 by kbz_8            ###   ########.fr      --
+--                                                                            --
+--------------------------------------------------------------------------------
+
+-- Global settings
+
+add_requires("libsdl", {configs = { sdlmain = false }})
+
+add_rules("mode.debug", "mode.release")
+set_languages("cxx17", "c99")
+
+set_objectdir("objs/xmake/$(os)_$(arch)")
+set_targetdir("./")
+
+set_optimize("fastest")
+
+-- Options
+
+option("images_optimized")
+    set_default(true)
+    add_defines("IMAGE_OPTIMIZED")
+option_end()
+
+option("force_integrated_gpu")
+    set_default(false)
+    add_defines("FORCE_INTEGRATED_GPU")
+option_end()
+
+option("graphics_memory_dump")
+    set_default(false)
+    add_defines("GRAPHICS_MEMORY_DUMP")
+option_end()
+
+-- Targets
+
+target("mlx")
+    set_default(true)
+    set_license("MIT")
+    set_kind("shared")
+    add_options("images_optimized")
+    add_options("force_integrated_gpu")
+    add_options("graphics_memory_dump")
+    add_includedirs("includes", "src", "third_party")
+
+    add_defines("MLX_BUILD", "SDL_MAIN_HANDLED")
+
+    add_files("src/**.cpp")
+
+    add_packages("libsdl")
+
+    if is_mode("debug") then
+        add_defines("DEBUG")
+    end
+target_end() -- optional but I think the code is cleaner with this
+
+target("Test")
+    set_default(false)
+    set_kind("binary")
+    set_targetdir("test")
+
+    add_linkdirs("./")
+
+    add_deps("mlx")
+
+    add_files("example/main.c")
+
+    add_defines("SDL_MAIN_HANDLED")
+
+    add_packages("libsdl")
+target_end()
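
The Test target above builds example/main.c against the freshly compiled library, and set_targetdir("test") drops the resulting binary into test/. For readers who have not used the library before, here is a minimal sketch of what such an example program might look like. This is an assumption rather than a copy of the real example/main.c: it relies on the classic MiniLibX-style entry points (mlx_init, mlx_new_window, mlx_loop, mlx_destroy_window, mlx_destroy_display) that MacroLibX reimplements, and the authoritative signatures live in includes/mlx.h.

/* Hypothetical example/main.c, a sketch only; check includes/mlx.h for the real API. */
#include "mlx.h"

int main(void)
{
    void* mlx = mlx_init();                              /* initialize the library and its renderer */
    void* win = mlx_new_window(mlx, 400, 400, "Hello");  /* open a 400x400 window */

    mlx_loop(mlx);                                       /* run the event loop until the window closes */

    mlx_destroy_window(mlx, win);                        /* release resources in reverse order */
    mlx_destroy_display(mlx);
    return 0;
}

Such a binary is also the natural companion of the valgrind.supp file added earlier: invoking it along the lines of valgrind --suppressions=valgrind.supp ./test/Test (assuming the Test target's output name and location) silences the leak and addressing reports that originate inside SDL, X11, the dynamic loader, or the GPU driver, so that only issues in libmlx itself, or in your own code, surface.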